hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
90c1b22f3826d40d5a8ce3040c417f17a6d33b58 | 1,247 | py | Python | digraph/source/generator.py | addy1997/python-RRT | 93983e17f2e6e93ff79c8f04a86ce28718ba2779 | [
"MIT"
] | 11 | 2020-05-28T00:55:55.000Z | 2022-01-03T10:59:26.000Z | digraph/source/generator.py | addy1997/Internship-HTIC | 93983e17f2e6e93ff79c8f04a86ce28718ba2779 | [
"MIT"
] | 1 | 2020-10-19T16:20:30.000Z | 2021-03-22T19:01:14.000Z | digraph/source/generator.py | addy1997/Internship-HTIC | 93983e17f2e6e93ff79c8f04a86ce28718ba2779 | [
"MIT"
] | 2 | 2021-07-07T01:09:50.000Z | 2022-03-12T23:40:56.000Z | #!/usr/bin/env python
# coding: utf-8
# In[2]:
from time import time
import networkx as nx
from source.DiGraph import DiGraph
from source.model.Edge import Edge
# In[ ]:
| 18.893939 | 51 | 0.572574 |
90c3c10d1bd2a3bf341092a750b3ced35a3d1342 | 451 | py | Python | [1] BEGINNER/1000 - Hello World!.py | tiago040/URI-SOLUTIONS | 519d3950252a6002e8926416b2f8217ba08fe721 | [
"MIT"
] | 1 | 2022-03-15T03:03:26.000Z | 2022-03-15T03:03:26.000Z | [1] BEGINNER/1000 - Hello World!.py | tiago040/URI-SOLUTIONS | 519d3950252a6002e8926416b2f8217ba08fe721 | [
"MIT"
] | null | null | null | [1] BEGINNER/1000 - Hello World!.py | tiago040/URI-SOLUTIONS | 519d3950252a6002e8926416b2f8217ba08fe721 | [
"MIT"
] | null | null | null | '''
https://resources.urionlinejudge.com.br/gallery/images/problems/UOJ_1000.png
Bem-vindo ao URI Online Judge!
O seu primeiro programa em qualquer linguagem de programao normalmente o "Hello World!". Neste primeiro problema tudo o que voc precisa fazer imprimir esta mensagem na tela.
Entrada
Este problema no possui nenhuma entrada.
Sada
Voc deve imprimir a mensagem "Hello World!" conforme o exemplo abaixo.
'''
print('Hello World!') | 30.066667 | 180 | 0.78714 |
90c4143d466cb79c3fc483f709341546a5611433 | 615 | py | Python | py/py_0668_square_root_smooth_numbers.py | lcsm29/project-euler | fab794ece5aa7a11fc7c2177f26250f40a5b1447 | [
"MIT"
] | null | null | null | py/py_0668_square_root_smooth_numbers.py | lcsm29/project-euler | fab794ece5aa7a11fc7c2177f26250f40a5b1447 | [
"MIT"
] | null | null | null | py/py_0668_square_root_smooth_numbers.py | lcsm29/project-euler | fab794ece5aa7a11fc7c2177f26250f40a5b1447 | [
"MIT"
] | null | null | null | # Solution of;
# Project Euler Problem 668: Square root smooth Numbers
# https://projecteuler.net/problem=668
#
# A positive integer is called square root smooth if all of its prime factors
# are strictly less than its square root. Including the number $1$, there are
# $29$ square root smooth numbers not exceeding $100$. How many square root
# smooth numbers are there not exceeding $10\,000\,000\,000$?
#
# by lcsm29 http://github.com/lcsm29/project-euler
import timed
if __name__ == '__main__':
n = 1000
i = 10000
prob_id = 668
timed.caller(dummy, n, i, prob_id)
| 26.73913 | 78 | 0.705691 |
90c531d029592c14df121556138eab86864faa16 | 2,927 | py | Python | user/forms.py | Zidan-Kharisma-Sakana/uts-f02 | d29652cb73829ffa63e0ca4d0e5f8d6d62500367 | [
"Unlicense"
] | null | null | null | user/forms.py | Zidan-Kharisma-Sakana/uts-f02 | d29652cb73829ffa63e0ca4d0e5f8d6d62500367 | [
"Unlicense"
] | null | null | null | user/forms.py | Zidan-Kharisma-Sakana/uts-f02 | d29652cb73829ffa63e0ca4d0e5f8d6d62500367 | [
"Unlicense"
] | null | null | null | from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.contrib.auth.forms import AuthenticationForm, UserCreationForm
from django.forms import ValidationError, EmailField
from user import models
| 32.522222 | 98 | 0.585924 |
90c5a20a145e1728ffe9af8247c9eff4f8b98279 | 3,476 | py | Python | data-structures-and-algorithms/examples/binary_tree_recursive.py | vinnyhoward/til | 8bb0055578220c4d83ddd12ac34bbb2fd3ae000e | [
"MIT"
] | null | null | null | data-structures-and-algorithms/examples/binary_tree_recursive.py | vinnyhoward/til | 8bb0055578220c4d83ddd12ac34bbb2fd3ae000e | [
"MIT"
] | 32 | 2020-07-16T07:11:35.000Z | 2022-02-27T19:01:03.000Z | data-structures-and-algorithms/examples/binary_tree_recursive.py | vinnyhoward/til | 8bb0055578220c4d83ddd12ac34bbb2fd3ae000e | [
"MIT"
] | null | null | null |
"""Try doing Post-Order tomorrow"""
# Visualization of Current Tree
# Pre-Order Output: 1--2--4--9--10--11--5--3--6--7--8--
# In-Order Output: 11--10--9--4--2--5--1--6--3--7--8--
# Pre-Order Output: 11--10--9--4--5--2--6--8--7--3--1--
# 1
# / \
# 2 3
# / | / |
# 4 5 6 7
# / \
# 9 8
# /
# 10
# /
# 11
# Tree Set-Up
# Another implementation
# class BinaryTree(object):
# def __init__(self, root):
# self.root = Node(root)
# def search(self, find_val):
# return self.preorder_search(tree.root, find_val)
# def print_tree(self):
# return self.preorder_print(tree.root, "")[:-1]
# def preorder_search(self, start, find_val):
# if start:
# if start.value == find_val:
# return True
# else:
# return self.preorder_search(start.left, find_val) or self.preorder_search(start.right, find_val)
# return False
# def preorder_print(self, start, traversal):
# if start:
# traversal += (str(start.value) + "-")
# traversal = self.preorder_print(start.left, traversal)
# traversal = self.preorder_print(start.right, traversal)
# return traversal
tree = BinaryTree(1)
tree.root.left = Node(2)
tree.root.right = Node(3)
tree.root.left.left = Node(4)
tree.root.left.right = Node(5)
tree.root.right.left = Node(6)
tree.root.right.right = Node(7)
tree.root.right.right.right = Node(8)
tree.root.left.left.left = Node(9)
tree.root.left.left.left.left = Node(10)
tree.root.left.left.left.left.left = Node(11)
# print(tree.print_tree("preorder"))
# print(tree.print_tree("inorder"))
print(tree.print_tree("postorder"))
| 31.315315 | 114 | 0.585155 |
90c608f7d84094a6c38930b235bc4cc2b22ca8af | 2,044 | py | Python | powerranger/files.py | clayboone/powerranger | 09315c8b37132add56ce31f1b0c1dd0b1692bd23 | [
"MIT"
] | null | null | null | powerranger/files.py | clayboone/powerranger | 09315c8b37132add56ce31f1b0c1dd0b1692bd23 | [
"MIT"
] | 8 | 2020-04-18T20:20:08.000Z | 2020-05-06T13:39:03.000Z | powerranger/files.py | clayboone/powerranger | 09315c8b37132add56ce31f1b0c1dd0b1692bd23 | [
"MIT"
] | null | null | null | import curses
import itertools
import os
from pathlib import Path
import stat
from typing import Optional, Union
import config
from colors import Colors
class Directory:
"""A list of items inside of a directory."""
| 27.253333 | 98 | 0.620841 |
90c62d1dcdf46b749f960d31f0195049399228f2 | 12,647 | py | Python | parlai/mturk/webapp/run_mocks/mock_turk_manager.py | lizekang/ParlAI | 9e113105d1690deda0b372f6aeaf933ab58e3a72 | [
"BSD-3-Clause"
] | 1 | 2019-04-19T06:39:41.000Z | 2019-04-19T06:39:41.000Z | parlai/mturk/webapp/run_mocks/mock_turk_manager.py | lizekang/ParlAI | 9e113105d1690deda0b372f6aeaf933ab58e3a72 | [
"BSD-3-Clause"
] | null | null | null | parlai/mturk/webapp/run_mocks/mock_turk_manager.py | lizekang/ParlAI | 9e113105d1690deda0b372f6aeaf933ab58e3a72 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
import logging
import os
import threading
import time
import uuid
from parlai.mturk.core.agents import AssignState
from parlai.mturk.core.socket_manager import Packet
from parlai.mturk.webapp.run_mocks.mock_turk_agent import MockTurkAgent
import parlai.mturk.core.data_model as data_model
import parlai.mturk.core.shared_utils as shared_utils
parent_dir = os.path.dirname(os.path.abspath(__file__))
| 39.276398 | 79 | 0.633826 |
90c6e79ecc4dd80005f6bae2d4343c17f1c96324 | 6,517 | py | Python | scripts/train_model.py | allenai/sledgehammer | 03982da9cd0c543a76832a903033c2d97dbfb113 | [
"Apache-2.0"
] | 47 | 2020-04-14T18:10:05.000Z | 2022-02-16T05:17:03.000Z | scripts/train_model.py | yardenTal1/sledgehammer | 842a48023451a81c83cdf29cdd7c601b1114a207 | [
"Apache-2.0"
] | 1 | 2020-05-04T19:35:06.000Z | 2020-06-08T02:53:53.000Z | scripts/train_model.py | yardenTal1/sledgehammer | 842a48023451a81c83cdf29cdd7c601b1114a207 | [
"Apache-2.0"
] | 9 | 2020-04-12T17:31:12.000Z | 2022-03-19T21:23:59.000Z | #!/usr/bin/env python
import sys
import os
import random
import copy
import subprocess
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
# PYTHON_DIR="/".join(os.environ['CONDA_EXE'].split("/")[:-2])+'/envs/allennlp_0.8.4/bin/'
exit_threshold=0.9
def arg_parser():
"""Extracting CLI arguments"""
p = ArgumentParser(add_help=False)
p.add_argument("-b", "--batch_size", help="Batch size", type=int, default=72)
p.add_argument("-s", "--start",
help="First experiment index to run",
type=int, default=0)
p.add_argument("-t", "--bert_type", help="Bert type (bert-{base,large}-{cased,uncased})", type=str,
default='bert-large-uncased')
p.add_argument("-n", "--n_tests", help="Number of grid search experiments to run", type=int, default=1)
p.add_argument("-x", "--max_pieces", help="Maximum number of word pieces for BERT", type=int, default=512)
p.add_argument("-c", "--num_epochs", help="Number of epochs to run", type=int, default=2)
p.add_argument("-l", "--layer_indices", help="Indices of layers to train classifiers for", type=str, default="23")
p.add_argument("-d", "--dataset", help="Dataset to work with", required=True)
p.add_argument("-i", "--nli", help="Is this an NLI experiment? (if not, it's text_cat)", action='store_true')
p.add_argument("-r", "--slurm", help="Run jobs on SLURM using this server", type=str)
p.add_argument('-w', '--work_dir', help="Working directory. Should contain a directory for the bert_type, which contains another directory for the dataset", type=str, default="")
p.add_argument('--data_dir', help="Dataset directory. Should contain 'text_cat' and/or 'nli' folders, containing a directory for the dataset, which contains three files: train, dev and test", type=str, required=True)
p.add_argument("-u", "--cuda_device", help="CUDA device (or -1 for CPU)", type=int, default=0)
return ArgumentParser(description=__doc__,
formatter_class=ArgumentDefaultsHelpFormatter,
parents=[p])
if __name__ == '__main__':
sys.exit(main())
| 45.894366 | 570 | 0.603038 |
90c797e8f34b8974fc670dfa1133ddb2a05d7a20 | 918 | py | Python | utils/gather_files.py | letsgo247/KFG | dd055fc8349a9ea95497d04ecc4b52c04e09debc | [
"MIT"
] | null | null | null | utils/gather_files.py | letsgo247/KFG | dd055fc8349a9ea95497d04ecc4b52c04e09debc | [
"MIT"
] | null | null | null | utils/gather_files.py | letsgo247/KFG | dd055fc8349a9ea95497d04ecc4b52c04e09debc | [
"MIT"
] | null | null | null | import os
import shutil
import time
src_path = "C:\dev\KFG\Data/Korean/AFAD/AFAD-Lite" #
new_path = "C:\dev\KFG\Data/Korean/AFAD/AFAD_gathered" #
start_time = time.time() #
file_list = read_all_file(src_path)
copy_all_file(file_list, new_path)
print("=" * 40)
print(" : {}".format(time.time() - start_time)) # | 27 | 65 | 0.604575 |
90c806da308d72b961e15453707e27dd9643ad2b | 1,897 | py | Python | code/diva_evaluation_cli/bin/commands/actev_get_system_subcommands/git_command.py | wenhel/Argus | 39768a8d1671eb80f86bbd67e58478a4cbdcdeca | [
"MIT"
] | 4 | 2019-06-28T23:27:43.000Z | 2021-09-27T03:17:58.000Z | code/diva_evaluation_cli/bin/commands/actev_get_system_subcommands/git_command.py | wenhel/Argus | 39768a8d1671eb80f86bbd67e58478a4cbdcdeca | [
"MIT"
] | 2 | 2020-01-16T19:39:44.000Z | 2021-02-24T22:45:37.000Z | code/diva_evaluation_cli/bin/commands/actev_get_system_subcommands/git_command.py | wenhel/Argus | 39768a8d1671eb80f86bbd67e58478a4cbdcdeca | [
"MIT"
] | 1 | 2019-09-09T07:40:45.000Z | 2019-09-09T07:40:45.000Z | """Actev module: get-system git
Actev modules are used to parse actev commands in order to get arguments
before calling associated entry point methods to execute systems.
Warning: this file should not be modified: see src/entry_points to add your source code.
"""
from diva_evaluation_cli.bin.commands.actev_command import ActevCommand
| 43.113636 | 111 | 0.641012 |
90c8c7759fc79bbde613e0b6f813d1b176c1ecaf | 902 | py | Python | 1.8.first-promise.py | senpl/course-promises | df4b64a02d0a7ab4bb372c0fae5e92c98830cd44 | [
"MIT"
] | 3 | 2017-02-05T03:17:13.000Z | 2019-03-11T19:44:34.000Z | 1.8.first-promise.py | senpl/course-promises | df4b64a02d0a7ab4bb372c0fae5e92c98830cd44 | [
"MIT"
] | null | null | null | 1.8.first-promise.py | senpl/course-promises | df4b64a02d0a7ab4bb372c0fae5e92c98830cd44 | [
"MIT"
] | 7 | 2016-06-19T02:24:09.000Z | 2018-09-11T01:32:02.000Z | import re
textinput = widget_inputs["text1"]
comments = []
is_correct = False
result = re.match(".*window.*", textinput, flags=re.IGNORECASE)
if result:
is_correct = True
commentizer("You're right, but there's a little more to it than that. Make sure you watch the solution video.")
result = re.match(".*global.*", textinput, flags=re.IGNORECASE)
if result:
is_correct = True
commentizer("Right! It's the global object.")
result = re.match(".*promise.*", textinput, flags=re.IGNORECASE)
if result:
is_correct = False
commentizer("It's not the Promise. Take another look!")
if not is_correct and len(comments) == 0:
commentizer("Not quite. Just log `this` somewhere in the Promise to see what happens.")
grade_result["comment"] = "\n\n".join(comments)
grade_result["correct"] = is_correct | 29.096774 | 115 | 0.701774 |
90c8e1c41e29404a0d0d0511d6b46db43890fb89 | 4,106 | py | Python | test/test_markdown_parser.py | Asana/SGTM | 0e9e236980ed68e80e021470da6374945bbac501 | [
"MIT"
] | 8 | 2020-12-05T00:13:03.000Z | 2022-01-11T11:35:51.000Z | test/test_markdown_parser.py | Asana/SGTM | 0e9e236980ed68e80e021470da6374945bbac501 | [
"MIT"
] | 12 | 2020-12-14T18:21:21.000Z | 2022-03-29T17:06:20.000Z | test/test_markdown_parser.py | Asana/SGTM | 0e9e236980ed68e80e021470da6374945bbac501 | [
"MIT"
] | 2 | 2021-06-27T09:32:55.000Z | 2022-02-27T23:17:36.000Z | import unittest
from html import escape
from src.markdown_parser import convert_github_markdown_to_asana_xml
if __name__ == "__main__":
from unittest import main as run_tests
run_tests()
| 33.933884 | 115 | 0.585485 |
90cb7a749d3451fbf2614d7b4cc6d0e278b4fb6b | 3,724 | py | Python | ansible/my_env/lib/python2.7/site-packages/ansible/modules/network/ftd/ftd_file_upload.py | otus-devops-2019-02/yyashkin_infra | 0cd0c003884155ac922e3e301305ac202de7028c | [
"MIT"
] | 1 | 2019-04-16T21:23:15.000Z | 2019-04-16T21:23:15.000Z | ansible/my_env/lib/python2.7/site-packages/ansible/modules/network/ftd/ftd_file_upload.py | otus-devops-2019-02/yyashkin_infra | 0cd0c003884155ac922e3e301305ac202de7028c | [
"MIT"
] | 5 | 2020-02-26T20:10:50.000Z | 2021-09-23T23:23:18.000Z | ansible/my_env/lib/python2.7/site-packages/ansible/modules/network/ftd/ftd_file_upload.py | otus-devops-2019-02/yyashkin_infra | 0cd0c003884155ac922e3e301305ac202de7028c | [
"MIT"
] | null | null | null | #!/usr/bin/python
# Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: ftd_file_upload
short_description: Uploads files to Cisco FTD devices over HTTP(S)
description:
- Uploads files to Cisco FTD devices including disk files, backups, and upgrades.
version_added: "2.7"
author: "Cisco Systems, Inc."
options:
operation:
description:
- The name of the operation to execute.
- Only operations that upload file can be used in this module.
required: true
type: str
file_to_upload:
description:
- Absolute path to the file that should be uploaded.
required: true
type: path
version_added: "2.8"
register_as:
description:
- Specifies Ansible fact name that is used to register received response from the FTD device.
type: str
"""
EXAMPLES = """
- name: Upload disk file
ftd_file_upload:
operation: 'postuploaddiskfile'
file_to_upload: /tmp/test1.txt
"""
RETURN = """
msg:
description: The error message describing why the module failed.
returned: error
type: string
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.ftd.common import construct_ansible_facts, FtdServerError, HTTPMethod
from ansible.module_utils.network.ftd.fdm_swagger_client import OperationField
if __name__ == '__main__':
main()
| 33.854545 | 117 | 0.705961 |
90cbba8be9d213494b028b083d0b0f3629b1868d | 1,244 | py | Python | agent/indy_catalyst_agent/messaging/trustping/routes.py | nairobi222/indy-catalyst | dcbd80524ace7747ecfecd716ff932e9b571d69a | [
"Apache-2.0"
] | 2 | 2019-08-16T11:21:10.000Z | 2022-01-03T21:10:59.000Z | agent/indy_catalyst_agent/messaging/trustping/routes.py | nairobi222/indy-catalyst | dcbd80524ace7747ecfecd716ff932e9b571d69a | [
"Apache-2.0"
] | null | null | null | agent/indy_catalyst_agent/messaging/trustping/routes.py | nairobi222/indy-catalyst | dcbd80524ace7747ecfecd716ff932e9b571d69a | [
"Apache-2.0"
] | null | null | null | """Trust ping admin routes."""
from aiohttp import web
from aiohttp_apispec import docs
from ..connections.models.connection_record import ConnectionRecord
from .messages.ping import Ping
from ...storage.error import StorageNotFoundError
async def register(app: web.Application):
"""Register routes."""
app.add_routes([web.post("/connections/{id}/send-ping", connections_send_ping)])
| 29.619048 | 84 | 0.730707 |
90cceb9760edd56688a99f6ee73a68bdf983a84e | 619 | py | Python | python/wotdbg.py | wanyancan/wot-debugserver | 4c6dd5c511659885abb355bef7c9318ed3e42937 | [
"MIT"
] | 32 | 2015-01-23T16:13:20.000Z | 2021-05-29T21:11:42.000Z | python/wotdbg.py | wanyancan/wot-debugserver | 4c6dd5c511659885abb355bef7c9318ed3e42937 | [
"MIT"
] | null | null | null | python/wotdbg.py | wanyancan/wot-debugserver | 4c6dd5c511659885abb355bef7c9318ed3e42937 | [
"MIT"
] | 16 | 2015-08-25T08:02:52.000Z | 2022-01-19T19:16:16.000Z | import os.path
import tcprepl
import BigWorld
def echo(s):
'''Send string to client'''
if tcprepl.write_client is not None:
tcprepl.write_client(s)
def exec_file(filename, exec_globals=None):
'''
Execute file
Try to find file named `filename` and execute it. If `exec_globals` is
specified it is used as globals-dict in exec context.
'''
if exec_globals is None:
exec_globals = {}
if not os.path.isfile(filename):
filename = BigWorld.wg_resolveFileName(filename)
with open(filename, 'r') as f:
code = f.read()
exec code in exec_globals
| 22.107143 | 74 | 0.660743 |
90d0a5da5e0a56dfea4ae7b895d09b65486f6dc4 | 1,861 | py | Python | folderikon/exceptions.py | demberto/FolderIkon | 62088b3e46266a6a53632a1e85bdb3d0a46e8cab | [
"MIT"
] | 1 | 2021-10-30T17:49:28.000Z | 2021-10-30T17:49:28.000Z | folderikon/exceptions.py | demberto/FolderIkon | 62088b3e46266a6a53632a1e85bdb3d0a46e8cab | [
"MIT"
] | null | null | null | folderikon/exceptions.py | demberto/FolderIkon | 62088b3e46266a6a53632a1e85bdb3d0a46e8cab | [
"MIT"
] | null | null | null | """Exception which are not actually thrown, only their docstrings are used."""
import colorama
import sys
__all__ = [
"Error",
"ParentIsNotAFolderError",
"InvalidURLError",
"ImageFormatNotSupportedError",
"ImageNotSpecifiedError",
"FolderIconAlreadyExistsError",
"DesktopIniError",
"exception_exit",
]
def exception_exit(exc):
print(repr(exc()))
sys.exit(-1)
| 22.975309 | 108 | 0.652875 |
90d0ff663bec28d2147cf9908b2baeccc98682a7 | 1,224 | py | Python | quickbooks/objects/companycurrency.py | varunbheemaiah/python-quickbooks | f5459a07619ae220e24099c6e0c8e8db890bb66b | [
"MIT"
] | 234 | 2015-08-25T02:41:33.000Z | 2020-03-30T15:30:23.000Z | quickbooks/objects/companycurrency.py | varunbheemaiah/python-quickbooks | f5459a07619ae220e24099c6e0c8e8db890bb66b | [
"MIT"
] | 170 | 2015-09-12T07:02:32.000Z | 2020-03-20T13:34:34.000Z | quickbooks/objects/companycurrency.py | varunbheemaiah/python-quickbooks | f5459a07619ae220e24099c6e0c8e8db890bb66b | [
"MIT"
] | 142 | 2015-08-26T07:08:56.000Z | 2020-03-20T11:59:52.000Z | from six import python_2_unicode_compatible
from .base import QuickbooksManagedObject, QuickbooksTransactionEntity, Ref, CustomField, MetaData
| 28.465116 | 107 | 0.688725 |
90d1622a98abf59a298f4b58a00b76f812b4c744 | 604 | py | Python | books/migrations/0004_alter_book_category.py | MwinyiMoha/books-service | 31a980a8505c5d5c2acad698bb493fad8c0ce8fe | [
"MIT"
] | null | null | null | books/migrations/0004_alter_book_category.py | MwinyiMoha/books-service | 31a980a8505c5d5c2acad698bb493fad8c0ce8fe | [
"MIT"
] | 3 | 2021-04-08T17:44:07.000Z | 2021-04-12T09:38:26.000Z | books/migrations/0004_alter_book_category.py | MwinyiMoha/books-service | 31a980a8505c5d5c2acad698bb493fad8c0ce8fe | [
"MIT"
] | null | null | null | # Generated by Django 3.2 on 2021-04-10 12:34
from django.db import migrations, models
| 24.16 | 57 | 0.47351 |
90d18046bd7a28075eedb453f3f4d1aabe4f7e65 | 16,575 | py | Python | syft/execution/placeholder.py | juharris/PySyft | dbb70f24cc55a7dca032fb06f1a8662cb15092a9 | [
"Apache-2.0"
] | null | null | null | syft/execution/placeholder.py | juharris/PySyft | dbb70f24cc55a7dca032fb06f1a8662cb15092a9 | [
"Apache-2.0"
] | null | null | null | syft/execution/placeholder.py | juharris/PySyft | dbb70f24cc55a7dca032fb06f1a8662cb15092a9 | [
"Apache-2.0"
] | null | null | null | from itertools import zip_longest
import syft
from syft.generic.frameworks.hook import hook_args
from syft.generic.abstract.tensor import AbstractTensor
from syft.workers.abstract import AbstractWorker
from syft_proto.execution.v1.placeholder_pb2 import Placeholder as PlaceholderPB
def instantiate(self, tensor):
"""
Add a tensor as a child attribute. All operations on the placeholder will be also
executed on this child tensor.
We remove Placeholders if is there are any.
"""
if isinstance(tensor, PlaceHolder):
self.child = tensor.child
else:
self.child = tensor
if hasattr(self.child, "shape"):
self.expected_shape = tuple(self.child.shape)
if hasattr(self.child, "dtype"):
self.expected_dtype = self.child.dtype
return self
def __str__(self) -> str:
"""
Compact representation of a Placeholder, including tags and optional child
"""
tags = " ".join(list(self.tags or []))
out = f"{type(self).__name__ }[Id:{self.id.value}]"
if hasattr(self, "child") and self.child is not None:
out += f">{self.child}"
return out
__repr__ = __str__
def send(self, *args, **kwargs):
"""
calls move on child & register_action to role
"""
response = self.child.send(*args, **kwargs)
placeholder = PlaceHolder.convert_to_placeholders(response, self)
command = ("send", self, args, kwargs)
self.role.register_action(
(command, placeholder), syft.execution.communication.CommunicationAction
)
return placeholder
def move(self, *args, **kwargs):
"""
calls move on a pointer tensor & register_action to role
"""
response = self.child.move(*args, **kwargs)
placeholder = PlaceHolder.convert_to_placeholders(response, self)
command = ("move", self, args, kwargs)
self.role.register_action(
(command, placeholder), syft.execution.communication.CommunicationAction
)
return placeholder
def share(self, *args, **kwargs):
"""
Send a command to remote worker to additively share a tensor via pointer tensor
"""
response = self.child.share(*args, **kwargs)
placeholder = PlaceHolder.convert_to_placeholders(response, self)
command = ("share", self, args, kwargs)
self.role.register_action(
(command, placeholder), syft.execution.communication.CommunicationAction
)
return placeholder
def fix_prec(self, *args, **kwargs):
"""
sends command to remote worker to transform a tensor to fix_precision via pointer tensor
"""
response = self.child.fix_prec(*args, **kwargs)
placeholder = PlaceHolder.convert_to_placeholders(response, self)
command = ("fix_prec", self, args, kwargs)
self.role.register_action(
(command, placeholder), syft.execution.computation.ComputationAction
)
return placeholder
def remote_get(self, *args, **kwargs):
"""
calls remote_get on child & register_action to role
"""
response = self.child.remote_get(*args, **kwargs)
placeholder = PlaceHolder.convert_to_placeholders(response, self)
command = ("remote_get", self, args, kwargs)
self.role.register_action(
(command, placeholder), syft.execution.communication.CommunicationAction
)
return placeholder
def remote_send(self, *args, **kwargs):
"""
calls remote_send on child & register_action to role
"""
response = self.child.remote_send(*args, **kwargs)
placeholder = PlaceHolder.convert_to_placeholders(response, self)
command = ("remote_send", self, args, kwargs)
self.role.register_action(
(command, placeholder), syft.execution.communication.CommunicationAction
)
return placeholder
def share_(self, *args, **kwargs):
"""
calls share_ on child & register_action to role
"""
response = self.child.share_(*args, **kwargs)
placeholder = PlaceHolder.convert_to_placeholders(response, self)
command = ("share_", self, args, kwargs)
self.role.register_action(
(command, placeholder), syft.execution.communication.CommunicationAction
)
return placeholder
def get(self, *args, **kwargs):
"""Requests the tensor/chain being pointed to, be serialized and return via child"""
response = self.child.get(*args, **kwargs)
placeholder = PlaceHolder.convert_to_placeholders(response, self)
command = ("get", self, args, kwargs)
self.role.register_action(
(command, placeholder), syft.execution.communication.CommunicationAction
)
return placeholder
def copy(self):
"""
Copying a placeholder doesn't duplicate the child attribute, because all
copy operations happen locally where we want to keep reference to the same
instantiated object. As the child doesn't get sent, this is not an issue.
"""
placeholder = PlaceHolder(
role=self.role,
tracing=self.tracing,
tags=self.tags,
shape=self.expected_shape,
expected_dtype=self.expected_dtype,
)
placeholder.child = self.child
if self.tracing:
command = ("copy", self, (), {}), placeholder
self.role.register_action(command, syft.execution.computation.ComputationAction)
return placeholder
### Register the tensor with hook_args.py ###
hook_args.default_register_tensor(PlaceHolder)
| 37.247191 | 98 | 0.640422 |
90d1a9736c8c20db26fd37eb2b021b1ef2da5b41 | 21,547 | py | Python | creativeflow/blender/render_main.py | idaho777/creativeflow | adf7a9e1cf70005560cfbf8064137fb1236bc574 | [
"MIT"
] | 53 | 2019-10-02T17:10:49.000Z | 2022-03-04T17:35:26.000Z | creativeflow/blender/render_main.py | idaho777/creativeflow | adf7a9e1cf70005560cfbf8064137fb1236bc574 | [
"MIT"
] | 1 | 2021-09-24T01:00:54.000Z | 2021-09-24T03:06:43.000Z | creativeflow/blender/render_main.py | idaho777/creativeflow | adf7a9e1cf70005560cfbf8064137fb1236bc574 | [
"MIT"
] | 6 | 2019-11-26T11:44:45.000Z | 2021-11-11T22:23:50.000Z | """
MAIN STYLING AND RENDERING FILE
Requirements:
------------------------------------------------------------------------------
IMPORTANT! This has only been tested with Blender 2.79 API. We have run this
on Linux and MacOS.
Execution:
------------------------------------------------------------------------------
This script is intended to run inside blender launched in background mode.
Sample invocation is:
blender --background --factory-startup --python-exit-code 1 PATH_TO_MY_BLEND.blend \
--python blender/render_main.py -- \
--width=500 <ANY OTHER PYTHON FLAGS FROM render_main.py>
'--factory-startup' is used to prevent custom settings from interfering.
'--python-exit-code 1' makes blender exit with code 1 if this script throws an error
'--' causes blender to ignore all following arguments so python can use them.
See blender --help for details. See pipeline.sh for sample usage.
Capabilities:
------------------------------------------------------------------------------
It is assumed that blender is invoked with a single blend. This script is a
jack-of-all-trades for setting up camera, lighting, styling, and rendering for
a custom stylized animation benchmark. We found it easier to run the script
separately for each phase of data processing (see pipeline.sh),
as this way the output can be easily examined for problems after every stage.
However, one-shot execution should also be possible.
See flags below for full capabilities. The trickiest bit is: different metadata
only works with particular render engine option. The script will raise errors
if incorrect engine is specified:
- Vertex paint for correspondences - blender render (no gamma correction!)
- Normals in camera space - blender render (no gamma correction!)
- Flow vector pass - cycles (blender render is buggy)
- Red stylit reference material - cycles
- Env lighting for mixamo models - blender render only
"""
import bpy
import argparse
import logging
import os
import random
import sys
import time
import traceback
# Add to path to make sure we can import modules while running inside Blender.
__sdir = os.path.dirname(os.path.realpath(__file__))
if __sdir not in sys.path:
sys.path.append(__sdir)
import color_util
import geo_util
import io_util
import render_util
import stylit_util
LOG = logging.getLogger(__name__)
if __name__ == "__main__":
    try:
        # Everything runs inside one try so any failure is logged via
        # LOG.critical before re-raising (blender's --python-exit-code 1
        # then turns the exception into a non-zero exit status).
        # FLAGS
        # --------------------------------------------------------------------------
        parser = argparse.ArgumentParser(
            description='Configurable utility to modify blend and/or render images/flow/metadata.')
        parser.add_argument(
            '--random_seed', action='store', type=int, default=-1,
            help='Integer seed for random number generator; used if > 0.')
        # Rendering ----------------------------------------------------------------
        parser.add_argument(
            '--width', action='store', type=int, default=1500,
            help='Width to render at.')
        parser.add_argument(
            '--height', action='store', type=int, default=1500,
            help='Height to render at.')
        parser.add_argument(
            '--quality_samples', action='store', type=int, default=-1,
            help='If positive and using cycles, will use this many samples per pixel; ' +
            'e.g. 128 is slow, 10 is comparatively fast.')
        parser.add_argument(
            '--start_frame', action='store', type=int, default=0,
            help='Frame to start rendering at (relative to first frame).')
        parser.add_argument(
            '--rendered_frames', action='store', type=int, default=0,
            help='Maximum frames to render; 0 for none; -1 for all.')
        parser.add_argument(
            '--skip_existing_frames', action='store_true', default=False,
            help='If true, skips existing frames matching --frame_output_prefix.')
        parser.add_argument(
            '--use_cycles', action='store_true', default=False,
            help='If true, sets Cycles as the rendering engine, else leaves unchanged.')
        parser.add_argument(
            '--use_blender_render', action='store_true', default=False,
            help='If true, sets Blender Render as the rendering engine, else leaves unchanged.')
        # Outputs ------------------------------------------------------------------
        parser.add_argument(
            '--frame_output_prefix', action='store', type=str, default='',
            help='If set, will set image output to <frame_output_prefix><frame#>.PNG; ' +
            'should include full path.')
        parser.add_argument(
            '--render_metadata_exr', action='store_true', default=False,
            help='If true, renders all metadata passes as a multilayer EXR file.')
        parser.add_argument(
            '--objectids_key_file', action='store', type=str, default='',
            help='Directory to write objectids to, as images.')
        parser.add_argument(
            '--world_normals_output_dir', action='store', type=str, default='',
            help='Directory to write world space normals to, as images ' +
            '(only compatible with --use_cycles.')
        parser.add_argument(
            '--camera_normals_output_dir', action='store', type=str, default='',
            help='Directory to write camera space normals to, as images ' +
            '(only compatible with --use_blender_render.')
        parser.add_argument(
            '--enable_gamma_correction', action='store_true', default=False,
            help='We disable gamma correction by default, as it corrupts the ' +
            'metadata rendering; set this on to enable.')
        parser.add_argument(
            '--bg_name', action='store', type=str, default="STYMO_BG",
            help='If any object name matches this substring, it will be treated as ' +
            'background for the purpose of id labeling and stylit rendering.')
        parser.add_argument(
            '--output_blend', action='store', type=str, default='',
            help='If set, will output modified blend here (must be absolute path); ' +
            'if setting linestyle and/or material, will replace special substrings ' +
            '<M> and <L> with material and linestyle.')
        parser.add_argument(
            '--info_file', action='store', type=str, default='',
            help='If set, may output auxiliary information into this file.')
        # Camera -------------------------------------------------------------------
        parser.add_argument(
            '--set_camera', action='store', type=int, default=0,
            help='If >= 0, selects ith camera and deletes all other cameras; ' +
            'if i > num cameras, generates a random one instead.')
        parser.add_argument(
            '--keep_extra_cameras', action='store_true',
            help='If --set_camera, will not delete extra cameras.')
        parser.add_argument(
            '--add_random_camera_motion', action='store_true',
            help='If generating a random camera and this is true, creates zoom/flyaround/pan; '
            'WARNING: parameters are tuned for mixamo character blends.')
        # Animation range ----------------------------------------------------------
        parser.add_argument(
            '--offset_scene_start_frame_by', action='store', type=int, default=0,
            help='Unlike --start_frame, which just controls the rendering range, this ' +
            'flag offsets the current scene start frame in the timeline by the ' +
            'specified amount. Relevant to blends that do not begin at frame 0.')
        parser.add_argument(
            '--offset_scene_end_frame_by', action='store', type=int, default=0,
            help='Unlike --rendered_frames, which just controls the rendering range, this ' +
            'flag offsets the current scene end frame in the timeline by the ' +
            'specified amount. Relevant to blends that do not begin at frame 0.')
        # Lighting -----------------------------------------------------------------
        parser.add_argument(
            '--set_env_lighting_image', action='store', type=str, default='',
            help='Set to image path or directory of environment map images to set ' +
            'environment lighting; only works with --use_blender_render.')
        parser.add_argument(
            '--set_stylit_lighting', action='store_true',
            help='If true, sets consistent lighting to render input for stylit.')
        # Styles -------------------------------------------------------------------
        parser.add_argument(
            '--set_stylit_style', action='store_true',
            help='If true, sets red material style used for stylit style transfer.')
        parser.add_argument(
            '--set_corresp_style', action='store_true',
            help='If true, will set per-vertex materials to render correspondences.')
        parser.add_argument(
            '--set_objectids_style', action='store_true',
            help='If true, will set objectids to render using flat materials.')
        parser.add_argument(
            '--deterministic_objectid_colors', action='store_true',
            help='If true, objectid colors will not be shuffled; use for testing.')
        parser.add_argument(
            '--linestyles_blend', action='store', type=str, default='',
            help='Path to blend containing all the line styles.')
        parser.add_argument(
            '--set_linestyle_matching', action='store', type=str, default='',
            help='Regex matching linestyle(s) in --line_styles_blend; '
            'if more than one match, picks random one; '
            '"" for none; ".*" for all; "hi|bye" to match either.')
        parser.add_argument(
            '--randomize_line_color', action='store_true',
            help='If true, randomizes line color if line is set.')
        parser.add_argument(
            '--materials_blend', action='store', type=str, default='',
            help='Path to blend containing all the material styles (e.g. textured blender styles).')
        parser.add_argument(
            '--set_materials_matching', action='store', type=str, default='',
            help='Regex matching materials(s) in --materials_blend; '
            'if more than one match, picks random one; '
            '"" for none; ".*" for all; "hi|bye" to match either.')
        parser.add_argument(
            '--randomize_material_color', action='store_true',
            help='If true, randomizes material color if material is set.')
        # Custom color control
        parser.add_argument(
            '--material_color_choices', action='store', type=str, default='',
            help='String of format R,G,B R2,G2,B2 ... of colors to choose from if ' +
            'randomizing material colors.')
        parser.add_argument(
            '--line_hue_range', action='store', type=str, default='0,1.0',
            help='If --randomize_line_color, will keep HSV Hue in this range (two numbers,csv).')
        parser.add_argument(
            '--line_sat_range', action='store', type=str, default='0,1.0',
            help='If --randomize_line_color, will keep HSV Saturation in this range (two numbers,csv).')
        parser.add_argument(
            '--line_value_range', action='store', type=str, default='0,1.0',
            help='If --randomize_line_color, will keep HSV Value in this range (two numbers,csv).')
        # Parse only arguments after --
        # --------------------------------------------------------------------------
        # Blender consumes everything before '--'; this script parses the rest.
        argv = sys.argv
        if "--" not in argv:
            argv = [] # as if no args are passed
        else:
            argv = argv[argv.index("--") + 1:]
        args = parser.parse_args(argv)
        if args.random_seed > 0:
            print('Using --random_seed=%d as random seed.' % args.random_seed)
            random.seed(args.random_seed)
        else:
            print('Using time as random seed.')
            random.seed(time.time())
        render_util.print_blend_diagnostics()
        # Handle camera ------------------------------------------------------------
        # --set_camera < 0 means: keep the blend's single existing camera.
        if args.set_camera >= 0:
            cam = None
            if args.keep_extra_cameras:
                cam = geo_util.get_camera_by_number(args.set_camera)
            else:
                cam = geo_util.delete_all_but_one_camera(args.set_camera)
            if cam is None:
                print('Generating a random camera.')
                bbox = geo_util.get_scene_bbox()
                cam = geo_util.create_random_camera(bbox, 2.5, 2.5, 2.5)
                if args.add_random_camera_motion:
                    print('Adding motion to camera.')
                    geo_util.mixamo_add_random_camera_motion(cam)
            geo_util.disable_camera_depth_of_field(cam)
        else:
            cam = geo_util.get_single_camera_or_die()
        # Set active camera
        bpy.context.scene.camera = cam
        # Handle frame bounds ------------------------------------------------------
        # These offsets move the scene's own timeline, not the render range.
        orig_start = bpy.context.scene.frame_start
        bpy.context.scene.frame_start = orig_start + args.offset_scene_start_frame_by
        if args.offset_scene_end_frame_by > 0:
            bpy.context.scene.frame_end = orig_start + args.offset_scene_end_frame_by
        # Handle lighting ----------------------------------------------------------
        # NOTE(review): info_file is opened here but never explicitly closed;
        # it relies on interpreter exit to flush -- confirm that is acceptable.
        info_file = None
        if args.info_file:
            info_file = open(args.info_file, 'w')
        if len(args.set_env_lighting_image) > 0:
            if not args.use_blender_render:
                raise RuntimeError(
                    'Error: --set_env_lighting_image="img" only works with --use_blender_render')
            render_util.setup_realistic_lighting(args.set_env_lighting_image, 10.0, False)
        if args.set_stylit_lighting:
            if not args.use_cycles:
                raise RuntimeError(
                    'Error: --set_stylit_lighting only works with --use_cycles')
            stylit_util.setup_stylit_lighting()
        # Handle styles ------------------------------------------------------------
        # Count how many mutually-exclusive style modes were requested.
        nstyles = len([x for x in [args.set_stylit_lighting,
                                   args.set_corresp_style, args.set_objectids_style,
                                   (args.set_linestyle_matching or args.set_materials_matching)]
                       if x])
        if nstyles > 1:
            raise RuntimeError(
                'Error: incompatible rendering styles specified; only one of these can be true: ' +
                '--set_stylit_lighting OR ' +
                '--set_corresp_style OR --set_objectids_style OR ' +
                '(--set_linestyle_matching and/or --set_materials_matching)')
        linestyle_name = 'default'
        material_name = 'default'
        if args.set_stylit_style: # Red material used for stylit rendering
            if not args.use_cycles:
                raise RuntimeError(
                    'Error: --set_stylit_style only works with --use_cycles')
            render_util.clear_unnecessary_settings()
            stylit_util.setup_stylit_materials(bg_name=args.bg_name)
        elif args.set_corresp_style: # Per-vertex correspondence rendering
            if not args.use_blender_render:
                raise RuntimeError(
                    'Correspondence rendering (--set_corresp_style) only implemented for ' +
                    '--use_blender_render')
            render_util.clear_unnecessary_settings()
            render_util.set_correspondence_style()
        elif args.set_objectids_style: # Object Ids rendered in flat color
            if not args.use_blender_render:
                raise RuntimeError(
                    'Correspondence rendering (--set_objectids_style) only implemented for ' +
                    '--use_blender_render')
            render_util.clear_unnecessary_settings()
            idsinfo = render_util.set_objectids_style(
                bg_name=args.bg_name, deterministic=args.deterministic_objectid_colors)
            # Write the "object-name R G B" key so ids can be decoded later.
            if idsinfo and args.objectids_key_file:
                with open(os.path.join(args.objectids_key_file), 'w') as f:
                    for i in range(len(idsinfo)):
                        f.write('%s %d %d %d\n' %
                                (idsinfo[i][0], idsinfo[i][1][0],
                                 idsinfo[i][1][1], idsinfo[i][1][2]))
        elif args.set_linestyle_matching or args.set_materials_matching: # Freestyle / toon shading
            if not args.use_blender_render:
                raise RuntimeError(
                    'Linestyles and materials only implemented for --use_blender_render')
            render_util.clear_unnecessary_settings()
            if len(args.set_linestyle_matching) > 0:
                if len(args.linestyles_blend) == 0:
                    raise RuntimeError(
                        'Error: Must set --linestyles_blend with line exemplars ' +
                        'if requesting --set_linestyle_matching.')
                line_color = None
                if args.randomize_line_color:
                    line_color = color_util.get_random_color(
                        prob_dark=0.8,
                        bounds=color_util.parse_hsv_bounds(args.line_hue_range,
                                                           args.line_sat_range,
                                                           args.line_value_range))
                linestyle_name = render_util.set_linestyle(
                    args.linestyles_blend, args.set_linestyle_matching,
                    color=line_color)
                if info_file:
                    info_file.write('LINESTYLE %s\n' % io_util.strip_blender_name(linestyle_name))
            if len(args.set_materials_matching) > 0:
                if len(args.materials_blend) == 0:
                    raise RuntimeError(
                        'Error: Must set --materials_blend with material ' +
                        'exemplars if requesting --set_materials_matching.')
                mat_color_randomizer = None
                if args.randomize_material_color:
                    if args.material_color_choices:
                        mat_color_randomizer = color_util.make_color_getter(
                            args.material_color_choices)
                    else:
                        mat_color_randomizer = color_util.make_random_color_getter()
                material_name = render_util.set_materials(
                    args.materials_blend, args.set_materials_matching,
                    color_randomizer=mat_color_randomizer)
                if info_file:
                    info_file.write('MATSTYLE %s\n' % io_util.strip_blender_name(material_name))
        # Handle rendering settings ------------------------------------------------
        if args.use_cycles and args.use_blender_render:
            raise RuntimeError('Can specify only one of --use_cycles and --use_blender_render')
        if args.use_cycles or args.use_blender_render:
            nsamples = (args.quality_samples if args.quality_samples > 0 else None)
            render_util.set_render_settings(args.use_cycles, nsamples=nsamples,
                                            enable_gamma=args.enable_gamma_correction)
        if args.width > 0 and args.height > 0:
            render_util.set_width_height(args.width, args.height)
        if args.world_normals_output_dir or args.camera_normals_output_dir:
            if args.world_normals_output_dir and args.camera_normals_output_dir:
                raise RuntimeError('Only one type of normals can be output at once.')
            if args.world_normals_output_dir and not args.use_cycles:
                raise RuntimeError('World normals can only be output with --use_cycles.')
            elif args.camera_normals_output_dir and not args.use_blender_render:
                raise RuntimeError('Camera space normals can only be output with --use_blender_render.')
            render_util.init_normals_render_nodes(
                (args.world_normals_output_dir or args.camera_normals_output_dir),
                use_cycles=args.use_cycles)
        # Handle saving -------------------------------------------------------
        # pack_all embeds external resources so the saved blend is portable.
        if len(args.output_blend) > 0:
            bpy.ops.file.pack_all()
            args.output_blend = args.output_blend.replace('<M>', io_util.strip_blender_name(material_name))
            args.output_blend = args.output_blend.replace('<L>', io_util.strip_blender_name(linestyle_name))
            print('Saving blend to %s' % args.output_blend)
            geo_util.save_blend(args.output_blend)
        if args.rendered_frames != 0:
            if args.render_metadata_exr and not args.use_cycles:
                raise RuntimeError('Must set --use_cycles=True to render out flow with ' +
                                   '--render_metadata_exr')
            print('Rendering frames')
            render_util.render_animation(
                args.frame_output_prefix, args.rendered_frames,
                start_frame_offset=args.start_frame,
                render_exr=args.render_metadata_exr,
                skip_existing=args.skip_existing_frames)
    except Exception as e:
        tb = traceback.format_exc()
        LOG.critical(tb)
        LOG.critical('Script failed')
        raise e
    LOG.critical('Script completed')
| 49.194064 | 108 | 0.58616 |
90d3d04333f570bee6151ef38e0d8057f563ad15 | 5,004 | py | Python | atlas/foundations_contrib/src/test/archiving/test_artifact_downloader.py | DeepLearnI/atlas | 8aca652d7e647b4e88530b93e265b536de7055ed | [
"Apache-2.0"
] | 296 | 2020-03-16T19:55:00.000Z | 2022-01-10T19:46:05.000Z | atlas/foundations_contrib/src/test/archiving/test_artifact_downloader.py | DeepLearnI/atlas | 8aca652d7e647b4e88530b93e265b536de7055ed | [
"Apache-2.0"
] | 57 | 2020-03-17T11:15:57.000Z | 2021-07-10T14:42:27.000Z | atlas/foundations_contrib/src/test/archiving/test_artifact_downloader.py | DeepLearnI/atlas | 8aca652d7e647b4e88530b93e265b536de7055ed | [
"Apache-2.0"
] | 38 | 2020-03-17T21:06:05.000Z | 2022-02-08T03:19:34.000Z |
from foundations_spec import *
from unittest.mock import call | 42.769231 | 146 | 0.689249 |
90d3d4041471f93cb0f82d38f927e393e1e21818 | 34,846 | py | Python | lib/python3.5/functional/test/test_functional.py | mklan/NX-Rom-Market | 33613d2177b63df9e0568038ffdf1dd91ad334d8 | [
"MIT"
] | 21 | 2021-01-10T16:44:55.000Z | 2022-03-03T13:15:07.000Z | lib/python3.5/functional/test/test_functional.py | mklan/NX-Rom-Market | 33613d2177b63df9e0568038ffdf1dd91ad334d8 | [
"MIT"
] | 3 | 2021-01-10T15:38:50.000Z | 2021-04-29T09:45:47.000Z | lib/python3.5/functional/test/test_functional.py | mklan/NX-Rom-Market | 33613d2177b63df9e0568038ffdf1dd91ad334d8 | [
"MIT"
] | 1 | 2021-01-10T15:07:38.000Z | 2021-01-10T15:07:38.000Z | # pylint: skip-file
import unittest
import array
from collections import namedtuple
from itertools import product
from functional.pipeline import Sequence, is_iterable, _wrap, extend
from functional.transformations import name
from functional import seq, pseq
Data = namedtuple("Data", "x y")
| 34.229862 | 88 | 0.544711 |
90d42edecd2171b0bdded78b6fa56be73310676a | 9,412 | py | Python | tests/test_git_commit_one_file.py | mubashshirjamal/code | d9c7adf7efed8e9c1ab3ff8cdeb94e7eb1a45382 | [
"BSD-3-Clause"
] | 1,582 | 2015-01-05T02:41:44.000Z | 2022-03-30T20:03:22.000Z | tests/test_git_commit_one_file.py | mubashshirjamal/code | d9c7adf7efed8e9c1ab3ff8cdeb94e7eb1a45382 | [
"BSD-3-Clause"
] | 66 | 2015-01-23T07:58:04.000Z | 2021-11-12T02:23:27.000Z | tests/test_git_commit_one_file.py | mubashshirjamal/code | d9c7adf7efed8e9c1ab3ff8cdeb94e7eb1a45382 | [
"BSD-3-Clause"
] | 347 | 2015-01-05T07:47:07.000Z | 2021-09-20T21:22:32.000Z | # -*- coding: utf-8 -*-
import os
from vilya.models.project import CodeDoubanProject
from vilya.models import git
from tests.base import TestCase
from tests.utils import mkdtemp
from vilya.libs import gyt
from vilya.libs.permdir import get_repo_root
| 42.977169 | 95 | 0.595516 |
90d5aacb48382d69453f984a43223f2aa79a4272 | 11,704 | py | Python | py/desitarget/train/data_preparation/PredCountsFromQLF_ClassModule.py | echaussidon/desitarget | 1206380dac5155b9e7bf238c7cb187bc797d78a3 | [
"BSD-3-Clause"
] | 13 | 2016-02-02T00:26:21.000Z | 2022-01-14T07:31:59.000Z | py/desitarget/train/data_preparation/PredCountsFromQLF_ClassModule.py | echaussidon/desitarget | 1206380dac5155b9e7bf238c7cb187bc797d78a3 | [
"BSD-3-Clause"
] | 674 | 2015-09-15T15:02:06.000Z | 2022-02-23T18:39:02.000Z | py/desitarget/train/data_preparation/PredCountsFromQLF_ClassModule.py | echaussidon/desitarget | 1206380dac5155b9e7bf238c7cb187bc797d78a3 | [
"BSD-3-Clause"
] | 29 | 2015-06-09T13:51:48.000Z | 2021-06-05T06:03:18.000Z | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
import re
import numpy as np
from scipy.interpolate import interp2d
from scipy.interpolate import interp1d
| 38.5 | 114 | 0.605092 |
90d6fa60d16e379bff07b720da304a90340377ce | 1,135 | py | Python | safe_control_gym/controllers/__init__.py | gokhanalcan/safe-control-gym | e9086e102663a60a66f2cc9c8cd7610888744056 | [
"MIT"
] | null | null | null | safe_control_gym/controllers/__init__.py | gokhanalcan/safe-control-gym | e9086e102663a60a66f2cc9c8cd7610888744056 | [
"MIT"
] | null | null | null | safe_control_gym/controllers/__init__.py | gokhanalcan/safe-control-gym | e9086e102663a60a66f2cc9c8cd7610888744056 | [
"MIT"
] | null | null | null | """Register controllers.
"""
from safe_control_gym.utils.registration import register
# Each register() call binds a controller id to its implementation class
# (entry_point) and the YAML file holding its default configuration
# (config_entry_point).
register(id="mpc",
        entry_point="safe_control_gym.controllers.mpc.mpc:MPC",
        config_entry_point="safe_control_gym.controllers.mpc:mpc.yaml")
register(id="linear_mpc",
        entry_point="safe_control_gym.controllers.mpc.linear_mpc:LinearMPC",
        config_entry_point="safe_control_gym.controllers.mpc:linear_mpc.yaml")
register(id="gp_mpc",
        entry_point="safe_control_gym.controllers.mpc.gp_mpc:GPMPC",
        config_entry_point="safe_control_gym.controllers.mpc:gp_mpc.yaml")
register(id="mpsc",
        entry_point="safe_control_gym.controllers.mpsc.mpsc:MPSC",
        config_entry_point="safe_control_gym.controllers.mpsc:mpsc.yaml")
# Reinforcement-learning based controllers.
register(id="ppo",
        entry_point="safe_control_gym.controllers.ppo.ppo:PPO",
        config_entry_point="safe_control_gym.controllers.ppo:ppo.yaml")
register(id="safe_explorer_ppo",
        entry_point="safe_control_gym.controllers.safe_explorer.safe_ppo:SafeExplorerPPO",
        config_entry_point="safe_control_gym.controllers.safe_explorer:safe_ppo.yaml")
| 39.137931 | 91 | 0.757709 |
90d7c582c62cd57d3012bffede812a95aec2d89a | 378 | py | Python | tests/plot_profile/test_utils.py | mizeller/plot_profile | 832f1d47a182d65747f18cf1ac90afc9a3b821c1 | [
"MIT"
] | null | null | null | tests/plot_profile/test_utils.py | mizeller/plot_profile | 832f1d47a182d65747f18cf1ac90afc9a3b821c1 | [
"MIT"
] | 3 | 2021-11-10T15:37:27.000Z | 2022-03-28T13:29:42.000Z | tests/plot_profile/test_utils.py | MeteoSwiss-APN/plot_profile | 832f1d47a182d65747f18cf1ac90afc9a3b821c1 | [
"MIT"
] | null | null | null | """Test module ``plot_profile/utils.py``."""
# Standard library
import logging
# First-party
from plot_profile.utils import count_to_log_level
| 27 | 51 | 0.759259 |
90d92df312eb9bb3199567f0c02a6aff1611d818 | 1,116 | py | Python | spider/utilities/util_config.py | YunofHD/PSpider | c1c9e1e7c61365e140a55541cc558d7c1b9e50f2 | [
"BSD-2-Clause"
] | null | null | null | spider/utilities/util_config.py | YunofHD/PSpider | c1c9e1e7c61365e140a55541cc558d7c1b9e50f2 | [
"BSD-2-Clause"
] | null | null | null | spider/utilities/util_config.py | YunofHD/PSpider | c1c9e1e7c61365e140a55541cc558d7c1b9e50f2 | [
"BSD-2-Clause"
] | null | null | null | # _*_ coding: utf-8 _*_
"""
util_config.py by xianhu
"""
# Public API of this module.
__all__ = [
    "CONFIG_FETCH_MESSAGE",
    "CONFIG_PARSE_MESSAGE",
    "CONFIG_MESSAGE_PATTERN",
    "CONFIG_URL_LEGAL_PATTERN",
    "CONFIG_URL_ILLEGAL_PATTERN",
]
# define the structure of message, used in Fetcher and Parser
# CONFIG_MESSAGE_PATTERN re-parses strings built from either template:
# named groups priority/keys/deep/url are always present, while `repeat`
# only appears in fetch messages (hence the optional group around it).
CONFIG_FETCH_MESSAGE = "priority=%s, keys=%s, deep=%s, repeat=%s, url=%s"
CONFIG_PARSE_MESSAGE = "priority=%s, keys=%s, deep=%s, url=%s"
CONFIG_MESSAGE_PATTERN = r"priority=(?P<priority>\d+),\s*keys=(?P<keys>.+?),\s*deep=(?P<deep>\d+),\s*(repeat=(?P<repeat>\d+),\s*)?url=(?P<url>.+)$"
# define url_legal_pattern and url_illegal_pattern
# Legal: anything shaped like an http(s) URL with a dot in it.
# Illegal: URLs ending in archive/document/media/binary extensions.
CONFIG_URL_LEGAL_PATTERN = r"^https?:[^\s]+?\.[^\s]+?"
CONFIG_URL_ILLEGAL_PATTERN = r"\.(cab|iso|zip|rar|tar|gz|bz2|7z|tgz|apk|exe|app|pkg|bmg|rpm|deb|dmg|jar|jad|bin|msi|" \
                             "pdf|doc|docx|xls|xlsx|ppt|pptx|txt|md|odf|odt|rtf|py|java|c|cc|js|css|log|csv|tsv|" \
                             "jpg|jpeg|png|gif|bmp|xpm|xbm|ico|drm|dxf|eps|psd|pcd|pcx|tif|tiff|" \
                             "mp3|mp4|swf|mkv|avi|flv|mov|wmv|wma|3gp|mpg|mpeg|mp4a|wav|ogg|rmvb)$"
| 42.923077 | 147 | 0.634409 |
90d9a5726836680355d0f136ca02e9d3ff263f57 | 1,087 | py | Python | modcma/__main__.py | IOHprofiler/ModularCMAES | 5ae3310d68b7e2bc37ef10de07945e89c16d6654 | [
"MIT"
] | 2 | 2021-04-08T06:16:21.000Z | 2022-01-25T18:18:51.000Z | modcma/__main__.py | IOHprofiler/ModularCMAES | 5ae3310d68b7e2bc37ef10de07945e89c16d6654 | [
"MIT"
] | 3 | 2020-11-16T15:24:53.000Z | 2021-11-10T10:27:50.000Z | modcma/__main__.py | IOHprofiler/ModularCMAES | 5ae3310d68b7e2bc37ef10de07945e89c16d6654 | [
"MIT"
] | 2 | 2021-01-13T15:36:46.000Z | 2021-04-08T06:24:25.000Z | """Allows the user to call the library as a cli-module."""
from argparse import ArgumentParser
from .modularcmaes import evaluate_bbob
# CLI flags map directly onto evaluate_bbob() keyword arguments
# (the parsed namespace is splatted into the call below).
parser = ArgumentParser(description="Run single function CMAES")
parser.add_argument(
    "-f", "--fid", type=int, help="bbob function id", required=False, default=5
)
parser.add_argument(
    "-d", "--dim", type=int, help="dimension", required=False, default=5
)
parser.add_argument(
    "-i",
    "--iterations",
    type=int,
    help="number of iterations per agent",
    required=False,
    default=50,
)
parser.add_argument(
    "-l", "--logging", required=False, action="store_true", default=False
)
parser.add_argument("-L", "--label", type=str, required=False, default="")
parser.add_argument("-s", "--seed", type=int, required=False, default=42)
parser.add_argument("-p", "--data_folder", type=str, required=False)
parser.add_argument("-a", "--arguments", nargs="+", required=False)
args = vars(parser.parse_args())
# Extra "-a" expressions may add/override entries of the kwargs dict.
# NOTE(review): exec() on user-supplied strings is arbitrary code execution;
# tolerable for a local CLI, but never expose this to untrusted input.
for arg in args.pop("arguments") or []:
    # pylint: disable=exec-used
    exec(arg, None, args)
evaluate_bbob(**args)
| 29.378378 | 79 | 0.689052 |
90da83a21ced32c3bda1f81dfc548d05d09d8656 | 7,303 | py | Python | src/rto/optimization/solvers/de.py | vicrsp/rto | e9e0b87533ae6bc5e6ad228bb26172384802f9b7 | [
"MIT"
] | null | null | null | src/rto/optimization/solvers/de.py | vicrsp/rto | e9e0b87533ae6bc5e6ad228bb26172384802f9b7 | [
"MIT"
] | 17 | 2020-10-24T18:03:54.000Z | 2020-11-11T22:25:16.000Z | src/rto/optimization/solvers/de.py | vicrsp/rto | e9e0b87533ae6bc5e6ad228bb26172384802f9b7 | [
"MIT"
] | null | null | null | import numpy as np
| 37.451282 | 121 | 0.546899 |
90db629f11aa0043b43ed88cb2e40c8fc351233e | 2,122 | py | Python | openmdao/matrices/csr_matrix.py | onodip/OpenMDAO | 96a99806fb3a547b881d2ad3da2733bca9978567 | [
"Apache-2.0"
] | null | null | null | openmdao/matrices/csr_matrix.py | onodip/OpenMDAO | 96a99806fb3a547b881d2ad3da2733bca9978567 | [
"Apache-2.0"
] | null | null | null | openmdao/matrices/csr_matrix.py | onodip/OpenMDAO | 96a99806fb3a547b881d2ad3da2733bca9978567 | [
"Apache-2.0"
] | null | null | null | """Define the CSRmatrix class."""
import numpy as np
from scipy.sparse import coo_matrix
from six import iteritems
from openmdao.matrices.coo_matrix import COOMatrix
| 34.786885 | 83 | 0.60132 |
90decc71935f62f946a40921c43b3f8580f075de | 2,398 | py | Python | setup.py | medchemfi/sdfconf | 81b1ed383c1d4b3e633fdc555e4027091226b025 | [
"MIT"
] | 6 | 2021-12-27T07:55:16.000Z | 2022-01-26T04:36:53.000Z | setup.py | medchemfi/sdfconf | 81b1ed383c1d4b3e633fdc555e4027091226b025 | [
"MIT"
] | null | null | null | setup.py | medchemfi/sdfconf | 81b1ed383c1d4b3e633fdc555e4027091226b025 | [
"MIT"
] | 3 | 2022-01-06T13:54:48.000Z | 2022-01-26T04:36:54.000Z | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import re
import os
# Single-source the package version from the module's _version.py.
VERSIONFILE = "src/sdfconf/_version.py"

def read(fname):
    """Return the text content of *fname*, resolved relative to this script.

    Needed by setup() below for long_description; previously this helper
    was referenced but never defined (NameError at build time).
    """
    with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), fname)) as f:
        return f.read()

# Scan the version file for the first __version__ assignment.
verstr = None
with open(VERSIONFILE, "rt") as vf:
    VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
    for line in vf:
        mo = re.search(VSRE, line, re.M)
        if mo:
            verstr = mo.group(1)
            break
# Sentinel check instead of `if not mo`: the old code raised NameError when
# the file was empty (mo unbound) and referenced an undefined VERSIONFILE
# inside the error message itself.
if verstr is None:
    raise RuntimeError("Unable to find version string in %s." % (VERSIONFILE,))
# Package metadata and entry points for sdfconf.
setup(name = 'sdfconf',
      version = verstr,
      description = ("Diverse manipulation and analysis tool for .sdf files."),
      # NOTE(review): read() must be defined earlier in this file; it is not
      # imported above -- verify it exists before building.
      long_description = read('README.rst'),
      install_requires = ['numpy>=1.7.1','matplotlib>=1.4.2'],
      # NOTE(review): 'Ltti' looks mis-encoded (email says 'latti'); confirm
      # the intended author name spelling.
      author = 'Sakari Ltti',
      author_email = 'sakari.latti@jyu.fi',
      maintainer = 'Sakari Ltti',
      maintainer_email = 'sakari.latti@jyu.fi',
      packages = ['sdfconf'],
      package_dir = {'sdfconf':'src/sdfconf'},
      keywords = 'sdf mol2 conformation analyze histogram',
      url = 'http://users.jyu.fi/~pentikai/',
      license = 'MIT/expat',
      # Console script: `sdfconf` invokes sdfconf.runner:main.
      entry_points =
      {'console_scripts': ['sdfconf = sdfconf.runner:main'],
       'setuptools.installation': ['eggsecutable = sdfconf.runner:main',],
       },
      classifiers= ['Development Status :: 4 - Beta',
                    'Environment :: Console',
                    'Intended Audience :: Science/Research',
                    'License :: OSI Approved :: MIT License',
                    'Natural Language :: English',
                    'Operating System :: OS Independent',
                    'Programming Language :: Python :: 2.7',
                    #'Programming Language :: Python :: 3',
                    'Topic :: Scientific/Engineering :: Bio-Informatics',
                    'Topic :: Scientific/Engineering :: Chemistry' ,
                    'Topic :: Software Development :: Libraries',
                    ],
      ##FIXME
      #'''
      #package_data = {
      # 'sample':['sample_data.sdf']
      # },
      #'''
) | 42.821429 | 97 | 0.470809 |
90df7ee581fbe2b55cfc18df37ae0baf763d0554 | 2,453 | py | Python | stacks/cognito_stack.py | adamdubey/devops-serverless-app-aws-cdk | 3e1ba3f9905c4f8469275e51448fb908b64a736a | [
"MIT"
] | null | null | null | stacks/cognito_stack.py | adamdubey/devops-serverless-app-aws-cdk | 3e1ba3f9905c4f8469275e51448fb908b64a736a | [
"MIT"
] | null | null | null | stacks/cognito_stack.py | adamdubey/devops-serverless-app-aws-cdk | 3e1ba3f9905c4f8469275e51448fb908b64a736a | [
"MIT"
] | null | null | null | from aws_cdk import (
aws_cognito as cognito,
aws_iam as iam,
aws_ssm as ssm,
core
)
| 34.069444 | 77 | 0.538932 |
90dfa79ebe79555eaeba7589bba91bb7586ad592 | 3,570 | py | Python | cloudrail/knowledge/rules/aws/context_aware/s3_bucket_policy_vpc_endpoint_rule.py | my-devops-info/cloudrail-knowledge | b7c1bbd6fe1faeb79c105a01c0debbe24d031a0e | [
"MIT"
] | null | null | null | cloudrail/knowledge/rules/aws/context_aware/s3_bucket_policy_vpc_endpoint_rule.py | my-devops-info/cloudrail-knowledge | b7c1bbd6fe1faeb79c105a01c0debbe24d031a0e | [
"MIT"
] | null | null | null | cloudrail/knowledge/rules/aws/context_aware/s3_bucket_policy_vpc_endpoint_rule.py | my-devops-info/cloudrail-knowledge | b7c1bbd6fe1faeb79c105a01c0debbe24d031a0e | [
"MIT"
] | null | null | null | from typing import Dict, List
from cloudrail.knowledge.context.aws.iam.policy import S3Policy
from cloudrail.knowledge.context.aws.iam.policy_statement import StatementEffect
from cloudrail.knowledge.context.aws.s3.s3_bucket import S3Bucket
from cloudrail.knowledge.context.aws.ec2.vpc import Vpc
from cloudrail.knowledge.context.aws.ec2.vpc_endpoint import VpcEndpoint
from cloudrail.knowledge.context.aws.aws_environment_context import AwsEnvironmentContext
from cloudrail.knowledge.rules.aws.aws_base_rule import AwsBaseRule
from cloudrail.knowledge.rules.base_rule import Issue
from cloudrail.knowledge.rules.rule_parameters.base_paramerter import ParameterType
| 51.73913 | 123 | 0.671989 |
90e09e247242d171d4b02ca536f204cbdf79e682 | 2,034 | py | Python | src/gluonts/core/serde/_json.py | PascalIversen/gluon-ts | cb6b5145b94f942f7803383a70813363f6953509 | [
"Apache-2.0"
] | 1 | 2020-11-30T18:05:24.000Z | 2020-11-30T18:05:24.000Z | src/gluonts/core/serde/_json.py | PascalIversen/gluon-ts | cb6b5145b94f942f7803383a70813363f6953509 | [
"Apache-2.0"
] | null | null | null | src/gluonts/core/serde/_json.py | PascalIversen/gluon-ts | cb6b5145b94f942f7803383a70813363f6953509 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""
JSON Serialization/Deserialization
----------------------------------
The canonical way to do this is to define and `default` and `object_hook`
parameters to the json.dumps and json.loads methods. Unfortunately, due
to https://bugs.python.org/issue12657 this is not possible at the moment,
as support for custom NamedTuple serialization is broken.
To circumvent the issue, we pass the input value through custom encode
and decode functions that map nested object terms to JSON-serializable
data structures with explicit recursion.
"""
import json
from typing import Any, Optional
from ._base import encode, decode
def dump_json(o: Any, indent: Optional[int] = None) -> str:
    """Serialize *o* to a JSON string.

    The object is first mapped to JSON-friendly terms via ``encode``,
    then rendered with deterministic (sorted) key order.

    Parameters
    ----------
    o
        The object to serialize.
    indent
        An optional number of spaces to use as an indent.

    Returns
    -------
    str
        A string representing the object in JSON format.

    See Also
    --------
    load_json
        Inverse function.
    """
    encoded = encode(o)
    return json.dumps(encoded, indent=indent, sort_keys=True)
def load_json(s: str) -> Any:
    """Deserialize an object from its JSON string form.

    The JSON text is parsed first, then mapped back from the
    JSON-friendly term representation via ``decode``.

    Parameters
    ----------
    s
        A string representing the object in JSON format.

    Returns
    -------
    Any
        The deserialized object.

    See Also
    --------
    dump_json
        Inverse function.
    """
    parsed = json.loads(s)
    return decode(parsed)
| 26.076923 | 75 | 0.67355 |
90e1d06ca5448614066727df6e46a00f224d0fe4 | 36,937 | py | Python | run.py | Aisbergg/docker-image-arch-aur-makepkg | 26b7fed14838f52560c8fd4ba222c1c3d41ce00d | [
"MIT"
] | 7 | 2017-07-24T15:19:29.000Z | 2018-11-18T21:59:13.000Z | run.py | Aisbergg/docker-image-arch-aur-makepkg | 26b7fed14838f52560c8fd4ba222c1c3d41ce00d | [
"MIT"
] | 1 | 2018-06-12T10:30:23.000Z | 2018-06-12T15:54:01.000Z | run.py | Aisbergg/docker-image-arch-aur-makepkg | 26b7fed14838f52560c8fd4ba222c1c3d41ce00d | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import argparse
import os
import sys
import re
import shutil
import tempfile
import pwd
import grp
import tarfile
import time
import glob
import urllib.request
from subprocess import Popen, PIPE
import aur
import pacman
# Directory that may contain user-provided package sources (one sub-directory
# with a PKGBUILD per package).
local_source_dir = '/makepkg/local_src'
# Working directory in which makepkg builds the packages.
build_dir = os.path.abspath('/makepkg/build')
# Pacman's package cache directory.
pacman_cache_dir = '/var/cache/pacman/pkg'
# Architectures accepted for a PKGBUILD's `arch` field.
accepted_architectures = ['any', 'x86_64', 'i686']
# Both are populated by main(): file names currently in the pacman cache and
# the packages available from the official repositories.
# (The misspelling "offical" is kept: other code references this exact name.)
packages_in_cache = None
packages_in_offical_repositories = None
def printInfo(message):
    """Print a colorful info message.

    Args:
        message (str): Message to be printed
    """
    # Blue text, reset terminal color afterwards.
    print("{0}{1}{2}".format(ConsoleColors.blue, message, ConsoleColors.reset))
def printSuccessfull(message):
    """Print a colorful successfull message.

    Args:
        message (str): Message to be printed
    """
    # Green text, reset terminal color afterwards.
    print("{0}{1}{2}".format(ConsoleColors.green, message, ConsoleColors.reset))
def printWarning(message):
    """Print a colorful warning message.

    Args:
        message (str): Message to be printed
    """
    # Yellow text, reset terminal color afterwards.
    print("{0}{1}{2}".format(ConsoleColors.yellow, message, ConsoleColors.reset))
def printError(message):
    """Print a colorful error message.

    Args:
        message (str): Message to be printed
    """
    # Red text, reset terminal color afterwards.
    print("{0}{1}{2}".format(ConsoleColors.red, message, ConsoleColors.reset))
def change_user(uid):
    """Create a callable that switches the current process to *uid*.

    Intended to be passed as ``preexec_fn`` to ``subprocess.Popen`` (see
    run_command) so the child process runs with the build user's privileges.

    BUGFIX: the previous implementation returned the undefined name
    ``set_uid_and_guid`` which raised a NameError whenever a UID was given;
    the closure is now actually defined.

    Args:
        uid (int): UID of the user the child process should run as

    Returns:
        callable: zero-argument function executed in the child just
        before ``exec``.
    """
    def set_uid_and_guid():
        # Runs in the forked child process right before exec.
        os.setuid(uid)
    return set_uid_and_guid
def run_command(command, uid=None, print_output=True):
    """Run a command in a subprocess.

    Args:
        command (list): Command to run (argv-style list)
        uid (int): UID of the user to run with
        print_output (bool): True if the output should be printed to stdout and stderr

    Returns:
        (int, list, list). Return code of the subprocess, stdout and stderr
        (each as a list of lines)
    """
    if uid:
        # Drop privileges in the child via the preexec callable.
        process = Popen(command, stdout=PIPE, stderr=PIPE, universal_newlines=True, preexec_fn=change_user(uid))
    else:
        process = Popen(command, stdout=PIPE, stderr=PIPE, universal_newlines=True)
    if print_output:
        err = []
        out = []
        # Stream stdout line by line while the process is still running.
        while True:
            tmp = process.stdout.readline()
            if tmp:
                tmp = tmp.rstrip('\n ')
                if tmp != '':
                    out.append(tmp)
                    print(tmp)
            if process.poll() is not None:
                # Process exited; stop polling.
                break
            time.sleep(.05)
        # Drain whatever stdout remained buffered after exit.
        for line in process.stdout.readlines():
            tmp = line.rstrip('\n ')
            out.append(tmp)
            print(tmp)
        rc = process.poll()
        if rc != 0:
            # On failure, also print and collect stderr.
            for line in process.stderr.readlines():
                tmp = line.rstrip('\n ')
                printError(tmp)
                err.append(tmp)
        return (rc, out, err)
    else:
        # No live streaming requested: just wait for completion.
        out, err = process.communicate()
        rc = process.returncode
        return (rc, out.splitlines(), err.splitlines())
def get_package_recursive(pkg_name,
                          explicit_build,
                          pkg_dict,
                          locally_available_package_sources,
                          remove_dowloaded_source,
                          is_make_dependency):
    """Get a package and all their dependencies.

    Resolution order: already collected -> official repository -> local
    source directory -> AUR.  Results are stored in ``pkg_dict``.

    Args:
        pkg_name (str): Name of the package
        explicit_build (bool): True if package source is given by the user
        pkg_dict (dict): Store for package information
        locally_available_package_sources (list): List of all locally available package sources
        remove_dowloaded_source (bool): If True remove the source downloaded by 'makepkg' before build. If False
                                        the sources will be kept, under the condition that the source is of the same
                                        version of the package to be build
        is_make_dependency (bool): True if package shall be installed as a make dependency
    """
    # check if package is already in pkg_dict
    if pkg_name in pkg_dict:
        return
    # check if package is in official repo
    for pcm_info in packages_in_offical_repositories:
        if pcm_info['id'] == pkg_name:
            pcm_pkg = PacmanPackage(pkg_name)
            pcm_pkg.is_make_dependency = is_make_dependency
            pkg_dict[pkg_name] = pcm_pkg
            return
    # check if package source is locally available
    if pkg_name in locally_available_package_sources:
        pkg_path = os.path.join(local_source_dir, pkg_name)
        lcl_pkg = PackageSource(pkg_name, remove_dowloaded_source, pkg_path)
        if lcl_pkg.name in pkg_dict:
            return
        lcl_pkg.explicit_build = explicit_build
        # BUGFIX: this previously re-assigned 'explicit_build' with the
        # make-dependency flag, clobbering the user's intent; store the flag
        # in the attribute it belongs to (cf. the PacmanPackage branch).
        lcl_pkg.is_make_dependency = is_make_dependency
        pkg_dict[pkg_name] = lcl_pkg
        # if split package the name can defer
        pkg_dict[lcl_pkg.name] = lcl_pkg
        if not lcl_pkg.error_info:
            # Runtime dependencies inherit the make-dependency flag; make
            # dependencies are always flagged as such.
            for dependency in lcl_pkg.dependencies:
                get_package_recursive(dependency,
                                      False,
                                      pkg_dict,
                                      locally_available_package_sources,
                                      remove_dowloaded_source,
                                      is_make_dependency)
            for make_dependency in lcl_pkg.make_dependencies:
                get_package_recursive(make_dependency,
                                      False,
                                      pkg_dict,
                                      locally_available_package_sources,
                                      remove_dowloaded_source,
                                      True)
    # check for the package in the AUR
    else:
        aur_pkg = PackageSource(pkg_name, remove_dowloaded_source, None)
        if aur_pkg.name in pkg_dict:
            return
        aur_pkg.explicit_build = explicit_build
        aur_pkg.is_make_dependency = is_make_dependency
        pkg_dict[pkg_name] = aur_pkg
        # if split package the name can defer
        pkg_dict[aur_pkg.name] = aur_pkg
        if not aur_pkg.error_info:
            for dependency in aur_pkg.dependencies:
                get_package_recursive(dependency,
                                      False,
                                      pkg_dict,
                                      locally_available_package_sources,
                                      remove_dowloaded_source,
                                      is_make_dependency)
            for make_dependency in aur_pkg.make_dependencies:
                get_package_recursive(make_dependency,
                                      False,
                                      pkg_dict,
                                      locally_available_package_sources,
                                      remove_dowloaded_source,
                                      True)
def build_package_recursive(pkg_name,
                            pkg_dict,
                            rebuild,
                            install_all_dependencies,
                            uid,
                            gid):
    """Build a package and all their dependencies.

    Args:
        pkg_name (str): Name of the package
        pkg_dict (dict): Store for package information
        rebuild (int): Rebuild behaviour:
                       0: Build only new versions of packages (default)
                       1: Rebuild all explicit listed packages
                       2: Rebuild all explicit listed packages and their dependencies
        install_all_dependencies (bool): Install every dependency, not only
                                         make dependencies
        uid (int): UID of the build user
        gid (int): GID of the build user
    """
    pkg = pkg_dict[pkg_name]
    # break if a error occurred
    if pkg.error_info:
        return
    # break if the package has already been processed
    # (build_status != 0 means a build attempt was already made)
    if type(pkg) is PackageSource and pkg.build_status != 0:
        return
    if type(pkg) is PacmanPackage:
        # break if the package has already been processed
        # (negative status = failed, 3 = already installed)
        if pkg.installation_status < 0 or pkg.installation_status == 3:
            return
        # install pacman package if it is a make dependency
        if (pkg.is_make_dependency or install_all_dependencies):
            pkg.install()
        return
    # Recurse into all dependencies first; remember whether any of them
    # produced a fresh build (which forces a rebuild of this package).
    dependency_changed = False
    for dependency in pkg.get_all_dependencies():
        pkg_dependency = pkg_dict[dependency]
        build_package_recursive(dependency, pkg_dict, rebuild, install_all_dependencies, uid, gid)
        if pkg_dependency.error_info:
            # A failed dependency marks this package as "dependency failed".
            pkg.build_status = 4
            return
        else:
            if type(pkg_dependency) is PackageSource and \
                    pkg_dependency.build_status == 1:
                dependency_changed = True
    pkg.get_installation_status()
    if dependency_changed:
        # A dependency was rebuilt, so this package must be rebuilt too.
        if pkg.makepkg(uid, gid):
            pkg.build_status = 1
        else:
            pkg.build_status = 3
    else:
        # rebuild only if new version is available
        if rebuild == 0:
            if pkg.cache_available < 2:
                if pkg.makepkg(uid, gid):
                    pkg.build_status = 1
                else:
                    pkg.build_status = 3
            else:
                pkg.build_status = 2
        # rebuild if explicit or a new version is available
        elif rebuild == 1:
            if pkg.cache_available < 2 or pkg.explicit_build:
                if pkg.makepkg(uid, gid):
                    pkg.build_status = 1
                else:
                    pkg.build_status = 3
            else:
                pkg.build_status = 2
        # rebuild all
        elif rebuild == 2:
            if pkg.makepkg(uid, gid):
                pkg.build_status = 1
            else:
                pkg.build_status = 3
    if install_all_dependencies:
        pkg.install()
    return
def format_log(pkg, msg, prefix=''):
    """Format a build log entry for a given package.

    Continuation lines of a multi-line message are indented with
    ``prefix`` so they line up under the first line.

    Args:
        pkg (PackageBase): The package
        msg (str): Message for the package
        prefix (str): Prefix added for message in multiple lines

    Returns:
        str. The formatted build log
    """
    lines = msg.splitlines()
    if len(lines) > 1:
        continuation = [prefix + ' ' + part for part in lines[1:]]
        msg = '\n'.join([lines[0]] + continuation)
    if pkg.version:
        return "{0} {1}: {2}".format(pkg.name, pkg.version, msg)
    return "{0}: {1}".format(pkg.name, msg)
def print_build_log_recursive(pkg_names, pkg_dict, prefix='', is_root=False):
    """Recursively build the log lines for the given packages as a tree.

    Args:
        pkg_names (list): Names of the packages to report on
        pkg_dict (dict): Store for package information
        prefix (str): Prefix for the message (tree indentation)
        is_root (bool): True if first recursion

    Returns:
        (bool, list). Tuple consisting of the build status and the log
        messages as a list
    """
    success = True
    log = []
    # Default tree decoration for a non-root, non-last entry.
    log_prefix = prefix + '  '
    intermediate_prefix = prefix + '| '
    # NOTE(review): ``enumerate_package_names`` is not defined in this chunk;
    # it presumably yields (position, anchor, name) triples where anchor == 1
    # marks the last entry of the list — verify against the full module.
    for pos, anchor, pkg_name in enumerate_package_names(pkg_names):
        pkg = pkg_dict[pkg_name]
        log_dep = []
        if is_root:
            # Top-level entries carry no tree decoration.
            log_prefix = ""
            intermediate_prefix = ""
        elif anchor == 1:
            # Last entry: continuation lines use blanks instead of '|'.
            log_prefix = prefix + '  '
            intermediate_prefix = prefix + '  '
        if type(pkg) == PacmanPackage:
            # Repository packages: report their installation status.
            if pkg.installation_status < 0:
                success = False
                log.append(log_prefix + format_log(
                    pkg, "Failed to install: " + str(pkg.error_info), intermediate_prefix))
            elif pkg.installation_status == 0:
                log.append(log_prefix + format_log(pkg, "Not installed"))
            elif pkg.installation_status == 1:
                log.append(log_prefix + format_log(pkg, "Skipped install"))
            elif pkg.installation_status == 3:
                log.append(log_prefix + format_log(pkg, "Successfully installed"))
        else:
            # Source packages: recurse into the dependencies first.
            deps = pkg.get_all_dependencies()
            if len(deps) > 0:
                success, log_dep = print_build_log_recursive(
                    deps,
                    pkg_dict,
                    intermediate_prefix)
            if not success:
                log.append(log_prefix + format_log(
                    pkg, "Dependency Failed: " + str(pkg.error_info), intermediate_prefix))
            elif pkg.error_info:
                success = False
                log.append(log_prefix + format_log(
                    pkg, "Failed: " + str(pkg.error_info), intermediate_prefix))
            else:
                if pkg.build_status == 1:
                    log.append(log_prefix + format_log(
                        pkg, "Successfully build"))
                elif pkg.build_status == 2:
                    log.append(log_prefix + format_log(
                        pkg, "Skipped"))
                elif pkg.build_status == 3:
                    log.append(log_prefix + format_log(pkg, "Failed"))
                    success = False
                elif pkg.build_status == 4:
                    log.append(log_prefix + format_log(pkg, "Dependency Failed"))
                    success = False
        # Dependency messages follow directly after their parent entry.
        log = log + log_dep
    return success, log
def print_build_log(pkg_name, pkg_dict):
    """Print the build log tree for a given package.

    Args:
        pkg_name (str): The package name
        pkg_dict (dict): Store for package information
    """
    success, lines = print_build_log_recursive(
        [pkg_name], pkg_dict, '', True)
    # Whole tree is green on success, red on any failure.
    emit = printSuccessfull if success else printError
    for line in lines:
        emit(line)
def main(argv):
    """Run the main logic.

    Parses the command line, prepares the build user and the pacman
    databases, then collects, builds and reports the requested packages.

    Args:
        argv (list): Command line arguments (without the program name)

    Raises:
        Exception: if keyring initialization or the pacman upgrade fails.
    """
    parser = argparse.ArgumentParser(
        prog='aur-makepkg',
        description='Build Pacman packages with makepkg from local source or the AUR',
        epilog=''
    )
    parser.add_argument('-g', '--gid', dest='gid', type=int, default=1000,
                        help="GID of the build user")
    parser.add_argument('-i', '--install-all-dependencies', action='store_true',
                        dest='install_all_dependencies', default=False,
                        help="Install all dependencies, not only 'make dependencies'")
    parser.add_argument('-k', '--keyrings', dest='keyrings', default=None,
                        help="Pacman keyrings initialized prior building (comma seperated list)")
    parser.add_argument('-p', '--pacman-update', action='store_true',
                        dest='pacman_update', default=False,
                        help="Update all installed pacman packages before build")
    parser.add_argument('-r', '--rebuild', dest='rebuild', type=int, default=0,
                        help="""Rebuild behaviour:
0: Build only new versions of packages (default)
1: Rebuild all explicit listed packages
2: Rebuild all explicit listed packages and their dependencies""")
    parser.add_argument('--remove-downloaded-source',
                        dest='remove_dowloaded_source',
                        action='store_true', default=False,
                        help="""Remove the source downloaded by 'makepkg' before build. If not
the sources will be kept, under the condition that the source is of the same
version of the package to be build. (Note: Sources of packages build from a Git repository
will always be removed.)""")
    parser.add_argument('-u', '--uid', dest='uid', type=int, default=1000,
                        help="UID of the build user")
    parser.add_argument('build_package_names', nargs='+',
                        help="Name fo packages to be build from local source or the AUR")
    args = parser.parse_args(argv)

    # create build user and group if the given UID/GID do not exist yet
    try:
        grp.getgrgid(args.gid)
    except Exception:
        os.system("groupadd -g {0} build-user".format(args.gid))
    try:
        pwd.getpwuid(args.uid)
    except Exception:
        os.system(
            "useradd -p /makepkg/build -m -g {1} -s /bin/bash -u {0} build-user".format(args.uid, args.gid))

    # initialize pacman keyrings
    if args.keyrings:
        printInfo("Initializing pacman keyring...")
        run_command(['pacman-key', '--init'], print_output=False)
        rc, out, err = run_command(['pacman-key', '--populate'] + args.keyrings.split(','), print_output=True)
        if rc != 0:
            raise Exception("Failed to initialize Pacman keyrings: " + '\n'.join(err))

    # refresh pacman package database
    printInfo("Update pacman package database...")
    pacman.refresh()
    global packages_in_cache, packages_in_offical_repositories
    packages_in_cache = [x for x in os.listdir(pacman_cache_dir) if
                         os.path.isfile(os.path.join(pacman_cache_dir, x))]
    packages_in_offical_repositories = pacman.get_available()

    if args.pacman_update:
        # upgrade installed pacman packages
        printInfo("Upgrading installed pacman packages...")
        rc, out, err = run_command(['pacman', '-Su', '--noconfirm', '--force',
                                    '--ignore', 'package-query', '--ignore',
                                    'pacman-mirrorlist', '--cachedir',
                                    pacman_cache_dir], print_output=True)
        if rc != 0:
            raise Exception("Failed to upgrade Pacman packages: " + '\n'.join(err))

    pkg_dict = dict()
    build_package_names = [x.lower() for x in args.build_package_names]

    # look for local package sources
    locally_available_package_sources = []
    if os.path.exists(local_source_dir) and \
            os.path.isdir(local_source_dir):
        for d in os.listdir(local_source_dir):
            # BUGFIX: the PKGBUILD must be looked up inside local_source_dir;
            # joining only 'd' tested a path relative to the current working
            # directory, so local sources were never detected.
            pkgbuild_file_path = os.path.join(local_source_dir, d, "PKGBUILD")
            if os.path.exists(pkgbuild_file_path) and \
                    os.path.isfile(pkgbuild_file_path):
                locally_available_package_sources.append(os.path.basename(d))

    # get packages and their dependencies, then build them
    for pkg_name in build_package_names:
        printInfo("Collecting information about {0}...".format(pkg_name))
        get_package_recursive(pkg_name,
                              True,
                              pkg_dict,
                              locally_available_package_sources,
                              args.remove_dowloaded_source,
                              False)
        # build packages
        if pkg_name in pkg_dict:
            build_package_recursive(pkg_name,
                                    pkg_dict,
                                    args.rebuild,
                                    args.install_all_dependencies,
                                    args.uid,
                                    args.gid)

    # print build statistics
    printInfo("\nBuild Statistics:")
    for pkg_name in build_package_names:
        if pkg_name in pkg_dict:
            print_build_log(pkg_name, pkg_dict)
if __name__ == "__main__":
    # Entry point: run the builder and report any failure as a red error
    # message with a non-zero exit code.  The __main__ guard makes the
    # module importable (e.g. for tests) without side effects, and
    # sys.exit is used instead of the interactive-only exit() builtin.
    try:
        main(sys.argv[1:])
        sys.exit(0)
    except Exception as e:
        printError(str(e))
        sys.exit(1)
| 35.861165 | 166 | 0.570133 |
90e34a12e8b9e4a3553e847fbec09a27e4f92d6a | 12,680 | py | Python | test/test_schemagen.py | hd23408/nist-schemagen | 258e453d6f3bdc763e48e2c32668c932f7ffcd40 | [
"MIT"
] | 1 | 2021-09-01T02:11:03.000Z | 2021-09-01T02:11:03.000Z | test/test_schemagen.py | hd23408/nist-schemagen | 258e453d6f3bdc763e48e2c32668c932f7ffcd40 | [
"MIT"
] | null | null | null | test/test_schemagen.py | hd23408/nist-schemagen | 258e453d6f3bdc763e48e2c32668c932f7ffcd40 | [
"MIT"
] | null | null | null | """Test methods for testing the schemagen package (specifically,
the SchemaGenerator class).
Typical usage example:
python -m unittest
or, to run a single test:
python -m unittest -k test__build_schema
"""
import unittest
import pathlib
import logging
import copy
import os
import pandas as pd
import numpy as np
import schemagen
import filecmp
import string
# Suppress log messages so they don't confuse readers of the test output
logging.basicConfig(level=os.environ.get("LOGLEVEL", "CRITICAL"))
# Sample files for testing; all paths are resolved relative to this test file
# so the suite can run from any working directory.
INVALID_INPUT_DATA_FILE = str(pathlib.Path(__file__).parent.
    joinpath("files_for_testing/invalid_input_data.csv"))
EMPTY_INPUT_DATA_FILE = str(pathlib.Path(__file__).parent.
    joinpath("files_for_testing/empty_input_data.csv"))
VALID_INPUT_DATA_FILE = str(pathlib.Path(__file__).parent.
    joinpath("files_for_testing/valid_input_data.csv"))
VALID_SCHEMA_FILE = str(pathlib.Path(__file__).parent.
    joinpath("files_for_testing/parameters.json"))
# Directory into which tests write their generated output files
TEST_OUTPUT_DIRECTORY = str(pathlib.Path(__file__).parent.
    joinpath("test_output_files"))
# Golden files used to compare generated output against expected content
VALID_OUTPUT_PARAMETERS_FILE = str(pathlib.Path(__file__).parent.
    joinpath("files_for_testing/writing_tests/parameters.json"))
VALID_OUTPUT_DATATYPES_FILE = str(pathlib.Path(__file__).parent.
    joinpath("files_for_testing/writing_tests/column_datatypes.json"))
# Test dataframes to convert to a schema. This should contain
# an assortment of the different types that we expect to parse:
# A - float numeric categorical (with missing values)
# B - uint32 numeric range
# C - string categorical
# D - string free text (all-unique values)
#
VALID_TEST_DATAFRAME = pd.DataFrame.from_dict(
    {
        # Five distinct values padded with NaNs -> float categorical
        "A": [1, 2, 3, 4, 5, None, None, None, None, None] * 5,
        # 50 consecutive integers -> numeric range (fits uint32)
        "B": list(range(1000000, 1000050, 1)),
        # Five repeated strings -> string categorical
        "C": ["A", "B", "C", "D", "E"] * 10,
        # 50 unique letters -> too many distinct values, treated as text
        "D": list(string.ascii_letters)[0 : 50]
    }
)
# This isn't really a dataframe, it's a dict
INVALID_TEST_DATAFRAME = {
    "A": ["a", "b", "c", "d", "e", "f", "g"],
    "B": list(range(1, 8, 1))
}
# The appropriate schema and column datatypes to create from the test data above
VALID_TEST_SCHEMA = {
    "schema": {
        "A": {
            "dtype": "float",  # This gets turned into a float because of the 'None's
            "kind": "categorical",
            "values": [ 1.0, 2.0, 3.0, 4.0, 5.0 ],
            "codes": [ 1, 2, 3, 4, 5 ]
        },
        "B": {
            "dtype": "uint32",
            "kind": "numeric",
            "min": 1000000,
            "max": 1000049,
            "bins": 10
        },
        "C": {
            "dtype": "str",
            "kind": "categorical",
            "values": ["A", "B", "C", "D", "E"],
            "codes": [1, 2, 3, 4, 5]
        },
        "D": {
            "dtype": "str",
            "kind": "text"
        }
    }
}
# Expected per-column dtypes matching VALID_TEST_SCHEMA above
VALID_TEST_COLUMN_DATATYPES = {
    "dtype": {
        "A": "float",
        "B": "uint32",
        "C": "str",
        "D": "str"
    }
}
if __name__ == "__main__":
    # Allow running this test module directly: python test_schemagen.py
    unittest.main()
| 38.895706 | 146 | 0.711435 |
90e49b3af233dbd74b52999ca2aa64df02b0beff | 368 | py | Python | core/migrations/0008_auto_20190528_1802.py | peterson-dev/code-snippet-app | b5ecb7b8b679c307d361a7ce100d4115f92d99a5 | [
"MIT"
] | 2 | 2019-05-22T21:54:43.000Z | 2019-05-26T22:22:14.000Z | core/migrations/0008_auto_20190528_1802.py | peterson-dev/code-snippet-app | b5ecb7b8b679c307d361a7ce100d4115f92d99a5 | [
"MIT"
] | 14 | 2020-02-12T00:04:05.000Z | 2022-03-11T23:51:10.000Z | core/migrations/0008_auto_20190528_1802.py | peterson-dev/kode-kangaroo | b5ecb7b8b679c307d361a7ce100d4115f92d99a5 | [
"MIT"
] | null | null | null | # Generated by Django 2.2.1 on 2019-05-28 22:02
from django.db import migrations
| 19.368421 | 47 | 0.589674 |
90e4c87211faae293a93093b6b860b2f8d021a50 | 2,740 | py | Python | scripts/Biupdownsample/grad_check.py | dongdong93/a2u_matting | 1d0ad8e630cce50c5b36c40ad384c888d292f9a8 | [
"MIT"
] | 22 | 2021-04-28T03:48:53.000Z | 2022-01-24T09:42:53.000Z | scripts/Biupdownsample/grad_check.py | dongdong93/a2u_matting | 1d0ad8e630cce50c5b36c40ad384c888d292f9a8 | [
"MIT"
] | 1 | 2021-08-08T20:10:18.000Z | 2021-08-23T07:33:38.000Z | scripts/Biupdownsample/grad_check.py | dongdong93/a2u_matting | 1d0ad8e630cce50c5b36c40ad384c888d292f9a8 | [
"MIT"
] | 5 | 2021-09-17T08:02:06.000Z | 2022-01-24T09:43:03.000Z | import os.path as osp
import sys
import subprocess
subprocess.call(['pip', 'install', 'cvbase'])
import cvbase as cvb
import torch
from torch.autograd import gradcheck
sys.path.append(osp.abspath(osp.join(__file__, '../../')))
from biupdownsample import biupsample_naive, BiupsampleNaive
from biupdownsample import bidownsample_naive, BidownsampleNaive
# --- Gradient check: biupsample (naive) --------------------------------------
# Small double-precision CUDA tensors keep torch.autograd.gradcheck feasible.
feat = torch.randn(2, 64, 2, 2, requires_grad=True, device='cuda:0').double()
mask = torch.randn(
    2, 100, 4, 4, requires_grad=True, device='cuda:0').sigmoid().double()
print('Gradcheck for biupsample naive...')
test = gradcheck(BiupsampleNaive(5, 4, 2), (feat, mask), atol=1e-4, eps=1e-4)
print(test)

# --- Timing: biupsample forward/backward -------------------------------------
feat = torch.randn(
    2, 1024, 100, 100, requires_grad=True, device='cuda:0').float()
mask = torch.randn(
    2, 25, 200, 200, requires_grad=True, device='cuda:0').sigmoid().float()
loop_num = 500
time_naive_forward = 0
time_naive_backward = 0
bar = cvb.ProgressBar(loop_num)
timer = cvb.Timer()
for i in range(loop_num):
    x = biupsample_naive(feat.clone(), mask.clone(), 5, 1, 2)
    # Synchronize so the timer measures actual kernel time, not launch time.
    torch.cuda.synchronize()
    time_naive_forward += timer.since_last_check()
    x.sum().backward(retain_graph=True)
    torch.cuda.synchronize()
    time_naive_backward += timer.since_last_check()
    bar.update()
# The +1e-3 offset avoids reporting exactly zero for extremely fast runs.
forward_speed = (time_naive_forward + 1e-3) * 1e3 / loop_num
backward_speed = (time_naive_backward + 1e-3) * 1e3 / loop_num
print('\nBiupsample naive time forward: '
      f'{forward_speed} ms/iter | time backward: {backward_speed} ms/iter')

# ---------------------------------------------------------------
# --- Gradient check + timing: bidownsample (naive), same procedure -----------
feat = torch.randn(2, 64, 4, 4, requires_grad=True, device='cuda:0').double()
mask = torch.randn(
    2, 16, 4, 4, requires_grad=True, device='cuda:0').double()
print('Gradcheck for bidownsample naive...')
test = gradcheck(BidownsampleNaive(4, 1, 1), (feat, mask), atol=1e-4, eps=1e-4)
print(test)

feat = torch.randn(
    2, 512, 200, 200, requires_grad=True, device='cuda:0').float()
mask = torch.randn(
    2, 100, 100, 100, requires_grad=True, device='cuda:0').sigmoid().float()
loop_num = 500
time_naive_forward = 0
time_naive_backward = 0
bar = cvb.ProgressBar(loop_num)
timer = cvb.Timer()
for i in range(loop_num):
    x = bidownsample_naive(feat.clone(), mask.clone(), 10, 1, 2)
    torch.cuda.synchronize()
    time_naive_forward += timer.since_last_check()
    x.sum().backward(retain_graph=True)
    torch.cuda.synchronize()
    time_naive_backward += timer.since_last_check()
    bar.update()
forward_speed = (time_naive_forward + 1e-3) * 1e3 / loop_num
backward_speed = (time_naive_backward + 1e-3) * 1e3 / loop_num
print('\nBidownsample naive time forward: '
      f'{forward_speed} ms/iter | time backward: {backward_speed} ms/iter')
90e637b1e45a7f0c6d8c0bde7e551fe538035277 | 521 | py | Python | sunshinectf2020/speedrun/exploit_05.py | nhtri2003gmail/ctf-write-ups | 7e969c47027c39b614e10739ae3a953eed17dfa3 | [
"MIT"
] | 101 | 2020-03-09T17:40:47.000Z | 2022-03-31T23:26:55.000Z | sunshinectf2020/speedrun/exploit_05.py | nhtri2003gmail/ctf-write-ups | 7e969c47027c39b614e10739ae3a953eed17dfa3 | [
"MIT"
] | 1 | 2021-11-09T13:39:40.000Z | 2021-11-10T19:15:04.000Z | sunshinectf2020/speedrun/exploit_05.py | datajerk/ctf-write-ups | 1bc4ecc63a59de7d924c7214b1ce467801792da0 | [
"MIT"
] | 31 | 2020-05-27T12:29:50.000Z | 2022-03-31T23:23:32.000Z | #!/usr/bin/env python3
from pwn import *
binary = context.binary = ELF('./chall_05')
if not args.REMOTE:
p = process(binary.path)
else:
p = remote('chal.2020.sunshinectf.org', 30005)
p.sendlineafter('Race, life\'s greatest.\n','foobar')
p.recvuntil('Yes I\'m going to win: ')
_ = p.recvline().strip()
main = int(_,16)
binary.address = main - binary.sym.main
log.info('binary.address: ' + hex(binary.address))
payload = b''
payload += 56 * b'A'
payload += p64(binary.sym.win)
p.sendline(payload)
p.interactive()
| 20.038462 | 53 | 0.677543 |
90e6b33612e9aeabab01c1e1b9a83bc6ae67ea29 | 1,047 | py | Python | coderedcms/wagtail_flexible_forms/edit_handlers.py | mikiec84/coderedcms | d72de2118d777f23d9512dc348691d3d7b46d0e5 | [
"BSD-3-Clause"
] | 9 | 2019-01-03T16:57:27.000Z | 2020-07-08T07:17:35.000Z | coderedcms/wagtail_flexible_forms/edit_handlers.py | mikiec84/coderedcms | d72de2118d777f23d9512dc348691d3d7b46d0e5 | [
"BSD-3-Clause"
] | 1 | 2019-04-30T18:30:15.000Z | 2019-04-30T18:30:15.000Z | coderedcms/wagtail_flexible_forms/edit_handlers.py | mikiec84/coderedcms | d72de2118d777f23d9512dc348691d3d7b46d0e5 | [
"BSD-3-Clause"
] | 4 | 2019-06-04T21:05:02.000Z | 2020-04-20T00:39:52.000Z | from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from wagtail.admin.edit_handlers import EditHandler
| 33.774194 | 78 | 0.669532 |
90e6e714df05484ba942b1438ac0306e81a7602d | 2,189 | py | Python | python/elasticache/cache/helper/vpc.py | chejef/aws-cdk-examples-proserve | ea0b72475e2d28192a9088ac8ee8cb6498f6cb39 | [
"MIT-0"
] | 6 | 2021-11-10T21:42:53.000Z | 2022-03-17T13:22:07.000Z | python/elasticache/cache/helper/vpc.py | chejef/aws-cdk-examples-proserve | ea0b72475e2d28192a9088ac8ee8cb6498f6cb39 | [
"MIT-0"
] | 9 | 2021-11-11T14:33:35.000Z | 2022-02-14T15:25:41.000Z | python/elasticache/cache/helper/vpc.py | chejef/aws-cdk-examples-proserve | ea0b72475e2d28192a9088ac8ee8cb6498f6cb39 | [
"MIT-0"
] | 10 | 2021-11-13T17:32:06.000Z | 2022-01-17T18:13:02.000Z | from aws_cdk import (
core as cdk,
aws_elasticache as elasticache,
aws_ec2 as ec2,
)
from aws_cdk.core import Tags
from config import config_util as config
def get_vpc(scope: cdk.Construct) -> ec2.Vpc:
    """
    Look up and return the none default vpc.

    Args:
        scope: the cdk construct.

    Returns:
        ec2.Vpc: The ec2 VPC object based on the configured vpc id.
    """
    # Resolve the VPC by the id stored in the stack configuration.
    return ec2.Vpc.from_lookup(
        scope, "vpc", is_default=False, vpc_id=config.get_vpc_id()
    )
def get_security_group(scope: cdk.Construct) -> ec2.SecurityGroup:
    """
    Create and return the security group for the cluster, opening the
    configured port to each allowed CIDR range.

    Args:
        scope: the cdk construct.

    Returns:
        ec2.SecurityGroup: The ec2 Security Group object for the cluster.
    """
    cluster_name = config.get_cluster_name()
    security_group = ec2.SecurityGroup(
        scope, "ElastiCacheSecurityGroup",
        vpc=get_vpc(scope),
        allow_all_outbound=True,
        security_group_name=f"elasticache-sg-{cluster_name}",
        description=f"Security Group for {cluster_name} ElastiCache Cluster",
    )
    Tags.of(security_group).add("Name", f"elasticache-sg-{cluster_name}")
    # One ingress rule per allowed CIDR, all on the configured cache port.
    for cidr in config.get_allowed_cidrs():
        security_group.add_ingress_rule(
            ec2.Peer.ipv4(cidr),
            ec2.Port.tcp(config.get_port_number()),
            f"Allows connection to ElastiCache cluster {cluster_name}."
        )
    return security_group
def get_subnet_group(scope: cdk.Construct) -> elasticache.CfnSubnetGroup:
    """
    Create and return the elasticache subnet group.

    Args:
        scope: the cdk construct.

    Returns:
        elasticache.CfnSubnetGroup: The subnet group that contains the
        configured subnets of the vpc.
    """
    cluster_name = config.get_cluster_name()
    return elasticache.CfnSubnetGroup(
        scope, "ElastiCacheSubnetGroup",
        cache_subnet_group_name=f"{cluster_name}-subnet-group",
        description=f"ElastiCache subnet group for {cluster_name}",
        subnet_ids=config.get_subnet_ids()
    )
| 28.802632 | 110 | 0.677935 |
90e79b451ed65372d3a6d5c89f828338b8e17cd2 | 5,101 | py | Python | src/pyscaffold/extensions/namespace.py | jayvdb/pyscaffold | c97ab5fd17ace3e1250741434f8728999359c8c2 | [
"MIT"
] | 2 | 2019-08-23T12:59:04.000Z | 2021-04-14T14:45:36.000Z | .eggs/PyScaffold-3.2.3-py3.7.egg/pyscaffold/extensions/namespace.py | nkapchenko/HW | 92f51d1ac4fc009e24350844172a1fb3b111ea27 | [
"MIT"
] | 19 | 2020-03-24T18:12:18.000Z | 2022-03-29T22:27:59.000Z | .eggs/PyScaffold-3.2.3-py3.7.egg/pyscaffold/extensions/namespace.py | nkapchenko/HW | 92f51d1ac4fc009e24350844172a1fb3b111ea27 | [
"MIT"
] | 1 | 2020-05-10T06:24:38.000Z | 2020-05-10T06:24:38.000Z | # -*- coding: utf-8 -*-
"""
Extension that adjust project file tree to include a namespace package.
This extension adds a **namespace** option to
:obj:`~pyscaffold.api.create_project` and provides correct values for the
options **root_pkg** and **namespace_pkg** to the following functions in the
action list.
"""
import argparse
import os
from os.path import isdir
from os.path import join as join_path
from .. import templates, utils
from ..api import Extension, helpers
from ..log import logger
def create_namespace_parser(obj_ref):
    """Create a namespace parser.

    Args:
        obj_ref (Extension): object reference to the actual extension
            (currently unused; kept for API symmetry with other factories)

    Returns:
        NamespaceParser: parser for namespace cli argument
    """
    # NOTE(review): ``NamespaceParser`` is not defined in this chunk of the
    # file; it is presumably declared elsewhere in this module — verify.
    return NamespaceParser
def enforce_namespace_options(struct, opts):
    """Make sure options reflect the namespace usage.

    Derives ``ns_list``, ``root_pkg`` and ``qual_pkg`` from the
    ``namespace`` option (when present) and stores them back in ``opts``.
    """
    opts.setdefault('namespace', None)
    namespace = opts['namespace']
    if namespace:
        ns_list = utils.prepare_namespace(namespace)
        opts['ns_list'] = ns_list
        # The first namespace segment becomes the root package, and the
        # fully qualified package is <innermost namespace>.<package>.
        opts['root_pkg'] = ns_list[0]
        opts['qual_pkg'] = ".".join([ns_list[-1], opts['package']])
    return struct, opts
def add_namespace(struct, opts):
    """Prepend the namespace to a given file structure

    Args:
        struct (dict): directory structure as dictionary of dictionaries
        opts (dict): options of the project

    Returns:
        tuple(dict, dict):
            directory structure as dictionary of dictionaries and input options
    """
    if not opts['namespace']:
        return struct, opts
    # Innermost namespace entry, split into its individual path segments.
    namespace = opts['ns_list'][-1].split('.')
    base_struct = struct
    # Descend to the src/ folder that currently holds the package.
    struct = base_struct[opts['project']]['src']
    # Detach the package subtree so it can be re-attached below the namespace.
    pkg_struct = struct[opts['package']]
    del struct[opts['package']]
    # Create one nested namespace package (with __init__.py) per segment.
    for sub_package in namespace:
        struct[sub_package] = {'__init__.py': templates.namespace(opts)}
        struct = struct[sub_package]
    # Re-attach the original package under the innermost namespace level.
    struct[opts['package']] = pkg_struct
    return base_struct, opts
def move_old_package(struct, opts):
    """Move old package that may be eventually created without namespace

    Args:
        struct (dict): directory structure as dictionary of dictionaries
        opts (dict): options of the project

    Returns:
        tuple(dict, dict):
            directory structure as dictionary of dictionaries and input options
    """
    old_path = join_path(opts['project'], 'src', opts['package'])
    namespace_path = opts['qual_pkg'].replace('.', os.sep)
    target = join_path(opts['project'], 'src', namespace_path)
    old_exists = opts['pretend'] or isdir(old_path)
    # ^ When pretending, pretend also an old folder exists
    #   to show a worst case scenario log to the user...
    if old_exists and opts['qual_pkg'] != opts['package']:
        if not opts['pretend']:
            logger.warning(
                '\nA folder %r exists in the project directory, and it is '
                'likely to have been generated by a PyScaffold extension or '
                'manually by one of the current project authors.\n'
                'Moving it to %r, since a namespace option was passed.\n'
                'Please make sure to edit all the files that depend on this '
                'package to ensure the correct location.\n',
                opts['package'], namespace_path)
        # Relocate the package folder into the namespace hierarchy.
        utils.move(old_path, target=target,
                   log=True, pretend=opts['pretend'])
    return struct, opts
| 32.909677 | 79 | 0.634581 |
90e88281f8a4c42ecdd83892971dad7b739f5530 | 2,324 | py | Python | tests/solr_tests/tests/test_templatetags.py | speedplane/django-haystack | 4ace30aea6aa1b1708f79a5a9df20a00fa0b4d96 | [
"BSD-3-Clause"
] | 1 | 2017-10-12T14:25:06.000Z | 2017-10-12T14:25:06.000Z | tests/solr_tests/tests/templatetags.py | ericholscher/django-haystack | 1fde37afa4921c2121a95a4902f2012bbf837bf1 | [
"BSD-3-Clause"
] | 1 | 2016-08-03T18:01:43.000Z | 2016-08-03T18:03:00.000Z | tests/solr_tests/tests/templatetags.py | ericholscher/django-haystack | 1fde37afa4921c2121a95a4902f2012bbf837bf1 | [
"BSD-3-Clause"
] | 2 | 2015-08-11T17:00:42.000Z | 2021-01-04T08:39:33.000Z | # encoding: utf-8
from mock import call, patch
from django.template import Template, Context
from django.test import TestCase
from core.models import MockModel
| 41.5 | 173 | 0.645869 |
90e8e47f66221f7bc0ae337311c1f500db447d05 | 669 | py | Python | tests_project/homepage/views/__init__.py | wynnw/django-mako-plus | 8a33eb3911fc84ddddd590152f475fd78c6a501f | [
"Apache-2.0"
] | 79 | 2015-01-21T23:29:16.000Z | 2021-08-22T03:38:20.000Z | tests_project/homepage/views/__init__.py | wynnw/django-mako-plus | 8a33eb3911fc84ddddd590152f475fd78c6a501f | [
"Apache-2.0"
] | 34 | 2015-01-08T03:11:07.000Z | 2021-09-07T15:04:43.000Z | tests_project/homepage/views/__init__.py | wynnw/django-mako-plus | 8a33eb3911fc84ddddd590152f475fd78c6a501f | [
"Apache-2.0"
] | 23 | 2015-01-08T03:11:26.000Z | 2021-05-22T11:12:24.000Z | from django_mako_plus.converter import ParameterConverter
from django_mako_plus import view_function
from django.http import HttpRequest
| 47.785714 | 105 | 0.741405 |
90e95d3f579e468dcd63f6bfea79961b11c3e5b8 | 1,953 | py | Python | jupyterlab_bigquery/jupyterlab_bigquery/__init__.py | shunr/jupyter-extensions | a2fb310215664e29fd7252e5fe353f60a91a0aba | [
"Apache-2.0"
] | null | null | null | jupyterlab_bigquery/jupyterlab_bigquery/__init__.py | shunr/jupyter-extensions | a2fb310215664e29fd7252e5fe353f60a91a0aba | [
"Apache-2.0"
] | 1 | 2020-07-20T23:09:46.000Z | 2020-07-20T23:09:46.000Z | jupyterlab_bigquery/jupyterlab_bigquery/__init__.py | shunr/jupyter-extensions | a2fb310215664e29fd7252e5fe353f60a91a0aba | [
"Apache-2.0"
] | null | null | null | from notebook.utils import url_path_join
from jupyterlab_bigquery.list_items_handler import handlers
from jupyterlab_bigquery.details_handler import DatasetDetailsHandler, TablePreviewHandler, TableDetailsHandler
from jupyterlab_bigquery.version import VERSION
from jupyterlab_bigquery.pagedAPI_handler import PagedQueryHandler
from jupyterlab_bigquery.query_incell_editor import QueryIncellEditor, _cell_magic
__version__ = VERSION
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(app.settings['base_url'], 'bigquery', 'v1')
app.add_handlers(
host_pattern,
[
(url_path_join(gcp_v1_endpoint, k) + "(.*)", v)
for (k, v) in handlers.items()
],
)
app.add_handlers(host_pattern, [
# TODO(cbwilkes): Add auth checking if needed.
# (url_path_join(gcp_v1_endpoint, auth'), AuthHandler)
make_endpoint('list', ListHandler),
make_endpoint('datasetdetails', DatasetDetailsHandler),
make_endpoint('tabledetails', TableDetailsHandler),
make_endpoint('tablepreview', TablePreviewHandler),
make_endpoint('query', PagedQueryHandler)
])
def load_ipython_extension(ipython):
"""Called by IPython when this module is loaded as an IPython extension."""
ipython.register_magic_function(
_cell_magic, magic_kind="line", magic_name="bigquery_editor"
)
ipython.register_magic_function(
_cell_magic, magic_kind="cell", magic_name="bigquery_editor"
)
| 35.509091 | 111 | 0.721966 |
90eb050355216ee7d1a8b303ce6104092d1b2ec7 | 581 | py | Python | ios_notifications/migrations/0004_auto_20141105_1515.py | chillbear/django-ios-notifications | d48a7862eaa499672f27c192a3cf6f06e06f8117 | [
"BSD-3-Clause"
] | 2 | 2021-12-01T21:34:49.000Z | 2021-12-13T19:22:12.000Z | ios_notifications/migrations/0004_auto_20141105_1515.py | chillbear/django-ios-notifications | d48a7862eaa499672f27c192a3cf6f06e06f8117 | [
"BSD-3-Clause"
] | 1 | 2019-10-04T01:18:32.000Z | 2019-10-04T01:18:32.000Z | ios_notifications/migrations/0004_auto_20141105_1515.py | chillbear/django-ios-notifications | d48a7862eaa499672f27c192a3cf6f06e06f8117 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_fields.fields
| 26.409091 | 142 | 0.667814 |
90eb7ca2d6e3944281a7dc60550b6a349ec9b59b | 9,232 | py | Python | fairseq/tasks/audio_pretraining.py | hwp/fairseq | f5cf2278d10a2aa8ee5759ce924d23aef6f82e6f | [
"MIT"
] | 4 | 2021-09-06T06:40:41.000Z | 2022-02-14T09:59:37.000Z | fairseq/tasks/audio_pretraining.py | hwp/fairseq | f5cf2278d10a2aa8ee5759ce924d23aef6f82e6f | [
"MIT"
] | null | null | null | fairseq/tasks/audio_pretraining.py | hwp/fairseq | f5cf2278d10a2aa8ee5759ce924d23aef6f82e6f | [
"MIT"
] | 1 | 2021-07-12T12:34:47.000Z | 2021-07-12T12:34:47.000Z | # Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
import editdistance
import os
import sys
import torch
from fairseq.data import AddTargetDataset, Dictionary, FileAudioDataset, encoders
from fairseq.data.data_utils import post_process
from . import LegacyFairseqTask, register_task
from .. import utils
from ..logging import metrics
| 36.634921 | 96 | 0.604311 |
90eb98699fec58dcce70daf5b1617e8fd2c20143 | 20,440 | py | Python | caffe-int8-convert-tool-dev.py | daquexian/caffe-int8-convert-tools | 07bbb40e2b39493a0777f2be564114721a99c501 | [
"BSD-3-Clause"
] | null | null | null | caffe-int8-convert-tool-dev.py | daquexian/caffe-int8-convert-tools | 07bbb40e2b39493a0777f2be564114721a99c501 | [
"BSD-3-Clause"
] | null | null | null | caffe-int8-convert-tool-dev.py | daquexian/caffe-int8-convert-tools | 07bbb40e2b39493a0777f2be564114721a99c501 | [
"BSD-3-Clause"
] | 1 | 2021-04-22T08:55:38.000Z | 2021-04-22T08:55:38.000Z | # -*- coding: utf-8 -*-
# SenseNets is pleased to support the open source community by making caffe-int8-convert-tool available.
#
# Copyright (C) 2018 SenseNets Technology Ltd. All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
"""
Quantization module for generating the calibration tables will be used by
quantized (INT8) models from FP32 models.
This tool is based on Caffe Framework.
"""
from __future__ import division
from __future__ import print_function
import argparse
import numpy as np
import math, copy
import matplotlib.pyplot as plt
import sys,os
import caffe
import caffe.proto.caffe_pb2 as caffe_pb2
import time
import datetime
from google.protobuf import text_format
global args, parser
args, parser = parse_args()
# global params
QUANTIZE_NUM = 127
STATISTIC = 1
INTERVAL_NUM = 2048
# ugly global params
quantize_layer_lists = []
def add_to_distribution(blob, distribution, interval):
"""
add the distribution
Args:
blob: the output blob of caffe layer
distribution: a list ,size is 2048
interval: a float number
Returns:
none
"""
max_index = len(distribution) - 1
indexes = np.minimum((np.abs(blob[blob!=0]) / interval).astype(np.int32), max_index)
for index in indexes:
distribution[index] = distribution[index] + 1
def normalize_distribution(distribution):
"""
Normalize the input list
Args:
distribution: a list ,size is 2048
Returns:
none
"""
num_sum = sum(distribution)
for i, data in enumerate(distribution):
distribution[i] = data / float(num_sum)
def compute_kl_divergence(dist_a, dist_b):
"""
Returen kl_divergence between
Args:
dist_a: list original
dist_b: list expand
Returns:
kl_divergence: float, kl_divergence
"""
nonzero_inds = dist_a != 0
return np.sum(dist_a[nonzero_inds] * np.log(dist_a[nonzero_inds] / dist_b[nonzero_inds]))
def threshold_distribution(distribution, target_bin=128):
"""
Returen the best cut off num of bin
Args:
distribution: list, activations has been processed by histogram and normalize,size is 2048
target_bin: int, the num of bin that is used by quantize, Int8 default value is 128
Returns:
target_threshold: int, num of bin with the minimum KL
"""
target_threshold = target_bin
min_kl_divergence = 1000
length = distribution.size
quantize_distribution = np.zeros(target_bin)
threshold_sum = 0.0
threshold_sum = sum(distribution[target_bin:])
for threshold in range(target_bin, length):
t_distribution = copy.deepcopy(distribution[:threshold])
t_distribution[threshold-1] = t_distribution[threshold-1] + threshold_sum
threshold_sum = threshold_sum - distribution[threshold]
# ************************ threshold ************************
quantize_distribution = np.zeros(target_bin)
num_per_bin = threshold / target_bin
for i in range(0, target_bin):
start = i * num_per_bin
end = start + num_per_bin
left_upper = (int)(math.ceil(start))
if(left_upper > start):
left_scale = left_upper - start
quantize_distribution[i] += left_scale * distribution[left_upper - 1]
right_lower = (int)(math.floor(end))
if (right_lower < end):
right_scale = end - right_lower
quantize_distribution[i] += right_scale * distribution[right_lower]
for j in range(left_upper, right_lower):
quantize_distribution[i] += distribution[j]
# ************************ threshold ************************
# ************************ quantize ************************
expand_distribution = np.zeros(threshold, dtype=np.float32)
for i in range(0, target_bin):
start = i * num_per_bin
end = start + num_per_bin
count = 0
left_upper = (int)(math.ceil(start))
left_scale = 0.0
if (left_upper > start):
left_scale = left_upper - start
if (distribution[left_upper - 1] != 0):
count += left_scale
right_lower = (int)(math.floor(end))
right_scale = 0.0
if (right_lower < end):
right_scale = end - right_lower
if (distribution[right_lower] != 0):
count += right_scale
for j in range(left_upper, right_lower):
if (distribution[j] != 0):
count = count + 1
if count == 0:
continue;
expand_value = quantize_distribution[i] / count
if (left_upper > start):
if (distribution[left_upper - 1] != 0):
expand_distribution[left_upper - 1] += expand_value * left_scale
if (right_lower < end):
if (distribution[right_lower] != 0):
expand_distribution[right_lower] += expand_value * right_scale
for j in range(left_upper, right_lower):
if (distribution[j] != 0):
expand_distribution[j] += expand_value
# ************************ quantize ************************
kl_divergence = compute_kl_divergence(t_distribution, expand_distribution)
if kl_divergence < min_kl_divergence:
min_kl_divergence = kl_divergence
target_threshold = threshold
return target_threshold
def net_forward(net, image_path, transformer):
"""
network inference and statistics the cost time
Args:
net: the instance of Caffe inference
image_path: a image need to be inference
transformer:
Returns:
none
"""
# load image
image = caffe.io.load_image(image_path)
# transformer.preprocess the image
net.blobs['data'].data[...] = transformer.preprocess('data',image)
# net forward
start = time.clock()
output = net.forward()
end = time.clock()
print("%s forward time : %.3f s" % (image_path, end - start))
def file_name(file_dir):
"""
Find the all file path with the directory
Args:
file_dir: The source file directory
Returns:
files_path: all the file path into a list
"""
files_path = []
for root, dir, files in os.walk(file_dir):
for name in files:
file_path = root + "/" + name
print(file_path)
files_path.append(file_path)
return files_path
def network_prepare(net, mean, norm):
"""
instance the prepare process param of caffe network inference
Args:
net: the instance of Caffe inference
mean: the value of mean
norm: the value of normalize
Returns:
none
"""
print("Network initial")
img_mean = np.array(mean)
# initial transformer
transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
# convert shape from RBG to BGR
transformer.set_transpose('data', (2,0,1))
# load meanfile
transformer.set_mean('data', img_mean)
# resize image data from [0,1] to [0,255]
transformer.set_raw_scale('data', 255)
# convert RGB -> BGR
transformer.set_channel_swap('data', (2,1,0))
# normalize
transformer.set_input_scale('data', norm)
return transformer
def weight_quantize(net, net_file, group_on):
"""
CaffeModel convolution weight blob Int8 quantize
Args:
net: the instance of Caffe inference
net_file: deploy caffe prototxt
Returns:
none
"""
print("\nQuantize the kernel weight:")
# parse the net param from deploy prototxt
params = caffe_pb2.NetParameter()
with open(net_file) as f:
text_format.Merge(f.read(), params)
for i, layer in enumerate(params.layer):
if i == 0:
if layer.type != "Input":
raise ValueError("First layer should be input")
# find the convolution 3x3 and 1x1 layers to get out the weight_scale
if(layer.type == "Convolution" or layer.type == "ConvolutionDepthwise"):
kernel_size = layer.convolution_param.kernel_size[0]
if(kernel_size == 3 or kernel_size == 1):
weight_blob = net.params[layer.name][0].data
# initial the instance of QuantizeLayer Class lists,you can use enable group quantize to generate int8 scale for each group layer.convolution_param.group
if (group_on == 1):
quanitze_layer = QuantizeLayer(layer.name, layer.bottom[0], layer.convolution_param.group)
else:
quanitze_layer = QuantizeLayer(layer.name, layer.bottom[0], 1)
# quantize the weight value
quanitze_layer.quantize_weight(weight_blob)
# add the quantize_layer into the save list
quantize_layer_lists.append(quanitze_layer)
return None
def activation_sparse(net, transformer, images_files):
"""
Activation bottom blob sparse analyze
Args:
net: the instance of Caffe inference
transformer:
images_files: calibration dataset
Returns:
none
"""
print("\nAnalyze the sparse info of the Activation:")
# run float32 inference on calibration dataset to find the activations range
for i , image in enumerate(images_files):
net_forward(net, image, transformer)
print("loop stage 1 : %d" % (i))
# find max threshold
for layer in quantize_layer_lists:
blob = net.blobs[layer.blob_name].data[0].flatten()
layer.initial_blob_max(blob)
# calculate statistic blob scope and interval distribution
for layer in quantize_layer_lists:
layer.initial_blob_distubution_interval()
return None
def activation_quantize(net, transformer, images_files):
"""
Activation Int8 quantize, optimaize threshold selection with KL divergence,
given a dataset, find the optimal threshold for quantizing it.
Ref: http://on-demand.gputechconf.com/gtc/2017/presentation/s7310-8-bit-inference-with-tensorrt.pdf
Args:
net: the instance of Caffe inference
transformer:
images_files: calibration dataset
Returns:
none
"""
print("\nQuantize the Activation:")
# run float32 inference on calibration dataset to find the activations range
for i , image in enumerate(images_files):
net_forward(net, image, transformer)
print("loop stage 1 : %d" % (i))
# find max threshold
for layer in quantize_layer_lists:
blob = net.blobs[layer.blob_name].data[0].flatten()
layer.initial_blob_max(blob)
# calculate statistic blob scope and interval distribution
for layer in quantize_layer_lists:
layer.initial_blob_distubution_interval()
# for each layers
# collect histograms of activations
print("\nCollect histograms of activations:")
for i, image in enumerate(images_files):
net_forward(net, image, transformer)
print("loop stage 2 : %d" % (i))
start = time.clock()
for layer in quantize_layer_lists:
blob = net.blobs[layer.blob_name].data[0].flatten()
layer.initial_histograms(blob)
end = time.clock()
print("add cost %.3f s" % (end - start))
# calculate threshold with KL divergence
for layer in quantize_layer_lists:
layer.quantize_blob()
return None
def usage_info():
"""
usage info
"""
print("Input params is illegal...(3)")
print("try it again:\n python caffe-int8-scale-tools.py -h")
def main():
"""
main function
"""
# time start
time_start = datetime.datetime.now()
print(args)
if args.proto == None or args.model == None or args.mean == None or args.images == None:
usage_info()
return None
# deploy caffe prototxt path
net_file = args.proto
# trained caffemodel path
caffe_model = args.model
# mean value
mean = args.mean
# norm value
norm = 1.0
if args.norm != 1.0:
norm = args.norm[0]
# calibration dataset
images_path = args.images
# the output calibration file
calibration_path = args.output
# enable the group scale
group_on = args.group
# default use CPU to forwark
if args.gpu != 0:
caffe.set_device(0)
caffe.set_mode_gpu()
# initial caffe net and the forword model(GPU or CPU)
net = caffe.Net(net_file,caffe_model,caffe.TEST)
# prepare the cnn network
transformer = network_prepare(net, mean, norm)
# get the calibration datasets images files path
images_files = file_name(images_path)
# quanitze kernel weight of the caffemodel to find it's calibration table
# weight_quantize(net, net_file)
weight_quantize(net, net_file, group_on)
# quantize activation value of the caffemodel to find it's calibration table
activation_quantize(net, transformer, images_files)
# save the calibration tables,best wish for your INT8 inference have low accuracy loss :)
save_calibration_file(calibration_path)
# time end
time_end = datetime.datetime.now()
print("\nCaffe Int8 Calibration table create success, it's cost %s, best wish for your INT8 inference has a low accuracy loss...\(^^)/...2333..." % (time_end - time_start))
if __name__ == "__main__":
main()
| 35.241379 | 201 | 0.623092 |
90ebe6d271ee6c6cf02d97187537a0fc19ca7af6 | 2,404 | py | Python | example_project/views.py | AKuederle/flask-template-master | d6d8d2747eccc6629658caf4295106db1b3326a1 | [
"MIT"
] | 2 | 2018-04-06T19:40:22.000Z | 2019-10-09T18:58:03.000Z | example_project/views.py | AKuederle/flask-template-master | d6d8d2747eccc6629658caf4295106db1b3326a1 | [
"MIT"
] | null | null | null | example_project/views.py | AKuederle/flask-template-master | d6d8d2747eccc6629658caf4295106db1b3326a1 | [
"MIT"
] | null | null | null | """
All your views aka. your template endpoints go here.
There are two ways to create a view.
1. Create a new Subclass inheriting from one of the flask_template_master views
2. Use the view-factory function flask_template_master.views.create_template_endpoint
Each view requires an 1 (and 2 optional) things:
1. An environment: The environment provides the templates and handles all options of how templates are rendered
2. (optional) An global provider: A global provider provides variables that are accessible in all templates of the endpoint
3. (optional) An compiler: The compiler gets the rendered template and can handle a postprocessing step and controls the
data that is returned. This can e.g. be used to run a Latex compilation.
"""
import jinja2
from flask_template_master.compiler import LatexCompiler
from flask_template_master.views import BaseTemplateView, create_template_endpoint
from flask_template_master import Api
from flask_template_master.global_provider import DictGlobalProvider
from flask_template_master.environments import LATEX_TEMPLATE_CONFIG
api = Api() # create an instance of an flask-restfull API. Always required!
# This registers '/class_test/' for the overview and '/class_test/<template_name> for the individual templates
TestView.add_as_resource(api, '/class_test/')
# This is an example on how to use the factory function
# Setting up the jinja2 enviroemnt using a file loader with LaTex config
environment = jinja2.Environment(loader=jinja2.FileSystemLoader('./templates'), **LATEX_TEMPLATE_CONFIG)
compiler = LatexCompiler()
create_template_endpoint(api, '/factory_test/', environment=environment, compiler=compiler)
| 50.083333 | 123 | 0.784526 |
90ee4b4c80b7c7885dba22f095e28a087fcc1818 | 5,534 | py | Python | pytorch_translate/test/test_data.py | dpacgopinath/translate-1 | 032246359eab512bcc76208d357eac71e6017eca | [
"BSD-3-Clause"
] | null | null | null | pytorch_translate/test/test_data.py | dpacgopinath/translate-1 | 032246359eab512bcc76208d357eac71e6017eca | [
"BSD-3-Clause"
] | null | null | null | pytorch_translate/test/test_data.py | dpacgopinath/translate-1 | 032246359eab512bcc76208d357eac71e6017eca | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
import unittest
import os
from pytorch_translate import data
from pytorch_translate import dictionary
from pytorch_translate.test import utils as test_utils
| 39.248227 | 104 | 0.568666 |
90ee60eea24c42a26b118fc9be18f7e7dc1fe829 | 2,877 | py | Python | CompareWHDR.py | Z7Gao/InverseRenderingOfIndoorScene | f245d20dcbe05b1de766c2e53af79fd489f58d74 | [
"MIT"
] | 171 | 2020-06-28T04:03:23.000Z | 2022-03-30T08:50:20.000Z | CompareWHDR.py | Z7Gao/InverseRenderingOfIndoorScene | f245d20dcbe05b1de766c2e53af79fd489f58d74 | [
"MIT"
] | 9 | 2020-08-20T08:56:38.000Z | 2022-01-19T19:53:51.000Z | CompareWHDR.py | Z7Gao/InverseRenderingOfIndoorScene | f245d20dcbe05b1de766c2e53af79fd489f58d74 | [
"MIT"
] | 19 | 2020-06-23T11:49:03.000Z | 2022-01-22T01:49:26.000Z | import numpy as np
import sys
import json
import glob
import os.path as osp
import cv2
#root = './testReal_cascade0_black_height120_width160/cascade0/iiw/'
root = 'IIW_cascade1/results_brdf2_brdf1/'
rootGt = '/home/zhl/CVPR20/Resubmission/Dataset/IIW/iiw-dataset/data/'
suffix = 'albedoBS1.png'
count = 0.0
whdr_sum = 0.0
whdr_mean = 0.0
img_list = glob.glob(osp.join(root, '*_%s' % suffix ) )
for img_path in img_list:
#load CGI precomputed file
judgement_path = osp.join(rootGt, img_path.split('/')[-1].split('_')[0] + '.json' )
judgements = json.load(open(judgement_path) )
count+=1.0
ourR = cv2.imread(img_path ).astype(np.float32 ) / 255.0
whdr, _, _ = compute_whdr(ourR, judgements )
whdr_sum += whdr
print('img_path: {0}, whdr: current {1} average {2}'.
format(img_path.split('/')[-1].split('_')[0], whdr, whdr_sum / count ) )
whdr_mean = whdr_sum / count
print('whdr ours: {0}'.format(whdr_mean ) )
| 30.284211 | 132 | 0.601321 |
90f09eaebc2c156f0e6cd05610e75beced48fbbf | 141 | py | Python | theonionbox/stamp.py | ralphwetzel/theonionbox | 9812fce48153955e179755ea7a58413c3bee182f | [
"MIT"
] | 120 | 2015-12-30T09:41:56.000Z | 2022-03-23T02:30:05.000Z | theonionbox/stamp.py | nwithan8/theonionbox | 9e51fe0b4d07fc89a8a133fdeceb5f97d5d58713 | [
"MIT"
] | 57 | 2015-12-29T21:55:14.000Z | 2022-01-07T09:48:51.000Z | theonionbox/stamp.py | nwithan8/theonionbox | 9e51fe0b4d07fc89a8a133fdeceb5f97d5d58713 | [
"MIT"
] | 17 | 2018-02-05T08:57:46.000Z | 2022-02-28T16:44:41.000Z | __title__ = 'The Onion Box'
__description__ = 'Dashboard to monitor Tor node operations.'
__version__ = '20.2'
__stamp__ = '20200119|095654'
| 28.2 | 61 | 0.758865 |
90f0c3ec3d38a98bb3fcc92188fa52c0d41d3751 | 649 | py | Python | UnicodeTraps.py | loamhoof/sublime-plugins-dump | 57518b19a96e090670e2592438688c600a5b875a | [
"MIT"
] | null | null | null | UnicodeTraps.py | loamhoof/sublime-plugins-dump | 57518b19a96e090670e2592438688c600a5b875a | [
"MIT"
] | null | null | null | UnicodeTraps.py | loamhoof/sublime-plugins-dump | 57518b19a96e090670e2592438688c600a5b875a | [
"MIT"
] | null | null | null | import re
from sublime import Region
import sublime_plugin
REPLACEMENTS = {
'\u00a0': ' ', # no-break space
'\u200b': '', # zero-width space
}
| 24.037037 | 87 | 0.66718 |
90f11b4939ee595b17ff6883b04027fa19911aa3 | 2,743 | py | Python | simba/ROI_multiply.py | KonradDanielewski/simba | d7a448222e33dcb9880b65c14b5b676933cc6fd7 | [
"MIT"
] | 172 | 2019-12-18T22:19:42.000Z | 2022-03-29T01:58:25.000Z | simba/ROI_multiply.py | KonradDanielewski/simba | d7a448222e33dcb9880b65c14b5b676933cc6fd7 | [
"MIT"
] | 165 | 2020-01-10T19:05:16.000Z | 2022-03-31T16:08:36.000Z | simba/ROI_multiply.py | KonradDanielewski/simba | d7a448222e33dcb9880b65c14b5b676933cc6fd7 | [
"MIT"
] | 80 | 2019-12-20T00:01:43.000Z | 2022-03-29T16:20:10.000Z | import glob
import pandas as pd
from configparser import ConfigParser
import os
from simba.drop_bp_cords import *
| 55.979592 | 131 | 0.647466 |
90f1959d5f3a4e728e1f0156fde3bb7e15e63fa1 | 689 | py | Python | src/utils/ccxt/fetch_order_book.py | YasunoriMATSUOKA/crypto-asset-easy-management | 5c33fd8612b843ed39f0ec1fd84efa83f3967e42 | [
"MIT"
] | null | null | null | src/utils/ccxt/fetch_order_book.py | YasunoriMATSUOKA/crypto-asset-easy-management | 5c33fd8612b843ed39f0ec1fd84efa83f3967e42 | [
"MIT"
] | 2 | 2020-12-05T09:31:01.000Z | 2020-12-05T12:28:33.000Z | src/utils/ccxt/fetch_order_book.py | YasunoriMATSUOKA/crypto-asset-easy-management | 5c33fd8612b843ed39f0ec1fd84efa83f3967e42 | [
"MIT"
] | null | null | null | from logging import getLogger
import traceback
from .get_public_exchange import get_public_exchange
logger = getLogger("__main__").getChild(__name__)
| 27.56 | 52 | 0.70537 |
90f2e833a7f803e0952b3382cebde491d441fdf8 | 20,080 | py | Python | smoke-classifier/detect_fire.py | agnes-yang/firecam | 9282d1b5b83be3abf6a137f7a72c090a9eca05f6 | [
"Apache-2.0"
] | 10 | 2019-12-19T02:37:33.000Z | 2021-12-07T04:47:08.000Z | smoke-classifier/detect_fire.py | agnes-yang/firecam | 9282d1b5b83be3abf6a137f7a72c090a9eca05f6 | [
"Apache-2.0"
] | 5 | 2019-10-27T23:22:52.000Z | 2020-02-13T23:08:15.000Z | smoke-classifier/detect_fire.py | agnes-yang/firecam | 9282d1b5b83be3abf6a137f7a72c090a9eca05f6 | [
"Apache-2.0"
] | 13 | 2019-09-24T18:53:24.000Z | 2021-07-16T05:57:18.000Z | # Copyright 2018 The Fuego Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
@author: Kinshuk Govil
This is the main code for reading images from webcams and detecting fires
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
fuegoRoot = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(fuegoRoot, 'lib'))
sys.path.insert(0, fuegoRoot)
import settings
settings.fuegoRoot = fuegoRoot
import collect_args
import rect_to_squares
import goog_helper
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' # quiet down tensorflow logging (must be done before tf_helper)
import tf_helper
import db_manager
import email_helper
import sms_helper
import img_archive
from detection_policies import policies
import logging
import pathlib
import tempfile
import shutil
import time, datetime, dateutil.parser
import random
import re
import hashlib
from urllib.request import urlretrieve
import tensorflow as tf
from PIL import Image, ImageFile, ImageDraw, ImageFont
ImageFile.LOAD_TRUNCATED_IMAGES = True
def getNextImage(dbManager, cameras, cameraID=None):
"""Gets the next image to check for smoke
Uses a shared counter being updated by all cooperating detection processes
to index into the list of cameras to download the image to a local
temporary directory
Args:
dbManager (DbManager):
cameras (list): list of cameras
cameraID (str): optional specific camera to get image from
Returns:
Tuple containing camera name, current timestamp, and filepath of the image
"""
if getNextImage.tmpDir == None:
getNextImage.tmpDir = tempfile.TemporaryDirectory()
logging.warning('TempDir %s', getNextImage.tmpDir.name)
if cameraID:
camera = list(filter(lambda x: x['name'] == cameraID, cameras))[0]
else:
index = dbManager.getNextSourcesCounter() % len(cameras)
camera = cameras[index]
timestamp = int(time.time())
imgPath = img_archive.getImgPath(getNextImage.tmpDir.name, camera['name'], timestamp)
# logging.warning('urlr %s %s', camera['url'], imgPath)
try:
urlretrieve(camera['url'], imgPath)
except Exception as e:
logging.error('Error fetching image from %s %s', camera['name'], str(e))
return getNextImage(dbManager, cameras)
md5 = hashlib.md5(open(imgPath, 'rb').read()).hexdigest()
if ('md5' in camera) and (camera['md5'] == md5) and not cameraID:
logging.warning('Camera %s image unchanged', camera['name'])
# skip to next camera
return getNextImage(dbManager, cameras)
camera['md5'] = md5
return (camera['name'], timestamp, imgPath, md5)
getNextImage.tmpDir = None
# XXXXX Use a fixed stable directory for testing
# from collections import namedtuple
# Tdir = namedtuple('Tdir', ['name'])
# getNextImage.tmpDir = Tdir('c:/tmp/dftest')
def getNextImageFromDir(imgDirectory):
"""Gets the next image to check for smoke from given directory
A variant of getNextImage() above but works with files already present
on the locla filesystem.
Args:
imgDirectory (str): directory containing the files
Returns:
Tuple containing camera name, current timestamp, and filepath of the image
"""
if getNextImageFromDir.tmpDir == None:
getNextImageFromDir.tmpDir = tempfile.TemporaryDirectory()
logging.warning('TempDir %s', getNextImageFromDir.tmpDir.name)
if not getNextImageFromDir.files:
allFiles = os.listdir(imgDirectory)
# filter out files with _Score suffix because they contain annotated scores
# generated by drawFireBox() function below.
getNextImageFromDir.files = list(filter(lambda x: '_Score.jpg' not in x, allFiles))
getNextImageFromDir.index += 1
if getNextImageFromDir.index < len(getNextImageFromDir.files):
fileName = getNextImageFromDir.files[getNextImageFromDir.index]
origPath = os.path.join(imgDirectory, fileName)
destPath = os.path.join(getNextImageFromDir.tmpDir.name, fileName)
shutil.copyfile(origPath, destPath)
parsed = img_archive.parseFilename(fileName)
if not parsed:
# failed to parse, so skip to next image
return getNextImageFromDir(imgDirectory)
md5 = hashlib.md5(open(destPath, 'rb').read()).hexdigest()
return (parsed['cameraID'], parsed['unixTime'], destPath, md5)
logging.warning('Finished processing all images in directory. Exiting')
exit(0)
getNextImageFromDir.files = None
getNextImageFromDir.index = -1
getNextImageFromDir.tmpDir = None
def checkAndUpdateAlerts(dbManager, camera, timestamp, driveFileIDs):
"""Check if alert has been recently sent out for given camera
Args:
dbManager (DbManager):
camera (str): camera name
timestamp (int):
driveFileIDs (list): List of Google drive IDs for the uploaded image files
Returns:
True if this is a new alert, False otherwise
"""
# Only alert if there has not been a detection in the last hour. This prevents spam
# from long lasting fires.
sqlTemplate = """SELECT * FROM detections
where CameraName='%s' and timestamp > %s and timestamp < %s"""
sqlStr = sqlTemplate % (camera, timestamp - 60*60, timestamp)
dbResult = dbManager.query(sqlStr)
if len(dbResult) > 0:
logging.warning('Supressing new alert due to recent detection')
return False
dbRow = {
'CameraName': camera,
'Timestamp': timestamp,
'ImageID': driveFileIDs[0] if driveFileIDs else ''
}
dbManager.add_data('alerts', dbRow)
return True
def alertFire(constants, cameraID, imgPath, annotatedFile, driveFileIDs, fireSegment, timestamp):
"""Send alerts about given fire through all channels (currently email and sms)
Args:
constants (dict): "global" contants
cameraID (str): camera name
imgPath: filepath of the original image
annotatedFile: filepath of the annotated image
driveFileIDs (list): List of Google drive IDs for the uploaded image files
fireSegment (dictionary): dictionary with information for the segment with fire/smoke
timestamp (int): time.time() value when image was taken
"""
emailFireNotification(constants, cameraID, imgPath, annotatedFile, driveFileIDs, fireSegment, timestamp)
smsFireNotification(constants['dbManager'], cameraID)
def emailFireNotification(constants, cameraID, imgPath, annotatedFile, driveFileIDs, fireSegment, timestamp):
"""Send an email alert for a potential new fire
Send email with information about the camera and fire score includeing
image attachments
Args:
constants (dict): "global" contants
cameraID (str): camera name
imgPath: filepath of the original image
annotatedFile: filepath of the annotated image
driveFileIDs (list): List of Google drive IDs for the uploaded image files
fireSegment (dictionary): dictionary with information for the segment with fire/smoke
timestamp (int): time.time() value when image was taken
"""
dbManager = constants['dbManager']
subject = 'Possible (%d%%) fire in camera %s' % (int(fireSegment['score']*100), cameraID)
body = 'Please check the attached images for fire.'
# commenting out links to google drive because they appear as extra attachments causing confusion
# and some email recipients don't even have permissions to access drive.
# for driveFileID in driveFileIDs:
# driveTempl = '\nAlso available from google drive as https://drive.google.com/file/d/%s'
# driveBody = driveTempl % driveFileID
# body += driveBody
# emails are sent from settings.fuegoEmail and bcc to everyone with active emails in notifications SQL table
dbResult = dbManager.getNotifications(filterActiveEmail = True)
emails = [x['email'] for x in dbResult]
if len(emails) > 0:
# attach images spanning a few minutes so reviewers can evaluate based on progression
startTimeDT = datetime.datetime.fromtimestamp(timestamp - 3*60)
endTimeDT = datetime.datetime.fromtimestamp(timestamp - 1*60)
with tempfile.TemporaryDirectory() as tmpDirName:
oldImages = img_archive.getHpwrenImages(constants['googleServices'], settings, tmpDirName,
constants['camArchives'], cameraID, startTimeDT, endTimeDT, 1)
attachments = oldImages or []
attachments.append(imgPath)
if annotatedFile:
attachments.append(annotatedFile)
email_helper.sendEmail(constants['googleServices']['mail'], settings.fuegoEmail, emails, subject, body, attachments)
def smsFireNotification(dbManager, cameraID):
"""Send an sms (phone text message) alert for a potential new fire
Args:
dbManager (DbManager):
cameraID (str): camera name
"""
message = 'Fuego fire notification in camera %s. Please check email for details' % cameraID
dbResult = dbManager.getNotifications(filterActivePhone = True)
phones = [x['phone'] for x in dbResult]
if len(phones) > 0:
for phone in phones:
sms_helper.sendSms(settings, phone, message)
def deleteImageFiles(imgPath, origImgPath, annotatedFile):
    """Delete the temporary image files generated for one detection cycle.

    Args:
        imgPath (str): filepath of the (possibly diffed) image
        origImgPath (str): filepath of the original image (may equal imgPath)
        annotatedFile (str): filepath of the annotated image (may be None/empty)
    """
    os.remove(imgPath)
    # only remove the original separately when it is a distinct file
    if imgPath != origImgPath:
        os.remove(origImgPath)
    if annotatedFile:
        os.remove(annotatedFile)
def heartBeat(filename):
    """Inform monitor process that this detection process is alive.

    The liveness signal is simply the modification timestamp of the given
    file, which this call refreshes (creating the file if it does not exist).

    Args:
        filename (str): file path of file used for heartbeating
    """
    heartbeat_file = pathlib.Path(filename)
    heartbeat_file.touch()
def genDiffImage(imgPath, earlierImgPath, minusMinutes):
    """Subtract the two given images and store result in new difference image file

    Args:
        imgPath (str): filepath of the current image (to subtract from)
        earlierImgPath (str): filepath of the earlier image (value to subtract)
        minusMinutes (int): number of minutes separating subtracted images

    Returns:
        str: file path to the difference image
    """
    imgA = Image.open(imgPath)
    imgB = Image.open(earlierImgPath)
    imgDiff = img_archive.diffImages(imgA, imgB)
    # name the diff file after the current image, tagged with the time delta
    parsedName = img_archive.parseFilename(imgPath)
    parsedName['diffMinutes'] = minusMinutes
    imgDiffName = img_archive.repackFileName(parsedName)
    # store the diff file in the same directory as the current image
    ppath = pathlib.PurePath(imgPath)
    imgDiffPath = os.path.join(str(ppath.parent), imgDiffName)
    imgDiff.save(imgDiffPath, format='JPEG')
    return imgDiffPath
def updateTimeTracker(timeTracker, processingTime):
    """Update the time tracker data with the time taken for the current image.

    Accumulates processing times; once more than 50 samples have been
    recorded, recomputes the average timePerSample and clears the history so
    the estimate keeps adapting to current conditions.

    Args:
        timeTracker (dict): tracks recent image processing times
        processingTime (float): number of seconds needed to process current image
    """
    timeTracker['totalTime'] = timeTracker['totalTime'] + processingTime
    timeTracker['numSamples'] = timeTracker['numSamples'] + 1
    # N = 50 samples: big enough to be stable yet small enough to adapt
    if timeTracker['numSamples'] <= 50:
        return
    timeTracker['timePerSample'] = timeTracker['totalTime'] / timeTracker['numSamples']
    timeTracker['totalTime'] = 0
    timeTracker['numSamples'] = 0
    logging.warning('New timePerSample %.2f', timeTracker['timePerSample'])
def initializeTimeTracker():
    """Initialize the time tracker.

    Returns:
        dict: fresh tracker with zeroed history and a default rate estimate
    """
    tracker = {}
    tracker['totalTime'] = 0.0
    tracker['numSamples'] = 0
    tracker['timePerSample'] = 3  # start off with estimate of 3 seconds per camera
    return tracker
def getArchivedImages(constants, cameras, startTimeDT, timeRangeSeconds, minusMinutes):
    """Get random images from HPWREN archive matching given constraints and optionally subtract them

    Picks a random camera and a random time within the given range, downloads
    the matching archived image(s), and, when minusMinutes is set, also
    generates a difference image against the earlier frame.

    Args:
        constants (dict): "global" constants
        cameras (list): list of cameras
        startTimeDT (datetime): starting time of time range
        timeRangeSeconds (int): number of seconds in time range
        minusMinutes (int): number of desired minutes between images to subtract

    Returns:
        Tuple of (camera name, image unix timestamp, filepath of regular image,
        filepath of difference image); (None, None, None, None) on any failure.
    """
    # lazily create the shared temp dir on first invocation
    if getArchivedImages.tmpDir is None:  # identity check against the None singleton (PEP 8)
        getArchivedImages.tmpDir = tempfile.TemporaryDirectory()
        logging.warning('TempDir %s', getArchivedImages.tmpDir.name)

    # pick a random camera and a random time within the requested range
    cameraID = cameras[int(len(cameras)*random.random())]['name']
    timeDT = startTimeDT + datetime.timedelta(seconds = random.random()*timeRangeSeconds)
    if minusMinutes:
        prevTimeDT = timeDT + datetime.timedelta(seconds = -60 * minusMinutes)
    else:
        prevTimeDT = timeDT
    files = img_archive.getHpwrenImages(constants['googleServices'], settings, getArchivedImages.tmpDir.name,
                                        constants['camArchives'], cameraID, prevTimeDT, timeDT, minusMinutes or 1)
    if not files:
        return (None, None, None, None)
    if minusMinutes:
        if len(files) > 1:
            if files[0] >= files[1]: # files[0] is supposed to be earlier than files[1]
                logging.warning('unexpected file order %s', str(files))
                for file in files:
                    os.remove(file)
                return (None, None, None, None)
            imgDiffPath = genDiffImage(files[1], files[0], minusMinutes)
            os.remove(files[0]) # earlier image no longer needed after diffing
            parsedName = img_archive.parseFilename(files[1])
            return (cameraID, parsedName['unixTime'], files[1], imgDiffPath)
        else:
            logging.warning('unexpected file count %s', str(files))
            for file in files:
                os.remove(file)
            return (None, None, None, None)
    elif len(files) > 0:
        parsedName = img_archive.parseFilename(files[0])
        return (cameraID, parsedName['unixTime'], files[0], files[0])
    return (None, None, None, None)
getArchivedImages.tmpDir = None
# Script entry point.
if __name__=="__main__":
    main()
| 42.008368 | 165 | 0.68745 |
90f4632c21bed176e470218adb359ed714cf422b | 8,165 | py | Python | torch_agents/cogment_verse_torch_agents/third_party/hive/mlp.py | kharyal/cogment-verse | 12bcb855bc742e3ec4ed11c40a1b475e95a32515 | [
"Apache-2.0"
] | null | null | null | torch_agents/cogment_verse_torch_agents/third_party/hive/mlp.py | kharyal/cogment-verse | 12bcb855bc742e3ec4ed11c40a1b475e95a32515 | [
"Apache-2.0"
] | null | null | null | torch_agents/cogment_verse_torch_agents/third_party/hive/mlp.py | kharyal/cogment-verse | 12bcb855bc742e3ec4ed11c40a1b475e95a32515 | [
"Apache-2.0"
] | null | null | null | import math
import numpy as np
import torch
import torch.nn.functional as F
from torch import nn
| 32.145669 | 116 | 0.54158 |
90f47be06645ba00851f14cd7c007a7d8432d2b8 | 1,705 | py | Python | phonenumbers/data/region_AC.py | ayushgoel/FixGoogleContacts | e49e58db6718bef8f95b6f767241605441c7fe41 | [
"MIT"
] | 2 | 2019-02-22T05:27:22.000Z | 2020-12-30T19:33:18.000Z | phonenumbers/data/region_AC.py | ayushgoel/FixGoogleContacts | e49e58db6718bef8f95b6f767241605441c7fe41 | [
"MIT"
] | null | null | null | phonenumbers/data/region_AC.py | ayushgoel/FixGoogleContacts | e49e58db6718bef8f95b6f767241605441c7fe41 | [
"MIT"
] | null | null | null | """Auto-generated file, do not edit by hand. AC metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_AC = PhoneMetadata(id='AC', country_code=247, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='[2-467]\\d{3}', possible_number_pattern='\\d{4}'),
fixed_line=PhoneNumberDesc(national_number_pattern='(?:[267]\\d|3[0-5]|4[4-69])\\d{2}', possible_number_pattern='\\d{4}', example_number='6889'),
mobile=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
emergency=PhoneNumberDesc(national_number_pattern='911', possible_number_pattern='\\d{3}', example_number='911'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
short_code=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
standard_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'))
| 85.25 | 149 | 0.794721 |
90f5a5e0a26c00e35828acb499a24e15b010c10d | 1,574 | py | Python | awx/main/migrations/0156_capture_mesh_topology.py | ziegenberg/awx | a3e29317c5d4220fffe28370ec73c73802255246 | [
"Apache-2.0"
] | 1 | 2019-07-21T11:19:50.000Z | 2019-07-21T11:19:50.000Z | awx/main/migrations/0156_capture_mesh_topology.py | ziegenberg/awx | a3e29317c5d4220fffe28370ec73c73802255246 | [
"Apache-2.0"
] | 2 | 2022-02-10T11:57:21.000Z | 2022-02-27T22:43:44.000Z | awx/main/migrations/0156_capture_mesh_topology.py | ziegenberg/awx | a3e29317c5d4220fffe28370ec73c73802255246 | [
"Apache-2.0"
] | null | null | null | # Generated by Django 2.2.20 on 2021-12-17 19:26
from django.db import migrations, models
import django.db.models.deletion
| 34.977778 | 141 | 0.540661 |
90f60f7f71e4d156824089d61cc0fca325d7afa6 | 2,981 | py | Python | models/dsd/bicubic.py | VinAIResearch/blur-kernel-space-exploring | 619c9b3b33961ef9311399d7cbbf92050a0c6b51 | [
"Apache-2.0"
] | 93 | 2021-05-11T08:35:24.000Z | 2022-03-30T10:41:14.000Z | models/dsd/bicubic.py | abcdef2000/blur-kernel-space-exploring | 56f707c6fa6e3c4a570f7816b0d60cd45441de0e | [
"Apache-2.0"
] | 14 | 2021-05-20T05:05:19.000Z | 2022-01-22T22:09:36.000Z | models/dsd/bicubic.py | abcdef2000/blur-kernel-space-exploring | 56f707c6fa6e3c4a570f7816b0d60cd45441de0e | [
"Apache-2.0"
] | 29 | 2021-05-13T04:16:56.000Z | 2022-03-03T02:07:24.000Z | import torch
from torch import nn
from torch.nn import functional as F
| 38.714286 | 113 | 0.570949 |
90f65c76bdf901f4200cf464ae0ba5ac4a47f91a | 655 | py | Python | control/webapp/__init__.py | doismellburning/control-panel | 516feeaac3a0f4c704105204b6efe75a94ba42c3 | [
"MIT"
] | 1 | 2020-07-22T21:47:15.000Z | 2020-07-22T21:47:15.000Z | control/webapp/__init__.py | doismellburning/control-panel | 516feeaac3a0f4c704105204b6efe75a94ba42c3 | [
"MIT"
] | 5 | 2020-08-02T09:59:25.000Z | 2021-11-03T08:02:39.000Z | control/webapp/__init__.py | doismellburning/control-panel | 516feeaac3a0f4c704105204b6efe75a94ba42c3 | [
"MIT"
] | 2 | 2020-08-02T09:02:25.000Z | 2020-12-20T17:45:30.000Z | import logging
from flask import Flask
from . import utils, home, member, society, signup, jobs, admin
from .flask_seasurf import SeaSurf
from flask_talisman import Talisman
# Flask application; templates and static assets live one level above the package.
app = Flask(__name__,
            template_folder="../templates",
            static_folder="../static")
# CSRF protection via SeaSurf; the Referer-header check is disabled.
app.config['CSRF_CHECK_REFERER'] = False
csrf = SeaSurf(app)
# Apply Flask-Talisman to the app (default settings).
Talisman(app)
# Log level follows the app's debug flag.
logging.basicConfig(level=logging.DEBUG if app.debug else logging.INFO)
utils.setup_app(app)
# Register one blueprint per site section.
app.register_blueprint(home.bp)
app.register_blueprint(member.bp)
app.register_blueprint(society.bp)
app.register_blueprint(signup.bp)
app.register_blueprint(jobs.bp)
app.register_blueprint(admin.bp)
| 23.392857 | 71 | 0.770992 |
90f6a8b251142a71fb640f4f91f600712ec01fc8 | 3,569 | py | Python | tests/rules/test_duplicates.py | imbillu/arche | 67a24b83e2f936f1f6f8d1c7bbd4aa147f128b66 | [
"MIT"
] | 1 | 2022-02-22T11:56:03.000Z | 2022-02-22T11:56:03.000Z | tests/rules/test_duplicates.py | imbillu/arche | 67a24b83e2f936f1f6f8d1c7bbd4aa147f128b66 | [
"MIT"
] | null | null | null | tests/rules/test_duplicates.py | imbillu/arche | 67a24b83e2f936f1f6f8d1c7bbd4aa147f128b66 | [
"MIT"
] | null | null | null | import arche.rules.duplicates as duplicates
from arche.rules.result import Level, Outcome
from conftest import create_result
import numpy as np
import pandas as pd
import pytest
# Fixture cases for the duplicates "unique" rule.  Each tuple is
# (input data, schema tagging, expected result messages keyed by Level).
unique_inputs = [
    # no data and no "unique" tags: the rule is skipped
    ({}, {}, {Level.INFO: [(Outcome.SKIPPED,)]}),
    # one tagged column with a single duplicated value
    (
        {"id": ["0", "0", "1"]},
        {"unique": ["id"]},
        {
            Level.ERROR: [
                ("id contains 1 duplicated value(s)", None, {"same '0' `id`": [0, 1]})
            ]
        },
    ),
    # two tagged columns, each reporting its own duplicates
    (
        {
            "id": ["47" for x in range(6)],
            "name": ["Walt", "Juan", "Juan", "Walt", "Walt", "John"],
        },
        {"unique": ["id", "name"]},
        {
            Level.ERROR: [
                (
                    "id contains 1 duplicated value(s)",
                    None,
                    {"same '47' `id`": [i for i in range(6)]},
                ),
                (
                    "name contains 2 duplicated value(s)",
                    None,
                    {"same 'Juan' `name`": [1, 2], "same 'Walt' `name`": [0, 3, 4]},
                ),
            ]
        },
    ),
    # tagged column with no duplicates: no messages expected
    ({"name": ["a", "b"]}, {"unique": ["name"]}, {}),
]
| 29.741667 | 86 | 0.42589 |
90f6bebd46777b051dad40dbf866fa9c85663a73 | 2,602 | py | Python | Question_1.py | Queen-Jonnie/Work | 0197644b09700c8ed9576f8270f3f334588cabc9 | [
"Apache-2.0"
] | null | null | null | Question_1.py | Queen-Jonnie/Work | 0197644b09700c8ed9576f8270f3f334588cabc9 | [
"Apache-2.0"
] | null | null | null | Question_1.py | Queen-Jonnie/Work | 0197644b09700c8ed9576f8270f3f334588cabc9 | [
"Apache-2.0"
] | null | null | null | # This is the word list from where the answers for the hangman game will come from.
word_list = [
2015,
"Fred Swaniker",
"Rwanda and Mauritius",
2,
"Dr, Gaidi Faraj",
"Sila Ogidi",
"Madagascar",
94,
8,
"Mauritius"
]
# Here we are defining the variables 'Right'(for when they get the question correct) and \n
# 'tries'(for when they get a question wrong).
Right = 0
tries = 0
# This function below after called, will greet the user when they input their name.
user_name = input("What is your name?")
greet(user_name)
# This functions below when called, will check when guess is returned whether the user's guess is in the word_list\n
# or not and will print out the appropriate responses while consecutively adding to the 'Right' or 'tries' variable.
guess1 = int(input("When was ALU founded?"))
if alu(guess1):
Right += 1
else:
check(guess1)
tries += 1
guess2 = input("Who is the CEO of ALU")
if alu(guess2):
Right += 1
else:
check(guess2)
tries += 1
guess3 = input("Where are ALU campuses?")
if alu(guess3):
Right += 1
else:
check(guess3)
tries += 1
guess4 = int(input("How many campuses does ALU have?"))
if alu(guess4):
Right += 1
else:
check(guess4)
tries += 1
guess5 = input("What is the name of ALU Rwanda's Dean?")
if alu(guess5):
Right += 1
else:
check(guess5)
tries += 1
guess6 = input("Who is in charge of Student Life?")
if alu(guess6):
Right += 1
else:
check(guess6)
tries += 1
if tries == 6:
exit("You lost")
guess7 = input("What is the name of our Lab?")
if alu(guess7):
Right += 1
else:
check(guess7)
tries += 1
if tries == 6:
exit("You lost")
guess8 = int(input("How many students do we have in Year 2 CS?"))
if alu(guess8):
Right += 1
else:
check(guess8)
tries += 1
if tries == 6:
exit("You lost")
guess9 = int(input("How many degrees does ALU offer?"))
if alu(guess9):
Right += 1
else:
check(guess9)
tries += 1
if tries == 6:
exit("You lost")
guess10 = input("Where are the headquarters of ALU?")
if alu(guess10):
Right += 1
else:
check(guess10)
tries += 1
if tries == 6:
exit("You lost")
| 22.050847 | 117 | 0.595696 |
90f6d9fde4d8fbf472167b25a5be418433abe7aa | 1,416 | py | Python | node-api/get-block-transfers/request.py | Venoox/casper-integrations | 2da32cc5db8278fff09d2a966c24491c6b936c80 | [
"Apache-2.0"
] | 5 | 2021-07-01T13:58:19.000Z | 2022-03-02T10:46:32.000Z | node-api/get-block-transfers/request.py | Venoox/casper-integrations | 2da32cc5db8278fff09d2a966c24491c6b936c80 | [
"Apache-2.0"
] | 3 | 2021-06-07T15:15:36.000Z | 2021-10-10T15:38:44.000Z | node-api/get-block-transfers/request.py | Venoox/casper-integrations | 2da32cc5db8278fff09d2a966c24491c6b936c80 | [
"Apache-2.0"
] | 6 | 2021-06-05T16:04:28.000Z | 2022-01-06T07:30:05.000Z | import json
import os
import pycspr
# A known casper test-net node address.
_NODE_ADDRESS = os.getenv("CASPER_NODE_ADDRESS", "3.136.227.9")
# A known block hash.
_BLOCK_HASH: bytes = bytes.fromhex("c7148e1e2e115d8fba357e04be2073d721847c982dc70d5c36b5f6d3cf66331c")
# A known block height.
_BLOCK_HEIGHT: int = 20652
def main():
"""Retrieves transfers by block.
"""
# Set client.
client = pycspr.NodeClient(pycspr.NodeConnectionInfo(host=_NODE_ADDRESS))
# Set block by known hash.
block_transers_1: tuple = client.queries.get_block_transfers(_BLOCK_HASH)
# Set block by known height.
block_transers_2: tuple = client.queries.get_block_transfers(_BLOCK_HEIGHT)
# Verify block information equivalence.
assert block_transers_1 == block_transers_2
print("-----------------------------------------------------------------------------------------------------")
print(f"QUERIED TEST-NET NODE {_NODE_ADDRESS}")
print("-----------------------------------------------------------------------------------------------------")
print(f"Block transfers = {json.dumps(block_transers_1, indent=4)}")
print("-----------------------------------------------------------------------------------------------------")
if __name__ == "__main__":
try:
main()
except Exception as err:
print(f"API ERROR @ NODE {_NODE_ADDRESS} :: {err}")
| 30.782609 | 114 | 0.551554 |
90f942653019dc0ae99aab8acbed167dceaab3ce | 582 | py | Python | tests/test_util.py | re3turn/twicrawler | 139c8f72a619b188433e32c271e64d3b5dc0c77e | [
"MIT"
] | 14 | 2019-10-18T04:01:38.000Z | 2022-02-24T15:28:54.000Z | tests/test_util.py | re3turn/twicrawler | 139c8f72a619b188433e32c271e64d3b5dc0c77e | [
"MIT"
] | 47 | 2019-10-05T13:32:56.000Z | 2021-10-17T06:40:25.000Z | tests/test_util.py | re3turn/twicrawler | 139c8f72a619b188433e32c271e64d3b5dc0c77e | [
"MIT"
] | 4 | 2019-10-05T13:23:47.000Z | 2020-10-01T05:32:26.000Z | import nose2.tools
from typing import Union
from app.util import has_attributes
| 20.068966 | 89 | 0.575601 |
90f9829385890920a9abc0c7d59d052db4801faf | 4,427 | py | Python | commands/data/fusion_data.py | Christ0ph990/Fusion360DevTools | fce10e34b3b92a058d275956c07d1b891ce02192 | [
"MIT"
] | 3 | 2022-02-12T21:00:39.000Z | 2022-03-18T13:17:17.000Z | commands/data/fusion_data.py | Christ0ph990/Fusion360DevTools | fce10e34b3b92a058d275956c07d1b891ce02192 | [
"MIT"
] | null | null | null | commands/data/fusion_data.py | Christ0ph990/Fusion360DevTools | fce10e34b3b92a058d275956c07d1b891ce02192 | [
"MIT"
] | null | null | null | # Copyright 2022 by Autodesk, Inc.
# Permission to use, copy, modify, and distribute this software in object code form
# for any purpose and without fee is hereby granted, provided that the above copyright
# notice appears in all copies and that both that copyright notice and the limited
# warranty and restricted rights notice below appear in all supporting documentation.
#
# AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS. AUTODESK SPECIFICALLY
# DISCLAIMS ANY IMPLIED WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE.
# AUTODESK, INC. DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE
# UNINTERRUPTED OR ERROR FREE.
from dataclasses import dataclass, field
import base64
import pprint
import adsk.core
app = adsk.core.Application.get()
| 42.161905 | 120 | 0.688954 |
90f9b4568f182777eba66204bdc021fa7e74466b | 5,044 | py | Python | src/config-producer/config_topic.py | DougFigueroa/realde-kafka-assesment | 06582907d05a51a68cc368d00f85ba97f95fd533 | [
"MIT"
] | null | null | null | src/config-producer/config_topic.py | DougFigueroa/realde-kafka-assesment | 06582907d05a51a68cc368d00f85ba97f95fd533 | [
"MIT"
] | null | null | null | src/config-producer/config_topic.py | DougFigueroa/realde-kafka-assesment | 06582907d05a51a68cc368d00f85ba97f95fd533 | [
"MIT"
] | null | null | null | """
This process creates the two kafka topics to be used.
The input-topic with ten partitions and the output-topic with one partition.
Also preloads the kafka cluster with test data (if flag is set to true).
"""
import os
import time
import json
import logging
from confluent_kafka.admin import AdminClient, NewTopic
from confluent_kafka import Producer
# defining logger
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# reading the environment variables defined on the docker compose
KAFKA_CLUSTER = os.environ.get('KAFKA_CLUSTER_CONNECT', 'localhost:9092')
# parse the flag explicitly: environment values are strings, so a bare
# truthiness check would treat 'false' or '0' as enabled
LOAD_DATA = os.environ.get('LOAD_SAMPLE_DATA', 'false').strip().lower() in ('1', 'true', 'yes')
logging.info(
    (f'>Env variables: KAFKA_CLUSTER_CONNECT={KAFKA_CLUSTER} '
     f'LOAD_SAMPLE_DATA={LOAD_DATA}'))
BROKER_CONFIG = {'bootstrap.servers': KAFKA_CLUSTER}
def read_json_file(file_route: str) -> dict:
    """
    Read the json configuration file to set topics and partitions.
    Args:
        - str, the route(with name) of the configuration file.
    Returns:
        - dict, with the configurations defined on the json file.
    """
    # JSON is defined as UTF-8; don't depend on the platform default encoding
    with open(file_route, 'r', encoding='utf-8') as f:
        config = json.load(f)
    logging.info('JSON file read.')
    return config
def create_topics(admin: object, config: dict) -> None:
    """Create the kafka topics based on the configuration file.
    Args:
        - object, the admin client kafka object.
        - dict, json configuration of the process.
    Returns: None.
    """
    # build one NewTopic per configured entry (replication factor fixed at 1)
    topics = [
        NewTopic(
            entry['topic_name'],
            num_partitions=entry['partitions_quantity'],
            replication_factor=1,
        )
        for entry in config.values()
    ]
    logging.info(f'Starting the creation of the topics: {topics}...')
    creation_response = admin.create_topics(topics)
    # create_topics() is asynchronous: each topic maps to a future that must
    # be resolved to learn whether its creation succeeded or failed
    for topic, future in creation_response.items():
        try:
            future.result()
            logging.info(f'Creation of the topic {topic} completed.')
        except Exception as e:
            logger.error(f'Error creating the kafka topic: {topic}. {e}')
            raise Exception(f'Error creating the kafka topic: {topic}. {e}')
def list_topics_and_config(admin: object) -> None:
    """Log the brokers and topics currently known to the Kafka cluster.
    Args:
        - object, the admin client kafka object.
    Returns: None.
    """
    metadata = admin.list_topics(timeout=5)
    # dump every broker known to the cluster
    logging.info('>Broker details:')
    position = 1
    for broker in metadata.brokers.items():
        logging.info(f'{position}-Broker info: {broker}')
        position += 1
    # dump every topic together with its metadata
    logging.info('>Topics details:')
    position = 1
    for topic_data in metadata.topics.items():
        logging.info(f'{position}-Topic info: {topic_data}')
        position += 1
def load_sample_data(topic: str, sample_data: list) -> None:
    """Loads the sample data to the input kafka topic.
    This will load data across 10 different partitions.
    Args:
        - str, the topic name where the data is going to be loaded.
        - list, the sample data to be loaded by the producer across
        all the partitions of the specified topic.
    Returns: None
    """
    producer = Producer(BROKER_CONFIG)
    # iterate through partitions
    for data in sample_data:
        for number in data['values']:
            try:
                # key is None; the partition comes from the sample definition
                producer.produce(topic, str(number), None, data['partition'])
            except Exception as e:
                logger.error(
                    f'Producer failed to produce a message to the topic. {e}')
                raise Exception(
                    f'Failed to produce a message to Kafka. {e}')
            # serve delivery callbacks without blocking
            producer.poll(0)
    # ensure all the delivery queue has been loaded
    producer.flush()
    logging.info('Data successfully produced and loaded to the specified topic.')
def main() -> None:
    """Orchestrates all the process execution.

    Waits for the broker to come up, creates the configured topics, logs the
    resulting cluster state, and optionally preloads the sample input data.
    """
    # give the kafka broker a moment to finish starting up
    time.sleep(5)
    base_dir = os.path.dirname(__file__)
    configuration_path = os.path.join(base_dir, 'topic_config.json')
    data_path = os.path.join(base_dir, 'sample_data.json')
    config = read_json_file(configuration_path)
    # the admin client is needed to create topics
    admin = AdminClient(BROKER_CONFIG)
    create_topics(admin, config)
    # purely for validation: log what the cluster now contains
    list_topics_and_config(admin)
    if not LOAD_DATA:
        return
    # preload the sample data into the input topic
    in_topic_name = config['in_topic_conf']['topic_name']
    sample_data = read_json_file(data_path)
    load_sample_data(in_topic_name, sample_data)
# Script entry point.
if __name__ == '__main__':
    main()
| 35.77305 | 80 | 0.673672 |
90f9cab42c98867e4c26010b699fc6f4bbfe103f | 167 | py | Python | deallocate/params.py | jefferycwc/tacker-example-plugin | 641d2acebca3b95c7d2d635769b6f0f2d84051b2 | [
"Apache-2.0"
] | null | null | null | deallocate/params.py | jefferycwc/tacker-example-plugin | 641d2acebca3b95c7d2d635769b6f0f2d84051b2 | [
"Apache-2.0"
] | null | null | null | deallocate/params.py | jefferycwc/tacker-example-plugin | 641d2acebca3b95c7d2d635769b6f0f2d84051b2 | [
"Apache-2.0"
] | 1 | 2022-01-19T01:35:43.000Z | 2022-01-19T01:35:43.000Z | OS_MA_NFVO_IP = '192.168.1.197'
OS_USER_DOMAIN_NAME = 'Default'
OS_USERNAME = 'admin'
OS_PASSWORD = '0000'
OS_PROJECT_DOMAIN_NAME = 'Default'
OS_PROJECT_NAME = 'admin' | 27.833333 | 34 | 0.772455 |
90f9e5e1aabce04f1b2939743e6a19578017960f | 1,591 | py | Python | anoplura/patterns/body_part.py | rafelafrance/traiter_lice | 29d653a80b57225203c98198c26fd9fcbdb08843 | [
"MIT"
] | null | null | null | anoplura/patterns/body_part.py | rafelafrance/traiter_lice | 29d653a80b57225203c98198c26fd9fcbdb08843 | [
"MIT"
] | 1 | 2020-06-07T18:25:08.000Z | 2020-06-07T18:25:08.000Z | anoplura/patterns/body_part.py | rafelafrance/traiter_lice | 29d653a80b57225203c98198c26fd9fcbdb08843 | [
"MIT"
] | null | null | null | """Extract body part annotations."""
import re
import spacy
from traiter.const import COMMA
from traiter.patterns.matcher_patterns import MatcherPatterns
from anoplura.pylib.const import COMMON_PATTERNS
from anoplura.pylib.const import CONJ
from anoplura.pylib.const import MISSING
from anoplura.pylib.const import REPLACE
JOINER = CONJ + COMMA
JOINER_RE = "|".join(JOINER + [r"\s"])
JOINER_RE = re.compile(rf"\b(?:{JOINER_RE})\b", flags=re.IGNORECASE)
MISSING_RE = "|".join([fr"\b{m}\b" for m in MISSING])
MISSING_RE = re.compile(MISSING_RE, flags=re.IGNORECASE)
BODY_PART = MatcherPatterns(
"body_part",
on_match="anoplura.body_part.v1",
decoder=COMMON_PATTERNS
| {
"seg": {"ENT_TYPE": "segmented"},
"ord": {"ENT_TYPE": {"IN": ["ordinal", "number_word"]}},
},
patterns=[
"missing part+",
"missing? any_part* part",
"part+ &/,/or* part* &/,/or* part+",
"part+ ord -? ord",
"part+ 99? -? 99",
"part+ ord?",
"part+ 99?",
"part+ ord -? seg",
"part+ 99 -? seg",
"ord? -? seg? part+",
"99 - seg part+",
],
)
| 26.081967 | 68 | 0.60088 |
90fa1b52a86892da98479fc272386682615fa765 | 17,478 | py | Python | Termux-pkg-apt.py | Hironotori/Termux-pkg-apt | db1c33b750e82943c8c5b2780d69654ab4afde96 | [
"BSL-1.0"
] | 1 | 2021-04-12T18:33:25.000Z | 2021-04-12T18:33:25.000Z | Termux-pkg-apt.py | Hironotori/Termux-pkg-apt | db1c33b750e82943c8c5b2780d69654ab4afde96 | [
"BSL-1.0"
] | null | null | null | Termux-pkg-apt.py | Hironotori/Termux-pkg-apt | db1c33b750e82943c8c5b2780d69654ab4afde96 | [
"BSL-1.0"
] | 1 | 2021-10-17T00:44:37.000Z | 2021-10-17T00:44:37.000Z | #!/usr/bin/python3
import os
import time
import sys
os.system("clear")
print('''\033[91m
CREATED BY Hironotori
''')
slowprint(''' \033[93m
[1] apt-pkg pip-pip3 [2] apt-pkg python
[3] apt-pkg python2 [4] apt-pkg bash
[5] apt-pkg git [6] apt-pkg perl
[7] apt-pkg nano [8] apt-pkg curl
[9] apt-pkg openssl [10] apt-pkg openssh
[11] apt-pkg wget [12] apt-pkg clang
[13] apt-pkg nmap [14] apt-pkg w3m
[15] apt-pkg ruby [16] apt-pkg dnsutils
[17] apt-pkg coreutils [18] apt-pkg fish.
[19] apt-pkg zip [20] apt-pkg figlet.
[21] apt-pkg cowsay [22] apt-pkg unzip.
[23] apt-pkg vim [24] apt-pkg wcalc.
[25] apt-pkg bmon [26] apt-pkg unrar.
[27] apt-pkg proot [28] apt-pkg golang.
[29] apt-pkg tsu [30] apt-pkg tor.
[31] apt-pkg php
[00] [0] ''')
print (" ")
choice = input("\033[93m : ")
if choice == '0' : sys.exit()
if choice == '1' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrade")
os.system ("apt update")
os.system ("pkg update")
os.system("python -m pip install --upgrade pip")
os.system ("pip3 install --upgrade setuptools pip")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '2' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrade")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install python -y")
os.system ("pkg upgrade python -y")
os.system ("apt install python -y")
os.system ("apt upgrade python -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '3' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrade")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install python2 -y")
os.system ("pkg upgrade python2 -y")
os.system ("apt install python2 -y")
os.system ("apt upgrade python2 -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '4' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrade")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install bash")
os.system ("apt install bash")
os.system ("pkg upgrade bash")
os.system ("apt upgrade bash")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '5' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrade")
os.system ("apt update")
os.system ("pkg update")
os.system ("apt install git -y")
os.system ("pkg install git -y")
os.system ("pkg upgrade git -y")
os.system ("apt upgrade git -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '6' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrade")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install perl -y")
os.system ("apt install perl -y")
os.system ("pkg upgrade perl -y")
os.system ("apt upgrade perl -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '7' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrade")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install nano -y")
os.system ("apt install nano -y")
os.system ("pkg upgrade nano -y")
os.system ("apt upgrade nano -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '8' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrade")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install curl -y")
os.system ("apt install curl -y")
os.system ("pkg upgrade curl -y")
os.system ("apt upgrade curl -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '9' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install openssl -y")
os.system ("apt install openssl -y")
os.system ("pkg upgrade openssl -y")
os.system ("apt upgrade openssl -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '10' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install openssh -y")
os.system ("apt install openssh -y")
os.system ("pkg upgrade openssh -y")
os.system ("apt upgrade openssh -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '11' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install wget -y")
os.system ("apt install wget -y")
os.system ("pkg upgrade wget -y")
os.system ("apt upgrade wget -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '12' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install clang -y")
os.system ("apt install clang -y")
os.system ("pkg upgrade clang -y")
os.system ("apt upgrade clang -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '13' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install nmap -y")
os.system ("apt install nmap -y")
os.system ("pkg upgrade nmap -y")
os.system ("apt upgrade nmap -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '14' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install w3m -y")
os.system ("apt install w3m -y")
os.system ("pkg upgrade w3m -y")
os.system ("apt upgrade w3m -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '15' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install ruby -y")
os.system ("apt install ruby -y")
os.system ("pkg upgrade ruby -y")
os.system ("apt upgrade ruby -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '16' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install dnsutils -y")
os.system ("apt install dnsutils -y")
os.system ("pkg upgrade dnsutils -y")
os.system ("apt upgrade dnsutils -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '17' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install coreutils -y")
os.system ("apt install coreutils -y")
os.system ("pkg upgrade coreutils -y")
os.system ("apt upgrade coreutils -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '18' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install fish -y")
os.system ("apt install fish -y")
os.system ("pkg upgrade fish -y")
os.system ("apt upgrade fish -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '19' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install zip -y")
os.system ("apt install zip -y")
os.system ("pkg upgrade zip -y")
os.system ("apt upgrade zip -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '20' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install figlet -y")
os.system ("apt install figlet -y")
os.system ("pkg upgrade figlet -y")
os.system ("apt upgrade figlet -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '21' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install cowsay -y")
os.system ("apt install cowsay -y")
os.system ("pkg upgrade cowsay -y")
os.system ("apt upgrade cowsay -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '22' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install unzip -y")
os.system ("apt install unzip -y")
os.system ("pkg upgrade unzip -y")
os.system ("apt upgrade unzip -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '23' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install vim -y")
os.system ("apt install vim -y")
os.system ("pkg upgrade vim -y")
os.system ("apt upgrade vim -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '24' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install wcalc -y")
os.system ("apt install wcalc -y")
os.system ("pkg upgrade wcalc -y")
os.system ("apt upgrade wcalc -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '25' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install bmon -y")
os.system ("apt install bmon -y")
os.system ("pkg upgrade bmon -y")
os.system ("apt upgrade bmon -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '26' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install unrar -y")
os.system ("apt install unrar -y")
os.system ("pkg upgrade unrar -y")
os.system ("apt upgrade unrar -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '27' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install proot -y")
os.system ("apt install proot -y")
os.system ("pkg upgrade proot -y")
os.system ("apt upgrade proot -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '28' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install golang -y")
os.system ("apt install golang -y")
os.system ("pkg upgrade golang -y")
os.system ("apt upgrade golang -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '29' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system("pkg install tsu-y")
os.system ("apt install tsu -y")
os.system ("pkg upgrade tsu -y")
os.system ("apt upgrade tsu -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '30' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install tor")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '31' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system ("pkg install php -y")
os.system ("pkg upgrade php -y")
os.system ("apt install php -y")
os.system ("apt upgrade php -y")
os.system ("termux-setup-storage")
sys.exit ()
if choice == '00' : os.system ("apt upgrade -y")
os.system ("pkg install")
os.system ("pkg upgrade")
os.system ("apt install")
os.system ("apt upgrate")
os.system ("apt update")
os.system ("pkg update")
os.system("python -m pip install --upgrade pip")
os.system ("pip3 install --upgrade setuptools pip")
os.system ("pkg install python -y")
os.system ("pkg upgrade python -y")
os.system ("apt install python -y")
os.system ("apt upgrade python -y")
os.system ("pkg install python2 -y")
os.system ("pkg upgrade python2 -y")
os.system ("apt install python2 -y")
os.system ("apt upgrade python2 -y")
os.system ("pkg install php -y")
os.system ("pkg upgrade php -y")
os.system ("apt install php -y")
os.system ("apt upgrade php -y")
os.system ("pkg install bash")
os.system ("apt install bash")
os.system ("pkg upgrade bash")
os.system ("apt upgrade bash")
os.system ("apt install git -y")
os.system ("pkg install git -y")
os.system ("pkg upgrade git -y")
os.system ("apt upgrade git -y")
os.system ("pkg install perl -y")
os.system ("apt install perl -y")
os.system ("pkg upgrade perl -y")
os.system ("apt upgrade perl -y")
os.system ("pkg install nano -y")
os.system ("apt install nano -y")
os.system ("pkg upgrade nano -y")
os.system ("apt upgrade nano -y")
os.system ("pkg install curl -y")
os.system ("apt install curl -y")
os.system ("pkg upgrade curl -y")
os.system ("apt upgrade curl -y")
os.system ("pkg install openssl -y")
os.system ("apt install openssl -y")
os.system ("pkg upgrade openssl -y")
os.system ("apt upgrade openssl -y")
os.system ("pkg install openssh -y")
os.system ("apt install openssh -y")
os.system ("pkg upgrade openssh -y")
os.system ("apt upgrade openssh -y")
os.system ("pkg install wget -y")
os.system ("apt install wget -y")
os.system ("pkg upgrade wget -y")
os.system ("apt upgrade wget -y")
os.system ("pkg install clang -y")
os.system ("apt install clang -y")
os.system ("pkg upgrade clang -y")
os.system ("apt upgrade clang -y")
os.system ("pkg install nmap -y")
os.system ("apt install nmap -y")
os.system ("pkg upgrade nmap -y")
os.system ("apt upgrade nmap -y")
os.system ("pkg install w3m -y")
os.system ("apt install w3m -y")
os.system ("pkg upgrade w3m -y")
os.system ("apt upgrade w3m -y")
os.system ("pkg install ruby -y")
os.system ("apt install ruby -y")
os.system ("pkg upgrade ruby -y")
os.system ("apt upgrade ruby -y")
os.system ("pkg install dnsutils -y")
os.system ("apt install dnsutils -y")
os.system ("pkg upgrade dnsutils -y")
os.system ("apt upgrade dnsutils -y")
os.system ("pkg install coreutils -y")
os.system ("apt install coreutils -y")
os.system ("pkg upgrade coreutils -y")
os.system ("apt upgrade coreutils -y")
os.system ("pkg install fish -y")
os.system ("apt install fish -y")
os.system ("pkg upgrade fish -y")
os.system ("apt upgrade fish -y")
os.system ("pkg install zip -y")
os.system ("apt install zip -y")
os.system ("pkg upgrade zip -y")
os.system ("apt upgrade zip -y")
os.system ("pkg install figlet -y")
os.system ("apt install figlet -y")
os.system ("pkg upgrade figlet -y")
os.system ("apt upgrade figlet -y")
os.system ("pkg install cowsay -y")
os.system ("apt install cowsay -y")
os.system ("pkg upgrade cowsay -y")
os.system ("apt upgrade cowsay -y")
os.system ("pkg install unzip -y")
os.system ("apt install unzip -y")
os.system ("pkg upgrade unzip -y")
os.system ("apt upgrade unzip -y")
os.system ("pkg install vim -y")
os.system ("apt install vim -y")
os.system ("pkg upgrade vim -y")
os.system ("apt upgrade vim -y")
os.system ("pkg install wcalc -y")
os.system ("apt install wcalc -y")
os.system ("pkg upgrade wcalc -y")
os.system ("apt upgrade wcalc -y")
os.system ("pkg install bmon -y")
os.system ("apt install bmon -y")
os.system ("pkg upgrade bmon -y")
os.system ("apt upgrade bmon -y")
os.system ("pkg install unrar -y")
os.system ("apt install unrar -y")
os.system ("pkg upgrade unrar -y")
os.system ("apt upgrade unrar -y")
os.system ("pkg install proot -y")
os.system ("apt install proot -y")
os.system ("pkg upgrade proot -y")
os.system ("apt upgrade proot -y")
os.system ("pkg install golang -y")
os.system ("apt install golang -y")
os.system ("pkg upgrade golang -y")
os.system ("apt upgrade golang -y")
os.system("pkg install tsu-y")
os.system ("apt install tsu -y")
os.system ("pkg upgrade tsu -y")
os.system ("apt upgrade tsu -y")
os.system ("pkg install tor")
os.system ("termux-setup-storage")
sys.exit () | 31.099644 | 54 | 0.66947 |
90faa5d3c27957f6280791f2da201e228021ab56 | 10,255 | py | Python | RFEM/Loads/solidSetLoad.py | DavidNaizheZhou/RFEM_Python_Client | a5f7790b67de3423907ce10c0aa513c0a1aca47b | [
"MIT"
] | 16 | 2021-10-13T21:00:11.000Z | 2022-03-21T11:12:09.000Z | RFEM/Loads/solidSetLoad.py | DavidNaizheZhou/RFEM_Python_Client | a5f7790b67de3423907ce10c0aa513c0a1aca47b | [
"MIT"
] | 49 | 2021-10-19T13:18:51.000Z | 2022-03-30T08:20:17.000Z | RFEM/Loads/solidSetLoad.py | DavidNaizheZhou/RFEM_Python_Client | a5f7790b67de3423907ce10c0aa513c0a1aca47b | [
"MIT"
] | 7 | 2021-10-13T06:06:24.000Z | 2022-03-29T17:48:39.000Z | from RFEM.initModel import Model, clearAtributes, ConvertToDlString
from RFEM.enums import SolidSetLoadType, SolidSetLoadDistribution, SolidSetLoadDirection
| 36.756272 | 202 | 0.648757 |
90ff9054dbaf433bbb08f4f56988df6c49765e6b | 838 | py | Python | examples_2d/patch.py | 5A5H/PyFEMP | 94ebf58a52230680fd87b699f295ccb3efa6c46a | [
"MIT"
] | 1 | 2021-12-09T06:40:39.000Z | 2021-12-09T06:40:39.000Z | examples_2d/patch.py | 5A5H/PyFEMP | 94ebf58a52230680fd87b699f295ccb3efa6c46a | [
"MIT"
] | null | null | null | examples_2d/patch.py | 5A5H/PyFEMP | 94ebf58a52230680fd87b699f295ccb3efa6c46a | [
"MIT"
] | null | null | null | # 2D example tensile test
import numpy as np
import matplotlib.pyplot as plt
import PyFEMP
import PyFEMP.elements.Elmt_BaMo_2D as ELEMENT
FEM = PyFEMP.FEM_Simulation(ELEMENT)
n = 4
XI, Elem = PyFEMP.msh_rec([0.0, 0.0], [10.0, 10.0], [n, n], type='Q1')
FEM.Add_Mesh(XI, Elem)
FEM.Add_Material([2100, 0.3], "All")
FEM.Add_EBC("x==0", "UX", 0)
FEM.Add_EBC("y==0", "UY", 0)
FEM.Add_EBC("x==10", "UX", 1)
FEM.Analysis()
FEM.NextStep(1.0, 1.0)
print( FEM.NewtonIteration() )
print( FEM.NewtonIteration() )
ux = FEM.NodalDof("x==10 and y==10", "UX")
uy = FEM.NodalDof("x==10 and y==10", "UY")
print('ux :',ux, 'uy :',uy)
fig, ax = plt.subplots(1,1, figsize=(8.0, 8.0))
postplot = FEM.ShowMesh(ax, ec='b', label='reference config.')
postplot = FEM.ShowMesh(ax, deformedmesh=True, ec='r', label='current config.')
ax.legend()
plt.show() | 25.393939 | 79 | 0.656325 |
2902581b3eec77bc0cde12663550bb823357a3a8 | 547 | py | Python | common-patterns/producer_consumer_client.py | kyeett/websockets-examples | bf4f3d848bd5ac523563fc2a5624aaec85c6124d | [
"MIT"
] | null | null | null | common-patterns/producer_consumer_client.py | kyeett/websockets-examples | bf4f3d848bd5ac523563fc2a5624aaec85c6124d | [
"MIT"
] | null | null | null | common-patterns/producer_consumer_client.py | kyeett/websockets-examples | bf4f3d848bd5ac523563fc2a5624aaec85c6124d | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import asyncio
import websockets
import os
port = int(os.environ.get('PORT', '8765'))
asyncio.get_event_loop().run_until_complete(hello())
| 21.038462 | 75 | 0.586837 |
290322d74e5c2e76a5c2f837892bc099f63d97a5 | 1,073 | py | Python | wagtailflags/forms.py | cfpb/wagtail-flags | 3ef8b51b7e3c8f0f55ec4fbed07668210cc04274 | [
"CC0-1.0"
] | 75 | 2017-02-02T19:25:50.000Z | 2022-03-23T08:09:20.000Z | wagtailflags/forms.py | cfpb/wagtail-flags | 3ef8b51b7e3c8f0f55ec4fbed07668210cc04274 | [
"CC0-1.0"
] | 31 | 2017-02-02T16:48:44.000Z | 2021-12-01T19:36:39.000Z | wagtailflags/forms.py | cfpb/wagtail-flags | 3ef8b51b7e3c8f0f55ec4fbed07668210cc04274 | [
"CC0-1.0"
] | 17 | 2017-01-31T18:52:19.000Z | 2021-09-20T14:34:17.000Z | from django import forms
from flags.forms import FlagStateForm as DjangoFlagsFlagStateForm
from flags.models import FlagState
from flags.sources import get_flags
| 24.953488 | 65 | 0.61603 |
29036ba6db6b8f13ddf689e6933342fcab6c293b | 830 | py | Python | src/utils/utils.py | GuiYuDaniel/CGC_of_Sn | c54e4e65a5ecff09d3e4c5fed76bf30b3804fefa | [
"MIT"
] | null | null | null | src/utils/utils.py | GuiYuDaniel/CGC_of_Sn | c54e4e65a5ecff09d3e4c5fed76bf30b3804fefa | [
"MIT"
] | 2 | 2022-01-19T04:36:29.000Z | 2022-01-27T09:15:38.000Z | src/utils/utils.py | GuiYuDaniel/CGC_of_Sn | c54e4e65a5ecff09d3e4c5fed76bf30b3804fefa | [
"MIT"
] | null | null | null | # -*- coding:utf8 -*-
"""
"""
import uuid
from enum import Enum, unique
from utils.log import get_logger
logger = get_logger(__name__)
| 21.282051 | 92 | 0.657831 |
2903a87ca8ef20b939b217181d37a2379c18a9f6 | 693 | py | Python | extras/scripts/finish_ci.py | connornishijima/PixieChroma | 4812c72087550797d17f6fe4d003eda2ce1cc25a | [
"MIT"
] | 20 | 2021-10-30T19:15:27.000Z | 2022-03-22T14:59:13.000Z | extras/scripts/finish_ci.py | connornishijima/PixieChroma | 4812c72087550797d17f6fe4d003eda2ce1cc25a | [
"MIT"
] | 39 | 2021-10-29T22:21:53.000Z | 2022-02-06T17:50:05.000Z | extras/scripts/finish_ci.py | connornishijima/PixieChroma | 4812c72087550797d17f6fe4d003eda2ce1cc25a | [
"MIT"
] | 5 | 2021-10-30T23:55:41.000Z | 2022-01-07T09:05:47.000Z | # This is just for GitHub, and is used to clean up leftover files after
# automatic testing has completed, and generate developer reports about
# anything left undocumented!
# run: "sudo python ./extras/scripts/finish_ci.py"
import os
import sys
os.system("sudo python ./extras/scripts/generate_doxygen_report.py")
os.system("sudo python ./extras/scripts/generate_keywords_report.py")
os.system("sudo python ./extras/scripts/generate_overview_report.py")
print("Cleaning up CI junk...")
os.system("ls .")
os.system("git add *")
os.system("sudo rm -r *.tar*")
os.system("sudo rm -r examples/*/build")
os.system("git commit -a -m 'Automated Cleanup'")
os.system("git push")
print("Done!" )
| 30.130435 | 71 | 0.74026 |
2903ad47c07cf0194fa493bc2ea6ee436c013991 | 9,549 | py | Python | matchingGame.py | VinnieM-3/MemoryGames | 65b92b2eeaa56879fd491b1169bad84dfd9e672e | [
"MIT"
] | null | null | null | matchingGame.py | VinnieM-3/MemoryGames | 65b92b2eeaa56879fd491b1169bad84dfd9e672e | [
"MIT"
] | null | null | null | matchingGame.py | VinnieM-3/MemoryGames | 65b92b2eeaa56879fd491b1169bad84dfd9e672e | [
"MIT"
] | 1 | 2021-03-29T20:31:34.000Z | 2021-03-29T20:31:34.000Z | import pygame
import random
pygame.init()
pygame.font.init()
def get_matching_card(card_list, card_to_match):
""" This function returns the card that matches the one passed in """
the_matching_card = None
for test_card in card_list:
if test_card.value == card_to_match.value and test_card != card_to_match:
the_matching_card = test_card
break
return the_matching_card
def cards_remaining(card_list):
""" this function returns the number of cards that have not been matched yet """
num_remaining = 0
for c in card_list:
if c.is_unsolved():
num_remaining += 1
return num_remaining
if __name__ == "__main__":
display_width = 600
display_height = 600
card_font = pygame.font.SysFont('Comic Sans MS', 48)
front_col = pygame.Color('white')
solved_col = pygame.Color('#636363')
back_col = pygame.Color('#293a32')
font_col = pygame.Color('black')
score_font = pygame.font.SysFont('Comic Sans MS', 24)
score_txt_col = pygame.Color('#d4c38f')
score_y_margin = 50
score_x_margin = 20
player_closed_app = False
new_game = False
cards = []
game_display = pygame.display.set_mode((display_width, display_height))
pygame.display.set_caption('Matching Game')
game_display.fill(pygame.Color('#b5c9a6'))
score_rect = pygame.draw.rect(game_display, pygame.Color('black'), pygame.Rect(0, 0, display_width, score_y_margin))
surf_8x8_txt = score_font.render("8 x 8", True, score_txt_col)
left_pos = (game_display.get_width() - score_x_margin - surf_8x8_txt.get_width())
surf_8x8_rect = game_display.blit(surf_8x8_txt, (left_pos, (score_y_margin - surf_8x8_txt.get_height()) / 2))
surf_6x6_txt = score_font.render("6 x 6", True, score_txt_col)
left_pos = left_pos - surf_6x6_txt.get_width() - score_x_margin
surf_6x6_rect = game_display.blit(surf_6x6_txt, (left_pos, (score_y_margin - surf_6x6_txt.get_height()) / 2))
surf_4x4_txt = score_font.render("4 x 4", True, score_txt_col)
left_pos = left_pos - surf_4x4_txt.get_width() - score_x_margin
surf_4x4_rect = game_display.blit(surf_4x4_txt, (left_pos, (score_y_margin - surf_4x4_txt.get_height()) / 2))
surf_sel_txt = score_font.render("Select Game:", True, score_txt_col)
left_pos = left_pos - surf_sel_txt.get_width() - score_x_margin
game_display.blit(surf_sel_txt, (left_pos, (score_y_margin - surf_sel_txt.get_height()) / 2))
num_cols = 0
num_rows = 0
pick_1 = None # variable to hold first card selected by player
score = 0
max_score = 0 # maximum score a player can get
while not player_closed_app:
for event in pygame.event.get():
if event.type == pygame.QUIT:
player_closed_app = True
if new_game:
pygame.draw.rect(game_display, pygame.Color('#b5c9a6'),
pygame.Rect(0, score_y_margin, display_width, display_height - score_y_margin))
total_pairs = (num_cols * num_rows) / 2
max_score = total_pairs - 1 # player gets no credit for last two cards remaining
pairs = range(1, total_pairs + 1) + range(1, total_pairs + 1) # create numbered pairs
# calculate the width and height of the cards and the space between them
card_horz_width = int((display_width * 0.8) / num_cols)
space_horz_width = int((display_width * 0.2) / (num_cols + 1))
card_vert_height = int(((display_height - score_y_margin) * 0.8) / num_rows)
space_vert_height = int(((display_height - score_y_margin) * 0.2) / (num_rows + 1))
# create cards and randomly assign the numbered pairs
random.random()
del cards[:]
for row in range(1, num_rows + 1):
for col in range(1, num_cols + 1):
rnd_item = random.choice(pairs)
pairs.remove(rnd_item)
new_card_x = ((col - 1) * card_horz_width) + (col * space_horz_width)
new_card_y = ((row - 1) * card_vert_height) + (row * space_vert_height) + score_y_margin
crd = Card(new_card_x, new_card_y, card_horz_width, card_vert_height,
back_col, front_col, solved_col, game_display, font_col, card_font, str(rnd_item))
cards.append(crd)
crd.hide_card()
score = 0
new_game = False
if pygame.mouse.get_pressed()[0]:
if surf_4x4_rect.collidepoint(pygame.mouse.get_pos()): # start new game 4 x 4
new_game = True
num_cols = 4
num_rows = 4
pygame.time.wait(200) # wait 200ms to avoid multiple new game mouse click events
if surf_6x6_rect.collidepoint(pygame.mouse.get_pos()): # start new game 6 x 6
new_game = True
num_cols = 6
num_rows = 6
pygame.time.wait(200)
if surf_8x8_rect.collidepoint(pygame.mouse.get_pos()): # start new game 8 x 8
new_game = True
num_cols = 8
num_rows = 8
pygame.time.wait(200)
for crd in cards:
if crd.is_clicked(pygame.mouse.get_pos()) and crd.is_hidden() and crd.is_unsolved():
crd.show_card()
pygame.display.flip()
if pick_1 is None:
pick_1 = crd # player picked first card
else: # player picked second card.
if pick_1.value == crd.value: # it is a match!
pick_1.solved()
crd.solved()
if crd.times_seen > 1 and cards_remaining(cards) > 0:
score += 1 # if you have seen the matching card at least once before, you get a point
elif crd.times_seen == 1 and cards_remaining(cards) > 0:
max_score -= 1 # no points for luck, we just reduce the max possible score
pygame.time.wait(500) # show matching values for 500ms
else: # it did not match
pick_1.hide_card()
crd.hide_card()
matching_card = get_matching_card(cards, pick_1)
if matching_card.times_seen > 0:
score -= 1 # player has seen the matching card before! 1 point penalty!
if crd.times_seen > 1:
score -= 1 # player should have known this card was not a match! 1 point penalty!
pygame.time.wait(1500) # show card values for 1.5sec
pick_1 = None # get ready for next pair of selections by player
break
# update score
surf_wrong = score_font.render("Score = " + str(score) + " out of " + str(max_score), True, score_txt_col)
pygame.draw.rect(game_display, pygame.Color('black'),
pygame.Rect(score_x_margin, 0, surf_wrong.get_width() + 100, score_y_margin))
game_display.blit(surf_wrong, (score_x_margin, (score_y_margin - surf_wrong.get_height()) / 2))
pygame.display.flip()
# player existed application
pygame.quit()
quit()
| 43.404545 | 120 | 0.597549 |
29057d1781f0e8f9898d6f1c32f5772d89c7df3a | 1,889 | py | Python | darts_search_space/imagenet/rlnas/evolution_search/config.py | megvii-model/RLNAS | a7e2ef9debcd06a93b075181a027b806b737b106 | [
"MIT"
] | 17 | 2021-05-17T04:54:17.000Z | 2022-01-23T09:59:02.000Z | darts_search_space/imagenet/rlnas/evolution_search/config.py | megvii-model/RLNAS | a7e2ef9debcd06a93b075181a027b806b737b106 | [
"MIT"
] | 2 | 2021-07-09T05:14:29.000Z | 2022-02-05T10:15:31.000Z | darts_search_space/imagenet/rlnas/evolution_search/config.py | megvii-model/RLNAS | a7e2ef9debcd06a93b075181a027b806b737b106 | [
"MIT"
] | 8 | 2021-05-28T00:04:20.000Z | 2021-10-18T02:41:34.000Z | import os
for i in ['exp_name']:
print('{}: {}'.format(i,eval('config.{}'.format(i))))
| 25.527027 | 112 | 0.528322 |
2908444cad199e2ad0cbe23b1b79f2e9191d879c | 6,801 | py | Python | packages/python/plotly/plotly/graph_objs/layout/geo/_projection.py | eranws/plotly.py | 5b0e8d3ccab55fe1a6e4ba123cfc9d718a9ffc5a | [
"MIT"
] | null | null | null | packages/python/plotly/plotly/graph_objs/layout/geo/_projection.py | eranws/plotly.py | 5b0e8d3ccab55fe1a6e4ba123cfc9d718a9ffc5a | [
"MIT"
] | null | null | null | packages/python/plotly/plotly/graph_objs/layout/geo/_projection.py | eranws/plotly.py | 5b0e8d3ccab55fe1a6e4ba123cfc9d718a9ffc5a | [
"MIT"
] | null | null | null | from plotly.basedatatypes import BaseLayoutHierarchyType as _BaseLayoutHierarchyType
import copy as _copy
| 30.773756 | 86 | 0.53845 |
29098333a5185a77af4d5f34240be5bf5108f278 | 1,286 | py | Python | j3d/string_table.py | blank63/j3dview | 498225e12119a9a2b4af9beed1be95f28d8410e2 | [
"MIT"
] | 13 | 2016-04-22T10:45:08.000Z | 2022-02-23T23:50:53.000Z | j3d/string_table.py | blank63/j3dview | 498225e12119a9a2b4af9beed1be95f28d8410e2 | [
"MIT"
] | 1 | 2017-03-19T20:31:03.000Z | 2017-03-20T17:09:36.000Z | j3d/string_table.py | blank63/j3dview | 498225e12119a9a2b4af9beed1be95f28d8410e2 | [
"MIT"
] | 2 | 2016-09-10T07:35:35.000Z | 2021-12-29T23:23:36.000Z | from btypes.big_endian import *
cstring_sjis = CString('shift-jis')
def unsigned_to_signed_byte(b):
return b - 0x100 if b & 0x80 else b
def calculate_hash(string):
h = 0
for b in string:
h = (h*3 + unsigned_to_signed_byte(b)) & 0xFFFF
return h
def pack(stream, strings):
strings = [string.encode('shift-jis') for string in strings]
header = Header()
header.string_count = len(strings)
Header.pack(stream, header)
offset = Header.sizeof() + Entry.sizeof()*len(strings)
for string in strings:
entry = Entry()
entry.string_hash = calculate_hash(string)
entry.string_offset = offset
Entry.pack(stream, entry)
offset += len(string) + 1
for string in strings:
stream.write(string)
stream.write(b'\0')
| 22.172414 | 72 | 0.650855 |
2909ec5a9aa5da7302caa8a0154901f2a00348b4 | 4,549 | py | Python | deConzSensors.py | peterstadler/deConzSensors | c30cd78083cea89f9f0416c046e472774e0bb54d | [
"MIT"
] | null | null | null | deConzSensors.py | peterstadler/deConzSensors | c30cd78083cea89f9f0416c046e472774e0bb54d | [
"MIT"
] | null | null | null | deConzSensors.py | peterstadler/deConzSensors | c30cd78083cea89f9f0416c046e472774e0bb54d | [
"MIT"
] | null | null | null | #!/usr/bin/env python3.5
from time import sleep, time
from datetime import datetime, timedelta
from pid.decorator import pidfile
#from subprocess import call
from RPi import GPIO
import requests
import json
#import config
import logging
import signal
import sys
#13: grn
#16: braun
#19: orange
#20: grn
#21: braun
#26: orange
SENSORS = [
{
"GPIOpinIN": 26,
"GPIOpinOUT": 19,
"SENSORID": 4,
"NAME": "Garagentor"
},
{
"GPIOpinIN": 20,
"GPIOpinOUT": 13,
"SENSORID": 2,
"NAME": "Garagentr"
}
]
# deConz REST API settings
APIKEY = "" # API key for the deConz REST API
APIHOST = "" # IP address of the deConz REST API, e.g. "192.168.1.100"
APISCHEME = "http" # scheme for the deConz REST API, e.g. "http"
# program settings
POLL_INTERVALL = 7 # duration in seconds to wait between polls
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s', filename='/var/log/deConzSensors.log')
# creating a PID file to prevent double execution of this script
if __name__ == '__main__':
main()
| 36.392 | 130 | 0.603869 |
290a002c458607061f9182749313cea5d389910f | 1,757 | py | Python | src/admin.py | kappa243/agh-db-proj | 73a3e69fa11e65e196b3d8a34be0b1051654a7eb | [
"MIT"
] | null | null | null | src/admin.py | kappa243/agh-db-proj | 73a3e69fa11e65e196b3d8a34be0b1051654a7eb | [
"MIT"
] | null | null | null | src/admin.py | kappa243/agh-db-proj | 73a3e69fa11e65e196b3d8a34be0b1051654a7eb | [
"MIT"
] | null | null | null | from flask import Blueprint, request, render_template, flash, redirect, url_for
from flask_login import login_user, login_required, current_user, logout_user
from models import User
from werkzeug.security import generate_password_hash, check_password_hash
from app import db, login_manager
admin = Blueprint('admin', __name__)
| 39.931818 | 100 | 0.636881 |
290c6742df1f8f4ad0e590b81b60add7140d2294 | 4,321 | py | Python | test/lib/test_map.py | oldmantaiter/inferno | 88da465625d18c6848f4be5fb37e20a5ae2c6db1 | [
"MIT"
] | 1 | 2015-10-15T04:18:14.000Z | 2015-10-15T04:18:14.000Z | test/lib/test_map.py | oldmantaiter/inferno | 88da465625d18c6848f4be5fb37e20a5ae2c6db1 | [
"MIT"
] | null | null | null | test/lib/test_map.py | oldmantaiter/inferno | 88da465625d18c6848f4be5fb37e20a5ae2c6db1 | [
"MIT"
] | null | null | null | import datetime
import types
from nose.tools import eq_
from nose.tools import ok_
from inferno.lib.map import keyset_map
from inferno.lib.rule import InfernoRule
| 36.008333 | 80 | 0.592918 |
290cd15c3b088b77632afac10c8cccada862dde1 | 1,882 | bzl | Python | dart/build_rules/internal/pub.bzl | nickclmb/rules_dart | 2cae27be60b858bfa45c649db15946cabb245556 | [
"Apache-2.0"
] | null | null | null | dart/build_rules/internal/pub.bzl | nickclmb/rules_dart | 2cae27be60b858bfa45c649db15946cabb245556 | [
"Apache-2.0"
] | null | null | null | dart/build_rules/internal/pub.bzl | nickclmb/rules_dart | 2cae27be60b858bfa45c649db15946cabb245556 | [
"Apache-2.0"
] | null | null | null | # Copyright 2016 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
_pub_uri = "https://storage.googleapis.com/pub.dartlang.org/packages"
"""A set of BUILD rules that facilitate using or building on "pub"."""
pub_repository = repository_rule(
attrs = {
"output": attr.string(),
"package": attr.string(mandatory = True),
"version": attr.string(mandatory = True),
"pub_deps": attr.string_list(default = []),
},
implementation = _pub_repository_impl,
)
| 29.40625 | 74 | 0.685441 |
290d81aaa0377937c4a0c3f9c72f2fe1d0c01962 | 928 | py | Python | auror_core/__init__.py | millengustavo/auror-core | e215af6a5a1f3822693ad2d0882cbcd7882a2e35 | [
"Apache-2.0"
] | 11 | 2018-12-15T04:07:52.000Z | 2020-12-07T13:06:22.000Z | auror_core/__init__.py | millengustavo/auror-core | e215af6a5a1f3822693ad2d0882cbcd7882a2e35 | [
"Apache-2.0"
] | 12 | 2018-12-15T17:48:59.000Z | 2021-10-14T02:49:06.000Z | auror_core/__init__.py | millengustavo/auror-core | e215af6a5a1f3822693ad2d0882cbcd7882a2e35 | [
"Apache-2.0"
] | 5 | 2019-10-14T02:28:38.000Z | 2020-10-01T14:32:01.000Z | import copy
import os
| 23.794872 | 70 | 0.579741 |
2910bdc7ca3987dad2077d2434ff681fd6095d52 | 2,337 | py | Python | app.py | Vaishnavid14/snakegame | cd98926ea23ed8494689ed9b0a6372e469b37083 | [
"MIT"
] | 5 | 2018-06-03T19:07:45.000Z | 2022-03-22T00:58:50.000Z | app.py | Vaishnavid14/snakegame | cd98926ea23ed8494689ed9b0a6372e469b37083 | [
"MIT"
] | 1 | 2020-10-02T03:24:04.000Z | 2020-10-02T03:24:04.000Z | app.py | Vaishnavid14/snakegame | cd98926ea23ed8494689ed9b0a6372e469b37083 | [
"MIT"
] | 3 | 2018-10-13T16:47:09.000Z | 2020-12-11T09:09:36.000Z | '''
Purpose: Server responsible for routing
Author: Md. Tanvir Islam
Command to execute: python app.py
'''
from flask import Flask
from flask import render_template
from flask import json
from flask import request
import random
import sys
app = Flask(__name__)
print("Server is live...", file = sys.stderr)
users = []
# sends the x and y coordinates to the client
# sends the size of the snake to the server
'''
Function: dimensions
Purpose: generates a random x and y coordinate within a limit to send it the client
in: obj
'''
'''
Function: random_number
Purpose: generates a random number between a particular range
in: min, max
'''
if __name__ == "__main__":
app.run(host = "localhost", port = 2406, debug = True) | 22.04717 | 90 | 0.641849 |
2910c5c59d8ba3a8c530e8551e58a52db38377c4 | 254 | py | Python | grafana_api/api/__init__.py | sedan07/grafana_api | 4cc0b85e8660d9f21c8bd1997c5163a730ac2ee3 | [
"MIT"
] | null | null | null | grafana_api/api/__init__.py | sedan07/grafana_api | 4cc0b85e8660d9f21c8bd1997c5163a730ac2ee3 | [
"MIT"
] | null | null | null | grafana_api/api/__init__.py | sedan07/grafana_api | 4cc0b85e8660d9f21c8bd1997c5163a730ac2ee3 | [
"MIT"
] | null | null | null | from .base import Base
from .admin import Admin
from .dashboard import Dashboard
from .datasource import Datasource
from .folder import Folder
from .organisation import Organisation, Organisations
from .search import Search
from .user import User, Users
| 28.222222 | 53 | 0.826772 |
291160bd0c346287015511cf9e4797089cf45195 | 9,176 | py | Python | shop/views.py | Ayushman-Singh/ecommerce | 78f007fea89ead412d10554e69b9f4854f67d277 | [
"MIT"
] | 1 | 2019-11-25T06:42:47.000Z | 2019-11-25T06:42:47.000Z | shop/views.py | Ayushman-Singh/ecommerce | 78f007fea89ead412d10554e69b9f4854f67d277 | [
"MIT"
] | 12 | 2020-02-12T02:54:15.000Z | 2022-03-12T00:06:14.000Z | shop/views.py | Ayushman-Singh/ecommerce | 78f007fea89ead412d10554e69b9f4854f67d277 | [
"MIT"
] | 3 | 2019-11-25T19:53:18.000Z | 2020-10-01T12:01:24.000Z | from shop.forms import UserForm
from django.views import generic
from django.urls import reverse_lazy
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.models import auth
from .models import Product, Contact, Category, Product, Order, OrderItem
from django.contrib import messages
from django.views.decorators.csrf import ensure_csrf_cookie
from math import ceil
import json
from shop.models import User
from django.views.decorators.csrf import csrf_exempt
# from PayTm import checksum
# Create your views here.
from django.http import HttpResponse
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
MERCHANT_KEY = 'Your-Merchant-Key-Here'
def searchMatch(query, item):
    """Return True if *query* occurs in the item's description or name.

    The match is case-insensitive: the item's fields are lowered before the
    containment test, and the query is normalised to lower case as well
    (previously a mixed-case query could never match, because it was
    compared against already-lowered text).

    Args:
        query: search string.
        item: object exposing ``description`` and ``name`` string attributes.
    """
    query = query.lower()
    # Direct boolean return instead of if/else returning True/False.
    return query in item.description.lower() or query in item.name.lower()
from django.views.generic.edit import UpdateView
| 32.196491 | 92 | 0.53095 |
2911bd336905b53ebe454ea6d5b0bca66ffa304e | 347 | py | Python | SimMuon/GEMDigitizer/python/muonGEMDigi_cff.py | NTrevisani/cmssw | a212a27526f34eb9507cf8b875c93896e6544781 | [
"Apache-2.0"
] | 3 | 2018-08-24T19:10:26.000Z | 2019-02-19T11:45:32.000Z | SimMuon/GEMDigitizer/python/muonGEMDigi_cff.py | NTrevisani/cmssw | a212a27526f34eb9507cf8b875c93896e6544781 | [
"Apache-2.0"
] | 7 | 2016-07-17T02:34:54.000Z | 2019-08-13T07:58:37.000Z | SimMuon/GEMDigitizer/python/muonGEMDigi_cff.py | NTrevisani/cmssw | a212a27526f34eb9507cf8b875c93896e6544781 | [
"Apache-2.0"
] | 5 | 2018-08-21T16:37:52.000Z | 2020-01-09T13:33:17.000Z | import FWCore.ParameterSet.Config as cms
from SimMuon.GEMDigitizer.muonGEMDigis_cfi import *
from SimMuon.GEMDigitizer.muonGEMPadDigis_cfi import *
from SimMuon.GEMDigitizer.muonGEMPadDigiClusters_cfi import *
muonGEMDigiTask = cms.Task(simMuonGEMDigis, simMuonGEMPadDigis, simMuonGEMPadDigiClusters)
muonGEMDigi = cms.Sequence(muonGEMDigiTask)
| 38.555556 | 90 | 0.864553 |
2913388d5314daafd30d30378e540525568a897d | 8,843 | py | Python | paas-ce/paas/esb/lib/redis_rate_limit/ratelimit.py | renmcc/bk-PaaS | 1c9e4e9cfb40fc3375cd6b5f08af8c84203de246 | [
"Apache-2.0"
] | 767 | 2019-03-25T06:35:43.000Z | 2022-03-30T08:57:51.000Z | paas-ce/paas/esb/lib/redis_rate_limit/ratelimit.py | renmcc/bk-PaaS | 1c9e4e9cfb40fc3375cd6b5f08af8c84203de246 | [
"Apache-2.0"
] | 194 | 2019-03-29T07:16:41.000Z | 2022-03-30T06:17:49.000Z | paas-ce/paas/esb/lib/redis_rate_limit/ratelimit.py | renmcc/bk-PaaS | 1c9e4e9cfb40fc3375cd6b5f08af8c84203de246 | [
"Apache-2.0"
] | 381 | 2019-03-25T07:19:54.000Z | 2022-03-29T03:22:42.000Z | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making PaaS (BlueKing PaaS Community Edition) available.
Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
""" # noqa
"""A distributed rate limiter rely on redis
based on `token bucket <https://en.wikipedia.org/wiki/Token_bucket>` algorithm
Usage
~~~~~
.. code-block:: python
# Init a redis connection pool
import redis
redisdb = redis.Redis()
rate = RateLimiter(redisdb, identifier='ip=127.0.0.1 path=/get_user_info/')
# Allow 10 requests every 1 minute
# period also accepts seconds/minutes/hours/days as key
rate.add_rule(tokens=10, period={'minute': 1})
# You could add multiple rules for on limiter
# rate.add_rule(tokens=200, period={'hour': 1})
print rate.acquire()
# returns {'allowed': True, 'remaining_tokens': 9.0}
"""
import time
import logging
from redis import WatchError
logger = logging.getLogger('root')
| 36.241803 | 305 | 0.622413 |
291353fc46f6b36f8dcc9b15e6f8fd7bfb761f8c | 7,094 | py | Python | tela_cadastro_loja_embala.py | lucasHashi/PyQt5-gerenciador-de-vendas-de-comidas | 56588bfb8543ea070ccb53635486a14ddfda6202 | [
"MIT"
] | 1 | 2020-02-21T22:54:05.000Z | 2020-02-21T22:54:05.000Z | tela_cadastro_loja_embala.py | lucasHashi/PyQt5-gerenciador-de-vendas-de-comidas | 56588bfb8543ea070ccb53635486a14ddfda6202 | [
"MIT"
] | 1 | 2020-01-22T04:27:02.000Z | 2020-01-22T04:27:02.000Z | tela_cadastro_loja_embala.py | lucasHashi/PyQt5-gerenciador-de-vendas-de-comidas | 56588bfb8543ea070ccb53635486a14ddfda6202 | [
"MIT"
] | null | null | null | import sys
from PyQt5 import QtCore, QtGui, QtWidgets, uic
import database_receita
import pyqt5_aux
qt_tela_inicial = "telas/tela_cadastro_loja_embala.ui"
Ui_MainWindow, QtBaseClass = uic.loadUiType(qt_tela_inicial)
| 37.734043 | 175 | 0.695235 |
2913e256dfb84f164f18ed5f1a7cbb5235605636 | 2,253 | py | Python | test/test_layers.py | mukeshv0/ParallelWaveGAN | 40fd282d0364c8d8711efed21d9689653d85b3a2 | [
"MIT"
] | null | null | null | test/test_layers.py | mukeshv0/ParallelWaveGAN | 40fd282d0364c8d8711efed21d9689653d85b3a2 | [
"MIT"
] | null | null | null | test/test_layers.py | mukeshv0/ParallelWaveGAN | 40fd282d0364c8d8711efed21d9689653d85b3a2 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2019 Tomoki Hayashi
# MIT License (https://opensource.org/licenses/MIT)
import logging
import numpy as np
import torch
from parallel_wavegan.layers import Conv1d
from parallel_wavegan.layers import Conv1d1x1
from parallel_wavegan.layers import Conv2d
from parallel_wavegan.layers import ConvInUpsampleNetwork
from parallel_wavegan.layers import UpsampleNetwork
logging.basicConfig(
level=logging.DEBUG, format="%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s")
| 40.232143 | 98 | 0.626276 |
29145e423185507525f6b1aaf218e49896993e52 | 22,417 | py | Python | geotrek/tourism/models.py | ker2x/Geotrek-admin | 78e154894d1d78dbb35789285c7def8deaaa2dd3 | [
"BSD-2-Clause"
] | null | null | null | geotrek/tourism/models.py | ker2x/Geotrek-admin | 78e154894d1d78dbb35789285c7def8deaaa2dd3 | [
"BSD-2-Clause"
] | null | null | null | geotrek/tourism/models.py | ker2x/Geotrek-admin | 78e154894d1d78dbb35789285c7def8deaaa2dd3 | [
"BSD-2-Clause"
] | null | null | null | import os
import re
import logging
from django.conf import settings
from django.contrib.gis.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.formats import date_format
from easy_thumbnails.alias import aliases
from easy_thumbnails.exceptions import InvalidImageFormatError
from easy_thumbnails.files import get_thumbnailer
from mapentity.registry import registry
from mapentity.models import MapEntityMixin
from mapentity.serializers import plain_text, smart_plain_text
from geotrek.authent.models import StructureRelated
from geotrek.core.models import Topology
from geotrek.common.mixins import (NoDeleteMixin, TimeStampedModelMixin,
PictogramMixin, OptionalPictogramMixin,
PublishableMixin, PicturesMixin,
AddPropertyMixin)
from geotrek.common.models import Theme
from geotrek.common.utils import intersecting
from extended_choices import Choices
if 'modeltranslation' in settings.INSTALLED_APPS:
from modeltranslation.manager import MultilingualManager
else:
from django.db.models import Manager as MultilingualManager
logger = logging.getLogger(__name__)
def _get_target_choices():
    """Build the choices tuple from installed app names.

    Always starts with the public-website entry, then appends one entry per
    registered model whose entity has a menu.
    """
    choices = [('public', _("Public website"))]
    for model, entity in registry.registry.items():
        if not entity.menu:
            continue
        app_label = model._meta.app_label.lower()
        choices.append((app_label, unicode(entity.label)))
    return tuple(choices)
GEOMETRY_TYPES = Choices(
('POINT', 'point', _('Point')),
('LINE', 'line', _('Line')),
('POLYGON', 'polygon', _('Polygon')),
('ANY', 'any', _('Any')),
)
Topology.add_property('touristic_contents', lambda self: intersecting(TouristicContent, self), _(u"Touristic contents"))
Topology.add_property('published_touristic_contents', lambda self: intersecting(TouristicContent, self).filter(published=True), _(u"Published touristic contents"))
TouristicContent.add_property('touristic_contents', lambda self: intersecting(TouristicContent, self), _(u"Touristic contents"))
TouristicContent.add_property('published_touristic_contents', lambda self: intersecting(TouristicContent, self).filter(published=True), _(u"Published touristic contents"))
TouristicEvent.add_property('touristic_contents', lambda self: intersecting(TouristicContent, self), _(u"Touristic contents"))
TouristicEvent.add_property('published_touristic_contents', lambda self: intersecting(TouristicContent, self).filter(published=True), _(u"Published touristic contents"))
Topology.add_property('touristic_events', lambda self: intersecting(TouristicEvent, self), _(u"Touristic events"))
Topology.add_property('published_touristic_events', lambda self: intersecting(TouristicEvent, self).filter(published=True), _(u"Published touristic events"))
TouristicContent.add_property('touristic_events', lambda self: intersecting(TouristicEvent, self), _(u"Touristic events"))
TouristicContent.add_property('published_touristic_events', lambda self: intersecting(TouristicEvent, self).filter(published=True), _(u"Published touristic events"))
TouristicEvent.add_property('touristic_events', lambda self: intersecting(TouristicEvent, self), _(u"Touristic events"))
TouristicEvent.add_property('published_touristic_events', lambda self: intersecting(TouristicEvent, self).filter(published=True), _(u"Published touristic events"))
| 43.868885 | 171 | 0.657581 |
2914b183f0a48cc6b1b59de7781f38a975146534 | 6,313 | py | Python | Service_Components/Sink/Sink_DataFlow.py | mydata-sdk/mydata-sdk-1.x | 74064d7a42fc0435511eae6e77e49ddc7d9723f3 | [
"MIT"
] | null | null | null | Service_Components/Sink/Sink_DataFlow.py | mydata-sdk/mydata-sdk-1.x | 74064d7a42fc0435511eae6e77e49ddc7d9723f3 | [
"MIT"
] | 2 | 2018-04-20T23:07:01.000Z | 2018-04-21T01:01:20.000Z | Service_Components/Sink/Sink_DataFlow.py | fititnt/mydata-sdk--hiit | 19d7a2ddbc3b5a05665539fbcc7f461c13793e03 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from signed_requests.signed_request_auth import SignedRequest
__author__ = 'alpaloma'
from flask import Blueprint, current_app, request
from helpers import Helpers
import requests
from json import dumps, loads
from DetailedHTTPException import error_handler
from flask_restful import Resource, Api
import logging
from jwcrypto import jwk
from Templates import Sequences
debug_log = logging.getLogger("debug")
logger = logging.getLogger("sequence")
api_Sink_blueprint = Blueprint("api_Sink_blueprint", __name__)
api = Api()
api.init_app(api_Sink_blueprint)
sq = Sequences("Service_Components Mgmnt (Sink)", {})
# import xmltodict
# @api.representation('application/xml')
# def output_xml(data, code, headers=None):
# if isinstance(data, dict):
# xm = {"response": data}
# resp = make_response(xmltodict.unparse(xm, pretty=True), code)
# resp.headers.extend(headers)
# return resp
api.add_resource(Status, '/init')
api.add_resource(DataFlow, '/dc')
#api.add_resource(DataFlow, '/user/<string:user_id>/consentRecord/<string:cr_id>/resourceSet/<string:rs_id>')
#"http://service_components:7000/api/1.2/sink_flow/user/95479a08-80cc-4359-ba28-b8ca23ff5572_53af88dc-33de-44be-bc30-e0826db9bd6c/consentRecord/cd431509-777a-4285-8211-95c5ac577537/resourceSet/http%3A%2F%2Fservice_components%3A7000%7C%7C9aebb487-0c83-4139-b12c-d7fcea93a3ad" | 42.655405 | 274 | 0.642009 |
29153ac2726304eba8abb15e0e28ea926d19d5f2 | 1,075 | py | Python | alignment/find_bug/is_bored.py | LaudateCorpus1/code-align-evals-data | 97446d992c3785d6605f1500b2c9b95d042e7b9c | [
"MIT"
] | 3 | 2021-07-29T23:40:15.000Z | 2021-08-12T10:18:09.000Z | alignment/find_bug/is_bored.py | openai/code-align-evals-data | 97446d992c3785d6605f1500b2c9b95d042e7b9c | [
"MIT"
] | 1 | 2021-09-19T06:44:15.000Z | 2021-09-19T06:44:15.000Z | alignment/find_bug/is_bored.py | LaudateCorpus1/code-align-evals-data | 97446d992c3785d6605f1500b2c9b95d042e7b9c | [
"MIT"
] | 1 | 2021-09-19T06:44:03.000Z | 2021-09-19T06:44:03.000Z | def is_bored(S):
"""
You'll be given a string of words, and your task is to count the number
of boredoms. A boredom is a sentence that starts with the word "I".
Sentences are delimited by '.', '?' or '!'.
For example:
>>> is_bored("Hello world")
0
>>> is_bored("The sky is blue. The sun is shining. I love this weather")
1
Example solution:
# line 1
import re
# line 2
sentences = re.split(r'[.?!]\s*', S)
# line 3
return sum(sentence[0:3] == 'I ' for sentence in sentences)
"""
# Please print out which line of the above program contains an error. E.g. if the bug is on line 4 then print 4
# END OF CONTEXT
print("3")
# END OF SOLUTION
if __name__ == '__main__':
check(is_bored)
| 24.431818 | 115 | 0.584186 |
2915a5cec252d8d1a0fc059343505bed4b8a4276 | 23,882 | py | Python | Support/Python/tbdata/printing.py | twitchplayskh/open-brush | 2094339be6851731d293f2574c1fc706ee388d84 | [
"Apache-2.0"
] | null | null | null | Support/Python/tbdata/printing.py | twitchplayskh/open-brush | 2094339be6851731d293f2574c1fc706ee388d84 | [
"Apache-2.0"
] | null | null | null | Support/Python/tbdata/printing.py | twitchplayskh/open-brush | 2094339be6851731d293f2574c1fc706ee388d84 | [
"Apache-2.0"
] | 1 | 2021-02-04T21:45:45.000Z | 2021-02-04T21:45:45.000Z | # Copyright 2020 The Tilt Brush Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for 3d printing."""
import os
import re
import sys
import math
import pprint
import shutil
import itertools
import subprocess
from collections import Counter
import numpy
try:
from tiltbrush.tilt import Tilt
except ImportError:
print("You need the Tilt Brush Toolkit (https://github.com/googlevr/tilt-brush-toolkit)")
print("and then put its Python directory in your PYTHONPATH.")
sys.exit(1)
from tbdata.brush_lookup import BrushLookup
# Convert strokes for 3d printing.
# True Don't touch these strokes
# False Remove these strokes from the sketch
# <name> Replace the brush for these strokes
# names can also be guids, which is useful when the name is ambiguous
BRUSH_REPLACEMENTS = [
# Good brushes
('SquarePaper', True),
('ThickGeometry', True),
('Wire', True),
# Brushes that should be replaced
('TaperedMarker', 'ThickGeometry'),
('OilPaint', 'ThickGeometry'),
('Ink', 'ThickGeometry'),
('Marker', 'ThickGeometry'),
('Paper', 'ThickGeometry'),
('FlatDeprecated','ThickGeometry'),
# Questionable
('Highlighter', 'ThickGeometry'),
('Light', 'Wire'),
# Remove particles
('Smoke', None),
('Snow', None),
('Embers', None),
('Stars', None),
# Remove animated
('Fire', None),
# Remove shader-based
('Plasma', None),
('Rainbow', None),
('Streamers', None),
]
# ----------------------------------------------------------------------
# Little utilities
# ----------------------------------------------------------------------
def rgb8_to_hsl(rgb):
  """Takes a rgb8 tuple, returns a hsl tuple.

  h is measured in sixths of a full hue circle (HUE_MAX == 6) and may be
  negative in the red-dominant branch; s and l are in [0, 1].
  """
  HUE_MAX = 6
  r = rgb[0] / 255.0
  g = rgb[1] / 255.0
  b = rgb[2] / 255.0
  cmin = min(r, g, b)
  cmax = max(r, g, b)
  delta = cmax - cmin
  h = 0
  s = 0
  # Lightness is the midpoint of the channel extrema.  The previous code
  # used the plain sum (cmax + cmin), which doubled l and made the
  # `l < 0.5` saturation branch below pick the wrong denominator.
  l = (cmax + cmin) / 2.0
  if delta != 0:
    if l < 0.5:
      s = delta / (cmax + cmin)
    else:
      s = delta / (2.0 - cmax - cmin)
    # Hue depends on which channel is dominant (standard HSL derivation).
    if r == cmax:
      h = (g - b) / delta
    elif g == cmax:
      h = 2 + (b - r) / delta
    elif b == cmax:
      h = 4 + (r - g) / delta
  return h, s, l
# ----------------------------------------------------------------------
# Brush conversion
# ----------------------------------------------------------------------
def get_replacements_by_guid(replacements_by_name):
  """Returns a lookup table that is by-guid rather than by-name.

  Args:
    replacements_by_name: iterable of (brush name-or-guid, replacement)
      pairs, where the replacement is True (keep the brush as-is), None
      (strokes should be removed), or another name-or-guid.

  Returns:
    dict mapping source guid -> replacement guid (or None for removal).
  """
  # Removed an unused `brush_lookup = BrushLookup.get()` local; the
  # name-to-guid translation is done entirely by guid_or_name_to_guid.
  dct = {}
  for before, after in replacements_by_name:
    before_guid = guid_or_name_to_guid(before)
    if after is True:
      after_guid = before_guid
    elif after is None:
      after_guid = None
    else:
      after_guid = guid_or_name_to_guid(after)
    dct[before_guid] = after_guid
  return dct
def convert_brushes(tilt, replacements_by_name, show_removed=False):
  """Convert brushes to 3d-printable versions, or remove their strokes from the tilt.

  Mutates *tilt* in place: rewrites the BrushIndex metadata table, remaps
  each stroke's brush index, and (unless show_removed) deletes strokes whose
  brush maps to None.  Progress is reported on stdout.

  Args:
    tilt: Tilt instance to modify in place.
    replacements_by_name: (name-or-guid, replacement) pairs; see
      get_replacements_by_guid for the replacement semantics.
    show_removed: if True, strokes slated for removal are painted magenta
      instead of deleted (preview mode).
  """
  replacements = get_replacements_by_guid(replacements_by_name)
  brush_lookup = BrushLookup.get()
  with tilt.mutable_metadata() as dct:
    index_to_guid = dct['BrushIndex']
    # First, show us what brushes the tilt file uses
    used_guids = Counter()
    for stroke in tilt.sketch.strokes:
      guid = index_to_guid[stroke.brush_idx]
      used_guids[guid] += 1
    print("Brushes used:")
    # Most-used brushes first (key is the negated usage count).
    for guid, n in sorted(list(used_guids.items()), key=lambda p:-p[1]):
      print("  %5d %s" % (n, brush_lookup.guid_to_name.get(guid)))
    sys.stdout.flush()
    del used_guids
    # Build old brush index -> new brush index; None marks brushes whose
    # strokes should be removed.
    index_to_new_index = {}
    for i, guid in enumerate(index_to_guid):
      name = brush_lookup.guid_to_name.get(guid, guid)
      try:
        new_guid = replacements[guid]
      except KeyError:
        # Brush not covered by the replacement table: leave it untouched.
        print("%d: Don't know what to do with brush %s" % (i, name))
        index_to_new_index[i] = i
      else:
        new_name = brush_lookup.guid_to_name.get(new_guid, new_guid)
        if new_guid is None:
          print("%d: Remove %s" % (i, name))
          index_to_new_index[i] = None
        else:
          if guid == new_guid:
            print("%d: Keep %s" % (i, name))
          elif name == new_name:
            # Same display name, different guid: include guids to disambiguate.
            print("%d: Replace %s/%s -> %s/%s" % (i, name, guid, new_name, new_guid))
          else:
            print("%d: Replace %s -> %s" % (i, name, new_name))
          # Reuse the replacement guid's slot if it is already in the table,
          # otherwise append it as a new entry.
          try:
            new_idx = index_to_guid.index(new_guid)
          except ValueError:
            new_idx = len(index_to_guid)
            index_to_guid.append(new_guid)
          index_to_new_index[i] = new_idx
    brush_indices_to_remove = set(i for (i, new_i) in list(index_to_new_index.items()) if new_i is None)
    if brush_indices_to_remove:
      old_len = len(tilt.sketch.strokes)
      if show_removed:
        # Render in magenta instead of removing
        for stroke in tilt.sketch.strokes:
          if stroke.brush_idx in brush_indices_to_remove:
            stroke.brush_color = (1, 0, 1, 1)
          else:
            stroke.brush_color = stroke.brush_color
      else:
        tilt.sketch.strokes[:] = [s for s in tilt.sketch.strokes if s.brush_idx not in brush_indices_to_remove]
      new_len = len(tilt.sketch.strokes)
      print("Strokes %d -> %d" % (old_len, new_len))
    # Finally, remap every surviving stroke to its new brush index.
    for stroke in tilt.sketch.strokes:
      new_idx = index_to_new_index[stroke.brush_idx]
      # Might be none if it's a removed brush
      if new_idx is not None:
        stroke.brush_idx = new_idx
# ----------------------------------------------------------------------
# Stroke simplification
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
# Stray strokes
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
# Color reduction
# ----------------------------------------------------------------------
def get_most_similar_factors(n):
  """Factorize n into two numbers.

  Returns the best pair, in the sense that the numbers are the closest
  to each other (e.g. 12 -> (3, 4), 16 -> (4, 4), primes -> (1, n)).
  """
  i = int(n**0.5 + 0.5)
  while n % i != 0:
    i -= 1
  # Integer division keeps both factors ints under Python 3; plain '/'
  # would return a float for the second factor.
  return i, n // i
def get_good_factors(n, max_aspect_ratio=None):
  """Factorize n into two integers that are closest to each other.

  If max_aspect_ratio is given, successive candidates n, n+1, ... are
  factorized until a pair whose aspect ratio is <= max_aspect_ratio is
  found; otherwise the best factorization of n itself is returned.
  """
  if max_aspect_ratio is None:
    return get_most_similar_factors(n)
  candidate = n
  while True:
    small, large = get_most_similar_factors(candidate)
    if float(large) / small <= max_aspect_ratio:
      return small, large
    candidate += 1
def rgbaf_to_rgb8(rgbaf):
  """Convert [r, g, b, a] floats to (r, g, b) bytes (alpha is dropped)."""
  red, green, blue = rgbaf[0], rgbaf[1], rgbaf[2]
  return (int(red * 255), int(green * 255), int(blue * 255))
def rgb8_to_rgbaf(rgb8):
  """Convert (r, g, b) bytes to [r, g, b, a] floats with alpha fixed at 1.0."""
  return [channel / 255.0 for channel in rgb8] + [1.0]
# ----------------------------------------------------------------------
# Split export into multiple .obj files
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
# Main
# ----------------------------------------------------------------------
if __name__=='__main__':
main()
| 33.031812 | 166 | 0.638682 |
29160e7d22c4f8b9d6d61e7d7e39b92b66f54862 | 33,150 | py | Python | src/tests/scenarios/Maxwell_Main.py | ian-cooke/basilisk_mag | a8b1e37c31c1287549d6fd4d71fcaa35b6fc3f14 | [
"0BSD"
] | null | null | null | src/tests/scenarios/Maxwell_Main.py | ian-cooke/basilisk_mag | a8b1e37c31c1287549d6fd4d71fcaa35b6fc3f14 | [
"0BSD"
] | 1 | 2019-03-13T20:52:22.000Z | 2019-03-13T20:52:22.000Z | src/tests/scenarios/Maxwell_Main.py | ian-cooke/basilisk_mag | a8b1e37c31c1287549d6fd4d71fcaa35b6fc3f14 | [
"0BSD"
] | null | null | null | ''' '''
'''
ISC License
Copyright (c) 2016, Autonomous Vehicle Systems Lab, University of Colorado at Boulder
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''
#
# Basilisk Scenario Script and Integrated Test
#
# Purpose: Integrated test of the spacecraftPlus(), extForceTorque, simpleNav() and
# MRP_Feedback() modules. Illustrates a 6-DOV spacecraft detumbling in orbit.
# This scenario is the same as scenarioAttitudeControl, but with the
# difference that here the control and dynamics are executed at different
# frequencies or time steps.
# Author: Hanspeter Schaub
# Creation Date: Nov. 25, 2016
#
import pytest
import os
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime
from Basilisk import __path__
# import general simulation support files
from Basilisk.utilities import SimulationBaseClass
from Basilisk.utilities import unitTestSupport # general support file with common unit test functions
from Basilisk.utilities import simIncludeGravBody
from Basilisk.utilities import macros
from Basilisk.utilities import orbitalMotion
from Basilisk.utilities import simIncludeRW
from Basilisk.utilities import fswSetupRW
# import simulation related support
from Basilisk.simulation import spacecraftPlus
from Basilisk.simulation import extForceTorque
from Basilisk.simulation import simMessages
from Basilisk.simulation import sim_model
from Basilisk.simulation import simple_nav
from Basilisk.simulation import mag_meter
from Basilisk.simulation import imu_sensor
from Basilisk.simulation import coarse_sun_sensor
from Basilisk.simulation import reactionWheelStateEffector
from Basilisk.simulation import rwVoltageInterface
from Basilisk.simulation import torqueRodDynamicEffector
# import FSW Algorithm related support
from Basilisk.fswAlgorithms import B_DOT
from Basilisk.fswAlgorithms import inertial3D
from Basilisk.fswAlgorithms import attTrackingError
from Basilisk.fswAlgorithms import rwMotorVoltage
from Basilisk.fswAlgorithms import rwMotorTorque
from Basilisk.fswAlgorithms import maxwellLS
from Basilisk.fswAlgorithms import QUAT_PD
from Basilisk.fswAlgorithms import sunSafePoint
# import message declarations
from Basilisk.fswAlgorithms import fswMessages
bskPath = __path__[0]
from Basilisk import pyswice
def run(show_plots, detumble, saturate, sunpoint, useUnmodeledTorque, useJitterSimple, useRWVoltageIO):
'''Call this routine directly to run the tutorial scenario.'''
# Create simulation variable names
dynTaskName = "dynTask"
dynProcessName = "dynProcess"
fswTaskName = "fswTask"
fswProcessName = "fswProcess"
# Create a sim module as an empty container
scSim = SimulationBaseClass.SimBaseClass()
scSim.TotalSim.terminateSimulation()
# set the simulation time variable used later on
simulationTime = macros.min2nano(2)
#
# create the simulation process
#
dynProcess = scSim.CreateNewProcess(dynProcessName)
fswProcess = scSim.CreateNewProcess(fswProcessName)
# Process message interfaces.
# this step is used to copy messages between the dyn and fsw processes
# as long as the message has the same name, it will get copied over automatically
dyn2FSWInterface = sim_model.SysInterface()
fsw2DynInterface = sim_model.SysInterface()
dyn2FSWInterface.addNewInterface(dynProcessName, fswProcessName)
fsw2DynInterface.addNewInterface(fswProcessName, dynProcessName)
fswProcess.addInterfaceRef(dyn2FSWInterface)
dynProcess.addInterfaceRef(fsw2DynInterface)
# create the dynamics task and specify the integration update time
simTimeStep = macros.sec2nano(0.1)
dynProcess.addTask(scSim.CreateNewTask(dynTaskName, simTimeStep))
fswTimeStep = macros.sec2nano(0.1)
fswProcess.addTask(scSim.CreateNewTask(fswTaskName, fswTimeStep))
# if this scenario is to interface with the BSK Viz, uncomment the following lines
# unitTestSupport.enableVisualization(scSim, dynProcess, simProcessName, 'earth') # The Viz only support 'earth', 'mars', or 'sun'
#
# setup the simulation tasks/objects
#
# initialize spacecraftPlus object and set properties
scObject = spacecraftPlus.SpacecraftPlus()
scObject.ModelTag = "spacecraftBody"
# define the simulation inertia
I = [0.0511, 0., 0.,
0., 0.1522, 0.,
0., 0., 0.1179]
scObject.hub.mHub = 10.0 # kg - spacecraft mass
scObject.hub.r_BcB_B = [[0.0], [0.0], [0.0]] # m - position vector of body-fixed point B relative to CM
scObject.hub.IHubPntBc_B = unitTestSupport.np2EigenMatrix3d(I)
# add spacecraftPlus object to the simulation process
scSim.AddModelToTask(dynTaskName, scObject)
# clear prior gravitational body and SPICE setup definitions
gravFactory = simIncludeGravBody.gravBodyFactory()
gravBodies = gravFactory.createBodies(['earth','sun','moon'])
# setup Earth Gravity Body
earth = gravBodies['earth']
earth.isCentralBody = True # ensure this is the central gravitational body
mu = earth.mu
simIncludeGravBody.loadGravFromFile(bskPath + '/supportData/LocalGravData/GGM03S.txt'
, earth.spherHarm
, 100)
# attach gravity model to spaceCraftPlus
scObject.gravField.gravBodies = spacecraftPlus.GravBodyVector(gravFactory.gravBodies.values())
# setup simulation start data/time
timeInitString = "2020 March 1 00:28:30.0"
spiceTimeStringFormat = '%Y %B %d %H:%M:%S.%f'
timeInit = datetime.strptime(timeInitString, spiceTimeStringFormat)
# setup SPICE module
gravFactory.createSpiceInterface(bskPath + '/supportData/EphemerisData/', timeInitString)
gravFactory.spiceObject.zeroBase = 'Earth'
# add SPICE interface to task list
scSim.AddModelToTask(dynTaskName, gravFactory.spiceObject, None, -1)
# attach gravity model to spaceCraftPlus
scObject.gravField.gravBodies = spacecraftPlus.GravBodyVector(gravFactory.gravBodies.values())
#
# set initial Spacecraft States
#
# setup the orbit using classical orbit elements
oe = orbitalMotion.ClassicElements()
orbitRadius = 550.0
oe.a = (6371.0 + orbitRadius) * 1000.0 # meters
oe.e = 0.0001
oe.i = 45 * macros.D2R
oe.Omega = 0.0 * macros.D2R
oe.omega = 0.0 * macros.D2R
oe.f = 180.0 * macros.D2R
rN, vN = orbitalMotion.elem2rv(mu, oe)
scObject.hub.r_CN_NInit = unitTestSupport.np2EigenVectorXd(rN) # m - r_CN_N
scObject.hub.v_CN_NInit = unitTestSupport.np2EigenVectorXd(vN) # m/s - v_CN_N
scObject.hub.sigma_BNInit = [[0.1], [0.2], [-0.3]] # sigma_BN_B
if detumble:
scObject.hub.omega_BN_BInit = [[13*macros.D2R], [13*macros.D2R], [13*macros.D2R]] # rad/s - omega_BN_B
if sunpoint:
scObject.hub.omega_BN_BInit = [[0.001*macros.D2R], [0.001*macros.D2R], [0.001*macros.D2R]] # rad/s - omega_BN_B
if saturate or sunpoint:
#
# Add RW devices
#
rwFactory = simIncludeRW.rwFactory()
# store the RW dynamical model type
varRWModel = rwFactory.BalancedWheels
if useJitterSimple:
varRWModel = rwFactory.JitterSimple
# create each RW by specifying the RW type, the spin axis gsHat, plus optional arguments
RW1 = rwFactory.create('NanoAvionics_RW0', [0.422618261740699, 0.906307787036650, 0], maxMomentum=0.02, Omega=0. # RPM
, RWModel=varRWModel,
)
RW2 = rwFactory.create('NanoAvionics_RW0', [0.422618261740699, 0, 0.906307787036650], maxMomentum=0.02, Omega=0. # RPM
, RWModel=varRWModel,
)
RW3 = rwFactory.create('NanoAvionics_RW0', [0.422618261740699, -0.906307787036650, 0], maxMomentum=0.02, Omega=0. # RPM
, RWModel=varRWModel,
)
RW4 = rwFactory.create('NanoAvionics_RW0', [0.422618261740699, 0, -0.906307787036650], maxMomentum=0.02, Omega=0.
, RWModel=varRWModel,
)
numRW = rwFactory.getNumOfDevices()
# create RW object container and tie to spacecraft object
rwStateEffector = reactionWheelStateEffector.ReactionWheelStateEffector()
rwStateEffector.InputCmds = "reactionwheel_cmds"
rwFactory.addToSpacecraft("ReactionWheels", rwStateEffector, scObject)
# add RW object array to the simulation process
scSim.AddModelToTask(dynTaskName, rwStateEffector, None, 2)
if useRWVoltageIO:
rwVoltageIO = rwVoltageInterface.RWVoltageInterface()
rwVoltageIO.ModelTag = "rwVoltageInterface"
# set module parameters(s)
rwVoltageIO.setGains(np.array([0.2 / 10.] * 3)) # [Nm/V] conversion gain
# Add test module to runtime call list
scSim.AddModelToTask(dynTaskName, rwVoltageIO)
# add the simple Navigation sensor module. This sets the SC attitude, rate, position
# velocity navigation message
sNavObject = simple_nav.SimpleNav()
sNavObject.ModelTag = "SimpleNavigation"
scSim.AddModelToTask(dynTaskName, sNavObject)
#
# setup sensors
#
# Add IMU Sensor
ImuSensor = imu_sensor.ImuSensor()
ImuSensor.ModelTag = "imusensor"
r_SB_B = np.array([0.0, 0.0, 0.0]) # Sensor position wrt body frame origin
ImuSensor.sensorPos_B = np.array(r_SB_B)
# IMU Parameters
accelLSBIn = 0.0 # Not Used
gyroLSBIn = 0.0001 # Discretization value (least significant bit)
senRotBiasIn = 0.0 # Rotational sensor bias
senRotMaxIn = 50.0 # Gyro saturation value
gyroScale = [1., 1., 1.] # Scale factor for each axis
errorBoundsGryo = [0] * 3 # Bounds random walk
gyroNoise = 0.000 # Noise
ImuSensor.setLSBs(accelLSBIn, gyroLSBIn)
ImuSensor.senRotBias = np.array([senRotBiasIn] * 3)
ImuSensor.senRotMax = senRotMaxIn
ImuSensor.gyroScale = np.array(gyroScale)
ImuSensor.PMatrixGyro = np.eye(3) * gyroNoise
ImuSensor.walkBoundsGyro = np.array(errorBoundsGryo)
# add IMU to Simulation Process
scSim.AddModelToTask(dynTaskName, ImuSensor)
# Add Mag Meter
MagMeter = mag_meter.MagMeter()
MagMeter.ModelTag = "MagMeter"
MagMeterNoise = 0.00000
MagMeterBias = 0.0000
ImuSensor.senRotBias = np.array([MagMeterBias] * 3)
MagMeter.PMatrix = np.eye(3) * MagMeterNoise
MagMeter.inclination = oe.i
MagMeter.orbitRadius = oe.a / 1000 # 6371.0 + orbitRadius
scSim.AddModelToTask(dynTaskName, MagMeter)
# # Add Coarse Sun Sensors
cssConstellation = coarse_sun_sensor.CSSConstellation()
CSSOrientationList = [
[0.866, 0.000, -0.500], # 1 - 13 G
[0.866, -0.433, 0.250], # 2 - 14 G
[0.866, 0.433, 0.250], # 3 - 12 G
[0.500, 0.866, 0.000], # 4 - 10 G
[0.500, -0.866, 0.000], # 5 - 7 G
[0.000, -0.866, -0.500], # 6 - 9 G
[0.500, 0.866, 0.000], # 7 - 5 G
[0.000, 0.866, -0.500], # 8 - 11 G
[0.000, 0.866, 0.500], # 9 - 6 G
[0.500, -0.866, 0.000], # 10 - 4 G
[0.000, -0.866, 0.500], # 11 - 8 G
[0.866, -0.433, -0.250], # 12 - 3 G
[0.866, 0.000, 0.500], # 13 - 1 G
[0.866, 0.433, -0.250] # 14 - 2 G
]
for CSSHat in CSSOrientationList:
newCSS = coarse_sun_sensor.CoarseSunSensor()
newCSS.minOutput = 0.
newCSS.senNoiseStd = 0.00
newCSS.nHat_B = CSSHat
cssConstellation.appendCSS(newCSS)
cssConstellation.outputConstellationMessage = "css_sensors_data"
scSim.AddModelToTask(dynTaskName, cssConstellation)
# Add the normals to the vehicle Config data struct
cssConstVehicle = fswMessages.CSSConfigFswMsg()
totalCSSList = []
for CSSHat in CSSOrientationList:
newCSS = fswMessages.CSSUnitConfigFswMsg()
newCSS.nHat_B = CSSHat
newCSS.CBias = 1.0
totalCSSList.append(newCSS)
cssConstVehicle.nCSS = len(CSSOrientationList)
cssConstVehicle.cssVals = totalCSSList
# setup Sun Position
pyswice.furnsh_c(gravFactory.spiceObject.SPICEDataPath + 'de430.bsp') # solar system bodies
pyswice.furnsh_c(gravFactory.spiceObject.SPICEDataPath + 'naif0011.tls') # leap second file
pyswice.furnsh_c(gravFactory.spiceObject.SPICEDataPath + 'de-403-masses.tpc') # solar system masses
pyswice.furnsh_c(gravFactory.spiceObject.SPICEDataPath + 'pck00010.tpc') # generic Planetary Constants Kernel
sunPositionMsg = simMessages.SpicePlanetStateSimMsg()
sunInitialState = 1000 * pyswice.spkRead('SUN', timeInitString, 'J2000', 'EARTH')
rN_sun = sunInitialState[0:3] # meters
vN_sun = sunInitialState[3:6] # m/s
sunPositionMsg.PositionVector = rN_sun
sunPositionMsg.VelocityVector = vN_sun
#
# setup the FSW algorithm tasks
#
# setup inertial3D guidance module
inertial3DConfig = inertial3D.inertial3DConfig()
inertial3DWrap = scSim.setModelDataWrap(inertial3DConfig)
inertial3DWrap.ModelTag = "inertial3D"
scSim.AddModelToTask(fswTaskName, inertial3DWrap, inertial3DConfig)
inertial3DConfig.sigma_R0N = [0., 0., 0.] # set the desired inertial orientation
inertial3DConfig.outputDataName = "guidanceInertial3D"
# setup the attitude tracking error evaluation module
attErrorConfig = attTrackingError.attTrackingErrorConfig()
attErrorWrap = scSim.setModelDataWrap(attErrorConfig)
attErrorWrap.ModelTag = "attErrorInertial3D"
scSim.AddModelToTask(fswTaskName, attErrorWrap, attErrorConfig)
attErrorConfig.outputDataName = "attErrorInertial3DMsg"
attErrorConfig.inputRefName = inertial3DConfig.outputDataName
attErrorConfig.inputNavName = sNavObject.outputAttName
if detumble:
# setup the MRP Feedback control module
bdotControlConfig = B_DOT.B_DOTConfig()
bdotControlWrap = scSim.setModelDataWrap(bdotControlConfig)
bdotControlWrap.ModelTag = "B_DOT"
scSim.AddModelToTask(fswTaskName, bdotControlWrap, bdotControlConfig)
bdotControlConfig.inputMagMeterName = MagMeter.outputStateMessage
bdotControlConfig.vehConfigInMsgName = "vehicleConfigName"
bdotControlConfig.outputDataName = "LrRequested"
bdotControlConfig.K_detumble = 1000.0
if saturate:
bdotControlConfig.use_rw_wheels = 1
bdotControlConfig.rwParamsInMsgName = "rwa_config_data_parsed"
bdotControlConfig.inputRWSpeedsName = rwStateEffector.OutputDataString
# add module that maps the Lr control torque into the RW motor torques
rwMotorTorqueConfig = rwMotorTorque.rwMotorTorqueConfig()
rwMotorTorqueWrap = scSim.setModelDataWrap(rwMotorTorqueConfig)
rwMotorTorqueWrap.ModelTag = "rwMotorTorque"
scSim.AddModelToTask(dynTaskName, rwMotorTorqueWrap, rwMotorTorqueConfig)
# Initialize the test module msg names
if useRWVoltageIO:
rwMotorTorqueConfig.outputDataName = "rw_torque_Lr"
else:
rwMotorTorqueConfig.outputDataName = rwStateEffector.InputCmds
rwMotorTorqueConfig.inputVehControlName = bdotControlConfig.outputDataName
rwMotorTorqueConfig.rwParamsInMsgName = bdotControlConfig.rwParamsInMsgName
# Make the RW control all three body axes
controlAxes_B = [
1, 0, 0,
0, 1, 0,
0, 0, 1
]
rwMotorTorqueConfig.controlAxes_B = controlAxes_B
if useRWVoltageIO:
fswRWVoltageConfig = rwMotorVoltage.rwMotorVoltageConfig()
fswRWVoltageWrap = scSim.setModelDataWrap(fswRWVoltageConfig)
fswRWVoltageWrap.ModelTag = "rwMotorVoltage"
# Add test module to runtime call list
scSim.AddModelToTask(dynTaskName, fswRWVoltageWrap, fswRWVoltageConfig)
# Initialize the test module configuration data
fswRWVoltageConfig.torqueInMsgName = rwMotorTorqueConfig.outputDataName
fswRWVoltageConfig.rwParamsInMsgName = bdotControlConfig.rwParamsInMsgName
fswRWVoltageConfig.voltageOutMsgName = rwVoltageIO.rwVoltageInMsgName
# set module parameters
fswRWVoltageConfig.VMin = 0.0 # Volts
fswRWVoltageConfig.VMax = 5.0 # Volts
else:
bdotControlConfig.use_rw_wheels = 0
torqueRodConfig = torqueRodDynamicEffector.torqueRodDynamicEffector()
# torqueRodWrap = scSim.setModelDataWrap(torqueRodConfig)
torqueRodConfig.ModelTag = "torqueRods"
torqueRodConfig.magFieldMsgName = MagMeter.outputStateMessage
torqueRodConfig.cmdTorqueRodsMsgName = bdotControlConfig.outputDataName
torqueRodConfig.MaxDipoleMoment = 0.11 # [Am^2]
scObject.addDynamicEffector(torqueRodConfig)
scSim.AddModelToTask(dynTaskName, torqueRodConfig)
if sunpoint:
# Add Maxwell LS
sunVectorConfig = maxwellLS.maxwellLSConfig()
sunVectorWrap = scSim.setModelDataWrap(sunVectorConfig)
sunVectorWrap.ModelTag = "maxwellLS"
sunVectorConfig.cssDataInMsgName = "css_sensors_data"
sunVectorConfig.cssConfigInMsgName = "css_config_data"
sunVectorConfig.navStateOutMsgName = "css_nav_sunHeading"
sunVectorConfig.sunpointOutMsgName = "sun_direction"
sunVectorConfig.sensorUseThresh = 0.15
scSim.AddModelToTask(fswTaskName, sunVectorWrap, sunVectorConfig)
# setup the QUAT PD control module
quatControlConfig = QUAT_PD.QUAT_PDConfig()
quatControlWrap = scSim.setModelDataWrap(quatControlConfig)
quatControlWrap.ModelTag = "QUAT_PD"
scSim.AddModelToTask(fswTaskName, quatControlWrap, quatControlConfig)
quatControlConfig.inputSunName = "sun_direction"
quatControlConfig.inputAttName = sNavObject.outputAttName
quatControlConfig.inputGuidName = attErrorConfig.outputDataName
quatControlConfig.inputRatesName = ImuSensor.OutputDataMsg
quatControlConfig.vehConfigInMsgName = "vehicleConfigName"
quatControlConfig.outputDataName = "LrRequested"
quatControlConfig.rwParamsInMsgName = "rwa_config_data_parsed"
quatControlConfig.inputRWSpeedsName = rwStateEffector.OutputDataString
quatControlConfig.outputErrorName = "controlError"
quatControlConfig.K = 0.015
quatControlConfig.P = 0.01
# add module that maps the Lr control torque into the RW motor torques
rwMotorTorqueConfig = rwMotorTorque.rwMotorTorqueConfig()
rwMotorTorqueWrap = scSim.setModelDataWrap(rwMotorTorqueConfig)
rwMotorTorqueWrap.ModelTag = "rwMotorTorque"
scSim.AddModelToTask(dynTaskName, rwMotorTorqueWrap, rwMotorTorqueConfig)
# Initialize the test module msg names
if useRWVoltageIO:
rwMotorTorqueConfig.outputDataName = "rw_torque_Lr"
else:
rwMotorTorqueConfig.outputDataName = rwStateEffector.InputCmds
rwMotorTorqueConfig.inputVehControlName = quatControlConfig.outputDataName
rwMotorTorqueConfig.rwParamsInMsgName = quatControlConfig.rwParamsInMsgName
# Make the RW control all three body axes
controlAxes_B = [
1, 0, 0,
0, 1, 0,
0, 0, 1
]
rwMotorTorqueConfig.controlAxes_B = controlAxes_B
if useRWVoltageIO:
fswRWVoltageConfig = rwMotorVoltage.rwMotorVoltageConfig()
fswRWVoltageWrap = scSim.setModelDataWrap(fswRWVoltageConfig)
fswRWVoltageWrap.ModelTag = "rwMotorVoltage"
# Add test module to runtime call list
scSim.AddModelToTask(dynTaskName, fswRWVoltageWrap, fswRWVoltageConfig)
# Initialize the test module configuration data
fswRWVoltageConfig.torqueInMsgName = rwMotorTorqueConfig.outputDataName
fswRWVoltageConfig.rwParamsInMsgName = quatControlConfig.rwParamsInMsgName
fswRWVoltageConfig.voltageOutMsgName = rwVoltageIO.rwVoltageInMsgName
# set module parameters
fswRWVoltageConfig.VMin = 0.0 # Volts
fswRWVoltageConfig.VMax = 5.0 # Volts
#
# Setup data logging before the simulation is initialized
#
numDataPoints = 100000
samplingTime = simulationTime / (numDataPoints - 1)
if detumble:
# scSim.TotalSim.logThisMessage(bdotControlConfig.outputDataName, samplingTime)
# scSim.TotalSim.logThisMessage(attErrorConfig.outputDataName, samplingTime)
# scSim.TotalSim.logThisMessage(sNavObject.outputTransName, samplingTime)
# scSim.TotalSim.logThisMessage(sNavObject.outputAttName, samplingTime)
scSim.TotalSim.logThisMessage(ImuSensor.OutputDataMsg, samplingTime)
scSim.TotalSim.logThisMessage(MagMeter.outputStateMessage, samplingTime)
scSim.TotalSim.logThisMessage(bdotControlConfig.inputMagMeterName, samplingTime)
# create the FSW vehicle configuration message
vehicleConfigOut = fswMessages.VehicleConfigFswMsg()
vehicleConfigOut.ISCPntB_B = I # use the same inertia in the FSW algorithm as in the simulation
unitTestSupport.setMessage(scSim.TotalSim,
fswProcessName,
bdotControlConfig.vehConfigInMsgName,
vehicleConfigOut)
if saturate:
scSim.TotalSim.logThisMessage(bdotControlConfig.inputRWSpeedsName, samplingTime)
rwOutName = ["rw_config_0_data", "rw_config_1_data", "rw_config_2_data", "rw_config_3_data"]
for item in rwOutName:
scSim.TotalSim.logThisMessage(item, samplingTime)
if useRWVoltageIO:
scSim.TotalSim.logThisMessage(fswRWVoltageConfig.voltageOutMsgName, samplingTime)
# FSW RW configuration message
# use the same RW states in the FSW algorithm as in the simulation
fswSetupRW.clearSetup()
for key, rw in rwFactory.rwList.iteritems():
fswSetupRW.create(unitTestSupport.EigenVector3d2np(rw.gsHat_B), rw.Js, 0.2)
fswSetupRW.writeConfigMessage(bdotControlConfig.rwParamsInMsgName, scSim.TotalSim, dynProcessName)
if sunpoint:
scSim.TotalSim.logThisMessage(cssConstellation.outputConstellationMessage, samplingTime)
scSim.TotalSim.logThisMessage(sunVectorConfig.sunpointOutMsgName, samplingTime)
scSim.TotalSim.logThisMessage(attErrorConfig.outputDataName, samplingTime)
scSim.TotalSim.logThisMessage(sNavObject.outputAttName, samplingTime)
scSim.TotalSim.logThisMessage(quatControlConfig.inputRWSpeedsName, samplingTime)
scSim.TotalSim.logThisMessage(quatControlConfig.outputErrorName, samplingTime)
scSim.TotalSim.logThisMessage(attErrorConfig.outputDataName, samplingTime)
rwOutName = ["rw_config_0_data", "rw_config_1_data", "rw_config_2_data", "rw_config_3_data"]
for item in rwOutName:
scSim.TotalSim.logThisMessage(item, samplingTime)
if useRWVoltageIO:
scSim.TotalSim.logThisMessage(fswRWVoltageConfig.voltageOutMsgName, samplingTime)
# create the FSW vehicle configuration message
vehicleConfigOut = fswMessages.VehicleConfigFswMsg()
vehicleConfigOut.ISCPntB_B = I # use the same inertia in the FSW algorithm as in the simulation
unitTestSupport.setMessage(scSim.TotalSim,
fswProcessName,
quatControlConfig.vehConfigInMsgName,
vehicleConfigOut)
# FSW RW configuration message
# use the same RW states in the FSW algorithm as in the simulation
fswSetupRW.clearSetup()
for key, rw in rwFactory.rwList.iteritems():
fswSetupRW.create(unitTestSupport.EigenVector3d2np(rw.gsHat_B), rw.Js, 0.2)
fswSetupRW.writeConfigMessage(quatControlConfig.rwParamsInMsgName, scSim.TotalSim, dynProcessName)
#
# initialize Simulation
#
scSim.InitializeSimulationAndDiscover()
# this next call ensures that the FSW and Dynamics Message that have the same
# name are copied over every time the simulation ticks forward. This function
# has to be called after the simulation is initialized to ensure that all modules
# have created their own output/input messages declarations.
# dyn2FSWInterface.discoverAllMessages()
# fsw2DynInterface.discoverAllMessages()
#
# configure a simulation stop time time and execute the simulation run
#
scSim.ConfigureStopTime(simulationTime)
scSim.ExecuteSimulation()
#
# retrieve the logged data
#
if detumble:
# dataLr = scSim.pullMessageLogData(bdotControlConfig.outputDataName + ".torqueRequestBody", range(3))
# dataPos = scSim.pullMessageLogData(sNavObject.outputTransName + ".r_BN_N", range(3))
dataOmegaIMU = scSim.pullMessageLogData(ImuSensor.OutputDataMsg + ".AngVelPlatform", range(3))
dataMagBody = scSim.pullMessageLogData(bdotControlConfig.inputMagMeterName + ".mag_bf", range(3))
dataMagLVLH = scSim.pullMessageLogData(bdotControlConfig.inputMagMeterName + ".mag_hill", range(3))
if saturate:
dataOmegaRW = scSim.pullMessageLogData(bdotControlConfig.inputRWSpeedsName + ".wheelSpeeds", range(numRW))
np.set_printoptions(precision=16)
if sunpoint:
dataCSSArray = scSim.pullMessageLogData(cssConstellation.outputConstellationMessage + ".CosValue",
range(len(CSSOrientationList)))
dataSunVector = scSim.pullMessageLogData(sunVectorConfig.sunpointOutMsgName + ".q_des_RN", range(4))
dataOmegaRW = scSim.pullMessageLogData(quatControlConfig.inputRWSpeedsName + ".wheelSpeeds", range(numRW))
dataSigmaBN = scSim.pullMessageLogData(sNavObject.outputAttName + ".sigma_BN", range(3))
dataOmegaBN = scSim.pullMessageLogData(sNavObject.outputAttName + ".omega_BN_B", range(3))
dataSigmaBR = scSim.pullMessageLogData(attErrorConfig.outputDataName + ".sigma_BR", range(3))
#
# plot the results
#
fileName = os.path.basename(os.path.splitext(__file__)[0])
plt.close("all") # clears out plots from earlier test runs
if detumble:
plt.figure(1)
for idx in range(1, 4):
plt.plot(dataOmegaIMU[:, 0] * macros.NANO2MIN, dataOmegaIMU[:, idx] * macros.R2D,
color=unitTestSupport.getLineColor(idx, 3),
label='$\omega_' + str(idx) + '$')
plt.title('Detumbling Simulation Angular Rates', fontsize=16, fontweight='bold')
plt.legend(loc='upper right', fontsize=16)
plt.xlabel('Time (min)', fontsize=16)
plt.ylabel('Angular Rates (deg/s)', fontsize=16)
# # Mag Meter Body
# plt.figure(6)
# plt.plot(dataMagBody[:, 0] * macros.NANO2HOUR, dataMagBody[:, 1],
# color='blue',
# label='x')
# plt.plot(dataMagBody[:, 0] * macros.NANO2HOUR, dataMagBody[:, 2],
# color='red',
# label='y')
# plt.plot(dataMagBody[:, 0] * macros.NANO2HOUR, dataMagBody[:, 3],
# color='black',
# label='z')
# plt.grid(True)
# plt.legend(loc='upper right', fontsize=16)
# plt.title('Magnetic Field - Body Frame', fontsize=16)
# plt.xlabel('Time (h)', fontsize=16)
# plt.ylabel('Magnetic Field Magnitude (T)', fontsize=16)
# # Mag Meter LVLH
# plt.figure(7)
# plt.plot(dataMagLVLH[:, 0] * macros.NANO2HOUR, dataMagLVLH[:, 1],
# color='blue',
# label='$i_r$')
# plt.plot(dataMagLVLH[:, 0] * macros.NANO2HOUR, dataMagLVLH[:, 2],
# color='red',
# label='$i_{\\theta}$')
# plt.plot(dataMagLVLH[:, 0] * macros.NANO2HOUR, dataMagLVLH[:, 3],
# color='black',
# label='$i_h$')
# plt.grid(True)
# plt.legend(loc='upper right', fontsize=16)
# plt.title('Basilisk (Simple Tilted Dipole) - 90 degree inclination', fontsize=16)
# plt.xlabel('Time (h)', fontsize=16)
# plt.ylabel('Magnetic Field Magnitude (T)', fontsize=16)
if saturate:
plt.figure(2)
for idx in range(1, numRW + 1):
plt.plot(dataOmegaRW[:, 0] * macros.NANO2MIN, dataOmegaRW[:, idx] / macros.RPM,
color=unitTestSupport.getLineColor(idx, numRW),
label='$\Omega_{' + str(idx) + '}$')
plt.title('Reaction Wheel Spin Rates', fontsize=16, fontweight='bold')
plt.legend(loc='upper right', fontsize=16)
plt.xlabel('Time (min)', fontsize=16)
plt.ylabel('RW Speed [RPM]', fontsize=16)
if sunpoint:
# CSS Sensor Readings
plt.figure(1)
for idx in range(1, 15): # range(1,len(CSSList)+1) currently hardcoded. Remove when initialization block
plt.plot(dataCSSArray[:, 0] * macros.NANO2SEC, dataCSSArray[:, idx],
# color=unitTestSupport.getLineColor(idx,2),
label='CSS$_{' + str(idx) + '}$')
plt.title('CSS raw sensor readings', fontsize=12, fontweight='bold')
plt.xlabel('Time [sec]', fontsize=10, fontweight='bold')
plt.legend(fontsize=10)
plt.ylabel("CSS Voltage", fontsize=10, fontweight='bold')
# plt.figure(2)
# for idx in range(1, 5):
# plt.plot(dataSunVector[:, 0] * macros.NANO2SEC, dataSunVector[:, idx],
# color=unitTestSupport.getLineColor(idx, 4),
# label='$\\beta_{' + str(idx) + '}$')
# plt.legend(loc='lower right')
# plt.title('Sun Vector Estimation Quaternion')
# plt.xlabel('Time [sec]')
# plt.ylabel('Quaternion $\\beta_{B/R}$')
plt.figure(7)
for idx in range(1, 4):
plt.plot(dataSigmaBR[:, 0] * macros.NANO2SEC, dataSigmaBR[:, idx],
color=unitTestSupport.getLineColor(idx, 3),
label='$\sigma_' + str(idx) + '$')
plt.title('Control Error', fontsize=16, fontweight='bold')
plt.legend(loc='upper right', fontsize=16)
plt.xlabel('Time (s)', fontsize=16)
plt.ylabel('$\sigma_{B/R}$', fontsize=16)
plt.figure(4)
for idx in range(1, numRW + 1):
plt.plot(dataOmegaRW[:, 0] * macros.NANO2SEC, dataOmegaRW[:, idx] / macros.RPM,
color=unitTestSupport.getLineColor(idx, numRW),
label='$\Omega_{' + str(idx) + '}$')
plt.legend(loc='lower right')
plt.xlabel('Time [sec]')
plt.ylabel('RW Speed (RPM) ')
# plt.figure(5)
# for idx in range(1,4):
# plt.plot(dataSigmaBN[:, 0] * macros.NANO2SEC, dataSigmaBN[:, idx],
# color=unitTestSupport.getLineColor(idx, 3),
# label='$\sigma_' + str(idx) + '$')
# plt.legend(loc='lower right')
# plt.xlabel('Time [min]')
# plt.ylabel('Inertial Attitude $\sigma_{B/N}$')
plt.figure(6)
for idx in range(1,4):
plt.plot(dataOmegaBN[:, 0] * macros.NANO2SEC, dataOmegaBN[:, idx] * macros.R2D,
color=unitTestSupport.getLineColor(idx, 3),
label='$\omega_' + str(idx) + '$')
plt.legend(loc='lower right')
plt.xlabel('Time [sec]')
plt.ylabel('Angular Rates')
if show_plots:
plt.show()
# close the plots being saved off to avoid over-writing old and new figures
plt.close("all")
return numDataPoints
#
# This statement below ensures that the unit test scrip can be run as a
# stand-along python script
#
# Stand-alone entry point: execute the scenario once with a fixed
# configuration.  `run` is defined earlier in this file; the positional
# flags are labeled inline below.
if __name__ == "__main__":
    run(
        True,  # show_plots
        False,  # detumble
        False,  # saturate
        True,  # sunpoint
        False,  # useUnmodeledTorque
        False,  # useJitterSimple
        False,  # useRWVoltageIO
    )
| 45.163488 | 135 | 0.672851 |
29173261ce308b02b389884ae4d161ff95145254 | 2,458 | py | Python | generators/map_parallel.py | CodyKochmann/generators | a637bf9cb5e48251aa800753ba0aa79b3ca18dcf | [
"MIT"
] | 6 | 2017-12-21T04:32:35.000Z | 2022-02-15T07:06:45.000Z | generators/map_parallel.py | CodyKochmann/generators | a637bf9cb5e48251aa800753ba0aa79b3ca18dcf | [
"MIT"
] | 21 | 2017-09-08T13:02:18.000Z | 2020-03-28T19:10:01.000Z | generators/map_parallel.py | CodyKochmann/generators | a637bf9cb5e48251aa800753ba0aa79b3ca18dcf | [
"MIT"
] | 2 | 2018-09-30T16:16:10.000Z | 2019-05-06T02:16:11.000Z | from multiprocessing import Pool
from multiprocessing.pool import ThreadPool
from queue import Queue
from .chunks import chunks
__all__ = 'map_parallel', 'map_multicore', 'map_multithread'
def map_multicore(pipe, fn, workers):
    '''Apply ``fn`` to every item of ``pipe`` on a pool of worker processes.

    Unlike ``multiprocessing.Pool.map``, the input stream is consumed
    lazily instead of being materialized into one large list first.

    Args:
        pipe: any iterable of input items.
        fn: callable applied to each item (must be picklable, since the
            work is dispatched to separate processes).
        workers: positive int, number of worker processes to use.

    Returns:
        the result of ``_pool_map_stream`` over the mapped stream.
    '''
    assert callable(fn), fn
    assert isinstance(workers, int), workers
    assert workers > 0, workers
    pipe = iter(pipe)
    # Pass the arguments explicitly instead of **locals(): the locals()
    # idiom silently changes the call if any new local variable is ever
    # introduced before this return, producing an unexpected-kwarg error.
    return _pool_map_stream(Pool, pipe=pipe, fn=fn, workers=workers)
def map_multithread(pipe, fn, workers):
    '''Apply ``fn`` to every item of ``pipe`` on a pool of worker threads.

    Unlike ``multiprocessing.pool.ThreadPool.map``, the input stream is
    consumed lazily instead of being materialized into one large list
    first.

    Args:
        pipe: any iterable of input items.
        fn: callable applied to each item (threads share memory, so no
            pickling requirement applies here).
        workers: positive int, number of worker threads to use.

    Returns:
        the result of ``_pool_map_stream`` over the mapped stream.
    '''
    assert callable(fn), fn
    assert isinstance(workers, int), workers
    assert workers > 0, workers
    pipe = iter(pipe)
    # Pass the arguments explicitly instead of **locals(): the locals()
    # idiom silently changes the call if any new local variable is ever
    # introduced before this return, producing an unexpected-kwarg error.
    return _pool_map_stream(ThreadPool, pipe=pipe, fn=fn, workers=workers)
if __name__ == '__main__':
    # Ad-hoc smoke test for the parallel map helpers.
    # NOTE(review): `G` and `work` are not defined anywhere in this
    # fragment — presumably `G` is the package's chainable generator
    # wrapper and `work` a sample worker function; confirm against the
    # full module before running this block.
    import random, time
    l = G(
        range(10)
    ).map(
        float
    ).map_parallel(
        work,
        5
    ).print().run()
| 30.345679 | 78 | 0.627339 |
2917e89341b91949b9706419236eae722cd755a7 | 492 | py | Python | apps/bot/classes/messages/attachments/AudioAttachment.py | Xoma163/Petrovich | 026e246f6b7d492d9be2dea205e351ac83acd89e | [
"MIT"
] | null | null | null | apps/bot/classes/messages/attachments/AudioAttachment.py | Xoma163/Petrovich | 026e246f6b7d492d9be2dea205e351ac83acd89e | [
"MIT"
] | null | null | null | apps/bot/classes/messages/attachments/AudioAttachment.py | Xoma163/Petrovich | 026e246f6b7d492d9be2dea205e351ac83acd89e | [
"MIT"
] | null | null | null | from apps.bot.classes.messages.attachments.Attachment import Attachment
| 30.75 | 80 | 0.691057 |
2917f428a5344543b5b9765a392fb7105a798a1a | 4,384 | py | Python | app/models.py | TrigeekSpace/academia-bknd | bd3b821240ef50868cd7d7b59c8d25e71086e70e | [
"BSD-3-Clause"
] | null | null | null | app/models.py | TrigeekSpace/academia-bknd | bd3b821240ef50868cd7d7b59c8d25e71086e70e | [
"BSD-3-Clause"
] | null | null | null | app/models.py | TrigeekSpace/academia-bknd | bd3b821240ef50868cd7d7b59c8d25e71086e70e | [
"BSD-3-Clause"
] | null | null | null | """ SQLAlchemy database models. """
from datetime import datetime
from depot.fields.sqlalchemy import UploadedFileField
from app import db
from app.util.data import many_to_many, foreign_key
from app.config import TOKEN_LEN
| 47.652174 | 87 | 0.707345 |