Dataset schema (one row per source file; columns marked nullable may be null, shown as ⌀ in the original viewer):

| column | dtype | range / values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 3 to 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 972 |
| max_stars_repo_name | string | length 6 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 (nullable) | 1 to 191k |
| max_stars_repo_stars_event_min_datetime | string (nullable) | length 24 |
| max_stars_repo_stars_event_max_datetime | string (nullable) | length 24 |
| max_issues_repo_path | string | length 3 to 972 |
| max_issues_repo_name | string | length 6 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 (nullable) | 1 to 116k |
| max_issues_repo_issues_event_min_datetime | string (nullable) | length 24 |
| max_issues_repo_issues_event_max_datetime | string (nullable) | length 24 |
| max_forks_repo_path | string | length 3 to 972 |
| max_forks_repo_name | string | length 6 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 (nullable) | 1 to 105k |
| max_forks_repo_forks_event_min_datetime | string (nullable) | length 24 |
| max_forks_repo_forks_event_max_datetime | string (nullable) | length 24 |
| content | string | length 3 to 1.03M |
| avg_line_length | float64 | 1.13 to 941k |
| max_line_length | int64 | 2 to 941k |
| alphanum_fraction | float64 | 0 to 1 |

In the seven records below, the path, repo name, and head hexsha are identical across the stars/issues/forks column groups, so each record is shown as a single consolidated metadata row, followed by its `content` as a fenced code block and its line statistics.
| hexsha | size | ext / lang | path | repo @ head hexsha | licenses | stars | issues | forks |
|---|---|---|---|---|---|---|---|---|
| fcaf742a9bc077abb182556e6e68989c473b3011 | 4,951 | py / Python | tests/library/utils.py | arenadata/adcm @ a499caa30adc2a53e7b3f46c96a865f9e4079e4e | ["Apache-2.0"] | 16 (2019-11-28T18:05:21.000Z to 2021-12-08T18:09:18.000Z) | 1,127 (2019-11-29T08:57:25.000Z to 2022-03-31T20:21:32.000Z) | 10 (2019-11-28T18:05:06.000Z to 2022-01-13T06:16:40.000Z) |

```python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utils for ADCM tests"""
import time
import json
import random
from typing import Tuple
import requests
from adcm_pytest_plugin.plugin import parametrized_by_adcm_version
class RequestFailedException(Exception):
"""Request to ADCM API has status code >= 400"""
def get_action_by_name(client, cluster, name):
"""
Get action by name from some object
Args:
client: ADCM client API objects
cluster: cluster object
name: action name
Returns:
(action object): Action object by name
Raises:
:py:class:`ValueError`
If action is not found
"""
action_list = client.cluster.action.list(cluster_id=cluster['id'])
for action in action_list:
if action['name'] == name:
return action
raise ValueError(f"Action with name '{name}' is not found in cluster '{cluster}'")
def filter_action_by_name(actions, name):
"""
Filter action list by name and return filtered list
"""
return list(filter(lambda x: x['name'] == name, actions))
def get_random_service(client):
"""
Get random service object from ADCM
:param client: ADCM client API objects
"""
service_list = client.stack.service.list()
return random.choice(service_list)
def get_service_id_by_name(client, service_name: str) -> int:
"""
Get service id by name from ADCM
Args:
client: ADCM client API objects
service_name: service name
Returns:
(int): Service id by name
Raises:
:py:class:`ValueError`
If service is not found
"""
service_list = client.stack.service.list()
for service in service_list:
if service['name'] == service_name:
return service['id']
raise ValueError(f"Service with name '{service_name}' is not found")
def get_random_cluster_prototype(client) -> dict:
"""
Get random cluster prototype object from ADCM
:param client: ADCM client API objects
"""
return random.choice(client.stack.cluster.list())
def get_random_host_prototype(client) -> dict:
"""
Get random host prototype object from ADCM
:param client: ADCM client API objects
"""
return random.choice(client.stack.host.list())
def get_random_cluster_service_component(client, cluster, service) -> dict:
"""
Get random cluster service component
Args:
client: ADCM client API objects
cluster: some cluster object
service: some service object in cluster
Raises:
:py:class:`ValueError`
If service is not found
"""
components = client.cluster.service.component.list(cluster_id=cluster['id'], service_id=service['id'])
if components:
return random.choice(components)
    raise ValueError('Service has no components')
def get_host_by_fqdn(client, fqdn):
"""
Get host object by fqdn from ADCM
Args:
client: ADCM client API objects
fqdn: host's fqdn
Returns:
(host object): Host object by fqdn
Raises:
:py:class:`ValueError`
If host is not found
"""
host_list = client.host.list()
for host in host_list:
if host['fqdn'] == fqdn:
return host
raise ValueError(f"Host with fqdn '{fqdn}' is not found in a host list")
def wait_until(client, task, interval=1, timeout=30):
"""
Wait until task status becomes either success or failed
Args:
client: ADCM client API objects
task: some task for wait its status
interval: interval with which task status will be requested
timeout: time during which status success or failed is expected
"""
start = time.time()
while not (task['status'] == 'success' or task['status'] == 'failed') and time.time() - start < timeout:
time.sleep(interval)
task = client.task.read(task_id=task['id'])
def get_json_or_text(response: requests.Response):
"""
Try to get JSON or text if JSON can't be parsed from requests.Response.
Use this function to provide more info on ADCM API Error.
"""
try:
return response.json()
except json.JSONDecodeError:
return response.text
def previous_adcm_version_tag() -> Tuple[str, str]:
"""Get tag of previous ADCM version"""
return parametrized_by_adcm_version(adcm_min_version="2021.03.10")[0][-1]
```

avg_line_length: 26.475936 | max_line_length: 108 | alphanum_fraction: 0.66633
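The `wait_until` helper above is a plain polling loop; here is a minimal, self-contained sketch of the same pattern. `StubTaskClient` is a hypothetical stand-in for the real ADCM client, invented only so the snippet runs on its own.

```python
import time


class StubTaskClient:
    """Hypothetical task endpoint: reports 'running' twice, then 'success'."""

    def __init__(self):
        self._reads = 0

    def read(self, task_id):
        self._reads += 1
        return {'id': task_id, 'status': 'success' if self._reads >= 3 else 'running'}


def wait_until(client, task, interval=0.1, timeout=5):
    """Poll the task until it reaches a terminal state or the timeout elapses."""
    start = time.time()
    while task['status'] not in ('success', 'failed') and time.time() - start < timeout:
        time.sleep(interval)
        task = client.read(task_id=task['id'])
    return task


task = wait_until(StubTaskClient(), {'id': 1, 'status': 'running'})
assert task['status'] == 'success'
```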
| hexsha | size | ext / lang | path | repo @ head hexsha | licenses | stars | issues | forks |
|---|---|---|---|---|---|---|---|---|
| 0efad5e64d3fd0903b86eb2baedb60a7755a2ad4 | 3,335 | py / Python | 04_pages/bookstore_project/settings.py | tlanc007/DjangoForProfessionals_exercises @ e110e50d7f7e69bee7235f660622b435dae9011a | ["MIT"] | null | 13 (2020-06-05T22:39:44.000Z to 2022-02-10T13:05:24.000Z) | 1 (2020-12-17T09:53:09.000Z to 2020-12-17T09:53:09.000Z) |

```python
"""
Django settings for bookstore_project project.
Generated by 'django-admin startproject' using Django 2.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '$ode284gs@3x$0b++@5*q_1it@@0d#!x!cmsh*@&oj%8$le3xt'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'users.apps.UsersConfig',
'pages.apps.PagesConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'bookstore_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'bookstore_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'postgres',
'USER': 'postgres',
'PASSWORD': 'postgres',
'HOST': 'db',
'PORT': 5432
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
AUTH_USER_MODEL = 'users.CustomUser'
```

avg_line_length: 25.653846 | max_line_length: 91 | alphanum_fraction: 0.689655
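This settings module hardcodes `SECRET_KEY` and the Postgres credentials, which is acceptable for the book exercise it comes from but not for production. A common hardening, sketched below using only the standard library (the `DJANGO_*`/`POSTGRES_*` variable names are this sketch's choice, not the project's):

```python
import os

# Pull secrets from the environment; the fallbacks are for local development only.
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', 'insecure-dev-only-key')
DEBUG = os.environ.get('DJANGO_DEBUG', '0') == '1'

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': os.environ.get('POSTGRES_DB', 'postgres'),
        'USER': os.environ.get('POSTGRES_USER', 'postgres'),
        'PASSWORD': os.environ.get('POSTGRES_PASSWORD', ''),
        'HOST': os.environ.get('POSTGRES_HOST', 'db'),
        'PORT': int(os.environ.get('POSTGRES_PORT', '5432')),
    }
}
```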
| hexsha | size | ext / lang | path | repo @ head hexsha | licenses | stars | issues | forks |
|---|---|---|---|---|---|---|---|---|
| d94cffde872df39bb2e73232008c19d3ca620709 | 738 | py / Python | src/wai/common/meta/typing/_typing.py | waikato-datamining/wai-common @ bf3d7ae6e01bcb7ffe9f5c2b5d10a05908a68c34 | ["MIT"] | null | 8 (2020-07-01T02:11:31.000Z to 2020-12-17T01:57:17.000Z) | null |

```python
from typing import Tuple, Any, Dict, Callable, TypeVar
# The type of a standard *args parameter
POSITIONAL_ARGS_TYPE = Tuple[Any, ...]
# The type of a standard **kwargs parameter
KEYWORD_ARGS_TYPE = Dict[str, Any]
# The type of *args, **kwargs combined
VAR_ARGS_TYPE = Tuple[POSITIONAL_ARGS_TYPE, KEYWORD_ARGS_TYPE]
# The type of a callable (non-generic)
AnyCallable = Callable[[Any], Any]
# The generic type of arguments to a callable
ArgType = TypeVar("ArgType")
# The generic return type of a callable
ReturnType = TypeVar("ReturnType")
# The type of a generic callable
GenericCallable = Callable[[ArgType], ReturnType]
# The type of a generic decorator function
GenericDecorator = Callable[[GenericCallable], GenericCallable]
```

avg_line_length: 28.384615 | max_line_length: 63 | alphanum_fraction: 0.765583
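To see how the aliases compose, here is a self-contained sketch of a function typed as a `GenericDecorator`. Note that `Callable[[ArgType], ReturnType]` only describes single-argument callables, so the example keeps that shape; `timed` and `square` are invented for illustration.

```python
import time
from typing import Callable, TypeVar

ArgType = TypeVar("ArgType")
ReturnType = TypeVar("ReturnType")
GenericCallable = Callable[[ArgType], ReturnType]


def timed(f: GenericCallable) -> GenericCallable:
    """A GenericDecorator: wraps a one-argument callable without changing its shape."""
    def wrapper(arg):
        start = time.perf_counter()
        result = f(arg)
        print(f"{f.__name__} took {time.perf_counter() - start:.6f}s")
        return result
    return wrapper


@timed
def square(x: int) -> int:
    return x * x


assert square(4) == 16
```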
| hexsha | size | ext / lang | path | repo @ head hexsha | licenses | stars | issues | forks |
|---|---|---|---|---|---|---|---|---|
| b7cf49cb8ef94c62898a0de4ad2b8f61e1ec923f | 1,486 | py / Python | setup.py | gilsonbp/django-baton @ ec137e13c5639885b58ead3c2d19e851fd259ad7 | ["MIT"] | null | null | null |

```python
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-baton',
version='1.1.2',
packages=['baton', 'baton.autodiscover', 'baton.templatetags'],
include_package_data=True,
license='MIT License',
description='A cool, modern and responsive django admin application',
long_description=README,
url='http://github.com/otto-torino/django-baton',
author='abidibo',
author_email='abidibo@gmail.com',
install_requires=[
'google-api-python-client',
'oauth2client==1.5.2',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development',
'Topic :: Software Development :: User Interfaces',
]
)
```

avg_line_length: 34.55814 | max_line_length: 78 | alphanum_fraction: 0.631225
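Once a distribution built from this setup.py is installed, the metadata declared in the `setup()` call can be read back through the standard library. A small sketch, assuming `django-baton` is actually installed in the current environment:

```python
from importlib.metadata import metadata, version  # stdlib since Python 3.8

print(version("django-baton"))  # "1.1.2" for this exact setup.py
meta = metadata("django-baton")
print(meta["Summary"])          # the description= string above
print(meta["License"])          # "MIT License"
```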
| hexsha | size | ext / lang | path | repo @ head hexsha | licenses | stars | issues | forks |
|---|---|---|---|---|---|---|---|---|
| 9613e6fae2e0bb978bcc5d1156682300e2547ca4 | 8,971 | py / Python | compiler/modules/replica_column.py | marwaneltoukhy/OpenRAM @ ed9d32c7bc105db2a438d36d4b2d852152a79e3b | ["BSD-3-Clause"] | 1 (2020-11-21T05:37:53.000Z to 2020-11-21T05:37:53.000Z) | null | null |

```python
# See LICENSE for licensing information.
#
# Copyright (c) 2016-2019 Regents of the University of California
# All rights reserved.
#
import debug
import design
from tech import cell_properties
from sram_factory import factory
from vector import vector
from globals import OPTS
class replica_column(design.design):
"""
Generate a replica bitline column for the replica array.
    Rows is the total number of rows in the main array.
    Left_rbl and right_rbl are the number of left and right replica bitlines.
    Replica bit specifies which replica column this is (to determine where to
    put the replica cell).
"""
def __init__(self, name, rows, left_rbl, right_rbl, replica_bit,
column_offset=0):
design.design.__init__(self, name)
self.rows = rows
self.left_rbl = left_rbl
self.right_rbl = right_rbl
self.replica_bit = replica_bit
# left, right, regular rows plus top/bottom dummy cells
self.total_size = self.left_rbl + rows + self.right_rbl + 2
self.column_offset = column_offset
debug.check(replica_bit != 0 and replica_bit != rows,
"Replica bit cannot be the dummy row.")
debug.check(replica_bit <= left_rbl or replica_bit >= self.total_size - right_rbl - 1,
"Replica bit cannot be in the regular array.")
if OPTS.tech_name == "sky130":
debug.check(rows % 2 == 0 and (left_rbl + 1) % 2 == 0,
"sky130 currently requires rows to be even and to start with X mirroring"
+ " (left_rbl must be odd) for LVS.")
self.create_netlist()
if not OPTS.netlist_only:
self.create_layout()
def create_netlist(self):
self.add_modules()
self.add_pins()
self.create_instances()
def create_layout(self):
self.height = self.total_size * self.cell.height
self.width = self.cell.width
self.place_instances()
self.add_layout_pins()
self.add_boundary()
self.DRC_LVS()
def add_pins(self):
for bl_name in self.cell.get_all_bitline_names():
# In the replica column, these are only outputs!
self.add_pin("{0}_{1}".format(bl_name, 0), "OUTPUT")
for row in range(self.total_size):
for wl_name in self.cell.get_all_wl_names():
self.add_pin("{0}_{1}".format(wl_name, row), "INPUT")
self.add_pin("vdd", "POWER")
self.add_pin("gnd", "GROUND")
def add_modules(self):
self.replica_cell = factory.create(module_type="replica_{}".format(OPTS.bitcell))
self.add_mod(self.replica_cell)
self.dummy_cell = factory.create(module_type="dummy_{}".format(OPTS.bitcell))
self.add_mod(self.dummy_cell)
try:
edge_module_type = ("col_cap" if cell_properties.bitcell.end_caps else "dummy")
except AttributeError:
edge_module_type = "dummy"
self.edge_cell = factory.create(module_type=edge_module_type + "_" + OPTS.bitcell)
self.add_mod(self.edge_cell)
# Used for pin names only
self.cell = factory.create(module_type="bitcell")
def create_instances(self):
try:
end_caps_enabled = cell_properties.bitcell.end_caps
except AttributeError:
end_caps_enabled = False
self.cell_inst = {}
for row in range(self.total_size):
name="rbc_{0}".format(row)
# Top/bottom cell are always dummy cells.
# Regular array cells are replica cells (>left_rbl and <rows-right_rbl)
            # Replica bit specifies which other bit (in the full range (0, rows)) to make a replica cell.
if (row > self.left_rbl and row < self.total_size - self.right_rbl - 1):
self.cell_inst[row]=self.add_inst(name=name,
mod=self.replica_cell)
self.connect_inst(self.get_bitcell_pins(0, row))
elif row==self.replica_bit:
self.cell_inst[row]=self.add_inst(name=name,
mod=self.replica_cell)
self.connect_inst(self.get_bitcell_pins(0, row))
elif (row == 0 or row == self.total_size - 1):
self.cell_inst[row]=self.add_inst(name=name,
mod=self.edge_cell)
if end_caps_enabled:
self.connect_inst(self.get_bitcell_pins_col_cap(0, row))
else:
self.connect_inst(self.get_bitcell_pins(0, row))
else:
self.cell_inst[row]=self.add_inst(name=name,
mod=self.dummy_cell)
self.connect_inst(self.get_bitcell_pins(0, row))
def place_instances(self):
from tech import cell_properties
# Flip the mirrors if we have an odd number of replica+dummy rows at the bottom
# so that we will start with mirroring rather than not mirroring
        rbl_offset = (self.left_rbl + 1) % 2
# if our bitcells are mirrored on the y axis, check if we are in global
# column that needs to be flipped.
dir_y = False
xoffset = 0
if cell_properties.bitcell.mirror.y and self.column_offset % 2:
dir_y = True
xoffset = self.replica_cell.width
for row in range(self.total_size):
# name = "bit_r{0}_{1}".format(row, "rbl")
dir_x = cell_properties.bitcell.mirror.x and (row + rbl_offset) % 2
offset = vector(xoffset, self.cell.height * (row + (row + rbl_offset) % 2))
if dir_x and dir_y:
dir_key = "XY"
elif dir_x:
dir_key = "MX"
elif dir_y:
dir_key = "MY"
else:
dir_key = ""
self.cell_inst[row].place(offset=offset,
mirror=dir_key)
def add_layout_pins(self):
""" Add the layout pins """
for bl_name in self.cell.get_all_bitline_names():
bl_pin = self.cell_inst[0].get_pin(bl_name)
self.add_layout_pin(text=bl_name,
layer=bl_pin.layer,
offset=bl_pin.ll().scale(1, 0),
width=bl_pin.width(),
height=self.height)
try:
end_caps_enabled = cell_properties.bitcell.end_caps
except AttributeError:
end_caps_enabled = False
if end_caps_enabled:
row_range_max = self.total_size - 1
row_range_min = 1
else:
row_range_max = self.total_size
row_range_min = 0
for row in range(row_range_min, row_range_max):
for wl_name in self.cell.get_all_wl_names():
wl_pin = self.cell_inst[row].get_pin(wl_name)
self.add_layout_pin(text="{0}_{1}".format(wl_name, row),
layer=wl_pin.layer,
offset=wl_pin.ll().scale(0, 1),
width=self.width,
height=wl_pin.height())
# Supplies are only connected in the ends
for (index, inst) in self.cell_inst.items():
for pin_name in ["vdd", "gnd"]:
if inst in [self.cell_inst[0], self.cell_inst[self.total_size - 1]]:
self.copy_power_pins(inst, pin_name)
else:
self.copy_layout_pin(inst, pin_name)
def get_bitcell_pins(self, col, row):
""" Creates a list of connections in the bitcell,
indexed by column and row, for instance use in bitcell_array """
bitcell_pins = []
pin_names = self.cell.get_all_bitline_names()
for pin in pin_names:
bitcell_pins.append(pin + "_{0}".format(col))
pin_names = self.cell.get_all_wl_names()
for pin in pin_names:
bitcell_pins.append(pin + "_{0}".format(row))
bitcell_pins.append("vdd")
bitcell_pins.append("gnd")
return bitcell_pins
def get_bitcell_pins_col_cap(self, col, row):
""" Creates a list of connections in the bitcell,
indexed by column and row, for instance use in bitcell_array """
bitcell_pins = []
pin_names = self.cell.get_all_bitline_names()
for pin in pin_names:
bitcell_pins.append(pin + "_{0}".format(col))
bitcell_pins.append("vdd")
return bitcell_pins
def exclude_all_but_replica(self):
"""Excludes all bits except the replica cell (self.replica_bit)."""
for row, cell in self.cell_inst.items():
if row != self.replica_bit:
self.graph_inst_exclude.add(cell)
```

avg_line_length: 39.004348 | max_line_length: 102 | alphanum_fraction: 0.578085
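The mirroring bookkeeping in `place_instances` is dense, so here is a self-contained sketch that recomputes just the mirror-key arithmetic. It assumes `cell_properties.bitcell.mirror.x` is set (as the original requires before applying `dir_x`); `mirror_key` is a name invented for this illustration.

```python
def mirror_key(row: int, left_rbl: int, flip_column: bool) -> str:
    """Recompute the mirror key that place_instances() would assign to a row."""
    rbl_offset = (left_rbl + 1) % 2      # flip parity when an odd number of rows sits below the array
    dir_x = (row + rbl_offset) % 2 == 1  # alternate X mirroring row by row
    dir_y = flip_column                  # whole column mirrored on Y?
    if dir_x and dir_y:
        return "XY"
    if dir_x:
        return "MX"
    if dir_y:
        return "MY"
    return ""


# With one left replica bitline (left_rbl = 1), even rows are unmirrored:
assert [mirror_key(r, 1, False) for r in range(4)] == ["", "MX", "", "MX"]
# Flipping the whole column upgrades "" to "MY" and "MX" to "XY":
assert mirror_key(1, 1, True) == "XY"
```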
| hexsha | size | ext / lang | path | repo @ head hexsha | licenses | stars | issues | forks |
|---|---|---|---|---|---|---|---|---|
| 587618974c383405fd4c0fc2607f752cde2859ec | 460 | py / Python | smot/util.py | arendsee/sap @ c2dd2788c584d0d0924a5b21052c4e9042c2e94e | ["MIT"] | 4 (2021-12-09T17:14:08.000Z to 2022-02-16T19:09:53.000Z) | 2 (2022-02-15T14:09:59.000Z to 2022-02-21T20:07:45.000Z) | 3 (2022-01-24T12:23:31.000Z to 2022-02-16T18:40:55.000Z) |

```python
from __future__ import annotations
from typing import List, TypeVar, Optional
import sys
A = TypeVar("A")
def log(msg: str) -> None:
print(msg, file=sys.stderr)
def die(msg: str) -> None:
print(msg, file=sys.stderr)
sys.exit(1)
def concat(xss: List[List[A]]) -> List[A]:
ys: List[A]
ys = []
for xs in xss:
        ys.extend(xs)  # was 'ys + xs', which discarded the result and left ys empty
return ys
def rmNone(xs: List[Optional[A]]) -> List[A]:
return [x for x in xs if x is not None]
```

avg_line_length: 16.428571 | max_line_length: 45 | alphanum_fraction: 0.606522
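A quick sanity check of the two helpers (with the `extend` fix applied; the original `ys + xs` built a new list and discarded it, so `concat` always returned an empty list). This assumes the `smot` package is importable:

```python
from smot.util import concat, rmNone  # assumes the package above is installed

assert concat([[1, 2], [3], []]) == [1, 2, 3]
assert rmNone([1, None, 2, None]) == [1, 2]
```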
| hexsha | size | ext / lang | path | repo @ head hexsha | licenses | stars | issues | forks |
|---|---|---|---|---|---|---|---|---|
| 7d194bbab73ddee762569cdb2ae7dc7d55c62df1 | 50,456 | py / Python | test/functional/test_framework/test_framework.py | fivebalanceID/Fivebalance_V3 @ 353986eb40326fe0c66d9172ea2b0a925b31c734 | ["MIT"] | 7 (2019-06-01T01:36:35.000Z to 2020-11-12T11:15:57.000Z) | null | 1 (2021-05-20T05:38:23.000Z to 2021-05-20T05:38:23.000Z) |

```python
#!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Base class for RPC testing."""
from enum import Enum
from io import BytesIO
import logging
import optparse
import os
import pdb
import shutil
from struct import pack
import sys
import tempfile
import time
from . import coverage
from .address import wif_to_privkey
from .authproxy import JSONRPCException
from .blocktools import (
create_block,
create_coinbase_pos,
create_transaction_from_outpoint,
is_zerocoin,
)
from .key import CECKey
from .messages import (
COIN,
COutPoint,
CTransaction,
CTxIn,
CTxOut,
hash256,
)
from .script import (
CScript,
OP_CHECKSIG,
)
from .test_node import TestNode
from .util import (
MAX_NODES,
PortSeed,
assert_equal,
assert_greater_than,
check_json_precision,
connect_nodes,
connect_nodes_clique,
disconnect_nodes,
DEFAULT_FEE,
get_datadir_path,
hex_str_to_bytes,
bytes_to_hex_str,
initialize_datadir,
set_node_times,
SPORK_ACTIVATION_TIME,
SPORK_DEACTIVATION_TIME,
sync_blocks,
sync_mempools,
vZC_DENOMS,
)
class TestStatus(Enum):
PASSED = 1
FAILED = 2
SKIPPED = 3
TEST_EXIT_PASSED = 0
TEST_EXIT_FAILED = 1
TEST_EXIT_SKIPPED = 77
TMPDIR_PREFIX = "fivebalance_func_test_"
class FivebalanceTestFramework():
"""Base class for a fivebalance test script.
Individual fivebalance test scripts should subclass this class and override the set_test_params() and run_test() methods.
Individual tests can also override the following methods to customize the test setup:
- add_options()
- setup_chain()
- setup_network()
- setup_nodes()
The __init__() and main() methods should not be overridden.
This class also contains various public and private helper methods."""
def __init__(self):
"""Sets test framework defaults. Do not override this method. Instead, override the set_test_params() method"""
self.setup_clean_chain = False
self.nodes = []
self.mocktime = 0
self.supports_cli = False
self.set_test_params()
assert hasattr(self, "num_nodes"), "Test must set self.num_nodes in set_test_params()"
def main(self):
"""Main function. This should not be overridden by the subclass test scripts."""
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
help="Leave fivebalanceds and test.* datadir on exit or error")
parser.add_option("--noshutdown", dest="noshutdown", default=False, action="store_true",
help="Don't stop fivebalanceds after the test execution")
parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../../src"),
help="Source directory containing fivebalanced/fivebalance-cli (default: %default)")
parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"),
help="Directory for caching pregenerated datadirs")
parser.add_option("--tmpdir", dest="tmpdir", help="Root directory for datadirs")
parser.add_option("-l", "--loglevel", dest="loglevel", default="INFO",
help="log events at this level and higher to the console. Can be set to DEBUG, INFO, WARNING, ERROR or CRITICAL. Passing --loglevel DEBUG will output all logs to console. Note that logs at all levels are always written to the test_framework.log file in the temporary test directory.")
parser.add_option("--tracerpc", dest="trace_rpc", default=False, action="store_true",
help="Print out all RPC calls as they are made")
parser.add_option("--portseed", dest="port_seed", default=os.getpid(), type='int',
help="The seed to use for assigning port numbers (default: current process id)")
parser.add_option("--coveragedir", dest="coveragedir",
help="Write tested RPC commands into this directory")
parser.add_option("--configfile", dest="configfile",
help="Location of the test framework config file")
parser.add_option('--legacywallet', dest="legacywallet", default=False, action="store_true",
help='create pre-HD wallets only')
parser.add_option("--pdbonfailure", dest="pdbonfailure", default=False, action="store_true",
help="Attach a python debugger if test fails")
parser.add_option("--usecli", dest="usecli", default=False, action="store_true",
help="use fivebalance-cli instead of RPC for all commands")
self.add_options(parser)
(self.options, self.args) = parser.parse_args()
PortSeed.n = self.options.port_seed
os.environ['PATH'] = self.options.srcdir + ":" + self.options.srcdir + "/qt:" + os.environ['PATH']
check_json_precision()
self.options.cachedir = os.path.abspath(self.options.cachedir)
# Set up temp directory and start logging
if self.options.tmpdir:
self.options.tmpdir = os.path.abspath(self.options.tmpdir)
os.makedirs(self.options.tmpdir, exist_ok=False)
else:
self.options.tmpdir = tempfile.mkdtemp(prefix=TMPDIR_PREFIX)
self._start_logging()
success = TestStatus.FAILED
try:
if self.options.usecli and not self.supports_cli:
raise SkipTest("--usecli specified but test does not support using CLI")
self.setup_chain()
self.setup_network()
time.sleep(5)
self.run_test()
success = TestStatus.PASSED
except JSONRPCException as e:
self.log.exception("JSONRPC error")
except SkipTest as e:
self.log.warning("Test Skipped: %s" % e.message)
success = TestStatus.SKIPPED
except AssertionError as e:
self.log.exception("Assertion failed")
except KeyError as e:
self.log.exception("Key error")
except Exception as e:
self.log.exception("Unexpected exception caught during testing")
except KeyboardInterrupt as e:
self.log.warning("Exiting after keyboard interrupt")
if success == TestStatus.FAILED and self.options.pdbonfailure:
print("Testcase failed. Attaching python debugger. Enter ? for help")
pdb.set_trace()
if not self.options.noshutdown:
self.log.info("Stopping nodes")
if self.nodes:
self.stop_nodes()
else:
for node in self.nodes:
node.cleanup_on_exit = False
self.log.info("Note: fivebalanceds were not stopped and may still be running")
if not self.options.nocleanup and not self.options.noshutdown and success != TestStatus.FAILED:
self.log.info("Cleaning up")
shutil.rmtree(self.options.tmpdir)
else:
self.log.warning("Not cleaning up dir %s" % self.options.tmpdir)
if success == TestStatus.PASSED:
self.log.info("Tests successful")
exit_code = TEST_EXIT_PASSED
elif success == TestStatus.SKIPPED:
self.log.info("Test skipped")
exit_code = TEST_EXIT_SKIPPED
else:
self.log.error("Test failed. Test logging available at %s/test_framework.log", self.options.tmpdir)
self.log.error("Hint: Call {} '{}' to consolidate all logs".format(os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../combine_logs.py"), self.options.tmpdir))
exit_code = TEST_EXIT_FAILED
logging.shutdown()
sys.exit(exit_code)
# Methods to override in subclass test scripts.
def set_test_params(self):
"""Tests must this method to change default values for number of nodes, topology, etc"""
raise NotImplementedError
def add_options(self, parser):
"""Override this method to add command-line options to the test"""
pass
def setup_chain(self):
"""Override this method to customize blockchain setup"""
self.log.info("Initializing test directory " + self.options.tmpdir)
if self.setup_clean_chain:
self._initialize_chain_clean()
else:
self._initialize_chain()
def setup_network(self):
"""Override this method to customize test network topology"""
self.setup_nodes()
# Connect the nodes as a "chain". This allows us
# to split the network between nodes 1 and 2 to get
# two halves that can work on competing chains.
#
# Topology looks like this:
# node0 <-- node1 <-- node2 <-- node3
#
# If all nodes are in IBD (clean chain from genesis), node0 is assumed to be the source of blocks (miner). To
# ensure block propagation, all nodes will establish outgoing connections toward node0.
# See fPreferredDownload in net_processing.
#
# If further outbound connections are needed, they can be added at the beginning of the test with e.g.
# connect_nodes(self.nodes[1], 2)
for i in range(self.num_nodes - 1):
connect_nodes(self.nodes[i + 1], i)
self.sync_all()
def setup_nodes(self):
"""Override this method to customize test node setup"""
extra_args = None
if hasattr(self, "extra_args"):
extra_args = self.extra_args
self.add_nodes(self.num_nodes, extra_args)
self.start_nodes()
def run_test(self):
"""Tests must override this method to define test logic"""
raise NotImplementedError
# Public helper methods. These can be accessed by the subclass test scripts.
def add_nodes(self, num_nodes, extra_args=None, rpchost=None, timewait=None, binary=None):
"""Instantiate TestNode objects"""
if extra_args is None:
extra_args = [[]] * num_nodes
# Check wallet version
if self.options.legacywallet:
for arg in extra_args:
arg.append('-legacywallet')
self.log.info("Running test with legacy (pre-HD) wallet")
if binary is None:
binary = [None] * num_nodes
assert_equal(len(extra_args), num_nodes)
assert_equal(len(binary), num_nodes)
for i in range(num_nodes):
self.nodes.append(TestNode(i, self.options.tmpdir, extra_args[i], rpchost, timewait=timewait, binary=binary[i], stderr=None, mocktime=self.mocktime, coverage_dir=self.options.coveragedir, use_cli=self.options.usecli))
def start_node(self, i, *args, **kwargs):
"""Start a fivebalanced"""
node = self.nodes[i]
node.start(*args, **kwargs)
node.wait_for_rpc_connection()
time.sleep(10)
if self.options.coveragedir is not None:
coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
def start_nodes(self, extra_args=None, *args, **kwargs):
"""Start multiple fivebalanceds"""
if extra_args is None:
extra_args = [None] * self.num_nodes
assert_equal(len(extra_args), self.num_nodes)
try:
for i, node in enumerate(self.nodes):
node.start(extra_args[i], *args, **kwargs)
for node in self.nodes:
node.wait_for_rpc_connection()
except:
# If one node failed to start, stop the others
self.stop_nodes()
raise
time.sleep(10)
if self.options.coveragedir is not None:
for node in self.nodes:
coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
def stop_node(self, i):
"""Stop a fivebalanced test node"""
self.nodes[i].stop_node()
self.nodes[i].wait_until_stopped()
def stop_nodes(self):
"""Stop multiple fivebalanced test nodes"""
for node in self.nodes:
# Issue RPC to stop nodes
node.stop_node()
for node in self.nodes:
# Wait for nodes to stop
time.sleep(5)
node.wait_until_stopped()
def restart_node(self, i, extra_args=None):
"""Stop and start a test node"""
self.stop_node(i)
self.start_node(i, extra_args)
def assert_start_raises_init_error(self, i, extra_args=None, expected_msg=None, *args, **kwargs):
with tempfile.SpooledTemporaryFile(max_size=2**16) as log_stderr:
try:
self.start_node(i, extra_args, stderr=log_stderr, *args, **kwargs)
self.stop_node(i)
except Exception as e:
assert 'fivebalanced exited' in str(e) # node must have shutdown
self.nodes[i].running = False
self.nodes[i].process = None
if expected_msg is not None:
log_stderr.seek(0)
stderr = log_stderr.read().decode('utf-8')
if expected_msg not in stderr:
raise AssertionError("Expected error \"" + expected_msg + "\" not found in:\n" + stderr)
else:
if expected_msg is None:
assert_msg = "fivebalanced should have exited with an error"
else:
assert_msg = "fivebalanced should have exited with expected error " + expected_msg
raise AssertionError(assert_msg)
def wait_for_node_exit(self, i, timeout):
self.nodes[i].process.wait(timeout)
def split_network(self):
"""
Split the network of four nodes into nodes 0/1 and 2/3.
"""
disconnect_nodes(self.nodes[1], 2)
disconnect_nodes(self.nodes[2], 1)
self.sync_all([self.nodes[:2], self.nodes[2:]])
def join_network(self):
"""
Join the (previously split) network halves together.
"""
connect_nodes(self.nodes[1], 2)
self.sync_all()
def sync_all(self, node_groups=None):
if not node_groups:
node_groups = [self.nodes]
for group in node_groups:
sync_blocks(group)
sync_mempools(group)
def enable_mocktime(self):
"""Enable mocktime for the script.
mocktime may be needed for scripts that use the cached version of the
blockchain. If the cached version of the blockchain is used without
mocktime then the mempools will not sync due to IBD.
        Sets mocktime to Thursday, October 31, 2019 6:21:20 PM GMT (1572546080)
"""
self.mocktime = 1572546080
def disable_mocktime(self):
self.mocktime = 0
# Private helper methods. These should not be accessed by the subclass test scripts.
def _start_logging(self):
# Add logger and logging handlers
self.log = logging.getLogger('TestFramework')
self.log.setLevel(logging.DEBUG)
# Create file handler to log all messages
fh = logging.FileHandler(self.options.tmpdir + '/test_framework.log')
fh.setLevel(logging.DEBUG)
# Create console handler to log messages to stderr. By default this logs only error messages, but can be configured with --loglevel.
ch = logging.StreamHandler(sys.stdout)
# User can provide log level as a number or string (eg DEBUG). loglevel was caught as a string, so try to convert it to an int
ll = int(self.options.loglevel) if self.options.loglevel.isdigit() else self.options.loglevel.upper()
ch.setLevel(ll)
# Format logs the same as fivebalanced's debug.log with microprecision (so log files can be concatenated and sorted)
formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000 %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
formatter.converter = time.gmtime
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
self.log.addHandler(fh)
self.log.addHandler(ch)
if self.options.trace_rpc:
rpc_logger = logging.getLogger("BitcoinRPC")
rpc_logger.setLevel(logging.DEBUG)
rpc_handler = logging.StreamHandler(sys.stdout)
rpc_handler.setLevel(logging.DEBUG)
rpc_logger.addHandler(rpc_handler)
def _initialize_chain(self, toPosPhase=False):
"""Initialize a pre-mined blockchain for use by the test."""
def create_cachedir(cachedir):
if os.path.isdir(cachedir):
shutil.rmtree(cachedir)
os.makedirs(cachedir)
def copy_cachedir(origin, destination, num_nodes=MAX_NODES):
for i in range(num_nodes):
from_dir = get_datadir_path(origin, i)
to_dir = get_datadir_path(destination, i)
shutil.copytree(from_dir, to_dir)
initialize_datadir(destination, i) # Overwrite port/rpcport in fivebalance.conf
def clone_cache_from_node_1(cachedir, from_num=4):
""" Clones cache subdir from node 1 to nodes from 'from_num' to MAX_NODES"""
def copy_and_overwrite(from_path, to_path):
if os.path.exists(to_path):
shutil.rmtree(to_path)
shutil.copytree(from_path, to_path)
assert from_num < MAX_NODES
node_0_datadir = os.path.join(get_datadir_path(cachedir, 0), "regtest")
for i in range(from_num, MAX_NODES):
node_i_datadir = os.path.join(get_datadir_path(cachedir, i), "regtest")
for subdir in ["blocks", "chainstate", "sporks", "zerocoin"]:
copy_and_overwrite(os.path.join(node_0_datadir, subdir),
os.path.join(node_i_datadir, subdir))
initialize_datadir(cachedir, i) # Overwrite port/rpcport in fivebalance.conf
def cachedir_valid(cachedir):
for i in range(MAX_NODES):
if not os.path.isdir(get_datadir_path(cachedir, i)):
return False
# nodes directories exist. check if the first one has the .incomplete flagfile
return (not os.path.exists(os.path.join(get_datadir_path(cachedir, 0), ".incomplete")))
def clean_cache_subdir(cachedir):
os.remove(os.path.join(get_datadir_path(cachedir, 0), ".incomplete"))
def cache_path(n, *paths):
return os.path.join(get_datadir_path(cachedir, n), "regtest", *paths)
for i in range(MAX_NODES):
for entry in os.listdir(cache_path(i)):
if entry not in ['wallet.dat', 'chainstate', 'blocks', 'sporks', 'zerocoin', 'backups']:
os.remove(cache_path(i, entry))
def clean_cache_dir():
if os.path.isdir(self.options.cachedir):
# migrate old cache dir
if cachedir_valid(self.options.cachedir):
powcachedir = os.path.join(self.options.cachedir, "pow")
self.log.info("Found old cachedir. Migrating to %s" % str(powcachedir))
copy_cachedir(self.options.cachedir, powcachedir)
# remove everything except pow and pos subdirs
for entry in os.listdir(self.options.cachedir):
if entry not in ['pow', 'pos']:
entry_path = os.path.join(self.options.cachedir, entry)
if os.path.isfile(entry_path):
os.remove(entry_path)
elif os.path.isdir(entry_path):
shutil.rmtree(entry_path)
# no cachedir found
else:
os.makedirs(self.options.cachedir)
def start_nodes_from_dir(ddir, num_nodes=MAX_NODES):
self.log.info("Starting %d nodes..." % num_nodes)
for i in range(num_nodes):
datadir = initialize_datadir(ddir, i)
if i == 0:
# Add .incomplete flagfile
# (removed at the end during clean_cache_subdir)
open(os.path.join(datadir, ".incomplete"), 'a').close()
args = [os.getenv("BITCOIND", "fivebalanced"), "-spendzeroconfchange=1", "-server", "-keypool=1",
"-datadir=" + datadir, "-discover=0"]
self.nodes.append(
TestNode(i, ddir, extra_args=[], rpchost=None, timewait=None, binary=None, stderr=None,
mocktime=self.mocktime, coverage_dir=None))
self.nodes[i].args = args
self.start_node(i)
self.log.info("Node %d started." % i)
# Wait for RPC connections to be ready
self.log.info("Nodes started. Waiting for RPC connections...")
for node in range(4):
self.nodes[node].wait_for_rpc_connection()
self.log.info("Connecting nodes")
connect_nodes_clique(self.nodes)
def stop_and_clean_cache_dir(ddir):
self.stop_nodes()
self.nodes = []
# Copy cache for nodes 5 to MAX_NODES
self.log.info("Copying cache dir to non-started nodes")
clone_cache_from_node_1(ddir)
self.log.info("Cleaning up.")
clean_cache_subdir(ddir)
def generate_pow_cache():
### POW Cache ###
# Create a 200-block-long chain; each of the 4 first nodes
# gets 25 mature blocks and 25 immature.
# Note: To preserve compatibility with older versions of
# initialize_chain, only 4 nodes will generate coins.
#
# blocks are created with timestamps 1 minutes apart
# starting from 331 minutes in the past
# Create cache directories, run fivebalanceds:
create_cachedir(powcachedir)
self.log.info("Creating 'PoW-chain': 200 blocks")
start_nodes_from_dir(powcachedir, 4)
# Mine the blocks
self.log.info("Mining 200 blocks")
self.enable_mocktime()
block_time = self.mocktime - (331 * 60)
for i in range(2):
for peer in range(4):
for j in range(25):
set_node_times(self.nodes, block_time)
self.nodes[peer].generate(1)
block_time += 60
# Must sync before next peer starts generating blocks
sync_blocks(self.nodes)
# Shut them down, and clean up cache directories:
self.log.info("Stopping nodes")
stop_and_clean_cache_dir(powcachedir)
self.log.info("---> pow cache created")
self.disable_mocktime()
assert self.num_nodes <= MAX_NODES
clean_cache_dir()
powcachedir = os.path.join(self.options.cachedir, "pow")
is_powcache_valid = cachedir_valid(powcachedir)
poscachedir = os.path.join(self.options.cachedir, "pos")
is_poscache_valid = cachedir_valid(poscachedir)
if not toPosPhase and not is_powcache_valid:
self.log.info("PoW-CACHE NOT FOUND or INVALID.")
self.log.info("Creating new cached blockchain data.")
generate_pow_cache()
elif toPosPhase and not is_poscache_valid:
self.log.info("PoS-CACHE NOT FOUND or INVALID.")
self.log.info("Creating new cached blockchain data.")
# check if first 200 blocks (pow cache) is present. if not generate it.
if not is_powcache_valid:
self.log.info("PoW-CACHE NOT FOUND or INVALID. Generating it first.")
generate_pow_cache()
self.enable_mocktime()
block_time = self.mocktime - (131 * 60)
### POS Cache ###
# Create a 330-block-long chain
# First 200 PoW blocks are copied from PoW chain.
# The next 48 PoW blocks are mined in 12-blocks bursts by the first 4 nodes.
# The last 2 PoW blocks are then mined by the last node (Node 3).
# Then 80 PoS blocks are generated in 20-blocks bursts by the first 4 nodes.
#
        # - Node 0 and node 1 get 62 mature blocks (pow) + 20 immature (pos)
# 42 rewards spendable (62 mature blocks - 20 spent rewards)
        # - Node 2 gets 56 mature blocks (pow) + 26 immature (6 pow + 20 pos)
# 35 rewards spendable (55 mature blocks - 20 spent rewards)
        # - Node 3 gets 50 mature blocks (pow) + 34 immature (14 pow + 20 pos)
# 30 rewards spendable (50 mature blocks - 20 spent rewards)
# - Nodes 2 and 3 mint one zerocoin for each denom (tot 6666 FBN) on block 301/302
# 8 mature zc + 8/3 rewards spendable (35/30 - 27 spent) + change 83.92
#
# Block 331-336 will mature last 6 pow blocks mined by node 2.
# Then 337-350 will mature last 14 pow blocks mined by node 3.
# Then staked blocks start maturing at height 351.
# Create cache directories, run fivebalanceds:
create_cachedir(poscachedir)
self.log.info("Creating 'PoS-chain': 330 blocks")
self.log.info("Copying 200 initial blocks from pow cache")
copy_cachedir(powcachedir, poscachedir)
# Change datadir and restart the nodes (only 4 of them)
start_nodes_from_dir(poscachedir, 4)
# Mine 50 more blocks to reach PoS start.
self.log.info("Mining 50 more blocks to reach PoS phase")
for peer in range(4):
for j in range(12):
set_node_times(self.nodes, block_time)
self.nodes[peer].generate(1)
block_time += 60
# Must sync before next peer starts generating blocks
if peer < 3:
sync_blocks(self.nodes)
set_node_times(self.nodes, block_time)
self.nodes[3].generate(2)
block_time += 60
sync_blocks(self.nodes)
# Then stake 80 blocks.
self.log.info("Staking 80 blocks...")
nBlocks = 250
res = [] # used to save the two txids for change outputs of mints (locked)
for peer in range(4):
for j in range(20):
# Stake block
block_time = self.generate_pos(peer, block_time)
nBlocks += 1
# Mint zerocoins with node-2 at block 301 and with node-3 at block 302
if nBlocks == 301 or nBlocks == 302:
                    # mints 8 zerocoins, one for each denom (tot 6666 FBN), fee = 0.01 * 8
# consumes 27 utxos (tot 6750 FBN), change = 6750 - 6666 - fee
res.append(self.nodes[nBlocks-299].mintzerocoin(6666))
self.sync_all()
# lock the change output (so it's not used as stake input in generate_pos)
assert (self.nodes[nBlocks-299].lockunspent(False, [{"txid": res[-1]['txid'], "vout": 8}]))
# Must sync before next peer starts generating blocks
sync_blocks(self.nodes)
time.sleep(1)
self.log.info("80 blocks staked")
# Unlock previously locked change outputs
for i in [2, 3]:
assert (self.nodes[i].lockunspent(True, [{"txid": res[i-2]['txid'], "vout": 8}]))
# Verify height and balances
self.test_PoS_chain_balances()
# Shut nodes down, and clean up cache directories:
self.log.info("Stopping nodes")
stop_and_clean_cache_dir(poscachedir)
self.log.info("--> pos cache created")
self.disable_mocktime()
else:
self.log.info("CACHE FOUND.")
# Copy requested cache to tempdir
if toPosPhase:
self.log.info("Copying datadir from %s to %s" % (poscachedir, self.options.tmpdir))
copy_cachedir(poscachedir, self.options.tmpdir, self.num_nodes)
else:
self.log.info("Copying datadir from %s to %s" % (powcachedir, self.options.tmpdir))
copy_cachedir(powcachedir, self.options.tmpdir, self.num_nodes)
def _initialize_chain_clean(self):
"""Initialize empty blockchain for use by the test.
Create an empty blockchain and num_nodes wallets.
Useful if a test case wants complete control over initialization."""
for i in range(self.num_nodes):
initialize_datadir(self.options.tmpdir, i)
### FIVEBALANCE Specific TestFramework ###
###################################
def init_dummy_key(self):
self.DUMMY_KEY = CECKey()
self.DUMMY_KEY.set_secretbytes(hash256(pack('<I', 0xffff)))
def test_PoS_chain_balances(self):
from .util import DecimalAmt
# 330 blocks
# - Nodes 0 and 1 get 82 blocks:
# 62 pow + 20 pos (20 immature)
# - Nodes 2 gets 82 blocks:
# 62 pow + 20 pos (26 immature)
# - Nodes 3 gets 84 blocks:
# 64 pow + 20 pos (34 immature)
# - Nodes 2 and 3 have 6666 FBN worth of zerocoins
zc_tot = sum(vZC_DENOMS)
zc_fee = len(vZC_DENOMS) * 0.01
used_utxos = (zc_tot // 250) + 1
zc_change = 250 * used_utxos - zc_tot - zc_fee
# check at least 1 node and at most 5
num_nodes = min(5, len(self.nodes))
assert_greater_than(num_nodes, 0)
# each node has the same height and tip
best_block = self.nodes[0].getbestblockhash()
for i in range(num_nodes):
assert_equal(self.nodes[i].getblockcount(), 330)
if i > 0:
assert_equal(self.nodes[i].getbestblockhash(), best_block)
# balance is mature pow blocks rewards minus stake inputs (spent)
w_info = [self.nodes[i].getwalletinfo() for i in range(num_nodes)]
assert_equal(w_info[0]["balance"], DecimalAmt(250.0 * (62 - 20)))
assert_equal(w_info[1]["balance"], DecimalAmt(250.0 * (62 - 20)))
assert_equal(w_info[2]["balance"], DecimalAmt(250.0 * (56 - 20) - (used_utxos * 250) + zc_change))
assert_equal(w_info[3]["balance"], DecimalAmt(250.0 * (50 - 20) - (used_utxos * 250) + zc_change))
for i in range(4, num_nodes):
# only first 4 nodes have mined/staked
assert_equal(w_info[i]["balance"], DecimalAmt(0))
# immature balance is immature pow blocks rewards plus
# immature stakes (outputs=inputs+rewards)
assert_equal(w_info[0]["immature_balance"], DecimalAmt(500.0 * 20))
assert_equal(w_info[1]["immature_balance"], DecimalAmt(500.0 * 20))
assert_equal(w_info[2]["immature_balance"], DecimalAmt((250.0 * 6) + (500.0 * 20)))
assert_equal(w_info[3]["immature_balance"], DecimalAmt((250.0 * 14) + (500.0 * 20)))
for i in range(4, num_nodes):
# only first 4 nodes have mined/staked
assert_equal(w_info[i]["immature_balance"], DecimalAmt(0))
# check zerocoin balances / mints
for peer in [2, 3]:
if num_nodes > peer:
zcBalance = self.nodes[peer].getzerocoinbalance()
zclist = self.nodes[peer].listmintedzerocoins(True)
zclist_spendable = self.nodes[peer].listmintedzerocoins(True, True)
assert_equal(len(zclist), len(vZC_DENOMS))
assert_equal(zcBalance['Total'], 6666)
assert_equal(zcBalance['Immature'], 0)
if peer == 2:
assert_equal(len(zclist), len(zclist_spendable))
assert_equal(set([x['denomination'] for x in zclist]), set(vZC_DENOMS))
assert_equal([x['confirmations'] for x in zclist], [30-peer] * len(vZC_DENOMS))
self.log.info("Balances of first %d nodes check out" % num_nodes)
def get_prevouts(self, node_id, utxo_list, zpos=False, nHeight=-1):
""" get prevouts (map) for each utxo in a list
:param node_id: (int) index of the CTestNode used as rpc connection. Must own the utxos.
utxo_list: <if zpos=False> (JSON list) utxos returned from listunspent used as input
<if zpos=True> (JSON list) mints returned from listmintedzerocoins used as input
zpos: (bool) type of utxo_list
nHeight: (int) height of the previous block. used only if zpos=True for
stake checksum. Optional, if not provided rpc_conn's height is used.
:return: prevouts: ({bytes --> (int, bytes, int)} dictionary)
maps CStake "uniqueness" (i.e. serialized COutPoint -or hash stake, for zfbn-)
to (amount, prevScript, timeBlockFrom).
For zfbn prevScript is replaced with serialHash hex string.
"""
assert_greater_than(len(self.nodes), node_id)
rpc_conn = self.nodes[node_id]
prevouts = {}
for utxo in utxo_list:
if not zpos:
outPoint = COutPoint(int(utxo['txid'], 16), utxo['vout'])
outValue = int(utxo['amount']) * COIN
prevtx_json = rpc_conn.getrawtransaction(utxo['txid'], 1)
prevTx = CTransaction()
prevTx.deserialize(BytesIO(hex_str_to_bytes(prevtx_json['hex'])))
if (prevTx.is_coinbase() or prevTx.is_coinstake()) and utxo['confirmations'] < 100:
# skip immature coins
continue
prevScript = prevtx_json['vout'][utxo['vout']]['scriptPubKey']['hex']
prevTime = prevtx_json['blocktime']
prevouts[outPoint.serialize_uniqueness()] = (outValue, prevScript, prevTime)
else:
uniqueness = bytes.fromhex(utxo['hash stake'])[::-1]
prevouts[uniqueness] = (int(utxo["denomination"]) * COIN, utxo["serial hash"], 0)
return prevouts
def make_txes(self, node_id, spendingPrevOuts, to_pubKey):
""" makes a list of CTransactions each spending an input from spending PrevOuts to an output to_pubKey
:param node_id: (int) index of the CTestNode used as rpc connection. Must own spendingPrevOuts.
spendingPrevouts: ({bytes --> (int, bytes, int)} dictionary)
maps CStake "uniqueness" (i.e. serialized COutPoint -or hash stake, for zfbn-)
to (amount, prevScript, timeBlockFrom).
For zfbn prevScript is replaced with serialHash hex string.
to_pubKey (bytes) recipient public key
:return: block_txes: ([CTransaction] list)
"""
assert_greater_than(len(self.nodes), node_id)
rpc_conn = self.nodes[node_id]
block_txes = []
for uniqueness in spendingPrevOuts:
if is_zerocoin(uniqueness):
# spend zFBN
_, serialHash, _ = spendingPrevOuts[uniqueness]
raw_spend = rpc_conn.createrawzerocoinspend(serialHash, "", False)
else:
# spend FBN
value_out = int(spendingPrevOuts[uniqueness][0] - DEFAULT_FEE * COIN)
scriptPubKey = CScript([to_pubKey, OP_CHECKSIG])
prevout = COutPoint()
prevout.deserialize_uniqueness(BytesIO(uniqueness))
tx = create_transaction_from_outpoint(prevout, b"", value_out, scriptPubKey)
# sign tx
raw_spend = rpc_conn.signrawtransaction(bytes_to_hex_str(tx.serialize()))['hex']
# add signed tx to the list
signed_tx = CTransaction()
signed_tx.from_hex(raw_spend)
block_txes.append(signed_tx)
return block_txes
def stake_block(self, node_id,
nHeight,
                    prevHash,
prevModifier,
stakeableUtxos,
startTime=None,
privKeyWIF=None,
vtx=[],
fDoubleSpend=False):
""" manually stakes a block selecting the coinstake input from a list of candidates
:param node_id: (int) index of the CTestNode used as rpc connection. Must own stakeableUtxos.
nHeight: (int) height of the block being produced
prevHash: (string) hex string of the previous block hash
prevModifier (string) hex string of the previous block stake modifier
stakeableUtxos: ({bytes --> (int, bytes, int)} dictionary)
maps CStake "uniqueness" (i.e. serialized COutPoint -or hash stake, for zfbn-)
to (amount, prevScript, timeBlockFrom).
For zfbn prevScript is replaced with serialHash hex string.
startTime: (int) epoch time to be used as blocktime (iterated in solve_stake)
privKeyWIF: (string) private key to be used for staking/signing
                           If an empty string, the pk from the stake input will be used
(dumping the sk from rpc_conn). If None, then the DUMMY_KEY will be used.
vtx: ([CTransaction] list) transactions to add to block.vtx
               fDoubleSpend: (bool) whether any tx in vtx is allowed to spend the coinstake input
:return: block: (CBlock) block produced, must be manually relayed
"""
assert_greater_than(len(self.nodes), node_id)
rpc_conn = self.nodes[node_id]
if not len(stakeableUtxos) > 0:
raise Exception("Need at least one stakeable utxo to stake a block!")
# Get start time to stake
if startTime is None:
startTime = time.time()
# Create empty block with coinbase
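        # (masking with 0xfffffff0 rounds nTime down to a multiple of 16 seconds)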
nTime = int(startTime) & 0xfffffff0
coinbaseTx = create_coinbase_pos(nHeight)
        block = create_block(int(prevHash, 16), coinbaseTx, nTime)
# Find valid kernel hash - iterates stakeableUtxos, then block.nTime
block.solve_stake(stakeableUtxos, int(prevModifier, 16))
# Check if this is a zPoS block or regular/cold stake - sign stake tx
block_sig_key = CECKey()
isZPoS = is_zerocoin(block.prevoutStake)
if isZPoS:
# !TODO: remove me
raise Exception("zPOS tests discontinued")
else:
coinstakeTx_unsigned = CTransaction()
prevout = COutPoint()
prevout.deserialize_uniqueness(BytesIO(block.prevoutStake))
coinstakeTx_unsigned.vin.append(CTxIn(prevout, b"", 0xffffffff))
coinstakeTx_unsigned.vout.append(CTxOut())
amount, prevScript, _ = stakeableUtxos[block.prevoutStake]
outNValue = int(amount + 250 * COIN)
coinstakeTx_unsigned.vout.append(CTxOut(outNValue, hex_str_to_bytes(prevScript)))
if privKeyWIF == "":
# Use dummy key
if not hasattr(self, 'DUMMY_KEY'):
self.init_dummy_key()
block_sig_key = self.DUMMY_KEY
# replace coinstake output script
coinstakeTx_unsigned.vout[1].scriptPubKey = CScript([block_sig_key.get_pubkey(), OP_CHECKSIG])
else:
                if privKeyWIF is None:
# Use pk of the input. Ask sk from rpc_conn
rawtx = rpc_conn.getrawtransaction('{:064x}'.format(prevout.hash), True)
privKeyWIF = rpc_conn.dumpprivkey(rawtx["vout"][prevout.n]["scriptPubKey"]["addresses"][0])
# Use the provided privKeyWIF (cold staking).
# export the corresponding private key to sign block
privKey, compressed = wif_to_privkey(privKeyWIF)
block_sig_key.set_compressed(compressed)
block_sig_key.set_secretbytes(bytes.fromhex(privKey))
# Sign coinstake TX and add it to the block
stake_tx_signed_raw_hex = rpc_conn.signrawtransaction(
bytes_to_hex_str(coinstakeTx_unsigned.serialize()))['hex']
# Add coinstake to the block
coinstakeTx = CTransaction()
coinstakeTx.from_hex(stake_tx_signed_raw_hex)
block.vtx.append(coinstakeTx)
# Add provided transactions to the block.
# Don't add tx doublespending the coinstake input, unless fDoubleSpend=True
for tx in vtx:
if not fDoubleSpend:
# assume txes don't double spend zFBN inputs when fDoubleSpend is false. It needs to
# be checked outside until a convenient tx.spends(zerocoin) is added to the framework.
if not isZPoS and tx.spends(prevout):
continue
block.vtx.append(tx)
# Get correct MerkleRoot and rehash block
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
# sign block with block signing key and return it
block.sign_block(block_sig_key)
return block
def stake_next_block(self, node_id,
stakeableUtxos,
btime=None,
privKeyWIF=None,
vtx=[],
fDoubleSpend=False):
""" Calls stake_block appending to the current tip"""
assert_greater_than(len(self.nodes), node_id)
nHeight = self.nodes[node_id].getblockcount()
        prevHash = self.nodes[node_id].getblockhash(nHeight)
        prevModifier = self.nodes[node_id].getblock(prevHash)['stakeModifier']
return self.stake_block(node_id,
nHeight+1,
                                prevHash,
prevModifier,
stakeableUtxos,
btime,
privKeyWIF,
vtx,
fDoubleSpend)
def check_tx_in_chain(self, node_id, txid):
assert_greater_than(len(self.nodes), node_id)
rawTx = self.nodes[node_id].getrawtransaction(txid, 1)
assert_greater_than(rawTx["confirmations"], 0)
def spend_inputs(self, node_id, inputs, outputs):
""" auxiliary function used by spend_utxo / spend_utxos """
assert_greater_than(len(self.nodes), node_id)
rpc_conn = self.nodes[node_id]
spendingTx = rpc_conn.createrawtransaction(inputs, outputs)
spendingTx_signed = rpc_conn.signrawtransaction(spendingTx)
if spendingTx_signed["complete"]:
txhash = rpc_conn.sendrawtransaction(spendingTx_signed["hex"])
return txhash
else:
return ""
def spend_utxo(self, node_id, utxo, recipient=''):
""" spend amount from previously unspent output to a provided address
:param node_id: (int) index of the CTestNode used as rpc connection. Must own the utxo.
utxo: (JSON) returned from listunspent used as input
recipient: (string) destination address (new one if not provided)
:return: txhash: (string) tx hash if successful, empty string otherwise
"""
assert_greater_than(len(self.nodes), node_id)
rpc_conn = self.nodes[node_id]
inputs = [{"txid": utxo["txid"], "vout": utxo["vout"]}]
out_amount = float(utxo["amount"]) - DEFAULT_FEE
outputs = {}
if recipient == '':
recipient = rpc_conn.getnewaddress()
outputs[recipient] = out_amount
return self.spend_inputs(node_id, inputs, outputs)
def spend_utxos(self, node_id, utxo_list, recipient='', fMultiple=False):
""" spend utxos to provided list of addresses or 10 new generate ones.
:param node_id: (int) index of the CTestNode used as rpc connection. Must own the utxo.
utxo_list: (JSON list) returned from listunspent used as input
recipient: (string, optional) destination address (new one if not provided)
fMultiple: (boolean, optional, default=false) spend each utxo on a different tx
:return: txHashes: (string list) list of hashes of completed txs
"""
assert_greater_than(len(self.nodes), node_id)
rpc_conn = self.nodes[node_id]
txHashes = []
# If no recipient is given, create a new one
if recipient == '':
recipient = rpc_conn.getnewaddress()
# If fMultiple=True send one tx for each utxo
if fMultiple:
for utxo in utxo_list:
txHash = self.spend_utxo(node_id, utxo, recipient)
if txHash != "":
txHashes.append(txHash)
# Otherwise make a single tx with all the inputs
else:
inputs = [{"txid": x["txid"], "vout": x["vout"]} for x in utxo_list]
out_amount = sum([float(x["amount"]) for x in utxo_list]) - DEFAULT_FEE
            # recipient is already guaranteed non-empty at this point
            outputs = {recipient: out_amount}
txHash = self.spend_inputs(node_id, inputs, outputs)
if txHash != "":
txHashes.append(txHash)
return txHashes
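    # Minimal usage sketch for the spend helpers above (a sketch, assuming
    # node 0 owns spendable outputs; listunspent is the standard wallet RPC):
    #
    #   utxos = self.nodes[0].listunspent()
    #   txid = self.spend_utxo(0, utxos[0])                       # single spend
    #   txids = self.spend_utxos(0, utxos[1:3], fMultiple=True)   # one tx each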
def generate_pos(self, node_id, btime=None):
""" stakes a block using generate on nodes[node_id]"""
assert_greater_than(len(self.nodes), node_id)
rpc_conn = self.nodes[node_id]
ss = rpc_conn.getstakingstatus()
assert ss["walletunlocked"]
assert ss["stakeablecoins"] > 0
assert ss["stakingbalance"] > 0.0
if btime is not None:
next_btime = btime + 60
fStaked = False
failures = 0
while not fStaked:
try:
rpc_conn.generate(1)
fStaked = True
except JSONRPCException as e:
if ("Couldn't create new block" in str(e)):
failures += 1
# couldn't generate block. check that this node can still stake (after 60 failures)
if failures > 60:
ss = rpc_conn.getstakingstatus()
if not (ss["walletunlocked"] and ss["stakeablecoins"] > 0 and ss["stakingbalance"] > 0.0):
raise AssertionError("Node %d unable to stake!" % node_id)
# try to stake one sec in the future
if btime is not None:
btime += 1
set_node_times(self.nodes, btime)
else:
time.sleep(1)
                else:
                    raise
# block generated. adjust block time
if btime is not None:
btime = max(btime + 1, next_btime)
set_node_times(self.nodes, btime)
return btime
else:
return None
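    # Minimal usage sketch (btime handling as implemented above: pass a mock
    # block time to keep node clocks in sync, or None to use real time;
    # self.mocktime is assumed to be set by the calling test):
    #
    #   btime = self.mocktime
    #   for _ in range(3):
    #       btime = self.generate_pos(0, btime)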
def generate_pow(self, node_id, btime=None):
""" stakes a block using generate on nodes[node_id]"""
assert_greater_than(len(self.nodes), node_id)
self.nodes[node_id].generate(1)
if btime is not None:
btime += 60
set_node_times(self.nodes, btime)
return btime
def set_spork(self, node_id, sporkName, value):
assert_greater_than(len(self.nodes), node_id)
return self.nodes[node_id].spork(sporkName, value)
def get_spork(self, node_id, sporkName):
assert_greater_than(len(self.nodes), node_id)
return self.nodes[node_id].spork("show")[sporkName]
def activate_spork(self, node_id, sporkName):
return self.set_spork(node_id, sporkName, SPORK_ACTIVATION_TIME)
def deactivate_spork(self, node_id, sporkName):
return self.set_spork(node_id, sporkName, SPORK_DEACTIVATION_TIME)
def is_spork_active(self, node_id, sporkName):
assert_greater_than(len(self.nodes), node_id)
return self.nodes[node_id].spork("active")[sporkName]
### ------------------------------------------------------
class ComparisonTestFramework(FivebalanceTestFramework):
"""Test framework for doing p2p comparison testing
Sets up some fivebalanced binaries:
- 1 binary: test binary
- 2 binaries: 1 test binary, 1 ref binary
- n>2 binaries: 1 test binary, n-1 ref binaries"""
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def add_options(self, parser):
parser.add_option("--testbinary", dest="testbinary",
default=os.getenv("BITCOIND", "fivebalanced"),
help="fivebalanced binary to test")
parser.add_option("--refbinary", dest="refbinary",
default=os.getenv("BITCOIND", "fivebalanced"),
help="fivebalanced binary to use for reference nodes (if any)")
def setup_network(self):
extra_args = [['-whitelist=127.0.0.1']] * self.num_nodes
if hasattr(self, "extra_args"):
extra_args = self.extra_args
self.add_nodes(self.num_nodes, extra_args,
binary=[self.options.testbinary] +
[self.options.refbinary] * (self.num_nodes - 1))
self.start_nodes()
class SkipTest(Exception):
"""This exception is raised to skip a test"""
def __init__(self, message):
self.message = message
| 44.533098
| 310
| 0.594756
|
fdcb6411e0b3644ceb5d8907af86983ac1dc1e60
| 666
|
py
|
Python
|
manage.py
|
yashpatel7025/django-email-service-AUS
|
1a584c6e3c4df4264b18c05060553f9a80ea9b96
|
[
"MIT"
] | null | null | null |
manage.py
|
yashpatel7025/django-email-service-AUS
|
1a584c6e3c4df4264b18c05060553f9a80ea9b96
|
[
"MIT"
] | null | null | null |
manage.py
|
yashpatel7025/django-email-service-AUS
|
1a584c6e3c4df4264b18c05060553f9a80ea9b96
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_AUS.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
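# Typical invocations (standard Django management commands; the settings
# module used is the one set above):
#   python manage.py migrate
#   python manage.py runserver
#   python manage.py createsuperuser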
| 28.956522
| 74
| 0.68018
|
de8bbcecc8ab616db10291fe5e0d6316229b8416
| 4,905
|
py
|
Python
|
tests/test_infer.py
|
lnielsen/pyhf
|
3d98dc445c384d2919a77b9af0a202e12343a707
|
[
"Apache-2.0"
] | null | null | null |
tests/test_infer.py
|
lnielsen/pyhf
|
3d98dc445c384d2919a77b9af0a202e12343a707
|
[
"Apache-2.0"
] | null | null | null |
tests/test_infer.py
|
lnielsen/pyhf
|
3d98dc445c384d2919a77b9af0a202e12343a707
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import pyhf
import numpy as np
@pytest.fixture(scope='module')
def hypotest_args():
pdf = pyhf.simplemodels.hepdata_like(
signal_data=[12.0, 11.0], bkg_data=[50.0, 52.0], bkg_uncerts=[3.0, 7.0]
)
mu_test = 1.0
data = [51, 48] + pdf.config.auxdata
return mu_test, data, pdf
def check_uniform_type(in_list):
return all(
[isinstance(item, type(pyhf.tensorlib.astensor(item))) for item in in_list]
)
def test_hypotest_default(tmpdir, hypotest_args):
"""
Check that the default return structure of pyhf.infer.hypotest is as expected
"""
tb = pyhf.tensorlib
kwargs = {}
result = pyhf.infer.hypotest(*hypotest_args, **kwargs)
# CLs_obs
assert len(list(result)) == 1
assert isinstance(result, type(tb.astensor(result)))
def test_hypotest_return_tail_probs(tmpdir, hypotest_args):
"""
Check that the return structure of pyhf.infer.hypotest with the
return_tail_probs keyword arg is as expected
"""
tb = pyhf.tensorlib
kwargs = {'return_tail_probs': True}
result = pyhf.infer.hypotest(*hypotest_args, **kwargs)
# CLs_obs, [CL_sb, CL_b]
assert len(list(result)) == 2
assert isinstance(result[0], type(tb.astensor(result[0])))
assert len(result[1]) == 2
assert check_uniform_type(result[1])
def test_hypotest_return_expected(tmpdir, hypotest_args):
"""
Check that the return structure of pyhf.infer.hypotest with the
    addition of the return_expected keyword arg is as expected
"""
tb = pyhf.tensorlib
kwargs = {'return_tail_probs': True, 'return_expected': True}
result = pyhf.infer.hypotest(*hypotest_args, **kwargs)
# CLs_obs, [CLsb, CLb], CLs_exp
assert len(list(result)) == 3
assert isinstance(result[0], type(tb.astensor(result[0])))
assert len(result[1]) == 2
assert check_uniform_type(result[1])
assert isinstance(result[2], type(tb.astensor(result[2])))
def test_hypotest_return_expected_set(tmpdir, hypotest_args):
"""
Check that the return structure of pyhf.infer.hypotest with the
    addition of the return_expected_set keyword arg is as expected
"""
tb = pyhf.tensorlib
kwargs = {
'return_tail_probs': True,
'return_expected': True,
'return_expected_set': True,
}
result = pyhf.infer.hypotest(*hypotest_args, **kwargs)
# CLs_obs, [CLsb, CLb], CLs_exp, CLs_exp @[-2, -1, 0, +1, +2]sigma
assert len(list(result)) == 4
assert isinstance(result[0], type(tb.astensor(result[0])))
assert len(result[1]) == 2
assert check_uniform_type(result[1])
assert isinstance(result[2], type(tb.astensor(result[2])))
assert len(result[3]) == 5
assert check_uniform_type(result[3])
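# A consolidated sketch of unpacking the richest return structure exercised
# above (same fixture arguments as the tests; nothing new is asserted here):
#
#   mu_test, data, pdf = hypotest_args
#   CLs_obs, (CLsb, CLb), CLs_exp, CLs_exp_band = pyhf.infer.hypotest(
#       mu_test, data, pdf,
#       return_tail_probs=True, return_expected=True,
#       return_expected_set=True)
#   # CLs_exp_band has 5 entries: CLs_exp at [-2, -1, 0, +1, +2] sigma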
def test_inferapi_pyhf_independence():
'''
    pyhf.infer should eventually be factored out, so it should be
    independent from pyhf internals. This is testing that
    a much simpler model can still run through pyhf.infer.hypotest
'''
from pyhf import get_backend
class _NonPyhfConfig(object):
def __init__(self):
self.poi_index = 0
self.npars = 2
def suggested_init(self):
return [1.0, 1.0]
def suggested_bounds(self):
return [[0.0, 10.0], [0.0, 10.0]]
class NonPyhfModel(object):
def __init__(self, spec):
self.sig, self.nominal, self.uncert = spec
self.factor = (self.nominal / self.uncert) ** 2
self.aux = 1.0 * self.factor
self.config = _NonPyhfConfig()
def _make_main_pdf(self, pars):
mu, gamma = pars
expected_main = gamma * self.nominal + mu * self.sig
return pyhf.probability.Poisson(expected_main)
def _make_constraint_pdf(self, pars):
mu, gamma = pars
return pyhf.probability.Poisson(gamma * self.factor)
def expected_data(self, pars, include_auxdata=True):
tensorlib, _ = get_backend()
expected_main = tensorlib.astensor(
[self._make_main_pdf(pars).expected_data()]
)
aux_data = tensorlib.astensor(
[self._make_constraint_pdf(pars).expected_data()]
)
if not include_auxdata:
return expected_main
return tensorlib.concatenate([expected_main, aux_data])
def logpdf(self, pars, data):
tensorlib, _ = get_backend()
maindata, auxdata = data
main = self._make_main_pdf(pars).log_prob(maindata)
constraint = self._make_constraint_pdf(pars).log_prob(auxdata)
return tensorlib.astensor([main + constraint])
model = NonPyhfModel([5, 50, 7])
cls = pyhf.infer.hypotest(
1.0, model.expected_data(model.config.suggested_init()), model
)
assert np.isclose(cls[0], 0.7267836451638846)
| 32.483444
| 83
| 0.64261
|
bb7931d25f521ef63125a2bacbb2c8f1c6918658
| 716
|
py
|
Python
|
wildlifecompliance/migrations/0426_auto_20200212_1624.py
|
preranaandure/wildlifecompliance
|
bc19575f7bccf7e19adadbbaf5d3eda1d1aee4b5
|
[
"Apache-2.0"
] | 1
|
2020-12-07T17:12:40.000Z
|
2020-12-07T17:12:40.000Z
|
wildlifecompliance/migrations/0426_auto_20200212_1624.py
|
preranaandure/wildlifecompliance
|
bc19575f7bccf7e19adadbbaf5d3eda1d1aee4b5
|
[
"Apache-2.0"
] | 14
|
2020-01-08T08:08:26.000Z
|
2021-03-19T22:59:46.000Z
|
wildlifecompliance/migrations/0426_auto_20200212_1624.py
|
preranaandure/wildlifecompliance
|
bc19575f7bccf7e19adadbbaf5d3eda1d1aee4b5
|
[
"Apache-2.0"
] | 15
|
2020-01-08T08:02:28.000Z
|
2021-11-03T06:48:32.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2020-02-12 08:24
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wildlifecompliance', '0425_merge_20200210_1630'),
]
operations = [
migrations.AddField(
model_name='applicationselectedactivity',
name='additional_fee',
field=models.DecimalField(decimal_places=2, default='0', max_digits=8),
),
migrations.AddField(
model_name='applicationselectedactivity',
name='additional_fee_text',
field=models.TextField(blank=True, null=True),
),
]
| 27.538462
| 83
| 0.638268
|
3d48f4feb380168ee32b2541a207483248233814
| 1,816
|
py
|
Python
|
tests/eigenstepsbuilder_test_with_plot.py
|
AyoubBelhadji/FrameBuilder
|
537094ca2de904a61d4babb2c7a4f7bbc3e312fd
|
[
"MIT"
] | null | null | null |
tests/eigenstepsbuilder_test_with_plot.py
|
AyoubBelhadji/FrameBuilder
|
537094ca2de904a61d4babb2c7a4f7bbc3e312fd
|
[
"MIT"
] | null | null | null |
tests/eigenstepsbuilder_test_with_plot.py
|
AyoubBelhadji/FrameBuilder
|
537094ca2de904a61d4babb2c7a4f7bbc3e312fd
|
[
"MIT"
] | null | null | null |
import sys
#sys.path.append('../')
sys.path.insert(0, '..')
from FrameBuilder.eigenstepsbuilder import *
import numpy as np
import matplotlib.pyplot as plt
N = 10
d = 2
Q = np.zeros((N,d))
for _ in range(0,d):
Q[_,_] = 1
# lv_scores_vector = d/N*np.ones(N)  # (uniform alternative, unused)
lv_scores_vector = np.linspace(1, 1000, num=N)
lv_scores_vector = d*lv_scores_vector/(np.sum(lv_scores_vector))
I_sorting = list(reversed(np.argsort(lv_scores_vector)))
lv_scores_vector = np.asarray(list(reversed(np.sort(lv_scores_vector))))
E = np.zeros((N,N)) #(d,N)
mu_vector = lv_scores_vector
lambda_vector = np.zeros((N))
lambda_vector[0:d] = np.ones((d))
#mu_vector = np.linspace(1, 0.1, num=N)
sum_mu_vector = np.sum(mu_vector)
mu_vector = d/sum_mu_vector*mu_vector
#mu_vector = d/N*np.ones((N,1))
E_test = get_eigensteps_random(mu_vector,lambda_vector,N,d)
E_ = np.zeros((d,N+1))
for i in range(d):
E_[i,1:N+1] = E_test[i,:]
F_test = get_F(d,N,np.asmatrix(E_),mu_vector)
Q = np.transpose(F_test)
print("The covariance matrix")
print(np.dot(F_test[:,0:N],F_test[:,0:N].T))
print("The input vector of the spectrum")
print(lambda_vector)
print("The diagonal of the gram matrix")
print(np.diag(np.dot(F_test[:,0:N].T,F_test[:,0:N])))
print("The input vector of the lengths")
print(mu_vector)
fig=plt.figure(1)
plt.gca().set_aspect('equal')
plt.axis([-0.7,0.7,-0.7,0.7])
ax=fig.add_subplot(1,1,1)
plt.scatter(Q[:,0], Q[:,1],marker = 'o', s=10, color = 'red')
circ_list = []
for i in range(N):
circ_i=plt.Circle((0,0), radius=np.sqrt(lv_scores_vector[i]), color="#3F5D7D", fill=False)
ax.add_patch(circ_i)
plt.show()
| 27.938462
| 94
| 0.692181
|
beaa96e2d1611b131a91008677266f88954e70e4
| 4,178
|
py
|
Python
|
hpimdm/tree/tree_if_root_originator.py
|
pedrofran12/hpim_dm
|
fe949294b5e75ab544dcd40ff51ceafc1d3b2f0c
|
[
"MIT"
] | 1
|
2020-02-04T20:59:03.000Z
|
2020-02-04T20:59:03.000Z
|
hpimdm/tree/tree_if_root_originator.py
|
pedrofran12/hpim_dm
|
fe949294b5e75ab544dcd40ff51ceafc1d3b2f0c
|
[
"MIT"
] | 3
|
2020-06-09T16:37:01.000Z
|
2021-08-30T00:31:12.000Z
|
hpimdm/tree/tree_if_root_originator.py
|
pedrofran12/hpim_dm
|
fe949294b5e75ab544dcd40ff51ceafc1d3b2f0c
|
[
"MIT"
] | 1
|
2020-11-23T06:47:46.000Z
|
2020-11-23T06:47:46.000Z
|
import logging
import traceback
from threading import Timer
from threading import Thread
from . import data_packets_socket
from .hpim_globals import SOURCE_LIFETIME
from .tree_interface import TreeInterface
class TreeInterfaceRootOriginator(TreeInterface):
LOGGER = logging.getLogger('hpim.KernelEntry.RootInterface')
def __init__(self, kernel_entry, interface_id, current_tree_state):
extra_dict_logger = kernel_entry.kernel_entry_logger.extra.copy()
extra_dict_logger['vif'] = interface_id
extra_dict_logger['interfacename'] = kernel_entry.get_interface_name(interface_id)
logger = logging.LoggerAdapter(TreeInterfaceRootOriginator.LOGGER, extra_dict_logger)
TreeInterface.__init__(self, kernel_entry, interface_id, None, current_tree_state, logger)
# Originator state
self._source_active_timer = None
self.set_source_active_timer()
        # TODO: test socket for receiving data packets
self.socket_is_enabled = True
(s, g) = self.get_tree_id()
interface_name = self.get_interface_name()
self.socket_pkt = data_packets_socket.get_s_g_bpf_filter_code(s, g, interface_name)
# run receive method in background
receive_thread = Thread(target=self.socket_recv)
receive_thread.daemon = True
receive_thread.start()
self.logger.debug('Created RootInterfaceOriginator')
def socket_recv(self):
"""
Socket used to receive data packets from the Root interface...
Useful in order to control the tree state of Originator routers
"""
while self.socket_is_enabled:
try:
self.socket_pkt.recvfrom(0)
print("DATA RECEIVED")
self.recv_data_msg()
            except Exception:
traceback.print_exc()
continue
print("EXIT SOCKET")
##########################################
# Set timers
##########################################
# Originator timers
def set_source_active_timer(self):
"""
Set Source Active timer
"""
self.clear_source_active_timer()
self._source_active_timer = Timer(SOURCE_LIFETIME, self.source_active_timeout)
self._source_active_timer.start()
def clear_source_active_timer(self):
"""
Stop Source Active timer
"""
if self._source_active_timer is not None:
self._source_active_timer.cancel()
###########################################
# Timer timeout
###########################################
def source_active_timeout(self):
"""
Source Active timer expired... react to this event
"""
self._kernel_entry.sat_expires()
###########################################
# Recv packets
###########################################
def recv_data_msg(self):
"""
Root interface received data packet
"""
if not self.is_tree_inactive():
self.set_source_active_timer()
if self.is_tree_unsure():
self._kernel_entry.sat_running()
###########################################
# Change to in/out-tree
###########################################
def node_is_out_tree(self):
return
def node_is_in_tree(self):
return
####################################################################
def is_forwarding(self):
"""
This interface must not be included in the OIL of the multicast routing table, thus returning False
"""
return False
def delete(self):
"""
Tree interface is being removed... due to change of interface roles or
due to the removal of the tree by this router
Clear all state from this interface regarding this tree
"""
self.socket_is_enabled = False
try:
from socket import SHUT_RDWR
self.socket_pkt.shutdown(SHUT_RDWR)
        except OSError:
pass
self.socket_pkt.close()
super().delete()
self.clear_source_active_timer()
self._source_active_timer = None
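# Lifecycle summary of the originator interface above (a recap, no new
# behavior): each data packet received on the root interface re-arms the
# Source Active timer; if SOURCE_LIFETIME elapses without traffic,
# source_active_timeout() fires and notifies the kernel entry (sat_expires)
# so the tree can be torn down.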
| 33.15873
| 107
| 0.578267
|
892930d333201a7442a04c9c230c53b879477dad
| 22,296
|
py
|
Python
|
crosslingual_NER/model/ner_model.py
|
baozuyi/MLMA
|
a9a8be912da1655936399dd6c51302bcb08a5dbd
|
[
"Apache-2.0"
] | 6
|
2019-11-05T07:41:15.000Z
|
2021-11-12T04:25:55.000Z
|
crosslingual_NER/model/ner_model.py
|
baozuyi/MLMA
|
a9a8be912da1655936399dd6c51302bcb08a5dbd
|
[
"Apache-2.0"
] | 3
|
2020-01-28T22:19:11.000Z
|
2020-11-13T17:37:40.000Z
|
crosslingual_NER/model/ner_model.py
|
baozuyi/MLMA
|
a9a8be912da1655936399dd6c51302bcb08a5dbd
|
[
"Apache-2.0"
] | 1
|
2019-10-28T04:58:43.000Z
|
2019-10-28T04:58:43.000Z
|
import numpy as np
import os
import tensorflow as tf
import tensorflow_hub as hub
from .data_utils import minibatches, pad_sequences, get_chunks, pad_sequences_trans, _pad_sequences
from .general_utils import Progbar
from .base_model import BaseModel
def shape_list(x):
ps = x.get_shape().as_list()
ts = tf.shape(x)
return [ts[i] if ps[i] is None else ps[i] for i in range(len(ps))]
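# shape_list mixes static and dynamic shapes: known dimensions come back as
# Python ints, unknown ones as scalar tensors. Sketch (tensor is hypothetical):
#   x = tf.placeholder(tf.float32, [None, 128])
#   shape_list(x)  # -> [<scalar int32 Tensor for dim 0>, 128]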
class NERModel(BaseModel):
"""Specialized class of Model for NER"""
def __init__(self, config):
super(NERModel, self).__init__(config)
self.trans_layer = config.layer
self.layers = config.trans_layer
self.trans_dim = config.trans_dim
self.use_transformer = config.use_transformer
self.idx_to_tag = {idx: tag for tag, idx in
self.config.vocab_tags.items()}
def add_output(self, model, wid, lang):
return model(wid, signature=lang)
def add_transformer_output(self):
model_dir = self.config.model_dir
self.logger.info('Using transformer model from: {}'.format(model_dir))
transformer_model = hub.Module(model_dir, trainable=self.config.train_embeddings)
if self.config.trans_type == 'monolingual':
self.logger.info('Using monolingual transformer...')
transformer_out = transformer_model(self.word_ids_trans)
else:
self.logger.info('Using cross-lingual transformer...')
transformer_out = tf.cond(tf.equal(self.signature, self.config.src_lang),
lambda: self.add_output(transformer_model, self.word_ids_trans, lang=self.config.src_lang),
lambda: self.add_output(transformer_model, self.word_ids_trans, lang=self.config.tgt_lang))
seq_len = shape_list(transformer_out)[1]
batch_size = shape_list(transformer_out)[0]
transformer_out = tf.reshape(transformer_out, shape=[-1, seq_len, 2, self.layers, self.trans_dim])
        transformer_out = tf.transpose(transformer_out, [0, 1, 3, 2, 4])  # [batch_size, seq_len, layers, 2, trans_dim]
if self.trans_layer is not None:
self.layers = 1
self.transformer_out = tf.reshape(transformer_out[:, :, self.trans_layer, :, :], [-1, seq_len, 2 * self.trans_dim])
else:
self.transformer_out = tf.reshape(transformer_out, [-1, seq_len, self.layers * 2 * self.trans_dim])
def add_placeholders(self):
"""Define placeholders = entries to computational graph"""
# shape = (batch size, max length of sentence in batch)
self.word_ids = tf.placeholder(tf.int32, shape=[None, None],
name="word_ids")
self.word_ids_trans = tf.placeholder(tf.int64, shape=[None, None],
name="word_ids_trans")
self.mask_trans = tf.placeholder(tf.float32, shape=[None, None],
name="mask_trans")
# shape = (batch size)
self.sequence_lengths = tf.placeholder(tf.int32, shape=[None],
name="sequence_lengths")
# shape = (batch size, max length of sentence, max length of word)
self.char_ids = tf.placeholder(tf.int32, shape=[None, None, None],
name="char_ids")
# shape = (batch_size, max_length of sentence)
self.word_lengths = tf.placeholder(tf.int32, shape=[None, None],
name="word_lengths")
# shape = (batch size, max length of sentence in batch)
self.labels = tf.placeholder(tf.int32, shape=[None, None],
name="labels")
# hyper parameters
self.dropout = tf.placeholder(dtype=tf.float32, shape=[],
name="dropout")
self.lr = tf.placeholder(dtype=tf.float32, shape=[],
name="lr")
self.dropout_trans = tf.placeholder(dtype=tf.float32, shape=[], name="dropout_trans")
self.signature = tf.placeholder(dtype=tf.string)
# self.n_updates = tf.placeholder(dtype=tf.float32, shape=[], name="n_updates")
def get_feed_dict(self, words, labels=None, lr=None, dropout=None, dropout_trans=0.1, n_updates=1, signature='en'):
"""Given some data, pad it and build a feed dictionary
Args:
words: list of sentences. A sentence is a list of ids of a list of
words. A word is a list of ids
labels: list of ids
lr: (float) learning rate
dropout: (float) keep prob
Returns:
dict {placeholder: value}
"""
# perform padding of the given data
if self.config.use_chars:
#char_ids, word_ids, word_ids_trans = zip(*words)
if self.use_transformer:
# word_ids_trans, sequence_lengths_trans, mask = pad_sequences_trans(word_ids, start_pos=self.n_vocab, max_length=self.n_ctx)
char_ids, word_ids, word_ids_trans = zip(*words)
word_ids_trans, sequence_lengths_trans = pad_sequences(word_ids_trans, 0)
else:
char_ids, word_ids = zip(*words)
word_ids, sequence_lengths = pad_sequences(word_ids, 0)
char_ids, word_lengths = pad_sequences(char_ids, pad_tok=0,
nlevels=2)
else:
word_ids, word_ids_trans = zip(*words)
if self.use_transformer:
word_ids_trans, sequence_lengths_trans = pad_sequences(word_ids_trans, 0)
word_ids, sequence_lengths = pad_sequences(word_ids, 0)
# build feed dictionary
# if self.use_transformer:
# sequence_lengths = sequence_lengths_trans
feed = {
self.sequence_lengths: sequence_lengths,
self.word_ids: word_ids,
}
if self.use_transformer:
feed[self.word_ids_trans] = word_ids_trans
# feed[self.mask_trans] = mask
if self.config.use_chars:
feed[self.char_ids] = char_ids
feed[self.word_lengths] = word_lengths
if labels is not None:
# if self.use_transformer:
# labels, _ = _pad_sequences(labels, 0, self.n_ctx)
# else:
labels, _ = pad_sequences(labels, 0)
feed[self.labels] = labels
if lr is not None:
feed[self.lr] = lr
if dropout is not None:
feed[self.dropout] = dropout
# feed[self.n_updates] = n_updates
feed[self.dropout_trans] = dropout_trans
feed[self.signature] = signature
return feed, sequence_lengths
def add_word_embeddings_op(self):
"""Defines self.word_embeddings
If self.config.embeddings is not None and is a np array initialized
        with pre-trained word vectors, the word embeddings are just a look-up
and we don't train the vectors. Otherwise, a random matrix with
the correct shape is initialized.
"""
with tf.variable_scope("words"):
if self.config.embeddings is None:
self.logger.info("WARNING: randomly initializing word vectors")
_word_embeddings = tf.get_variable(
name="_word_embeddings",
dtype=tf.float32,
shape=[self.config.nwords, self.config.dim_word])
else:
_word_embeddings = tf.Variable(
self.config.embeddings,
name="_word_embeddings",
dtype=tf.float32,
trainable=self.config.train_embeddings)
word_embeddings = tf.nn.embedding_lookup(_word_embeddings,
self.word_ids, name="word_embeddings")
if self.config.use_transformer:
trans_out = self.transformer_out
if self.config.trans_concat == 'fws':
self.logger.info('Using 7*1024 weighted sum of transformer output...')
with tf.variable_scope('weighted_sum_full'):
weighted = tf.get_variable('weights', shape=[self.layers, 2 * self.trans_dim], initializer=tf.random_normal_initializer(stddev=0.02))
weighted = tf.nn.softmax(weighted, 0)
self.weighted_input = tf.reduce_mean(weighted, -1)
#weighted_abs = tf.sqrt(weighted_ * weighted_)
#self.l1_loss = tf.reduce_sum(tf.reduce_sum(weighted_abs, -1), -1)
# self.weight_input = weighted
seq_len = shape_list(self.transformer_out)[1]
weighted_sum = tf.expand_dims(tf.expand_dims(weighted, 0), 0) * tf.reshape(self.transformer_out, [-1, seq_len, self.layers, 2 * self.trans_dim])
trans_out = tf.reduce_sum(weighted_sum, -2)
elif self.config.trans_concat == 'sws':
self.logger.info('Using sws...')
batch = shape_list(self.transformer_out)[0]
seq_len = shape_list(self.transformer_out)[1]
with tf.variable_scope('attn_proj'):
w1 = tf.get_variable('w1', shape=[self.layers * 2 * self.trans_dim, 512], initializer=tf.random_normal_initializer(stddev=0.02))
b1 = tf.get_variable('b1', shape=[512], initializer=tf.zeros_initializer())
w2 = tf.get_variable('w2', shape=[512, self.layers], initializer=tf.random_normal_initializer(stddev=0.02))
b2 = tf.get_variable('b2', shape=[self.layers], initializer=tf.zeros_initializer())
transformer_out = tf.reshape(self.transformer_out, [-1, self.layers * 2 * self.trans_dim])
o1 = tf.tanh(tf.matmul(transformer_out, w1) + b1)
o2 = tf.tanh(tf.matmul(o1, w2) + b2)
weight = tf.nn.softmax(tf.reshape(o2, [batch, seq_len, self.layers]), -1)
self.weighted_input = tf.reduce_mean(tf.reduce_mean(weight, 1), 0)
out = tf.expand_dims(weight, -1) * tf.reshape(transformer_out, [batch, seq_len, self.layers, 2 * self.trans_dim])
trans_out = tf.reduce_sum(out, -2)
if self.config.no_glove:
self.logger.info('Not use glove embeddings...')
word_embeddings = trans_out
else:
word_embeddings = tf.concat([word_embeddings, trans_out], -1)
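            # Shape bookkeeping for the two weighting schemes above: 'fws'
            # learns one softmax weight per (layer, channel) pair, shared
            # across positions; 'sws' predicts a per-token softmax over layers
            # from the flattened transformer output. Both reduce
            # [batch, seq, layers, 2*trans_dim] -> [batch, seq, 2*trans_dim].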
with tf.variable_scope("chars"):
if self.config.use_chars:
_char_embeddings = tf.get_variable(
name="_char_embeddings",
dtype=tf.float32,
shape=[self.config.nchars, self.config.dim_char])
char_embeddings = tf.nn.embedding_lookup(_char_embeddings,
self.char_ids, name="char_embeddings")
# put the time dimension on axis=1
s = tf.shape(char_embeddings)
char_embeddings = tf.reshape(char_embeddings,
shape=[s[0]*s[1], s[-2], self.config.dim_char])
word_lengths = tf.reshape(self.word_lengths, shape=[s[0]*s[1]])
# bi lstm on chars
cell_fw = tf.contrib.rnn.LSTMCell(self.config.hidden_size_char,
state_is_tuple=True)
cell_bw = tf.contrib.rnn.LSTMCell(self.config.hidden_size_char,
state_is_tuple=True)
_output = tf.nn.bidirectional_dynamic_rnn(
cell_fw, cell_bw, char_embeddings,
sequence_length=word_lengths, dtype=tf.float32)
# read and concat output
_, ((_, output_fw), (_, output_bw)) = _output
output = tf.concat([output_fw, output_bw], axis=-1)
# shape = (batch size, max sentence length, char hidden size)
output = tf.reshape(output,
shape=[s[0], s[1], 2*self.config.hidden_size_char])
word_embeddings = tf.concat([word_embeddings, output], axis=-1)
self.word_embeddings = tf.nn.dropout(word_embeddings, self.dropout)
def add_logits_op(self):
"""Defines self.logits
For each word in each sentence of the batch, it corresponds to a vector
of scores, of dimension equal to the number of tags.
"""
with tf.variable_scope("bi-lstm"):
cell_fw = tf.contrib.rnn.LSTMCell(self.config.hidden_size_lstm)
cell_bw = tf.contrib.rnn.LSTMCell(self.config.hidden_size_lstm)
(output_fw, output_bw), _ = tf.nn.bidirectional_dynamic_rnn(
cell_fw, cell_bw, self.word_embeddings,
sequence_length=self.sequence_lengths, dtype=tf.float32)
output = tf.concat([output_fw, output_bw], axis=-1)
output = tf.nn.dropout(output, self.dropout)
with tf.variable_scope("proj"):
hidden_size = 2*self.config.hidden_size_lstm
W = tf.get_variable("W", dtype=tf.float32,
shape=[hidden_size, self.config.ntags])
b = tf.get_variable("b", shape=[self.config.ntags],
dtype=tf.float32, initializer=tf.zeros_initializer())
nsteps = tf.shape(output)[1]
output = tf.reshape(output, [-1, hidden_size])
pred = tf.matmul(output, W) + b
self.logits = tf.reshape(pred, [-1, nsteps, self.config.ntags])
def add_pred_op(self):
"""Defines self.labels_pred
This op is defined only in the case where we don't use a CRF since in
that case we can make the prediction "in the graph" (thanks to tf
functions in other words). With theCRF, as the inference is coded
in python and not in pure tensroflow, we have to make the prediciton
outside the graph.
"""
if not self.config.use_crf:
self.labels_pred = tf.cast(tf.argmax(self.logits, axis=-1),
tf.int32)
def add_loss_op(self):
"""Defines the loss"""
if self.config.use_crf:
log_likelihood, trans_params = tf.contrib.crf.crf_log_likelihood(
self.logits, self.labels, self.sequence_lengths)
self.trans_params = trans_params # need to evaluate it for decoding
self.loss = tf.reduce_mean(-log_likelihood)
else:
losses = tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=self.logits, labels=self.labels)
mask = tf.sequence_mask(self.sequence_lengths)
losses = tf.boolean_mask(losses, mask)
self.loss = tf.reduce_mean(losses)
# for tensorboard
tf.summary.scalar("loss", self.loss)
def build(self):
# NER specific functions
#print("Before build model...")
self.add_placeholders()
if self.use_transformer:
self.add_transformer_output()
self.add_word_embeddings_op()
self.add_logits_op()
self.add_pred_op()
self.add_loss_op()
# Generic functions that add training op and initialize session
self.add_train_op(self.config.lr_method, self.lr, self.loss,
self.config.clip)
#print("After model build...")
self.initialize_session() # now self.sess is defined and vars are init
#print("After initialize session...")
def predict_batch(self, words, lang):
"""
Args:
words: list of sentences
Returns:
labels_pred: list of labels for each sentence
sequence_length
"""
fd, sequence_lengths = self.get_feed_dict(words, dropout=1.0, dropout_trans=0, signature=lang)
if self.config.use_crf:
# get tag scores and transition params of CRF
viterbi_sequences = []
logits, trans_params = self.sess.run(
[self.logits, self.trans_params], feed_dict=fd)
            # iterate over the sentences because there is no batching in viterbi_decode
for logit, sequence_length in zip(logits, sequence_lengths):
logit = logit[:sequence_length] # keep only the valid steps
viterbi_seq, viterbi_score = tf.contrib.crf.viterbi_decode(
logit, trans_params)
viterbi_sequences += [viterbi_seq]
return viterbi_sequences, sequence_lengths
else:
labels_pred = self.sess.run(self.labels_pred, feed_dict=fd)
return labels_pred, sequence_lengths
def run_epoch(self, train, dev, epoch):
"""Performs one complete pass over the train set and evaluate on dev
Args:
train: dataset that yields tuple of sentences, tags
dev: dataset
epoch: (int) index of the current epoch
Returns:
f1: (python float), score to select model on, higher is better
"""
# progbar stuff for logging
batch_size = self.config.batch_size
nbatches = (len(train) + batch_size - 1) // batch_size
n_updates_total = nbatches * self.config.nepochs
prog = Progbar(target=nbatches)
# iterate over dataset
for i, (words, labels) in enumerate(minibatches(train, batch_size)):
fd, _ = self.get_feed_dict(words, labels, self.config.lr,
self.config.dropout, dropout_trans=0.2, n_updates=n_updates_total, signature=train.lang)
_, train_loss, summary = self.sess.run(
[self.train_op, self.loss, self.merged], feed_dict=fd)
prog.update(i + 1, [("train loss", train_loss)])
# tensorboard
if i % 10 == 0:
self.file_writer.add_summary(summary, epoch*nbatches + i)
metrics = self.run_evaluate(dev)
msg = " - ".join(["{} {:04.2f}".format(k, v)
for k, v in metrics.items()])
self.logger.info(msg)
if self.config.is_pos:
return metrics["acc"]
else:
return metrics["f1"]
def write_result(self, fout, labels_pred):
for labels in labels_pred:
for label in labels:
fout.write(str(label) + '\n')
fout.write('\n')
def run_evaluate(self, test):
"""Evaluates performance on test set
Args:
test: dataset that yields tuple of (sentences, tags)
Returns:
metrics: (dict) metrics["acc"] = 98.4, ...
"""
#fout = open(os.path.join(self.config.dir_output, 'tag_result.txt'), 'w')
accs = []
correct_preds, total_correct, total_preds = 0., 0., 0.
type_correct_preds = {}
type_total_correct = {}
type_total_preds = {}
for words, labels in minibatches(test, self.config.batch_size):
labels_pred, sequence_lengths = self.predict_batch(words, test.lang)
#self.write_result(fout, labels_pred)
for lab, lab_pred, length in zip(labels, labels_pred,
sequence_lengths):
lab = lab[:length]
lab_pred = lab_pred[:length]
accs += [a==b for (a, b) in zip(lab, lab_pred)]
if not self.config.is_pos:
lab_chunks = set(get_chunks(lab, self.config.vocab_tags))
lab_pred_chunks = set(get_chunks(lab_pred,
self.config.vocab_tags))
correct_preds += len(lab_chunks & lab_pred_chunks)
total_preds += len(lab_pred_chunks)
total_correct += len(lab_chunks)
for chunk in lab_chunks:
t = chunk[0]
if t not in type_correct_preds:
type_correct_preds[t], type_total_correct[t], type_total_preds[t] = 0., 0., 0.
type_total_correct[t] += 1
if chunk in lab_pred_chunks:
type_correct_preds[t] += 1
for chunk in lab_pred_chunks:
t = chunk[0]
if t not in type_correct_preds:
type_correct_preds[t], type_total_correct[t], type_total_preds[t] = 0., 0., 0.
type_total_preds[t] += 1
p = correct_preds / total_preds if correct_preds > 0 else 0
r = correct_preds / total_correct if correct_preds > 0 else 0
f1 = 2 * p * r / (p + r) if correct_preds > 0 else 0
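        # Worked example of the micro-averaged metrics above: with
        # correct_preds=8, total_preds=10, total_correct=16 we get
        # p = 0.8, r = 0.5 and f1 = 2*0.8*0.5/(0.8+0.5) ~= 0.615.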
acc = np.mean(accs)
d = {"acc": 100*acc, "f1": 100*f1}
if not self.config.is_pos:
type_p, type_r, type_f1 = {}, {}, {}
for t in type_correct_preds:
type_p[t] = type_correct_preds[t] / type_total_preds[t] if type_correct_preds[t] > 0 else 0
type_r[t] = type_correct_preds[t] / type_total_correct[t] if type_correct_preds[t] > 0 else 0
type_f1[t] = 2 * type_p[t] * type_r[t] / (type_p[t] + type_r[t]) if type_correct_preds[t] > 0 else 0
d.update(type_f1)
return d
    def predict(self, words_raw, lang='en'):
        """Returns list of tags
        Args:
            words_raw: list of words (string), just one sentence (no batch)
            lang: (string) language signature forwarded to predict_batch
                  (defaults to 'en', matching get_feed_dict)
        Returns:
            preds: list of tags (string), one for each word in the sentence
        """
words = [self.config.processing_word(w) for w in words_raw]
        if isinstance(words[0], tuple):
            words = list(zip(*words))
        pred_ids, _ = self.predict_batch([words], lang)
preds = [self.idx_to_tag[idx] for idx in list(pred_ids[0])]
return preds
| 42.876923
| 168
| 0.577054
|
05ab2ddb22dd745b695098958433e63534040656
| 739
|
py
|
Python
|
lepmlutils/xgbutils/test_partition.py
|
Lewington-pitsos/mlutils
|
c92322a8a2fc0b5342d44b0d92051a93c6eede44
|
[
"MIT"
] | null | null | null |
lepmlutils/xgbutils/test_partition.py
|
Lewington-pitsos/mlutils
|
c92322a8a2fc0b5342d44b0d92051a93c6eede44
|
[
"MIT"
] | null | null | null |
lepmlutils/xgbutils/test_partition.py
|
Lewington-pitsos/mlutils
|
c92322a8a2fc0b5342d44b0d92051a93c6eede44
|
[
"MIT"
] | null | null | null |
import unittest
import os
import pandas as pd
from .gridsearcher import GridSearcher
from .partition import Partition
class TestPartition(unittest.TestCase):
def setUp(self):
pass
def test_partitions_correctly(self):
dirname = os.path.dirname(__file__)
dataset = pd.read_csv(dirname + "/resources/train.csv")
p = Partition(dataset, 5)
count = 0
for partition in p:
count += 1
rows, cols = partition["test"].shape
self.assertEqual(12, cols)
self.assertLess(176, rows)
rows, cols = partition["train"].shape
self.assertEqual(12, cols)
self.assertLess(710, rows)
self.assertEqual(count, 5)
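        # Sanity arithmetic behind the bounds above (assuming the bundled
        # resources/train.csv has ~891 rows, which the asserts imply): a
        # 5-way partition leaves test folds of ~178 rows (> 176) and train
        # splits of ~713 rows (> 710).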
| 26.392857
| 63
| 0.614344
|
29c401794a332f18d9d82febdd0d2088f2c9e4e3
| 16,503
|
py
|
Python
|
reagent/test/preprocessing/test_preprocessing.py
|
dmitryvinn/ReAgent
|
f98825b9d021ec353a1f9087840a05fea259bf42
|
[
"BSD-3-Clause"
] | null | null | null |
reagent/test/preprocessing/test_preprocessing.py
|
dmitryvinn/ReAgent
|
f98825b9d021ec353a1f9087840a05fea259bf42
|
[
"BSD-3-Clause"
] | null | null | null |
reagent/test/preprocessing/test_preprocessing.py
|
dmitryvinn/ReAgent
|
f98825b9d021ec353a1f9087840a05fea259bf42
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import unittest
import numpy as np
import numpy.testing as npt
import six
import torch
from reagent.preprocessing import identify_types, normalization, transforms
from reagent.preprocessing.identify_types import BOXCOX, CONTINUOUS, ENUM
from reagent.preprocessing.normalization import (
MISSING_VALUE,
NormalizationParameters,
sort_features_by_normalization,
)
from reagent.preprocessing.preprocessor import Preprocessor
from reagent.test.base.utils import NumpyFeatureProcessor
from reagent.test.preprocessing.preprocessing_util import (
CONTINUOUS_FEATURE_ID,
BOXCOX_FEATURE_ID,
ENUM_FEATURE_ID,
PROBABILITY_FEATURE_ID,
id_to_type,
read_data,
)
from scipy import special
class TestPreprocessing(unittest.TestCase):
def _feature_type_override(self, feature_id):
"""
This should only be used to test CONTINUOUS_ACTION
"""
if id_to_type(feature_id) == identify_types.CONTINUOUS_ACTION:
return identify_types.CONTINUOUS_ACTION
return None
def test_prepare_normalization_and_normalize(self) -> None:
feature_value_map = read_data()
normalization_parameters = {}
for name, values in feature_value_map.items():
normalization_parameters[name] = normalization.identify_parameter(
name, values, 10, feature_type=self._feature_type_override(name)
)
for k, v in normalization_parameters.items():
if id_to_type(k) == CONTINUOUS:
self.assertEqual(v.feature_type, CONTINUOUS)
self.assertIs(v.boxcox_lambda, None)
self.assertIs(v.boxcox_shift, None)
elif id_to_type(k) == BOXCOX:
self.assertEqual(v.feature_type, BOXCOX)
self.assertIsNot(v.boxcox_lambda, None)
self.assertIsNot(v.boxcox_shift, None)
else:
assert v.feature_type == id_to_type(k)
preprocessor = Preprocessor(normalization_parameters, False)
sorted_features, _ = sort_features_by_normalization(normalization_parameters)
input_matrix = torch.zeros([10000, len(sorted_features)])
for i, feature in enumerate(sorted_features):
input_matrix[:, i] = torch.from_numpy(feature_value_map[feature])
normalized_feature_matrix = preprocessor(
input_matrix, (input_matrix != MISSING_VALUE)
)
normalized_features = {}
on_column = 0
for feature in sorted_features:
norm = normalization_parameters[feature]
if norm.feature_type == ENUM:
column_size = len(norm.possible_values)
else:
column_size = 1
normalized_features[feature] = normalized_feature_matrix[
:, on_column : (on_column + column_size)
]
on_column += column_size
self.assertTrue(
all(
[
np.isfinite(parameter.stddev) and np.isfinite(parameter.mean)
for parameter in normalization_parameters.values()
]
)
)
for k, v in six.iteritems(normalized_features):
v = v.numpy()
self.assertTrue(np.all(np.isfinite(v)))
feature_type = normalization_parameters[k].feature_type
if feature_type == identify_types.PROBABILITY:
sigmoidv = special.expit(v)
self.assertTrue(
np.all(
np.logical_and(np.greater(sigmoidv, 0), np.less(sigmoidv, 1))
)
)
elif feature_type == identify_types.ENUM:
possible_values = normalization_parameters[k].possible_values
self.assertEqual(v.shape[0], len(feature_value_map[k]))
self.assertEqual(v.shape[1], len(possible_values))
possible_value_map = {}
for i, possible_value in enumerate(possible_values):
possible_value_map[possible_value] = i
for i, row in enumerate(v):
original_feature = feature_value_map[k][i]
if abs(original_feature - MISSING_VALUE) < 0.01:
self.assertEqual(0.0, np.sum(row))
else:
self.assertEqual(
possible_value_map[original_feature],
np.where(row == 1)[0][0],
)
elif feature_type == identify_types.QUANTILE:
for i, feature in enumerate(v[0]):
original_feature = feature_value_map[k][i]
expected = NumpyFeatureProcessor.value_to_quantile(
original_feature, normalization_parameters[k].quantiles
)
self.assertAlmostEqual(feature, expected, 2)
elif feature_type == identify_types.BINARY:
pass
elif (
feature_type == identify_types.CONTINUOUS
or feature_type == identify_types.BOXCOX
):
one_stddev = np.isclose(np.std(v, ddof=1), 1, atol=0.01)
zero_stddev = np.isclose(np.std(v, ddof=1), 0, atol=0.01)
zero_mean = np.isclose(np.mean(v), 0, atol=0.01)
self.assertTrue(
np.all(zero_mean),
"mean of feature {} is {}, not 0".format(k, np.mean(v)),
)
self.assertTrue(np.all(np.logical_or(one_stddev, zero_stddev)))
elif feature_type == identify_types.CONTINUOUS_ACTION:
less_than_max = v < 1
more_than_min = v > -1
self.assertTrue(
np.all(less_than_max),
"values are not less than 1: {}".format(v[less_than_max == False]),
)
self.assertTrue(
np.all(more_than_min),
"values are not more than -1: {}".format(v[more_than_min == False]),
)
else:
raise NotImplementedError()
def test_normalize_dense_matrix_enum(self) -> None:
normalization_parameters = {
1: NormalizationParameters(
identify_types.ENUM,
None,
None,
None,
None,
[12, 4, 2],
None,
None,
None,
),
2: NormalizationParameters(
identify_types.CONTINUOUS, None, 0, 0, 1, None, None, None, None
),
3: NormalizationParameters(
identify_types.ENUM, None, None, None, None, [15, 3], None, None, None
),
}
preprocessor = Preprocessor(normalization_parameters, False)
inputs = np.zeros([4, 3], dtype=np.float32)
feature_ids = [2, 1, 3] # Sorted according to feature type
inputs[:, feature_ids.index(1)] = [12, 4, 2, 2]
inputs[:, feature_ids.index(2)] = [1.0, 2.0, 3.0, 3.0]
inputs[:, feature_ids.index(3)] = [15, 3, 15, normalization.MISSING_VALUE]
inputs = torch.from_numpy(inputs)
normalized_feature_matrix = preprocessor(inputs, (inputs != MISSING_VALUE))
np.testing.assert_allclose(
np.array(
[
[1.0, 1, 0, 0, 1, 0],
[2.0, 0, 1, 0, 0, 1],
[3.0, 0, 0, 1, 1, 0],
[3.0, 0, 0, 1, 0, 0], # Missing values should go to all 0
]
),
normalized_feature_matrix,
)
def test_persistency(self) -> None:
feature_value_map = read_data()
normalization_parameters = {}
for name, values in feature_value_map.items():
normalization_parameters[name] = normalization.identify_parameter(
name, values, feature_type=self._feature_type_override(name)
)
            values[0] = MISSING_VALUE  # set one entry to MISSING_VALUE to test missing-value handling
s = normalization.serialize(normalization_parameters)
read_parameters = normalization.deserialize(s)
        # Unfortunately, Thrift serialization seems to lose a bit of precision.
# Using `==` will be false.
self.assertEqual(read_parameters.keys(), normalization_parameters.keys())
for k in normalization_parameters:
self.assertEqual(
read_parameters[k].feature_type,
normalization_parameters[k].feature_type,
)
self.assertEqual(
read_parameters[k].possible_values,
normalization_parameters[k].possible_values,
)
for field in [
"boxcox_lambda",
"boxcox_shift",
"mean",
"stddev",
"quantiles",
"min_value",
"max_value",
]:
if getattr(normalization_parameters[k], field) is None:
self.assertEqual(
getattr(read_parameters[k], field),
getattr(normalization_parameters[k], field),
)
else:
npt.assert_allclose(
getattr(read_parameters[k], field),
getattr(normalization_parameters[k], field),
)
def test_quantile_boundary_logic(self) -> None:
"""Test quantile logic when feaure value == quantile boundary."""
input = torch.tensor([[0.0], [80.0], [100.0]])
norm_params = NormalizationParameters(
feature_type="QUANTILE",
boxcox_lambda=None,
boxcox_shift=None,
mean=0,
stddev=1,
possible_values=None,
quantiles=[0.0, 80.0, 100.0],
min_value=0.0,
max_value=100.0,
)
preprocessor = Preprocessor({1: norm_params}, False)
output = preprocessor._preprocess_QUANTILE(0, input.float(), [norm_params])
expected_output = torch.tensor([[0.0], [0.5], [1.0]])
self.assertTrue(np.all(np.isclose(output, expected_output)))
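        # Worked check of expected_output above: with quantile boundaries
        # [0.0, 80.0, 100.0] a value landing exactly on boundary k maps to
        # k / (len(quantiles) - 1), i.e. 0 -> 0.0, 80 -> 0.5, 100 -> 1.0.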
def test_preprocessing_network(self) -> None:
feature_value_map = read_data()
normalization_parameters = {}
name_preprocessed_blob_map = {}
for feature_name, feature_values in feature_value_map.items():
normalization_parameters[feature_name] = normalization.identify_parameter(
feature_name,
feature_values,
feature_type=self._feature_type_override(feature_name),
)
            feature_values[0] = MISSING_VALUE  # test missing-value handling
preprocessor = Preprocessor(
{feature_name: normalization_parameters[feature_name]}, False
)
feature_values_matrix = torch.from_numpy(np.expand_dims(feature_values, -1))
normalized_feature_values = preprocessor(
feature_values_matrix, (feature_values_matrix != MISSING_VALUE)
)
name_preprocessed_blob_map[feature_name] = normalized_feature_values.numpy()
test_features = NumpyFeatureProcessor.preprocess(
feature_value_map, normalization_parameters
)
for feature_name in feature_value_map:
normalized_features = name_preprocessed_blob_map[feature_name]
if feature_name != ENUM_FEATURE_ID:
normalized_features = np.squeeze(normalized_features, -1)
tolerance = 0.01
if feature_name == BOXCOX_FEATURE_ID:
# At the limit, boxcox has some numerical instability
tolerance = 0.5
non_matching = np.where(
np.logical_not(
np.isclose(
normalized_features.flatten(),
test_features[feature_name].flatten(),
rtol=tolerance,
atol=tolerance,
)
)
)
self.assertTrue(
np.all(
np.isclose(
normalized_features.flatten(),
test_features[feature_name].flatten(),
rtol=tolerance,
atol=tolerance,
)
),
"{} does not match: {} \n!=\n {}".format(
feature_name,
normalized_features.flatten()[non_matching],
test_features[feature_name].flatten()[non_matching],
),
)
def test_type_override_binary(self) -> None:
# Take a feature that should be identified as probability
feature_value_map = read_data()
probability_values = feature_value_map[PROBABILITY_FEATURE_ID]
# And ask for a binary anyways
parameter = normalization.identify_parameter(
"_", probability_values, feature_type=identify_types.BINARY
)
self.assertEqual(parameter.feature_type, "BINARY")
def test_type_override_continuous(self) -> None:
# Take a feature that should be identified as BOXCOX
feature_value_map = read_data()
probability_values = feature_value_map[BOXCOX_FEATURE_ID]
# And ask for a CONTINUOUS anyways
parameter = normalization.identify_parameter(
"_", probability_values, feature_type=identify_types.CONTINUOUS
)
self.assertEqual(parameter.feature_type, "CONTINUOUS")
def test_type_override_boxcox(self) -> None:
# Take a feature that should be identified as CONTINUOUS
feature_value_map = read_data()
probability_values = feature_value_map[CONTINUOUS_FEATURE_ID]
# And ask for a BOXCOX anyways
parameter = normalization.identify_parameter(
"_", probability_values, feature_type=identify_types.BOXCOX
)
self.assertEqual(parameter.feature_type, "BOXCOX")
def test_type_override_quantile(self) -> None:
# Take a feature that should be identified as CONTINUOUS
feature_value_map = read_data()
probability_values = feature_value_map[BOXCOX_FEATURE_ID]
# And ask for a QUANTILE anyways
parameter = normalization.identify_parameter(
"_", probability_values, feature_type=identify_types.QUANTILE
)
self.assertEqual(parameter.feature_type, "QUANTILE")
def test_columnvector(self) -> None:
def format_input2output(test_keys, inp_form):
test_data = {}
for ky in test_keys:
test_data[ky] = inp_form
test_instance = transforms.ColumnVector(test_keys)
output_data = test_instance(test_data)
return output_data
test_values = range(0, 5)
test_keys = []
for k in test_values:
test_keys.append(str(k))
# Possible input formats: tuple, list, torch.Tensor
for n_len in [1, 3]:
test_input_forms = [
(np.ones((n_len, 1)), 0),
n_len * [1],
torch.tensor(np.ones((n_len, 1))),
]
for inp_form in test_input_forms:
output_data = format_input2output(test_keys, inp_form)
for ky in test_keys:
self.assertEqual(output_data[ky].shape[0], n_len)
self.assertEqual(output_data[ky].shape[1], 1)
# Input as in row format
test_data = {}
for ky in test_keys:
test_data[ky] = (np.ones((1, 3)), 0)
test_instance = transforms.ColumnVector(test_keys)
with self.assertRaisesRegex(AssertionError, "Invalid shape for key"):
output_data = test_instance(test_data)
# Input as unimplemented type (number)
test_data = {}
for ky in test_keys:
test_data[ky] = 1
test_instance = transforms.ColumnVector(test_keys)
with self.assertRaisesRegex(NotImplementedError, "value of type"):
output_data = test_instance(test_data)
| 40.349633
| 88
| 0.571714
|
10540a0d4d53dee16420cba902e7e954b2a67507
| 10,171
|
py
|
Python
|
magenta/models/drums_rnn/drums_rnn_generate.py
|
cristianmtr/magenta
|
ac2d8ae455fdd07f4b46dec82aedab22fcb6bbbd
|
[
"Apache-2.0"
] | 1
|
2019-01-24T07:22:48.000Z
|
2019-01-24T07:22:48.000Z
|
magenta/models/drums_rnn/drums_rnn_generate.py
|
dkun7944/magenta
|
8f930263b7cfd67f27eb12cd871b4e5fa87d382e
|
[
"Apache-2.0"
] | null | null | null |
magenta/models/drums_rnn/drums_rnn_generate.py
|
dkun7944/magenta
|
8f930263b7cfd67f27eb12cd871b4e5fa87d382e
|
[
"Apache-2.0"
] | 1
|
2019-11-26T06:30:52.000Z
|
2019-11-26T06:30:52.000Z
|
# Copyright 2018 The Magenta Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate drum tracks from a trained checkpoint of a drums RNN model.
Uses flags to define operation.
"""
import ast
import os
import time
import magenta
from magenta.models.drums_rnn import drums_rnn_config_flags
from magenta.models.drums_rnn import drums_rnn_model
from magenta.models.drums_rnn import drums_rnn_sequence_generator
from magenta.protobuf import generator_pb2
from magenta.protobuf import music_pb2
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string(
'run_dir', None,
'Path to the directory where the latest checkpoint will be loaded from.')
tf.app.flags.DEFINE_string(
'checkpoint_file', None,
'Path to the checkpoint file. run_dir will take priority over this flag.')
tf.app.flags.DEFINE_string(
'bundle_file', None,
'Path to the bundle file. If specified, this will take priority over '
'run_dir and checkpoint_file, unless save_generator_bundle is True, in '
'which case both this flag and either run_dir or checkpoint_file are '
'required')
tf.app.flags.DEFINE_boolean(
'save_generator_bundle', False,
'If true, instead of generating a sequence, will save this generator as a '
'bundle file in the location specified by the bundle_file flag')
tf.app.flags.DEFINE_string(
'bundle_description', None,
'A short, human-readable text description of the bundle (e.g., training '
'data, hyper parameters, etc.).')
tf.app.flags.DEFINE_string(
'output_dir', '/tmp/drums_rnn/generated',
'The directory where MIDI files will be saved to.')
tf.app.flags.DEFINE_integer(
'num_outputs', 10,
'The number of drum tracks to generate. One MIDI file will be created for '
'each.')
tf.app.flags.DEFINE_integer(
'num_steps', 128,
'The total number of steps the generated drum tracks should be, priming '
'drum track length + generated steps. Each step is a 16th of a bar.')
tf.app.flags.DEFINE_string(
'primer_drums', '',
'A string representation of a Python list of tuples containing drum pitch '
'values. For example: '
'"[(36,42),(),(),(),(42,),(),(),()]". If specified, this drum track will '
'be used as the priming drum track. If a priming drum track is not '
'specified, drum tracks will be generated from scratch.')
tf.app.flags.DEFINE_string(
'primer_midi', '',
'The path to a MIDI file containing a drum track that will be used as a '
'priming drum track. If a primer drum track is not specified, drum tracks '
'will be generated from scratch.')
tf.app.flags.DEFINE_float(
'qpm', None,
'The quarters per minute to play generated output at. If a primer MIDI is '
'given, the qpm from that will override this flag. If qpm is None, qpm '
'will default to 120.')
tf.app.flags.DEFINE_float(
'temperature', 1.0,
'The randomness of the generated drum tracks. 1.0 uses the unaltered '
'softmax probabilities, greater than 1.0 makes tracks more random, less '
'than 1.0 makes tracks less random.')
tf.app.flags.DEFINE_integer(
'beam_size', 1,
'The beam size to use for beam search when generating drum tracks.')
tf.app.flags.DEFINE_integer(
'branch_factor', 1,
'The branch factor to use for beam search when generating drum tracks.')
tf.app.flags.DEFINE_integer(
'steps_per_iteration', 1,
'The number of steps to take per beam search iteration.')
tf.app.flags.DEFINE_string(
'log', 'INFO',
'The threshold for what messages will be logged DEBUG, INFO, WARN, ERROR, '
'or FATAL.')
def get_checkpoint():
"""Get the training dir or checkpoint path to be used by the model."""
if ((FLAGS.run_dir or FLAGS.checkpoint_file) and
FLAGS.bundle_file and not FLAGS.save_generator_bundle):
raise magenta.music.SequenceGeneratorError(
'Cannot specify both bundle_file and run_dir or checkpoint_file')
if FLAGS.run_dir:
train_dir = os.path.join(os.path.expanduser(FLAGS.run_dir), 'train')
return train_dir
elif FLAGS.checkpoint_file:
return os.path.expanduser(FLAGS.checkpoint_file)
else:
return None
def get_bundle():
"""Returns a generator_pb2.GeneratorBundle object based read from bundle_file.
Returns:
Either a generator_pb2.GeneratorBundle or None if the bundle_file flag is
not set or the save_generator_bundle flag is set.
"""
if FLAGS.save_generator_bundle:
return None
if FLAGS.bundle_file is None:
return None
bundle_file = os.path.expanduser(FLAGS.bundle_file)
return magenta.music.read_bundle_file(bundle_file)
def run_with_flags(generator):
"""Generates drum tracks and saves them as MIDI files.
Uses the options specified by the flags defined in this module.
Args:
generator: The DrumsRnnSequenceGenerator to use for generation.
"""
if not FLAGS.output_dir:
tf.logging.fatal('--output_dir required')
return
FLAGS.output_dir = os.path.expanduser(FLAGS.output_dir)
primer_midi = None
if FLAGS.primer_midi:
primer_midi = os.path.expanduser(FLAGS.primer_midi)
if not tf.gfile.Exists(FLAGS.output_dir):
tf.gfile.MakeDirs(FLAGS.output_dir)
primer_sequence = None
qpm = FLAGS.qpm if FLAGS.qpm else magenta.music.DEFAULT_QUARTERS_PER_MINUTE
if FLAGS.primer_drums:
primer_drums = magenta.music.DrumTrack(
[frozenset(pitches)
for pitches in ast.literal_eval(FLAGS.primer_drums)])
primer_sequence = primer_drums.to_sequence(qpm=qpm)
elif primer_midi:
primer_sequence = magenta.music.midi_file_to_sequence_proto(primer_midi)
if primer_sequence.tempos and primer_sequence.tempos[0].qpm:
qpm = primer_sequence.tempos[0].qpm
else:
tf.logging.warning(
'No priming sequence specified. Defaulting to a single bass drum hit.')
primer_drums = magenta.music.DrumTrack([frozenset([36])])
primer_sequence = primer_drums.to_sequence(qpm=qpm)
# Derive the total number of seconds to generate based on the QPM of the
# priming sequence and the num_steps flag.
seconds_per_step = 60.0 / qpm / generator.steps_per_quarter
total_seconds = FLAGS.num_steps * seconds_per_step
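  # Worked example: with the default qpm of 120 and 4 steps per quarter
  # (16th-note steps, as the num_steps flag help describes),
  # seconds_per_step = 60 / 120 / 4 = 0.125, so the default num_steps of 128
  # gives total_seconds = 16.0.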
# Specify start/stop time for generation based on starting generation at the
# end of the priming sequence and continuing until the sequence is num_steps
# long.
generator_options = generator_pb2.GeneratorOptions()
if primer_sequence:
input_sequence = primer_sequence
# Set the start time to begin on the next step after the last note ends.
if primer_sequence.notes:
last_end_time = max(n.end_time for n in primer_sequence.notes)
else:
last_end_time = 0
generate_section = generator_options.generate_sections.add(
start_time=last_end_time + seconds_per_step,
end_time=total_seconds)
if generate_section.start_time >= generate_section.end_time:
tf.logging.fatal(
'Priming sequence is longer than the total number of steps '
'requested: Priming sequence length: %s, Generation length '
'requested: %s',
generate_section.start_time, total_seconds)
return
else:
input_sequence = music_pb2.NoteSequence()
input_sequence.tempos.add().qpm = qpm
generate_section = generator_options.generate_sections.add(
start_time=0,
end_time=total_seconds)
generator_options.args['temperature'].float_value = FLAGS.temperature
generator_options.args['beam_size'].int_value = FLAGS.beam_size
generator_options.args['branch_factor'].int_value = FLAGS.branch_factor
generator_options.args[
'steps_per_iteration'].int_value = FLAGS.steps_per_iteration
tf.logging.debug('input_sequence: %s', input_sequence)
tf.logging.debug('generator_options: %s', generator_options)
# Make the generate request num_outputs times and save the output as midi
# files.
date_and_time = time.strftime('%Y-%m-%d_%H%M%S')
digits = len(str(FLAGS.num_outputs))
for i in range(FLAGS.num_outputs):
generated_sequence = generator.generate(input_sequence, generator_options)
midi_filename = '%s_%s.mid' % (date_and_time, str(i + 1).zfill(digits))
midi_path = os.path.join(FLAGS.output_dir, midi_filename)
magenta.music.sequence_proto_to_midi_file(generated_sequence, midi_path)
tf.logging.info('Wrote %d MIDI files to %s',
FLAGS.num_outputs, FLAGS.output_dir)
def main(unused_argv):
"""Saves bundle or runs generator based on flags."""
tf.logging.set_verbosity(FLAGS.log)
bundle = get_bundle()
if bundle:
config_id = bundle.generator_details.id
config = drums_rnn_model.default_configs[config_id]
config.hparams.parse(FLAGS.hparams)
else:
config = drums_rnn_config_flags.config_from_flags()
# Having too large of a batch size will slow generation down unnecessarily.
config.hparams.batch_size = min(
config.hparams.batch_size, FLAGS.beam_size * FLAGS.branch_factor)
generator = drums_rnn_sequence_generator.DrumsRnnSequenceGenerator(
model=drums_rnn_model.DrumsRnnModel(config),
details=config.details,
steps_per_quarter=config.steps_per_quarter,
checkpoint=get_checkpoint(),
bundle=bundle)
if FLAGS.save_generator_bundle:
bundle_filename = os.path.expanduser(FLAGS.bundle_file)
if FLAGS.bundle_description is None:
tf.logging.warning('No bundle description provided.')
tf.logging.info('Saving generator bundle to %s', bundle_filename)
generator.create_bundle_file(bundle_filename, FLAGS.bundle_description)
else:
run_with_flags(generator)
def console_entry_point():
tf.app.run(main)
if __name__ == '__main__':
console_entry_point()
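# Illustrative invocation (a sketch, not from the original source; the console
# script name drums_rnn_generate and the bundle path are assumptions, while the
# flags are the ones referenced above):
#
#   drums_rnn_generate \
#       --bundle_file=/path/to/drum_kit_rnn.mag \
#       --output_dir=/tmp/drums_rnn/generated \
#       --num_outputs=10 \
#       --num_steps=128 \
#       --primer_drums="[(36,)]"
#
# --primer_drums is parsed with ast.literal_eval into a list of pitch sets, so
# "[(36,)]" primes generation with the same single bass-drum hit used as the
# default above.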
| 38.673004
| 80
| 0.737784
|
12af7493069d5c510deae0126430e7122645a872
| 1,751
|
py
|
Python
|
py/hash/test_hash_files.py
|
lakshmi2005/buck
|
012a59d5d2e5a45b483e85fb190d2b67ea0c56ab
|
[
"Apache-2.0"
] | 1
|
2018-02-28T06:26:56.000Z
|
2018-02-28T06:26:56.000Z
|
py/hash/test_hash_files.py
|
lakshmi2005/buck
|
012a59d5d2e5a45b483e85fb190d2b67ea0c56ab
|
[
"Apache-2.0"
] | 1
|
2018-12-10T15:54:22.000Z
|
2018-12-10T19:30:37.000Z
|
py/hash/test_hash_files.py
|
lakshmi2005/buck
|
012a59d5d2e5a45b483e85fb190d2b67ea0c56ab
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015-present Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os.path
import shutil
import tempfile
import unittest
from .hash_files import hash_files
class TestHashFiles(unittest.TestCase):
def setUp(self):
self.test_dir = tempfile.mkdtemp()
def tearDown(self):
# Remove the directory after the test
shutil.rmtree(self.test_dir)
def __write_to_file(self, filename, content):
with open(filename, 'w') as input_file:
input_file.write(content)
def test_hash_file(self):
file_to_hash_name = os.path.join(self.test_dir, 'file_to_hash')
self.__write_to_file(file_to_hash_name, 'Some text to hash')
        self.assertEqual('3eecc85e6440899b28a9ea6d8369f01c',
                         hash_files([file_to_hash_name]))
def test_hash_multiple_file(self):
files_to_hash = []
for i in range(1, 5):
file_to_hash_name = os.path.join(self.test_dir, 'file_to_hash_%s' % i)
self.__write_to_file(file_to_hash_name, 'Some text to hash')
files_to_hash.append(file_to_hash_name)
        self.assertEqual('93d3c1c8adf801b7bb80b37ffeb73965',
                         hash_files(files_to_hash))
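# Running the suite (illustrative; the dotted module path follows the
# repository location py/hash/test_hash_files.py recorded in the metadata, and
# assumes hash_files returns a hex digest string, as the assertions imply):
#
#   python -m unittest py.hash.test_hash_files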
| 33.673077
| 82
| 0.698458
|
5ef4ca0dd05d1ceeda4c92325042af78fb11acab
| 4,146
|
py
|
Python
|
compiler-rt/test/profile/lit.cfg.py
|
ornata/llvm-project
|
494913b8b4e4bce0b3525e5569d8e486e82b9a52
|
[
"Apache-2.0"
] | null | null | null |
compiler-rt/test/profile/lit.cfg.py
|
ornata/llvm-project
|
494913b8b4e4bce0b3525e5569d8e486e82b9a52
|
[
"Apache-2.0"
] | null | null | null |
compiler-rt/test/profile/lit.cfg.py
|
ornata/llvm-project
|
494913b8b4e4bce0b3525e5569d8e486e82b9a52
|
[
"Apache-2.0"
] | null | null | null |
# -*- Python -*-
import os
def get_required_attr(config, attr_name):
attr_value = getattr(config, attr_name, None)
  if attr_value is None:
lit_config.fatal(
"No attribute %r in test configuration! You may need to run "
"tests from your build directory or add this attribute "
"to lit.site.cfg.py " % attr_name)
return attr_value
# Setup config name.
config.name = 'Profile-' + config.target_arch
# Setup source root.
config.test_source_root = os.path.dirname(__file__)
# Setup executable root.
if hasattr(config, 'profile_lit_binary_dir') and \
config.profile_lit_binary_dir is not None:
config.test_exec_root = os.path.join(config.profile_lit_binary_dir, config.name)
if config.host_os in ['Linux']:
extra_link_flags = ["-ldl"]
elif config.host_os in ['Windows']:
# InstrProf is incompatible with incremental linking. Disable it as a
# workaround.
extra_link_flags = ["-Wl,-incremental:no"]
else:
extra_link_flags = []
# Test suffixes.
config.suffixes = ['.c', '.cpp', '.m', '.mm', '.ll', '.test']
# What to exclude.
config.excludes = ['Inputs']
# Clang flags.
target_cflags = [get_required_attr(config, "target_cflags")]
clang_cflags = target_cflags + extra_link_flags
clang_cxxflags = config.cxx_mode_flags + clang_cflags
def build_invocation(compile_flags, with_lto = False):
lto_flags = []
if with_lto and config.lto_supported:
lto_flags += config.lto_flags
return " " + " ".join([config.clang] + lto_flags + compile_flags) + " "
def exclude_unsupported_files_for_aix(dirname):
for filename in os.listdir(dirname):
    source_path = os.path.join(dirname, filename)
if os.path.isdir(source_path):
continue
f = open(source_path, 'r')
try:
data = f.read()
# -fprofile-instr-generate and rpath are not supported on AIX, exclude all tests with them.
if ("%clang_profgen" in data or "%clangxx_profgen" in data or "-rpath" in data):
config.excludes += [ filename ]
finally:
f.close()
# Add clang substitutions.
config.substitutions.append( ("%clang ", build_invocation(clang_cflags)) )
config.substitutions.append( ("%clangxx ", build_invocation(clang_cxxflags)) )
config.substitutions.append( ("%clang_profgen ", build_invocation(clang_cflags) + " -fprofile-instr-generate ") )
config.substitutions.append( ("%clang_profgen=", build_invocation(clang_cflags) + " -fprofile-instr-generate=") )
config.substitutions.append( ("%clang_pgogen ", build_invocation(clang_cflags) + " -fprofile-generate ") )
config.substitutions.append( ("%clang_pgogen=", build_invocation(clang_cflags) + " -fprofile-generate=") )
config.substitutions.append( ("%clangxx_profgen ", build_invocation(clang_cxxflags) + " -fprofile-instr-generate ") )
config.substitutions.append( ("%clangxx_profgen=", build_invocation(clang_cxxflags) + " -fprofile-instr-generate=") )
config.substitutions.append( ("%clangxx_pgogen ", build_invocation(clang_cxxflags) + " -fprofile-generate ") )
config.substitutions.append( ("%clangxx_pgogen=", build_invocation(clang_cxxflags) + " -fprofile-generate=") )
config.substitutions.append( ("%clang_profgen_gcc=", build_invocation(clang_cflags) + " -fprofile-generate=") )
config.substitutions.append( ("%clang_profuse_gcc=", build_invocation(clang_cflags) + " -fprofile-use=") )
config.substitutions.append( ("%clang_profuse=", build_invocation(clang_cflags) + " -fprofile-instr-use=") )
config.substitutions.append( ("%clangxx_profuse=", build_invocation(clang_cxxflags) + " -fprofile-instr-use=") )
config.substitutions.append( ("%clang_lto_profgen=", build_invocation(clang_cflags, True) + " -fprofile-instr-generate=") )
if config.host_os not in ['Windows', 'Darwin', 'FreeBSD', 'Linux', 'NetBSD', 'SunOS', 'AIX']:
config.unsupported = True
if config.host_os in ['AIX']:
config.available_features.add('system-aix')
exclude_unsupported_files_for_aix(config.test_source_root)
exclude_unsupported_files_for_aix(config.test_source_root + "/Posix")
if config.target_arch in ['armv7l']:
config.unsupported = True
if config.android:
config.unsupported = True
| 41.878788
| 123
| 0.723589
|
26b525667720996a220256c15ce2aa77c31aac1c
| 149
|
py
|
Python
|
src/mathlib1/dot.py
|
PartehDev/mathlib1
|
65047d1d5c4ecddee4c889786552592be1d1ea94
|
[
"MIT"
] | null | null | null |
src/mathlib1/dot.py
|
PartehDev/mathlib1
|
65047d1d5c4ecddee4c889786552592be1d1ea94
|
[
"MIT"
] | null | null | null |
src/mathlib1/dot.py
|
PartehDev/mathlib1
|
65047d1d5c4ecddee4c889786552592be1d1ea94
|
[
"MIT"
] | null | null | null |
def dotproduct(vec1, vec2):
    # Sum of pairwise products; zip stops at the shorter vector.
    result = 0
    for x, y in zip(vec1, vec2):
        result += x * y
    return result
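# Example (illustrative, not part of the original module):
#   dotproduct([1, 2, 3], [4, 5, 6])  # -> 1*4 + 2*5 + 3*6 == 32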
| 29.8
| 42
| 0.671141
|
81b3eb453e61f542ed2c28ad75d17ed40834c915
| 2,759
|
py
|
Python
|
yumipy/yumi_state.py
|
allenyh/yumipy
|
9914fa8d501c683b6ad5bd6976cfc522234cfd18
|
[
"Apache-2.0"
] | 58
|
2017-04-06T16:39:15.000Z
|
2022-03-04T01:19:06.000Z
|
yumipy/yumi_state.py
|
allenyh/yumipy
|
9914fa8d501c683b6ad5bd6976cfc522234cfd18
|
[
"Apache-2.0"
] | 31
|
2017-05-11T04:01:36.000Z
|
2021-12-02T12:11:14.000Z
|
yumipy/yumi_state.py
|
allenyh/yumipy
|
9914fa8d501c683b6ad5bd6976cfc522234cfd18
|
[
"Apache-2.0"
] | 41
|
2017-04-06T16:39:18.000Z
|
2021-12-19T00:18:42.000Z
|
'''
State Encapsulation for YuMi robot
Author: Jacky
'''
import numpy as np
class YuMiState:
""" Object that encapsulates a yumi arm joint angle configuration.
"""
NUM_JOINTS = 7
NAME = "YuMi"
    def __init__(self, vals=(0,) * NUM_JOINTS):
for i, val in enumerate(vals):
setattr(self, '_joint{0}'.format(i+1), val)
def __str__(self):
return str(self.joints)
def __repr__(self):
return "YuMiState({0})".format(self.joints)
@property
def joints(self):
        joints = [getattr(self, '_joint{0}'.format(i + 1)) for i in range(YuMiState.NUM_JOINTS)]
return joints
@property
def in_radians(self):
return [np.pi / 180.0 * t for t in self.joints]
@property
def in_degrees(self):
return self.joints
@property
def joint1(self):
return self._joint1
@joint1.setter
def joint1(self, val):
self._joint1 = val
@property
def joint2(self):
return self._joint2
@joint2.setter
def joint2(self, val):
self._joint2 = val
@property
def joint3(self):
return self._joint3
@joint3.setter
def joint3(self, val):
self._joint3 = val
@property
def joint4(self):
return self._joint4
@joint4.setter
def joint4(self, val):
self._joint4 = val
@property
def joint5(self):
return self._joint5
@joint5.setter
def joint5(self, val):
self._joint5 = val
@property
def joint6(self):
return self._joint6
@joint6.setter
def joint6(self, val):
self._joint6 = val
@property
def joint7(self):
return self._joint7
@joint7.setter
def joint7(self, val):
self._joint7 = val
@property
def joints(self):
joints = [getattr(self, 'joint{0}'.format(i+1)) for i in range(YuMiState.NUM_JOINTS)]
return joints
def copy(self):
return YuMiState(self.joints)
def mirror(self):
return YuMiState([
self.joint1 * -1,
self.joint2,
self.joint3,
self.joint4 * -1,
self.joint5,
self.joint6,
self.joint7 * -1
])
def __str__(self):
return str(self.joints)
def __repr__(self):
return "YuMiState({0})".format(str(self.joints))
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other):
if isinstance(other, self.__class__):
return not self.__eq__(other)
return NotImplemented
def __hash__(self):
return hash(tuple(sorted(self.__dict__.items())))
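# Illustrative usage (a sketch; the joint values are arbitrary, and per the
# in_degrees property above the stored values are interpreted as degrees):
#   state = YuMiState([0, -130, 30, 0, 40, 0, 135])
#   state.joint1 = 10                # per-joint setters
#   mirrored = state.mirror()        # negates joints 1, 4, and 7
#   radians = state.in_radians       # degrees -> radians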
| 21.223077
| 93
| 0.58137
|
05346a1b1b094bef468464684d33a90ecdeac9cc
| 758
|
py
|
Python
|
text/symbols.py
|
riderjensen/tacotron2
|
6d0116fc6c234e72e485c7f81bb6fc5525661ca8
|
[
"BSD-3-Clause"
] | 5
|
2020-08-07T21:41:13.000Z
|
2020-12-24T02:11:34.000Z
|
text/symbols.py
|
riderjensen/tacotron2
|
6d0116fc6c234e72e485c7f81bb6fc5525661ca8
|
[
"BSD-3-Clause"
] | null | null | null |
text/symbols.py
|
riderjensen/tacotron2
|
6d0116fc6c234e72e485c7f81bb6fc5525661ca8
|
[
"BSD-3-Clause"
] | 2
|
2020-07-29T14:56:53.000Z
|
2021-12-02T05:54:23.000Z
|
""" from https://github.com/keithito/tacotron """
'''
Defines the set of symbols used in text input to the model.
The default is a set of ASCII characters that works well for English or text
that has been run through Unidecode. For other data, you can modify
_characters. See TRAINING_DATA.md for details.
'''
from text import cmudict
_pad = '_'
_punctuation = '!\'(),.:;? '
_special = '-'
_letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzəeɪɑæəɔaʊaɪʧðɛərhɪʤŋoʊɔɪʃθʊuʒijɹɡʌ'
# Prepend "@" to ARPAbet symbols to ensure uniqueness (some are the same as uppercase letters):
_arpabet = ['@' + s for s in cmudict.valid_symbols]
# Export all symbols:
symbols = [_pad] + list(_special) + list(_punctuation) + list(_letters) + _arpabet
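# For example (illustrative): symbols[0] is the padding token '_', and each
# ARPAbet entry carries the '@' prefix added above, e.g. '@AA' (assuming 'AA'
# is among cmudict.valid_symbols).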
| 37.9
| 195
| 0.737467
|
f363f8f3f40fa78911c34d3ba2fd35d050991d57
| 253,507
|
py
|
Python
|
release/scripts/startup/bl_ui/space_view3d.py
|
ACFX/Blender
|
79aa29ce663fbd552062fae520dcc3539be67bab
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
release/scripts/startup/bl_ui/space_view3d.py
|
ACFX/Blender
|
79aa29ce663fbd552062fae520dcc3539be67bab
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
release/scripts/startup/bl_ui/space_view3d.py
|
ACFX/Blender
|
79aa29ce663fbd552062fae520dcc3539be67bab
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import bpy
from bpy.types import (
Header,
Menu,
Panel,
)
from bl_ui.properties_paint_common import (
UnifiedPaintPanel,
brush_basic_texpaint_settings,
)
from bl_ui.properties_grease_pencil_common import (
AnnotationDataPanel,
AnnotationOnionSkin,
GreasePencilMaterialsPanel,
GreasePencilVertexcolorPanel,
)
from bl_ui.space_toolsystem_common import (
ToolActivePanelHelper,
)
from bpy.app.translations import contexts as i18n_contexts
class VIEW3D_HT_tool_header(Header):
bl_space_type = 'VIEW_3D'
bl_region_type = 'TOOL_HEADER'
def draw(self, context):
layout = self.layout
layout.row(align=True).template_header()
self.draw_tool_settings(context)
layout.separator_spacer()
VIEW3D_HT_header.draw_xform_template(layout, context)
layout.separator_spacer()
self.draw_mode_settings(context)
def draw_tool_settings(self, context):
layout = self.layout
tool_mode = context.mode
# Active Tool
# -----------
from bl_ui.space_toolsystem_common import ToolSelectPanelHelper
tool = ToolSelectPanelHelper.draw_active_tool_header(
context, layout,
tool_key=('VIEW_3D', tool_mode),
)
# Object Mode Options
# -------------------
# Example of how tool_settings can be accessed as pop-overs.
        # TODO(campbell): editing options should be after active tool options
        # (obviously separated from the user's POV)
draw_fn = getattr(_draw_tool_settings_context_mode, tool_mode, None)
if draw_fn is not None:
is_valid_context = draw_fn(context, layout, tool)
def draw_3d_brush_settings(layout, tool_mode):
layout.popover("VIEW3D_PT_tools_brush_settings_advanced", text="Brush")
if tool_mode != 'PAINT_WEIGHT':
layout.popover("VIEW3D_PT_tools_brush_texture")
if tool_mode == 'PAINT_TEXTURE':
layout.popover("VIEW3D_PT_tools_mask_texture")
layout.popover("VIEW3D_PT_tools_brush_stroke")
layout.popover("VIEW3D_PT_tools_brush_falloff")
layout.popover("VIEW3D_PT_tools_brush_display")
# Note: general mode options should be added to 'draw_mode_settings'.
if tool_mode == 'SCULPT':
if is_valid_context:
draw_3d_brush_settings(layout, tool_mode)
elif tool_mode == 'PAINT_VERTEX':
if is_valid_context:
draw_3d_brush_settings(layout, tool_mode)
elif tool_mode == 'PAINT_WEIGHT':
if is_valid_context:
draw_3d_brush_settings(layout, tool_mode)
elif tool_mode == 'PAINT_TEXTURE':
if is_valid_context:
draw_3d_brush_settings(layout, tool_mode)
elif tool_mode == 'EDIT_ARMATURE':
pass
elif tool_mode == 'EDIT_CURVE':
pass
elif tool_mode == 'EDIT_MESH':
pass
elif tool_mode == 'POSE':
pass
elif tool_mode == 'PARTICLE':
# Disable, only shows "Brush" panel, which is already in the top-bar.
# if tool.has_datablock:
# layout.popover_group(context=".paint_common", **popover_kw)
pass
elif tool_mode == 'PAINT_GPENCIL':
if is_valid_context:
brush = context.tool_settings.gpencil_paint.brush
if brush.gpencil_tool != 'ERASE':
if brush.gpencil_tool != 'TINT':
layout.popover("VIEW3D_PT_tools_grease_pencil_brush_advanced")
if brush.gpencil_tool not in {'FILL', 'TINT'}:
layout.popover("VIEW3D_PT_tools_grease_pencil_brush_stroke")
layout.popover("VIEW3D_PT_tools_grease_pencil_paint_appearance")
elif tool_mode == 'SCULPT_GPENCIL':
if is_valid_context:
brush = context.tool_settings.gpencil_sculpt_paint.brush
tool = brush.gpencil_tool
if tool in {'SMOOTH', 'RANDOMIZE'}:
layout.popover("VIEW3D_PT_tools_grease_pencil_sculpt_options")
layout.popover("VIEW3D_PT_tools_grease_pencil_sculpt_appearance")
elif tool_mode == 'WEIGHT_GPENCIL':
if is_valid_context:
layout.popover("VIEW3D_PT_tools_grease_pencil_weight_appearance")
elif tool_mode == 'VERTEX_GPENCIL':
if is_valid_context:
layout.popover("VIEW3D_PT_tools_grease_pencil_vertex_appearance")
def draw_mode_settings(self, context):
layout = self.layout
mode_string = context.mode
def row_for_mirror():
row = layout.row(align=True)
row.label(icon='MOD_MIRROR')
sub = row.row(align=True)
sub.scale_x = 0.6
return row, sub
if mode_string == 'EDIT_MESH':
_row, sub = row_for_mirror()
sub.prop(context.object.data, "use_mirror_x", text="X", toggle=True)
sub.prop(context.object.data, "use_mirror_y", text="Y", toggle=True)
sub.prop(context.object.data, "use_mirror_z", text="Z", toggle=True)
tool_settings = context.tool_settings
layout.prop(tool_settings, "use_mesh_automerge", text="")
elif mode_string == 'EDIT_ARMATURE':
_row, sub = row_for_mirror()
sub.prop(context.object.data, "use_mirror_x", text="X", toggle=True)
elif mode_string == 'POSE':
_row, sub = row_for_mirror()
sub.prop(context.object.pose, "use_mirror_x", text="X", toggle=True)
elif mode_string == 'PAINT_WEIGHT':
row, sub = row_for_mirror()
wpaint = context.tool_settings.weight_paint
sub.prop(wpaint, "use_symmetry_x", text="X", toggle=True)
sub.prop(wpaint, "use_symmetry_y", text="Y", toggle=True)
sub.prop(wpaint, "use_symmetry_z", text="Z", toggle=True)
row.popover(panel="VIEW3D_PT_tools_weightpaint_symmetry_for_topbar", text="")
elif mode_string == 'SCULPT':
row, sub = row_for_mirror()
sculpt = context.tool_settings.sculpt
sub.prop(sculpt, "use_symmetry_x", text="X", toggle=True)
sub.prop(sculpt, "use_symmetry_y", text="Y", toggle=True)
sub.prop(sculpt, "use_symmetry_z", text="Z", toggle=True)
row.popover(panel="VIEW3D_PT_sculpt_symmetry_for_topbar", text="")
elif mode_string == 'PAINT_TEXTURE':
_row, sub = row_for_mirror()
ipaint = context.tool_settings.image_paint
sub.prop(ipaint, "use_symmetry_x", text="X", toggle=True)
sub.prop(ipaint, "use_symmetry_y", text="Y", toggle=True)
sub.prop(ipaint, "use_symmetry_z", text="Z", toggle=True)
# No need for a popover, the panel only has these options.
elif mode_string == 'PAINT_VERTEX':
row, sub = row_for_mirror()
vpaint = context.tool_settings.vertex_paint
sub.prop(vpaint, "use_symmetry_x", text="X", toggle=True)
sub.prop(vpaint, "use_symmetry_y", text="Y", toggle=True)
sub.prop(vpaint, "use_symmetry_z", text="Z", toggle=True)
row.popover(panel="VIEW3D_PT_tools_vertexpaint_symmetry_for_topbar", text="")
# Expand panels from the side-bar as popovers.
popover_kw = {"space_type": 'VIEW_3D', "region_type": 'UI', "category": "Tool"}
if mode_string == 'SCULPT':
layout.popover_group(context=".sculpt_mode", **popover_kw)
elif mode_string == 'PAINT_VERTEX':
layout.popover_group(context=".vertexpaint", **popover_kw)
elif mode_string == 'PAINT_WEIGHT':
layout.popover_group(context=".weightpaint", **popover_kw)
elif mode_string == 'PAINT_TEXTURE':
layout.popover_group(context=".imagepaint", **popover_kw)
elif mode_string == 'EDIT_TEXT':
layout.popover_group(context=".text_edit", **popover_kw)
elif mode_string == 'EDIT_ARMATURE':
layout.popover_group(context=".armature_edit", **popover_kw)
elif mode_string == 'EDIT_METABALL':
layout.popover_group(context=".mball_edit", **popover_kw)
elif mode_string == 'EDIT_LATTICE':
layout.popover_group(context=".lattice_edit", **popover_kw)
elif mode_string == 'EDIT_CURVE':
layout.popover_group(context=".curve_edit", **popover_kw)
elif mode_string == 'EDIT_MESH':
layout.popover_group(context=".mesh_edit", **popover_kw)
elif mode_string == 'POSE':
layout.popover_group(context=".posemode", **popover_kw)
elif mode_string == 'PARTICLE':
layout.popover_group(context=".particlemode", **popover_kw)
elif mode_string == 'OBJECT':
layout.popover_group(context=".objectmode", **popover_kw)
elif mode_string in {'PAINT_GPENCIL', 'EDIT_GPENCIL', 'SCULPT_GPENCIL', 'WEIGHT_GPENCIL'}:
# Grease pencil layer.
gpl = context.active_gpencil_layer
if gpl and gpl.info is not None:
text = gpl.info
maxw = 25
if len(text) > maxw:
text = text[:maxw - 5] + '..' + text[-3:]
else:
text = ""
layout.label(text="Layer:")
sub = layout.row()
sub.ui_units_x = 8
sub.popover(
panel="TOPBAR_PT_gpencil_layers",
text=text,
)
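            # Illustrative: with maxw == 25, a 30-character layer name is shown
            # as its first 20 characters + '..' + its last 3 (25 chars total).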
class _draw_tool_settings_context_mode:
@staticmethod
def SCULPT(context, layout, tool):
if (tool is None) or (not tool.has_datablock):
return False
paint = context.tool_settings.sculpt
layout.template_ID_preview(paint, "brush", rows=3, cols=8, hide_buttons=True)
brush = paint.brush
if brush is None:
return False
tool_settings = context.tool_settings
capabilities = brush.sculpt_capabilities
ups = tool_settings.unified_paint_settings
size = "size"
size_owner = ups if ups.use_unified_size else brush
if size_owner.use_locked_size == 'SCENE':
size = "unprojected_radius"
UnifiedPaintPanel.prop_unified(
layout,
context,
brush,
size,
pressure_name="use_pressure_size",
unified_name="use_unified_size",
text="Radius",
slider=True,
header=True
)
# strength, use_strength_pressure
pressure_name = "use_pressure_strength" if capabilities.has_strength_pressure else None
UnifiedPaintPanel.prop_unified(
layout,
context,
brush,
"strength",
pressure_name=pressure_name,
unified_name="use_unified_strength",
text="Strength",
header=True
)
# direction
if not capabilities.has_direction:
layout.row().prop(brush, "direction", expand=True, text="")
if capabilities.has_color:
            UnifiedPaintPanel.prop_unified_color(layout, context, brush, "color", text="")
            layout.prop(brush, "blend", text="", expand=False)
return True
@staticmethod
def PAINT_TEXTURE(context, layout, tool):
if (tool is None) or (not tool.has_datablock):
return False
paint = context.tool_settings.image_paint
layout.template_ID_preview(paint, "brush", rows=3, cols=8, hide_buttons=True)
brush = paint.brush
if brush is None:
return False
brush_basic_texpaint_settings(layout, context, brush, compact=True)
return True
@staticmethod
def PAINT_VERTEX(context, layout, tool):
if (tool is None) or (not tool.has_datablock):
return False
paint = context.tool_settings.vertex_paint
layout.template_ID_preview(paint, "brush", rows=3, cols=8, hide_buttons=True)
brush = paint.brush
if brush is None:
return False
brush_basic_texpaint_settings(layout, context, brush, compact=True)
return True
@staticmethod
def PAINT_WEIGHT(context, layout, tool):
if (tool is None) or (not tool.has_datablock):
return False
paint = context.tool_settings.weight_paint
layout.template_ID_preview(paint, "brush", rows=3, cols=8, hide_buttons=True)
brush = paint.brush
if brush is None:
return False
capabilities = brush.weight_paint_capabilities
if capabilities.has_weight:
UnifiedPaintPanel.prop_unified(
layout,
context,
brush,
"weight",
unified_name="use_unified_weight",
slider=True,
header=True
)
UnifiedPaintPanel.prop_unified(
layout,
context,
brush,
"size",
pressure_name="use_pressure_size",
unified_name="use_unified_size",
slider=True,
text="Radius",
header=True
)
UnifiedPaintPanel.prop_unified(
layout,
context,
brush,
"strength",
pressure_name="use_pressure_strength",
unified_name="use_unified_strength",
header=True
)
return True
@staticmethod
def PAINT_GPENCIL(context, layout, tool):
if tool is None:
return False
# is_paint = True
# FIXME: tools must use their own UI drawing!
if tool.idname in {
"builtin.line",
"builtin.box",
"builtin.circle",
"builtin.arc",
"builtin.curve",
"builtin.polyline",
}:
# is_paint = False
pass
elif tool.idname == "builtin.cutter":
row = layout.row(align=True)
row.prop(context.tool_settings.gpencil_sculpt, "intersection_threshold")
return False
elif not tool.has_datablock:
return False
paint = context.tool_settings.gpencil_paint
brush = paint.brush
if brush is None:
return False
gp_settings = brush.gpencil_settings
def draw_color_selector():
ma = gp_settings.material
row = layout.row(align=True)
if not gp_settings.use_material_pin:
ma = context.object.active_material
icon_id = 0
if ma:
icon_id = ma.id_data.preview.icon_id
txt_ma = ma.name
maxw = 25
if len(txt_ma) > maxw:
txt_ma = txt_ma[:maxw - 5] + '..' + txt_ma[-3:]
else:
txt_ma = ""
sub = row.row()
sub.ui_units_x = 8
sub.popover(
panel="TOPBAR_PT_gpencil_materials",
text=txt_ma,
icon_value=icon_id,
)
row.prop(gp_settings, "use_material_pin", text="")
if brush.gpencil_tool in {'DRAW', 'FILL'}:
row.separator(factor=1.0)
                sub_row = row.row(align=True)
                sub_row.prop_enum(settings, "color_mode", 'MATERIAL', text="", icon='MATERIAL')
                sub_row.prop_enum(settings, "color_mode", 'VERTEXCOLOR', text="", icon='VPAINT_HLT')
sub_row = row.row(align=True)
sub_row.enabled = settings.color_mode == 'VERTEXCOLOR'
sub_row.prop_with_popover(brush, "color", text="", panel="TOPBAR_PT_gpencil_vertexcolor")
row = layout.row(align=True)
tool_settings = context.scene.tool_settings
settings = tool_settings.gpencil_paint
row.template_ID_preview(settings, "brush", rows=3, cols=8, hide_buttons=True)
if context.object and brush.gpencil_tool in {'FILL', 'DRAW'}:
draw_color_selector()
if context.object and brush.gpencil_tool == 'TINT':
row.separator(factor=0.4)
row.prop_with_popover(brush, "color", text="", panel="TOPBAR_PT_gpencil_vertexcolor")
from bl_ui.properties_paint_common import (
brush_basic_gpencil_paint_settings,
)
brush_basic_gpencil_paint_settings(layout, context, brush, compact=True)
return True
@staticmethod
def SCULPT_GPENCIL(context, layout, tool):
if (tool is None) or (not tool.has_datablock):
return False
paint = context.tool_settings.gpencil_sculpt_paint
brush = paint.brush
from bl_ui.properties_paint_common import (
brush_basic_gpencil_sculpt_settings,
)
brush_basic_gpencil_sculpt_settings(layout, context, brush, compact=True)
return True
@staticmethod
def WEIGHT_GPENCIL(context, layout, tool):
if (tool is None) or (not tool.has_datablock):
return False
paint = context.tool_settings.gpencil_weight_paint
brush = paint.brush
from bl_ui.properties_paint_common import (
brush_basic_gpencil_weight_settings,
)
brush_basic_gpencil_weight_settings(layout, context, brush, compact=True)
return True
@staticmethod
def VERTEX_GPENCIL(context, layout, tool):
if (tool is None) or (not tool.has_datablock):
return False
paint = context.tool_settings.gpencil_vertex_paint
brush = paint.brush
row = layout.row(align=True)
tool_settings = context.scene.tool_settings
settings = tool_settings.gpencil_vertex_paint
row.template_ID_preview(settings, "brush", rows=3, cols=8, hide_buttons=True)
if brush.gpencil_vertex_tool not in {'BLUR', 'AVERAGE', 'SMEAR'}:
row.separator(factor=0.4)
row.prop_with_popover(brush, "color", text="", panel="TOPBAR_PT_gpencil_vertexcolor")
from bl_ui.properties_paint_common import (
brush_basic_gpencil_vertex_settings,
)
brush_basic_gpencil_vertex_settings(layout, context, brush, compact=True)
return True
@staticmethod
def PARTICLE(context, layout, tool):
if (tool is None) or (not tool.has_datablock):
return False
# See: 'VIEW3D_PT_tools_brush', basically a duplicate
settings = context.tool_settings.particle_edit
brush = settings.brush
tool = settings.tool
if tool == 'NONE':
return False
layout.prop(brush, "size", slider=True)
if tool == 'ADD':
layout.prop(brush, "count")
layout.prop(settings, "use_default_interpolate")
layout.prop(brush, "steps", slider=True)
layout.prop(settings, "default_key_count", slider=True)
else:
layout.prop(brush, "strength", slider=True)
if tool == 'LENGTH':
layout.row().prop(brush, "length_mode", expand=True)
elif tool == 'PUFF':
layout.row().prop(brush, "puff_mode", expand=True)
layout.prop(brush, "use_puff_volume")
elif tool == 'COMB':
row = layout.row()
row.active = settings.is_editable
row.prop(settings, "use_emitter_deflect", text="Deflect Emitter")
sub = row.row(align=True)
sub.active = settings.use_emitter_deflect
sub.prop(settings, "emitter_distance", text="Distance")
return True
class VIEW3D_HT_header(Header):
bl_space_type = 'VIEW_3D'
@staticmethod
def draw_xform_template(layout, context):
obj = context.active_object
object_mode = 'OBJECT' if obj is None else obj.mode
has_pose_mode = (
(object_mode == 'POSE') or
(object_mode == 'WEIGHT_PAINT' and context.pose_object is not None)
)
tool_settings = context.tool_settings
# Mode & Transform Settings
scene = context.scene
# Orientation
if object_mode in {'OBJECT', 'EDIT', 'EDIT_GPENCIL'} or has_pose_mode:
orient_slot = scene.transform_orientation_slots[0]
row = layout.row(align=True)
sub = row.row()
sub.ui_units_x = 4
sub.prop_with_popover(
orient_slot,
"type",
text="",
panel="VIEW3D_PT_transform_orientations",
)
# Pivot
if object_mode in {'OBJECT', 'EDIT', 'EDIT_GPENCIL', 'SCULPT_GPENCIL'} or has_pose_mode:
layout.prop(tool_settings, "transform_pivot_point", text="", icon_only=True)
# Snap
show_snap = False
if obj is None:
show_snap = True
else:
if (object_mode not in {
'SCULPT', 'VERTEX_PAINT', 'WEIGHT_PAINT', 'TEXTURE_PAINT',
'PAINT_GPENCIL', 'SCULPT_GPENCIL', 'WEIGHT_GPENCIL', 'VERTEX_GPENCIL'
}) or has_pose_mode:
show_snap = True
else:
paint_settings = UnifiedPaintPanel.paint_settings(context)
if paint_settings:
brush = paint_settings.brush
if brush and hasattr(brush, "stroke_method") and brush.stroke_method == 'CURVE':
show_snap = True
if show_snap:
snap_items = bpy.types.ToolSettings.bl_rna.properties["snap_elements"].enum_items
snap_elements = tool_settings.snap_elements
if len(snap_elements) == 1:
text = ""
for elem in snap_elements:
icon = snap_items[elem].icon
break
else:
text = "Mix"
icon = 'NONE'
del snap_items, snap_elements
row = layout.row(align=True)
row.prop(tool_settings, "use_snap", text="")
sub = row.row(align=True)
sub.popover(
panel="VIEW3D_PT_snapping",
icon=icon,
text=text,
)
# Proportional editing
if object_mode in {'EDIT', 'PARTICLE_EDIT', 'SCULPT_GPENCIL', 'EDIT_GPENCIL', 'OBJECT'}:
row = layout.row(align=True)
kw = {}
if object_mode == 'OBJECT':
attr = "use_proportional_edit_objects"
else:
attr = "use_proportional_edit"
if tool_settings.use_proportional_edit:
if tool_settings.use_proportional_connected:
kw["icon"] = 'PROP_CON'
elif tool_settings.use_proportional_projected:
kw["icon"] = 'PROP_PROJECTED'
else:
kw["icon"] = 'PROP_ON'
else:
kw["icon"] = 'PROP_OFF'
row.prop(tool_settings, attr, icon_only=True, **kw)
sub = row.row(align=True)
sub.active = getattr(tool_settings, attr)
sub.prop_with_popover(
tool_settings,
"proportional_edit_falloff",
text="",
icon_only=True,
panel="VIEW3D_PT_proportional_edit",
)
def draw(self, context):
layout = self.layout
tool_settings = context.tool_settings
view = context.space_data
shading = view.shading
show_region_tool_header = view.show_region_tool_header
if not show_region_tool_header:
layout.row(align=True).template_header()
row = layout.row(align=True)
obj = context.active_object
# mode_string = context.mode
object_mode = 'OBJECT' if obj is None else obj.mode
has_pose_mode = (
(object_mode == 'POSE') or
(object_mode == 'WEIGHT_PAINT' and context.pose_object is not None)
)
# Note: This is actually deadly in case enum_items have to be dynamically generated
# (because internal RNA array iterator will free everything immediately...).
# XXX This is an RNA internal issue, not sure how to fix it.
# Note: Tried to add an accessor to get translated UI strings instead of manual call
# to pgettext_iface below, but this fails because translated enumitems
# are always dynamically allocated.
act_mode_item = bpy.types.Object.bl_rna.properties["mode"].enum_items[object_mode]
act_mode_i18n_context = bpy.types.Object.bl_rna.properties["mode"].translation_context
sub = row.row(align=True)
sub.ui_units_x = 5.5
sub.operator_menu_enum(
"object.mode_set", "mode",
text=bpy.app.translations.pgettext_iface(act_mode_item.name, act_mode_i18n_context),
icon=act_mode_item.icon,
)
del act_mode_item
layout.template_header_3D_mode()
# Contains buttons like Mode, Pivot, Layer, Mesh Select Mode...
if obj:
# Particle edit
if object_mode == 'PARTICLE_EDIT':
row = layout.row()
row.prop(tool_settings.particle_edit, "select_mode", text="", expand=True)
# Grease Pencil
if obj and obj.type == 'GPENCIL' and context.gpencil_data:
gpd = context.gpencil_data
if gpd.is_stroke_paint_mode:
row = layout.row()
sub = row.row(align=True)
sub.prop(tool_settings, "use_gpencil_draw_onback", text="", icon='MOD_OPACITY')
sub.separator(factor=0.4)
sub.prop(tool_settings, "use_gpencil_weight_data_add", text="", icon='WPAINT_HLT')
sub.separator(factor=0.4)
sub.prop(tool_settings, "use_gpencil_draw_additive", text="", icon='FREEZE')
# Select mode for Editing
if gpd.use_stroke_edit_mode:
row = layout.row(align=True)
row.prop(tool_settings, "gpencil_selectmode_edit", text="", expand=True)
# Select mode for Sculpt
if gpd.is_stroke_sculpt_mode:
row = layout.row(align=True)
row.prop(tool_settings, "use_gpencil_select_mask_point", text="")
row.prop(tool_settings, "use_gpencil_select_mask_stroke", text="")
row.prop(tool_settings, "use_gpencil_select_mask_segment", text="")
# Select mode for Vertex Paint
if gpd.is_stroke_vertex_mode:
row = layout.row(align=True)
row.prop(tool_settings, "use_gpencil_vertex_select_mask_point", text="")
row.prop(tool_settings, "use_gpencil_vertex_select_mask_stroke", text="")
row.prop(tool_settings, "use_gpencil_vertex_select_mask_segment", text="")
if (
gpd.use_stroke_edit_mode or
gpd.is_stroke_sculpt_mode or
gpd.is_stroke_weight_mode or
gpd.is_stroke_vertex_mode
):
row = layout.row(align=True)
row.prop(gpd, "use_multiedit", text="", icon='GP_MULTIFRAME_EDITING')
sub = row.row(align=True)
sub.active = gpd.use_multiedit
sub.popover(
panel="VIEW3D_PT_gpencil_multi_frame",
text="Multiframe",
)
if gpd.use_stroke_edit_mode:
row = layout.row(align=True)
row.popover(
panel="VIEW3D_PT_tools_grease_pencil_interpolate",
text="Interpolate",
)
overlay = view.overlay
VIEW3D_MT_editor_menus.draw_collapsible(context, layout)
layout.separator_spacer()
if object_mode in {'PAINT_GPENCIL', 'SCULPT_GPENCIL'}:
# Grease pencil
if object_mode == 'PAINT_GPENCIL':
layout.prop_with_popover(
tool_settings,
"gpencil_stroke_placement_view3d",
text="",
panel="VIEW3D_PT_gpencil_origin",
)
if object_mode in {'PAINT_GPENCIL', 'SCULPT_GPENCIL'}:
layout.prop_with_popover(
tool_settings.gpencil_sculpt,
"lock_axis",
text="",
panel="VIEW3D_PT_gpencil_lock",
)
if object_mode == 'PAINT_GPENCIL':
# FIXME: this is bad practice!
# Tool options are to be displayed in the topbar.
if context.workspace.tools.from_space_view3d_mode(object_mode).idname == "builtin_brush.Draw":
settings = tool_settings.gpencil_sculpt.guide
row = layout.row(align=True)
row.prop(settings, "use_guide", text="", icon='GRID')
sub = row.row(align=True)
sub.active = settings.use_guide
sub.popover(
panel="VIEW3D_PT_gpencil_guide",
text="Guides",
)
layout.separator_spacer()
elif not show_region_tool_header:
# Transform settings depending on tool header visibility
VIEW3D_HT_header.draw_xform_template(layout, context)
layout.separator_spacer()
# Viewport Settings
layout.popover(
panel="VIEW3D_PT_object_type_visibility",
icon_value=view.icon_from_show_object_viewport,
text="",
)
# Gizmo toggle & popover.
row = layout.row(align=True)
# FIXME: place-holder icon.
row.prop(view, "show_gizmo", text="", toggle=True, icon='GIZMO')
sub = row.row(align=True)
sub.active = view.show_gizmo
sub.popover(
panel="VIEW3D_PT_gizmo_display",
text="",
)
# Overlay toggle & popover.
row = layout.row(align=True)
row.prop(overlay, "show_overlays", icon='OVERLAY', text="")
sub = row.row(align=True)
sub.active = overlay.show_overlays
sub.popover(panel="VIEW3D_PT_overlay", text="")
row = layout.row()
row.active = (object_mode == 'EDIT') or (shading.type in {'WIREFRAME', 'SOLID'})
        # While exposing 'shading.show_xray(_wireframe)' is correct,
        # this hides the key shortcut from users: T70433.
if has_pose_mode:
draw_depressed = overlay.show_xray_bone
elif shading.type == 'WIREFRAME':
draw_depressed = shading.show_xray_wireframe
else:
draw_depressed = shading.show_xray
row.operator(
"view3d.toggle_xray",
text="",
icon='XRAY',
depress=draw_depressed,
)
row = layout.row(align=True)
row.prop(shading, "type", text="", expand=True)
sub = row.row(align=True)
        # TODO: currently the render shading type ignores mesh two-sidedness;
        # until it's supported, show the shading popover, which exposes the double-sided option.
# sub.enabled = shading.type != 'RENDERED'
sub.popover(panel="VIEW3D_PT_shading", text="")
class VIEW3D_MT_editor_menus(Menu):
bl_label = ""
def draw(self, context):
layout = self.layout
obj = context.active_object
mode_string = context.mode
edit_object = context.edit_object
gp_edit = obj and obj.mode in {'EDIT_GPENCIL', 'PAINT_GPENCIL', 'SCULPT_GPENCIL',
'WEIGHT_GPENCIL', 'VERTEX_GPENCIL'}
ts = context.scene.tool_settings
layout.menu("VIEW3D_MT_view")
# Select Menu
if gp_edit:
if mode_string not in {'PAINT_GPENCIL', 'WEIGHT_GPENCIL'}:
if mode_string == 'SCULPT_GPENCIL' and \
(ts.use_gpencil_select_mask_point or
ts.use_gpencil_select_mask_stroke or
ts.use_gpencil_select_mask_segment):
layout.menu("VIEW3D_MT_select_gpencil")
elif mode_string == 'EDIT_GPENCIL':
layout.menu("VIEW3D_MT_select_gpencil")
elif mode_string == 'VERTEX_GPENCIL':
layout.menu("VIEW3D_MT_select_gpencil")
elif mode_string in {'PAINT_WEIGHT', 'PAINT_VERTEX', 'PAINT_TEXTURE'}:
mesh = obj.data
if mesh.use_paint_mask:
layout.menu("VIEW3D_MT_select_paint_mask")
elif mesh.use_paint_mask_vertex and mode_string in {'PAINT_WEIGHT', 'PAINT_VERTEX'}:
layout.menu("VIEW3D_MT_select_paint_mask_vertex")
elif mode_string != 'SCULPT':
layout.menu("VIEW3D_MT_select_%s" % mode_string.lower())
if gp_edit:
pass
elif mode_string == 'OBJECT':
layout.menu("VIEW3D_MT_add", text="Add", text_ctxt=i18n_contexts.operator_default)
elif mode_string == 'EDIT_MESH':
layout.menu("VIEW3D_MT_mesh_add", text="Add", text_ctxt=i18n_contexts.operator_default)
elif mode_string == 'EDIT_CURVE':
layout.menu("VIEW3D_MT_curve_add", text="Add", text_ctxt=i18n_contexts.operator_default)
elif mode_string == 'EDIT_SURFACE':
layout.menu("VIEW3D_MT_surface_add", text="Add", text_ctxt=i18n_contexts.operator_default)
elif mode_string == 'EDIT_METABALL':
layout.menu("VIEW3D_MT_metaball_add", text="Add", text_ctxt=i18n_contexts.operator_default)
elif mode_string == 'EDIT_ARMATURE':
layout.menu("TOPBAR_MT_edit_armature_add", text="Add", text_ctxt=i18n_contexts.operator_default)
if gp_edit:
if obj and obj.mode == 'PAINT_GPENCIL':
layout.menu("VIEW3D_MT_paint_gpencil")
elif obj and obj.mode == 'EDIT_GPENCIL':
layout.menu("VIEW3D_MT_edit_gpencil")
layout.menu("VIEW3D_MT_edit_gpencil_stroke")
layout.menu("VIEW3D_MT_edit_gpencil_point")
elif obj and obj.mode == 'WEIGHT_GPENCIL':
layout.menu("VIEW3D_MT_weight_gpencil")
elif edit_object:
layout.menu("VIEW3D_MT_edit_%s" % edit_object.type.lower())
if mode_string == 'EDIT_MESH':
layout.menu("VIEW3D_MT_edit_mesh_vertices")
layout.menu("VIEW3D_MT_edit_mesh_edges")
layout.menu("VIEW3D_MT_edit_mesh_faces")
layout.menu("VIEW3D_MT_uv_map", text="UV")
elif mode_string in {'EDIT_CURVE', 'EDIT_SURFACE'}:
layout.menu("VIEW3D_MT_edit_curve_ctrlpoints")
layout.menu("VIEW3D_MT_edit_curve_segments")
elif obj:
if mode_string != 'PAINT_TEXTURE':
layout.menu("VIEW3D_MT_%s" % mode_string.lower())
if mode_string == 'SCULPT':
layout.menu("VIEW3D_MT_mask")
layout.menu("VIEW3D_MT_face_sets")
else:
layout.menu("VIEW3D_MT_object")
# ********** Menu **********
# ********** Utilities **********
class ShowHideMenu:
bl_label = "Show/Hide"
_operator_name = ""
def draw(self, _context):
layout = self.layout
layout.operator("%s.reveal" % self._operator_name)
layout.operator("%s.hide" % self._operator_name, text="Hide Selected").unselected = False
layout.operator("%s.hide" % self._operator_name, text="Hide Unselected").unselected = True
# Standard transforms which apply to all cases
# NOTE: this doesn't seem to be able to be used directly
class VIEW3D_MT_transform_base(Menu):
bl_label = "Transform"
bl_category = "View"
# TODO: get rid of the custom text strings?
def draw(self, context):
layout = self.layout
layout.operator("transform.translate")
layout.operator("transform.rotate")
layout.operator("transform.resize", text="Scale")
layout.separator()
layout.operator("transform.tosphere", text="To Sphere")
layout.operator("transform.shear", text="Shear")
layout.operator("transform.bend", text="Bend")
layout.operator("transform.push_pull", text="Push/Pull")
if context.mode != 'OBJECT':
layout.operator("transform.vertex_warp", text="Warp")
layout.operator_context = 'EXEC_DEFAULT'
layout.operator("transform.vertex_random", text="Randomize").offset = 0.1
layout.operator_context = 'INVOKE_REGION_WIN'
# Generic transform menu - geometry types
class VIEW3D_MT_transform(VIEW3D_MT_transform_base):
def draw(self, context):
# base menu
VIEW3D_MT_transform_base.draw(self, context)
# generic...
layout = self.layout
if context.mode == 'EDIT_MESH':
layout.operator("transform.shrink_fatten", text="Shrink Fatten")
layout.operator("transform.skin_resize")
elif context.mode == 'EDIT_CURVE':
layout.operator("transform.transform", text="Radius").mode = 'CURVE_SHRINKFATTEN'
layout.separator()
layout.operator("transform.translate", text="Move Texture Space").texture_space = True
layout.operator("transform.resize", text="Scale Texture Space").texture_space = True
# Object-specific extensions to Transform menu
class VIEW3D_MT_transform_object(VIEW3D_MT_transform_base):
def draw(self, context):
layout = self.layout
# base menu
VIEW3D_MT_transform_base.draw(self, context)
        # object-specific options follow...
layout.separator()
layout.operator("transform.translate", text="Move Texture Space").texture_space = True
layout.operator("transform.resize", text="Scale Texture Space").texture_space = True
layout.separator()
layout.operator_context = 'EXEC_REGION_WIN'
# XXX see alignmenu() in edit.c of b2.4x to get this working
layout.operator("transform.transform", text="Align to Transform Orientation").mode = 'ALIGN'
layout.separator()
layout.operator("object.randomize_transform")
layout.operator("object.align")
# TODO: there is a strange context bug here.
"""
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("object.transform_axis_target")
"""
# Armature EditMode extensions to Transform menu
class VIEW3D_MT_transform_armature(VIEW3D_MT_transform_base):
def draw(self, context):
layout = self.layout
# base menu
VIEW3D_MT_transform_base.draw(self, context)
# armature specific extensions follow...
obj = context.object
if obj.type == 'ARMATURE' and obj.mode in {'EDIT', 'POSE'}:
if obj.data.display_type == 'BBONE':
layout.separator()
layout.operator("transform.transform", text="Scale BBone").mode = 'BONE_SIZE'
elif obj.data.display_type == 'ENVELOPE':
layout.separator()
layout.operator("transform.transform", text="Scale Envelope Distance").mode = 'BONE_SIZE'
layout.operator("transform.transform", text="Scale Radius").mode = 'BONE_ENVELOPE'
if context.edit_object and context.edit_object.type == 'ARMATURE':
layout.separator()
layout.operator("armature.align")
class VIEW3D_MT_mirror(Menu):
bl_label = "Mirror"
def draw(self, _context):
layout = self.layout
layout.operator("transform.mirror", text="Interactive Mirror")
layout.separator()
layout.operator_context = 'EXEC_REGION_WIN'
for (space_name, space_id) in (("Global", 'GLOBAL'), ("Local", 'LOCAL')):
for axis_index, axis_name in enumerate("XYZ"):
props = layout.operator("transform.mirror", text=f"{axis_name!s} {space_name!s}")
props.constraint_axis[axis_index] = True
                props.orient_type = space_id
if space_id == 'GLOBAL':
layout.separator()
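        # Illustrative: the loops above emit six entries -- "X Global",
        # "Y Global", "Z Global", a separator, then "X Local", "Y Local",
        # "Z Local" -- each constraining transform.mirror to a single axis in
        # the corresponding orientation.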
class VIEW3D_MT_snap(Menu):
bl_label = "Snap"
def draw(self, _context):
layout = self.layout
layout.operator("view3d.snap_selected_to_grid", text="Selection to Grid")
layout.operator("view3d.snap_selected_to_cursor", text="Selection to Cursor").use_offset = False
layout.operator("view3d.snap_selected_to_cursor", text="Selection to Cursor (Keep Offset)").use_offset = True
layout.operator("view3d.snap_selected_to_active", text="Selection to Active")
layout.separator()
layout.operator("view3d.snap_cursor_to_selected", text="Cursor to Selected")
layout.operator("view3d.snap_cursor_to_center", text="Cursor to World Origin")
layout.operator("view3d.snap_cursor_to_grid", text="Cursor to Grid")
layout.operator("view3d.snap_cursor_to_active", text="Cursor to Active")
class VIEW3D_MT_uv_map(Menu):
bl_label = "UV Mapping"
def draw(self, context):
layout = self.layout
tool_settings = context.tool_settings
layout.operator("uv.unwrap")
layout.prop(tool_settings, "use_edge_path_live_unwrap")
layout.separator()
layout.operator_context = 'INVOKE_DEFAULT'
layout.operator("uv.smart_project")
layout.operator("uv.lightmap_pack")
layout.operator("uv.follow_active_quads")
layout.separator()
layout.operator_context = 'EXEC_REGION_WIN'
layout.operator("uv.cube_project")
layout.operator("uv.cylinder_project")
layout.operator("uv.sphere_project")
layout.separator()
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("uv.project_from_view").scale_to_bounds = False
layout.operator("uv.project_from_view", text="Project from View (Bounds)").scale_to_bounds = True
layout.separator()
layout.operator("mesh.mark_seam").clear = False
layout.operator("mesh.mark_seam", text="Clear Seam").clear = True
layout.separator()
layout.operator("uv.reset")
# ********** View menus **********
class VIEW3D_MT_view(Menu):
bl_label = "View"
def draw(self, context):
layout = self.layout
view = context.space_data
layout.prop(view, "show_region_toolbar")
layout.prop(view, "show_region_ui")
layout.prop(view, "show_region_tool_header")
layout.prop(view, "show_region_hud")
layout.separator()
layout.operator("view3d.view_selected", text="Frame Selected").use_all_regions = False
if view.region_quadviews:
layout.operator("view3d.view_selected", text="Frame Selected (Quad View)").use_all_regions = True
layout.operator("view3d.view_all").center = False
layout.operator("view3d.view_persportho", text="Perspective/Orthographic")
layout.menu("VIEW3D_MT_view_local")
layout.separator()
layout.menu("VIEW3D_MT_view_cameras", text="Cameras")
layout.separator()
layout.menu("VIEW3D_MT_view_viewpoint")
layout.menu("VIEW3D_MT_view_navigation")
layout.menu("VIEW3D_MT_view_align")
layout.separator()
layout.operator_context = 'INVOKE_REGION_WIN'
layout.menu("VIEW3D_MT_view_regions", text="View Regions")
layout.separator()
layout.operator("screen.animation_play", text="Play Animation")
layout.separator()
layout.operator("render.opengl", text="Viewport Render Image", icon='RENDER_STILL')
layout.operator("render.opengl", text="Viewport Render Animation", icon='RENDER_ANIMATION').animation = True
props = layout.operator("render.opengl",
text="Viewport Render Keyframes",
icon='RENDER_ANIMATION',
)
props.animation = True
props.render_keyed_only = True
layout.separator()
layout.menu("INFO_MT_area")
class VIEW3D_MT_view_local(Menu):
bl_label = "Local View"
def draw(self, _context):
layout = self.layout
layout.operator("view3d.localview", text="Toggle Local View")
layout.operator("view3d.localview_remove_from")
class VIEW3D_MT_view_cameras(Menu):
bl_label = "Cameras"
def draw(self, _context):
layout = self.layout
layout.operator("view3d.object_as_camera")
layout.operator("view3d.view_camera", text="Active Camera")
layout.operator("view3d.view_center_camera")
class VIEW3D_MT_view_viewpoint(Menu):
bl_label = "Viewpoint"
def draw(self, _context):
layout = self.layout
layout.operator("view3d.view_camera", text="Camera")
layout.separator()
layout.operator("view3d.view_axis", text="Top").type = 'TOP'
layout.operator("view3d.view_axis", text="Bottom").type = 'BOTTOM'
layout.separator()
layout.operator("view3d.view_axis", text="Front").type = 'FRONT'
layout.operator("view3d.view_axis", text="Back").type = 'BACK'
layout.separator()
layout.operator("view3d.view_axis", text="Right").type = 'RIGHT'
layout.operator("view3d.view_axis", text="Left").type = 'LEFT'
class VIEW3D_MT_view_navigation(Menu):
bl_label = "Navigation"
def draw(self, _context):
from math import pi
layout = self.layout
layout.operator_enum("view3d.view_orbit", "type")
props = layout.operator("view3d.view_orbit", text="Orbit Opposite")
props.type = 'ORBITRIGHT'
props.angle = pi
layout.separator()
layout.operator("view3d.view_roll", text="Roll Left").type = 'LEFT'
layout.operator("view3d.view_roll", text="Roll Right").type = 'RIGHT'
layout.separator()
layout.operator_enum("view3d.view_pan", "type")
layout.separator()
layout.operator("view3d.zoom", text="Zoom In").delta = 1
layout.operator("view3d.zoom", text="Zoom Out").delta = -1
layout.operator("view3d.zoom_border", text="Zoom Region...")
layout.operator("view3d.zoom_camera_1_to_1", text="Zoom Camera 1:1")
layout.separator()
layout.operator("view3d.fly")
layout.operator("view3d.walk")
class VIEW3D_MT_view_align(Menu):
bl_label = "Align View"
def draw(self, _context):
layout = self.layout
layout.menu("VIEW3D_MT_view_align_selected")
layout.separator()
layout.operator("view3d.camera_to_view", text="Align Active Camera to View")
layout.operator("view3d.camera_to_view_selected", text="Align Active Camera to Selected")
layout.separator()
layout.operator("view3d.view_all", text="Center Cursor and Frame All").center = True
layout.operator("view3d.view_center_cursor")
layout.separator()
layout.operator("view3d.view_lock_to_active")
layout.operator("view3d.view_lock_clear")
class VIEW3D_MT_view_align_selected(Menu):
bl_label = "Align View to Active"
def draw(self, _context):
layout = self.layout
props = layout.operator("view3d.view_axis", text="Top")
props.align_active = True
props.type = 'TOP'
props = layout.operator("view3d.view_axis", text="Bottom")
props.align_active = True
props.type = 'BOTTOM'
layout.separator()
props = layout.operator("view3d.view_axis", text="Front")
props.align_active = True
props.type = 'FRONT'
props = layout.operator("view3d.view_axis", text="Back")
props.align_active = True
props.type = 'BACK'
layout.separator()
props = layout.operator("view3d.view_axis", text="Right")
props.align_active = True
props.type = 'RIGHT'
props = layout.operator("view3d.view_axis", text="Left")
props.align_active = True
props.type = 'LEFT'
class VIEW3D_MT_view_regions(Menu):
bl_label = "View Regions"
def draw(self, _context):
layout = self.layout
layout.operator("view3d.clip_border", text="Clipping Region...")
layout.operator("view3d.render_border", text="Render Region...")
layout.separator()
layout.operator("view3d.clear_render_border")
# ********** Select menus, suffix from context.mode **********
class VIEW3D_MT_select_object_more_less(Menu):
bl_label = "Select More/Less"
def draw(self, _context):
        layout = self.layout
layout.operator("object.select_more", text="More")
layout.operator("object.select_less", text="Less")
layout.separator()
props = layout.operator("object.select_hierarchy", text="Parent")
props.extend = False
props.direction = 'PARENT'
props = layout.operator("object.select_hierarchy", text="Child")
props.extend = False
props.direction = 'CHILD'
layout.separator()
props = layout.operator("object.select_hierarchy", text="Extend Parent")
props.extend = True
props.direction = 'PARENT'
props = layout.operator("object.select_hierarchy", text="Extend Child")
props.extend = True
props.direction = 'CHILD'
class VIEW3D_MT_select_object(Menu):
bl_label = "Select"
def draw(self, _context):
layout = self.layout
layout.operator("object.select_all", text="All").action = 'SELECT'
layout.operator("object.select_all", text="None").action = 'DESELECT'
layout.operator("object.select_all", text="Invert").action = 'INVERT'
layout.separator()
layout.operator("view3d.select_box")
layout.operator("view3d.select_circle")
layout.separator()
layout.operator_menu_enum("object.select_by_type", "type", text="Select All by Type...")
layout.operator("object.select_camera", text="Select Active Camera")
layout.operator("object.select_mirror", text="Mirror Selection")
layout.operator("object.select_random", text="Select Random")
layout.separator()
layout.menu("VIEW3D_MT_select_object_more_less")
layout.separator()
layout.operator_menu_enum("object.select_grouped", "type", text="Select Grouped")
layout.operator_menu_enum("object.select_linked", "type", text="Select Linked")
layout.operator("object.select_pattern", text="Select Pattern...")
class VIEW3D_MT_select_pose_more_less(Menu):
bl_label = "Select More/Less"
def draw(self, _context):
        layout = self.layout
props = layout.operator("pose.select_hierarchy", text="Parent")
props.extend = False
props.direction = 'PARENT'
props = layout.operator("pose.select_hierarchy", text="Child")
props.extend = False
props.direction = 'CHILD'
layout.separator()
props = layout.operator("pose.select_hierarchy", text="Extend Parent")
props.extend = True
props.direction = 'PARENT'
props = layout.operator("pose.select_hierarchy", text="Extend Child")
props.extend = True
props.direction = 'CHILD'
class VIEW3D_MT_select_pose(Menu):
bl_label = "Select"
def draw(self, _context):
layout = self.layout
layout.operator("pose.select_all", text="All").action = 'SELECT'
layout.operator("pose.select_all", text="None").action = 'DESELECT'
layout.operator("pose.select_all", text="Invert").action = 'INVERT'
layout.separator()
layout.operator("view3d.select_box")
layout.operator("view3d.select_circle")
layout.separator()
layout.operator("pose.select_mirror", text="Flip Active")
layout.separator()
layout.operator("pose.select_constraint_target", text="Constraint Target")
layout.operator("pose.select_linked", text="Linked")
layout.separator()
layout.menu("VIEW3D_MT_select_pose_more_less")
layout.separator()
layout.operator_menu_enum("pose.select_grouped", "type", text="Grouped")
layout.operator("object.select_pattern", text="Select Pattern...")
class VIEW3D_MT_select_particle(Menu):
bl_label = "Select"
def draw(self, _context):
layout = self.layout
layout.operator("particle.select_all", text="All").action = 'SELECT'
layout.operator("particle.select_all", text="None").action = 'DESELECT'
layout.operator("particle.select_all", text="Invert").action = 'INVERT'
layout.separator()
layout.operator("view3d.select_box")
layout.operator("view3d.select_circle")
layout.separator()
layout.operator("particle.select_linked", text="Select Linked")
layout.separator()
layout.operator("particle.select_more")
layout.operator("particle.select_less")
layout.separator()
layout.operator("particle.select_random")
layout.separator()
layout.operator("particle.select_roots", text="Roots")
layout.operator("particle.select_tips", text="Tips")
class VIEW3D_MT_edit_mesh_select_similar(Menu):
bl_label = "Select Similar"
def draw(self, _context):
layout = self.layout
layout.operator_enum("mesh.select_similar", "type")
layout.separator()
layout.operator("mesh.select_similar_region", text="Face Regions")
class VIEW3D_MT_edit_mesh_select_by_trait(Menu):
bl_label = "Select All by Trait"
def draw(self, context):
layout = self.layout
tool_settings = context.tool_settings
if tool_settings.mesh_select_mode[2] is False:
layout.operator("mesh.select_non_manifold", text="Non Manifold")
layout.operator("mesh.select_loose", text="Loose Geometry")
layout.operator("mesh.select_interior_faces", text="Interior Faces")
layout.operator("mesh.select_face_by_sides", text="Faces by Sides")
layout.separator()
layout.operator("mesh.select_ungrouped", text="Ungrouped Verts")
class VIEW3D_MT_edit_mesh_select_more_less(Menu):
bl_label = "Select More/Less"
def draw(self, _context):
layout = self.layout
layout.operator("mesh.select_more", text="More")
layout.operator("mesh.select_less", text="Less")
layout.separator()
layout.operator("mesh.select_next_item", text="Next Active")
layout.operator("mesh.select_prev_item", text="Previous Active")
class VIEW3D_MT_edit_mesh_select_linked(Menu):
bl_label = "Select Linked"
def draw(self, _context):
layout = self.layout
layout.operator("mesh.select_linked", text="Linked")
layout.operator("mesh.shortest_path_select", text="Shortest Path")
layout.operator("mesh.faces_select_linked_flat", text="Linked Flat Faces")
class VIEW3D_MT_edit_mesh_select_loops(Menu):
bl_label = "Select Loops"
def draw(self, _context):
layout = self.layout
layout.operator("mesh.loop_multi_select", text="Edge Loops").ring = False
layout.operator("mesh.loop_multi_select", text="Edge Rings").ring = True
layout.separator()
layout.operator("mesh.loop_to_region")
layout.operator("mesh.region_to_loop")
class VIEW3D_MT_select_edit_mesh(Menu):
bl_label = "Select"
def draw(self, _context):
layout = self.layout
# primitive
layout.operator("mesh.select_all", text="All").action = 'SELECT'
layout.operator("mesh.select_all", text="None").action = 'DESELECT'
layout.operator("mesh.select_all", text="Invert").action = 'INVERT'
layout.separator()
layout.operator("view3d.select_box")
layout.operator("view3d.select_circle")
layout.separator()
# numeric
layout.operator("mesh.select_random", text="Select Random")
layout.operator("mesh.select_nth")
layout.separator()
# geometric
layout.operator("mesh.edges_select_sharp", text="Select Sharp Edges")
layout.separator()
# other ...
layout.menu("VIEW3D_MT_edit_mesh_select_similar")
layout.separator()
layout.menu("VIEW3D_MT_edit_mesh_select_by_trait")
layout.separator()
layout.menu("VIEW3D_MT_edit_mesh_select_more_less")
layout.separator()
layout.menu("VIEW3D_MT_edit_mesh_select_loops")
layout.separator()
layout.menu("VIEW3D_MT_edit_mesh_select_linked")
layout.separator()
layout.operator("mesh.select_axis", text="Side of Active")
layout.operator("mesh.select_mirror", text="Mirror Selection")
class VIEW3D_MT_select_edit_curve(Menu):
bl_label = "Select"
def draw(self, _context):
layout = self.layout
layout.operator("curve.select_all", text="All").action = 'SELECT'
layout.operator("curve.select_all", text="None").action = 'DESELECT'
layout.operator("curve.select_all", text="Invert").action = 'INVERT'
layout.separator()
layout.operator("view3d.select_box")
layout.operator("view3d.select_circle")
layout.separator()
layout.operator("curve.select_random")
layout.operator("curve.select_nth")
layout.operator("curve.select_linked", text="Select Linked")
layout.operator("curve.select_similar", text="Select Similar")
layout.separator()
layout.operator("curve.de_select_first")
layout.operator("curve.de_select_last")
layout.operator("curve.select_next")
layout.operator("curve.select_previous")
layout.separator()
layout.operator("curve.select_more")
layout.operator("curve.select_less")
class VIEW3D_MT_select_edit_surface(Menu):
bl_label = "Select"
def draw(self, _context):
layout = self.layout
layout.operator("curve.select_all", text="All").action = 'SELECT'
layout.operator("curve.select_all", text="None").action = 'DESELECT'
layout.operator("curve.select_all", text="Invert").action = 'INVERT'
layout.separator()
layout.operator("view3d.select_box")
layout.operator("view3d.select_circle")
layout.separator()
layout.operator("curve.select_random")
layout.operator("curve.select_nth")
layout.operator("curve.select_linked", text="Select Linked")
layout.operator("curve.select_similar", text="Select Similar")
layout.separator()
layout.operator("curve.select_row")
layout.separator()
layout.operator("curve.select_more")
layout.operator("curve.select_less")
class VIEW3D_MT_select_edit_text(Menu):
bl_label = "Select"
def draw(self, _context):
layout = self.layout
layout.operator("font.select_all", text="All")
layout.separator()
layout.operator("font.move_select", text="Previous Block").type = 'PREVIOUS_PAGE'
layout.operator("font.move_select", text="Next Block").type = 'NEXT_PAGE'
layout.separator()
layout.operator("font.move_select", text="Line Begin").type = 'LINE_BEGIN'
layout.operator("font.move_select", text="Line End").type = 'LINE_END'
layout.separator()
layout.operator("font.move_select", text="Previous Line").type = 'PREVIOUS_LINE'
layout.operator("font.move_select", text="Next Line").type = 'NEXT_LINE'
layout.separator()
layout.operator("font.move_select", text="Previous Word").type = 'PREVIOUS_WORD'
layout.operator("font.move_select", text="Next Word").type = 'NEXT_WORD'
class VIEW3D_MT_select_edit_metaball(Menu):
bl_label = "Select"
def draw(self, _context):
layout = self.layout
layout.operator("mball.select_all", text="All").action = 'SELECT'
layout.operator("mball.select_all", text="None").action = 'DESELECT'
layout.operator("mball.select_all", text="Invert").action = 'INVERT'
layout.separator()
layout.operator("view3d.select_box")
layout.operator("view3d.select_circle")
layout.separator()
layout.operator("mball.select_random_metaelems")
layout.separator()
layout.operator_menu_enum("mball.select_similar", "type", text="Similar")
class VIEW3D_MT_edit_lattice_context_menu(Menu):
bl_label = "Lattice Context Menu"
def draw(self, _context):
        layout = self.layout
layout.menu("VIEW3D_MT_mirror")
layout.operator_menu_enum("lattice.flip", "axis")
layout.menu("VIEW3D_MT_snap")
layout.separator()
layout.operator("lattice.make_regular")
class VIEW3D_MT_select_edit_lattice(Menu):
bl_label = "Select"
def draw(self, _context):
layout = self.layout
layout.operator("lattice.select_all", text="All").action = 'SELECT'
layout.operator("lattice.select_all", text="None").action = 'DESELECT'
layout.operator("lattice.select_all", text="Invert").action = 'INVERT'
layout.separator()
layout.operator("view3d.select_box")
layout.operator("view3d.select_circle")
layout.separator()
layout.operator("lattice.select_mirror")
layout.operator("lattice.select_random")
layout.separator()
layout.operator("lattice.select_more")
layout.operator("lattice.select_less")
layout.separator()
layout.operator("lattice.select_ungrouped", text="Ungrouped Verts")
class VIEW3D_MT_select_edit_armature(Menu):
bl_label = "Select"
def draw(self, _context):
layout = self.layout
layout.operator("armature.select_all", text="All").action = 'SELECT'
layout.operator("armature.select_all", text="None").action = 'DESELECT'
layout.operator("armature.select_all", text="Invert").action = 'INVERT'
layout.separator()
layout.operator("view3d.select_box")
layout.operator("view3d.select_circle")
layout.separator()
layout.operator("armature.select_mirror", text="Mirror").extend = False
layout.separator()
layout.operator("armature.select_more", text="More")
layout.operator("armature.select_less", text="Less")
layout.separator()
layout.operator("armature.select_linked", text="Linked")
layout.separator()
props = layout.operator("armature.select_hierarchy", text="Parent")
props.extend = False
props.direction = 'PARENT'
props = layout.operator("armature.select_hierarchy", text="Child")
props.extend = False
props.direction = 'CHILD'
layout.separator()
props = layout.operator("armature.select_hierarchy", text="Extend Parent")
props.extend = True
props.direction = 'PARENT'
props = layout.operator("armature.select_hierarchy", text="Extend Child")
props.extend = True
props.direction = 'CHILD'
layout.operator_menu_enum("armature.select_similar", "type", text="Similar")
layout.operator("object.select_pattern", text="Select Pattern...")
class VIEW3D_MT_select_gpencil(Menu):
bl_label = "Select"
    def draw(self, context):
layout = self.layout
layout.operator("gpencil.select_all", text="All").action = 'SELECT'
layout.operator("gpencil.select_all", text="None").action = 'DESELECT'
layout.operator("gpencil.select_all", text="Invert").action = 'INVERT'
layout.separator()
layout.operator("gpencil.select_box")
layout.operator("gpencil.select_circle")
layout.separator()
layout.operator("gpencil.select_linked", text="Linked")
layout.operator("gpencil.select_alternate")
layout.operator_menu_enum("gpencil.select_grouped", "type", text="Grouped")
        if context.mode == 'VERTEX_GPENCIL':
layout.operator("gpencil.select_vertex_color", text="Vertex Color")
layout.separator()
layout.operator("gpencil.select_first")
layout.operator("gpencil.select_last")
layout.separator()
layout.operator("gpencil.select_more")
layout.operator("gpencil.select_less")
class VIEW3D_MT_select_paint_mask(Menu):
bl_label = "Select"
def draw(self, _context):
layout = self.layout
layout.operator("paint.face_select_all", text="All").action = 'SELECT'
layout.operator("paint.face_select_all", text="None").action = 'DESELECT'
layout.operator("paint.face_select_all", text="Invert").action = 'INVERT'
layout.separator()
layout.operator("view3d.select_box")
layout.operator("view3d.select_circle")
layout.separator()
layout.operator("paint.face_select_linked", text="Linked")
class VIEW3D_MT_select_paint_mask_vertex(Menu):
bl_label = "Select"
def draw(self, _context):
layout = self.layout
layout.operator("paint.vert_select_all", text="All").action = 'SELECT'
layout.operator("paint.vert_select_all", text="None").action = 'DESELECT'
layout.operator("paint.vert_select_all", text="Invert").action = 'INVERT'
layout.separator()
layout.operator("view3d.select_box")
layout.operator("view3d.select_circle")
layout.separator()
layout.operator("paint.vert_select_ungrouped", text="Ungrouped Verts")
class VIEW3D_MT_angle_control(Menu):
bl_label = "Angle Control"
@classmethod
def poll(cls, context):
settings = UnifiedPaintPanel.paint_settings(context)
if not settings:
return False
brush = settings.brush
tex_slot = brush.texture_slot
return tex_slot.has_texture_angle and tex_slot.has_texture_angle_source
def draw(self, context):
layout = self.layout
settings = UnifiedPaintPanel.paint_settings(context)
brush = settings.brush
sculpt = (context.sculpt_object is not None)
tex_slot = brush.texture_slot
layout.prop(tex_slot, "use_rake", text="Rake")
if brush.brush_capabilities.has_random_texture_angle and tex_slot.has_random_texture_angle:
if sculpt:
if brush.sculpt_capabilities.has_random_texture_angle:
layout.prop(tex_slot, "use_random", text="Random")
else:
layout.prop(tex_slot, "use_random", text="Random")
class VIEW3D_MT_mesh_add(Menu):
bl_idname = "VIEW3D_MT_mesh_add"
bl_label = "Mesh"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
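        # Invoke in the 3D View region so view-dependent add options
        # (e.g. "Align") have the region data they need.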
layout.operator("mesh.primitive_plane_add", text="Plane", icon='MESH_PLANE')
layout.operator("mesh.primitive_cube_add", text="Cube", icon='MESH_CUBE')
layout.operator("mesh.primitive_circle_add", text="Circle", icon='MESH_CIRCLE')
layout.operator("mesh.primitive_uv_sphere_add", text="UV Sphere", icon='MESH_UVSPHERE')
layout.operator("mesh.primitive_ico_sphere_add", text="Ico Sphere", icon='MESH_ICOSPHERE')
layout.operator("mesh.primitive_cylinder_add", text="Cylinder", icon='MESH_CYLINDER')
layout.operator("mesh.primitive_cone_add", text="Cone", icon='MESH_CONE')
layout.operator("mesh.primitive_torus_add", text="Torus", icon='MESH_TORUS')
layout.separator()
layout.operator("mesh.primitive_grid_add", text="Grid", icon='MESH_GRID')
layout.operator("mesh.primitive_monkey_add", text="Monkey", icon='MESH_MONKEY')
class VIEW3D_MT_curve_add(Menu):
bl_idname = "VIEW3D_MT_curve_add"
bl_label = "Curve"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("curve.primitive_bezier_curve_add", text="Bezier", icon='CURVE_BEZCURVE')
layout.operator("curve.primitive_bezier_circle_add", text="Circle", icon='CURVE_BEZCIRCLE')
layout.separator()
layout.operator("curve.primitive_nurbs_curve_add", text="Nurbs Curve", icon='CURVE_NCURVE')
layout.operator("curve.primitive_nurbs_circle_add", text="Nurbs Circle", icon='CURVE_NCIRCLE')
layout.operator("curve.primitive_nurbs_path_add", text="Path", icon='CURVE_PATH')
class VIEW3D_MT_surface_add(Menu):
bl_idname = "VIEW3D_MT_surface_add"
bl_label = "Surface"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("surface.primitive_nurbs_surface_curve_add", text="Nurbs Curve", icon='SURFACE_NCURVE')
layout.operator("surface.primitive_nurbs_surface_circle_add", text="Nurbs Circle", icon='SURFACE_NCIRCLE')
layout.operator("surface.primitive_nurbs_surface_surface_add", text="Nurbs Surface", icon='SURFACE_NSURFACE')
layout.operator("surface.primitive_nurbs_surface_cylinder_add",
text="Nurbs Cylinder", icon='SURFACE_NCYLINDER')
layout.operator("surface.primitive_nurbs_surface_sphere_add", text="Nurbs Sphere", icon='SURFACE_NSPHERE')
layout.operator("surface.primitive_nurbs_surface_torus_add", text="Nurbs Torus", icon='SURFACE_NTORUS')
class VIEW3D_MT_edit_metaball_context_menu(Menu):
bl_label = "Metaball Context Menu"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
# Add
layout.operator("mball.duplicate_move")
layout.separator()
# Modify
layout.menu("VIEW3D_MT_mirror")
layout.menu("VIEW3D_MT_snap")
layout.separator()
# Remove
layout.operator_context = 'EXEC_DEFAULT'
layout.operator("mball.delete_metaelems", text="Delete")
class VIEW3D_MT_metaball_add(Menu):
bl_idname = "VIEW3D_MT_metaball_add"
bl_label = "Metaball"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator_enum("object.metaball_add", "type")
class TOPBAR_MT_edit_curve_add(Menu):
bl_idname = "TOPBAR_MT_edit_curve_add"
bl_label = "Add"
bl_translation_context = i18n_contexts.operator_default
def draw(self, context):
is_surf = context.active_object.type == 'SURFACE'
layout = self.layout
layout.operator_context = 'EXEC_REGION_WIN'
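        # Delegate to the object-mode add menus: surfaces get the surface
        # primitives, everything else the curve primitives.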
if is_surf:
VIEW3D_MT_surface_add.draw(self, context)
else:
VIEW3D_MT_curve_add.draw(self, context)
class TOPBAR_MT_edit_armature_add(Menu):
bl_idname = "TOPBAR_MT_edit_armature_add"
bl_label = "Armature"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'EXEC_REGION_WIN'
layout.operator("armature.bone_primitive_add", text="Single Bone", icon='BONE_DATA')
class VIEW3D_MT_armature_add(Menu):
bl_idname = "VIEW3D_MT_armature_add"
bl_label = "Armature"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'EXEC_REGION_WIN'
layout.operator("object.armature_add", text="Single Bone", icon='BONE_DATA')
class VIEW3D_MT_light_add(Menu):
bl_idname = "VIEW3D_MT_light_add"
bl_label = "Light"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator_enum("object.light_add", "type")
class VIEW3D_MT_lightprobe_add(Menu):
bl_idname = "VIEW3D_MT_lightprobe_add"
bl_label = "Light Probe"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator_enum("object.lightprobe_add", "type")
class VIEW3D_MT_camera_add(Menu):
bl_idname = "VIEW3D_MT_camera_add"
bl_label = "Camera"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'EXEC_REGION_WIN'
layout.operator("object.camera_add", text="Camera", icon='OUTLINER_OB_CAMERA')
class VIEW3D_MT_volume_add(Menu):
bl_idname = "VIEW3D_MT_volume_add"
bl_label = "Volume"
def draw(self, _context):
layout = self.layout
layout.operator("object.volume_import", text="Import OpenVDB...", icon='OUTLINER_DATA_VOLUME')
layout.operator("object.volume_add", text="Empty", icon='OUTLINER_DATA_VOLUME')
class VIEW3D_MT_add(Menu):
bl_label = "Add"
bl_translation_context = i18n_contexts.operator_default
def draw(self, context):
layout = self.layout
# note, don't use 'EXEC_SCREEN' or operators won't get the 'v3d' context.
# Note: was EXEC_AREA, but this context does not have the 'rv3d', which prevents
# "align_view" to work on first call (see [#32719]).
layout.operator_context = 'EXEC_REGION_WIN'
# layout.operator_menu_enum("object.mesh_add", "type", text="Mesh", icon='OUTLINER_OB_MESH')
layout.menu("VIEW3D_MT_mesh_add", icon='OUTLINER_OB_MESH')
# layout.operator_menu_enum("object.curve_add", "type", text="Curve", icon='OUTLINER_OB_CURVE')
layout.menu("VIEW3D_MT_curve_add", icon='OUTLINER_OB_CURVE')
# layout.operator_menu_enum("object.surface_add", "type", text="Surface", icon='OUTLINER_OB_SURFACE')
layout.menu("VIEW3D_MT_surface_add", icon='OUTLINER_OB_SURFACE')
layout.menu("VIEW3D_MT_metaball_add", text="Metaball", icon='OUTLINER_OB_META')
layout.operator("object.text_add", text="Text", icon='OUTLINER_OB_FONT')
if context.preferences.experimental.use_new_hair_type:
layout.operator("object.hair_add", text="Hair", icon='OUTLINER_OB_HAIR')
if context.preferences.experimental.use_new_particle_system:
layout.operator("object.pointcloud_add", text="Point Cloud", icon='OUTLINER_OB_POINTCLOUD')
layout.menu("VIEW3D_MT_volume_add", text="Volume", icon='OUTLINER_OB_VOLUME')
layout.operator_menu_enum("object.gpencil_add", "type", text="Grease Pencil", icon='OUTLINER_OB_GREASEPENCIL')
layout.separator()
if VIEW3D_MT_armature_add.is_extended():
layout.menu("VIEW3D_MT_armature_add", icon='OUTLINER_OB_ARMATURE')
else:
layout.operator("object.armature_add", text="Armature", icon='OUTLINER_OB_ARMATURE')
layout.operator("object.add", text="Lattice", icon='OUTLINER_OB_LATTICE').type = 'LATTICE'
layout.separator()
layout.operator_menu_enum("object.empty_add", "type", text="Empty", icon='OUTLINER_OB_EMPTY')
layout.menu("VIEW3D_MT_image_add", text="Image", icon='OUTLINER_OB_IMAGE')
layout.separator()
layout.menu("VIEW3D_MT_light_add", icon='OUTLINER_OB_LIGHT')
layout.menu("VIEW3D_MT_lightprobe_add", icon='OUTLINER_OB_LIGHTPROBE')
layout.separator()
if VIEW3D_MT_camera_add.is_extended():
layout.menu("VIEW3D_MT_camera_add", icon='OUTLINER_OB_CAMERA')
else:
VIEW3D_MT_camera_add.draw(self, context)
layout.separator()
layout.operator("object.speaker_add", text="Speaker", icon='OUTLINER_OB_SPEAKER')
layout.separator()
layout.operator_menu_enum("object.effector_add", "type", text="Force Field", icon='OUTLINER_OB_FORCE_FIELD')
layout.separator()
has_collections = bool(bpy.data.collections)
col = layout.column()
col.enabled = has_collections
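        # Without collections there is nothing to list; with many, invoke the
        # operator so a search popup is used instead of a long enum submenu.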
if not has_collections or len(bpy.data.collections) > 10:
col.operator_context = 'INVOKE_REGION_WIN'
col.operator(
"object.collection_instance_add",
text="Collection Instance..." if has_collections else "No Collections to Instance",
icon='OUTLINER_OB_GROUP_INSTANCE',
)
else:
col.operator_menu_enum(
"object.collection_instance_add",
"collection",
text="Collection Instance",
icon='OUTLINER_OB_GROUP_INSTANCE',
)
class VIEW3D_MT_image_add(Menu):
bl_label = "Add Image"
def draw(self, _context):
layout = self.layout
layout.operator("object.load_reference_image", text="Reference", icon='IMAGE_REFERENCE')
layout.operator("object.load_background_image", text="Background", icon='IMAGE_BACKGROUND')
class VIEW3D_MT_object_relations(Menu):
bl_label = "Relations"
def draw(self, _context):
layout = self.layout
layout.operator("object.proxy_make", text="Make Proxy...")
layout.operator("object.make_override_library", text="Make Library Override...")
layout.operator("object.make_dupli_face")
layout.separator()
layout.operator_menu_enum("object.make_local", "type", text="Make Local...")
layout.menu("VIEW3D_MT_make_single_user")
layout.separator()
layout.operator("object.data_transfer")
layout.operator("object.datalayout_transfer")
class VIEW3D_MT_object(Menu):
bl_context = "objectmode"
bl_label = "Object"
def draw(self, context):
layout = self.layout
layout.menu("VIEW3D_MT_transform_object")
layout.operator_menu_enum("object.origin_set", text="Set Origin", property="type")
layout.menu("VIEW3D_MT_mirror")
layout.menu("VIEW3D_MT_object_clear")
layout.menu("VIEW3D_MT_object_apply")
layout.menu("VIEW3D_MT_snap")
layout.separator()
layout.operator("object.duplicate_move")
layout.operator("object.duplicate_move_linked")
layout.operator("object.join")
layout.separator()
layout.operator("view3d.copybuffer", text="Copy Objects", icon='COPYDOWN')
layout.operator("view3d.pastebuffer", text="Paste Objects", icon='PASTEDOWN')
layout.separator()
layout.menu("VIEW3D_MT_object_parent")
layout.menu("VIEW3D_MT_object_collection")
layout.menu("VIEW3D_MT_object_relations")
layout.menu("VIEW3D_MT_object_constraints")
layout.menu("VIEW3D_MT_object_track")
layout.menu("VIEW3D_MT_make_links", text="Make Links")
layout.separator()
layout.operator("object.shade_smooth")
layout.operator("object.shade_flat")
layout.separator()
layout.menu("VIEW3D_MT_object_animation")
layout.menu("VIEW3D_MT_object_rigid_body")
layout.separator()
layout.menu("VIEW3D_MT_object_quick_effects")
layout.separator()
ob = context.active_object
if ob and ob.type == 'GPENCIL' and context.gpencil_data:
layout.operator_menu_enum("gpencil.convert", "type", text="Convert to")
else:
layout.operator_menu_enum("object.convert", "target")
layout.separator()
layout.menu("VIEW3D_MT_object_showhide")
layout.separator()
layout.operator_context = 'EXEC_DEFAULT'
layout.operator("object.delete", text="Delete").use_global = False
layout.operator("object.delete", text="Delete Global").use_global = True
class VIEW3D_MT_object_animation(Menu):
bl_label = "Animation"
def draw(self, _context):
layout = self.layout
layout.operator("anim.keyframe_insert_menu", text="Insert Keyframe...")
layout.operator("anim.keyframe_delete_v3d", text="Delete Keyframes...")
layout.operator("anim.keyframe_clear_v3d", text="Clear Keyframes...")
layout.operator("anim.keying_set_active_set", text="Change Keying Set...")
layout.separator()
layout.operator("nla.bake", text="Bake Action...")
layout.operator("gpencil.mesh_bake", text="Bake Mesh to Grease Pencil...")
class VIEW3D_MT_object_rigid_body(Menu):
bl_label = "Rigid Body"
def draw(self, _context):
layout = self.layout
layout.operator("rigidbody.objects_add", text="Add Active").type = 'ACTIVE'
layout.operator("rigidbody.objects_add", text="Add Passive").type = 'PASSIVE'
layout.separator()
layout.operator("rigidbody.objects_remove", text="Remove")
layout.separator()
layout.operator("rigidbody.shape_change", text="Change Shape")
layout.operator("rigidbody.mass_calculate", text="Calculate Mass")
layout.operator("rigidbody.object_settings_copy", text="Copy from Active")
layout.operator("object.visual_transform_apply", text="Apply Transformation")
layout.operator("rigidbody.bake_to_keyframes", text="Bake to Keyframes")
layout.separator()
layout.operator("rigidbody.connect", text="Connect")
class VIEW3D_MT_object_clear(Menu):
bl_label = "Clear"
def draw(self, _context):
layout = self.layout
layout.operator("object.location_clear", text="Location").clear_delta = False
layout.operator("object.rotation_clear", text="Rotation").clear_delta = False
layout.operator("object.scale_clear", text="Scale").clear_delta = False
layout.separator()
layout.operator("object.origin_clear", text="Origin")
class VIEW3D_MT_object_context_menu(Menu):
bl_label = "Object Context Menu"
def draw(self, context):
layout = self.layout
view = context.space_data
obj = context.object
selected_objects_len = len(context.selected_objects)
# If nothing is selected
# (disabled for now until it can be made more useful).
'''
if selected_objects_len == 0:
layout.menu("VIEW3D_MT_add", text="Add", text_ctxt=i18n_contexts.operator_default)
layout.operator("view3d.pastebuffer", text="Paste Objects", icon='PASTEDOWN')
return
'''
# If something is selected
if obj is not None and obj.type in {'MESH', 'CURVE', 'SURFACE'}:
layout.operator("object.shade_smooth", text="Shade Smooth")
layout.operator("object.shade_flat", text="Shade Flat")
layout.separator()
if obj is None:
pass
elif obj.type == 'MESH':
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator_menu_enum("object.origin_set", text="Set Origin", property="type")
layout.operator_context = 'INVOKE_DEFAULT'
# If more than one object is selected
if selected_objects_len > 1:
layout.operator("object.join")
layout.separator()
elif obj.type == 'CAMERA':
layout.operator_context = 'INVOKE_REGION_WIN'
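            # wm.context_modal_mouse adjusts the value at data_path_item with
            # mouse movement, for every object in data_path_iter.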
if obj.data.type == 'PERSP':
props = layout.operator("wm.context_modal_mouse", text="Camera Lens Angle")
props.data_path_iter = "selected_editable_objects"
props.data_path_item = "data.lens"
props.input_scale = 0.1
if obj.data.lens_unit == 'MILLIMETERS':
props.header_text = "Camera Lens Angle: %.1fmm"
else:
props.header_text = "Camera Lens Angle: %.1f\u00B0"
else:
props = layout.operator("wm.context_modal_mouse", text="Camera Lens Scale")
props.data_path_iter = "selected_editable_objects"
props.data_path_item = "data.ortho_scale"
props.input_scale = 0.01
props.header_text = "Camera Lens Scale: %.3f"
if not obj.data.dof.focus_object:
if view and view.camera == obj and view.region_3d.view_perspective == 'CAMERA':
props = layout.operator("ui.eyedropper_depth", text="DOF Distance (Pick)")
else:
props = layout.operator("wm.context_modal_mouse", text="DOF Distance")
props.data_path_iter = "selected_editable_objects"
props.data_path_item = "data.dof.focus_distance"
props.input_scale = 0.02
props.header_text = "DOF Distance: %.3f"
layout.separator()
elif obj.type in {'CURVE', 'FONT'}:
layout.operator_context = 'INVOKE_REGION_WIN'
props = layout.operator("wm.context_modal_mouse", text="Extrude Size")
props.data_path_iter = "selected_editable_objects"
props.data_path_item = "data.extrude"
props.input_scale = 0.01
props.header_text = "Extrude Size: %.3f"
props = layout.operator("wm.context_modal_mouse", text="Width Size")
props.data_path_iter = "selected_editable_objects"
props.data_path_item = "data.offset"
props.input_scale = 0.01
props.header_text = "Width Size: %.3f"
layout.separator()
layout.operator("object.convert", text="Convert to Mesh").target = 'MESH'
layout.operator("object.convert", text="Convert to Grease Pencil").target = 'GPENCIL'
layout.operator_menu_enum("object.origin_set", text="Set Origin", property="type")
layout.separator()
elif obj.type == 'GPENCIL':
layout.operator("gpencil.convert", text="Convert to Path").type = 'PATH'
layout.operator("gpencil.convert", text="Convert to Bezier Curve").type = 'CURVE'
layout.operator("gpencil.convert", text="Convert to Polygon Curve").type = 'POLY'
layout.operator_menu_enum("object.origin_set", text="Set Origin", property="type")
layout.separator()
elif obj.type == 'EMPTY':
layout.operator_context = 'INVOKE_REGION_WIN'
props = layout.operator("wm.context_modal_mouse", text="Empty Draw Size")
props.data_path_iter = "selected_editable_objects"
props.data_path_item = "empty_display_size"
props.input_scale = 0.01
props.header_text = "Empty Draw Size: %.3f"
layout.separator()
elif obj.type == 'LIGHT':
light = obj.data
layout.operator_context = 'INVOKE_REGION_WIN'
props = layout.operator("wm.context_modal_mouse", text="Power")
props.data_path_iter = "selected_editable_objects"
props.data_path_item = "data.energy"
props.header_text = "Light Power: %.3f"
if light.type == 'AREA':
props = layout.operator("wm.context_modal_mouse", text="Size X")
props.data_path_iter = "selected_editable_objects"
props.data_path_item = "data.size"
props.header_text = "Light Size X: %.3f"
if light.shape in {'RECTANGLE', 'ELLIPSE'}:
props = layout.operator("wm.context_modal_mouse", text="Size Y")
props.data_path_iter = "selected_editable_objects"
props.data_path_item = "data.size_y"
props.header_text = "Light Size Y: %.3f"
elif light.type in {'SPOT', 'POINT'}:
props = layout.operator("wm.context_modal_mouse", text="Radius")
props.data_path_iter = "selected_editable_objects"
props.data_path_item = "data.shadow_soft_size"
props.header_text = "Light Radius: %.3f"
elif light.type == 'SUN':
props = layout.operator("wm.context_modal_mouse", text="Angle")
props.data_path_iter = "selected_editable_objects"
props.data_path_item = "data.angle"
props.header_text = "Light Angle: %.3f"
if light.type == 'SPOT':
layout.separator()
props = layout.operator("wm.context_modal_mouse", text="Spot Size")
props.data_path_iter = "selected_editable_objects"
props.data_path_item = "data.spot_size"
props.input_scale = 0.01
props.header_text = "Spot Size: %.2f"
props = layout.operator("wm.context_modal_mouse", text="Spot Blend")
props.data_path_iter = "selected_editable_objects"
props.data_path_item = "data.spot_blend"
props.input_scale = -0.01
props.header_text = "Spot Blend: %.2f"
layout.separator()
layout.operator("view3d.copybuffer", text="Copy Objects", icon='COPYDOWN')
layout.operator("view3d.pastebuffer", text="Paste Objects", icon='PASTEDOWN')
layout.separator()
layout.operator("object.duplicate_move", icon='DUPLICATE')
layout.operator("object.duplicate_move_linked")
layout.separator()
props = layout.operator("wm.call_panel", text="Rename Active Object...")
props.name = "TOPBAR_PT_name"
props.keep_open = False
layout.separator()
layout.menu("VIEW3D_MT_mirror")
layout.menu("VIEW3D_MT_snap")
layout.menu("VIEW3D_MT_object_parent")
layout.operator_context = 'INVOKE_REGION_WIN'
if view and view.local_view:
layout.operator("view3d.localview_remove_from")
else:
layout.operator("object.move_to_collection")
layout.separator()
layout.operator("anim.keyframe_insert_menu", text="Insert Keyframe...")
layout.separator()
layout.operator_context = 'EXEC_DEFAULT'
layout.operator("object.delete", text="Delete").use_global = False
class VIEW3D_MT_object_shading(Menu):
    # XXX: this menu is a place to store shading operators in object mode.
bl_label = "Shading"
def draw(self, _context):
layout = self.layout
layout.operator("object.shade_smooth", text="Smooth")
layout.operator("object.shade_flat", text="Flat")
class VIEW3D_MT_object_apply(Menu):
bl_label = "Apply"
def draw(self, _context):
layout = self.layout
props = layout.operator("object.transform_apply", text="Location", text_ctxt=i18n_contexts.default)
props.location, props.rotation, props.scale = True, False, False
props = layout.operator("object.transform_apply", text="Rotation", text_ctxt=i18n_contexts.default)
props.location, props.rotation, props.scale = False, True, False
props = layout.operator("object.transform_apply", text="Scale", text_ctxt=i18n_contexts.default)
props.location, props.rotation, props.scale = False, False, True
props = layout.operator("object.transform_apply", text="All Transforms", text_ctxt=i18n_contexts.default)
props.location, props.rotation, props.scale = True, True, True
props = layout.operator("object.transform_apply", text="Rotation & Scale", text_ctxt=i18n_contexts.default)
props.location, props.rotation, props.scale = False, True, True
layout.separator()
layout.operator(
"object.transforms_to_deltas",
text="Location to Deltas",
text_ctxt=i18n_contexts.default,
).mode = 'LOC'
layout.operator(
"object.transforms_to_deltas",
text="Rotation to Deltas",
text_ctxt=i18n_contexts.default,
).mode = 'ROT'
layout.operator(
"object.transforms_to_deltas",
text="Scale to Deltas",
text_ctxt=i18n_contexts.default,
).mode = 'SCALE'
layout.operator(
"object.transforms_to_deltas",
text="All Transforms to Deltas",
text_ctxt=i18n_contexts.default,
).mode = 'ALL'
layout.operator("object.anim_transforms_to_deltas")
layout.separator()
layout.operator(
"object.visual_transform_apply",
text="Visual Transform",
text_ctxt=i18n_contexts.default,
)
layout.operator(
"object.convert",
text="Visual Geometry to Mesh",
text_ctxt=i18n_contexts.default,
).target = 'MESH'
layout.operator("object.duplicates_make_real")
class VIEW3D_MT_object_parent(Menu):
bl_label = "Parent"
def draw(self, _context):
layout = self.layout
operator_context_default = layout.operator_context
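        # Remember the default operator context so it can be restored after
        # the entry that must run with EXEC_DEFAULT.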
layout.operator_enum("object.parent_set", "type")
layout.separator()
layout.operator_context = 'EXEC_DEFAULT'
layout.operator("object.parent_no_inverse_set")
layout.operator_context = operator_context_default
layout.separator()
layout.operator_enum("object.parent_clear", "type")
class VIEW3D_MT_object_track(Menu):
bl_label = "Track"
def draw(self, _context):
layout = self.layout
layout.operator_enum("object.track_set", "type")
layout.separator()
layout.operator_enum("object.track_clear", "type")
class VIEW3D_MT_object_collection(Menu):
bl_label = "Collection"
def draw(self, _context):
layout = self.layout
layout.operator("object.move_to_collection")
layout.operator("object.link_to_collection")
layout.separator()
layout.operator("collection.create")
# layout.operator_menu_enum("collection.objects_remove", "collection") # BUGGY
layout.operator("collection.objects_remove")
layout.operator("collection.objects_remove_all")
layout.separator()
layout.operator("collection.objects_add_active")
layout.operator("collection.objects_remove_active")
class VIEW3D_MT_object_constraints(Menu):
bl_label = "Constraints"
def draw(self, _context):
layout = self.layout
layout.operator("object.constraint_add_with_targets")
layout.operator("object.constraints_copy")
layout.separator()
layout.operator("object.constraints_clear")
class VIEW3D_MT_object_quick_effects(Menu):
bl_label = "Quick Effects"
def draw(self, _context):
layout = self.layout
layout.operator("object.quick_fur")
layout.operator("object.quick_explode")
layout.operator("object.quick_smoke")
layout.operator("object.quick_liquid")
class VIEW3D_MT_object_showhide(Menu):
bl_label = "Show/Hide"
def draw(self, _context):
layout = self.layout
layout.operator("object.hide_view_clear")
layout.separator()
layout.operator("object.hide_view_set", text="Hide Selected").unselected = False
layout.operator("object.hide_view_set", text="Hide Unselected").unselected = True
class VIEW3D_MT_make_single_user(Menu):
bl_label = "Make Single User"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'EXEC_DEFAULT'
props = layout.operator("object.make_single_user", text="Object")
props.object = True
props.obdata = props.material = props.animation = False
props = layout.operator("object.make_single_user", text="Object & Data")
props.object = props.obdata = True
props.material = props.animation = False
props = layout.operator("object.make_single_user", text="Object & Data & Materials")
props.object = props.obdata = props.material = True
props.animation = False
props = layout.operator("object.make_single_user", text="Materials")
props.material = True
props.object = props.obdata = props.animation = False
props = layout.operator("object.make_single_user", text="Object Animation")
props.animation = True
props.object = props.obdata = props.material = False
class VIEW3D_MT_make_links(Menu):
bl_label = "Make Links"
def draw(self, _context):
layout = self.layout
operator_context_default = layout.operator_context
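        # With many scenes, invoke the operator (search popup) rather than
        # listing every scene in a submenu.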
if len(bpy.data.scenes) > 10:
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("object.make_links_scene", text="Objects to Scene...", icon='OUTLINER_OB_EMPTY')
else:
layout.operator_context = 'EXEC_REGION_WIN'
layout.operator_menu_enum("object.make_links_scene", "scene", text="Objects to Scene")
layout.separator()
layout.operator_context = operator_context_default
layout.operator_enum("object.make_links_data", "type") # inline
layout.operator("object.join_uvs") # stupid place to add this!
class VIEW3D_MT_brush_paint_modes(Menu):
bl_label = "Enabled Modes"
def draw(self, context):
layout = self.layout
settings = UnifiedPaintPanel.paint_settings(context)
brush = settings.brush
layout.prop(brush, "use_paint_sculpt", text="Sculpt")
layout.prop(brush, "use_paint_uv_sculpt", text="UV Sculpt")
layout.prop(brush, "use_paint_vertex", text="Vertex Paint")
layout.prop(brush, "use_paint_weight", text="Weight Paint")
layout.prop(brush, "use_paint_image", text="Texture Paint")
class VIEW3D_MT_paint_vertex(Menu):
bl_label = "Paint"
def draw(self, _context):
layout = self.layout
layout.operator("paint.vertex_color_set")
layout.operator("paint.vertex_color_smooth")
layout.operator("paint.vertex_color_dirt")
layout.operator("paint.vertex_color_from_weight")
layout.separator()
layout.operator("paint.vertex_color_invert", text="Invert")
layout.operator("paint.vertex_color_levels", text="Levels")
layout.operator("paint.vertex_color_hsv", text="Hue Saturation Value")
layout.operator("paint.vertex_color_brightness_contrast", text="Bright/Contrast")
class VIEW3D_MT_hook(Menu):
bl_label = "Hooks"
def draw(self, context):
layout = self.layout
layout.operator_context = 'EXEC_AREA'
layout.operator("object.hook_add_newob")
layout.operator("object.hook_add_selob").use_bone = False
layout.operator("object.hook_add_selob", text="Hook to Selected Object Bone").use_bone = True
        if any(mod.type == 'HOOK' for mod in context.active_object.modifiers):
layout.separator()
layout.operator_menu_enum("object.hook_assign", "modifier")
layout.operator_menu_enum("object.hook_remove", "modifier")
layout.separator()
layout.operator_menu_enum("object.hook_select", "modifier")
layout.operator_menu_enum("object.hook_reset", "modifier")
layout.operator_menu_enum("object.hook_recenter", "modifier")
class VIEW3D_MT_vertex_group(Menu):
bl_label = "Vertex Groups"
def draw(self, context):
layout = self.layout
layout.operator_context = 'EXEC_AREA'
layout.operator("object.vertex_group_assign_new")
ob = context.active_object
if ob.mode == 'EDIT' or (ob.mode == 'WEIGHT_PAINT' and ob.type == 'MESH' and ob.data.use_paint_mask_vertex):
if ob.vertex_groups.active:
layout.separator()
layout.operator("object.vertex_group_assign", text="Assign to Active Group")
layout.operator(
"object.vertex_group_remove_from",
text="Remove from Active Group",
).use_all_groups = False
layout.operator("object.vertex_group_remove_from", text="Remove from All").use_all_groups = True
if ob.vertex_groups.active:
layout.separator()
layout.operator_menu_enum("object.vertex_group_set_active", "group", text="Set Active Group")
layout.operator("object.vertex_group_remove", text="Remove Active Group").all = False
layout.operator("object.vertex_group_remove", text="Remove All Groups").all = True
class VIEW3D_MT_gpencil_vertex_group(Menu):
bl_label = "Vertex Groups"
def draw(self, context):
layout = self.layout
layout.operator_context = 'EXEC_AREA'
        layout.operator("object.vertex_group_add", text="Add New Group")
        ob = context.active_object
if ob.vertex_groups.active:
layout.separator()
layout.operator("gpencil.vertex_group_assign", text="Assign")
layout.operator("gpencil.vertex_group_remove_from", text="Remove")
layout.operator("gpencil.vertex_group_select", text="Select")
layout.operator("gpencil.vertex_group_deselect", text="Deselect")
class VIEW3D_MT_paint_weight_lock(Menu):
bl_label = "Vertex Group Locks"
def draw(self, _context):
layout = self.layout
op = layout.operator("object.vertex_group_lock", icon='LOCKED', text="Lock All")
op.action, op.mask = 'LOCK', 'ALL'
op = layout.operator("object.vertex_group_lock", icon='UNLOCKED', text="Unlock All")
op.action, op.mask = 'UNLOCK', 'ALL'
op = layout.operator("object.vertex_group_lock", icon='LOCKED', text="Lock Selected")
op.action, op.mask = 'LOCK', 'SELECTED'
op = layout.operator("object.vertex_group_lock", icon='UNLOCKED', text="Unlock Selected")
op.action, op.mask = 'UNLOCK', 'SELECTED'
op = layout.operator("object.vertex_group_lock", icon='LOCKED', text="Lock Unselected")
op.action, op.mask = 'LOCK', 'UNSELECTED'
op = layout.operator("object.vertex_group_lock", icon='UNLOCKED', text="Unlock Unselected")
op.action, op.mask = 'UNLOCK', 'UNSELECTED'
op = layout.operator("object.vertex_group_lock", text="Lock Only Selected")
op.action, op.mask = 'LOCK', 'INVERT_UNSELECTED'
op = layout.operator("object.vertex_group_lock", text="Lock Only Unselected")
op.action, op.mask = 'UNLOCK', 'INVERT_UNSELECTED'
op = layout.operator("object.vertex_group_lock", text="Invert Locks")
op.action, op.mask = 'INVERT', 'ALL'
class VIEW3D_MT_paint_weight(Menu):
bl_label = "Weights"
@staticmethod
def draw_generic(layout, is_editmode=False):
if not is_editmode:
layout.operator("paint.weight_from_bones", text="Assign Automatic From Bones").type = 'AUTOMATIC'
layout.operator("paint.weight_from_bones", text="Assign From Bone Envelopes").type = 'ENVELOPES'
layout.separator()
layout.operator("object.vertex_group_normalize_all", text="Normalize All")
layout.operator("object.vertex_group_normalize", text="Normalize")
layout.separator()
layout.operator("object.vertex_group_mirror", text="Mirror")
layout.operator("object.vertex_group_invert", text="Invert")
layout.operator("object.vertex_group_clean", text="Clean")
layout.separator()
layout.operator("object.vertex_group_quantize", text="Quantize")
layout.operator("object.vertex_group_levels", text="Levels")
layout.operator("object.vertex_group_smooth", text="Smooth")
if not is_editmode:
props = layout.operator("object.data_transfer", text="Transfer Weights")
props.use_reverse_transfer = True
props.data_type = 'VGROUP_WEIGHTS'
layout.operator("object.vertex_group_limit_total", text="Limit Total")
layout.operator("object.vertex_group_fix", text="Fix Deforms")
if not is_editmode:
layout.separator()
layout.operator("paint.weight_set")
layout.menu("VIEW3D_MT_paint_weight_lock", text="Locks")
def draw(self, _context):
self.draw_generic(self.layout, is_editmode=False)
class VIEW3D_MT_sculpt(Menu):
bl_label = "Sculpt"
def draw(self, _context):
layout = self.layout
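        # paint.hide_show: `action` chooses hide or show, `area` restricts it
        # to all geometry, a box region, or the masked area.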
props = layout.operator("paint.hide_show", text="Show All")
props.action = 'SHOW'
props.area = 'ALL'
props = layout.operator("paint.hide_show", text="Show Bounding Box")
props.action = 'SHOW'
props.area = 'INSIDE'
props = layout.operator("paint.hide_show", text="Hide Bounding Box")
props.action = 'HIDE'
props.area = 'INSIDE'
props = layout.operator("paint.hide_show", text="Hide Masked")
props.action = 'HIDE'
props.area = 'MASKED'
layout.separator()
layout.menu("VIEW3D_MT_sculpt_set_pivot", text="Set Pivot")
layout.separator()
layout.operator("sculpt.optimize")
class VIEW3D_MT_mask(Menu):
bl_label = "Mask"
def draw(self, _context):
layout = self.layout
props = layout.operator("paint.mask_flood_fill", text="Invert Mask")
props.mode = 'INVERT'
props = layout.operator("paint.mask_flood_fill", text="Fill Mask")
props.mode = 'VALUE'
props.value = 1
props = layout.operator("paint.mask_flood_fill", text="Clear Mask")
props.mode = 'VALUE'
props.value = 0
props = layout.operator("view3d.select_box", text="Box Mask")
props = layout.operator("paint.mask_lasso_gesture", text="Lasso Mask")
layout.separator()
props = layout.operator("sculpt.mask_filter", text='Smooth Mask')
props.filter_type = 'SMOOTH'
props.auto_iteration_count = True
props = layout.operator("sculpt.mask_filter", text='Sharpen Mask')
props.filter_type = 'SHARPEN'
props.auto_iteration_count = True
props = layout.operator("sculpt.mask_filter", text='Grow Mask')
props.filter_type = 'GROW'
props.auto_iteration_count = True
props = layout.operator("sculpt.mask_filter", text='Shrink Mask')
props.filter_type = 'SHRINK'
props.auto_iteration_count = True
props = layout.operator("sculpt.mask_filter", text='Increase Contrast')
props.filter_type = 'CONTRAST_INCREASE'
props.auto_iteration_count = False
props = layout.operator("sculpt.mask_filter", text='Decrease Contrast')
props.filter_type = 'CONTRAST_DECREASE'
props.auto_iteration_count = False
layout.separator()
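        # Both presets drive sculpt.mask_expand; use_normals makes the
        # expansion follow surface curvature instead of raw topology.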
props = layout.operator("sculpt.mask_expand", text="Expand Mask By Topology")
props.use_normals = False
props.keep_previous_mask = False
props.invert = True
props.smooth_iterations = 2
props.create_face_set = False
props = layout.operator("sculpt.mask_expand", text="Expand Mask By Curvature")
props.use_normals = True
props.keep_previous_mask = True
props.invert = False
props.smooth_iterations = 0
props.create_face_set = False
layout.separator()
props = layout.operator("mesh.paint_mask_extract", text="Mask Extract")
layout.separator()
props = layout.operator("mesh.paint_mask_slice", text="Mask Slice")
props.fill_holes = False
props.new_object = False
props = layout.operator("mesh.paint_mask_slice", text="Mask Slice and Fill Holes")
props.new_object = False
props = layout.operator("mesh.paint_mask_slice", text="Mask Slice to New Object")
layout.separator()
props = layout.operator("sculpt.dirty_mask", text='Dirty Mask')
class VIEW3D_MT_face_sets(Menu):
bl_label = "Face Sets"
def draw(self, _context):
layout = self.layout
op = layout.operator("sculpt.face_sets_create", text='Face Set From Masked')
op.mode = 'MASKED'
op = layout.operator("sculpt.face_sets_create", text='Face Set From Visible')
op.mode = 'VISIBLE'
op = layout.operator("sculpt.face_sets_create", text='Face Set From Edit Mode Selection')
op.mode = 'SELECTION'
layout.separator()
layout.menu("VIEW3D_MT_face_sets_init", text="Init Face Sets")
layout.separator()
op = layout.operator("sculpt.face_set_edit", text='Grow Face Set')
op.mode = 'GROW'
op = layout.operator("sculpt.face_set_edit", text='Shrink Face Set')
op.mode = 'SHRINK'
layout.separator()
op = layout.operator("sculpt.face_set_change_visibility", text='Invert Visible Face Sets')
op.mode = 'INVERT'
op = layout.operator("sculpt.face_set_change_visibility", text='Show All Face Sets')
op.mode = 'SHOW_ALL'
layout.separator()
op = layout.operator("sculpt.face_sets_randomize_colors", text='Randomize Colors')
class VIEW3D_MT_sculpt_set_pivot(Menu):
bl_label = "Sculpt Set Pivot"
def draw(self, _context):
layout = self.layout
props = layout.operator("sculpt.set_pivot_position", text="Pivot to Origin")
props.mode = 'ORIGIN'
props = layout.operator("sculpt.set_pivot_position", text="Pivot to Unmasked")
props.mode = 'UNMASKED'
props = layout.operator("sculpt.set_pivot_position", text="Pivot to Mask Border")
props.mode = 'BORDER'
props = layout.operator("sculpt.set_pivot_position", text="Pivot to Active Vertex")
props.mode = 'ACTIVE'
props = layout.operator("sculpt.set_pivot_position", text="Pivot to Surface Under Cursor")
props.mode = 'SURFACE'
class VIEW3D_MT_face_sets_init(Menu):
bl_label = "Face Sets Init"
def draw(self, _context):
layout = self.layout
op = layout.operator("sculpt.face_sets_init", text='By Loose Parts')
op.mode = 'LOOSE_PARTS'
op = layout.operator("sculpt.face_sets_init", text='By Materials')
op.mode = 'MATERIALS'
op = layout.operator("sculpt.face_sets_init", text='By Normals')
op.mode = 'NORMALS'
op = layout.operator("sculpt.face_sets_init", text='By UV Seams')
op.mode = 'UV_SEAMS'
op = layout.operator("sculpt.face_sets_init", text='By Edge Creases')
op.mode = 'CREASES'
op = layout.operator("sculpt.face_sets_init", text='By Edge Bevel Weight')
op.mode = 'BEVEL_WEIGHT'
op = layout.operator("sculpt.face_sets_init", text='By Sharp Edges')
op.mode = 'SHARP_EDGES'
op = layout.operator("sculpt.face_sets_init", text='By Face Maps')
op.mode = 'FACE_MAPS'
class VIEW3D_MT_particle(Menu):
bl_label = "Particle"
def draw(self, context):
layout = self.layout
tool_settings = context.tool_settings
particle_edit = tool_settings.particle_edit
layout.operator("particle.mirror")
layout.operator("particle.remove_doubles")
layout.separator()
if particle_edit.select_mode == 'POINT':
layout.operator("particle.subdivide")
layout.operator("particle.unify_length")
layout.operator("particle.rekey")
layout.operator("particle.weight_set")
layout.separator()
layout.menu("VIEW3D_MT_particle_showhide")
layout.separator()
layout.operator("particle.delete")
class VIEW3D_MT_particle_context_menu(Menu):
bl_label = "Particle Context Menu"
def draw(self, context):
layout = self.layout
tool_settings = context.tool_settings
particle_edit = tool_settings.particle_edit
layout.operator("particle.rekey")
layout.separator()
layout.operator("particle.delete")
layout.separator()
layout.operator("particle.remove_doubles")
layout.operator("particle.unify_length")
if particle_edit.select_mode == 'POINT':
layout.operator("particle.subdivide")
layout.operator("particle.weight_set")
layout.separator()
layout.operator("particle.mirror")
if particle_edit.select_mode == 'POINT':
layout.separator()
layout.operator("particle.select_all", text="All").action = 'SELECT'
layout.operator("particle.select_all", text="None").action = 'DESELECT'
layout.operator("particle.select_all", text="Invert").action = 'INVERT'
layout.separator()
layout.operator("particle.select_roots")
layout.operator("particle.select_tips")
layout.separator()
layout.operator("particle.select_random")
layout.separator()
layout.operator("particle.select_more")
layout.operator("particle.select_less")
layout.separator()
layout.operator("particle.select_linked", text="Select Linked")
class VIEW3D_MT_particle_showhide(ShowHideMenu, Menu):
_operator_name = "particle"
class VIEW3D_MT_pose(Menu):
bl_label = "Pose"
def draw(self, _context):
layout = self.layout
layout.menu("VIEW3D_MT_transform_armature")
layout.menu("VIEW3D_MT_pose_transform")
layout.menu("VIEW3D_MT_pose_apply")
layout.menu("VIEW3D_MT_snap")
layout.separator()
layout.menu("VIEW3D_MT_object_animation")
layout.separator()
layout.menu("VIEW3D_MT_pose_slide")
layout.menu("VIEW3D_MT_pose_propagate")
layout.separator()
layout.operator("pose.copy", icon='COPYDOWN')
layout.operator("pose.paste", icon='PASTEDOWN').flipped = False
layout.operator("pose.paste", icon='PASTEFLIPDOWN', text="Paste Pose Flipped").flipped = True
layout.separator()
layout.menu("VIEW3D_MT_pose_library")
layout.menu("VIEW3D_MT_pose_motion")
layout.menu("VIEW3D_MT_pose_group")
layout.separator()
layout.menu("VIEW3D_MT_object_parent")
layout.menu("VIEW3D_MT_pose_ik")
layout.menu("VIEW3D_MT_pose_constraints")
layout.separator()
layout.menu("VIEW3D_MT_pose_names")
layout.operator("pose.quaternions_flip")
layout.separator()
layout.operator_context = 'INVOKE_AREA'
layout.operator("armature.armature_layers", text="Change Armature Layers...")
layout.operator("pose.bone_layers", text="Change Bone Layers...")
layout.separator()
layout.menu("VIEW3D_MT_pose_showhide")
layout.menu("VIEW3D_MT_bone_options_toggle", text="Bone Settings")
class VIEW3D_MT_pose_transform(Menu):
bl_label = "Clear Transform"
def draw(self, _context):
layout = self.layout
layout.operator("pose.transforms_clear", text="All")
layout.separator()
layout.operator("pose.loc_clear", text="Location")
layout.operator("pose.rot_clear", text="Rotation")
layout.operator("pose.scale_clear", text="Scale")
layout.separator()
layout.operator("pose.user_transforms_clear", text="Reset Unkeyed")
class VIEW3D_MT_pose_slide(Menu):
bl_label = "In-Betweens"
def draw(self, _context):
layout = self.layout
layout.operator("pose.push_rest")
layout.operator("pose.relax_rest")
layout.operator("pose.push")
layout.operator("pose.relax")
layout.operator("pose.breakdown")
class VIEW3D_MT_pose_propagate(Menu):
bl_label = "Propagate"
def draw(self, _context):
layout = self.layout
layout.operator("pose.propagate").mode = 'WHILE_HELD'
layout.separator()
layout.operator("pose.propagate", text="To Next Keyframe").mode = 'NEXT_KEY'
layout.operator("pose.propagate", text="To Last Keyframe (Make Cyclic)").mode = 'LAST_KEY'
layout.separator()
layout.operator("pose.propagate", text="On Selected Keyframes").mode = 'SELECTED_KEYS'
layout.separator()
layout.operator("pose.propagate", text="On Selected Markers").mode = 'SELECTED_MARKERS'
class VIEW3D_MT_pose_library(Menu):
bl_label = "Pose Library"
def draw(self, _context):
layout = self.layout
layout.operator("poselib.browse_interactive", text="Browse Poses...")
layout.separator()
layout.operator("poselib.pose_add", text="Add Pose...")
layout.operator("poselib.pose_rename", text="Rename Pose...")
layout.operator("poselib.pose_remove", text="Remove Pose...")
class VIEW3D_MT_pose_motion(Menu):
bl_label = "Motion Paths"
def draw(self, _context):
layout = self.layout
layout.operator("pose.paths_calculate", text="Calculate")
layout.operator("pose.paths_clear", text="Clear")
class VIEW3D_MT_pose_group(Menu):
bl_label = "Bone Groups"
def draw(self, context):
layout = self.layout
pose = context.active_object.pose
layout.operator_context = 'EXEC_AREA'
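        # pose.group_assign: type 0 creates a new group, otherwise type is
        # the 1-based index of an existing bone group.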
layout.operator("pose.group_assign", text="Assign to New Group").type = 0
if pose.bone_groups:
active_group = pose.bone_groups.active_index + 1
layout.operator("pose.group_assign", text="Assign to Group").type = active_group
layout.separator()
# layout.operator_context = 'INVOKE_AREA'
layout.operator("pose.group_unassign")
layout.operator("pose.group_remove")
class VIEW3D_MT_pose_ik(Menu):
bl_label = "Inverse Kinematics"
def draw(self, _context):
layout = self.layout
layout.operator("pose.ik_add")
layout.operator("pose.ik_clear")
class VIEW3D_MT_pose_constraints(Menu):
bl_label = "Constraints"
def draw(self, _context):
layout = self.layout
layout.operator("pose.constraint_add_with_targets", text="Add (With Targets)...")
layout.operator("pose.constraints_copy")
layout.operator("pose.constraints_clear")
class VIEW3D_MT_pose_names(Menu):
bl_label = "Names"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'EXEC_REGION_WIN'
layout.operator("pose.autoside_names", text="AutoName Left/Right").axis = 'XAXIS'
layout.operator("pose.autoside_names", text="AutoName Front/Back").axis = 'YAXIS'
layout.operator("pose.autoside_names", text="AutoName Top/Bottom").axis = 'ZAXIS'
layout.operator("pose.flip_names")
class VIEW3D_MT_pose_showhide(ShowHideMenu, Menu):
_operator_name = "pose"
class VIEW3D_MT_pose_apply(Menu):
bl_label = "Apply"
def draw(self, _context):
layout = self.layout
layout.operator("pose.armature_apply").selected = False
layout.operator("pose.armature_apply", text="Apply Selected as Rest Pose").selected = True
layout.operator("pose.visual_transform_apply")
layout.separator()
props = layout.operator("object.assign_property_defaults")
props.process_bones = True
class VIEW3D_MT_pose_context_menu(Menu):
bl_label = "Pose Context Menu"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("anim.keyframe_insert_menu", text="Insert Keyframe...")
layout.separator()
layout.operator("pose.copy", icon='COPYDOWN')
layout.operator("pose.paste", icon='PASTEDOWN').flipped = False
layout.operator("pose.paste", icon='PASTEFLIPDOWN', text="Paste X-Flipped Pose").flipped = True
layout.separator()
props = layout.operator("wm.call_panel", text="Rename Active Bone...")
props.name = "TOPBAR_PT_name"
props.keep_open = False
layout.separator()
layout.operator("pose.push")
layout.operator("pose.relax")
layout.operator("pose.breakdown")
layout.separator()
layout.operator("pose.paths_calculate", text="Calculate Motion Paths")
layout.operator("pose.paths_clear", text="Clear Motion Paths")
layout.separator()
layout.operator("pose.hide").unselected = False
layout.operator("pose.reveal")
layout.separator()
layout.operator("pose.user_transforms_clear")
class BoneOptions:
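    # Mix-in for the three bone-option menus below; each subclass sets
    # `type` to the wm.context_collection_boolean_set mode it performs.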
def draw(self, context):
layout = self.layout
options = [
"show_wire",
"use_deform",
"use_envelope_multiply",
"use_inherit_rotation",
]
if context.mode == 'EDIT_ARMATURE':
bone_props = bpy.types.EditBone.bl_rna.properties
data_path_iter = "selected_bones"
opt_suffix = ""
options.append("lock")
else: # pose-mode
bone_props = bpy.types.Bone.bl_rna.properties
data_path_iter = "selected_pose_bones"
opt_suffix = "bone."
for opt in options:
props = layout.operator("wm.context_collection_boolean_set", text=bone_props[opt].name,
text_ctxt=i18n_contexts.default)
props.data_path_iter = data_path_iter
props.data_path_item = opt_suffix + opt
props.type = self.type
class VIEW3D_MT_bone_options_toggle(Menu, BoneOptions):
bl_label = "Toggle Bone Options"
type = 'TOGGLE'
class VIEW3D_MT_bone_options_enable(Menu, BoneOptions):
bl_label = "Enable Bone Options"
type = 'ENABLE'
class VIEW3D_MT_bone_options_disable(Menu, BoneOptions):
bl_label = "Disable Bone Options"
type = 'DISABLE'
# ********** Edit Menus, suffix from ob.type **********
class VIEW3D_MT_edit_mesh(Menu):
bl_label = "Mesh"
def draw(self, _context):
layout = self.layout
with_bullet = bpy.app.build_options.bullet
layout.menu("VIEW3D_MT_transform")
layout.menu("VIEW3D_MT_mirror")
layout.menu("VIEW3D_MT_snap")
layout.separator()
layout.operator("mesh.duplicate_move", text="Duplicate")
layout.menu("VIEW3D_MT_edit_mesh_extrude")
layout.separator()
layout.menu("VIEW3D_MT_edit_mesh_merge", text="Merge")
layout.menu("VIEW3D_MT_edit_mesh_split", text="Split")
layout.operator_menu_enum("mesh.separate", "type")
layout.separator()
layout.operator("mesh.bisect")
layout.operator("mesh.knife_project")
if with_bullet:
layout.operator("mesh.convex_hull")
layout.separator()
layout.operator("mesh.symmetrize")
layout.operator("mesh.symmetry_snap")
layout.separator()
layout.menu("VIEW3D_MT_edit_mesh_normals")
layout.menu("VIEW3D_MT_edit_mesh_shading")
layout.menu("VIEW3D_MT_edit_mesh_weights")
layout.operator_menu_enum("mesh.sort_elements", "type", text="Sort Elements...")
layout.separator()
layout.menu("VIEW3D_MT_edit_mesh_showhide")
layout.menu("VIEW3D_MT_edit_mesh_clean")
layout.separator()
layout.menu("VIEW3D_MT_edit_mesh_delete")
class VIEW3D_MT_edit_mesh_context_menu(Menu):
bl_label = ""
def draw(self, context):
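        # Selection counts are summed over every object in multi-object
        # edit mode, so the entries below reflect the full selection.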
def count_selected_items_for_objects_in_mode():
selected_verts_len = 0
selected_edges_len = 0
selected_faces_len = 0
for ob in context.objects_in_mode_unique_data:
v, e, f = ob.data.count_selected_items()
selected_verts_len += v
selected_edges_len += e
selected_faces_len += f
return (selected_verts_len, selected_edges_len, selected_faces_len)
is_vert_mode, is_edge_mode, is_face_mode = context.tool_settings.mesh_select_mode
selected_verts_len, selected_edges_len, selected_faces_len = count_selected_items_for_objects_in_mode()
del count_selected_items_for_objects_in_mode
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
# If nothing is selected
# (disabled for now until it can be made more useful).
'''
# If nothing is selected
if not (selected_verts_len or selected_edges_len or selected_faces_len):
layout.menu("VIEW3D_MT_mesh_add", text="Add", text_ctxt=i18n_contexts.operator_default)
return
'''
# Else something is selected
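        # One column per enabled select mode, drawn side by side below.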
row = layout.row()
if is_vert_mode:
col = row.column()
col.label(text="Vertex Context Menu", icon='VERTEXSEL')
col.separator()
# Additive Operators
col.operator("mesh.subdivide", text="Subdivide")
col.separator()
col.operator("mesh.extrude_vertices_move", text="Extrude Vertices")
col.operator("mesh.bevel", text="Bevel Vertices").vertex_only = True
if selected_verts_len > 1:
col.separator()
col.operator("mesh.edge_face_add", text="New Edge/Face from Vertices")
col.operator("mesh.vert_connect_path", text="Connect Vertex Path")
col.operator("mesh.vert_connect", text="Connect Vertex Pairs")
col.separator()
# Deform Operators
col.operator("transform.push_pull", text="Push/Pull")
col.operator("transform.shrink_fatten", text="Shrink/Fatten")
col.operator("transform.shear", text="Shear")
col.operator("transform.vert_slide", text="Slide Vertices")
col.operator_context = 'EXEC_DEFAULT'
col.operator("transform.vertex_random", text="Randomize Vertices").offset = 0.1
col.operator("mesh.vertices_smooth", text="Smooth Vertices").factor = 0.5
col.operator_context = 'INVOKE_REGION_WIN'
col.operator("mesh.vertices_smooth_laplacian", text="Smooth Laplacian")
col.separator()
col.menu("VIEW3D_MT_mirror", text="Mirror Vertices")
col.menu("VIEW3D_MT_snap", text="Snap Vertices")
col.separator()
# Removal Operators
if selected_verts_len > 1:
col.menu("VIEW3D_MT_edit_mesh_merge", text="Merge Vertices")
col.operator("mesh.split")
col.operator_menu_enum("mesh.separate", "type")
col.operator("mesh.dissolve_verts")
col.operator("mesh.delete", text="Delete Vertices").type = 'VERT'
if is_edge_mode:
render = context.scene.render
col = row.column()
col.label(text="Edge Context Menu", icon='EDGESEL')
col.separator()
# Additive Operators
col.operator("mesh.subdivide", text="Subdivide")
col.separator()
col.operator("mesh.extrude_edges_move", text="Extrude Edges")
col.operator("mesh.bevel", text="Bevel Edges").vertex_only = False
if selected_edges_len >= 2:
col.operator("mesh.bridge_edge_loops")
if selected_edges_len >= 1:
col.operator("mesh.edge_face_add", text="New Face from Edges")
if selected_edges_len >= 2:
col.operator("mesh.fill")
col.separator()
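            # mesh.loopcut_slide is a macro operator; properties of the nested
            # TRANSFORM_OT_edge_slide step are reached through its property group.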
props = col.operator("mesh.loopcut_slide")
props.TRANSFORM_OT_edge_slide.release_confirm = False
col.operator("mesh.offset_edge_loops_slide")
col.separator()
col.operator("mesh.knife_tool")
col.operator("mesh.bisect")
col.separator()
# Deform Operators
col.operator("mesh.edge_rotate", text="Rotate Edge CW").use_ccw = False
col.operator("transform.edge_slide")
col.operator("mesh.edge_split")
col.separator()
# Edge Flags
col.operator("transform.edge_crease")
col.operator("transform.edge_bevelweight")
col.separator()
col.operator("mesh.mark_seam").clear = False
col.operator("mesh.mark_seam", text="Clear Seam").clear = True
col.separator()
col.operator("mesh.mark_sharp")
col.operator("mesh.mark_sharp", text="Clear Sharp").clear = True
if render.use_freestyle:
col.separator()
col.operator("mesh.mark_freestyle_edge").clear = False
col.operator("mesh.mark_freestyle_edge", text="Clear Freestyle Edge").clear = True
col.separator()
# Removal Operators
col.operator("mesh.unsubdivide")
col.operator("mesh.split")
col.operator_menu_enum("mesh.separate", "type")
col.operator("mesh.dissolve_edges")
col.operator("mesh.delete", text="Delete Edges").type = 'EDGE'
if is_face_mode:
col = row.column()
col.label(text="Face Context Menu", icon='FACESEL')
col.separator()
# Additive Operators
col.operator("mesh.subdivide", text="Subdivide")
col.separator()
col.operator("view3d.edit_mesh_extrude_move_normal", text="Extrude Faces")
col.operator("view3d.edit_mesh_extrude_move_shrink_fatten", text="Extrude Faces Along Normals")
col.operator("mesh.extrude_faces_move", text="Extrude Individual Faces")
col.operator("mesh.inset")
col.operator("mesh.poke")
if selected_faces_len >= 2:
col.operator("mesh.bridge_edge_loops", text="Bridge Faces")
col.separator()
# Modify Operators
col.menu("VIEW3D_MT_uv_map", text="UV Unwrap Faces")
col.separator()
props = col.operator("mesh.quads_convert_to_tris")
props.quad_method = props.ngon_method = 'BEAUTY'
col.operator("mesh.tris_convert_to_quads")
col.separator()
col.operator("mesh.faces_shade_smooth")
col.operator("mesh.faces_shade_flat")
col.separator()
# Removal Operators
col.operator("mesh.unsubdivide")
col.operator("mesh.split")
col.operator_menu_enum("mesh.separate", "type")
col.operator("mesh.dissolve_faces")
col.operator("mesh.delete", text="Delete Faces").type = 'FACE'
class VIEW3D_MT_edit_mesh_select_mode(Menu):
bl_label = "Mesh Select Mode"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("mesh.select_mode", text="Vertex", icon='VERTEXSEL').type = 'VERT'
layout.operator("mesh.select_mode", text="Edge", icon='EDGESEL').type = 'EDGE'
layout.operator("mesh.select_mode", text="Face", icon='FACESEL').type = 'FACE'
class VIEW3D_MT_edit_mesh_extrude(Menu):
bl_label = "Extrude"
_extrude_funcs = {
'VERT': lambda layout:
layout.operator("mesh.extrude_vertices_move", text="Extrude Vertices"),
'EDGE': lambda layout:
layout.operator("mesh.extrude_edges_move", text="Extrude Edges"),
'REGION': lambda layout:
layout.operator("view3d.edit_mesh_extrude_move_normal", text="Extrude Faces"),
'REGION_VERT_NORMAL': lambda layout:
layout.operator("view3d.edit_mesh_extrude_move_shrink_fatten", text="Extrude Faces Along Normals"),
'FACE': lambda layout:
layout.operator("mesh.extrude_faces_move", text="Extrude Individual Faces"),
}
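    # Builds the list of applicable extrude modes from the current selection
    # counts and the active vertex/edge/face select mode.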
@staticmethod
def extrude_options(context):
tool_settings = context.tool_settings
select_mode = tool_settings.mesh_select_mode
mesh = context.object.data
menu = []
if mesh.total_face_sel:
menu += ['REGION', 'REGION_VERT_NORMAL', 'FACE']
if mesh.total_edge_sel and (select_mode[0] or select_mode[1]):
menu += ['EDGE']
if mesh.total_vert_sel and select_mode[0]:
menu += ['VERT']
        # The list is empty when nothing is selected to extrude.
return menu
def draw(self, context):
from math import pi
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
for menu_id in self.extrude_options(context):
self._extrude_funcs[menu_id](layout)
layout.separator()
layout.operator("mesh.extrude_repeat")
layout.operator("mesh.spin").angle = pi * 2
class VIEW3D_MT_edit_mesh_vertices(Menu):
bl_label = "Vertex"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("mesh.extrude_vertices_move", text="Extrude Vertices")
layout.operator("mesh.bevel", text="Bevel Vertices").vertex_only = True
layout.separator()
layout.operator("mesh.edge_face_add", text="New Edge/Face from Vertices")
layout.operator("mesh.vert_connect_path", text="Connect Vertex Path")
layout.operator("mesh.vert_connect", text="Connect Vertex Pairs")
layout.separator()
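        # mesh.rip_move is a macro; use_fill belongs to its nested MESH_OT_rip step.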
props = layout.operator("mesh.rip_move", text="Rip Vertices")
props.MESH_OT_rip.use_fill = False
props = layout.operator("mesh.rip_move", text="Rip Vertices and Fill")
props.MESH_OT_rip.use_fill = True
layout.operator("mesh.rip_edge_move", text="Rip Vertices and Extend")
layout.separator()
layout.operator("transform.vert_slide", text="Slide Vertices")
layout.operator_context = 'EXEC_DEFAULT'
layout.operator("mesh.vertices_smooth", text="Smooth Vertices").factor = 0.5
layout.operator("mesh.vertices_smooth_laplacian", text="Smooth Vertices (Laplacian)")
layout.operator_context = 'INVOKE_REGION_WIN'
layout.separator()
layout.operator("mesh.blend_from_shape")
layout.operator("mesh.shape_propagate_to_all", text="Propagate to Shapes")
layout.separator()
layout.menu("VIEW3D_MT_vertex_group")
layout.menu("VIEW3D_MT_hook")
layout.separator()
layout.operator("object.vertex_parent_set")
class VIEW3D_MT_edit_mesh_edges_data(Menu):
bl_label = "Edge Data"
def draw(self, context):
layout = self.layout
render = context.scene.render
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("transform.edge_crease")
layout.operator("transform.edge_bevelweight")
layout.separator()
layout.operator("mesh.mark_seam").clear = False
layout.operator("mesh.mark_seam", text="Clear Seam").clear = True
layout.separator()
layout.operator("mesh.mark_sharp")
layout.operator("mesh.mark_sharp", text="Clear Sharp").clear = True
layout.operator("mesh.mark_sharp", text="Mark Sharp from Vertices").use_verts = True
props = layout.operator("mesh.mark_sharp", text="Clear Sharp from Vertices")
props.use_verts = True
props.clear = True
if render.use_freestyle:
layout.separator()
layout.operator("mesh.mark_freestyle_edge").clear = False
layout.operator("mesh.mark_freestyle_edge", text="Clear Freestyle Edge").clear = True
class VIEW3D_MT_edit_mesh_edges(Menu):
bl_label = "Edge"
def draw(self, _context):
layout = self.layout
with_freestyle = bpy.app.build_options.freestyle
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("mesh.extrude_edges_move", text="Extrude Edges")
layout.operator("mesh.bevel", text="Bevel Edges").vertex_only = False
layout.operator("mesh.bridge_edge_loops")
layout.operator("mesh.screw")
layout.separator()
layout.operator("mesh.subdivide")
layout.operator("mesh.subdivide_edgering")
layout.operator("mesh.unsubdivide")
layout.separator()
layout.operator("mesh.edge_rotate", text="Rotate Edge CW").use_ccw = False
layout.operator("mesh.edge_rotate", text="Rotate Edge CCW").use_ccw = True
layout.separator()
layout.operator("transform.edge_slide")
props = layout.operator("mesh.loopcut_slide")
props.TRANSFORM_OT_edge_slide.release_confirm = False
layout.operator("mesh.offset_edge_loops_slide")
layout.separator()
layout.operator("transform.edge_crease")
layout.operator("transform.edge_bevelweight")
layout.separator()
layout.operator("mesh.mark_seam").clear = False
layout.operator("mesh.mark_seam", text="Clear Seam").clear = True
layout.separator()
layout.operator("mesh.mark_sharp")
layout.operator("mesh.mark_sharp", text="Clear Sharp").clear = True
layout.operator("mesh.mark_sharp", text="Mark Sharp from Vertices").use_verts = True
props = layout.operator("mesh.mark_sharp", text="Clear Sharp from Vertices")
props.use_verts = True
props.clear = True
if with_freestyle:
layout.separator()
layout.operator("mesh.mark_freestyle_edge").clear = False
layout.operator("mesh.mark_freestyle_edge", text="Clear Freestyle Edge").clear = True
class VIEW3D_MT_edit_mesh_faces_data(Menu):
bl_label = "Face Data"
def draw(self, _context):
layout = self.layout
with_freestyle = bpy.app.build_options.freestyle
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("mesh.colors_rotate")
layout.operator("mesh.colors_reverse")
layout.separator()
layout.operator("mesh.uvs_rotate")
layout.operator("mesh.uvs_reverse")
layout.separator()
if with_freestyle:
layout.operator("mesh.mark_freestyle_face").clear = False
layout.operator("mesh.mark_freestyle_face", text="Clear Freestyle Face").clear = True
class VIEW3D_MT_edit_mesh_faces(Menu):
bl_label = "Face"
bl_idname = "VIEW3D_MT_edit_mesh_faces"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("view3d.edit_mesh_extrude_move_normal", text="Extrude Faces")
layout.operator("view3d.edit_mesh_extrude_move_shrink_fatten", text="Extrude Faces Along Normals")
layout.operator("mesh.extrude_faces_move", text="Extrude Individual Faces")
layout.separator()
layout.operator("mesh.inset")
layout.operator("mesh.poke")
props = layout.operator("mesh.quads_convert_to_tris")
props.quad_method = props.ngon_method = 'BEAUTY'
layout.operator("mesh.tris_convert_to_quads")
layout.operator("mesh.solidify", text="Solidify Faces")
layout.operator("mesh.wireframe")
layout.separator()
layout.operator("mesh.fill")
layout.operator("mesh.fill_grid")
layout.operator("mesh.beautify_fill")
layout.separator()
layout.operator("mesh.intersect")
layout.operator("mesh.intersect_boolean")
layout.separator()
layout.operator("mesh.face_split_by_edges")
layout.separator()
layout.operator("mesh.faces_shade_smooth")
layout.operator("mesh.faces_shade_flat")
layout.separator()
layout.menu("VIEW3D_MT_edit_mesh_faces_data")
class VIEW3D_MT_edit_mesh_normals_select_strength(Menu):
bl_label = "Select by Face Strength"
def draw(self, _context):
layout = self.layout
op = layout.operator("mesh.mod_weighted_strength", text="Weak")
op.set = False
op.face_strength = 'WEAK'
op = layout.operator("mesh.mod_weighted_strength", text="Medium")
op.set = False
op.face_strength = 'MEDIUM'
op = layout.operator("mesh.mod_weighted_strength", text="Strong")
op.set = False
op.face_strength = 'STRONG'
class VIEW3D_MT_edit_mesh_normals_set_strength(Menu):
bl_label = "Select by Face Strength"
def draw(self, _context):
layout = self.layout
op = layout.operator("mesh.mod_weighted_strength", text="Weak")
op.set = True
op.face_strength = 'WEAK'
op = layout.operator("mesh.mod_weighted_strength", text="Medium")
op.set = True
op.face_strength = 'MEDIUM'
op = layout.operator("mesh.mod_weighted_strength", text="Strong")
op.set = True
op.face_strength = 'STRONG'
class VIEW3D_MT_edit_mesh_normals_average(Menu):
bl_label = "Average"
def draw(self, _context):
layout = self.layout
layout.operator("mesh.average_normals", text="Custom Normal").average_type = 'CUSTOM_NORMAL'
layout.operator("mesh.average_normals", text="Face Area").average_type = 'FACE_AREA'
layout.operator("mesh.average_normals", text="Corner Angle").average_type = 'CORNER_ANGLE'
class VIEW3D_MT_edit_mesh_normals(Menu):
bl_label = "Normals"
def draw(self, _context):
layout = self.layout
layout.operator("mesh.flip_normals", text="Flip")
layout.operator("mesh.normals_make_consistent", text="Recalculate Outside").inside = False
layout.operator("mesh.normals_make_consistent", text="Recalculate Inside").inside = True
layout.separator()
layout.operator("mesh.set_normals_from_faces", text="Set From Faces")
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("transform.rotate_normal", text="Rotate...")
layout.operator("mesh.point_normals", text="Point to Target...")
layout.operator_context = 'EXEC_DEFAULT'
layout.operator("mesh.merge_normals", text="Merge")
layout.operator("mesh.split_normals", text="Split")
layout.menu("VIEW3D_MT_edit_mesh_normals_average", text="Average")
layout.separator()
layout.operator("mesh.normals_tools", text="Copy Vectors").mode = 'COPY'
layout.operator("mesh.normals_tools", text="Paste Vectors").mode = 'PASTE'
layout.operator("mesh.smooth_normals", text="Smooth Vectors")
layout.operator("mesh.normals_tools", text="Reset Vectors").mode = 'RESET'
layout.separator()
layout.menu("VIEW3D_MT_edit_mesh_normals_select_strength", text="Select by Face Strength")
layout.menu("VIEW3D_MT_edit_mesh_normals_set_strength", text="Set Face Strength")
class VIEW3D_MT_edit_mesh_shading(Menu):
bl_label = "Shading"
def draw(self, _context):
layout = self.layout
layout.operator("mesh.faces_shade_smooth", text="Smooth Faces")
layout.operator("mesh.faces_shade_flat", text="Flat Faces")
layout.separator()
layout.operator("mesh.mark_sharp", text="Smooth Edges").clear = True
layout.operator("mesh.mark_sharp", text="Sharp Edges")
layout.separator()
props = layout.operator("mesh.mark_sharp", text="Smooth Vertices")
props.use_verts = True
props.clear = True
layout.operator("mesh.mark_sharp", text="Sharp Vertices").use_verts = True
class VIEW3D_MT_edit_mesh_weights(Menu):
bl_label = "Weights"
def draw(self, _context):
VIEW3D_MT_paint_weight.draw_generic(self.layout, is_editmode=True)
class VIEW3D_MT_edit_mesh_clean(Menu):
bl_label = "Clean Up"
def draw(self, _context):
layout = self.layout
layout.operator("mesh.delete_loose")
layout.separator()
layout.operator("mesh.decimate")
layout.operator("mesh.dissolve_degenerate")
layout.operator("mesh.dissolve_limited")
layout.operator("mesh.face_make_planar")
layout.separator()
layout.operator("mesh.vert_connect_nonplanar")
layout.operator("mesh.vert_connect_concave")
layout.operator("mesh.remove_doubles")
layout.operator("mesh.fill_holes")
class VIEW3D_MT_edit_mesh_delete(Menu):
bl_label = "Delete"
def draw(self, _context):
layout = self.layout
layout.operator_enum("mesh.delete", "type")
layout.separator()
layout.operator("mesh.dissolve_verts")
layout.operator("mesh.dissolve_edges")
layout.operator("mesh.dissolve_faces")
layout.separator()
layout.operator("mesh.dissolve_limited")
layout.separator()
layout.operator("mesh.edge_collapse")
layout.operator("mesh.delete_edgeloop", text="Edge Loops")
class VIEW3D_MT_edit_mesh_merge(Menu):
bl_label = "Merge"
def draw(self, _context):
layout = self.layout
layout.operator_enum("mesh.merge", "type")
layout.separator()
layout.operator("mesh.remove_doubles", text="By Distance")
class VIEW3D_MT_edit_mesh_split(Menu):
bl_label = "Split"
def draw(self, _context):
layout = self.layout
layout.operator("mesh.split", text="Selection")
layout.separator()
layout.operator_enum("mesh.edge_split", "type")
class VIEW3D_MT_edit_mesh_showhide(ShowHideMenu, Menu):
_operator_name = "mesh"
class VIEW3D_MT_edit_gpencil_delete(Menu):
bl_label = "Delete"
def draw(self, _context):
layout = self.layout
layout.operator_enum("gpencil.delete", "type")
layout.separator()
layout.operator_enum("gpencil.dissolve", "type")
layout.separator()
layout.operator("gpencil.delete", text="Delete Active Keyframe (Active Layer)").type = 'FRAME'
layout.operator("gpencil.active_frames_delete_all", text="Delete Active Keyframes (All Layers)")
# Edit Curve
# draw_curve is used by VIEW3D_MT_edit_curve and VIEW3D_MT_edit_surface
def draw_curve(self, _context):
layout = self.layout
layout.menu("VIEW3D_MT_transform")
layout.menu("VIEW3D_MT_mirror")
layout.menu("VIEW3D_MT_snap")
layout.separator()
layout.operator("curve.spin")
layout.operator("curve.duplicate_move")
layout.separator()
layout.operator("curve.split")
layout.operator("curve.separate")
layout.separator()
layout.operator("curve.cyclic_toggle")
layout.operator_menu_enum("curve.spline_type_set", "type")
layout.separator()
layout.menu("VIEW3D_MT_edit_curve_showhide")
layout.menu("VIEW3D_MT_edit_curve_clean")
layout.menu("VIEW3D_MT_edit_curve_delete")
class VIEW3D_MT_edit_curve(Menu):
bl_label = "Curve"
draw = draw_curve
class VIEW3D_MT_edit_curve_ctrlpoints(Menu):
bl_label = "Control Points"
def draw(self, context):
layout = self.layout
edit_object = context.edit_object
if edit_object.type in {'CURVE', 'SURFACE'}:
layout.operator("curve.extrude_move")
layout.separator()
layout.operator("curve.make_segment")
layout.separator()
if edit_object.type == 'CURVE':
layout.operator("transform.tilt")
layout.operator("curve.tilt_clear")
layout.separator()
layout.operator_menu_enum("curve.handle_type_set", "type")
layout.operator("curve.normals_make_consistent")
layout.separator()
layout.operator("curve.smooth")
if edit_object.type == 'CURVE':
layout.operator("curve.smooth_tilt")
layout.operator("curve.smooth_radius")
layout.operator("curve.smooth_weight")
layout.separator()
layout.menu("VIEW3D_MT_hook")
layout.separator()
layout.operator("object.vertex_parent_set")
class VIEW3D_MT_edit_curve_segments(Menu):
bl_label = "Segments"
def draw(self, _context):
layout = self.layout
layout.operator("curve.subdivide")
layout.operator("curve.switch_direction")
class VIEW3D_MT_edit_curve_clean(Menu):
bl_label = "Clean Up"
def draw(self, _context):
layout = self.layout
layout.operator("curve.decimate")
class VIEW3D_MT_edit_curve_context_menu(Menu):
bl_label = "Curve Context Menu"
def draw(self, _context):
# TODO(campbell): match mesh vertex menu.
layout = self.layout
layout.operator_context = 'INVOKE_DEFAULT'
# Add
layout.operator("curve.subdivide")
layout.operator("curve.extrude_move")
layout.operator("curve.make_segment")
layout.operator("curve.duplicate_move")
layout.separator()
# Transform
layout.operator("transform.transform", text="Radius").mode = 'CURVE_SHRINKFATTEN'
layout.operator("transform.tilt")
layout.operator("curve.tilt_clear")
layout.operator("curve.smooth")
layout.operator("curve.smooth_tilt")
layout.operator("curve.smooth_radius")
layout.separator()
layout.menu("VIEW3D_MT_mirror")
layout.menu("VIEW3D_MT_snap")
layout.separator()
# Modify
layout.operator_menu_enum("curve.spline_type_set", "type")
layout.operator_menu_enum("curve.handle_type_set", "type")
layout.operator("curve.cyclic_toggle")
layout.operator("curve.switch_direction")
layout.separator()
layout.operator("curve.normals_make_consistent")
layout.operator("curve.spline_weight_set")
layout.operator("curve.radius_set")
layout.separator()
# Remove
layout.operator("curve.split")
layout.operator("curve.decimate")
layout.operator("curve.separate")
layout.operator("curve.dissolve_verts")
layout.operator("curve.delete", text="Delete Segment").type = 'SEGMENT'
layout.operator("curve.delete", text="Delete Point").type = 'VERT'
class VIEW3D_MT_edit_curve_delete(Menu):
bl_label = "Delete"
def draw(self, _context):
layout = self.layout
layout.operator_enum("curve.delete", "type")
layout.separator()
layout.operator("curve.dissolve_verts")
class VIEW3D_MT_edit_curve_showhide(ShowHideMenu, Menu):
_operator_name = "curve"
class VIEW3D_MT_edit_surface(Menu):
bl_label = "Surface"
draw = draw_curve
class VIEW3D_MT_edit_font_chars(Menu):
bl_label = "Special Characters"
def draw(self, _context):
layout = self.layout
layout.operator("font.text_insert", text="Copyright").text = "\u00A9"
layout.operator("font.text_insert", text="Registered Trademark").text = "\u00AE"
layout.separator()
layout.operator("font.text_insert", text="Degree Sign").text = "\u00B0"
layout.operator("font.text_insert", text="Multiplication Sign").text = "\u00D7"
layout.operator("font.text_insert", text="Circle").text = "\u008A"
layout.separator()
layout.operator("font.text_insert", text="Superscript 1").text = "\u00B9"
layout.operator("font.text_insert", text="Superscript 2").text = "\u00B2"
layout.operator("font.text_insert", text="Superscript 3").text = "\u00B3"
layout.separator()
layout.operator("font.text_insert", text="Double >>").text = "\u00BB"
layout.operator("font.text_insert", text="Double <<").text = "\u00AB"
layout.operator("font.text_insert", text="Promillage").text = "\u2030"
layout.separator()
layout.operator("font.text_insert", text="Dutch Florin").text = "\u00A4"
layout.operator("font.text_insert", text="British Pound").text = "\u00A3"
layout.operator("font.text_insert", text="Japanese Yen").text = "\u00A5"
layout.separator()
layout.operator("font.text_insert", text="German S").text = "\u00DF"
layout.operator("font.text_insert", text="Spanish Question Mark").text = "\u00BF"
layout.operator("font.text_insert", text="Spanish Exclamation Mark").text = "\u00A1"
class VIEW3D_MT_edit_font_kerning(Menu):
bl_label = "Kerning"
def draw(self, context):
layout = self.layout
ob = context.active_object
text = ob.data
kerning = text.edit_format.kerning
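        # Kerning is adjusted by deltas, so resetting applies the negative of
        # the current value.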
layout.operator("font.change_spacing", text="Decrease Kerning").delta = -1
layout.operator("font.change_spacing", text="Increase Kerning").delta = 1
layout.operator("font.change_spacing", text="Reset Kerning").delta = -kerning
class VIEW3D_MT_edit_font_delete(Menu):
bl_label = "Delete"
def draw(self, _context):
layout = self.layout
layout.operator("font.delete", text="Previous Character").type = 'PREVIOUS_CHARACTER'
layout.operator("font.delete", text="Next Character").type = 'NEXT_CHARACTER'
layout.operator("font.delete", text="Previous Word").type = 'PREVIOUS_WORD'
layout.operator("font.delete", text="Next Word").type = 'NEXT_WORD'
class VIEW3D_MT_edit_font(Menu):
bl_label = "Text"
def draw(self, _context):
layout = self.layout
layout.operator("font.text_cut", text="Cut")
layout.operator("font.text_copy", text="Copy", icon='COPYDOWN')
layout.operator("font.text_paste", text="Paste", icon='PASTEDOWN')
layout.separator()
layout.operator("font.text_paste_from_file")
layout.separator()
layout.operator("font.case_set", text="To Uppercase").case = 'UPPER'
layout.operator("font.case_set", text="To Lowercase").case = 'LOWER'
layout.separator()
layout.menu("VIEW3D_MT_edit_font_chars")
layout.separator()
layout.operator("font.style_toggle", text="Toggle Bold", icon='BOLD').style = 'BOLD'
layout.operator("font.style_toggle", text="Toggle Italic", icon='ITALIC').style = 'ITALIC'
layout.operator("font.style_toggle", text="Toggle Underline", icon='UNDERLINE').style = 'UNDERLINE'
layout.operator("font.style_toggle", text="Toggle Small Caps", icon='SMALL_CAPS').style = 'SMALL_CAPS'
layout.menu("VIEW3D_MT_edit_font_kerning")
layout.separator()
layout.menu("VIEW3D_MT_edit_font_delete")
class VIEW3D_MT_edit_font_context_menu(Menu):
bl_label = "Text Context Menu"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'INVOKE_DEFAULT'
layout.operator("font.text_cut", text="Cut")
layout.operator("font.text_copy", text="Copy", icon='COPYDOWN')
layout.operator("font.text_paste", text="Paste", icon='PASTEDOWN')
layout.separator()
layout.operator("font.select_all")
layout.separator()
layout.menu("VIEW3D_MT_edit_font")
class VIEW3D_MT_edit_meta(Menu):
bl_label = "Metaball"
def draw(self, _context):
layout = self.layout
layout.menu("VIEW3D_MT_transform")
layout.menu("VIEW3D_MT_mirror")
layout.menu("VIEW3D_MT_snap")
layout.separator()
layout.operator("mball.duplicate_metaelems")
layout.separator()
layout.menu("VIEW3D_MT_edit_meta_showhide")
layout.operator_context = 'EXEC_DEFAULT'
layout.operator("mball.delete_metaelems", text="Delete")
class VIEW3D_MT_edit_meta_showhide(Menu):
bl_label = "Show/Hide"
def draw(self, _context):
layout = self.layout
layout.operator("mball.reveal_metaelems")
layout.operator("mball.hide_metaelems", text="Hide Selected").unselected = False
layout.operator("mball.hide_metaelems", text="Hide Unselected").unselected = True
class VIEW3D_MT_edit_lattice(Menu):
bl_label = "Lattice"
def draw(self, _context):
layout = self.layout
layout.separator()
layout.menu("VIEW3D_MT_transform")
layout.menu("VIEW3D_MT_mirror")
layout.menu("VIEW3D_MT_snap")
layout.operator_menu_enum("lattice.flip", "axis")
layout.separator()
layout.operator("lattice.make_regular")
layout.separator()
layout.operator("object.vertex_parent_set")
class VIEW3D_MT_edit_armature(Menu):
bl_label = "Armature"
def draw(self, context):
layout = self.layout
edit_object = context.edit_object
arm = edit_object.data
layout.menu("VIEW3D_MT_transform_armature")
layout.menu("VIEW3D_MT_mirror")
layout.menu("VIEW3D_MT_snap")
layout.menu("VIEW3D_MT_edit_armature_roll")
layout.separator()
layout.operator("armature.extrude_move")
if arm.use_mirror_x:
layout.operator("armature.extrude_forked")
layout.operator("armature.duplicate_move")
layout.operator("armature.fill")
layout.separator()
layout.operator("armature.split")
layout.operator("armature.separate")
layout.separator()
layout.operator("armature.subdivide", text="Subdivide")
layout.operator("armature.switch_direction", text="Switch Direction")
layout.separator()
layout.operator("armature.symmetrize")
layout.menu("VIEW3D_MT_edit_armature_names")
layout.separator()
layout.operator_context = 'INVOKE_DEFAULT'
layout.operator("armature.armature_layers")
layout.operator("armature.bone_layers")
layout.separator()
layout.menu("VIEW3D_MT_edit_armature_parent")
layout.separator()
layout.menu("VIEW3D_MT_bone_options_toggle", text="Bone Settings")
layout.separator()
layout.menu("VIEW3D_MT_edit_armature_delete")
class VIEW3D_MT_armature_context_menu(Menu):
bl_label = "Armature Context Menu"
def draw(self, context):
layout = self.layout
edit_object = context.edit_object
arm = edit_object.data
layout.operator_context = 'INVOKE_REGION_WIN'
# Add
layout.operator("armature.subdivide", text="Subdivide")
layout.operator("armature.duplicate_move", text="Duplicate")
layout.operator("armature.extrude_move")
if arm.use_mirror_x:
layout.operator("armature.extrude_forked")
layout.separator()
layout.operator("armature.fill")
layout.separator()
# Modify
layout.menu("VIEW3D_MT_mirror")
layout.menu("VIEW3D_MT_snap")
layout.operator("armature.symmetrize")
layout.operator("armature.switch_direction", text="Switch Direction")
layout.menu("VIEW3D_MT_edit_armature_names")
layout.separator()
layout.menu("VIEW3D_MT_edit_armature_parent")
layout.separator()
# Remove
layout.operator("armature.split")
layout.operator("armature.separate")
layout.operator("armature.dissolve")
layout.operator("armature.delete")
class VIEW3D_MT_edit_armature_names(Menu):
bl_label = "Names"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'EXEC_REGION_WIN'
layout.operator("armature.autoside_names", text="AutoName Left/Right").type = 'XAXIS'
layout.operator("armature.autoside_names", text="AutoName Front/Back").type = 'YAXIS'
layout.operator("armature.autoside_names", text="AutoName Top/Bottom").type = 'ZAXIS'
layout.operator("armature.flip_names", text="Flip Names")
class VIEW3D_MT_edit_armature_parent(Menu):
bl_label = "Parent"
def draw(self, _context):
layout = self.layout
layout.operator("armature.parent_set", text="Make")
layout.operator("armature.parent_clear", text="Clear")
class VIEW3D_MT_edit_armature_roll(Menu):
bl_label = "Bone Roll"
def draw(self, _context):
layout = self.layout
layout.operator_menu_enum("armature.calculate_roll", "type")
layout.separator()
layout.operator("transform.transform", text="Set Roll").mode = 'BONE_ROLL'
layout.operator("armature.roll_clear")
class VIEW3D_MT_edit_armature_delete(Menu):
bl_label = "Delete"
def draw(self, _context):
layout = self.layout
layout.operator_context = 'EXEC_AREA'
layout.operator("armature.delete", text="Bones")
layout.separator()
layout.operator("armature.dissolve", text="Dissolve Bones")
# ********** Grease Pencil menus **********
class VIEW3D_MT_gpencil_autoweights(Menu):
bl_label = "Generate Weights"
def draw(self, _context):
layout = self.layout
layout.operator("gpencil.generate_weights", text="With Empty Groups").mode = 'NAME'
layout.operator("gpencil.generate_weights", text="With Automatic Weights").mode = 'AUTO'
class VIEW3D_MT_gpencil_simplify(Menu):
bl_label = "Simplify"
def draw(self, _context):
layout = self.layout
layout.operator("gpencil.stroke_simplify_fixed", text="Fixed")
layout.operator("gpencil.stroke_simplify", text="Adaptive")
layout.operator("gpencil.stroke_sample", text="Sample")
class VIEW3D_MT_paint_gpencil(Menu):
bl_label = "Draw"
def draw(self, _context):
layout = self.layout
layout.menu("GPENCIL_MT_layer_active", text="Active Layer")
layout.separator()
layout.menu("VIEW3D_MT_gpencil_animation")
layout.menu("VIEW3D_MT_edit_gpencil_interpolate")
layout.separator()
layout.menu("VIEW3D_MT_edit_gpencil_showhide")
layout.menu("GPENCIL_MT_cleanup")
class VIEW3D_MT_assign_material(Menu):
bl_label = "Assign Material"
def draw(self, context):
layout = self.layout
ob = context.active_object
mat_active = ob.active_material
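        # One entry per material slot; the active material is marked with a
        # highlighted icon.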
for slot in ob.material_slots:
mat = slot.material
if mat:
layout.operator("gpencil.stroke_change_color", text=mat.name,
icon='LAYER_ACTIVE' if mat == mat_active else 'BLANK1').material = mat.name
class VIEW3D_MT_gpencil_copy_layer(Menu):
bl_label = "Copy Layer to Object"
def draw(self, context):
layout = self.layout
view_layer = context.view_layer
obact = context.active_object
gpl = context.active_gpencil_layer
done = False
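        # Offer every other grease-pencil object as a destination for the
        # active layer; fall back to an error label when none qualifies.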
if gpl is not None:
for ob in view_layer.objects:
if ob.type == 'GPENCIL' and ob != obact:
layout.operator("gpencil.layer_duplicate_object", text=ob.name).object = ob.name
done = True
            if not done:
layout.label(text="No destination object", icon='ERROR')
else:
layout.label(text="No layer to copy", icon='ERROR')
class VIEW3D_MT_edit_gpencil(Menu):
bl_label = "Grease Pencil"
def draw(self, _context):
layout = self.layout
layout.menu("VIEW3D_MT_edit_gpencil_transform")
layout.menu("VIEW3D_MT_mirror")
layout.menu("GPENCIL_MT_snap")
layout.separator()
layout.menu("GPENCIL_MT_layer_active", text="Active Layer")
layout.separator()
layout.menu("VIEW3D_MT_gpencil_animation")
layout.menu("VIEW3D_MT_edit_gpencil_interpolate")
layout.separator()
# Cut, Copy, Paste
layout.operator("gpencil.duplicate_move", text="Duplicate")
layout.operator("gpencil.stroke_split", text="Split")
layout.operator("gpencil.copy", text="Copy", icon='COPYDOWN')
layout.operator("gpencil.paste", text="Paste", icon='PASTEDOWN').type = 'ACTIVE'
layout.operator("gpencil.paste", text="Paste by Layer").type = 'LAYER'
layout.separator()
layout.menu("VIEW3D_MT_weight_gpencil")
layout.separator()
layout.menu("VIEW3D_MT_edit_gpencil_showhide")
layout.operator_menu_enum("gpencil.stroke_separate", "mode")
layout.menu("GPENCIL_MT_cleanup")
layout.separator()
# Remove
layout.menu("VIEW3D_MT_edit_gpencil_delete")
class VIEW3D_MT_edit_gpencil_stroke(Menu):
bl_label = "Stroke"
    def draw(self, context):
        layout = self.layout
        settings = context.tool_settings.gpencil_sculpt
layout.operator("gpencil.stroke_subdivide", text="Subdivide").only_selected = False
layout.menu("VIEW3D_MT_gpencil_simplify")
layout.operator("gpencil.stroke_trim", text="Trim")
layout.separator()
layout.operator_menu_enum("gpencil.stroke_join", "type", text="Join")
layout.separator()
layout.menu("GPENCIL_MT_move_to_layer")
layout.menu("VIEW3D_MT_assign_material")
layout.operator("gpencil.set_active_material", text="Set as Active Material")
layout.operator_menu_enum("gpencil.stroke_arrange", "direction", text="Arrange Strokes")
layout.separator()
# Convert
op = layout.operator("gpencil.stroke_cyclical_set", text="Close")
op.type = 'CLOSE'
op.geometry = True
layout.operator("gpencil.stroke_cyclical_set", text="Toggle Cyclic").type = 'TOGGLE'
layout.operator_menu_enum("gpencil.stroke_caps_set", text="Toggle Caps", property="type")
layout.operator("gpencil.stroke_flip", text="Switch Direction")
layout.prop(settings, "use_scale_thickness")
layout.separator()
layout.operator("gpencil.reset_transform_fill", text="Reset Fill Transform")
class VIEW3D_MT_edit_gpencil_point(Menu):
bl_label = "Point"
def draw(self, _context):
layout = self.layout
layout.operator("gpencil.extrude_move", text="Extrude Points")
layout.separator()
layout.operator("gpencil.stroke_smooth", text="Smooth Points").only_selected = True
layout.separator()
layout.operator("gpencil.stroke_merge", text="Merge Points")
# TODO: add new RIP operator
layout.separator()
layout.menu("VIEW3D_MT_gpencil_vertex_group")
class VIEW3D_MT_weight_gpencil(Menu):
bl_label = "Weights"
def draw(self, _context):
layout = self.layout
layout.operator("gpencil.vertex_group_normalize_all", text="Normalize All")
layout.operator("gpencil.vertex_group_normalize", text="Normalize")
layout.separator()
layout.operator("gpencil.vertex_group_invert", text="Invert")
layout.operator("gpencil.vertex_group_smooth", text="Smooth")
layout.separator()
layout.menu("VIEW3D_MT_gpencil_autoweights")
class VIEW3D_MT_vertex_gpencil(Menu):
bl_label = "Paint"
def draw(self, _context):
layout = self.layout
layout.operator("gpencil.vertex_color_set", text="Set Vertex Colors")
layout.separator()
layout.operator("gpencil.vertex_color_invert", text="Invert")
layout.operator("gpencil.vertex_color_levels", text="Levels")
layout.operator("gpencil.vertex_color_hsv", text="Hue Saturation Value")
layout.operator("gpencil.vertex_color_brightness_contrast", text="Bright/Contrast")
layout.separator()
layout.menu("VIEW3D_MT_join_palette")
class VIEW3D_MT_gpencil_animation(Menu):
bl_label = "Animation"
@classmethod
def poll(cls, context):
ob = context.active_object
return ob and ob.type == 'GPENCIL' and ob.mode != 'OBJECT'
def draw(self, _context):
layout = self.layout
layout.operator("gpencil.blank_frame_add", text="Insert Blank Keyframe (Active Layer)")
layout.operator("gpencil.blank_frame_add", text="Insert Blank Keyframe (All Layers)").all_layers = True
layout.separator()
layout.operator("gpencil.frame_duplicate", text="Duplicate Active Keyframe (Active Layer)")
layout.operator("gpencil.frame_duplicate", text="Duplicate Active Keyframe (All Layers)").mode = 'ALL'
layout.separator()
layout.operator("gpencil.delete", text="Delete Active Keyframe (Active Layer)").type = 'FRAME'
layout.operator("gpencil.active_frames_delete_all", text="Delete Active Keyframes (All Layers)")
class VIEW3D_MT_edit_gpencil_transform(Menu):
bl_label = "Transform"
def draw(self, _context):
layout = self.layout
layout.operator("transform.translate")
layout.operator("transform.rotate")
layout.operator("transform.resize", text="Scale")
layout.separator()
layout.operator("transform.bend", text="Bend")
layout.operator("transform.shear", text="Shear")
layout.operator("transform.tosphere", text="To Sphere")
layout.operator("transform.transform", text="Shrink Fatten").mode = 'GPENCIL_SHRINKFATTEN'
class VIEW3D_MT_edit_gpencil_showhide(Menu):
bl_label = "Show/hide"
def draw(self, _context):
layout = self.layout
layout.operator("gpencil.reveal", text="Show All Layers")
layout.separator()
layout.operator("gpencil.hide", text="Hide Active Layer").unselected = False
layout.operator("gpencil.hide", text="Hide Inactive Layers").unselected = True
class VIEW3D_MT_edit_gpencil_interpolate(Menu):
bl_label = "Interpolate"
def draw(self, _context):
layout = self.layout
layout.operator("gpencil.interpolate", text="Interpolate")
layout.operator("gpencil.interpolate_sequence", text="Sequence")
class VIEW3D_MT_object_mode_pie(Menu):
bl_label = "Mode"
def draw(self, _context):
layout = self.layout
pie = layout.menu_pie()
pie.operator_enum("object.mode_set", "mode")
class VIEW3D_MT_view_pie(Menu):
bl_label = "View"
bl_idname = "VIEW3D_MT_view_pie"
def draw(self, _context):
layout = self.layout
pie = layout.menu_pie()
pie.operator_enum("view3d.view_axis", "type")
pie.operator("view3d.view_camera", text="View Camera", icon='CAMERA_DATA')
pie.operator("view3d.view_selected", text="View Selected", icon='ZOOM_SELECTED')
class VIEW3D_MT_transform_gizmo_pie(Menu):
bl_label = "View"
def draw(self, context):
layout = self.layout
pie = layout.menu_pie()
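        # `type` is an enum-flag property, so each pie slot assigns a set of
        # gizmo kinds (the "All" slot enables all three at once).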
# 1: Left
pie.operator("view3d.transform_gizmo_set", text="Move").type = {'TRANSLATE'}
# 2: Right
pie.operator("view3d.transform_gizmo_set", text="Rotate").type = {'ROTATE'}
# 3: Down
pie.operator("view3d.transform_gizmo_set", text="Scale").type = {'SCALE'}
# 4: Up
pie.prop(context.space_data, "show_gizmo", text="Show Gizmos", icon='GIZMO')
# 5: Up/Left
pie.operator("view3d.transform_gizmo_set", text="All").type = {'TRANSLATE', 'ROTATE', 'SCALE'}
class VIEW3D_MT_shading_pie(Menu):
bl_label = "Shading"
def draw(self, context):
layout = self.layout
pie = layout.menu_pie()
view = context.space_data
pie.prop(view.shading, "type", expand=True)
class VIEW3D_MT_shading_ex_pie(Menu):
bl_label = "Shading"
def draw(self, context):
layout = self.layout
pie = layout.menu_pie()
view = context.space_data
pie.prop_enum(view.shading, "type", value='WIREFRAME')
pie.prop_enum(view.shading, "type", value='SOLID')
# Note this duplicates "view3d.toggle_xray" logic, so we can see the active item: T58661.
if context.pose_object:
pie.prop(view.overlay, "show_xray_bone", icon='XRAY')
else:
xray_active = (
(context.mode == 'EDIT_MESH') or
(view.shading.type in {'SOLID', 'WIREFRAME'})
)
if xray_active:
sub = pie
else:
sub = pie.row()
sub.active = False
sub.prop(
view.shading,
"show_xray_wireframe" if (view.shading.type == 'WIREFRAME') else "show_xray",
text="Toggle X-Ray",
icon='XRAY',
)
pie.prop(view.overlay, "show_overlays", text="Toggle Overlays", icon='OVERLAY')
pie.prop_enum(view.shading, "type", value='MATERIAL')
pie.prop_enum(view.shading, "type", value='RENDERED')
class VIEW3D_MT_pivot_pie(Menu):
bl_label = "Pivot Point"
def draw(self, context):
layout = self.layout
pie = layout.menu_pie()
obj = context.active_object
mode = context.mode
pie.prop_enum(context.scene.tool_settings, "transform_pivot_point", value='BOUNDING_BOX_CENTER')
pie.prop_enum(context.scene.tool_settings, "transform_pivot_point", value='CURSOR')
pie.prop_enum(context.scene.tool_settings, "transform_pivot_point", value='INDIVIDUAL_ORIGINS')
pie.prop_enum(context.scene.tool_settings, "transform_pivot_point", value='MEDIAN_POINT')
pie.prop_enum(context.scene.tool_settings, "transform_pivot_point", value='ACTIVE_ELEMENT')
if (obj is None) or (mode in {'OBJECT', 'POSE', 'WEIGHT_PAINT'}):
pie.prop(context.scene.tool_settings, "use_transform_pivot_point_align")
class VIEW3D_MT_orientations_pie(Menu):
bl_label = "Orientation"
def draw(self, context):
layout = self.layout
pie = layout.menu_pie()
scene = context.scene
pie.prop(scene.transform_orientation_slots[0], "type", expand=True)
class VIEW3D_MT_snap_pie(Menu):
bl_label = "Snap"
def draw(self, _context):
layout = self.layout
pie = layout.menu_pie()
pie.operator("view3d.snap_cursor_to_grid", text="Cursor to Grid", icon='CURSOR')
pie.operator("view3d.snap_selected_to_grid", text="Selection to Grid", icon='RESTRICT_SELECT_OFF')
pie.operator("view3d.snap_cursor_to_selected", text="Cursor to Selected", icon='CURSOR')
pie.operator(
"view3d.snap_selected_to_cursor",
text="Selection to Cursor",
icon='RESTRICT_SELECT_OFF',
).use_offset = False
pie.operator(
"view3d.snap_selected_to_cursor",
text="Selection to Cursor (Keep Offset)",
icon='RESTRICT_SELECT_OFF',
).use_offset = True
pie.operator("view3d.snap_selected_to_active", text="Selection to Active", icon='RESTRICT_SELECT_OFF')
pie.operator("view3d.snap_cursor_to_center", text="Cursor to World Origin", icon='CURSOR')
pie.operator("view3d.snap_cursor_to_active", text="Cursor to Active", icon='CURSOR')
class VIEW3D_MT_proportional_editing_falloff_pie(Menu):
bl_label = "Proportional Editing Falloff"
def draw(self, context):
layout = self.layout
pie = layout.menu_pie()
tool_settings = context.scene.tool_settings
pie.prop(tool_settings, "proportional_edit_falloff", expand=True)
class VIEW3D_MT_sculpt_mask_edit_pie(Menu):
bl_label = "Mask Edit"
def draw(self, _context):
layout = self.layout
pie = layout.menu_pie()
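        # Each slot pre-configures paint.mask_flood_fill or sculpt.mask_filter;
        # iteration count is automatic except for the contrast filters.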
op = pie.operator("paint.mask_flood_fill", text='Invert Mask')
op.mode = 'INVERT'
op = pie.operator("paint.mask_flood_fill", text='Clear Mask')
op.mode = 'VALUE'
op.value = 0.0
op = pie.operator("sculpt.mask_filter", text='Smooth Mask')
op.filter_type = 'SMOOTH'
op.auto_iteration_count = True
op = pie.operator("sculpt.mask_filter", text='Sharpen Mask')
op.filter_type = 'SHARPEN'
op.auto_iteration_count = True
op = pie.operator("sculpt.mask_filter", text='Grow Mask')
op.filter_type = 'GROW'
op.auto_iteration_count = True
op = pie.operator("sculpt.mask_filter", text='Shrink Mask')
op.filter_type = 'SHRINK'
op.auto_iteration_count = True
op = pie.operator("sculpt.mask_filter", text='Increase Contrast')
op.filter_type = 'CONTRAST_INCREASE'
op.auto_iteration_count = False
op = pie.operator("sculpt.mask_filter", text='Decrease Contrast')
op.filter_type = 'CONTRAST_DECREASE'
op.auto_iteration_count = False
class VIEW3D_MT_sculpt_face_sets_edit_pie(Menu):
bl_label = "Face Sets Edit"
def draw(self, _context):
layout = self.layout
pie = layout.menu_pie()
op = pie.operator("sculpt.face_sets_create", text='Face Set From Masked')
op.mode = 'MASKED'
op = pie.operator("sculpt.face_sets_create", text='Face Set From Visible')
op.mode = 'VISIBLE'
op = pie.operator("sculpt.face_set_change_visibility", text='Invert Visible')
op.mode = 'INVERT'
op = pie.operator("sculpt.face_set_change_visibility", text='Show All')
op.mode = 'SHOW_ALL'
class VIEW3D_MT_wpaint_vgroup_lock_pie(Menu):
bl_label = "Vertex Group Locks"
def draw(self, _context):
layout = self.layout
pie = layout.menu_pie()
# 1: Left
op = pie.operator("object.vertex_group_lock", icon='LOCKED', text="Lock All")
op.action, op.mask = 'LOCK', 'ALL'
# 2: Right
op = pie.operator("object.vertex_group_lock", icon='UNLOCKED', text="Unlock All")
op.action, op.mask = 'UNLOCK', 'ALL'
# 3: Down
op = pie.operator("object.vertex_group_lock", icon='UNLOCKED', text="Unlock Selected")
op.action, op.mask = 'UNLOCK', 'SELECTED'
# 4: Up
op = pie.operator("object.vertex_group_lock", icon='LOCKED', text="Lock Selected")
op.action, op.mask = 'LOCK', 'SELECTED'
# 5: Up/Left
op = pie.operator("object.vertex_group_lock", icon='LOCKED', text="Lock Unselected")
op.action, op.mask = 'LOCK', 'UNSELECTED'
# 6: Up/Right
op = pie.operator("object.vertex_group_lock", text="Lock Only Selected")
op.action, op.mask = 'LOCK', 'INVERT_UNSELECTED'
# 7: Down/Left
op = pie.operator("object.vertex_group_lock", text="Lock Only Unselected")
op.action, op.mask = 'UNLOCK', 'INVERT_UNSELECTED'
# 8: Down/Right
op = pie.operator("object.vertex_group_lock", text="Invert Locks")
op.action, op.mask = 'INVERT', 'ALL'
# ********** Panel **********
class VIEW3D_PT_active_tool(Panel, ToolActivePanelHelper):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "Tool"
# See comment below.
# bl_options = {'HIDE_HEADER'}
# Don't show in properties editor.
@classmethod
def poll(cls, context):
return context.area.type == 'VIEW_3D'
# FIXME(campbell): remove this second panel once 'HIDE_HEADER' works with category tabs;
# currently, pinning allows ordering headerless panels below panels with headers.
class VIEW3D_PT_active_tool_duplicate(Panel, ToolActivePanelHelper):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "Tool"
bl_options = {'HIDE_HEADER'}
# Only show in properties editor.
@classmethod
def poll(cls, context):
return context.area.type != 'VIEW_3D'
class VIEW3D_PT_view3d_properties(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "View"
bl_label = "View"
def draw(self, context):
layout = self.layout
view = context.space_data
layout.use_property_split = True
layout.use_property_decorate = False # No animation.
col = layout.column()
subcol = col.column()
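        # Focal length is taken from the camera while looking through it, so the
        # field is grayed out in camera view unless quad views are enabled.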
subcol.active = bool(view.region_3d.view_perspective != 'CAMERA' or view.region_quadviews)
subcol.prop(view, "lens", text="Focal Length")
subcol = col.column(align=True)
subcol.prop(view, "clip_start", text="Clip Start")
subcol.prop(view, "clip_end", text="End")
layout.separator()
col = layout.column(align=False, heading="Local Camera")
col.use_property_decorate = False
row = col.row(align=True)
sub = row.row(align=True)
sub.prop(view, "use_local_camera", text="")
sub = sub.row(align=True)
sub.enabled = view.use_local_camera
sub.prop(view, "camera", text="")
layout.separator()
col = layout.column(align=True)
col.prop(view, "use_render_border")
col.active = view.region_3d.view_perspective != 'CAMERA'
class VIEW3D_PT_view3d_lock(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "View"
bl_label = "View Lock"
bl_parent_id = "VIEW3D_PT_view3d_properties"
def draw(self, context):
layout = self.layout
layout.use_property_split = True
layout.use_property_decorate = False # No animation.
view = context.space_data
col = layout.column(align=True)
sub = col.column()
sub.active = bool(view.region_3d.view_perspective != 'CAMERA' or view.region_quadviews)
sub.prop(view, "lock_object")
lock_object = view.lock_object
if lock_object:
if lock_object.type == 'ARMATURE':
sub.prop_search(
view, "lock_bone", lock_object.data,
"edit_bones" if lock_object.mode == 'EDIT'
else "bones",
text="",
)
else:
subcol = sub.column(heading="Lock")
subcol.prop(view, "lock_cursor", text="To 3D Cursor")
col.prop(view, "lock_camera", text="Camera to View")
class VIEW3D_PT_view3d_cursor(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "View"
bl_label = "3D Cursor"
def draw(self, context):
layout = self.layout
cursor = context.scene.cursor
layout.column().prop(cursor, "location", text="Location")
rotation_mode = cursor.rotation_mode
if rotation_mode == 'QUATERNION':
layout.column().prop(cursor, "rotation_quaternion", text="Rotation")
elif rotation_mode == 'AXIS_ANGLE':
layout.column().prop(cursor, "rotation_axis_angle", text="Rotation")
else:
layout.column().prop(cursor, "rotation_euler", text="Rotation")
layout.prop(cursor, "rotation_mode", text="")
class VIEW3D_PT_collections(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "View"
bl_label = "Collections"
bl_options = {'DEFAULT_CLOSED'}
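    # Walks the layer-collection tree recursively; `index` numbers collections
    # in depth-first order so object.hide_collection can address them.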
def _draw_collection(self, layout, view_layer, use_local_collections, collection, index):
need_separator = index
for child in collection.children:
index += 1
if child.exclude:
continue
if child.collection.hide_viewport:
continue
if need_separator:
layout.separator()
need_separator = False
icon = 'BLANK1'
# has_objects = True
if child.has_selected_objects(view_layer):
icon = 'LAYER_ACTIVE'
elif child.has_objects():
icon = 'LAYER_USED'
else:
# has_objects = False
pass
row = layout.row()
row.use_property_decorate = False
sub = row.split(factor=0.98)
subrow = sub.row()
subrow.alignment = 'LEFT'
subrow.operator(
"object.hide_collection", text=child.name, icon=icon, emboss=False,
).collection_index = index
sub = row.split()
subrow = sub.row(align=True)
subrow.alignment = 'RIGHT'
if not use_local_collections:
subrow.active = collection.is_visible # Parent collection runtime visibility
subrow.prop(child, "hide_viewport", text="", emboss=False)
else:
subrow.active = collection.visible_get() # Parent collection runtime visibility
icon = 'HIDE_OFF' if child.visible_get() else 'HIDE_ON'
props = subrow.operator("object.hide_collection", text="", icon=icon, emboss=False)
props.collection_index = index
props.toggle = True
for child in collection.children:
index = self._draw_collection(layout, view_layer, use_local_collections, child, index)
return index
def draw(self, context):
layout = self.layout
layout.use_property_split = False
view = context.space_data
view_layer = context.view_layer
layout.use_property_split = True
layout.prop(view, "use_local_collections")
layout.separator()
        # Start the recursion at index 0: the master collection itself is
        # skipped, so the first collection actually drawn gets index 1.
self._draw_collection(layout, view_layer, view.use_local_collections, view_layer.layer_collection, 0)
class VIEW3D_PT_object_type_visibility(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "View Object Types"
bl_ui_units_x = 6
def draw(self, context):
layout = self.layout
layout.use_property_split = True
view = context.space_data
layout.label(text="Object Types Visibility")
col = layout.column()
attr_object_types = (
# Geometry
("mesh", "Mesh"),
("curve", "Curve"),
("surf", "Surface"),
("meta", "Meta"),
("font", "Text"),
("hair", "Hair"),
("pointcloud", "Point Cloud"),
("volume", "Volume"),
("grease_pencil", "Grease Pencil"),
(None, None),
# Other
("armature", "Armature"),
("lattice", "Lattice"),
("empty", "Empty"),
("light", "Light"),
("light_probe", "Light Probe"),
("camera", "Camera"),
("speaker", "Speaker"),
)
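        # Each row pairs a viewport-visibility toggle with a selectability
        # toggle; the RNA property names are derived from the type identifier.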
for attr, attr_name in attr_object_types:
if attr is None:
col.separator()
continue
if attr == "hair" and not hasattr(bpy.data, "hairs"):
continue
elif attr == "pointcloud" and not hasattr(bpy.data, "pointclouds"):
continue
attr_v = "show_object_viewport_" + attr
attr_s = "show_object_select_" + attr
icon_v = 'HIDE_OFF' if getattr(view, attr_v) else 'HIDE_ON'
icon_s = 'RESTRICT_SELECT_OFF' if getattr(view, attr_s) else 'RESTRICT_SELECT_ON'
row = col.row(align=True)
row.alignment = 'RIGHT'
row.label(text=attr_name)
row.prop(view, attr_v, text="", icon=icon_v, emboss=False)
rowsub = row.row(align=True)
rowsub.active = getattr(view, attr_v)
rowsub.prop(view, attr_s, text="", icon=icon_s, emboss=False)
class VIEW3D_PT_shading(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Shading"
bl_ui_units_x = 12
@classmethod
def get_shading(cls, context):
# Get settings from 3D viewport or OpenGL render engine
view = context.space_data
if view.type == 'VIEW_3D':
return view.shading
else:
return context.scene.display.shading
def draw(self, _context):
layout = self.layout
layout.label(text="Viewport Shading")
class VIEW3D_PT_shading_lighting(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Lighting"
bl_parent_id = 'VIEW3D_PT_shading'
@classmethod
def poll(cls, context):
shading = VIEW3D_PT_shading.get_shading(context)
engine = context.scene.render.engine
        return shading.type in {'SOLID', 'MATERIAL'} or (engine == 'BLENDER_EEVEE' and shading.type == 'RENDERED')
def draw(self, context):
layout = self.layout
shading = VIEW3D_PT_shading.get_shading(context)
col = layout.column()
split = col.split(factor=0.9)
if shading.type == 'SOLID':
split.row().prop(shading, "light", expand=True)
col = split.column()
split = layout.split(factor=0.9)
col = split.column()
sub = col.row()
if shading.light == 'STUDIO':
prefs = context.preferences
system = prefs.system
if not system.use_studio_light_edit:
sub.scale_y = 0.6 # smaller studiolight preview
sub.template_icon_view(shading, "studio_light", scale_popup=3.0)
else:
sub.prop(
system,
"use_studio_light_edit",
text="Disable Studio Light Edit",
icon='NONE',
toggle=True,
)
col = split.column()
col.operator("preferences.studiolight_show", emboss=False, text="", icon='PREFERENCES')
split = layout.split(factor=0.9)
col = split.column()
row = col.row()
row.prop(shading, "use_world_space_lighting", text="", icon='WORLD', toggle=True)
row = row.row()
row.active = shading.use_world_space_lighting
row.prop(shading, "studiolight_rotate_z", text="Rotation")
col = split.column() # to align properly with above
elif shading.light == 'MATCAP':
sub.scale_y = 0.6 # smaller matcap preview
sub.template_icon_view(shading, "studio_light", scale_popup=3.0)
col = split.column()
col.operator("preferences.studiolight_show", emboss=False, text="", icon='PREFERENCES')
col.operator("view3d.toggle_matcap_flip", emboss=False, text="", icon='ARROW_LEFTRIGHT')
elif shading.type == 'MATERIAL':
col.prop(shading, "use_scene_lights")
col.prop(shading, "use_scene_world")
col = layout.column()
split = col.split(factor=0.9)
if not shading.use_scene_world:
col = split.column()
sub = col.row()
sub.scale_y = 0.6
sub.template_icon_view(shading, "studio_light", scale_popup=3)
col = split.column()
col.operator("preferences.studiolight_show", emboss=False, text="", icon='PREFERENCES')
split = layout.split(factor=0.9)
col = split.column()
col.prop(shading, "studiolight_rotate_z", text="Rotation")
col.prop(shading, "studiolight_intensity")
col.prop(shading, "studiolight_background_alpha")
col.prop(shading, "studiolight_background_blur")
col = split.column() # to align properly with above
elif shading.type == 'RENDERED':
col.prop(shading, "use_scene_lights_render")
col.prop(shading, "use_scene_world_render")
if not shading.use_scene_world_render:
col = layout.column()
split = col.split(factor=0.9)
col = split.column()
sub = col.row()
sub.scale_y = 0.6
sub.template_icon_view(shading, "studio_light", scale_popup=3)
col = split.column()
col.operator("preferences.studiolight_show", emboss=False, text="", icon='PREFERENCES')
split = layout.split(factor=0.9)
col = split.column()
col.prop(shading, "studiolight_rotate_z", text="Rotation")
col.prop(shading, "studiolight_intensity")
col.prop(shading, "studiolight_background_alpha")
col.prop(shading, "studiolight_background_blur")
col = split.column() # to align properly with above
class VIEW3D_PT_shading_color(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Color"
bl_parent_id = 'VIEW3D_PT_shading'
@classmethod
def poll(cls, context):
shading = VIEW3D_PT_shading.get_shading(context)
return shading.type in {'WIREFRAME', 'SOLID'}
def _draw_color_type(self, context):
layout = self.layout
shading = VIEW3D_PT_shading.get_shading(context)
layout.grid_flow(columns=3, align=True).prop(shading, "color_type", expand=True)
if shading.color_type == 'SINGLE':
layout.row().prop(shading, "single_color", text="")
def _draw_background_color(self, context):
layout = self.layout
shading = VIEW3D_PT_shading.get_shading(context)
layout.row().label(text="Background")
layout.row().prop(shading, "background_type", expand=True)
if shading.background_type == 'VIEWPORT':
layout.row().prop(shading, "background_color", text="")
def draw(self, context):
shading = VIEW3D_PT_shading.get_shading(context)
if shading.type == 'WIREFRAME':
self.layout.row().prop(shading, "wireframe_color_type", expand=True)
else:
self._draw_color_type(context)
self.layout.separator()
self._draw_background_color(context)
class VIEW3D_PT_shading_options(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Options"
bl_parent_id = 'VIEW3D_PT_shading'
@classmethod
def poll(cls, context):
shading = VIEW3D_PT_shading.get_shading(context)
return shading.type in {'WIREFRAME', 'SOLID'}
def draw(self, context):
layout = self.layout
shading = VIEW3D_PT_shading.get_shading(context)
col = layout.column()
if shading.type == 'SOLID':
col.prop(shading, "show_backface_culling")
row = col.row(align=True)
if shading.type == 'WIREFRAME':
row.prop(shading, "show_xray_wireframe", text="")
sub = row.row()
sub.active = shading.show_xray_wireframe
sub.prop(shading, "xray_alpha_wireframe", text="X-Ray")
elif shading.type == 'SOLID':
row.prop(shading, "show_xray", text="")
sub = row.row()
sub.active = shading.show_xray
sub.prop(shading, "xray_alpha", text="X-Ray")
# X-ray mode is off when alpha is 1.0
xray_active = shading.show_xray and shading.xray_alpha != 1
row = col.row(align=True)
row.prop(shading, "show_shadows", text="")
row.active = not xray_active
sub = row.row(align=True)
sub.active = shading.show_shadows
sub.prop(shading, "shadow_intensity", text="Shadow")
sub.popover(
panel="VIEW3D_PT_shading_options_shadow",
icon='PREFERENCES',
text="",
)
col = layout.column()
row = col.row()
row.active = not xray_active
row.prop(shading, "show_cavity")
if shading.show_cavity and not xray_active:
row.prop(shading, "cavity_type", text="Type")
if shading.cavity_type in {'WORLD', 'BOTH'}:
col.label(text="World Space")
sub = col.row(align=True)
sub.prop(shading, "cavity_ridge_factor", text="Ridge")
sub.prop(shading, "cavity_valley_factor", text="Valley")
sub.popover(
panel="VIEW3D_PT_shading_options_ssao",
icon='PREFERENCES',
text="",
)
if shading.cavity_type in {'SCREEN', 'BOTH'}:
col.label(text="Screen Space")
sub = col.row(align=True)
sub.prop(shading, "curvature_ridge_factor", text="Ridge")
sub.prop(shading, "curvature_valley_factor", text="Valley")
row = col.row()
row.active = not xray_active
row.prop(shading, "use_dof", text="Depth Of Field")
if shading.type in {'WIREFRAME', 'SOLID'}:
row = layout.split()
row.prop(shading, "show_object_outline")
sub = row.row()
sub.active = shading.show_object_outline
sub.prop(shading, "object_outline_color", text="")
if shading.type == 'SOLID':
col = layout.column()
if shading.light in {'STUDIO', 'MATCAP'}:
col.active = shading.selected_studio_light.has_specular_highlight_pass
col.prop(shading, "show_specular_highlight", text="Specular Lighting")
class VIEW3D_PT_shading_options_shadow(Panel):
bl_label = "Shadow Settings"
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
def draw(self, context):
layout = self.layout
layout.use_property_split = True
scene = context.scene
col = layout.column()
col.prop(scene.display, "light_direction")
col.prop(scene.display, "shadow_shift")
col.prop(scene.display, "shadow_focus")
class VIEW3D_PT_shading_options_ssao(Panel):
bl_label = "SSAO Settings"
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
def draw(self, context):
layout = self.layout
layout.use_property_split = True
scene = context.scene
col = layout.column(align=True)
col.prop(scene.display, "matcap_ssao_samples")
col.prop(scene.display, "matcap_ssao_distance")
col.prop(scene.display, "matcap_ssao_attenuation")
class VIEW3D_PT_shading_render_pass(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Render Pass"
bl_parent_id = 'VIEW3D_PT_shading'
COMPAT_ENGINES = {'BLENDER_EEVEE'}
@classmethod
def poll(cls, context):
return (
(context.space_data.shading.type == 'MATERIAL') or
(context.engine in cls.COMPAT_ENGINES and context.space_data.shading.type == 'RENDERED')
)
def draw(self, context):
shading = context.space_data.shading
layout = self.layout
layout.prop(shading, "render_pass", text="")
class VIEW3D_PT_gizmo_display(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Gizmo"
def draw(self, context):
layout = self.layout
scene = context.scene
view = context.space_data
col = layout.column()
col.label(text="Viewport Gizmos")
col.active = view.show_gizmo
colsub = col.column()
colsub.prop(view, "show_gizmo_navigate", text="Navigate")
colsub.prop(view, "show_gizmo_tool", text="Active Tools")
colsub.prop(view, "show_gizmo_context", text="Active Object")
layout.separator()
col = layout.column()
col.active = view.show_gizmo_context
col.label(text="Object Gizmos")
col.prop(scene.transform_orientation_slots[1], "type", text="")
col.prop(view, "show_gizmo_object_translate", text="Move")
col.prop(view, "show_gizmo_object_rotate", text="Rotate")
col.prop(view, "show_gizmo_object_scale", text="Scale")
layout.separator()
# Match order of object type visibility
col = layout.column()
col.label(text="Empty")
col.prop(view, "show_gizmo_empty_image", text="Image")
col.prop(view, "show_gizmo_empty_force_field", text="Force Field")
col.label(text="Light")
col.prop(view, "show_gizmo_light_size", text="Size")
col.prop(view, "show_gizmo_light_look_at", text="Look At")
col.label(text="Camera")
col.prop(view, "show_gizmo_camera_lens", text="Lens")
col.prop(view, "show_gizmo_camera_dof_distance", text="Focus Distance")
class VIEW3D_PT_overlay(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Overlays"
bl_ui_units_x = 13
def draw(self, _context):
layout = self.layout
layout.label(text="Viewport Overlays")
class VIEW3D_PT_overlay_guides(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = "Guides"
def draw(self, context):
layout = self.layout
view = context.space_data
scene = context.scene
overlay = view.overlay
shading = view.shading
display_all = overlay.show_overlays
col = layout.column()
col.active = display_all
split = col.split()
sub = split.column()
row = sub.row()
row_el = row.column()
row_el.prop(overlay, "show_ortho_grid", text="Grid")
grid_active = bool(
view.region_quadviews or
(view.region_3d.is_orthographic_side_view and not view.region_3d.is_perspective)
)
row_el.active = grid_active
row.prop(overlay, "show_floor", text="Floor")
if overlay.show_floor or overlay.show_ortho_grid:
sub = col.row(align=True)
sub.active = (
(overlay.show_floor and not view.region_3d.is_orthographic_side_view) or
(overlay.show_ortho_grid and grid_active)
)
sub.prop(overlay, "grid_scale", text="Scale")
sub = sub.row(align=True)
sub.active = scene.unit_settings.system == 'NONE'
sub.prop(overlay, "grid_subdivisions", text="Subdivisions")
sub = split.column()
row = sub.row()
row.label(text="Axes")
subrow = row.row(align=True)
subrow.prop(overlay, "show_axis_x", text="X", toggle=True)
subrow.prop(overlay, "show_axis_y", text="Y", toggle=True)
subrow.prop(overlay, "show_axis_z", text="Z", toggle=True)
split = col.split()
sub = split.column()
sub.prop(overlay, "show_text", text="Text Info")
sub.prop(overlay, "show_stats", text="Statistics")
sub = split.column()
sub.prop(overlay, "show_cursor", text="3D Cursor")
sub.prop(overlay, "show_annotation", text="Annotations")
if shading.type == 'MATERIAL':
row = col.row()
row.active = shading.render_pass == 'COMBINED'
row.prop(overlay, "show_look_dev")
class VIEW3D_PT_overlay_object(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = "Objects"
def draw(self, context):
layout = self.layout
view = context.space_data
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column(align=True)
col.active = display_all
split = col.split()
sub = split.column(align=True)
sub.prop(overlay, "show_extras", text="Extras")
sub.prop(overlay, "show_relationship_lines")
sub.prop(overlay, "show_outline_selected")
sub = split.column(align=True)
sub.prop(overlay, "show_bones", text="Bones")
sub.prop(overlay, "show_motion_paths")
sub.prop(overlay, "show_object_origins", text="Origins")
subsub = sub.column()
subsub.active = overlay.show_object_origins
subsub.prop(overlay, "show_object_origins_all", text="Origins (All)")
class VIEW3D_PT_overlay_geometry(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = "Geometry"
def draw(self, context):
layout = self.layout
view = context.space_data
overlay = view.overlay
display_all = overlay.show_overlays
is_wireframes = view.shading.type == 'WIREFRAME'
col = layout.column()
col.active = display_all
row = col.row(align=True)
if not is_wireframes:
row.prop(overlay, "show_wireframes", text="")
sub = row.row()
sub.active = overlay.show_wireframes or is_wireframes
sub.prop(overlay, "wireframe_threshold", text="Wireframe")
col = layout.column(align=True)
col.active = display_all
col.prop(overlay, "show_face_orientation")
# sub.prop(overlay, "show_onion_skins")
class VIEW3D_PT_overlay_motion_tracking(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = "Motion Tracking"
def draw_header(self, context):
view = context.space_data
self.layout.prop(view, "show_reconstruction", text="")
def draw(self, context):
layout = self.layout
view = context.space_data
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column()
col.active = display_all
if view.show_reconstruction:
split = col.split()
sub = split.column(align=True)
sub.active = view.show_reconstruction
sub.prop(view, "show_camera_path", text="Camera Path")
sub = split.column()
sub.prop(view, "show_bundle_names", text="Marker Names")
col = layout.column()
col.label(text="Tracks:")
row = col.row(align=True)
row.prop(view, "tracks_display_type", text="")
row.prop(view, "tracks_display_size", text="Size")
class VIEW3D_PT_overlay_edit_mesh(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = "Mesh Edit Mode"
@classmethod
def poll(cls, context):
return context.mode == 'EDIT_MESH'
def draw(self, context):
layout = self.layout
view = context.space_data
shading = view.shading
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column()
col.active = display_all
split = col.split()
sub = split.column()
sub.active = not ((shading.type == 'WIREFRAME') or shading.show_xray)
sub.prop(overlay, "show_edges", text="Edges")
sub = split.column()
sub.prop(overlay, "show_faces", text="Faces")
sub = split.column()
sub.prop(overlay, "show_face_center", text="Center")
row = col.row(align=True)
row.prop(overlay, "show_edge_crease", text="Creases", toggle=True)
row.prop(overlay, "show_edge_sharp", text="Sharp", text_ctxt=i18n_contexts.plural, toggle=True)
row.prop(overlay, "show_edge_bevel_weight", text="Bevel", toggle=True)
row.prop(overlay, "show_edge_seams", text="Seams", toggle=True)
if context.preferences.view.show_developer_ui:
col.label(text="Developer")
col.prop(overlay, "show_extra_indices", text="Indices")
class VIEW3D_PT_overlay_edit_mesh_shading(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay_edit_mesh'
bl_label = "Shading"
@classmethod
def poll(cls, context):
return context.mode == 'EDIT_MESH'
def draw(self, context):
layout = self.layout
view = context.space_data
shading = view.shading
overlay = view.overlay
tool_settings = context.tool_settings
display_all = overlay.show_overlays
statvis = tool_settings.statvis
col = layout.column()
col.active = display_all
col.prop(overlay, "show_occlude_wire")
col.prop(overlay, "show_weight", text="Vertex Group Weights")
if overlay.show_weight:
row = col.split(factor=0.33)
row.label(text="Zero Weights")
sub = row.row()
sub.prop(tool_settings, "vertex_group_user", expand=True)
if shading.type == 'WIREFRAME':
xray = shading.show_xray_wireframe and shading.xray_alpha_wireframe < 1.0
elif shading.type == 'SOLID':
xray = shading.show_xray and shading.xray_alpha < 1.0
else:
xray = False
statvis_active = not xray
row = col.row()
row.active = statvis_active
row.prop(overlay, "show_statvis", text="Mesh Analysis")
if overlay.show_statvis:
col = col.column()
col.active = statvis_active
sub = col.split()
sub.label(text="Type")
sub.prop(statvis, "type", text="")
statvis_type = statvis.type
if statvis_type == 'OVERHANG':
row = col.row(align=True)
row.prop(statvis, "overhang_min", text="Minimum")
row.prop(statvis, "overhang_max", text="Maximum")
col.row().prop(statvis, "overhang_axis", expand=True)
elif statvis_type == 'THICKNESS':
row = col.row(align=True)
row.prop(statvis, "thickness_min", text="Minimum")
row.prop(statvis, "thickness_max", text="Maximum")
col.prop(statvis, "thickness_samples")
elif statvis_type == 'INTERSECT':
pass
elif statvis_type == 'DISTORT':
row = col.row(align=True)
row.prop(statvis, "distort_min", text="Minimum")
row.prop(statvis, "distort_max", text="Maximum")
elif statvis_type == 'SHARP':
row = col.row(align=True)
row.prop(statvis, "sharp_min", text="Minimum")
row.prop(statvis, "sharp_max", text="Maximum")
class VIEW3D_PT_overlay_edit_mesh_measurement(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay_edit_mesh'
bl_label = "Measurement"
@classmethod
def poll(cls, context):
return context.mode == 'EDIT_MESH'
def draw(self, context):
layout = self.layout
view = context.space_data
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column()
col.active = display_all
split = col.split()
sub = split.column()
sub.prop(overlay, "show_extra_edge_length", text="Edge Length")
sub.prop(overlay, "show_extra_edge_angle", text="Edge Angle")
sub = split.column()
sub.prop(overlay, "show_extra_face_area", text="Face Area")
sub.prop(overlay, "show_extra_face_angle", text="Face Angle")
class VIEW3D_PT_overlay_edit_mesh_normals(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay_edit_mesh'
bl_label = "Normals"
@classmethod
def poll(cls, context):
return context.mode == 'EDIT_MESH'
def draw(self, context):
layout = self.layout
view = context.space_data
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column()
col.active = display_all
row = col.row(align=True)
row.prop(overlay, "show_vertex_normals", text="", icon='NORMALS_VERTEX')
row.prop(overlay, "show_split_normals", text="", icon='NORMALS_VERTEX_FACE')
row.prop(overlay, "show_face_normals", text="", icon='NORMALS_FACE')
sub = row.row(align=True)
sub.active = overlay.show_vertex_normals or overlay.show_face_normals or overlay.show_split_normals
sub.prop(overlay, "normals_length", text="Size")
class VIEW3D_PT_overlay_edit_mesh_freestyle(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = "Freestyle"
@classmethod
def poll(cls, context):
return context.mode == 'EDIT_MESH' and bpy.app.build_options.freestyle
def draw(self, context):
layout = self.layout
view = context.space_data
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column()
col.active = display_all
row = col.row()
row.prop(overlay, "show_freestyle_edge_marks", text="Edge Marks")
row.prop(overlay, "show_freestyle_face_marks", text="Face Marks")
class VIEW3D_PT_overlay_edit_curve(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = "Curve Edit Mode"
@classmethod
def poll(cls, context):
return context.mode == 'EDIT_CURVE'
def draw(self, context):
layout = self.layout
view = context.space_data
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column()
col.active = display_all
row = col.row()
row.prop(overlay, "display_handle", text="Handles")
row = col.row()
row.prop(overlay, "show_curve_normals", text="")
sub = row.row()
sub.active = overlay.show_curve_normals
sub.prop(overlay, "normals_length", text="Normals")
class VIEW3D_PT_overlay_sculpt(Panel):
bl_space_type = 'VIEW_3D'
bl_context = ".sculpt_mode"
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = "Sculpt"
@classmethod
def poll(cls, context):
return (
context.mode == 'SCULPT' and
(context.sculpt_object and context.tool_settings.sculpt)
)
def draw(self, context):
layout = self.layout
tool_settings = context.tool_settings
sculpt = tool_settings.sculpt
view = context.space_data
overlay = view.overlay
row = layout.row(align=True)
row.prop(sculpt, "show_mask", text="")
sub = row.row()
sub.active = sculpt.show_mask
sub.prop(overlay, "sculpt_mode_mask_opacity", text="Mask")
row = layout.row(align=True)
row.prop(sculpt, "show_face_sets", text="")
sub = row.row()
sub.active = sculpt.show_face_sets
row.prop(overlay, "sculpt_mode_face_sets_opacity", text="Face Sets")
class VIEW3D_PT_overlay_pose(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = "Pose Mode"
@classmethod
def poll(cls, context):
mode = context.mode
return (
(mode == 'POSE') or
(mode == 'PAINT_WEIGHT' and context.pose_object)
)
def draw(self, context):
layout = self.layout
view = context.space_data
mode = context.mode
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column()
col.active = display_all
if mode == 'POSE':
row = col.row()
row.prop(overlay, "show_xray_bone", text="")
sub = row.row()
sub.active = display_all and overlay.show_xray_bone
sub.prop(overlay, "xray_alpha_bone", text="Fade Geometry")
else:
row = col.row()
row.prop(overlay, "show_xray_bone")
class VIEW3D_PT_overlay_texture_paint(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = "Texture Paint"
@classmethod
def poll(cls, context):
return context.mode == 'PAINT_TEXTURE'
def draw(self, context):
layout = self.layout
view = context.space_data
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column()
col.active = display_all
col.prop(overlay, "texture_paint_mode_opacity")
class VIEW3D_PT_overlay_vertex_paint(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = "Vertex Paint"
@classmethod
def poll(cls, context):
return context.mode == 'PAINT_VERTEX'
def draw(self, context):
layout = self.layout
view = context.space_data
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column()
col.active = display_all
col.prop(overlay, "vertex_paint_mode_opacity", text="Opacity")
col.prop(overlay, "show_paint_wire")
class VIEW3D_PT_overlay_weight_paint(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = "Weight Paint"
@classmethod
def poll(cls, context):
return context.mode == 'PAINT_WEIGHT'
def draw(self, context):
layout = self.layout
view = context.space_data
overlay = view.overlay
display_all = overlay.show_overlays
col = layout.column()
col.active = display_all
col.prop(overlay, "weight_paint_mode_opacity", text="Opacity")
row = col.split(factor=0.33)
row.label(text="Zero Weights")
sub = row.row()
sub.prop(context.tool_settings, "vertex_group_user", expand=True)
col.prop(overlay, "show_wpaint_contours")
col.prop(overlay, "show_paint_wire")
class VIEW3D_PT_snapping(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Snapping"
def draw(self, context):
tool_settings = context.tool_settings
snap_elements = tool_settings.snap_elements
obj = context.active_object
object_mode = 'OBJECT' if obj is None else obj.mode
layout = self.layout
col = layout.column()
col.label(text="Snap to")
col.prop(tool_settings, "snap_elements", expand=True)
col.separator()
if 'INCREMENT' in snap_elements:
col.prop(tool_settings, "use_snap_grid_absolute")
if snap_elements != {'INCREMENT'}:
col.label(text="Snap with")
row = col.row(align=True)
row.prop(tool_settings, "snap_target", expand=True)
col.prop(tool_settings, "use_snap_backface_culling")
if obj:
if object_mode == 'EDIT':
col.prop(tool_settings, "use_snap_self")
if object_mode in {'OBJECT', 'POSE', 'EDIT', 'WEIGHT_PAINT'}:
col.prop(tool_settings, "use_snap_align_rotation")
if 'FACE' in snap_elements:
col.prop(tool_settings, "use_snap_project")
if 'VOLUME' in snap_elements:
col.prop(tool_settings, "use_snap_peel_object")
col.label(text="Affect")
row = col.row(align=True)
row.prop(tool_settings, "use_snap_translate", text="Move", toggle=True)
row.prop(tool_settings, "use_snap_rotate", text="Rotate", toggle=True)
row.prop(tool_settings, "use_snap_scale", text="Scale", toggle=True)
class VIEW3D_PT_proportional_edit(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Proportional Editing"
bl_ui_units_x = 8
def draw(self, context):
layout = self.layout
tool_settings = context.tool_settings
col = layout.column()
if context.mode != 'OBJECT':
col.prop(tool_settings, "use_proportional_connected")
sub = col.column()
sub.active = not tool_settings.use_proportional_connected
sub.prop(tool_settings, "use_proportional_projected")
col.separator()
col.prop(tool_settings, "proportional_edit_falloff", expand=True)
class VIEW3D_PT_transform_orientations(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Transform Orientations"
bl_ui_units_x = 8
def draw(self, context):
layout = self.layout
layout.label(text="Transform Orientations")
scene = context.scene
orient_slot = scene.transform_orientation_slots[0]
orientation = orient_slot.custom_orientation
row = layout.row()
col = row.column()
col.prop(orient_slot, "type", expand=True)
row.operator("transform.create_orientation", text="", icon='ADD', emboss=False).use = True
if orientation:
row = layout.row(align=False)
row.prop(orientation, "name", text="", icon='OBJECT_ORIGIN')
row.operator("transform.delete_orientation", text="", icon='X', emboss=False)
class VIEW3D_PT_gpencil_origin(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Stroke Placement"
def draw(self, context):
layout = self.layout
tool_settings = context.tool_settings
gpd = context.gpencil_data
layout.label(text="Stroke Placement")
row = layout.row()
col = row.column()
col.prop(tool_settings, "gpencil_stroke_placement_view3d", expand=True)
if tool_settings.gpencil_stroke_placement_view3d == 'SURFACE':
row = layout.row()
row.label(text="Offset")
row = layout.row()
row.prop(gpd, "zdepth_offset", text="")
if tool_settings.gpencil_stroke_placement_view3d == 'STROKE':
row = layout.row()
row.label(text="Target")
row = layout.row()
row.prop(tool_settings, "gpencil_stroke_snap_mode", expand=True)
class VIEW3D_PT_gpencil_lock(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Drawing Plane"
def draw(self, context):
layout = self.layout
layout.label(text="Drawing Plane")
row = layout.row()
col = row.column()
col.prop(context.tool_settings.gpencil_sculpt, "lock_axis", expand=True)
class VIEW3D_PT_gpencil_guide(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Guides"
def draw(self, context):
settings = context.tool_settings.gpencil_sculpt.guide
layout = self.layout
layout.label(text="Guides")
col = layout.column()
col.active = settings.use_guide
col.prop(settings, "type", expand=True)
if settings.type in {'ISO', 'PARALLEL', 'RADIAL'}:
col.prop(settings, "angle")
row = col.row(align=True)
col.prop(settings, "use_snapping")
if settings.use_snapping:
if settings.type == 'RADIAL':
col.prop(settings, "angle_snap")
else:
col.prop(settings, "spacing")
if settings.type in {'CIRCULAR', 'RADIAL'} or settings.use_snapping:
col.label(text="Reference Point")
row = col.row(align=True)
row.prop(settings, "reference_point", expand=True)
if settings.reference_point == 'CUSTOM':
col.prop(settings, "location", text="Custom Location")
elif settings.reference_point == 'OBJECT':
col.prop(settings, "reference_object", text="Object Location")
if not settings.reference_object:
col.label(text="No object selected, using cursor")
class VIEW3D_PT_overlay_gpencil_options(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_parent_id = 'VIEW3D_PT_overlay'
bl_label = ""
@classmethod
def poll(cls, context):
return context.object and context.object.type == 'GPENCIL'
def draw_header(self, context):
layout = self.layout
layout.label(text={
'PAINT_GPENCIL': "Draw Grease Pencil",
'EDIT_GPENCIL': "Edit Grease Pencil",
'SCULPT_GPENCIL': "Sculpt Grease Pencil",
'WEIGHT_GPENCIL': "Weight Grease Pencil",
'VERTEX_GPENCIL': "Vertex Grease Pencil",
'OBJECT': "Grease Pencil",
}[context.mode])
def draw(self, context):
layout = self.layout
view = context.space_data
overlay = view.overlay
layout.prop(overlay, "use_gpencil_onion_skin", text="Onion Skin")
col = layout.column()
row = col.row()
row.prop(overlay, "use_gpencil_grid", text="")
sub = row.row(align=True)
sub.active = overlay.use_gpencil_grid
sub.prop(overlay, "gpencil_grid_opacity", text="Canvas", slider=True)
sub.prop(overlay, "use_gpencil_canvas_xray", text="", icon='XRAY')
row = col.row()
row.prop(overlay, "use_gpencil_fade_layers", text="")
sub = row.row()
sub.active = overlay.use_gpencil_fade_layers
sub.prop(overlay, "gpencil_fade_layer", text="Fade Layers", slider=True)
row = col.row()
row.prop(overlay, "use_gpencil_fade_objects", text="")
sub = row.row(align=True)
sub.active = overlay.use_gpencil_fade_objects
sub.prop(overlay, "gpencil_fade_objects", text="Fade Objects", slider=True)
sub.prop(overlay, "use_gpencil_fade_gp_objects", text="", icon='OUTLINER_OB_GREASEPENCIL')
if context.object.mode in {'EDIT_GPENCIL', 'SCULPT_GPENCIL', 'WEIGHT_GPENCIL', 'VERTEX_GPENCIL'}:
split = layout.split()
col = split.column()
col.prop(overlay, "use_gpencil_edit_lines", text="Edit Lines")
col = split.column()
col.prop(overlay, "use_gpencil_multiedit_line_only", text="Only in Multiframe")
if context.object.mode == 'EDIT_GPENCIL':
split = layout.split()
col = split.column()
col.prop(overlay, "use_gpencil_show_directions")
col = split.column()
col.prop(overlay, "use_gpencil_show_material_name", text="Material Name")
layout.prop(overlay, "vertex_opacity", text="Vertex Opacity", slider=True)
if context.object.mode in {'PAINT_GPENCIL', 'VERTEX_GPENCIL'}:
layout.label(text="Vertex Paint")
row = layout.row()
shading = VIEW3D_PT_shading.get_shading(context)
row.enabled = shading.type not in {'WIREFRAME', 'RENDERED'}
row.prop(overlay, "gpencil_vertex_paint_opacity", text="Opacity", slider=True)
class VIEW3D_PT_quad_view(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "View"
bl_label = "Quad View"
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
view = context.space_data
return view.region_quadviews
def draw(self, context):
layout = self.layout
view = context.space_data
region = view.region_quadviews[2]
col = layout.column()
col.prop(region, "lock_rotation")
row = col.row()
row.enabled = region.lock_rotation
row.prop(region, "show_sync_view")
row = col.row()
row.enabled = region.lock_rotation and region.show_sync_view
row.prop(region, "use_box_clip")
# Annotation properties
class VIEW3D_PT_grease_pencil(AnnotationDataPanel, Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "View"
# NOTE: this is just a wrapper around the generic GP Panel
class VIEW3D_PT_annotation_onion(AnnotationOnionSkin, Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "View"
bl_parent_id = 'VIEW3D_PT_grease_pencil'
# NOTE: this is just a wrapper around the generic GP Panel
class TOPBAR_PT_annotation_layers(Panel, AnnotationDataPanel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Layers"
bl_ui_units_x = 14
class VIEW3D_PT_view3d_stereo(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "View"
bl_label = "Stereoscopy"
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
scene = context.scene
multiview = scene.render.use_multiview
return multiview
def draw(self, context):
layout = self.layout
view = context.space_data
basic_stereo = context.scene.render.views_format == 'STEREO_3D'
col = layout.column()
col.row().prop(view, "stereo_3d_camera", expand=True)
col.label(text="Display:")
row = col.row()
row.active = basic_stereo
row.prop(view, "show_stereo_3d_cameras")
row = col.row()
row.active = basic_stereo
split = row.split()
split.prop(view, "show_stereo_3d_convergence_plane")
split = row.split()
split.prop(view, "stereo_3d_convergence_plane_alpha", text="Alpha")
split.active = view.show_stereo_3d_convergence_plane
row = col.row()
split = row.split()
split.prop(view, "show_stereo_3d_volume")
split = row.split()
split.active = view.show_stereo_3d_volume
split.prop(view, "stereo_3d_volume_alpha", text="Alpha")
class VIEW3D_PT_context_properties(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "Item"
bl_label = "Properties"
bl_options = {'DEFAULT_CLOSED'}
@staticmethod
def _active_context_member(context):
obj = context.object
if obj:
object_mode = obj.mode
if object_mode == 'POSE':
return "active_pose_bone"
elif object_mode == 'EDIT' and obj.type == 'ARMATURE':
return "active_bone"
else:
return "object"
return ""
@classmethod
def poll(cls, context):
import rna_prop_ui
member = cls._active_context_member(context)
if member:
context_member, member = rna_prop_ui.rna_idprop_context_value(context, member, object)
return context_member and rna_prop_ui.rna_idprop_has_properties(context_member)
return False
def draw(self, context):
import rna_prop_ui
member = VIEW3D_PT_context_properties._active_context_member(context)
if member:
# Draw with no edit button
rna_prop_ui.draw(self.layout, context, member, object, False)
# Grease Pencil Object - Multiframe falloff tools
class VIEW3D_PT_gpencil_multi_frame(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Multi Frame"
def draw(self, context):
gpd = context.gpencil_data
settings = context.tool_settings.gpencil_sculpt
layout = self.layout
col = layout.column(align=True)
col.prop(settings, "use_multiframe_falloff")
# Falloff curve
if gpd.use_multiedit and settings.use_multiframe_falloff:
layout.template_curve_mapping(settings, "multiframe_falloff_curve", brush=True)
class VIEW3D_MT_gpencil_edit_context_menu(Menu):
bl_label = ""
def draw(self, context):
is_point_mode = context.tool_settings.gpencil_selectmode_edit == 'POINT'
is_stroke_mode = context.tool_settings.gpencil_selectmode_edit == 'STROKE'
is_segment_mode = context.tool_settings.gpencil_selectmode_edit == 'SEGMENT'
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
row = layout.row()
if is_point_mode or is_segment_mode:
col = row.column()
col.label(text="Point Context Menu", icon='GP_SELECT_POINTS')
col.separator()
# Additive Operators
col.operator("gpencil.stroke_subdivide", text="Subdivide").only_selected = True
col.separator()
col.operator("gpencil.extrude_move", text="Extrude Points")
col.separator()
# Deform Operators
col.operator("gpencil.stroke_smooth", text="Smooth Points").only_selected = True
col.operator("transform.bend", text="Bend")
col.operator("transform.shear", text="Shear")
col.operator("transform.tosphere", text="To Sphere")
col.operator("transform.transform", text="Shrink Fatten").mode = 'GPENCIL_SHRINKFATTEN'
col.separator()
col.menu("VIEW3D_MT_mirror", text="Mirror Points")
col.menu("VIEW3D_MT_snap", text="Snap Points")
col.separator()
# Duplicate operators
col.operator("gpencil.duplicate_move", text="Duplicate")
col.operator("gpencil.copy", text="Copy", icon='COPYDOWN')
col.operator("gpencil.paste", text="Paste", icon='PASTEDOWN').type = 'ACTIVE'
col.operator("gpencil.paste", text="Paste by Layer").type = 'LAYER'
col.separator()
# Removal Operators
col.operator("gpencil.stroke_merge", text="Merge Points")
col.operator("gpencil.stroke_merge_by_distance").use_unselected = False
col.operator("gpencil.stroke_split", text="Split")
col.operator("gpencil.stroke_separate", text="Separate").mode = 'POINT'
col.separator()
col.operator("gpencil.delete", text="Delete Points").type = 'POINTS'
col.operator("gpencil.dissolve", text="Dissolve Points").type = 'POINTS'
col.operator("gpencil.dissolve", text="Dissolve Between").type = 'BETWEEN'
col.operator("gpencil.dissolve", text="Dissolve Unselected").type = 'UNSELECT'
if is_stroke_mode:
col = row.column()
col.label(text="Stroke Context Menu", icon='GP_SELECT_STROKES')
col.separator()
# Main Strokes Operators
col.operator("gpencil.stroke_subdivide", text="Subdivide").only_selected = False
col.menu("VIEW3D_MT_gpencil_simplify")
col.operator("gpencil.stroke_trim", text="Trim")
col.separator()
col.operator("gpencil.stroke_smooth", text="Smooth Stroke").only_selected = False
col.operator("transform.transform", text="Shrink Fatten").mode = 'GPENCIL_SHRINKFATTEN'
col.separator()
# Layer and Materials operators
col.menu("GPENCIL_MT_move_to_layer")
col.menu("VIEW3D_MT_assign_material")
col.operator("gpencil.set_active_material", text="Set as Active Material")
col.operator_menu_enum("gpencil.stroke_arrange", "direction", text="Arrange Strokes")
col.separator()
col.menu("VIEW3D_MT_mirror", text="Mirror Stroke")
col.menu("VIEW3D_MT_snap", text="Snap Stroke")
col.separator()
# Duplicate operators
col.operator("gpencil.duplicate_move", text="Duplicate")
col.operator("gpencil.copy", text="Copy", icon='COPYDOWN')
col.operator("gpencil.paste", text="Paste", icon='PASTEDOWN').type = 'ACTIVE'
col.operator("gpencil.paste", text="Paste by Layer").type = 'LAYER'
col.separator()
# Removal Operators
col.operator("gpencil.stroke_merge_by_distance").use_unselected = True
col.operator_menu_enum("gpencil.stroke_join", "type", text="Join")
col.operator("gpencil.stroke_split", text="Split")
col.operator("gpencil.stroke_separate", text="Separate").mode = 'STROKE'
col.separator()
col.operator("gpencil.delete", text="Delete Strokes").type = 'STROKES'
col.separator()
col.operator("gpencil.reproject", text="Reproject Strokes")
def draw_gpencil_layer_active(context, layout):
gpl = context.active_gpencil_layer
if gpl:
layout.label(text="Active Layer")
row = layout.row(align=True)
row.operator_context = 'EXEC_REGION_WIN'
row.operator_menu_enum("gpencil.layer_change", "layer", text="", icon='GREASEPENCIL')
row.prop(gpl, "info", text="")
row.operator("gpencil.layer_remove", text="", icon='X')
def draw_gpencil_material_active(context, layout):
ob = context.active_object
if ob and len(ob.material_slots) > 0 and ob.active_material_index >= 0:
ma = ob.material_slots[ob.active_material_index].material
if ma:
layout.label(text="Active Material")
row = layout.row(align=True)
row.operator_context = 'EXEC_REGION_WIN'
row.operator_menu_enum("gpencil.material_set", "slot", text="", icon='MATERIAL')
row.prop(ma, "name", text="")
class VIEW3D_PT_gpencil_sculpt_context_menu(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'WINDOW'
bl_label = "Sculpt Context Menu"
bl_ui_units_x = 12
def draw(self, context):
ts = context.tool_settings
settings = ts.gpencil_sculpt_paint
brush = settings.brush
layout = self.layout
layout.prop(brush, "size", slider=True)
layout.prop(brush, "strength")
# Layers
draw_gpencil_layer_active(context, layout)
class VIEW3D_PT_gpencil_weight_context_menu(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'WINDOW'
bl_label = "Weight Paint Context Menu"
bl_ui_units_x = 12
def draw(self, context):
ts = context.tool_settings
settings = ts.gpencil_weight_paint
brush = settings.brush
layout = self.layout
layout.prop(brush, "size", slider=True)
layout.prop(brush, "strength")
layout.prop(brush, "weight")
# Layers
draw_gpencil_layer_active(context, layout)
class VIEW3D_PT_gpencil_draw_context_menu(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'WINDOW'
bl_label = "Draw Context Menu"
bl_ui_units_x = 12
def draw(self, context):
ts = context.tool_settings
settings = ts.gpencil_paint
brush = settings.brush
gp_settings = brush.gpencil_settings
layout = self.layout
is_vertex = settings.color_mode == 'VERTEXCOLOR' or brush.gpencil_tool == 'TINT'
if brush.gpencil_tool not in {'ERASE', 'CUTTER', 'EYEDROPPER'} and is_vertex:
split = layout.split(factor=0.1)
split.prop(brush, "color", text="")
split.template_color_picker(brush, "color", value_slider=True)
col = layout.column()
col.separator()
col.prop_menu_enum(gp_settings, "vertex_mode", text="Mode")
col.separator()
if brush.gpencil_tool not in {'FILL', 'CUTTER'}:
layout.prop(brush, "size", slider=True)
if brush.gpencil_tool not in {'ERASE', 'FILL', 'CUTTER'}:
layout.prop(gp_settings, "pen_strength")
# Layers
draw_gpencil_layer_active(context, layout)
# Material
if not is_vertex:
draw_gpencil_material_active(context, layout)
class VIEW3D_PT_gpencil_vertex_context_menu(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'WINDOW'
bl_label = "Vertex Paint Context Menu"
bl_ui_units_x = 12
def draw(self, context):
layout = self.layout
ts = context.tool_settings
settings = ts.gpencil_vertex_paint
brush = settings.brush
gp_settings = brush.gpencil_settings
col = layout.column()
if brush.gpencil_vertex_tool in {'DRAW', 'REPLACE'}:
split = layout.split(factor=0.1)
split.prop(brush, "color", text="")
split.template_color_picker(brush, "color", value_slider=True)
col = layout.column()
col.separator()
col.prop_menu_enum(gp_settings, "vertex_mode", text="Mode")
col.separator()
row = col.row(align=True)
row.prop(brush, "size", text="Radius")
row.prop(gp_settings, "use_pressure", text="", icon='STYLUS_PRESSURE')
if brush.gpencil_vertex_tool in {'DRAW', 'BLUR', 'SMEAR'}:
row = layout.row(align=True)
row.prop(gp_settings, "pen_strength", slider=True)
row.prop(gp_settings, "use_strength_pressure", text="", icon='STYLUS_PRESSURE')
# Layers
draw_gpencil_layer_active(context, layout)
class VIEW3D_PT_paint_vertex_context_menu(Panel):
# Only for popover, these are dummy values.
bl_space_type = 'VIEW_3D'
bl_region_type = 'WINDOW'
bl_label = "Vertex Paint Context Menu"
def draw(self, context):
layout = self.layout
brush = context.tool_settings.vertex_paint.brush
capabilities = brush.vertex_paint_capabilities
if capabilities.has_color:
split = layout.split(factor=0.1)
UnifiedPaintPanel.prop_unified_color(split, context, brush, "color", text="")
UnifiedPaintPanel.prop_unified_color_picker(split, context, brush, "color", value_slider=True)
layout.prop(brush, "blend", text="")
UnifiedPaintPanel.prop_unified(
layout,
context,
brush,
"size",
unified_name="use_unified_size",
pressure_name="use_pressure_size",
slider=True,
)
UnifiedPaintPanel.prop_unified(
layout,
context,
brush,
"strength",
unified_name="use_unified_strength",
pressure_name="use_pressure_strength",
slider=True,
)
class VIEW3D_PT_paint_texture_context_menu(Panel):
# Only for popover, these are dummy values.
bl_space_type = 'VIEW_3D'
bl_region_type = 'WINDOW'
bl_label = "Texture Paint Context Menu"
def draw(self, context):
layout = self.layout
brush = context.tool_settings.image_paint.brush
capabilities = brush.image_paint_capabilities
if capabilities.has_color:
split = layout.split(factor=0.1)
UnifiedPaintPanel.prop_unified_color(split, context, brush, "color", text="")
UnifiedPaintPanel.prop_unified_color_picker(split, context, brush, "color", value_slider=True)
layout.prop(brush, "blend", text="")
if capabilities.has_radius:
UnifiedPaintPanel.prop_unified(
layout,
context,
brush,
"size",
unified_name="use_unified_size",
pressure_name="use_pressure_size",
slider=True,
)
UnifiedPaintPanel.prop_unified(
layout,
context,
brush,
"strength",
unified_name="use_unified_strength",
pressure_name="use_pressure_strength",
slider=True,
)
class VIEW3D_PT_paint_weight_context_menu(Panel):
# Only for popover, these are dummy values.
bl_space_type = 'VIEW_3D'
bl_region_type = 'WINDOW'
bl_label = "Weights Context Menu"
def draw(self, context):
layout = self.layout
brush = context.tool_settings.weight_paint.brush
UnifiedPaintPanel.prop_unified(
layout,
context,
brush,
"weight",
unified_name="use_unified_weight",
slider=True,
)
UnifiedPaintPanel.prop_unified(
layout,
context,
brush,
"size",
unified_name="use_unified_size",
pressure_name="use_pressure_size",
slider=True,
)
UnifiedPaintPanel.prop_unified(
layout,
context,
brush,
"strength",
unified_name="use_unified_strength",
pressure_name="use_pressure_strength",
slider=True,
)
class VIEW3D_PT_sculpt_context_menu(Panel):
# Only for popover, these are dummy values.
bl_space_type = 'VIEW_3D'
bl_region_type = 'WINDOW'
bl_label = "Sculpt Context Menu"
def draw(self, context):
layout = self.layout
brush = context.tool_settings.sculpt.brush
capabilities = brush.sculpt_capabilities
if capabilities.has_color:
split = layout.split(factor=0.1)
UnifiedPaintPanel.prop_unified_color(split, context, brush, "color", text="")
UnifiedPaintPanel.prop_unified_color_picker(split, context, brush, "color", value_slider=True)
layout.prop(brush, "blend", text="")
UnifiedPaintPanel.prop_unified(
layout,
context,
brush,
"size",
unified_name="use_unified_size",
pressure_name="use_pressure_size",
slider=True,
)
UnifiedPaintPanel.prop_unified(
layout,
context,
brush,
"strength",
unified_name="use_unified_strength",
pressure_name="use_pressure_strength",
slider=True,
)
if capabilities.has_auto_smooth:
layout.prop(brush, "auto_smooth_factor", slider=True)
if capabilities.has_normal_weight:
layout.prop(brush, "normal_weight", slider=True)
if capabilities.has_pinch_factor:
text = "Pinch"
if brush.sculpt_tool in {'BLOB', 'SNAKE_HOOK'}:
text = "Magnify"
layout.prop(brush, "crease_pinch_factor", slider=True, text=text)
if capabilities.has_rake_factor:
layout.prop(brush, "rake_factor", slider=True)
if capabilities.has_plane_offset:
layout.prop(brush, "plane_offset", slider=True)
layout.prop(brush, "plane_trim", slider=True, text="Distance")
if capabilities.has_height:
layout.prop(brush, "height", slider=True, text="Height")
class TOPBAR_PT_gpencil_materials(GreasePencilMaterialsPanel, Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Materials"
bl_ui_units_x = 14
@classmethod
def poll(cls, context):
ob = context.object
return ob and ob.type == 'GPENCIL'
class TOPBAR_PT_gpencil_vertexcolor(GreasePencilVertexcolorPanel, Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'HEADER'
bl_label = "Vertex Color"
bl_ui_units_x = 10
@classmethod
def poll(cls, context):
ob = context.object
return ob and ob.type == 'GPENCIL'
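# All menu and panel classes registered by this module.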
classes = (
VIEW3D_HT_header,
VIEW3D_HT_tool_header,
VIEW3D_MT_editor_menus,
VIEW3D_MT_transform,
VIEW3D_MT_transform_base,
VIEW3D_MT_transform_object,
VIEW3D_MT_transform_armature,
VIEW3D_MT_mirror,
VIEW3D_MT_snap,
VIEW3D_MT_uv_map,
VIEW3D_MT_view,
VIEW3D_MT_view_local,
VIEW3D_MT_view_cameras,
VIEW3D_MT_view_navigation,
VIEW3D_MT_view_align,
VIEW3D_MT_view_align_selected,
VIEW3D_MT_view_viewpoint,
VIEW3D_MT_view_regions,
VIEW3D_MT_select_object,
VIEW3D_MT_select_object_more_less,
VIEW3D_MT_select_pose,
VIEW3D_MT_select_pose_more_less,
VIEW3D_MT_select_particle,
VIEW3D_MT_edit_mesh,
VIEW3D_MT_edit_mesh_select_similar,
VIEW3D_MT_edit_mesh_select_by_trait,
VIEW3D_MT_edit_mesh_select_more_less,
VIEW3D_MT_select_edit_mesh,
VIEW3D_MT_select_edit_curve,
VIEW3D_MT_select_edit_surface,
VIEW3D_MT_select_edit_text,
VIEW3D_MT_select_edit_metaball,
VIEW3D_MT_edit_lattice_context_menu,
VIEW3D_MT_select_edit_lattice,
VIEW3D_MT_select_edit_armature,
VIEW3D_MT_select_gpencil,
VIEW3D_MT_select_paint_mask,
VIEW3D_MT_select_paint_mask_vertex,
VIEW3D_MT_angle_control,
VIEW3D_MT_mesh_add,
VIEW3D_MT_curve_add,
VIEW3D_MT_surface_add,
VIEW3D_MT_edit_metaball_context_menu,
VIEW3D_MT_metaball_add,
TOPBAR_MT_edit_curve_add,
TOPBAR_MT_edit_armature_add,
VIEW3D_MT_armature_add,
VIEW3D_MT_light_add,
VIEW3D_MT_lightprobe_add,
VIEW3D_MT_camera_add,
VIEW3D_MT_volume_add,
VIEW3D_MT_add,
VIEW3D_MT_image_add,
VIEW3D_MT_object,
VIEW3D_MT_object_animation,
VIEW3D_MT_object_rigid_body,
VIEW3D_MT_object_clear,
VIEW3D_MT_object_context_menu,
VIEW3D_MT_object_shading,
VIEW3D_MT_object_apply,
VIEW3D_MT_object_relations,
VIEW3D_MT_object_parent,
VIEW3D_MT_object_track,
VIEW3D_MT_object_collection,
VIEW3D_MT_object_constraints,
VIEW3D_MT_object_quick_effects,
VIEW3D_MT_object_showhide,
VIEW3D_MT_make_single_user,
VIEW3D_MT_make_links,
VIEW3D_MT_brush_paint_modes,
VIEW3D_MT_paint_vertex,
VIEW3D_MT_hook,
VIEW3D_MT_vertex_group,
VIEW3D_MT_gpencil_vertex_group,
VIEW3D_MT_paint_weight,
VIEW3D_MT_paint_weight_lock,
VIEW3D_MT_sculpt,
VIEW3D_MT_sculpt_set_pivot,
VIEW3D_MT_mask,
VIEW3D_MT_face_sets,
VIEW3D_MT_face_sets_init,
VIEW3D_MT_particle,
VIEW3D_MT_particle_context_menu,
VIEW3D_MT_particle_showhide,
VIEW3D_MT_pose,
VIEW3D_MT_pose_transform,
VIEW3D_MT_pose_slide,
VIEW3D_MT_pose_propagate,
VIEW3D_MT_pose_library,
VIEW3D_MT_pose_motion,
VIEW3D_MT_pose_group,
VIEW3D_MT_pose_ik,
VIEW3D_MT_pose_constraints,
VIEW3D_MT_pose_names,
VIEW3D_MT_pose_showhide,
VIEW3D_MT_pose_apply,
VIEW3D_MT_pose_context_menu,
VIEW3D_MT_bone_options_toggle,
VIEW3D_MT_bone_options_enable,
VIEW3D_MT_bone_options_disable,
VIEW3D_MT_edit_mesh_context_menu,
VIEW3D_MT_edit_mesh_select_mode,
VIEW3D_MT_edit_mesh_select_linked,
VIEW3D_MT_edit_mesh_select_loops,
VIEW3D_MT_edit_mesh_extrude,
VIEW3D_MT_edit_mesh_vertices,
VIEW3D_MT_edit_mesh_edges,
VIEW3D_MT_edit_mesh_edges_data,
VIEW3D_MT_edit_mesh_faces,
VIEW3D_MT_edit_mesh_faces_data,
VIEW3D_MT_edit_mesh_normals,
VIEW3D_MT_edit_mesh_normals_select_strength,
VIEW3D_MT_edit_mesh_normals_set_strength,
VIEW3D_MT_edit_mesh_normals_average,
VIEW3D_MT_edit_mesh_shading,
VIEW3D_MT_edit_mesh_weights,
VIEW3D_MT_edit_mesh_clean,
VIEW3D_MT_edit_mesh_delete,
VIEW3D_MT_edit_mesh_merge,
VIEW3D_MT_edit_mesh_split,
VIEW3D_MT_edit_mesh_showhide,
VIEW3D_MT_paint_gpencil,
VIEW3D_MT_assign_material,
VIEW3D_MT_edit_gpencil,
VIEW3D_MT_edit_gpencil_stroke,
VIEW3D_MT_edit_gpencil_point,
VIEW3D_MT_edit_gpencil_delete,
VIEW3D_MT_edit_gpencil_showhide,
VIEW3D_MT_weight_gpencil,
VIEW3D_MT_vertex_gpencil,
VIEW3D_MT_gpencil_animation,
VIEW3D_MT_gpencil_simplify,
VIEW3D_MT_gpencil_copy_layer,
VIEW3D_MT_gpencil_autoweights,
VIEW3D_MT_gpencil_edit_context_menu,
VIEW3D_MT_edit_curve,
VIEW3D_MT_edit_curve_ctrlpoints,
VIEW3D_MT_edit_curve_segments,
VIEW3D_MT_edit_curve_clean,
VIEW3D_MT_edit_curve_context_menu,
VIEW3D_MT_edit_curve_delete,
VIEW3D_MT_edit_curve_showhide,
VIEW3D_MT_edit_surface,
VIEW3D_MT_edit_font,
VIEW3D_MT_edit_font_chars,
VIEW3D_MT_edit_font_kerning,
VIEW3D_MT_edit_font_delete,
VIEW3D_MT_edit_font_context_menu,
VIEW3D_MT_edit_meta,
VIEW3D_MT_edit_meta_showhide,
VIEW3D_MT_edit_lattice,
VIEW3D_MT_edit_armature,
VIEW3D_MT_armature_context_menu,
VIEW3D_MT_edit_armature_parent,
VIEW3D_MT_edit_armature_roll,
VIEW3D_MT_edit_armature_names,
VIEW3D_MT_edit_armature_delete,
VIEW3D_MT_edit_gpencil_transform,
VIEW3D_MT_edit_gpencil_interpolate,
VIEW3D_MT_object_mode_pie,
VIEW3D_MT_view_pie,
VIEW3D_MT_transform_gizmo_pie,
VIEW3D_MT_shading_pie,
VIEW3D_MT_shading_ex_pie,
VIEW3D_MT_pivot_pie,
VIEW3D_MT_snap_pie,
VIEW3D_MT_orientations_pie,
VIEW3D_MT_proportional_editing_falloff_pie,
VIEW3D_MT_sculpt_mask_edit_pie,
VIEW3D_MT_wpaint_vgroup_lock_pie,
VIEW3D_MT_sculpt_face_sets_edit_pie,
VIEW3D_PT_active_tool,
VIEW3D_PT_active_tool_duplicate,
VIEW3D_PT_view3d_properties,
VIEW3D_PT_view3d_lock,
VIEW3D_PT_view3d_cursor,
VIEW3D_PT_collections,
VIEW3D_PT_object_type_visibility,
VIEW3D_PT_grease_pencil,
VIEW3D_PT_annotation_onion,
VIEW3D_PT_gpencil_multi_frame,
VIEW3D_PT_quad_view,
VIEW3D_PT_view3d_stereo,
VIEW3D_PT_shading,
VIEW3D_PT_shading_lighting,
VIEW3D_PT_shading_color,
VIEW3D_PT_shading_options,
VIEW3D_PT_shading_options_shadow,
VIEW3D_PT_shading_options_ssao,
VIEW3D_PT_shading_render_pass,
VIEW3D_PT_gizmo_display,
VIEW3D_PT_overlay,
VIEW3D_PT_overlay_guides,
VIEW3D_PT_overlay_object,
VIEW3D_PT_overlay_geometry,
VIEW3D_PT_overlay_motion_tracking,
VIEW3D_PT_overlay_edit_mesh,
VIEW3D_PT_overlay_edit_mesh_shading,
VIEW3D_PT_overlay_edit_mesh_measurement,
VIEW3D_PT_overlay_edit_mesh_normals,
VIEW3D_PT_overlay_edit_mesh_freestyle,
VIEW3D_PT_overlay_edit_curve,
VIEW3D_PT_overlay_texture_paint,
VIEW3D_PT_overlay_vertex_paint,
VIEW3D_PT_overlay_weight_paint,
VIEW3D_PT_overlay_pose,
VIEW3D_PT_overlay_sculpt,
VIEW3D_PT_snapping,
VIEW3D_PT_proportional_edit,
VIEW3D_PT_gpencil_origin,
VIEW3D_PT_gpencil_lock,
VIEW3D_PT_gpencil_guide,
VIEW3D_PT_transform_orientations,
VIEW3D_PT_overlay_gpencil_options,
VIEW3D_PT_context_properties,
VIEW3D_PT_paint_vertex_context_menu,
VIEW3D_PT_paint_texture_context_menu,
VIEW3D_PT_paint_weight_context_menu,
VIEW3D_PT_gpencil_vertex_context_menu,
VIEW3D_PT_gpencil_sculpt_context_menu,
VIEW3D_PT_gpencil_weight_context_menu,
VIEW3D_PT_gpencil_draw_context_menu,
VIEW3D_PT_sculpt_context_menu,
TOPBAR_PT_gpencil_materials,
TOPBAR_PT_gpencil_vertexcolor,
TOPBAR_PT_annotation_layers,
)
if __name__ == "__main__": # only for live edit.
from bpy.utils import register_class
for cls in classes:
register_class(cls)
| 33.15117
| 118
| 0.635
|
dbef753177448138a946ecf3abec9919e7cffbe2
| 6,157
|
py
|
Python
|
tests/pytests/unit/client/ssh/test_ssh.py
|
lkubb/salt
|
48f8cc9dfe30b90b31050a2646bccd4b5a1b7927
|
[
"Apache-2.0"
] | null | null | null |
tests/pytests/unit/client/ssh/test_ssh.py
|
lkubb/salt
|
48f8cc9dfe30b90b31050a2646bccd4b5a1b7927
|
[
"Apache-2.0"
] | null | null | null |
tests/pytests/unit/client/ssh/test_ssh.py
|
lkubb/salt
|
48f8cc9dfe30b90b31050a2646bccd4b5a1b7927
|
[
"Apache-2.0"
] | null | null | null |
import os
import pytest
import salt.client.ssh.client
import salt.utils.msgpack
import salt.utils.parsers
import salt.utils.thin
from salt.client import ssh
from tests.support.mock import MagicMock, patch
from tests.support.runtests import RUNTIME_VARS
pytestmark = [
pytest.mark.skip_if_binaries_missing("ssh", "ssh-keygen", check_all=True),
]
@pytest.fixture
def ssh_target(tmpdir):
argv = [
"ssh.set_auth_key",
"root",
"hobn+amNAXSBTiOXEqlBjGB...rsa root@master",
]
opts = {
"argv": argv,
"__role": "master",
"cachedir": tmpdir.strpath,
"extension_modules": tmpdir.join("extmods").strpath,
}
target = {
"passwd": "abc123",
"ssh_options": None,
"sudo": False,
"identities_only": False,
"host": "login1",
"user": "root",
"timeout": 65,
"remote_port_forwards": None,
"sudo_user": "",
"port": "22",
"priv": "/etc/salt/pki/master/ssh/salt-ssh.rsa",
}
return opts, target
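# cmd_block() should surface a helpful hint when the shim cannot locate a suitable Python on the target.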
@pytest.mark.skip_on_windows(reason="SSH_PY_SHIM not set on windows")
def test_cmd_block_python_version_error(ssh_target):
opts = ssh_target[0]
target = ssh_target[1]
single = ssh.Single(
opts,
opts["argv"],
"localhost",
mods={},
fsclient=None,
thin=salt.utils.thin.thin_path(opts["cachedir"]),
mine=False,
winrm=False,
**target
)
mock_shim = MagicMock(
return_value=(("", "ERROR: Unable to locate appropriate python command\n", 10))
)
patch_shim = patch("salt.client.ssh.Single.shim_cmd", mock_shim)
with patch_shim:
ret = single.cmd_block()
assert "ERROR: Python version error. Recommendation(s) follow:" in ret[0]
@pytest.mark.parametrize(
"test_opts",
[
("extra_filerefs", "salt://foobar", True),
("host", "testhost", False),
("ssh_user", "testuser", True),
("ssh_passwd", "testpasswd", True),
("ssh_port", 23, False),
("ssh_sudo", True, True),
("ssh_sudo_user", "sudouser", False),
("ssh_priv", "test_priv", True),
("ssh_priv_passwd", "sshpasswd", True),
("ssh_identities_only", True, True),
("ssh_remote_port_forwards", "test", True),
("ssh_options", ["test1", "test2"], True),
("ssh_max_procs", 2, True),
("ssh_askpass", True, True),
("ssh_key_deploy", True, True),
("ssh_update_roster", True, True),
("ssh_scan_ports", "test", True),
("ssh_scan_timeout", 1.0, True),
("ssh_timeout", 1, False),
("ssh_log_file", "/tmp/test", True),
("raw_shell", True, True),
("refresh_cache", True, True),
("roster", "/test", True),
("roster_file", "/test1", True),
("rosters", ["test1"], False),
("ignore_host_keys", True, True),
("min_extra_mods", "test", True),
("thin_extra_mods", "test1", True),
("verbose", True, True),
("static", True, True),
("ssh_wipe", True, True),
("rand_thin_dir", True, True),
("regen_thin", True, True),
("ssh_run_pre_flight", True, True),
("no_host_keys", True, True),
("saltfile", "/tmp/test", True),
("doesnotexist", None, False),
],
)
def test_ssh_kwargs(test_opts):
"""
    Test that no ssh kwargs are excluded from kwargs
    when preparing the SSH opts.
"""
opt_key = test_opts[0]
opt_value = test_opts[1]
# Is the kwarg in salt.utils.parsers?
in_parser = test_opts[2]
opts = {
"eauth": "auto",
"username": "test",
"password": "test",
"client": "ssh",
"tgt": "localhost",
"fun": "test.ping",
opt_key: opt_value,
}
client = salt.client.ssh.client.SSHClient(disable_custom_roster=True)
if in_parser:
ssh_kwargs = salt.utils.parsers.SaltSSHOptionParser().defaults
assert opt_key in ssh_kwargs
with patch("salt.roster.get_roster_file", MagicMock(return_value="")), patch(
"salt.client.ssh.shell.gen_key"
), patch("salt.fileserver.Fileserver.update"), patch("salt.utils.thin.gen_thin"):
ssh_obj = client._prep_ssh(**opts)
assert ssh_obj.opts.get(opt_key, None) == opt_value
@pytest.mark.skip_on_windows(reason="pre_flight_args is not implemented for Windows")
@pytest.mark.parametrize(
"test_opts",
[
(None, ""),
("one", " one"),
("one two", " one two"),
("| touch /tmp/test", " '|' touch /tmp/test"),
("; touch /tmp/test", " ';' touch /tmp/test"),
(["one"], " one"),
(["one", "two"], " one two"),
(["one", "two", "| touch /tmp/test"], " one two '| touch /tmp/test'"),
(["one", "two", "; touch /tmp/test"], " one two '; touch /tmp/test'"),
],
)
def test_run_with_pre_flight_args(ssh_target, test_opts):
"""
    Test Single.run() when ssh_pre_flight is set
    and the script runs successfully.
"""
opts = ssh_target[0]
target = ssh_target[1]
opts["ssh_run_pre_flight"] = True
target["ssh_pre_flight"] = os.path.join(RUNTIME_VARS.TMP, "script.sh")
if test_opts[0] is not None:
target["ssh_pre_flight_args"] = test_opts[0]
expected_args = test_opts[1]
single = ssh.Single(
opts,
opts["argv"],
"localhost",
mods={},
fsclient=None,
thin=salt.utils.thin.thin_path(opts["cachedir"]),
mine=False,
**target
)
cmd_ret = ("Success", "", 0)
mock_cmd = MagicMock(return_value=cmd_ret)
mock_exec_cmd = MagicMock(return_value=("", "", 0))
patch_cmd = patch("salt.client.ssh.Single.cmd_block", mock_cmd)
patch_exec_cmd = patch("salt.client.ssh.shell.Shell.exec_cmd", mock_exec_cmd)
patch_shell_send = patch("salt.client.ssh.shell.Shell.send", return_value=None)
patch_os = patch("os.path.exists", side_effect=[True])
with patch_os, patch_cmd, patch_exec_cmd, patch_shell_send:
ret = single.run()
assert mock_exec_cmd.mock_calls[0].args[
0
] == "/bin/sh '/tmp/script.sh'{}".format(expected_args)
| 31.253807
| 87
| 0.583076
|
b4c9ac52358d5a150a83ad197482692fce77e459
| 390
|
py
|
Python
|
app/email.py
|
MelvinOmega/pitch-app
|
865aeef96433eb372f86ad8b78428b060fdc0856
|
[
"MIT"
] | null | null | null |
app/email.py
|
MelvinOmega/pitch-app
|
865aeef96433eb372f86ad8b78428b060fdc0856
|
[
"MIT"
] | null | null | null |
app/email.py
|
MelvinOmega/pitch-app
|
865aeef96433eb372f86ad8b78428b060fdc0856
|
[
"MIT"
] | null | null | null |
from flask_mail import Message
from flask import render_template
from . import mail
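# Render the given template as plain-text and HTML bodies and send it via Flask-Mail.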
def mail_message(subject,template,to,**kwargs):
sender_email = 'melvinomega151@gmail.com'
email = Message(subject, sender = sender_email, recipients=[to])
email.body= render_template(template + ".txt",**kwargs)
email.html = render_template(template + ".html",**kwargs)
mail.send(email)
| 39
| 68
| 0.738462
|
0c3b463f05ed28abc79e1f400dbf2d7edcb47386
| 1,101
|
py
|
Python
|
iroko/records/webpack.py
|
tocororo/iroko
|
e1cf08bbd565178c2d60244719aad6d288b48363
|
[
"MIT"
] | null | null | null |
iroko/records/webpack.py
|
tocororo/iroko
|
e1cf08bbd565178c2d60244719aad6d288b48363
|
[
"MIT"
] | 14
|
2021-02-02T22:47:27.000Z
|
2021-12-22T18:39:46.000Z
|
iroko/records/webpack.py
|
tocororo/iroko
|
e1cf08bbd565178c2d60244719aad6d288b48363
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2021. Universidad de Pinar del Rio
# This file is part of SCEIBA (sceiba.cu).
# SCEIBA is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
#
#
#
# iroko is free software; you can redistribute it and/or modify it under
# the terms of the MIT License; see LICENSE file for more details.
"""JS/CSS Webpack bundle to override search results template."""
from invenio_assets.webpack import WebpackThemeBundle
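# Theme-aware bundle: per-theme webpack entry points plus the React dependencies the search app needs.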
search_app = WebpackThemeBundle(
__name__,
'assets',
default='semantic-ui',
themes={
'bootstrap3': dict(entry={}, dependencies={}, aliases={}),
'semantic-ui': dict(
entry={
'iroko-search-app':
'./js/iroko_records/index.js',
},
dependencies={
"react": "^16.9.0",
"react-dom": "^16.9.0",
"react-overridable": "^0.0.2",
"semantic-ui-react": "^0.88.0"
}
)
}
)
| 28.973684
| 73
| 0.564941
|
3e9fe8cb3ae049462e45c998eba8dbfe17e24310
| 1,940
|
py
|
Python
|
tasks/search.py
|
mackenbaron/WeiboSpider
|
571ac8ce0cc2fe7548820c543f30a88c702962a6
|
[
"MIT"
] | 5
|
2019-09-21T05:19:24.000Z
|
2021-08-05T07:47:18.000Z
|
tasks/search.py
|
msak222/WeiboSpider
|
e330338c9565da6ba6c426cca4168df9f89faa63
|
[
"MIT"
] | null | null | null |
tasks/search.py
|
msak222/WeiboSpider
|
e330338c9565da6ba6c426cca4168df9f89faa63
|
[
"MIT"
] | 2
|
2019-05-08T04:14:21.000Z
|
2020-05-24T10:30:03.000Z
|
# coding:utf-8
from urllib import parse as url_parse
from logger.log import crawler
from tasks.workers import app
from page_get.basic import get_page
from config.conf import get_max_search_page
from page_parse import search as parse_search
from db.search_words import get_search_keywords
from db.wb_data import insert_weibo_data, get_wb_by_mid
# Only crawl original weibo posts; results are sorted by time by default. If we only crawl the first page, no login is needed.
url = 'http://s.weibo.com/weibo/{}&scope=ori&suball=1&page={}'
limit = get_max_search_page() + 1
@app.task(ignore_result=True)
def search_keyword(keyword):
cur_page = 1
encode_keyword = url_parse.quote(keyword)
while cur_page < limit:
cur_url = url.format(encode_keyword, cur_page)
search_page = get_page(cur_url)
if not search_page:
            crawler.warning('Did not fetch any weibo related to keyword {} this time; the page source is {}'.format(keyword, search_page))
return
search_list = parse_search.get_search_info(search_page)
        # First check whether the weibo already exists in the database; if it does, it was crawled
        # before (results are sorted by time by default), so stop here.
for wb_data in search_list:
rs = get_wb_by_mid(wb_data.weibo_id)
if rs:
                crawler.info('All weibo updated since the last search for keyword {} have been fetched'.format(keyword))
return
else:
insert_weibo_data(wb_data)
                # Use a network (task queue) call rather than a local call for now; the trade-offs of both approaches are still being weighed.
app.send_task('tasks.user.crawl_person_infos', args=(wb_data.uid,), queue='user_crawler',
routing_key='for_user_info')
        # Check whether the page contains a next-page link
if 'page next S_txt1 S_line1' in search_page:
cur_page += 1
else:
            crawler.info('Search for keyword {} finished'.format(keyword))
return
@app.task(ignore_result=True)
def excute_search_task():
    # The keywords should be read from the database
keywords = get_search_keywords()
for each in keywords:
app.send_task('tasks.search.search_keyword', args=(each[0],), queue='search_crawler',
routing_key='for_search_info')
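# Usage sketch (assumes the Celery app from tasks.workers is configured and a
# worker is consuming the 'search_crawler' queue):
#
#     from tasks.search import excute_search_task
#     excute_search_task.delay()
#
# This reads the stored keywords and fans out one search_keyword task per keyword.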
| 35.272727
| 105
| 0.665464
|
610b32bd2ab9aca51992ad29cb3ad7b88ac56436
| 3,633
|
py
|
Python
|
test/python/test_la.py
|
solomonik/ctf
|
b79428ca8e7a5fa6ef22197ff5129d1aace3134b
|
[
"BSD-2-Clause"
] | 56
|
2015-02-28T08:19:58.000Z
|
2021-11-04T16:46:17.000Z
|
test/python/test_la.py
|
solomonik/ctf
|
b79428ca8e7a5fa6ef22197ff5129d1aace3134b
|
[
"BSD-2-Clause"
] | 40
|
2015-04-08T14:58:42.000Z
|
2017-11-17T20:57:26.000Z
|
test/python/test_la.py
|
solomonik/ctf
|
b79428ca8e7a5fa6ef22197ff5129d1aace3134b
|
[
"BSD-2-Clause"
] | 17
|
2015-04-03T00:57:43.000Z
|
2018-03-30T20:46:14.000Z
|
#!/usr/bin/env python
import unittest
import numpy
import ctf
import os
import sys
from ctf import random
import numpy.linalg as la
def allclose(a, b):
    # compute the elementwise difference once; print both operands on mismatch
    diff = abs(ctf.to_nparray(a) - ctf.to_nparray(b)).sum()
    if diff > 1e-4:
        print(ctf.to_nparray(a))
        print(ctf.to_nparray(b))
    return diff <= 1e-4
class KnowValues(unittest.TestCase):
def test_svd(self):
m = 9
n = 5
k = 5
for dt in [numpy.float32, numpy.float64]:
A = ctf.random.random((m,n))
A = ctf.astensor(A,dtype=dt)
[U,S,VT]=ctf.svd(A,k)
[U1,S1,VT1]=la.svd(ctf.to_nparray(A),full_matrices=False)
self.assertTrue(allclose(A, ctf.dot(U,ctf.dot(ctf.diag(S),VT))))
self.assertTrue(allclose(ctf.eye(k), ctf.dot(U.T(), U)))
self.assertTrue(allclose(ctf.eye(k), ctf.dot(VT, VT.T())))
A = ctf.tensor((m,n),dtype=numpy.complex64)
rA = ctf.tensor((m,n),dtype=numpy.float32)
rA.fill_random()
A.real(rA)
iA = ctf.tensor((m,n),dtype=numpy.float32)
iA.fill_random()
A.imag(iA)
[U,S,VT]=ctf.svd(A,k)
self.assertTrue(allclose(A, ctf.dot(U,ctf.dot(ctf.diag(S),VT))))
self.assertTrue(allclose(ctf.eye(k,dtype=numpy.complex64), ctf.dot(ctf.conj(U.T()), U)))
self.assertTrue(allclose(ctf.eye(k,dtype=numpy.complex64), ctf.dot(VT, ctf.conj(VT.T()))))
A = ctf.tensor((m,n),dtype=numpy.complex128)
rA = ctf.tensor((m,n),dtype=numpy.float64)
rA.fill_random()
A.real(rA)
iA = ctf.tensor((m,n),dtype=numpy.float64)
iA.fill_random()
A.imag(iA)
[U,S,VT]=ctf.svd(A,k)
self.assertTrue(allclose(A, ctf.dot(U,ctf.dot(ctf.diag(S),VT))))
self.assertTrue(allclose(ctf.eye(k,dtype=numpy.complex128), ctf.dot(ctf.conj(U.T()), U)))
self.assertTrue(allclose(ctf.eye(k,dtype=numpy.complex128), ctf.dot(VT, ctf.conj(VT.T()))))
def test_qr(self):
m = 8
n = 4
for dt in [numpy.float32, numpy.float64]:
A = ctf.random.random((m,n))
A = ctf.astensor(A,dtype=dt)
[Q,R]=ctf.qr(A)
self.assertTrue(allclose(A, ctf.dot(Q,R)))
self.assertTrue(allclose(ctf.eye(n), ctf.dot(Q.T(), Q)))
A = ctf.tensor((m,n),dtype=numpy.complex64)
rA = ctf.tensor((m,n),dtype=numpy.float32)
rA.fill_random()
A.real(rA)
iA = ctf.tensor((m,n),dtype=numpy.float32)
iA.fill_random()
A.imag(iA)
[Q,R]=ctf.qr(A)
self.assertTrue(allclose(A, ctf.dot(Q,R)))
self.assertTrue(allclose(ctf.eye(n,dtype=numpy.complex64), ctf.dot(ctf.conj(Q.T()), Q)))
A = ctf.tensor((m,n),dtype=numpy.complex128)
rA = ctf.tensor((m,n),dtype=numpy.float64)
rA.fill_random()
A.real(rA)
iA = ctf.tensor((m,n),dtype=numpy.float64)
iA.fill_random()
A.imag(iA)
[Q,R]=ctf.qr(A)
self.assertTrue(allclose(A, ctf.dot(Q,R)))
self.assertTrue(allclose(ctf.eye(n,dtype=numpy.complex128), ctf.dot(ctf.conj(Q.T()), Q)))
if __name__ == "__main__":
numpy.random.seed(5330)
if ctf.comm().rank() != 0:
result = unittest.TextTestRunner(stream = open(os.devnull, 'w')).run(unittest.TestSuite(unittest.TestLoader().loadTestsFromTestCase(KnowValues)))
else:
print("Tests for QR and SVD")
result = unittest.TextTestRunner().run(unittest.TestSuite(unittest.TestLoader().loadTestsFromTestCase(KnowValues)))
ctf.MPI_Stop()
    sys.exit(not result.wasSuccessful())
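# Run sketch (assumes an MPI launcher; the 'mpirun' command and the process
# count are illustrative, not mandated by this file):
#
#     mpirun -np 4 python test/python/test_la.py
#
# Only rank 0 prints; the other ranks run the same suite against os.devnull,
# as the __main__ block above arranges.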
| 33.638889
| 153
| 0.586843
|
e721d6007373976b4d867fb2ffa95f5a63010061
| 356
|
py
|
Python
|
src/doom/parameters.py
|
phillikus/doom-ai
|
ee0d49ae46321960adacc054d041377622877b10
|
[
"MIT"
] | 2
|
2019-02-15T05:53:24.000Z
|
2020-08-27T09:25:08.000Z
|
src/doom/parameters.py
|
philipp007/doom-ai
|
ee0d49ae46321960adacc054d041377622877b10
|
[
"MIT"
] | null | null | null |
src/doom/parameters.py
|
philipp007/doom-ai
|
ee0d49ae46321960adacc054d041377622877b10
|
[
"MIT"
] | 1
|
2020-07-15T12:21:27.000Z
|
2020-07-15T12:21:27.000Z
|
class Parameters():
def __init__(self):
        self.scenario = 'Basic'          # ViZDoom scenario to load
        self.lr = 0.0001                 # learning rate
        self.gamma = 0.99                # reward discount factor
        self.tau = 1.                    # GAE smoothing parameter
        self.seed = 1                    # random seed
        self.num_processes = 4           # parallel training processes
        self.num_steps = 20              # steps per rollout
        self.max_episode_length = 10000  # hard cap on episode length
        self.num_actions = 3             # size of the action space
        self.model = 'a3c'               # training algorithm identifier
        self.num_updates = 100           # number of training updates
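# Usage sketch: consumers read hyperparameters from a single instance, e.g.
#
#     params = Parameters()
#     print(params.scenario, params.lr)  # Basic 0.0001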
| 27.384615
| 39
| 0.542135
|
e7e9dde9693fa33c0b9f7ee1971f53f984c6728d
| 1,047
|
py
|
Python
|
Stack/BinaryTreeInOrderTraversal.py
|
karan2808/Python-Data-Structures-and-Algorithms
|
a4b39ddf7297541d90dc4efcaab883f928281abd
|
[
"MIT"
] | 2
|
2021-01-31T03:42:01.000Z
|
2021-01-31T03:43:08.000Z
|
Stack/BinaryTreeInOrderTraversal.py
|
karan2808/Python-Data-Structures-and-Algorithms
|
a4b39ddf7297541d90dc4efcaab883f928281abd
|
[
"MIT"
] | null | null | null |
Stack/BinaryTreeInOrderTraversal.py
|
karan2808/Python-Data-Structures-and-Algorithms
|
a4b39ddf7297541d90dc4efcaab883f928281abd
|
[
"MIT"
] | 1
|
2021-01-31T03:42:02.000Z
|
2021-01-31T03:42:02.000Z
|
class Solution:
def inorderTraversal(self, root):
result = []
stack = []
current = root
        # loop while the current node exists or the stack is not empty
        while current is not None or len(stack) > 0:
# push all the left nodes onto stack
while current:
stack.append(current)
current = current.left
# get the stack top, visit root node
current = stack.pop()
result.append(current.val)
# go to the right node and repeat
current = current.right
return result
class TreeNode:
def __init__(self, val = 0, left = None, right = None):
self.val = val
self.left = left
self.right = right
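# For comparison, a recursive version of the same traversal (a minimal sketch;
# the iterative version above avoids Python's recursion limit on deep trees):
def inorder_recursive(node, result=None):
    if result is None:
        result = []
    if node is not None:
        inorder_recursive(node.left, result)
        result.append(node.val)
        inorder_recursive(node.right, result)
    return result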
def main():
mySol = Solution()
root = TreeNode(1, None, TreeNode(2))
root.right.left = TreeNode(1)
root.right.right = TreeNode(3)
print("The inorder traversal of the BT is ")
print(mySol.inorderTraversal(root))
if __name__ == "__main__":
main()
| 26.846154
| 59
| 0.557784
|
3c9f5ef29ec7551a6753dfff161963900a3daa49
| 65
|
py
|
Python
|
Configuration.py
|
VincentMelia/PythonCartoons
|
835d48a99d9f7bdaa8098fc85ae0d8148f1c0a5e
|
[
"MIT"
] | null | null | null |
Configuration.py
|
VincentMelia/PythonCartoons
|
835d48a99d9f7bdaa8098fc85ae0d8148f1c0a5e
|
[
"MIT"
] | null | null | null |
Configuration.py
|
VincentMelia/PythonCartoons
|
835d48a99d9f7bdaa8098fc85ae0d8148f1c0a5e
|
[
"MIT"
] | null | null | null |
import os
# os.getenv returns None when the variable is unset, which would make the
# concatenation raise a TypeError; default to an empty string instead.
Root_URL = 'https://' + os.getenv('cartoon_root_url', '')
| 16.25
| 53
| 0.692308
|
6c9323e18acf8e431568ad3239236f383f7abd84
| 212
|
py
|
Python
|
WebMirror/management/rss_parser_funcs/feed_parse_extractLorCromwell.py
|
fake-name/ReadableWebProxy
|
ed5c7abe38706acc2684a1e6cd80242a03c5f010
|
[
"BSD-3-Clause"
] | 193
|
2016-08-02T22:04:35.000Z
|
2022-03-09T20:45:41.000Z
|
WebMirror/management/rss_parser_funcs/feed_parse_extractLorCromwell.py
|
fake-name/ReadableWebProxy
|
ed5c7abe38706acc2684a1e6cd80242a03c5f010
|
[
"BSD-3-Clause"
] | 533
|
2016-08-23T20:48:23.000Z
|
2022-03-28T15:55:13.000Z
|
WebMirror/management/rss_parser_funcs/feed_parse_extractLorCromwell.py
|
rrosajp/ReadableWebProxy
|
ed5c7abe38706acc2684a1e6cd80242a03c5f010
|
[
"BSD-3-Clause"
] | 19
|
2015-08-13T18:01:08.000Z
|
2021-07-12T17:13:09.000Z
|
def extractLorCromwell(item):
    """
    Feed parser for 'Lor Cromwell' release items; relies on
    extractVolChapterFragmentPostfix being provided by the parser framework.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol or frag) or 'preview' in item['title'].lower():
        return None
    return False
| 23.555556
| 74
| 0.698113
|
642e39b6d407736869f6c79415e58d4d2bc8e422
| 3,437
|
py
|
Python
|
students/K33402/Dubina Sergey/practical_works/prac_2/django_project_dubina/django_project_dubina/settings.py
|
ShubhamKunal/ITMO_ICT_WebDevelopment_2020-2021
|
bb91c91a56d21cec2b12ae4cc722eaa652a88420
|
[
"MIT"
] | 4
|
2020-09-03T15:41:42.000Z
|
2021-12-24T15:28:20.000Z
|
students/K33402/Dubina Sergey/practical_works/prac_2/django_project_dubina/django_project_dubina/settings.py
|
ShubhamKunal/ITMO_ICT_WebDevelopment_2020-2021
|
bb91c91a56d21cec2b12ae4cc722eaa652a88420
|
[
"MIT"
] | 48
|
2020-09-13T20:22:42.000Z
|
2021-04-30T11:13:30.000Z
|
students/K33402/Dubina Sergey/practical_works/prac_2/django_project_dubina/django_project_dubina/settings.py
|
ShubhamKunal/ITMO_ICT_WebDevelopment_2020-2021
|
bb91c91a56d21cec2b12ae4cc722eaa652a88420
|
[
"MIT"
] | 69
|
2020-09-06T10:32:37.000Z
|
2021-11-28T18:13:17.000Z
|
"""
Django settings for django_project_dubina project.
Generated by 'django-admin startproject' using Django 3.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
from pathlib import Path
SETTINGS_PATH = os.path.dirname(os.path.dirname(__file__))
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '8x*=8u#n56g)kim0ck9+#6_^19$^4o=^(%l)%=@a_uh1vqh#2c'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'project_first_app.apps.ProjectFirstAppConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'django_project_dubina.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(SETTINGS_PATH, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_project_dubina.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
# AUTH_USER_MODEL = 'project_first_app.MyUser'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
| 27.717742
| 92
| 0.680244
|
94b209d9880ce721e9d1ccfadf0036f93936d3ab
| 3,403
|
py
|
Python
|
test/test_motor_bulk.py
|
myfreecomm/motor
|
bf1382c906045d9d9ad14106486a02f6b8721ada
|
[
"Apache-2.0"
] | null | null | null |
test/test_motor_bulk.py
|
myfreecomm/motor
|
bf1382c906045d9d9ad14106486a02f6b8721ada
|
[
"Apache-2.0"
] | null | null | null |
test/test_motor_bulk.py
|
myfreecomm/motor
|
bf1382c906045d9d9ad14106486a02f6b8721ada
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test Motor's bulk API."""
from __future__ import unicode_literals
import unittest
from pymongo.errors import BulkWriteError
from tornado.testing import gen_test
import motor
from test import MotorTest
class MotorBulkTest(MotorTest):
# This is just a smattering of tests, since the logic is all in PyMongo.
@gen_test
def test_multiple_error_ordered_batch(self):
yield self.collection.remove()
yield self.collection.ensure_index('a', unique=True)
try:
bulk = self.collection.initialize_ordered_bulk_op()
self.assertTrue(isinstance(bulk, motor.MotorBulkOperationBuilder))
bulk.insert({'b': 1, 'a': 1})
bulk.find({'b': 2}).upsert().update_one({'$set': {'a': 1}})
bulk.find({'b': 3}).upsert().update_one({'$set': {'a': 2}})
bulk.find({'b': 2}).upsert().update_one({'$set': {'a': 1}})
bulk.insert({'b': 4, 'a': 3})
bulk.insert({'b': 5, 'a': 1})
try:
yield bulk.execute()
except BulkWriteError as exc:
result = exc.details
self.assertEqual(exc.code, 65)
else:
self.fail("Error not raised")
self.assertEqual(1, result['nInserted'])
self.assertEqual(1, len(result['writeErrors']))
error = result['writeErrors'][0]
self.assertEqual(1, error['index'])
failed = error['op']
self.assertEqual(2, failed['q']['b'])
self.assertEqual(1, failed['u']['$set']['a'])
self.assertFalse(failed['multi'])
self.assertTrue(failed['upsert'])
cursor = self.collection.find({}, {'_id': False})
docs = yield cursor.to_list(None)
self.assertEqual([{'a': 1, 'b': 1}], docs)
finally:
yield self.collection.drop_index([('a', 1)])
@gen_test
def test_single_unordered_batch(self):
yield self.collection.remove()
bulk = self.collection.initialize_unordered_bulk_op()
self.assertTrue(isinstance(bulk, motor.MotorBulkOperationBuilder))
bulk.insert({'a': 1})
bulk.find({'a': 1}).update_one({'$set': {'b': 1}})
bulk.find({'a': 2}).upsert().update_one({'$set': {'b': 2}})
bulk.insert({'a': 3})
bulk.find({'a': 3}).remove()
result = yield bulk.execute()
self.assertEqual(0, len(result['writeErrors']))
upserts = result['upserted']
self.assertEqual(1, len(upserts))
self.assertEqual(2, upserts[0]['index'])
self.assertTrue(upserts[0].get('_id'))
a_values = yield self.collection.distinct('a')
self.assertEqual(
set([1, 2]),
set(a_values))
if __name__ == '__main__':
unittest.main()
| 35.082474
| 78
| 0.597414
|
440b0ca82cedc39f507ee3fb34d0d6ae8f990978
| 16,207
|
py
|
Python
|
app/models.py
|
lcfyi/ubcgrades
|
a00ee195eab320a9b976222e9c6d457c81d7872c
|
[
"MIT"
] | null | null | null |
app/models.py
|
lcfyi/ubcgrades
|
a00ee195eab320a9b976222e9c6d457c81d7872c
|
[
"MIT"
] | null | null | null |
app/models.py
|
lcfyi/ubcgrades
|
a00ee195eab320a9b976222e9c6d457c81d7872c
|
[
"MIT"
] | null | null | null |
from app import db
import enum
class CampusEnum(enum.Enum):
UBCV = 0
UBCO = 1
class SessionEnum(enum.Enum):
W = 0
S = 1
class PAIRReportsGrade(db.Model):
__tablename__ = "PAIRReportsGrade"
campus = db.Column(db.Enum(CampusEnum), primary_key=True) # UBCV or UBCO
year = db.Column(db.String(4), primary_key=True) # Ex: 2012
session = db.Column(db.Enum(SessionEnum), primary_key=True) # W or S
faculty_title = db.Column(db.String())
subject = db.Column(db.String(4), primary_key=True) # Ex: BA, KIN, MATH
subject_title = db.Column(db.String())
course = db.Column(db.String(3), primary_key=True) # Ex: 001, 200
detail = db.Column(db.String(3), primary_key=True) # Ex: A, B, C
section = db.Column(db.String(7), primary_key=True) # Ex: 001, 100, GIS, T1A, OVERALL
course_title = db.Column(db.String())
educators = db.Column(db.String())
enrolled = db.Column(db.Integer())
average = db.Column(db.Float(), nullable=True)
stdev = db.Column(db.Float(), nullable=True)
high = db.Column(db.Integer())
low = db.Column(db.Integer())
num_pass = db.Column(db.Integer())
num_fail = db.Column(db.Integer())
withdrew = db.Column(db.Integer())
audit = db.Column(db.Integer())
other = db.Column(db.Integer())
grade_0_9 = db.Column(db.Integer())
grade_10_19 = db.Column(db.Integer())
grade_20_29 = db.Column(db.Integer())
grade_30_39 = db.Column(db.Integer())
grade_40_49 = db.Column(db.Integer())
grade_lt50 = db.Column(db.Integer()) # Num less than 50
grade_50_54 = db.Column(db.Integer())
grade_55_59 = db.Column(db.Integer())
grade_60_63 = db.Column(db.Integer())
grade_64_67 = db.Column(db.Integer())
grade_68_71 = db.Column(db.Integer())
grade_72_75 = db.Column(db.Integer())
grade_76_79 = db.Column(db.Integer())
grade_80_84 = db.Column(db.Integer())
grade_85_89 = db.Column(db.Integer())
grade_90_100 = db.Column(db.Integer())
def __repr__(self):
return f"<PAIRReportsGrade {self.campus.name}-{self.year}{self.session.name}-{self.subject}-{self.course}" \
f"{self.detail if self.detail != '' else ''}-{self.section}>"
def to_dict(self):
return {
"grades": {
"0-9%": self.grade_0_9,
"10-19%": self.grade_10_19,
"20-29%": self.grade_20_29,
"30-39%": self.grade_30_39,
"40-49%": self.grade_40_49,
"<50%": self.grade_lt50,
"50-54%": self.grade_50_54,
"55-59%": self.grade_55_59,
"60-63%": self.grade_60_63,
"64-67%": self.grade_64_67,
"68-71%": self.grade_68_71,
"72-75%": self.grade_72_75,
"76-79%": self.grade_76_79,
"80-84%": self.grade_80_84,
"85-89%": self.grade_85_89,
"90-100%": self.grade_90_100
},
"campus": self.campus.name,
"year": self.year,
"session": self.session.name,
"faculty_title": self.faculty_title,
"subject": self.subject,
"subject_title": self.subject_title,
"course": self.course,
"detail": self.detail,
"section": self.section,
"course_title": self.course_title,
"educators": self.educators,
"enrolled": self.enrolled,
"average": self.average if self.average is not None else '',
"stdev": self.stdev if self.stdev is not None else '',
"high": self.high,
"low": self.low,
"pass": self.num_pass,
"fail": self.num_fail,
"withdrew": self.withdrew,
"audit": self.audit,
"other": self.other
}
class TableauDashboardGrade(db.Model):
__tablename__ = "TableauDashboardGrade"
campus = db.Column(db.Enum(CampusEnum), primary_key=True) # UBCV or UBCO
year = db.Column(db.String(4), primary_key=True) # Ex: 2012
session = db.Column(db.Enum(SessionEnum), primary_key=True) # W or S
faculty_title = db.Column(db.String())
subject = db.Column(db.String(4), primary_key=True) # Ex: BA, KIN, MATH
subject_title = db.Column(db.String())
course = db.Column(db.String(3), primary_key=True) # Ex: 001, 200
detail = db.Column(db.String(3), primary_key=True) # Ex: A, B, C
section = db.Column(db.String(7), primary_key=True) # Ex: 001, 100, GIS, T1A, OVERALL
course_title = db.Column(db.String())
educators = db.Column(db.String())
enrolled = db.Column(db.Integer())
average = db.Column(db.Float())
stdev = db.Column(db.Float())
high = db.Column(db.Integer())
low = db.Column(db.Integer())
# We note these fields are nullable
grade_lt50 = db.Column(db.Integer()) # Num less than 50
grade_50_54 = db.Column(db.Integer())
grade_55_59 = db.Column(db.Integer())
grade_60_63 = db.Column(db.Integer())
grade_64_67 = db.Column(db.Integer())
grade_68_71 = db.Column(db.Integer())
grade_72_75 = db.Column(db.Integer())
grade_76_79 = db.Column(db.Integer())
grade_80_84 = db.Column(db.Integer())
grade_85_89 = db.Column(db.Integer())
grade_90_100 = db.Column(db.Integer())
def __repr__(self):
return f"<TableauDashboardGrade {self.campus.name}-{self.year}{self.session.name}-{self.subject}-{self.course}" \
f"{self.detail if self.detail != '' else ''}-{self.section}>"
def to_dict(self):
return {
"grades": {
"<50%": self.grade_lt50,
"50-54%": self.grade_50_54,
"55-59%": self.grade_55_59,
"60-63%": self.grade_60_63,
"64-67%": self.grade_64_67,
"68-71%": self.grade_68_71,
"72-75%": self.grade_72_75,
"76-79%": self.grade_76_79,
"80-84%": self.grade_80_84,
"85-89%": self.grade_85_89,
"90-100%": self.grade_90_100
},
"campus": self.campus.name,
"year": self.year,
"session": self.session.name,
"faculty_title": self.faculty_title,
"subject": self.subject,
"subject_title": self.subject_title,
"course": self.course,
"detail": self.detail,
"section": self.section,
"course_title": self.course_title,
"educators": self.educators,
"enrolled": self.enrolled,
"average": self.average,
"stdev": self.stdev,
"high": self.high,
"low": self.low,
}
class Course(db.Model):
__tablename__ = "Course"
campus = db.Column(db.Enum(CampusEnum), primary_key=True) # UBCV or UBCO
faculty_title = db.Column(db.String())
subject = db.Column(db.String(4), primary_key=True) # Ex: BA, KIN, MATH
subject_title = db.Column(db.String())
course = db.Column(db.String(3), primary_key=True) # Ex: 001, 200
detail = db.Column(db.String(3), primary_key=True) # Ex: A, B, C
course_title = db.Column(db.String())
average = db.Column(db.Float())
average_past_5_yrs = db.Column(db.Float())
stdev = db.Column(db.Float())
max_course_avg = db.Column(db.Integer())
min_course_avg = db.Column(db.Integer())
def __repr__(self):
return f"<Course {self.campus.name}--{self.subject}-{self.course}{self.detail if self.detail != '' else ''}>"
def to_dict(self):
values = {
"campus": self.campus.name,
"faculty_title": self.faculty_title,
"subject": self.subject,
"subject_title": self.subject_title,
"course": self.course,
"detail": self.detail,
"course_title": self.course_title,
"average": self.average,
"average_past_5_yrs": self.average_past_5_yrs,
"stdev": self.stdev,
"max_course_avg": self.max_course_avg,
"min_course_avg": self.min_course_avg,
}
for key, val in values.items():
if val is None:
values[key] = ''
return values
class CourseDistributions(db.Model):
__tablename__ = 'CourseDistributions'
campus = db.Column(db.Enum(CampusEnum), primary_key=True) # UBCV or UBCO
year = db.Column(db.String(4), primary_key=True) # Ex: 2012
session = db.Column(db.Enum(SessionEnum), primary_key=True) # W or S
subject = db.Column(db.String(4), primary_key=True) # Ex: BA, KIN, MATH
course = db.Column(db.String(3), primary_key=True) # Ex: 001, 200
detail = db.Column(db.String(3), primary_key=True) # Ex: A, B, C
# We note these fields are nullable
grade_0_9 = db.Column(db.Integer())
grade_10_19 = db.Column(db.Integer())
grade_20_29 = db.Column(db.Integer())
grade_30_39 = db.Column(db.Integer())
grade_40_49 = db.Column(db.Integer())
grade_lt50 = db.Column(db.Integer()) # Num less than 50
grade_50_54 = db.Column(db.Integer())
grade_55_59 = db.Column(db.Integer())
grade_60_63 = db.Column(db.Integer())
grade_64_67 = db.Column(db.Integer())
grade_68_71 = db.Column(db.Integer())
grade_72_75 = db.Column(db.Integer())
grade_76_79 = db.Column(db.Integer())
grade_80_84 = db.Column(db.Integer())
grade_85_89 = db.Column(db.Integer())
grade_90_100 = db.Column(db.Integer())
def to_dict(self):
return {
"grades": {
"0-9%": self.grade_0_9,
"10-19%": self.grade_10_19,
"20-29%": self.grade_20_29,
"30-39%": self.grade_30_39,
"40-49%": self.grade_40_49,
"<50%": self.grade_lt50,
"50-54%": self.grade_50_54,
"55-59%": self.grade_55_59,
"60-63%": self.grade_60_63,
"64-67%": self.grade_64_67,
"68-71%": self.grade_68_71,
"72-75%": self.grade_72_75,
"76-79%": self.grade_76_79,
"80-84%": self.grade_80_84,
"85-89%": self.grade_85_89,
"90-100%": self.grade_90_100
},
"campus": self.campus.name,
"year": self.year,
"session": self.session.name,
"subject": self.subject,
"course": self.course,
"detail": self.detail
}
class Educator(db.Model):
__tablename__ = 'Educator'
campus = db.Column(db.Enum(CampusEnum), primary_key=True) # UBCV or UBCO
subject = db.Column(db.String(4), primary_key=True) # Ex: BA, KIN, MATH
course = db.Column(db.String(3), primary_key=True) # Ex: 001, 200
detail = db.Column(db.String(3), primary_key=True) # Ex: A, B, C
name = db.Column(db.String(), primary_key=True)
ys_1996S = db.Column(db.Integer())
ys_1996W = db.Column(db.Integer())
ys_1997S = db.Column(db.Integer())
ys_1997W = db.Column(db.Integer())
ys_1998S = db.Column(db.Integer())
ys_1998W = db.Column(db.Integer())
ys_1999S = db.Column(db.Integer())
ys_1999W = db.Column(db.Integer())
ys_2000S = db.Column(db.Integer())
ys_2000W = db.Column(db.Integer())
ys_2001S = db.Column(db.Integer())
ys_2001W = db.Column(db.Integer())
ys_2002S = db.Column(db.Integer())
ys_2002W = db.Column(db.Integer())
ys_2003S = db.Column(db.Integer())
ys_2003W = db.Column(db.Integer())
ys_2004S = db.Column(db.Integer())
ys_2004W = db.Column(db.Integer())
ys_2005S = db.Column(db.Integer())
ys_2005W = db.Column(db.Integer())
ys_2006S = db.Column(db.Integer())
ys_2006W = db.Column(db.Integer())
ys_2007S = db.Column(db.Integer())
ys_2007W = db.Column(db.Integer())
ys_2008S = db.Column(db.Integer())
ys_2008W = db.Column(db.Integer())
ys_2009S = db.Column(db.Integer())
ys_2009W = db.Column(db.Integer())
ys_2010S = db.Column(db.Integer())
ys_2010W = db.Column(db.Integer())
ys_2011S = db.Column(db.Integer())
ys_2011W = db.Column(db.Integer())
ys_2012S = db.Column(db.Integer())
ys_2012W = db.Column(db.Integer())
ys_2013S = db.Column(db.Integer())
ys_2013W = db.Column(db.Integer())
ys_2014S = db.Column(db.Integer())
ys_2014W = db.Column(db.Integer())
ys_2015S = db.Column(db.Integer())
ys_2015W = db.Column(db.Integer())
ys_2016S = db.Column(db.Integer())
ys_2016W = db.Column(db.Integer())
ys_2017S = db.Column(db.Integer())
ys_2017W = db.Column(db.Integer())
ys_2018S = db.Column(db.Integer())
ys_2018W = db.Column(db.Integer())
ys_2019S = db.Column(db.Integer())
ys_2019W = db.Column(db.Integer())
ys_2020S = db.Column(db.Integer())
def __repr__(self):
return f"<Educator {self.campus}-{self.subject}-{self.course}{self.course.detail}>"
def to_dict(self):
data = {}
yearsessions = {}
for key, val in vars(self).items():
if "ys_" in key:
if val != 0:
yearsessions[key[3:]] = val
else:
data[key] = val
data['yearsessions'] = yearsessions
data['campus'] = self.campus.name
data.pop('_sa_instance_state')
return data
class CourseAverageHistory(db.Model):
__tablename__ = 'CourseAverageHistory'
campus = db.Column(db.Enum(CampusEnum), primary_key=True) # UBCV or UBCO
subject = db.Column(db.String(4), primary_key=True) # Ex: BA, KIN, MATH
course = db.Column(db.String(3), primary_key=True) # Ex: 001, 200
detail = db.Column(db.String(3), primary_key=True) # Ex: A, B, C
ys_1996S = db.Column(db.Integer())
ys_1996W = db.Column(db.Integer())
ys_1997S = db.Column(db.Integer())
ys_1997W = db.Column(db.Integer())
ys_1998S = db.Column(db.Integer())
ys_1998W = db.Column(db.Integer())
ys_1999S = db.Column(db.Integer())
ys_1999W = db.Column(db.Integer())
ys_2000S = db.Column(db.Integer())
ys_2000W = db.Column(db.Integer())
ys_2001S = db.Column(db.Integer())
ys_2001W = db.Column(db.Integer())
ys_2002S = db.Column(db.Integer())
ys_2002W = db.Column(db.Integer())
ys_2003S = db.Column(db.Integer())
ys_2003W = db.Column(db.Integer())
ys_2004S = db.Column(db.Integer())
ys_2004W = db.Column(db.Integer())
ys_2005S = db.Column(db.Integer())
ys_2005W = db.Column(db.Integer())
ys_2006S = db.Column(db.Integer())
ys_2006W = db.Column(db.Integer())
ys_2007S = db.Column(db.Integer())
ys_2007W = db.Column(db.Integer())
ys_2008S = db.Column(db.Integer())
ys_2008W = db.Column(db.Integer())
ys_2009S = db.Column(db.Integer())
ys_2009W = db.Column(db.Integer())
ys_2010S = db.Column(db.Integer())
ys_2010W = db.Column(db.Integer())
ys_2011S = db.Column(db.Integer())
ys_2011W = db.Column(db.Integer())
ys_2012S = db.Column(db.Integer())
ys_2012W = db.Column(db.Integer())
ys_2013S = db.Column(db.Integer())
ys_2013W = db.Column(db.Integer())
ys_2014S = db.Column(db.Integer())
ys_2014W = db.Column(db.Integer())
ys_2015S = db.Column(db.Integer())
ys_2015W = db.Column(db.Integer())
ys_2016S = db.Column(db.Integer())
ys_2016W = db.Column(db.Integer())
ys_2017S = db.Column(db.Integer())
ys_2017W = db.Column(db.Integer())
ys_2018S = db.Column(db.Integer())
ys_2018W = db.Column(db.Integer())
ys_2019S = db.Column(db.Integer())
ys_2019W = db.Column(db.Integer())
ys_2020S = db.Column(db.Integer())
def __repr__(self):
return f"<CourseAverageHistory {self.campus}-{self.subject}-{self.course}{self.course.detail}>"
def to_dict(self):
data = {}
yearsessions = {}
for key, val in vars(self).items():
if "ys_" in key:
if val != 0:
yearsessions[key[3:]] = val
else:
data[key] = val
data['yearsessions'] = yearsessions
data['campus'] = self.campus.name
data.pop('_sa_instance_state')
return data
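# Usage sketch (assumes a Flask application context with the SQLAlchemy `db`
# session initialised; the subject/course values are illustrative):
#
#     course = Course.query.filter_by(
#         campus=CampusEnum.UBCV, subject='MATH', course='100', detail='').first()
#     if course is not None:
#         print(course.to_dict())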
| 38.496437
| 121
| 0.590424
|
7370192ddd843201844af5eed98443bf9077be68
| 1,617
|
py
|
Python
|
services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py
|
elisabettai/osparc-simcore
|
ad7b6e05111b50fe95e49306a992170490a7247f
|
[
"MIT"
] | null | null | null |
services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py
|
elisabettai/osparc-simcore
|
ad7b6e05111b50fe95e49306a992170490a7247f
|
[
"MIT"
] | 1
|
2021-11-29T13:38:09.000Z
|
2021-11-29T13:38:09.000Z
|
services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py
|
mrnicegyu11/osparc-simcore
|
b6fa6c245dbfbc18cc74a387111a52de9b05d1f4
|
[
"MIT"
] | null | null | null |
from typing import Optional
from aiodocker.exceptions import DockerError
from httpx import Response
from models_library.projects_nodes import NodeID
from ...core.errors import DirectorException
class DynamicSidecarError(DirectorException):
pass
class GenericDockerError(DynamicSidecarError):
"""Generic docker library error"""
def __init__(self, msg: str, original_exception: DockerError):
super().__init__(msg + f": {original_exception.message}")
self.original_exception = original_exception
class DynamicSidecarNotFoundError(DirectorException):
"""Dynamic sidecar was not found"""
def __init__(self, node_uuid: NodeID):
super().__init__(f"node {node_uuid} not found")
class DynamicSchedulerException(DirectorException):
"""
    Used to signal that something went wrong during
    the service's observation.
"""
class EntrypointContainerNotFoundError(DirectorException):
"""Raised while the entrypoint container was nto yet started"""
class LegacyServiceIsNotSupportedError(DirectorException):
"""This API is not implemented by the director-v0"""
class DynamicSidecarUnexpectedResponseStatus(DirectorException):
"""Used to signal that there was an issue with a request"""
def __init__(self, response: Response, msg: Optional[str] = None):
formatted_tag = f"[during {msg}]" if msg is not None else ""
message = (
f"Unexpected response {formatted_tag}: status={response.status_code}, "
f"body={response.text}"
)
super().__init__(message)
self.response = response
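# Usage sketch (the httpx Response object and the message are illustrative):
#
#     raise DynamicSidecarUnexpectedResponseStatus(response, "saving service state")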
| 29.4
| 83
| 0.724799
|
dcff8c20ff16500c29b8afeb9247ab8865b88245
| 694
|
bzl
|
Python
|
third_party/pybind11/BUILD.bzl
|
howl-anderson/text
|
3eb4cdfbda1c4e2154b7055a75b036b22c1ae573
|
[
"Apache-2.0"
] | 14,668
|
2015-01-01T01:57:10.000Z
|
2022-03-31T23:33:32.000Z
|
third_party/pybind11/BUILD.bzl
|
howl-anderson/text
|
3eb4cdfbda1c4e2154b7055a75b036b22c1ae573
|
[
"Apache-2.0"
] | 276
|
2019-06-07T23:12:52.000Z
|
2022-03-31T17:38:05.000Z
|
third_party/pybind11/BUILD.bzl
|
howl-anderson/text
|
3eb4cdfbda1c4e2154b7055a75b036b22c1ae573
|
[
"Apache-2.0"
] | 5,941
|
2015-01-02T11:32:21.000Z
|
2022-03-31T16:35:46.000Z
|
"""
BUILD file for pybind11 package, since the github version does not have one.
"""
package(default_visibility = ["//visibility:public"])
cc_library(
name = "pybind11",
hdrs = glob(
include = [
"include/pybind11/*.h",
"include/pybind11/detail/*.h",
],
exclude = [
"include/pybind11/common.h",
"include/pybind11/eigen.h",
],
),
copts = [
"-fexceptions",
"-Wno-undefined-inline",
"-Wno-pragma-once-outside-header",
],
includes = ["include"],
strip_include_prefix = "include",
deps = [
"@org_tensorflow//third_party/python_runtime:headers",
],
)
| 23.133333
| 76
| 0.54755
|
bed726c5e04d02862bea38b3ca8afcb1444d486e
| 1,643
|
py
|
Python
|
scripts/utils_pandas.py
|
xR86/ml-stuff
|
2a1b79408897171b78032ff2531ab6f8b18be6c4
|
[
"MIT"
] | 3
|
2018-12-11T03:03:15.000Z
|
2020-02-11T19:38:07.000Z
|
scripts/utils_pandas.py
|
xR86/ml-stuff
|
2a1b79408897171b78032ff2531ab6f8b18be6c4
|
[
"MIT"
] | 6
|
2017-05-31T20:58:32.000Z
|
2021-02-16T23:13:15.000Z
|
scripts/utils_pandas.py
|
xR86/ml-stuff
|
2a1b79408897171b78032ff2531ab6f8b18be6c4
|
[
"MIT"
] | null | null | null |
import pandas as pd
# Ideally, selected features should be enumerated in markdown
# ___________
# |_md_______|
# | feature1 |
# | `id` |
# | feature2 |
# | `...` |
# |__________|
keep_this = """
Feature 1
Feature 2
"""
def feature_lst(lst=keep_this):
return list(map(lambda x: x.strip(), filter(None, lst.split('\n'))))
def init():
limit_on_flag = False
if limit_on_flag:
pd.set_option('display.max_rows', 60)
pd.set_option('display.max_columns', 20)
else:
pd.set_option('display.max_rows', None)
pd.set_option('display.max_columns', None)
# default = 60
print('display.max_rows = %s' % pd.get_option('display.max_rows'))
# default = 20
print('display.max_columns = %s' % pd.get_option('display.max_columns'))
pd.set_option('display.float_format', lambda x: '%.3f' % x)
print('display.float_format = %s' % pd.get_option('display.float_format'))
def info(df):
print('Dataset range: %s rows, %s columns\n' % df.shape)
df.info(verbose=False)
def chunker(df_tf, init_i = 0, i = 0, small = False, lst = None):
    # lst holds the feature columns to keep; default to the list declared
    # at the top of this module
    if lst is None:
        lst = feature_lst()
    # can be wrapped in tqdm ?
for chunk in df_tf:
if init_i != 0:
print('Step ' + str(i))
print('> left - ' + str(init_i) + ' skip')
init_i -= 1
i += 1
del chunk
continue
print('Step ' + str(i))
print('> Chunk accessed')
proc_small_df = chunk[lst]
print('> Chunk selected')
#if small:
# pd.concat([new_proc_df, proc_small_df], ignore_index=True)
print('> Saving csv')
proc_small_df.to_csv('proc.csv.' + str(i))
del proc_small_df
del chunk
print('next')
i += 1
if __name__ == '__main__':
print("List should be ['Feature 1', 'Feature 2']: %s" % feature_lst())
| 22.506849
| 75
| 0.649422
|
07910697d21fc30c1fbebc4a924e132fa34204b5
| 8,932
|
py
|
Python
|
homeassistant/components/mqtt/sensor.py
|
jasperro/core
|
26d7b2164e8a971506790ae5af06f31abdf278b5
|
[
"Apache-2.0"
] | 2
|
2020-03-02T19:17:52.000Z
|
2020-03-02T19:17:53.000Z
|
homeassistant/components/mqtt/sensor.py
|
jasperro/core
|
26d7b2164e8a971506790ae5af06f31abdf278b5
|
[
"Apache-2.0"
] | 6
|
2021-02-08T21:05:36.000Z
|
2022-03-12T00:54:00.000Z
|
homeassistant/components/mqtt/sensor.py
|
jasperro/core
|
26d7b2164e8a971506790ae5af06f31abdf278b5
|
[
"Apache-2.0"
] | 1
|
2020-03-07T10:43:50.000Z
|
2020-03-07T10:43:50.000Z
|
"""Support for MQTT sensors."""
from datetime import timedelta
import json
import logging
from typing import Optional
import voluptuous as vol
from homeassistant.components import mqtt, sensor
from homeassistant.components.sensor import DEVICE_CLASSES_SCHEMA
from homeassistant.const import (
CONF_DEVICE,
CONF_DEVICE_CLASS,
CONF_FORCE_UPDATE,
CONF_ICON,
CONF_NAME,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from homeassistant.util import dt as dt_util
from . import (
ATTR_DISCOVERY_HASH,
CONF_QOS,
CONF_STATE_TOPIC,
CONF_UNIQUE_ID,
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
subscription,
)
from .discovery import MQTT_DISCOVERY_NEW, clear_discovery_hash
_LOGGER = logging.getLogger(__name__)
CONF_EXPIRE_AFTER = "expire_after"
CONF_JSON_ATTRS = "json_attributes"
DEFAULT_NAME = "MQTT Sensor"
DEFAULT_FORCE_UPDATE = False
PLATFORM_SCHEMA = (
mqtt.MQTT_RO_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_EXPIRE_AFTER): cv.positive_int,
vol.Optional(CONF_FORCE_UPDATE, default=DEFAULT_FORCE_UPDATE): cv.boolean,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_JSON_ATTRS, default=[]): cv.ensure_list_csv,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_UNIQUE_ID): cv.string,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
}
)
.extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema)
.extend(mqtt.MQTT_JSON_ATTRS_SCHEMA.schema)
)
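# A minimal configuration.yaml sketch accepted by the schema above (topic and
# name values are illustrative):
#
#     sensor:
#       - platform: mqtt
#         name: "Bedroom Temperature"
#         state_topic: "home/bedroom/temperature"
#         unit_of_measurement: "°C"
#         expire_after: 300
#         value_template: "{{ value_json.temperature }}"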
async def async_setup_platform(
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
):
"""Set up MQTT sensors through configuration.yaml."""
await _async_setup_entity(config, async_add_entities)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up MQTT sensors dynamically through MQTT discovery."""
async def async_discover_sensor(discovery_payload):
"""Discover and add a discovered MQTT sensor."""
discovery_data = discovery_payload.discovery_data
try:
config = PLATFORM_SCHEMA(discovery_payload)
await _async_setup_entity(
config, async_add_entities, config_entry, discovery_data
)
except Exception:
clear_discovery_hash(hass, discovery_data[ATTR_DISCOVERY_HASH])
raise
async_dispatcher_connect(
hass, MQTT_DISCOVERY_NEW.format(sensor.DOMAIN, "mqtt"), async_discover_sensor
)
async def _async_setup_entity(
config: ConfigType, async_add_entities, config_entry=None, discovery_data=None
):
"""Set up MQTT sensor."""
async_add_entities([MqttSensor(config, config_entry, discovery_data)])
class MqttSensor(
MqttAttributes, MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo, Entity
):
"""Representation of a sensor that can be updated using MQTT."""
def __init__(self, config, config_entry, discovery_data):
"""Initialize the sensor."""
self._config = config
self._unique_id = config.get(CONF_UNIQUE_ID)
self._state = None
self._sub_state = None
self._expiration_trigger = None
self._attributes = None
device_config = config.get(CONF_DEVICE)
if config.get(CONF_JSON_ATTRS):
_LOGGER.warning(
'configuration variable "json_attributes" is '
'deprecated, replace with "json_attributes_topic"'
)
MqttAttributes.__init__(self, config)
MqttAvailability.__init__(self, config)
MqttDiscoveryUpdate.__init__(self, discovery_data, self.discovery_update)
MqttEntityDeviceInfo.__init__(self, device_config, config_entry)
async def async_added_to_hass(self):
"""Subscribe to MQTT events."""
await super().async_added_to_hass()
await self._subscribe_topics()
async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = PLATFORM_SCHEMA(discovery_payload)
self._config = config
await self.attributes_discovery_update(config)
await self.availability_discovery_update(config)
await self.device_info_discovery_update(config)
await self._subscribe_topics()
self.async_write_ha_state()
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
template = self._config.get(CONF_VALUE_TEMPLATE)
if template is not None:
template.hass = self.hass
@callback
def message_received(msg):
"""Handle new MQTT messages."""
payload = msg.payload
# auto-expire enabled?
expire_after = self._config.get(CONF_EXPIRE_AFTER)
if expire_after is not None and expire_after > 0:
# Reset old trigger
if self._expiration_trigger:
self._expiration_trigger()
self._expiration_trigger = None
# Set new trigger
expiration_at = dt_util.utcnow() + timedelta(seconds=expire_after)
self._expiration_trigger = async_track_point_in_utc_time(
self.hass, self.value_is_expired, expiration_at
)
json_attributes = set(self._config[CONF_JSON_ATTRS])
if json_attributes:
self._attributes = {}
try:
json_dict = json.loads(payload)
if isinstance(json_dict, dict):
attrs = {
k: json_dict[k] for k in json_attributes & json_dict.keys()
}
self._attributes = attrs
else:
_LOGGER.warning("JSON result was not a dictionary")
except ValueError:
_LOGGER.warning("MQTT payload could not be parsed as JSON")
_LOGGER.debug("Erroneous JSON: %s", payload)
if template is not None:
payload = template.async_render_with_possible_json_value(
payload, self._state
)
self._state = payload
self.async_write_ha_state()
self._sub_state = await subscription.async_subscribe_topics(
self.hass,
self._sub_state,
{
"state_topic": {
"topic": self._config[CONF_STATE_TOPIC],
"msg_callback": message_received,
"qos": self._config[CONF_QOS],
}
},
)
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._sub_state = await subscription.async_unsubscribe_topics(
self.hass, self._sub_state
)
await MqttAttributes.async_will_remove_from_hass(self)
await MqttAvailability.async_will_remove_from_hass(self)
await MqttDiscoveryUpdate.async_will_remove_from_hass(self)
@callback
def value_is_expired(self, *_):
"""Triggered when value is expired."""
self._expiration_trigger = None
self._state = None
self.async_write_ha_state()
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the sensor."""
return self._config[CONF_NAME]
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return self._config.get(CONF_UNIT_OF_MEASUREMENT)
@property
def force_update(self):
"""Force update."""
return self._config[CONF_FORCE_UPDATE]
@property
def state(self):
"""Return the state of the entity."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def icon(self):
"""Return the icon."""
return self._config.get(CONF_ICON)
@property
def device_class(self) -> Optional[str]:
"""Return the device class of the sensor."""
return self._config.get(CONF_DEVICE_CLASS)
| 33.961977
| 88
| 0.65215
|
a3cc16dac95532cb156a11ef6b3e3e71d6ca4d85
| 12,290
|
py
|
Python
|
chess_engine.py
|
Rakshasl/pythondemo
|
e00d51edc7ff4b1b7e82b5cbd533852a4bd09796
|
[
"MIT"
] | null | null | null |
chess_engine.py
|
Rakshasl/pythondemo
|
e00d51edc7ff4b1b7e82b5cbd533852a4bd09796
|
[
"MIT"
] | 1
|
2021-11-15T17:48:39.000Z
|
2021-11-15T17:48:39.000Z
|
chess_engine.py
|
Rakshasl/ChessOnline
|
a55535e7565f06b272ab1f8d9b5a79dfdfe4c993
|
[
"MIT"
] | null | null | null |
import chess
import random
import signal
import time
import cProfile
class Engine:
def __init__(self, fen):
self.board = chess.Board()
self.MAX_DEPTH = 60
self.piece_values = {
# pawn
1:100,
# bishop
2:310,
# knight
3:300,
# rook
4:500,
# queen
5:900,
# king
6:99999
}
self.square_table = square_table = {
1: [
0, 0, 0, 0, 0, 0, 0, 0,
50, 50, 50, 50, 50, 50, 50, 50,
10, 10, 20, 30, 30, 20, 10, 10,
5, 5, 10, 25, 25, 10, 5, 5,
0, 0, 0, 20, 20, 0, 0, 0,
5, -5, -10, 0, 0, -10, -5, 5,
5, 10, 10, -20, -20, 10, 10, 5,
0, 0, 0, 0, 0, 0, 0, 0
],
2: [
-50, -40, -30, -30, -30, -30, -40, -50,
-40, -20, 0, 0, 0, 0, -20, -40,
-30, 0, 10, 15, 15, 10, 0, -30,
-30, 5, 15, 20, 20, 15, 5, -30,
-30, 0, 15, 20, 20, 15, 0, -30,
-30, 5, 10, 15, 15, 10, 5, -30,
-40, -20, 0, 5, 5, 0, -20, -40,
-50, -40, -30, -30, -30, -30, -40, -50,
],
3: [
-20, -10, -10, -10, -10, -10, -10, -20,
-10, 0, 0, 0, 0, 0, 0, -10,
-10, 0, 5, 10, 10, 5, 0, -10,
-10, 5, 5, 10, 10, 5, 5, -10,
-10, 0, 10, 10, 10, 10, 0, -10,
-10, 10, 10, 10, 10, 10, 10, -10,
-10, 5, 0, 0, 0, 0, 5, -10,
-20, -10, -10, -10, -10, -10, -10, -20,
],
4: [
0, 0, 0, 0, 0, 0, 0, 0,
5, 10, 10, 10, 10, 10, 10, 5,
-5, 0, 0, 0, 0, 0, 0, -5,
-5, 0, 0, 0, 0, 0, 0, -5,
-5, 0, 0, 0, 0, 0, 0, -5,
-5, 0, 0, 0, 0, 0, 0, -5,
-5, 0, 0, 0, 0, 0, 0, -5,
0, 0, 0, 5, 5, 0, 0, 0
],
5: [
-20, -10, -10, -5, -5, -10, -10, -20,
-10, 0, 0, 0, 0, 0, 0, -10,
-10, 0, 5, 5, 5, 5, 0, -10,
-5, 0, 5, 5, 5, 5, 0, -5,
0, 0, 5, 5, 5, 5, 0, -5,
-10, 5, 5, 5, 5, 5, 0, -10,
-10, 0, 5, 0, 0, 0, 0, -10,
-20, -10, -10, -5, -5, -10, -10, -20
],
6: [
-30, -40, -40, -50, -50, -40, -40, -30,
-30, -40, -40, -50, -50, -40, -40, -30,
-30, -40, -40, -50, -50, -40, -40, -30,
-30, -40, -40, -50, -50, -40, -40, -30,
-20, -30, -30, -40, -40, -30, -30, -20,
-10, -20, -20, -20, -20, -20, -20, -10,
20, 20, 0, 0, 0, 0, 20, 20,
20, 30, 10, 0, 0, 10, 30, 20
]
}
self.board.set_fen(fen)
self.leaves_reached = 0
def random_response(self):
response = random.choice(list(self.board.legal_moves))
return str(response)
def material_eval(self):
score = 0
# iterate through the pieces
for i in range(1, 7):
score += len(self.board.pieces(i, chess.WHITE)) * self.piece_values[i]
score -= len(self.board.pieces(i, chess.BLACK)) * self.piece_values[i]
return score
def position_eval(self):
score = 0
# iterate through the pieces
for i in range(1, 7):
# eval white pieces
w_squares = self.board.pieces(i, chess.WHITE)
score += len(w_squares) * self.piece_values[i]
for square in w_squares:
                # flip the square vertically for white: the tables are listed
                # with rank 8 first, while python-chess numbers squares from a1
                score += self.square_table[i][square ^ 56]
b_squares = self.board.pieces(i, chess.BLACK)
score -= len(b_squares) * self.piece_values[i]
for square in b_squares:
score -= self.square_table[i][square]
return score
def minimax(self, depth, move, maximiser):
if depth == 0:
# return move, self.material_eval()
return move, self.position_eval()
if maximiser:
best_move = None
best_score = -9999
moves = list(self.board.legal_moves)
for move in moves:
self.leaves_reached += 1
self.board.push(move)
new_move, new_score = self.minimax(depth - 1, move, False)
if new_score > best_score:
best_score, best_move = new_score, move
self.board.pop()
return best_move, best_score
if not maximiser:
best_move = None
best_score = 9999
moves = list(self.board.legal_moves)
for move in moves:
self.leaves_reached += 1
self.board.push(move)
new_move, new_score = self.minimax(depth - 1, move, True)
if new_score < best_score:
best_score, best_move = new_score, move
self.board.pop()
return best_move, best_score
def alpha_beta(self, depth_neg, depth_pos, move, alpha, beta, prev_moves, maximiser):
move_sequence = []
# check if we're at the final search depth
if depth_neg == 0:
# return move, self.material_eval()
move_sequence.append(move)
return move_sequence, self.position_eval()
moves = list(self.board.legal_moves)
# moves = self.order_moves()
# if there are no legal moves, check for checkmate / stalemate
if not moves:
if self.board.is_checkmate():
if self.board.result() == "1-0":
move_sequence.append(move)
return move_sequence, 1000000
elif self.board.result() == "0-1":
move_sequence.append(move)
return move_sequence, -1000000
else:
move_sequence.append(move)
return move_sequence, 0
# initialise best move variables. What are these used for again? I need to simplify the logic here.
best_move = None
best_score = -10000001 if maximiser else 10000001
# put the last calculated best move in first place of the list. Hopefully this improves pruning.
if prev_moves and len(prev_moves) >= depth_neg:
if depth_neg == 4 and not self.board.turn:
print(prev_moves[depth_neg - 1])
if prev_moves[depth_neg - 1] in moves:
# if prev_moves[depth_neg - 1] in self.board.legal_moves:
# if not self.board.turn:
# print(prev_moves[depth_neg - 1])
moves.insert(0, prev_moves[depth_neg - 1])
if maximiser:
for move in moves:
self.leaves_reached += 1
# get score of the new move, record what it is
self.board.push(move)
new_sequence, new_score = self.alpha_beta(depth_neg - 1, depth_pos + 1, move, alpha, beta, prev_moves, False)
self.board.pop()
# Check whether the new score is better than the best score. If so, replace the best score.
if new_score > best_score:
move_sequence = new_sequence
best_score, best_move = new_score, move
# Check whether the new score is better than the beta. If it is, return and break the loop.
# Need to rethink the check against best here.
if new_score >= beta:
# self.check_against_best(best_move, best_score, depth_pos, True)
move_sequence.append(best_move)
return move_sequence, best_score
# Update alpha - upper bound
if new_score > alpha:
alpha = new_score
# return the best of the results
# self.check_against_best(best_move, best_score, depth_pos, True)
move_sequence.append(best_move)
return move_sequence, best_score
if not maximiser:
for move in moves:
self.leaves_reached += 1
# get score of the new move, record what it is
self.board.push(move)
new_sequence, new_score = self.alpha_beta(depth_neg - 1, depth_pos + 1, move, alpha, beta, prev_moves, True)
self.board.pop()
# Check whether the new score is better than the best score. If so, replace the best score.
if new_score < best_score:
move_sequence = new_sequence
best_score, best_move = new_score, move
# Check whether the new score is better than the alpha. If it is, return and break the loop
if new_score <= alpha:
# self.check_against_best(best_move, best_score, depth_pos, False)
move_sequence.append(best_move)
return move_sequence, best_score
# update beta - lower bound
if new_score < beta:
beta = new_score
# return the best of the results
# self.check_against_best(best_move, best_score, depth_pos, False)
move_sequence.append(best_move)
return move_sequence, best_score
def calculate_minimax(self, depth):
# This shows up true for white & false for black
maximiser = self.board.turn
best_move, best_score = self.minimax(depth, None, maximiser)
return str(best_move)
def calculate_ab(self, depth):
maximiser = self.board.turn
move_sequence, best_score = self.alpha_beta(depth, 0, None, -10000001, 10000001, None, maximiser)
for i in range(1, len(move_sequence)):
print("move", move_sequence[-i])
return str(move_sequence[-1])
def total_leaves(self):
leaves = self.leaves_reached
self.leaves_reached = 0
return leaves
def order_moves(self):
moves = list(self.board.legal_moves)
scores = []
for move in moves:
self.board.push(move)
# scores.append(self.material_eval())
scores.append(self.material_eval())
self.board.pop()
sorted_indexes = sorted(range(len(scores)), key=lambda i: scores[i], reverse=False)
return [moves[i] for i in sorted_indexes]
def iterative_deepening(self, depth):
# depth_neg, depth_pos, move, alpha, beta, prev_moves, maximiser)
move_list, score = self.alpha_beta(1, 0, None, -10000001, 10000001, None, self.board.turn)
for i in range(2, depth + 1):
print("Iteration", i)
move_list, score = self.alpha_beta(i, 0, None, -10000001, 10000001, move_list, self.board.turn)
print("Depth calculated:", len(move_list))
return str(move_list[-1])
# This is being used for testing at the moment, which is why there is so much commented code.
# Will move to a standalone testing script when I get the chance.
if __name__=="__main__":
fen = "r2qkbr1/ppp1pppp/2n1b2n/8/8/5P2/PPPP2PP/RNB1KBNR b KQq - 0 6"
newengine = Engine(fen)
# squares = newengine.board.pieces(1, chess.WHITE)
# for square in squares:
# print (square)
# print(squares)
# print(newengine.board)
# print(newengine.order_moves())
# print(newengine.material_eval())
# print(newengine.lazy_eval())
# start_time = time.time()
# print(newengine.calculate(3))
# print(newengine.total_leaves())
# print("Time taken:", time.time() - start_time)
start_time = time.time()
print(newengine.calculate_ab(4))
print(newengine.total_leaves())
print("Time taken:", time.time() - start_time)
start_time = time.time()
print(newengine.iterative_deepening(4))
print(newengine.total_leaves())
print("Time taken:", time.time() - start_time)
# cProfile.run('newengine.calculate(3)')
#
# cProfile.run('newengine.calculate_ab(3)')
# print(newengine.board)
| 35.2149
| 125
| 0.507404
|
70d58285adadea482658737d1b93f6846db6923c
| 1,213
|
py
|
Python
|
snippets/02_analize_shape_data.py
|
madewithbytes/heathmap_mappa
|
3c302151ec6dcd268b00c8c95e016daa1e618e7b
|
[
"MIT"
] | null | null | null |
snippets/02_analize_shape_data.py
|
madewithbytes/heathmap_mappa
|
3c302151ec6dcd268b00c8c95e016daa1e618e7b
|
[
"MIT"
] | null | null | null |
snippets/02_analize_shape_data.py
|
madewithbytes/heathmap_mappa
|
3c302151ec6dcd268b00c8c95e016daa1e618e7b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import pandas as pd
import shapefile as shp
def read_shape_file(*, file_path, encoding="ISO-8859-1"):
"""Reads the shape file and specifies shp file encoding."""
return shp.Reader(file_path, encoding=encoding)
def prepare_data_frame(*, shape_file):
"""Transforms the shapefile into a panda's dataframe object.
This object will contain the column values and data points of the shape."""
column_names = [r[0] for r in shape_file.fields][1:]
records = shape_file.records()
shape_points = [s.points for s in shape_file.shapes()]
data_frame = pd.DataFrame(columns=column_names, data=records)
data_frame = data_frame.assign(coords=shape_points)
return data_frame
def inspect_data_frame(*, data_frame):
"""Prints all States and Values available in the DataFrame object"""
print("Available states: {}".format(set(data_frame.EDO_LEY)))
print("Available values: {}".format(set(data_frame.DPHLIL_LEY)))
def main():
shape_file = read_shape_file(file_path="./data/PHLITL_2000/PHLITL_2000.shp")
data_frame = prepare_data_frame(shape_file=shape_file)
inspect_data_frame(data_frame=data_frame)
if __name__ == "__main__":
main()
| 32.783784
| 80
| 0.73042
|
9a26b95bb822f1ffd83a6cb1c7c39209d3324e28
| 19,111
|
py
|
Python
|
pyani/download.py
|
b-brankovics/pyani
|
d203f764e6e40dfcf1462c127c12ad0d71fd8357
|
[
"MIT"
] | null | null | null |
pyani/download.py
|
b-brankovics/pyani
|
d203f764e6e40dfcf1462c127c12ad0d71fd8357
|
[
"MIT"
] | null | null | null |
pyani/download.py
|
b-brankovics/pyani
|
d203f764e6e40dfcf1462c127c12ad0d71fd8357
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# (c) The James Hutton Institute 2016-2019
# (c) University of Strathclyde 2019-2020
# Author: Leighton Pritchard
#
# Contact:
# leighton.pritchard@strath.ac.uk
#
# Leighton Pritchard,
# Strathclyde Institute for Pharmacy and Biomedical Sciences,
# Cathedral Street,
# Glasgow,
# G1 1XQ
# Scotland,
# UK
#
# The MIT License
#
# Copyright (c) 2016-2019 The James Hutton Institute
# Copyright (c) 2019 University of Strathclyde
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Module providing functions useful for downloading genomes from NCBI."""
import hashlib
import logging
import re
import shlex
import subprocess
import sys
import traceback
import urllib.request
from namedlist import namedlist
from pathlib import Path
from subprocess import CompletedProcess
from typing import Any, Dict, List, NamedTuple, Optional, Tuple
from urllib.error import HTTPError, URLError
from Bio import Entrez # type: ignore
from tqdm import tqdm # type: ignore
from pyani.pyani_tools import termcolor
# Regular expression for NCBI taxon numbers
TAXONREGEX = re.compile(r"([0-9]\,?){1,}")
# Custom exceptions
class NCBIDownloadException(Exception):
"""General exception for failed NCBI download."""
def __init__(self, msg: str = "Error downloading file from NCBI"):
"""Instantiate class."""
Exception.__init__(self, msg)
class FileExistsException(Exception):
"""A specified file exists."""
def __init__(self, msg: str = "Specified file exists"):
"""Instantiate class."""
Exception.__init__(self, msg)
class ASMIDs(NamedTuple):
"""Matching Assembly ID information for a query taxID."""
query: str
result_count: int
asm_ids: List[str]
class Classification(NamedTuple):
"""Taxonomic classification for an isolate."""
organism: str
genus: str
species: str
strain: str
class DLFileData(NamedTuple):
"""Convenience struct for file download data."""
filestem: str
ftpstem: str
suffix: str
class Hashstatus(NamedTuple):
"""Status report on file hash comparison."""
passed: bool
localhash: str
filehash: str
class DLStatus:
"""Download status data."""
def __init__(
self,
url: str,
hashurl: str,
outfname: Path,
outfhash: Path,
skipped: bool,
error: Optional[str] = None,
):
self.url = url
self.hashurl = hashurl
self.outfname = outfname
self.outfhash = outfhash
self.skipped = skipped
self.error = error
def last_exception() -> str:
"""Return last exception as a string."""
exc_type, exc_value, exc_traceback = sys.exc_info()
return "".join(traceback.format_exception(exc_type, exc_value, exc_traceback))
def make_asm_dict(taxon_ids: List[str], retries: int) -> Dict:
"""Return a dict of assembly UIDs, keyed by passed taxon IDs.
    :param taxon_ids: list of NCBI taxonomy IDs
    :param retries: int, number of Entrez retry attempts
Takes the passed list of taxon IDs and calls get_asm_uids to generate
a dictionary linking each taxon ID to a list of assembly IDs at NCBI.
"""
asm_dict = dict()
for tid in taxon_ids:
asm_uids = get_asm_uids(tid, retries)
asm_dict[tid] = asm_uids.asm_ids
return asm_dict
def set_ncbi_email(email: str) -> None:
"""Set contact email for NCBI.
:param email: str, email address to give to Entrez at NCBI
"""
Entrez.email = email
Entrez.tool = "pyani.py"
def download_genome_and_hash(
outdir: Path,
timeout: int,
dlfiledata: DLFileData,
dltype: str = "RefSeq",
disable_tqdm: bool = False,
) -> DLStatus:
"""Download genome and accompanying MD5 hash from NCBI.
    :param outdir: Path to output directory for downloads
    :param timeout: int, timeout in seconds for the download attempt
    :param dlfiledata: DLFileData namedtuple describing the file to download
:param dltype: reference database to use: RefSeq or GenBank
:param disable_tqdm: disable progress bar
This function tries the (assumed to be passed) RefSeq FTP URL first and,
if that fails, then attempts to download the corresponding GenBank data.
We attempt to gracefully skip genomes with download errors.
"""
# Create logger
logger = logging.getLogger(__name__)
if dltype == "GenBank":
filestem = re.sub("^GCF_", "GCA_", dlfiledata.filestem)
else:
filestem = dlfiledata.filestem
dlstatus = retrieve_genome_and_hash(
filestem, dlfiledata.suffix, dlfiledata.ftpstem, outdir, timeout, disable_tqdm,
)
    # DLStatus is a plain class, so its attributes are statically visible
    if dlstatus.error is not None:
        logger.warning(termcolor("%s download failed: skipping!", "magenta"), dltype)
        logger.debug("Exception raised:\n%s", dlstatus.error)
        dlstatus.skipped = True
    return dlstatus
# Get results from NCBI web history, in batches
def entrez_batch_webhistory(
record, expected, batchsize, retries, *fnargs, **fnkwargs
) -> List[str]:
"""Recover the Entrez data from a prior NCBI webhistory search.
:param record: Entrez webhistory record
:param expected: number of expected search returns
:param batchsize: how many search returns to retrieve in a batch
:param retries: int
:param *fnargs: arguments to Efetch
:param **fnkwargs: keyword arguments to Efetch
Recovers results in batches of defined size, using Efetch.
Returns all results as a list.
"""
results = [] # type: List[Any]
for start in range(0, expected, batchsize):
batch_handle = entrez_retry(
Entrez.efetch,
retries,
retstart=start,
retmax=batchsize,
webenv=record["WebEnv"],
query_key=record["QueryKey"],
*fnargs,
**fnkwargs,
)
batch_record = Entrez.read(batch_handle, validate=False)
results.extend(batch_record)
return results
# Retry an Entrez query a specified number of times
def entrez_retry(func, retries, *fnargs, **fnkwargs):
"""Retry the passed function up to the number of times specified.
:param func: function to be executed
:param retries: int, number of times to retry function execution
:param *fnargs: optional arguments to passed function
:param **fnkwargs: optional keyword arguments to passed function
"""
tries, success = 0, False
while not success and tries < retries:
try:
output = func(*fnargs, **fnkwargs)
success = True
except (HTTPError, URLError):
tries += 1
if not success:
raise NCBIDownloadException("Too many Entrez failures")
return output
# Split a list of taxon ids into components, checking for correct formatting
def split_taxa(taxa: str) -> List[str]:
"""Return list of taxon ids from the passed comma-separated list.
:param taxa: str, comma-separated list of valid NCBI taxonomy IDs
The function checks the passed taxon argument against a regular expression
that permits comma-separated numerical symbols only.
"""
# Check format of passed taxa
match = TAXONREGEX.match(taxa)
if match is None or len(match.group()) != len(taxa):
raise ValueError("invalid taxon string: {0}".format(taxa))
return [taxon for taxon in taxa.split(",") if len(taxon)]
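# Illustrative sketch of split_taxa behaviour (hedged; not part of the
# original module):
#
#     >>> split_taxa("9606,562")
#     ['9606', '562']
#     >>> split_taxa("9606;562")  # non-numeric separator -> ValueError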
# Get assembly UIDs for the subtree rooted at the passed taxon
def get_asm_uids(taxon_uid: str, retries: int) -> ASMIDs:
"""Return set of NCBI UIDs associated with the passed taxon UID.
:param taxon_uid: str, NCBI taxID for taxon to download
:param retries: int, number of download retry attempts
This query at NCBI returns all assemblies for the taxon subtree
rooted at the passed taxon_uid.
"""
query = "txid%s[Organism:exp]" % taxon_uid
# Perform initial search for assembly UIDs with taxon ID as query.
# Use NCBI history for the search.
handle = entrez_retry(
Entrez.esearch, retries, db="assembly", term=query, format="xml", usehistory="y"
)
record = Entrez.read(handle, validate=False)
result_count = int(record["Count"])
# Recover assembly UIDs from the web history
asm_ids = entrez_batch_webhistory(
record, result_count, 250, retries, db="assembly", retmode="xml"
)
return ASMIDs(query, result_count, asm_ids)
# Get a filestem from Entrez eSummary data
def extract_filestem(esummary) -> str:
"""Extract filestem from Entrez eSummary data.
    :param esummary: Entrez eSummary record for a single assembly
Function expects esummary['DocumentSummarySet']['DocumentSummary'][0]
Some illegal characters may occur in AssemblyName - for these, a more
    robust regex replace/escape may be required. Sadly, NCBI doesn't just
    use standard percent escapes, but instead replaces certain
characters with underscores: white space, slash, comma, hash, brackets.
"""
escapes = re.compile(r"[\s/,#\(\)]")
escname = re.sub(escapes, "_", esummary["AssemblyName"])
return "_".join([esummary["AssemblyAccession"], escname])
# Get eSummary data for a single assembly UID
def get_ncbi_esummary(asm_uid, retries, api_key=None) -> Tuple:
"""Obtain full eSummary info for the passed assembly UID.
    :param asm_uid: NCBI assembly UID to summarise
    :param retries: int, number of Entrez retry attempts
    :param api_key: optional NCBI API key
"""
# Obtain full eSummary data for the assembly
summary = Entrez.read(
entrez_retry(
Entrez.esummary,
retries,
db="assembly",
id=asm_uid,
report="full",
api_key=api_key,
),
validate=False,
)
# Extract filestem from assembly data
data = summary["DocumentSummarySet"]["DocumentSummary"][0]
filestem = extract_filestem(data)
return (data, filestem)
# Get the taxonomic classification strings for eSummary data
def get_ncbi_classification(esummary) -> Classification:
"""Return organism, genus, species, strain info from eSummary data.
    :param esummary: Entrez eSummary record for a single assembly
"""
# Extract species/strain info
organism = esummary["SpeciesName"]
try:
strain = esummary["Biosource"]["InfraspeciesList"][0]["Sub_value"]
except (KeyError, IndexError):
# we consider this an error/incompleteness in the NCBI metadata
strain = ""
genus, species = organism.split(" ", 1)
return Classification(organism, genus, species, strain)
# Given a remote filestem, generate URIs for download
def compile_url(filestem: str, suffix: str, ftpstem: str) -> Tuple[str, str]:
"""Compile download URLs given a passed filestem.
    :param filestem: str, filestem of the remote assembly files
    :param suffix: str, suffix of the remote file to download
    :param ftpstem: str, base URL of the NCBI genomes FTP site
The filestem corresponds to <AA>_<AN>, where <AA> and <AN> are
AssemblyAccession and AssemblyName: data fields in the eSummary record.
These correspond to downloadable files for each assembly at
ftp://ftp.ncbi.nlm.nih.gov/genomes/all/GC[AF]/nnn/nnn/nnn/<AA>_<AN>/
where <AA> is AssemblyAccession, and <AN> is AssemblyName. The choice
of GCA vs GCF, and the values of nnn, are derived from <AA>
The files in this directory all have the stem <AA>_<AN>_<suffix>, where
suffixes are:
assembly_report.txt
assembly_stats.txt
feature_table.txt.gz
genomic.fna.gz
genomic.gbff.gz
genomic.gff.gz
protein.faa.gz
protein.gpff.gz
rm_out.gz
rm.run
wgsmaster.gbff.gz
"""
gcstem, acc, _ = tuple(filestem.split("_", 2))
aaval = acc.split(".")[0]
    subdirs = "/".join([aaval[i : i + 3] for i in range(0, len(aaval), 3)])
url = f"{ftpstem}/{gcstem}/{subdirs}/{filestem}/{filestem}_{suffix}"
hashurl = f"{ftpstem}/{gcstem}/{subdirs}/{filestem}/md5checksums.txt"
return (url, hashurl)
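# Worked example for compile_url (hedged illustration; the accession is
# hypothetical): for filestem "GCF_000091085.2_ASM9108v2", suffix
# "genomic.fna.gz" and ftpstem "ftp://ftp.ncbi.nlm.nih.gov/genomes/all",
# the accession digits "000091085" split into "000/091/085", giving
#     url     = ".../all/GCF/000/091/085/GCF_000091085.2_ASM9108v2/GCF_000091085.2_ASM9108v2_genomic.fna.gz"
#     hashurl = ".../all/GCF/000/091/085/GCF_000091085.2_ASM9108v2/md5checksums.txt"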
# Download a remote file to the specified directory
def download_url(
url: str, outfname: Path, timeout: int, disable_tqdm: bool = False
) -> None:
"""Download remote URL to a local directory.
:param url: URL of remote file for download
:param outfname: Path, path to write output
    :param timeout: int, socket timeout in seconds for the connection
:param disable_tqdm: Boolean, show tqdm progress bar?
This function downloads the contents of the passed URL to the passed
filename, in buffered chunks
"""
# Open connection, and get expected filesize
req = urllib.request.Request(url)
    with urllib.request.urlopen(req, timeout=timeout) as response:
fsize = int(response.info().get("Content-length"))
# Define buffer sizes
bsize = 1048576 # buffer size
fsize_dl = 0 # bytes downloaded
# Download file
with open(outfname, "wb") as ofh:
with tqdm(total=fsize, disable=disable_tqdm, desc=outfname.name) as pbar:
while True:
buffer = response.read(bsize)
if not buffer:
break
fsize_dl += len(buffer)
ofh.write(buffer)
                    pbar.update(len(buffer))
# Construct filepaths for downloaded files and their hashes
def construct_output_paths(
filestem: str, suffix: str, outdir: Path
) -> Tuple[Path, Path]:
"""Construct paths to output files for genome and hash.
:param filestem: str, output filename stem
:param suffix: str, output filename suffix
:param outdir: Path, path to output directory
"""
outfname = outdir / "_".join([filestem, suffix])
outfhash = outdir / "_".join([filestem, "hashes.txt"])
return (outfname, outfhash)
# Download a remote genome from NCBI and its MD5 hash
def retrieve_genome_and_hash(
filestem: str,
suffix: str,
ftpstem: str,
outdir: Path,
timeout: int,
disable_tqdm: bool = False,
) -> DLStatus:
"""Download genome contigs and MD5 hash data from NCBI.
    :param filestem: str, filestem of the remote files
    :param suffix: str, suffix of the remote genome file
    :param ftpstem: str, base URL of the NCBI genomes FTP site
    :param outdir: Path, directory for downloaded files
    :param timeout: int, timeout in seconds for each download attempt
:param disable_tqdm: Boolean, show tqdm progress bar?
"""
skipped = False # Flag - set True if we skip download for existing file
error = None # Text of last-raised error
# Construct remote URLs and output filenames
url, hashurl = compile_url(filestem, suffix, ftpstem)
outfname, outfhash = construct_output_paths(filestem, suffix, outdir)
# Download the genome sequence and corresponding hash file
try:
download_url(url, outfname, timeout, disable_tqdm)
download_url(hashurl, outfhash, timeout, disable_tqdm)
except IOError:
error = last_exception()
return DLStatus(url, hashurl, outfname, outfhash, skipped, error)
# Check the file hash against the downloaded hash
def check_hash(fname: Path, hashfile: Path) -> Hashstatus:
"""Check MD5 of passed file against downloaded NCBI hash file.
    :param fname: Path, path to the local downloaded file to check
:param hashfile: Path, path to NCBI hash file
"""
filehash = ""
passed = False # Flag - set to True if the hash matches
# Generate MD5 hash
localhash = create_hash(fname)
# Get hash from file
filehash = extract_hash(hashfile, fname.name)
# Check for match
if filehash == localhash:
passed = True
return Hashstatus(passed, localhash, filehash)
# Extract contigs from a compressed file, using gunzip
def extract_contigs(fname: Path, ename: Path) -> CompletedProcess:
"""Extract contents of fname to ename using gunzip.
:param fname: str, path to input compressed file
:param ename: str, path to output uncompressed file
Returns status of subprocess call
"""
cmd = ["gunzip", "-c", shlex.quote(str(fname))]
with open(ename, "w") as efh:
return subprocess.run(cmd, stdout=efh, check=True, shell=False)
# Using a genomes UID, create class and label text files
def create_labels(
classification: Classification, filestem: str, genomehash: str
) -> Tuple[str, str]:
r"""Return class and label text from UID classification.
:param classification: Classification named tuple (org, genus, species, strain)
:param filestem: str, filestem of input genome file
:param genomehash: str, MD5 hash of genome data
The 'class' data is the organism as provided in the passed Classification
named tuple; the 'label' data is genus, species and strain information
from the same tuple. The label is intended to be human-readable, the class
data to be a genuine class identifier.
Returns a tuple of two strings: (label, class).
The two strings are tab-separated strings: <HASH>\t<FILE>\t<CLASS/LABEL>.
The hash is used to help uniquely identify the genome in the database
(label/class is unique by a combination of hash and run ID).
"""
return (
(
f"{genomehash}\t{filestem}_genomic\t{classification.genus[0] + '.'} "
f"{classification.species} {classification.strain}"
),
f"{genomehash}\t{filestem}_genomic\t{classification.organism}",
)
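# Worked example for create_labels (hedged illustration with hypothetical
# values): given Classification("Escherichia coli", "Escherichia", "coli",
# "K-12"), filestem "GCF_000005845.2_ASM584v2" and an MD5 string <HASH>, the
# returned pair is
#     label: "<HASH>\tGCF_000005845.2_ASM584v2_genomic\tE. coli K-12"
#     class: "<HASH>\tGCF_000005845.2_ASM584v2_genomic\tEscherichia coli"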
# Create an MD5 hash for the passed genome
def create_hash(fname: Path) -> str:
"""Return MD5 hash of the passed file contents.
:param fname: Path, path to file for hashing
We can ignore the Bandit B303 error as we're not using the hash for
cryptographic purposes.
"""
hash_md5 = hashlib.md5() # nosec
with open(fname, "rb") as fhandle:
for chunk in iter(lambda: fhandle.read(65536), b""):
hash_md5.update(chunk)
return hash_md5.hexdigest()
# Create an MD5 hash for the passed genome
def extract_hash(hashfile: Path, name: str) -> str:
"""Return MD5 hash from file of name:MD5 hashes.
:param hashfile: Path, path to file containing name:MD5 pairs
:param name: str, name associated with hash
"""
filehash = None
with open(hashfile, "r") as hhandle:
for line in [_.strip().split() for _ in hhandle if len(_.strip())]:
if Path(line[1]).name == name: # hash filename
filehash = line[0]
return str(filehash)
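# Worked example for extract_hash (hedged illustration): an NCBI
# md5checksums.txt line looks like
#     d41d8cd98f00b204e9800998ecf8427e  ./GCF_000005845.2_ASM584v2_genomic.fna.gz
# so extract_hash(hashfile, "GCF_000005845.2_ASM584v2_genomic.fna.gz") would
# return "d41d8cd98f00b204e9800998ecf8427e".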
| 31.904841
| 88
| 0.676888
|
24c996ca99bf9a2641c8b9ae0f52217e415c9f6f
| 3,497
|
py
|
Python
|
python-codes/0-k-means/simpleKMeans.py
|
bakhshiali/python-AIscratch
|
c7a9d14a8d0a7624379c436816905a64b8ee0dc9
|
[
"BSD-3-Clause"
] | null | null | null |
python-codes/0-k-means/simpleKMeans.py
|
bakhshiali/python-AIscratch
|
c7a9d14a8d0a7624379c436816905a64b8ee0dc9
|
[
"BSD-3-Clause"
] | null | null | null |
python-codes/0-k-means/simpleKMeans.py
|
bakhshiali/python-AIscratch
|
c7a9d14a8d0a7624379c436816905a64b8ee0dc9
|
[
"BSD-3-Clause"
] | null | null | null |
#simple k-means
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import style
style.use('fivethirtyeight')
plt.rcParams['axes.facecolor']='w'
from matplotlib.lines import Line2D
class KMeans:
'''
    k = number of groups / clusters (group)
    tolerance = maximum centroid movement still treated as converged (tol)
    iteration = maximum number of repetitions of the fitting process
'''
    ##you could use other tolerance stop criteria instead, such as:
    #error : ((actual - forecast) / forecast) * 100
#accuracy : 1-error
#Note : centroid, center of mass and geometric center could be different.
def __init__(self, group=2, maxTolerance=0.001, iteration=300):
self.k = group
self.tol = maxTolerance
self.iteration = iteration
self.fig = plt.figure('K-Means PLOT',figsize=(9, 6))
self.ax = self.fig.add_subplot(111)#1*1 grid , no.1
self.colors = 200*["r","g","b","k","c"]
def fit(self,data):
self.centroids = {}
#start with first k data as centroids
self.centroids={i:data[i] for i in range(self.k)}
for _ in range(self.iteration):
self.classes={i:[] for i in range(self.k)}
for j in data:#j : featureset
distances = [np.linalg.norm(j-self.centroids[i]) for i in self.centroids]
self.classes[np.argmin(distances)].append(j)#min as cluster
            pc = self.centroids  # pc : prev_centroids (self.centroids is rebound below)
            self.centroids = {i: np.average(self.classes[i], axis=0) for i in self.classes}
            print(self.centroids)
            # converged once no centroid has moved by more than tol in any direction
            moved = [c for c in self.centroids
                     if np.sum(np.abs(self.centroids[c] - pc[c])) > self.tol]
            if not moved: break  # optimum reached
def predict(self,data):
distances = [np.linalg.norm(data-self.centroids[i]) for i in self.centroids]
self.ax.scatter(data[0], data[1], marker="*",
color=self.colors[np.argmin(distances)], s=150, linewidths=2)
return np.argmin(distances)
def visualize(self):
        for centroid in self.centroids:
            self.ax.scatter(self.centroids[centroid][0], self.centroids[centroid][1],
                        marker="$C$", color="k", s=100, linewidths=2)
        for j in self.classes:
            [plt.scatter(i[0],i[1],marker="x",color=self.colors[j],s=150,linewidth=2) for i in self.classes[j]]
self.ax.set_title('K-Means clustering, untagged data',fontsize=14)
self.ax.set_xlabel('X1',fontsize=12)
self.ax.set_ylabel('X2',fontsize=12)
customLines = [Line2D([0], [0], color='w', marker='*',
markersize=15,markerfacecolor='k'),
Line2D([0], [0], color='w', marker='$x$',
markersize=15,markerfacecolor='k'),
Line2D([0], [0], color='w', marker='$C$',
markersize=15,markerfacecolor='k')]
self.ax.legend(customLines,['new data','data','Center'],
loc='upper center', shadow=True)
#define input data
X = np.array([[1,4],[1.5,1.8],[7,7],[8,8],
[1,0.6],[9,9],[0,2],[8,6],
[0,1],[3,8],[2,10],[0,10],[1,8],
[2,8]])
#call Kmeans functions
clf = KMeans(group=3,maxTolerance=0.001, iteration=300)
clf.fit(X)
clf.visualize()
newData = np.array([[1,3],[8,4],[0,3],[4,4],[3,6],[6,6],[4.5,7],[4.6,7]])
for unknown in newData:
clf.predict(unknown)
plt.show()
| 47.256757
| 111
| 0.5662
|
296b31c7dff619238bad6e8de523961de6ce548c
| 2,365
|
py
|
Python
|
pipreq/cli.py
|
jessamynsmith/pipreq
|
ddafbaba06ff19ae1c02a385d3463f6896d338f6
|
[
"MIT"
] | 6
|
2015-02-27T03:52:51.000Z
|
2020-07-13T10:14:38.000Z
|
pipreq/cli.py
|
jessamynsmith/requirements-manager
|
ddafbaba06ff19ae1c02a385d3463f6896d338f6
|
[
"MIT"
] | 5
|
2015-03-31T16:27:44.000Z
|
2015-04-16T19:16:29.000Z
|
pipreq/cli.py
|
jessamynsmith/requirements-manager
|
ddafbaba06ff19ae1c02a385d3463f6896d338f6
|
[
"MIT"
] | 2
|
2015-02-27T14:02:49.000Z
|
2015-03-31T00:54:54.000Z
|
import argparse
import pkg_resources
import sys
from pipreq.command import Command
def create_parser():
parser = argparse.ArgumentParser(
description='Manage Python package requirements across multiple environments using '
'per-environment requirements files.')
parser.add_argument('--version', action='store_true', default=False,
help="Show program's version number")
parser.add_argument('-g', '--generate', action='store_true', default=False,
help='Generate requirements files')
parser.add_argument('-c', '--create', action='store_true', default=False,
help='Create or update rc file (requires list of packages)')
parser.add_argument('-U', '--upgrade', action='store_true', default=False,
help='Upgrade packages (requires list of packages)')
parser.add_argument('-x', '--remove-extra', action='store_true', default=False,
help='Remove packages not in list (requires list of packages)')
parser.add_argument('-n', '--dry-run', action='store_true', default=False,
help='Don\'t actually make any changes; '
'only show what would have been done')
parser.add_argument('packages', nargs='?', type=argparse.FileType('r'), default=sys.stdin)
return parser
def verify_args(args):
    # XOR chains pass when three flags are set, so count the booleans to
    # require exactly one mode flag
    option_count = sum(map(bool, (args.create, args.generate, args.upgrade, args.remove_extra)))
    if option_count != 1:
        return 'Must specify exactly one of generate (-g) or create/upgrade/remove-extra (-[cUx]) with packages'
if args.dry_run and not args.remove_extra:
return '-n is only supported with -x'
return None
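# Illustrative sketch of verify_args (hypothetical Namespace; not part of the
# original module):
#
#     ns = argparse.Namespace(create=True, generate=False, upgrade=False,
#                             remove_extra=False, dry_run=False)
#     verify_args(ns)   # -> None (valid: exactly one mode flag is set)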
def error(parser, message):
parser.print_help()
parser.exit(message="\nERROR: %s\n" % message)
def main():
try:
parser = create_parser()
parsed_args = parser.parse_args()
if parsed_args.version:
parser.exit("pipreq %s" % pkg_resources.require("pipreq")[0].version)
error_message = verify_args(parsed_args)
if error_message:
error(parser, error_message)
command = Command(parsed_args, ".requirementsrc")
command.run()
return 0
except KeyboardInterrupt:
sys.exit()
| 38.145161
| 99
| 0.630444
|
fb3e64ea7297afe54e7d5a6dc03d25765f07ae20
| 1,475
|
py
|
Python
|
tools/linux/PRESUBMIT.py
|
zealoussnow/chromium
|
fd8a8914ca0183f0add65ae55f04e287543c7d4a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 14,668
|
2015-01-01T01:57:10.000Z
|
2022-03-31T23:33:32.000Z
|
tools/linux/PRESUBMIT.py
|
zealoussnow/chromium
|
fd8a8914ca0183f0add65ae55f04e287543c7d4a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 86
|
2015-10-21T13:02:42.000Z
|
2022-03-14T07:50:50.000Z
|
tools/linux/PRESUBMIT.py
|
zealoussnow/chromium
|
fd8a8914ca0183f0add65ae55f04e287543c7d4a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 5,941
|
2015-01-02T11:32:21.000Z
|
2022-03-31T16:35:46.000Z
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for linux.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details on the presubmit API built into depot_tools.
"""
USE_PYTHON3 = True
def CommonChecks(input_api, output_api):
import sys
def join(*args):
return input_api.os_path.join(input_api.PresubmitLocalPath(), *args)
output = []
sys_path_backup = sys.path
try:
sys.path = [
join('..', 'linux'),
] + sys.path
output.extend(input_api.canned_checks.RunPylint(input_api, output_api))
finally:
sys.path = sys_path_backup
output.extend(
input_api.canned_checks.RunUnitTestsInDirectory(
input_api,
output_api,
input_api.os_path.join(input_api.PresubmitLocalPath(), 'tests'),
files_to_check=[r'.+_tests\.py$'],
skip_shebang_check=True))
if input_api.is_committing:
output.extend(input_api.canned_checks.PanProjectChecks(input_api,
output_api,
owners_check=False))
return output
def CheckChangeOnUpload(input_api, output_api):
return CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return CommonChecks(input_api, output_api)
| 29.5
| 79
| 0.672542
|
a4419c50143411206fe624d174d9ceac696b2611
| 438
|
py
|
Python
|
BIZa/2014/DubsAlexandra/task_5_47.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
BIZa/2014/DubsAlexandra/task_5_47.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
BIZa/2014/DubsAlexandra/task_5_47.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
# Task 5. Variant 47
# Write a program that, when run, randomly displays the name of one of Earth's four oceans.
# Dubs A. E.
# 14.04.2016
import random
print("This program randomly displays the name of one of Earth's four oceans")
oceans = ('Pacific Ocean', 'Arctic Ocean', 'Indian Ocean', 'Atlantic Ocean')
print(random.choice(oceans))
input("\n\nPress Enter to exit.")
| 27.375
| 115
| 0.76484
|
571229d6295f819133f1966522a23f5897445965
| 4,952
|
py
|
Python
|
src/lightcontrol/lib.py
|
greyltc/lightcontrol
|
d80c92d0c9aad7eed69ff0f072f200aecb1b7218
|
[
"MIT"
] | null | null | null |
src/lightcontrol/lib.py
|
greyltc/lightcontrol
|
d80c92d0c9aad7eed69ff0f072f200aecb1b7218
|
[
"MIT"
] | null | null | null |
src/lightcontrol/lib.py
|
greyltc/lightcontrol
|
d80c92d0c9aad7eed69ff0f072f200aecb1b7218
|
[
"MIT"
] | null | null | null |
import random
import socket
import threading
import struct
import time
class Datagetter(object):
"""
gets one data point at a time
"""
_dtype = None # "random" or "thermal"
_zone = None
_entered = None
temp_file_name = None
temp_file_object = None
_last_val = None
delay = 0.001
def __init__(self, dtype="random", zone=1, delay=0.001):
self.delay = delay
self.zone = zone
self.dtype = dtype
        self._last_val = None
self._stop_doer = threading.Event()
self._want_new = threading.Event()
def __enter__(self):
self._entered = True
if self._dtype == "thermal":
self.temp_file_object = open(self.temp_file_name, "r")
self._socket, self.socket = socket.socketpair()
# self._reader, self._writer = await asyncio.open_connection(sock=_socket)
        self._last_val = None
self._stop_doer.clear()
self._want_new.clear()
self._doer = threading.Thread(group=None, target=self._socket_loop, daemon=True)
self._doer.start()
return self
def __exit__(self, type, value, traceback):
self._entered = False
self._close_thermal_file()
self._close_sockets()
self._stop_doer.set()
self._doer.join()
self._stop_doer.clear()
def _close_thermal_file(self):
try:
self.temp_file_object.close()
except Exception as e:
pass
def _close_sockets(self):
try:
self._socket.close()
except Exception as e:
pass
try:
self.socket.close()
except Exception as e:
pass
def _update_thermal(self):
self.temp_file_name = f"/sys/class/thermal/thermal_zone{self._zone}/temp"
if hasattr(self.temp_file_object, "closed"):
            if not self.temp_file_object.closed:
self.temp_file_object.close()
        if self._entered:
self.temp_file_object = open(self.temp_file_name, "r")
def trigger_new(self):
self._want_new.set()
def _socket_loop(self):
while not self._stop_doer.is_set():
if self._want_new.wait(timeout=0.1): # this timeout is for how often we check for a stop request
self._want_new.clear()
self._socket.send(struct.pack("f", self.get))
@property
def zone(self):
return self._zone
@zone.setter
def zone(self, value):
if value != self._zone:
self._zone = value
self._update_thermal()
@property
def dtype(self):
return self._dtype
@dtype.setter
def dtype(self, value):
if value != self._dtype:
if value == "thermal":
self._dtype = value
self._update_thermal()
elif value == "random":
self._dtype = value
self._close_thermal_file()
else:
print("Warning: Unknown datatype")
@property
def get(self):
if self._dtype == "thermal":
try:
point_int = int(self.temp_file_object.readline())
self.temp_file_object.seek(0)
            except Exception:  # zone file unreadable or unparseable
point_int = float("nan")
elif self._dtype == "random":
point_int = random.randint(0, 100 * 1000)
else:
point_int = float("nan")
if self.delay > 0:
time.sleep(self.delay) # insert fake delay to avoid too much cpu
return point_int / 1000
@property
def thermaltype(self):
if self._dtype == "thermal":
try:
type_file = f"/sys/class/thermal/thermal_zone{self._zone}/type"
with open(type_file, "r") as fh:
type_str = fh.readline()
result = type_str.strip()
except Exception as e:
result = "Unknown"
elif self._dtype == "random":
result = "Random"
else:
result = "None"
return result
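# Illustrative usage sketch for Datagetter (hedged; not part of the original
# module). The worker thread packs each reading as a 4-byte float over a
# socketpair, so the consumer side reads it back with struct:
#
#     with Datagetter(dtype="random", delay=0.0) as dg:
#         dg.trigger_new()                                # request one data point
#         value = struct.unpack("f", dg.socket.recv(4))[0]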
class Downsampler(object):
"""
Feed this class high frequency data and every [factor]
samples it will return an average of the last [factor] samples
Can be used as an input filter to slow down datarate (and potentially increase precision)
"""
factor = 5
cache = []
def __init__(self, factor=5):
self.factor = factor
self.cache = []
def feed(self, input):
if isinstance(input, tuple) or isinstance(input, list):
self.cache += input
else:
self.cache.append(input)
n_samples = len(self.cache)
if n_samples >= self.factor: # the cache is full, compute and return the average
ret_val = sum(self.cache) / n_samples
self.cache = []
else:
ret_val = float("nan")
return ret_val
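if __name__ == "__main__":  # hedged usage sketch; not part of the original module
    ds = Downsampler(factor=3)
    for sample in (1.0, 2.0, 3.0):
        print(ds.feed(sample))  # nan, nan, then 2.0 (mean of the last 3 samples)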
| 28.959064
| 109
| 0.567246
|
18c78dc38b7a098bffee80c7ab1e3efa63965ac5
| 1,559
|
py
|
Python
|
src/Python/Tutorial/Advanced/non_blocking_visualization.py
|
felixfrank/Open3D
|
c45cb73c33e2a3917ff692747d3e692a2ffcb4d4
|
[
"MIT"
] | 1
|
2019-01-26T05:41:10.000Z
|
2019-01-26T05:41:10.000Z
|
src/Python/Tutorial/Advanced/non_blocking_visualization.py
|
fate3439/open3D
|
2698eac71993ad135acc17b5115d700735517197
|
[
"MIT"
] | null | null | null |
src/Python/Tutorial/Advanced/non_blocking_visualization.py
|
fate3439/open3D
|
2698eac71993ad135acc17b5115d700735517197
|
[
"MIT"
] | 1
|
2019-09-18T02:09:23.000Z
|
2019-09-18T02:09:23.000Z
|
# Open3D: www.open3d.org
# The MIT License (MIT)
# See license file or visit www.open3d.org for details
from open3d import *
import numpy as np
if __name__ == "__main__":
set_verbosity_level(VerbosityLevel.Debug)
source_raw = read_point_cloud("../../TestData/ICP/cloud_bin_0.pcd")
target_raw = read_point_cloud("../../TestData/ICP/cloud_bin_1.pcd")
source = voxel_down_sample(source_raw, voxel_size = 0.02)
target = voxel_down_sample(target_raw, voxel_size = 0.02)
trans = [[0.862, 0.011, -0.507, 0.0],
[-0.139, 0.967, -0.215, 0.7],
[0.487, 0.255, 0.835, -1.4],
[0.0, 0.0, 0.0, 1.0]]
source.transform(trans)
flip_transform = [[1, 0, 0, 0],
[0, -1, 0, 0],
[0, 0, -1, 0],
[0, 0, 0, 1]]
source.transform(flip_transform)
target.transform(flip_transform)
vis = Visualizer()
vis.create_window()
vis.add_geometry(source)
vis.add_geometry(target)
threshold = 0.05
icp_iteration = 100
save_image = False
for i in range(icp_iteration):
reg_p2l = registration_icp(source, target, threshold,
np.identity(4), TransformationEstimationPointToPlane(),
ICPConvergenceCriteria(max_iteration = 1))
source.transform(reg_p2l.transformation)
vis.update_geometry()
vis.reset_view_point(True)
vis.poll_events()
vis.update_renderer()
if save_image:
vis.capture_screen_image("temp_%04d.jpg" % i)
vis.destroy_window()
| 32.479167
| 71
| 0.621552
|
3af32e5568a83e7d2d11cfb1ac22e020ca043ea9
| 3,019
|
py
|
Python
|
torchelie/models/vgg.py
|
Vermeille/Torchelie
|
43957d83238372ae6436aac90127865c2040b76c
|
[
"MIT"
] | 117
|
2019-07-14T20:39:48.000Z
|
2021-10-17T19:16:48.000Z
|
torchelie/models/vgg.py
|
Vermeille/Torchelie
|
43957d83238372ae6436aac90127865c2040b76c
|
[
"MIT"
] | 41
|
2019-12-06T23:56:44.000Z
|
2021-08-02T09:13:30.000Z
|
torchelie/models/vgg.py
|
Vermeille/Torchelie
|
43957d83238372ae6436aac90127865c2040b76c
|
[
"MIT"
] | 13
|
2019-09-22T00:46:54.000Z
|
2021-04-09T15:53:15.000Z
|
import torch
import torch.nn as nn
import torchelie.nn as tnn
from torchelie.utils import kaiming
from typing import cast
from .classifier import ClassificationHead
from .registry import register
class VGG(tnn.CondSeq):
"""
    Construct a VGG-like model. The architecture is given as a list whose
    items are either a number of channels (a conv block) or 'M' (a maxpool).
    For example, this creates a standard VGG11 with 10 classes::
VGG([64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
10)
"""
def __init__(self, arch: list, num_classes: int) -> None:
super().__init__()
self.arch = arch
in_ch = 3
self.in_channels = in_ch
feats = tnn.CondSeq()
block_num = 1
conv_num = 1
for layer in arch:
if layer == 'M':
feats.add_module(f'pool_{block_num}', nn.MaxPool2d(2, 2))
block_num += 1
conv_num = 1
else:
ch = cast(int, layer)
feats.add_module(f'conv_{block_num}_{conv_num}',
tnn.ConvBlock(in_ch, ch, 3).remove_batchnorm())
in_ch = ch
conv_num += 1
self.out_channels = ch
self.features = feats
self.classifier = ClassificationHead(self.out_channels, num_classes)
self.classifier.to_vgg_style(4096)
def add_batchnorm(self, remove_first=False) -> 'VGG':
for m in self.features:
if isinstance(m, tnn.ConvBlock):
m.restore_batchnorm()
if remove_first:
self.features.conv_1_1.remove_batchnorm()
return self
def set_input_specs(self, in_channels: int) -> 'VGG':
c1 = self.features.conv_1_1
assert isinstance(c1, tnn.ConvBlock)
c1.conv = kaiming(tnn.Conv3x3(in_channels, c1.conv.out_channels))
return self
@register
def vgg11(num_classes: int) -> 'VGG':
return VGG([64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
num_classes)
@register
def vgg13(num_classes: int) -> 'VGG':
return VGG([
64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'
], num_classes)
@register
def vgg16(num_classes: int) -> 'VGG':
return VGG([
64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512,
512, 512, 'M'
], num_classes)
@register
def vgg19(num_classes: int) -> 'VGG':
return VGG([
64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512,
'M', 512, 512, 512, 512, 'M'
], num_classes)
@register
def vgg11_bn(num_classes: int) -> 'VGG':
return vgg11(num_classes).add_batchnorm()
@register
def vgg13_bn(num_classes: int) -> 'VGG':
return vgg13(num_classes).add_batchnorm()
@register
def vgg16_bn(num_classes: int) -> 'VGG':
return vgg16(num_classes).add_batchnorm()
@register
def vgg19_bn(num_classes: int) -> 'VGG':
return vgg19(num_classes).add_batchnorm()
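if __name__ == "__main__":  # hedged usage sketch; not part of the original module
    # Assumes the conventional 224x224 VGG input; the classification head is
    # expected to produce one logit per class.
    model = vgg16_bn(num_classes=10)
    out = model(torch.randn(1, 3, 224, 224))
    print(out.shape)  # expected: torch.Size([1, 10])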
| 26.955357
| 80
| 0.582312
|
38fa2b6c6697bc0c1b0fede00f6888e7fa10fe85
| 653
|
py
|
Python
|
Python/Mundo03/Exercicios/ex101.py
|
eStev4m/CursoPython
|
8b52a618e67c80d66518ef91c1d4596a2bfddc22
|
[
"MIT"
] | null | null | null |
Python/Mundo03/Exercicios/ex101.py
|
eStev4m/CursoPython
|
8b52a618e67c80d66518ef91c1d4596a2bfddc22
|
[
"MIT"
] | null | null | null |
Python/Mundo03/Exercicios/ex101.py
|
eStev4m/CursoPython
|
8b52a618e67c80d66518ef91c1d4596a2bfddc22
|
[
"MIT"
] | null | null | null |
'''Create a program with a function called voto() that receives a person's year
of birth as a parameter, returning a literal value indicating whether that
person's vote in elections is DENIED, OPTIONAL or MANDATORY.'''
def voto(nasc):
    from datetime import date
    pes = date.today().year - nasc  # pes holds the person's age
    if pes < 16:
        print(f'At {pes} years old: vote denied')
    elif 16 <= pes < 18 or pes > 65:
        print(f'At {pes} years old: vote optional')
    else:
        print(f'At {pes} years old: vote mandatory')
    return pes
print('-' * 30)
nasc = int(input('What year were you born? '))
voto(nasc)
| 32.65
| 230
| 0.640123
|
803160750f086aa3ec0d8f6183c553ca1362376d
| 172
|
py
|
Python
|
fantastik/urls.py
|
mirceachira/fantastick-bot
|
ef1f059f5ac863013829ef78830c06c4a7fa4ea4
|
[
"MIT"
] | null | null | null |
fantastik/urls.py
|
mirceachira/fantastick-bot
|
ef1f059f5ac863013829ef78830c06c4a7fa4ea4
|
[
"MIT"
] | null | null | null |
fantastik/urls.py
|
mirceachira/fantastick-bot
|
ef1f059f5ac863013829ef78830c06c4a7fa4ea4
|
[
"MIT"
] | 1
|
2018-11-24T12:02:31.000Z
|
2018-11-24T12:02:31.000Z
|
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('messenger_bot.urls'))
]
| 19.111111
| 43
| 0.697674
|
3e8289b843e6088d42a2743ce14b11cb09000ad9
| 106
|
py
|
Python
|
grizli/aws/__init__.py
|
york-stsci/grizli
|
86333320817bf626f70c91681782e7d5bb2585de
|
[
"MIT"
] | null | null | null |
grizli/aws/__init__.py
|
york-stsci/grizli
|
86333320817bf626f70c91681782e7d5bb2585de
|
[
"MIT"
] | null | null | null |
grizli/aws/__init__.py
|
york-stsci/grizli
|
86333320817bf626f70c91681782e7d5bb2585de
|
[
"MIT"
] | null | null | null |
from . import aws_drizzler
from . import db
from . import fit_redshift_lambda
from . import lambda_handler
| 26.5
| 33
| 0.820755
|
7f77a929753b76b8eb3f06f290289d1c90d607c5
| 8,719
|
py
|
Python
|
lite_content/lite_internal_frontend/strings.py
|
django-doctor/lite-frontend
|
330ff9575fd22d7c4c42698ac2d653244e6180d6
|
[
"MIT"
] | 1
|
2021-10-16T16:36:58.000Z
|
2021-10-16T16:36:58.000Z
|
lite_content/lite_internal_frontend/strings.py
|
django-doctor/lite-frontend
|
330ff9575fd22d7c4c42698ac2d653244e6180d6
|
[
"MIT"
] | 45
|
2020-08-11T14:37:46.000Z
|
2022-03-29T17:03:02.000Z
|
lite_content/lite_internal_frontend/strings.py
|
django-doctor/lite-frontend
|
330ff9575fd22d7c4c42698ac2d653244e6180d6
|
[
"MIT"
] | 3
|
2021-02-01T06:26:19.000Z
|
2022-02-21T23:02:46.000Z
|
from lite_content.lite_internal_frontend import ( # noqa
advice, # noqa
cases, # noqa
letter_templates, # noqa
open_general_licences, # noqa
roles, # noqa
flags, # noqa
organisations, # noqa
generic, # noqa
goods, # noqa
users, # noqa
teams, # noqa
queues, # noqa
picklists, # noqa
routing_rules, # noqa
) # noqa
# Buttons
CONTINUE = "Continue"
SAVE = "Save"
NOT_APPLICABLE = "N/A"
QUEUE_ALL_CASES = "All cases"
# Organisation
ORGANISATION_CREATION_SUCCESS = "The organisation was created successfully"
ORGANISATION_SET_FLAGS = "Set flags on this organisation"
ORGANISATION_EDIT_FLAGS = "Edit organisation flags"
# HMRC Organisation
HMRC_ORGANISATION_CREATION_SUCCESS = "The HMRC organisation was created successfully"
# Good
GOOD_DESCRIPTION = "Description"
GOOD_CONTROL_LIST_ENTRY = "Control list classification"
GOOD_INCORPORATED = "Incorporated"
GOOD_CONTROLLED = "Controlled"
GOOD_FLAGS = "Flags"
# Supporting documentation
SUPPORTING_DOCUMENTATION_TITLE = "Supporting documents"
SUPPORTING_DOCUMENTATION_NAME = "Name"
SUPPORTING_DOCUMENTATION_DESCRIPTION = "Description"
SUPPORTING_DOCUMENTATION_DOCUMENT = "Document"
SUPPORTING_DOCUMENTATION_NO_DOCUMENTATION = "No supporting documents"
DOCUMENT_TEMPLATES_TITLE = "Document templates"
class Common:
SERVICE_NAME = "LITE Internal"
class Authentication:
class UserDoesNotExist:
DESCRIPTION = "You are not registered to use this system"
TITLE = "User not found"
class UpdateUser:
class Status:
DEACTIVATE_WARNING = "This user will no longer be able to sign in or perform tasks"
        REACTIVATE_WARNING = "This user will be able to sign in and perform tasks"
class Activity:
ADDED_AN_ECJU_QUERY = " added an ECJU query:"
ADDED_A_CASE_NOTE = " added a case note:"
class FlaggingRules:
CREATE = "Create new flagging rule"
TITLE = "Flagging rules"
DESCRIPTION = "Flagging rules apply flags to cases automatically based on conditions"
class List:
class Filter:
Type = "type"
MY_TEAM_ONLY = "Only show my team"
INCLUDE_DEACTIVATED = "Include deactivated"
TEAM = "Team"
TYPE = "Type"
PARAMETER = "Parameter"
FLAG = "Flag"
CONDITION = "Conditions"
STATUS = "Status"
ACTIONS = "Actions"
EDIT = "Edit"
REACTIVATE = "Reactivate"
DEACTIVATE = "Deactivate"
class Create:
BACKLINK = "Back to flagging rules"
class Type:
TITLE = "Select parameters"
SAVE = "Save and continue"
GOOD = "Products"
DESTINATION = "Destinations"
APPLICATION = "Application types"
class Condition_and_flag:
GOOD_TITLE = "Set flagging rules"
DESTINATION_TITLE = "Select a country and flag"
            APPLICATION_TITLE = "Select an application type and flag"
ADD_CONDITION = "Add a condition"
GOOD = "Select individual control list entries"
DESTINATION = "Country"
APPLICATION = "Application type"
FLAG = "Select a flag"
GOODS_QUESTION = "Only apply this rule to verified goods?"
YES_OPTION = "Yes"
NO_OPTION = "No"
class Status:
DEACTIVATE_HEADING = "Are you sure you want to deactivate this flagging rule?"
DEACTIVATE_WARNING = "This flagging rule will no longer be able to be used unless it's reactivated"
DEACTIVATE_CONFIRM = "Deactivate this flagging rule"
REACTIVATE_HEADING = "Are you sure you want to reactivate this flagging rule?"
REACTIVATE_WARNING = "This flagging rule will be able to be used unless it's deactivated again"
REACTIVATE_CONFIRM = "Reactivate this flagging rule"
BACK = "Back to flagging rules"
CANCEL = "Cancel"
NO_SELECTION_ERROR = "Select to confirm or not"
class Picklist:
TITLE = "Picklists"
class Edit:
class Status:
DEACTIVATE_HEADING = "Are you sure you want to deactivate this picklist item?"
DEACTIVATE_WARNING = "This picklist item will no longer be able to be used unless it's reactivated"
REACTIVATE_HEADING = "Are you sure you want to reactivate this picklist item?"
REACTIVATE_WARNING = "This picklist item will be able to be used unless it's deactivated again"
class LetterTemplates:
class AddParagraph:
ADD_BUTTON = "Add items"
HINT = "Select letter paragraphs to use in your template."
TITLE = "Add letter paragraphs"
NO_LETTER_PARAGRAPHS = "There aren't any letter paragraphs left to add"
class EditParagraph:
ADD_LINK = "Add another letter paragraph"
HINT = "Drag and drop letter paragraphs to reorder."
REMOVE_BUTTON = "Remove letter paragraph from template"
SAVE_BUTTON = "Done"
TITLE = "Edit letter paragraphs"
class OrderParagraph:
ADD_PARAGRAPH = "Add a new paragraph"
JS_HINT = "Drag and drop letter paragraphs to move them around"
NO_JS_HINT = "Delete and add new paragraphs"
PREVIEW_BUTTON = "Preview"
REMOVE_BUTTON = "Remove letter paragraph from template"
TITLE = "Choose the letter paragraphs you want to use in your letter"
class Preview:
SAVE_BUTTON = "Save"
TITLE = "Preview"
class LetterTemplates:
CREATE_BUTTON = "Create a template"
ERROR_BANNER = "An error occurred whilst processing your template"
LAYOUT_COLUMN_TITLE = "Layout"
NAME_COLUMN_TITLE = "Name"
RESTRICTED_COLUMN_TITLE = "Restricted to"
SUCCESSFULLY_CREATED_BANNER = "Your letter template was created successfully"
TITLE = "Letter templates"
UPDATED_COLUMN_TITLE = "Last updated"
class LetterTemplate:
BACK_LINK = "Back to letter templates"
CREATED_TITLE = "Created at"
EDIT_BUTTON = "Edit name and layout"
EDIT_PARAGRAPH_BUTTON = "Add or edit paragraphs"
LAST_UPDATE_TITLE = "Last updated"
LAYOUT_TITLE = "Layout"
RESTRICTED_TITLE = "Restricted to"
DECISIONS_TITLE = "Decisions"
VISIBLE_TO_EXPORTER_TITLE = "Visible to exporter"
DIGITAL_SIGNATURE_TITLE = "Has a digital signature"
class EditLetterTemplate:
BUTTON_NAME = "Save"
TITLE = "Edit %s"
class Name:
HINT = (
"Call it something that:\n• is easy to find\n• explains when to use this template\n\n For example,"
" 'Refuse a licence'"
)
TITLE = "Give your template a name"
class CaseTypes:
TITLE = "When should someone use this template?"
class Types:
APPLICATION = "Applications"
GOODS_QUERY = "Goods query"
END_USER_ADVISORY = "End user advisory queries"
class Decisions:
TITLE = "Decisions (optional)"
DESCRIPTION = "Select the decisions that apply to your template"
class IncludeSignature:
TITLE = "Add a digital signature to the template?"
DESCRIPTION = ""
YES_OPTION = "Yes"
NO_OPTION = "No"
class Layout:
TITLE = "Choose a layout"
class AddLetterTemplate:
class Name:
BACK_LINK = "Back to letter templates"
CONTINUE_BUTTON = "Continue"
HINT = (
"Call it something that:\n• is easy to find\n• explains when to use this template\n\n For example,"
" 'Refuse a licence'"
)
TITLE = "Give your template a name"
class CaseTypes:
CONTINUE_BUTTON = "Continue"
TITLE = "When should someone use this template?"
class Types:
APPLICATION = "Applications"
GOODS_QUERY = "Goods query"
END_USER_ADVISORY = "End user advisory queries"
class Decisions:
TITLE = "Decisions (optional)"
class VisibleToExporter:
TITLE = "Visible to exporter"
DESCRIPTION = "Should documents created with this template be visible to exporters?"
YES_OPTION = "Yes"
NO_OPTION = "No"
BUTTON = "Continue"
class IncludeSignature:
TITLE = "Add a digital signature to the template?"
DESCRIPTION = ""
YES_OPTION = "Yes"
NO_OPTION = "No"
BUTTON = "Continue"
class Layout:
CONTINUE_BUTTON = "Continue"
TITLE = "Choose a layout"
| 32.655431
| 115
| 0.632527
|
ba359f06b0f3704efcdfd0409f85222d8e598b89
| 13,293
|
py
|
Python
|
thriftpy2/thrift.py
|
aiudirog/thriftpy2
|
7f82b6fbc08bf38233316fd7ec5c294068d69f38
|
[
"MIT"
] | null | null | null |
thriftpy2/thrift.py
|
aiudirog/thriftpy2
|
7f82b6fbc08bf38233316fd7ec5c294068d69f38
|
[
"MIT"
] | null | null | null |
thriftpy2/thrift.py
|
aiudirog/thriftpy2
|
7f82b6fbc08bf38233316fd7ec5c294068d69f38
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
thriftpy2.thrift
~~~~~~~~~~~~~~~~~~
Thrift simplified.
"""
from __future__ import absolute_import
import functools
import linecache
import types
from ._compat import with_metaclass, PY3
if PY3:
from itertools import zip_longest
else:
from itertools import izip_longest as zip_longest
def args_to_kwargs(thrift_spec, *args, **kwargs):
for item, value in zip_longest(sorted(thrift_spec.items()), args):
arg_name = item[1][1]
required = item[1][-1]
        if value is not None:
            kwargs[arg_name] = value
if required and arg_name not in kwargs:
raise ValueError(arg_name)
return kwargs
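# Illustrative sketch of args_to_kwargs (hypothetical two-field spec; not part
# of the original module):
#
#     spec = {1: (TType.STRING, 'name', False),
#             2: (TType.I32, 'number', True)}
#     args_to_kwargs(spec, 'Alice', 42)  # -> {'name': 'Alice', 'number': 42}
#     args_to_kwargs(spec, 'Alice')      # -> ValueError('number'): required field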
def parse_spec(ttype, spec=None):
name_map = TType._VALUES_TO_NAMES
def _type(s):
return parse_spec(*s) if isinstance(s, tuple) else name_map[s]
if spec is None:
return name_map[ttype]
if ttype == TType.STRUCT:
return spec.__name__
if ttype in (TType.LIST, TType.SET):
return "%s<%s>" % (name_map[ttype], _type(spec))
if ttype == TType.MAP:
return "MAP<%s, %s>" % (_type(spec[0]), _type(spec[1]))
def init_func_generator(cls, spec):
"""Generate `__init__` function based on TPayload.default_spec
For example::
spec = [('name', 'Alice'), ('number', None)]
will generate a types.FunctionType object representing::
def __init__(self, name='Alice', number=None):
self.name = name
self.number = number
"""
if not spec:
def __init__(self):
pass
return __init__
varnames, defaults = zip(*spec)
args = ', '.join(map('{0[0]}={0[1]!r}'.format, spec))
init = "def __init__(self, {}):\n".format(args)
init += "\n".join(map(' self.{0} = {0}'.format, varnames))
name = '<generated {}.__init__>'.format(cls.__name__)
code = compile(init, name, 'exec')
func = next(c for c in code.co_consts if isinstance(c, types.CodeType))
# Add a fake linecache entry so debuggers and the traceback module can
# better understand our generated code.
linecache.cache[name] = (len(init), None, init.splitlines(True), name)
return types.FunctionType(func, {}, argdefs=defaults)
class TType(object):
STOP = 0
VOID = 1
BOOL = 2
BYTE = 3
I08 = 3
DOUBLE = 4
I16 = 6
I32 = 8
I64 = 10
STRING = 11
UTF7 = 11
BINARY = 11 # This here just for parsing. For all purposes, it's a string
STRUCT = 12
MAP = 13
SET = 14
LIST = 15
UTF8 = 16
UTF16 = 17
_VALUES_TO_NAMES = {
STOP: 'STOP',
VOID: 'VOID',
BOOL: 'BOOL',
BYTE: 'BYTE',
I08: 'BYTE',
DOUBLE: 'DOUBLE',
I16: 'I16',
I32: 'I32',
I64: 'I64',
STRING: 'STRING',
UTF7: 'STRING',
BINARY: 'STRING',
STRUCT: 'STRUCT',
MAP: 'MAP',
SET: 'SET',
LIST: 'LIST',
UTF8: 'UTF8',
UTF16: 'UTF16'
}
class TMessageType(object):
CALL = 1
REPLY = 2
EXCEPTION = 3
ONEWAY = 4
class TPayloadMeta(type):
def __new__(cls, name, bases, attrs):
if "default_spec" in attrs:
spec = attrs.pop("default_spec")
attrs["__init__"] = init_func_generator(cls, spec)
return super(TPayloadMeta, cls).__new__(cls, name, bases, attrs)
def gen_init(cls, thrift_spec=None, default_spec=None):
if thrift_spec is not None:
cls.thrift_spec = thrift_spec
if default_spec is not None:
cls.__init__ = init_func_generator(cls, default_spec)
return cls
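# Illustrative sketch of gen_init (the Person struct is hypothetical; not part
# of the original module):
#
#     class Person(TPayload):
#         pass
#     gen_init(Person,
#              thrift_spec={1: (TType.STRING, 'name', False)},
#              default_spec=[('name', None)])
#     Person(name='Alice')  # -> Person(name='Alice')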
class TPayload(with_metaclass(TPayloadMeta, object)):
__hash__ = None
def read(self, iprot):
iprot.read_struct(self)
def write(self, oprot):
oprot.write_struct(self)
def __repr__(self):
        fields = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(fields))
def __str__(self):
return repr(self)
def __eq__(self, other):
return isinstance(other, self.__class__) and \
self.__dict__ == other.__dict__
def __ne__(self, other):
return not self.__eq__(other)
class TClient(object):
def __init__(self, service, iprot, oprot=None):
self._service = service
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def __getattr__(self, _api):
if _api in self._service.thrift_services:
return functools.partial(self._req, _api)
# close method is a reserved method name defined as below
# so we need to handle it alone
if _api == 'tclose':
return functools.partial(self._req, 'close')
raise AttributeError("{} instance has no attribute '{}'".format(
self.__class__.__name__, _api))
def __dir__(self):
return self._service.thrift_services
def _req(self, _api, *args, **kwargs):
try:
service_args = getattr(self._service, _api + "_args")
kwargs = args_to_kwargs(service_args.thrift_spec, *args, **kwargs)
except ValueError as e:
raise TApplicationException(
TApplicationException.UNKNOWN_METHOD,
'{arg} is required argument for {service}.{api}'.format(
arg=e.args[0], service=self._service.__name__, api=_api))
result_cls = getattr(self._service, _api + "_result")
self._send(_api, **kwargs)
# wait result only if non-oneway
if not getattr(result_cls, "oneway"):
return self._recv(_api)
def _send(self, _api, **kwargs):
self._oprot.write_message_begin(_api, TMessageType.CALL, self._seqid)
args = getattr(self._service, _api + "_args")()
for k, v in kwargs.items():
setattr(args, k, v)
args.write(self._oprot)
self._oprot.write_message_end()
self._oprot.trans.flush()
def _recv(self, _api):
fname, mtype, rseqid = self._iprot.read_message_begin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.read_message_end()
raise x
result = getattr(self._service, _api + "_result")()
result.read(self._iprot)
self._iprot.read_message_end()
if hasattr(result, "success") and result.success is not None:
return result.success
# void api without throws
if len(result.thrift_spec) == 0:
return
# check throws
for k, v in result.__dict__.items():
if k != "success" and v:
raise v
# no throws & not void api
if hasattr(result, "success"):
raise TApplicationException(TApplicationException.MISSING_RESULT)
def close(self):
self._iprot.trans.close()
if self._iprot != self._oprot:
self._oprot.trans.close()
class TProcessor(object):
"""Base class for processor, which works on two streams."""
def __init__(self, service, handler):
self._service = service
self._handler = handler
def process_in(self, iprot):
api, type, seqid = iprot.read_message_begin()
if api not in self._service.thrift_services:
iprot.skip(TType.STRUCT)
iprot.read_message_end()
return api, seqid, TApplicationException(TApplicationException.UNKNOWN_METHOD), None # noqa
args = getattr(self._service, api + "_args")()
args.read(iprot)
iprot.read_message_end()
result = getattr(self._service, api + "_result")()
# convert kwargs to args
api_args = [args.thrift_spec[k][1] for k in sorted(args.thrift_spec)]
def call():
f = getattr(self._handler, api)
return f(*(args.__dict__[k] for k in api_args))
return api, seqid, result, call
def send_exception(self, oprot, api, exc, seqid):
oprot.write_message_begin(api, TMessageType.EXCEPTION, seqid)
exc.write(oprot)
oprot.write_message_end()
oprot.trans.flush()
def send_result(self, oprot, api, result, seqid):
oprot.write_message_begin(api, TMessageType.REPLY, seqid)
result.write(oprot)
oprot.write_message_end()
oprot.trans.flush()
def handle_exception(self, e, result):
for k in sorted(result.thrift_spec):
if result.thrift_spec[k][1] == "success":
continue
_, exc_name, exc_cls, _ = result.thrift_spec[k]
if isinstance(e, exc_cls):
setattr(result, exc_name, e)
return True
return False
def process(self, iprot, oprot):
api, seqid, result, call = self.process_in(iprot)
if isinstance(result, TApplicationException):
return self.send_exception(oprot, api, result, seqid)
try:
result.success = call()
except TApplicationException as e:
return self.send_exception(oprot, api, e, seqid)
except Exception as e:
# raise if api don't have throws
if not self.handle_exception(e, result):
raise
if not result.oneway:
self.send_result(oprot, api, result, seqid)
class TMultiplexedProcessor(TProcessor):
SEPARATOR = ":"
def __init__(self):
self.processors = {}
def register_processor(self, service_name, processor):
if service_name in self.processors:
raise TApplicationException(
type=TApplicationException.INTERNAL_ERROR,
message='processor for `{}` already registered'
.format(service_name))
self.processors[service_name] = processor
def process_in(self, iprot):
api, type, seqid = iprot.read_message_begin()
if type not in (TMessageType.CALL, TMessageType.ONEWAY):
raise TException("TMultiplexed protocol only supports CALL & ONEWAY") # noqa
if TMultiplexedProcessor.SEPARATOR not in api:
raise TException("Service name not found in message. "
"You should use TMultiplexedProtocol in client.")
service_name, api = api.split(TMultiplexedProcessor.SEPARATOR)
if service_name not in self.processors:
iprot.skip(TType.STRUCT)
iprot.read_message_end()
e = TApplicationException(TApplicationException.UNKNOWN_METHOD)
return api, seqid, e, None
proc = self.processors[service_name]
args = getattr(proc._service, api + "_args")()
args.read(iprot)
iprot.read_message_end()
result = getattr(proc._service, api + "_result")()
# convert kwargs to args
api_args = [args.thrift_spec[k][1] for k in sorted(args.thrift_spec)]
def call():
f = getattr(proc._handler, api)
return f(*(args.__dict__[k] for k in api_args))
return api, seqid, result, call
class TProcessorFactory(object):
def __init__(self, processor_class, *args, **kwargs):
self.args = args
self.kwargs = kwargs
self.processor_class = processor_class
def get_processor(self):
return self.processor_class(*self.args, **self.kwargs)
class TException(TPayload, Exception):
"""Base class for all thrift exceptions."""
def __hash__(self):
return id(self)
def __eq__(self, other):
return id(self) == id(other)
class TDecodeException(TException):
def __init__(self, name, fid, field, value, ttype, spec=None):
self.struct_name = name
self.fid = fid
self.field = field
self.value = value
self.type_repr = parse_spec(ttype, spec)
def __str__(self):
return (
"Field '%s(%s)' of '%s' needs type '%s', "
"but the value is `%r`"
) % (self.field, self.fid, self.struct_name, self.type_repr,
self.value)
class TApplicationException(TException):
"""Application level thrift exceptions."""
thrift_spec = {
1: (TType.STRING, 'message', False),
2: (TType.I32, 'type', False),
}
UNKNOWN = 0
UNKNOWN_METHOD = 1
INVALID_MESSAGE_TYPE = 2
WRONG_METHOD_NAME = 3
BAD_SEQUENCE_ID = 4
MISSING_RESULT = 5
INTERNAL_ERROR = 6
PROTOCOL_ERROR = 7
def __init__(self, type=UNKNOWN, message=None):
super(TApplicationException, self).__init__()
self.type = type
self.message = message
def __str__(self):
if self.message:
return self.message
if self.type == self.UNKNOWN_METHOD:
return 'Unknown method'
elif self.type == self.INVALID_MESSAGE_TYPE:
return 'Invalid message type'
elif self.type == self.WRONG_METHOD_NAME:
return 'Wrong method name'
elif self.type == self.BAD_SEQUENCE_ID:
return 'Bad sequence ID'
elif self.type == self.MISSING_RESULT:
return 'Missing result'
else:
return 'Default (unknown) TApplicationException'
| 29.215385
| 104
| 0.601595
|
92561a5b6c465621c3e2f89b74e93c18181bf171
| 9,102
|
py
|
Python
|
Lib/site-packages/pip/_vendor/rich/tree.py
|
edupyter/EDUPYTER38
|
396183cea72987506f1ef647c0272a2577c56218
|
[
"bzip2-1.0.6"
] | 1
|
2022-02-25T13:46:54.000Z
|
2022-02-25T13:46:54.000Z
|
Lib/site-packages/pip/_vendor/rich/tree.py
|
edupyter/EDUPYTER38
|
396183cea72987506f1ef647c0272a2577c56218
|
[
"bzip2-1.0.6"
] | 1
|
2022-01-27T19:09:25.000Z
|
2022-01-27T19:09:25.000Z
|
Lib/site-packages/pip/_vendor/rich/tree.py
|
edupyter/EDUPYTER38
|
396183cea72987506f1ef647c0272a2577c56218
|
[
"bzip2-1.0.6"
] | 1
|
2021-09-27T11:14:58.000Z
|
2021-09-27T11:14:58.000Z
|
from typing import Iterator, List, Optional, Tuple
from ._loop import loop_first, loop_last
from .console import Console, ConsoleOptions, RenderableType, RenderResult
from .jupyter import JupyterMixin
from .measure import Measurement
from .segment import Segment
from .style import Style, StyleStack, StyleType
from .styled import Styled
class Tree(JupyterMixin):
"""A renderable for a tree structure.
Args:
label (RenderableType): The renderable or str for the tree label.
style (StyleType, optional): Style of this tree. Defaults to "tree".
guide_style (StyleType, optional): Style of the guide lines. Defaults to "tree.line".
expanded (bool, optional): Also display children. Defaults to True.
highlight (bool, optional): Highlight renderable (if str). Defaults to False.
"""
def __init__(
self,
label: RenderableType,
*,
style: StyleType = "tree",
guide_style: StyleType = "tree.line",
expanded: bool = True,
highlight: bool = False,
hide_root: bool = False,
) -> None:
self.label = label
self.style = style
self.guide_style = guide_style
self.children: List[Tree] = []
self.expanded = expanded
self.highlight = highlight
self.hide_root = hide_root
def add(
self,
label: RenderableType,
*,
style: Optional[StyleType] = None,
guide_style: Optional[StyleType] = None,
expanded: bool = True,
        highlight: Optional[bool] = None,
) -> "Tree":
"""Add a child tree.
Args:
label (RenderableType): The renderable or str for the tree label.
style (StyleType, optional): Style of this tree. Defaults to "tree".
guide_style (StyleType, optional): Style of the guide lines. Defaults to "tree.line".
expanded (bool, optional): Also display children. Defaults to True.
highlight (Optional[bool], optional): Highlight renderable (if str). Defaults to False.
Returns:
Tree: A new child Tree, which may be further modified.
"""
node = Tree(
label,
style=self.style if style is None else style,
guide_style=self.guide_style if guide_style is None else guide_style,
expanded=expanded,
highlight=self.highlight if highlight is None else highlight,
)
self.children.append(node)
return node
def __rich_console__(
self, console: "Console", options: "ConsoleOptions"
) -> "RenderResult":
stack: List[Iterator[Tuple[bool, Tree]]] = []
pop = stack.pop
push = stack.append
new_line = Segment.line()
get_style = console.get_style
null_style = Style.null()
guide_style = get_style(self.guide_style, default="") or null_style
SPACE, CONTINUE, FORK, END = range(4)
ASCII_GUIDES = (" ", "| ", "+-- ", "`-- ")
TREE_GUIDES = [
(" ", "│ ", "├── ", "└── "),
(" ", "┃ ", "┣━━ ", "┗━━ "),
(" ", "║ ", "╠══ ", "╚══ "),
]
_Segment = Segment
def make_guide(index: int, style: Style) -> Segment:
"""Make a Segment for a level of the guide lines."""
if options.ascii_only:
line = ASCII_GUIDES[index]
else:
guide = 1 if style.bold else (2 if style.underline2 else 0)
line = TREE_GUIDES[0 if options.legacy_windows else guide][index]
return _Segment(line, style)
levels: List[Segment] = [make_guide(CONTINUE, guide_style)]
push(iter(loop_last([self])))
guide_style_stack = StyleStack(get_style(self.guide_style))
style_stack = StyleStack(get_style(self.style))
remove_guide_styles = Style(bold=False, underline2=False)
depth = 0
while stack:
stack_node = pop()
try:
last, node = next(stack_node)
except StopIteration:
levels.pop()
if levels:
guide_style = levels[-1].style or null_style
levels[-1] = make_guide(FORK, guide_style)
guide_style_stack.pop()
style_stack.pop()
continue
push(stack_node)
if last:
levels[-1] = make_guide(END, levels[-1].style or null_style)
guide_style = guide_style_stack.current + get_style(node.guide_style)
style = style_stack.current + get_style(node.style)
prefix = levels[(2 if self.hide_root else 1) :]
renderable_lines = console.render_lines(
Styled(node.label, style),
options.update(
width=options.max_width
- sum(level.cell_length for level in prefix),
highlight=self.highlight,
height=None,
),
pad=options.justify is not None,
)
if not (depth == 0 and self.hide_root):
for first, line in loop_first(renderable_lines):
if prefix:
yield from _Segment.apply_style(
prefix,
style.background_style,
post_style=remove_guide_styles,
)
yield from line
yield new_line
if first and prefix:
prefix[-1] = make_guide(
SPACE if last else CONTINUE, prefix[-1].style or null_style
)
if node.expanded and node.children:
levels[-1] = make_guide(
SPACE if last else CONTINUE, levels[-1].style or null_style
)
levels.append(
make_guide(END if len(node.children) == 1 else FORK, guide_style)
)
style_stack.push(get_style(node.style))
guide_style_stack.push(get_style(node.guide_style))
push(iter(loop_last(node.children)))
depth += 1
def __rich_measure__(
self, console: "Console", options: "ConsoleOptions"
) -> "Measurement":
stack: List[Iterator[Tree]] = [iter([self])]
pop = stack.pop
push = stack.append
minimum = 0
maximum = 0
measure = Measurement.get
level = 0
while stack:
iter_tree = pop()
try:
tree = next(iter_tree)
except StopIteration:
level -= 1
continue
push(iter_tree)
min_measure, max_measure = measure(console, options, tree.label)
indent = level * 4
minimum = max(min_measure + indent, minimum)
maximum = max(max_measure + indent, maximum)
if tree.expanded and tree.children:
push(iter(tree.children))
level += 1
return Measurement(minimum, maximum)
if __name__ == "__main__": # pragma: no cover
from pip._vendor.rich.console import Group
from pip._vendor.rich.markdown import Markdown
from pip._vendor.rich.panel import Panel
from pip._vendor.rich.syntax import Syntax
from pip._vendor.rich.table import Table
table = Table(row_styles=["", "dim"])
table.add_column("Released", style="cyan", no_wrap=True)
table.add_column("Title", style="magenta")
table.add_column("Box Office", justify="right", style="green")
table.add_row("Dec 20, 2019", "Star Wars: The Rise of Skywalker", "$952,110,690")
table.add_row("May 25, 2018", "Solo: A Star Wars Story", "$393,151,347")
table.add_row("Dec 15, 2017", "Star Wars Ep. V111: The Last Jedi", "$1,332,539,889")
table.add_row("Dec 16, 2016", "Rogue One: A Star Wars Story", "$1,332,439,889")
code = """\
class Segment(NamedTuple):
text: str = ""
style: Optional[Style] = None
is_control: bool = False
"""
syntax = Syntax(code, "python", theme="monokai", line_numbers=True)
markdown = Markdown(
"""\
### example.md
> Hello, World!
>
> Markdown _all_ the things
"""
)
root = Tree("🌲 [b green]Rich Tree", highlight=True, hide_root=True)
node = root.add(":file_folder: Renderables", guide_style="red")
simple_node = node.add(":file_folder: [bold yellow]Atomic", guide_style="uu green")
simple_node.add(Group("📄 Syntax", syntax))
simple_node.add(Group("📄 Markdown", Panel(markdown, border_style="green")))
containers_node = node.add(
":file_folder: [bold magenta]Containers", guide_style="bold magenta"
)
containers_node.expanded = True
panel = Panel.fit("Just a panel", border_style="red")
containers_node.add(Group("📄 Panels", panel))
containers_node.add(Group("📄 [b magenta]Table", table))
console = Console()
console.print(root)
| 36.119048
| 99
| 0.566359
|
fc54fc8c1b0e81df03082c1af6e54c1f0a07d7cd
| 4,894
|
py
|
Python
|
magnum/tests/unit/common/x509/test_validator.py
|
ISCAS-VDI/magnum-base
|
5bb88e12b3e5d665ae1b345b62023d1016217e08
|
[
"Apache-2.0"
] | null | null | null |
magnum/tests/unit/common/x509/test_validator.py
|
ISCAS-VDI/magnum-base
|
5bb88e12b3e5d665ae1b345b62023d1016217e08
|
[
"Apache-2.0"
] | null | null | null |
magnum/tests/unit/common/x509/test_validator.py
|
ISCAS-VDI/magnum-base
|
5bb88e12b3e5d665ae1b345b62023d1016217e08
|
[
"Apache-2.0"
] | 1
|
2020-09-09T14:35:08.000Z
|
2020-09-09T14:35:08.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from cryptography import x509 as c_x509
from magnum.common.exception import CertificateValidationError
from magnum.common.x509 import validator as v
class TestValidators(unittest.TestCase):
def setUp(self):
super(TestValidators, self).setUp()
def tearDown(self):
super(TestValidators, self).tearDown()
def _build_key_usage(self, critical=False):
# Digital Signature and Key Encipherment are enabled
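        # Positional argument order of cryptography's KeyUsage, for reference
        # (comment added for illustration): digital_signature,
        # content_commitment, key_encipherment, data_encipherment,
        # key_agreement, key_cert_sign, crl_sign, encipher_only, decipher_only.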
key_usage = c_x509.KeyUsage(
True, False, True, False, False, False, False, False, False)
return c_x509.Extension(key_usage.oid, critical, key_usage)
def _build_basic_constraints(self, ca=False, critical=False):
bc = c_x509.BasicConstraints(ca, None)
return c_x509.Extension(bc.oid, critical, bc)
def test_filter_allowed_extensions(self):
key_usage = self._build_key_usage(critical=True)
actual = [e for e in v.filter_allowed_extensions([key_usage],
['keyUsage'])]
self.assertEqual([key_usage], actual)
def test_filter_allowed_extensions_disallowed_but_not_critical(self):
key_usage = self._build_key_usage()
actual = [e for e in v.filter_allowed_extensions([key_usage],
['subjectAltName'])]
self.assertEqual([], actual)
def test_filter_allowed_extensions_disallowed(self):
key_usage = self._build_key_usage(critical=True)
with self.assertRaises(CertificateValidationError):
next(v.filter_allowed_extensions([key_usage], ['subjectAltName']))
def test_merge_key_usage(self):
key_usage = self._build_key_usage(critical=True)
self.assertEqual(key_usage,
v._merge_key_usage(key_usage,
['Digital Signature',
'Key Encipherment']))
def test_merge_key_usage_disallowed_but_not_critical(self):
key_usage = self._build_key_usage()
expected = c_x509.KeyUsage(
True, False, False, False, False, False, False, False, False)
expected = c_x509.Extension(expected.oid, False, expected)
self.assertEqual(expected,
v._merge_key_usage(key_usage,
['Digital Signature']))
def test_merge_key_usage_disallowed(self):
key_usage = self._build_key_usage(critical=True)
with self.assertRaises(CertificateValidationError):
v._merge_key_usage(key_usage, ['Digital Signature'])
def test_disallow_ca_in_basic_constraints_not_critical(self):
bc = self._build_basic_constraints(ca=True)
expected = self._build_basic_constraints(ca=False)
self.assertEqual(expected, v._disallow_ca_in_basic_constraints(bc))
def test_disallow_ca_in_basic_constraints(self):
bc = self._build_basic_constraints(ca=True, critical=True)
with self.assertRaises(CertificateValidationError):
v._disallow_ca_in_basic_constraints(bc)
def test_disallow_ca_in_basic_constraints_with_non_ca(self):
bc = self._build_basic_constraints(ca=False)
self.assertEqual(bc, v._disallow_ca_in_basic_constraints(bc))
def test_remove_ca_key_usage(self):
contains_ca_key_usage = set([
"Digital Signature", "Certificate Sign", "CRL Sign"])
self.assertEqual(set(["Digital Signature"]),
v._remove_ca_key_usage(contains_ca_key_usage))
def test_remove_ca_key_usage_cert_sign(self):
contains_ca_key_usage = set(["Digital Signature", "Certificate Sign"])
self.assertEqual(set(["Digital Signature"]),
v._remove_ca_key_usage(contains_ca_key_usage))
def test_remove_ca_key_usage_crl_sign(self):
contains_ca_key_usage = set(["Digital Signature", "CRL Sign"])
self.assertEqual(set(["Digital Signature"]),
v._remove_ca_key_usage(contains_ca_key_usage))
def test_remove_ca_key_usage_without_ca_usage(self):
contains_ca_key_usage = set(["Digital Signature"])
self.assertEqual(set(["Digital Signature"]),
v._remove_ca_key_usage(contains_ca_key_usage))
| 39.467742
| 78
| 0.670821
|
abbdbca507ae45c8c4a39fc90e96715fcb6bfe0f
| 767
|
py
|
Python
|
tests/framework/Samplers/SparseGrid/scgpc/polynomial.py
|
rinelson456/raven
|
1114246136a2f72969e75b5e99a11b35500d4eef
|
[
"Apache-2.0"
] | 159
|
2017-03-24T21:07:06.000Z
|
2022-03-20T13:44:40.000Z
|
tests/framework/Samplers/SparseGrid/scgpc/polynomial.py
|
rinelson456/raven
|
1114246136a2f72969e75b5e99a11b35500d4eef
|
[
"Apache-2.0"
] | 1,667
|
2017-03-27T14:41:22.000Z
|
2022-03-31T19:50:06.000Z
|
tests/framework/Samplers/SparseGrid/scgpc/polynomial.py
|
rinelson456/raven
|
1114246136a2f72969e75b5e99a11b35500d4eef
|
[
"Apache-2.0"
] | 95
|
2017-03-24T21:05:03.000Z
|
2022-03-08T17:30:22.000Z
|
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
def eval(inp,exp):
return sum(n**exp for n in inp)
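# Worked example (comment added for illustration): eval((2.0, 3.0), 2.0)
# returns 2.0**2 + 3.0**2 == 13.0, i.e. the sum of each input raised to exp.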
def run(self,Input):
self.ans = eval((self.x1,self.x2),1.0)
self.ans2 = eval((self.x1,self.x2),2.0)
| 34.863636
| 74
| 0.743155
|
2982ee11a75bb9f15346bc6802ccc7f0c12c7a66
| 12,838
|
py
|
Python
|
userbot/modules/afk.py
|
realityizzlie/OpenUserBot
|
5a8c1ee1ac59b6c73baa7d5b030a9e99e2ed3515
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/afk.py
|
realityizzlie/OpenUserBot
|
5a8c1ee1ac59b6c73baa7d5b030a9e99e2ed3515
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/afk.py
|
realityizzlie/OpenUserBot
|
5a8c1ee1ac59b6c73baa7d5b030a9e99e2ed3515
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
# Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.d (the "License");
# you may not use this file except in compliance with the License.
# All Credits to https://t.me/azrim89 for timestamp.
# All Credits to https://t.me/Devp73 for Offline stamps..
#
""" Userbot module which contains afk-related commands """
from datetime import datetime, timedelta
import time
from platform import uname
from random import choice, randint
from asyncio import sleep
from telethon.events import StopPropagation
from telethon.tl.functions.account import UpdateProfileRequest
from userbot import (AFKREASON, COUNT_MSG, CMD_HELP, ISAFK, BOTLOG,
BOTLOG_CHATID, USERS, PM_AUTO_BAN, bot, ALIVE_NAME, is_redis_alive)
from userbot.events import register
# ========================= CONSTANTS ============================
AFKSTR = [
"`I'm busy right now. Please talk in a bag and when I come back you can just give me the bag!`",
"I'm away right now. If you need anything, leave a message after the beep:\n`beeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeep`!",
"`You missed me, next time aim better.`",
"`I'll be back in a few minutes and if I'm not...,\nwait longer.`",
"`I'm not here right now, so I'm probably somewhere else.`",
"`Roses are red,\nViolets are blue,\nLeave me a message,\nAnd I'll get back to you.`",
"`Sometimes the best things in life are worth waiting for…\nI'll be right back.`",
"`I'll be right back,\nbut if I'm not right back,\nI'll be back later.`",
"`If you haven't figured it out already,\nI'm not here.`",
"`Hello, welcome to my away message, how may I ignore you today?`",
"`I'm away over 7 seas and 7 countries,\n7 waters and 7 continents,\n7 mountains and 7 hills,\n7 plains and 7 mounds,\n7 pools and 7 lakes,\n7 springs and 7 meadows,\n7 cities and 7 neighborhoods,\n7 blocks and 7 houses...\n\nWhere not even your messages can reach me!`",
"`I'm away from the keyboard at the moment, but if you'll scream loud enough at your screen, I might just hear you.`",
"`I went that way\n---->`",
"`I went this way\n<----`",
"`Please leave a message and make me feel even more important than I already am.`",
"`I am not here so stop writing to me,\nor else you will find yourself with a screen full of your own messages.`",
"`If I were here,\nI'd tell you where I am.\n\nBut I'm not,\nso ask me when I return...`",
"`I am away!\nI don't know when I'll be back!\nHopefully a few minutes from now!`",
"`I'm not available right now so please leave your name, number, and address and I will stalk you later.`",
"`Sorry, I'm not here right now.\nFeel free to talk to my userbot as long as you like.\nI'll get back to you later.`",
"`I bet you were expecting an away message!`",
"`Life is so short, there are so many things to do...\nI'm away doing one of them..`",
"`I am not here right now...\nbut if I was...\n\nwouldn't that be awesome?`",
]
# ================= CONSTANT =================
DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else uname().node
AFKSK = str(choice(AFKSTR))
# ============================================
global USER_AFK # pylint:disable=E0602
global afk_time # pylint:disable=E0602
global afk_start
global afk_end
USER_AFK = {}
afk_time = None
afk_start = {}
# =================================================================
@register(outgoing=True, pattern="^.afk(?: |$)(.*)", disable_errors=True)
async def set_afk(afk_e):
""" For .afk command, allows you to inform people that you are afk when they message you """
message = afk_e.text
string = afk_e.pattern_match.group(1)
global ISAFK
global AFKREASON
global USER_AFK # pylint:disable=E0602
global afk_time # pylint:disable=E0602
global afk_start
global afk_end
user = await bot.get_me()
USER_AFK = {}
afk_time = None
afk_end = {}
start_1 = datetime.now()
afk_start = start_1.replace(microsecond=0)
if string:
AFKREASON = string
await afk_e.edit(f"**Going AFK!**\
\nReason: `{string}`")
else:
await afk_e.edit("**Going AFK!**")
if user.last_name:
await afk_e.client(UpdateProfileRequest(first_name=user.first_name, last_name=user.last_name + " [ OFFLINE ]"))
else:
await afk_e.client(UpdateProfileRequest(first_name=user.first_name, last_name=" [ OFFLINE ]"))
if BOTLOG:
await afk_e.client.send_message(BOTLOG_CHATID, "#AFK\nYou went AFK!")
ISAFK = True
afk_time = datetime.now() # pylint:disable=E0602
raise StopPropagation
@register(outgoing=True)
async def type_afk_is_not_true(notafk):
""" This sets your status as not afk automatically when you write something while being afk """
global ISAFK
global COUNT_MSG
global USERS
global AFKREASON
global USER_AFK # pylint:disable=E0602
global afk_time # pylint:disable=E0602
global afk_start
global afk_end
user = await bot.get_me()
last = user.last_name
if last and last.endswith(" [ OFFLINE ]"):
last1 = last[:-12]
else:
last1 = ""
back_alive = datetime.now()
afk_end = back_alive.replace(microsecond=0)
if ISAFK:
ISAFK = False
msg = await notafk.respond("**My Master is back !**")
time.sleep(3)
await msg.delete()
await notafk.client(UpdateProfileRequest(first_name=user.first_name, last_name=last1))
if BOTLOG:
await notafk.client.send_message(
BOTLOG_CHATID,
"You've recieved " + str(COUNT_MSG) + " messages from " +
str(len(USERS)) + " chats while you were away",
)
for i in USERS:
name = await notafk.client.get_entity(i)
name0 = str(name.first_name)
await notafk.client.send_message(
BOTLOG_CHATID,
"[" + name0 + "](tg://user?id=" + str(i) + ")" +
" sent you " + "`" + str(USERS[i]) + " messages`",
)
COUNT_MSG = 0
USERS = {}
AFKREASON = None
@register(incoming=True, disable_edited=True)
async def mention_afk(mention):
""" This function takes care of notifying the people who mention you that you are AFK."""
global COUNT_MSG
global USERS
global ISAFK
global USER_AFK # pylint:disable=E0602
global afk_time # pylint:disable=E0602
global afk_start
global afk_end
user = await bot.get_me()
back_alivee = datetime.now()
afk_end = back_alivee.replace(microsecond=0)
afk_since = "**a while ago**"
if mention.message.mentioned and not (await mention.get_sender()).bot:
if ISAFK:
now = datetime.now()
datime_since_afk = now - afk_time # pylint:disable=E0602
time = float(datime_since_afk.seconds)
days = time // (24 * 3600)
time = time % (24 * 3600)
hours = time // 3600
time %= 3600
minutes = time // 60
time %= 60
seconds = time
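            # Worked example (illustrative comment): time = 3725.0 seconds
            # breaks down to days=0, hours=1, minutes=2, seconds=5.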
if days == 1:
afk_since = "**Yesterday**"
elif days > 1:
if days > 6:
                    date = now + timedelta(
                        days=-days, hours=-hours, minutes=-minutes)
afk_since = date.strftime("%A, %Y %B %m, %H:%I")
else:
wday = now + datetime.timedelta(days=-days)
afk_since = wday.strftime('%A')
elif hours > 1:
afk_since = f"`{int(hours)}h {int(minutes)}m`"
elif minutes > 0:
afk_since = f"`{int(minutes)}m {int(seconds)}s`"
else:
afk_since = f"`{int(seconds)}s`"
if mention.sender_id not in USERS:
if AFKREASON:
await mention.reply(f"My Master **{DEFAULTUSER}** Is still **afk since** {afk_since}.\
\n**Because My Master is** `{AFKREASON}`")
else:
await mention.reply(f"My Master 👑 {DEFAULTUSER} 👑 is **afk Since** {afk_since}.\nand My Master Has Left a Word for You Only: \n{AFKSK}\n`.` ")
USERS.update({mention.sender_id: 1})
COUNT_MSG = COUNT_MSG + 1
elif mention.sender_id in USERS:
if USERS[mention.sender_id] % randint(2, 4) == 0:
if AFKREASON:
await mention.reply(f"My Master **{DEFAULTUSER}** Is still **afk since** {afk_since}.\
\n**Because My Master is** `{AFKREASON}`")
else:
await mention.reply(f"My Master 👑 {DEFAULTUSER} 👑 is **afk Since** {afk_since}.\nand My Master Has Left a Word for You Only: \n{AFKSK}\n`.` ")
USERS[mention.sender_id] = USERS[mention.sender_id] + 1
COUNT_MSG = COUNT_MSG + 1
else:
USERS[mention.sender_id] = USERS[mention.sender_id] + 1
COUNT_MSG = COUNT_MSG + 1
@register(incoming=True, disable_errors=True)
async def afk_on_pm(sender):
""" Function which informs people that you are AFK in PM """
    global ISAFK
    global USERS
    global COUNT_MSG
global USER_AFK # pylint:disable=E0602
global afk_time # pylint:disable=E0602
global afk_start
global afk_end
user = await bot.get_me()
back_alivee = datetime.now()
afk_end = back_alivee.replace(microsecond=0)
afk_since = "**a while ago**"
if sender.is_private and sender.sender_id != 777000 and not (
await sender.get_sender()).bot:
if PM_AUTO_BAN:
try:
from userbot.modules.sql_helper.pm_permit_sql import is_approved
apprv = is_approved(sender.sender_id)
except AttributeError:
apprv = True
else:
apprv = True
if apprv and ISAFK:
now = datetime.now()
datime_since_afk = now - afk_time # pylint:disable=E0602
time = float(datime_since_afk.seconds)
days = time // (24 * 3600)
time = time % (24 * 3600)
hours = time // 3600
time %= 3600
minutes = time // 60
time %= 60
seconds = time
if days == 1:
afk_since = "**yesterday**"
elif days > 1:
if days > 6:
                    date = now + timedelta(
                        days=-days, hours=-hours, minutes=-minutes)
afk_since = date.strftime("%A, %Y %B %m, %H:%I")
else:
wday = now + datetime.timedelta(days=-days)
afk_since = wday.strftime('%A')
elif hours > 1:
afk_since = f"`{int(hours)}h {int(minutes)}m`"
elif minutes > 0:
afk_since = f"`{int(minutes)}m {int(seconds)}s`"
else:
afk_since = f"`{int(seconds)}s`"
if sender.sender_id not in USERS:
if AFKREASON:
await sender.reply(f"My Master **{DEFAULTUSER}** is **afk since** {afk_since}.\
\n**Because My Master is** `{AFKREASON}`\n So please wait for my master a.k.a Sandesh to be online or kill yourself.😏😏")
else:
await sender.reply(f"My Master 👑 {DEFAULTUSER} 👑 is **afk Since** {afk_since}.\nand My Master Has Left a Word for You Only: \n{AFKSK}\n`.` ")
USERS.update({sender.sender_id: 1})
COUNT_MSG = COUNT_MSG + 1
elif apprv and sender.sender_id in USERS:
if USERS[sender.sender_id] % randint(2, 4) == 0:
if AFKREASON:
await sender.reply(f"My Master **{DEFAULTUSER}** Is **still afk since** {afk_since}.\
\n**Because My Master is** `{AFKREASON}`")
else:
await sender.reply(f"My Master 👑 {DEFAULTUSER} 👑 is **afk Since** {afk_since}.\nand My Master Has Left a Word for You Only: \n{AFKSK}\n`.` ")
USERS[sender.sender_id] = USERS[sender.sender_id] + 1
COUNT_MSG = COUNT_MSG + 1
else:
USERS[sender.sender_id] = USERS[sender.sender_id] + 1
COUNT_MSG = COUNT_MSG + 1
CMD_HELP.update({
"afk":
"`.afk` [Optional Reason]\
\nUsage: Sets you as afk.\nReplies to anyone who tags/PM's \
you telling them that you are AFK(reason).\n\nSwitches off AFK when you type back anything, anywhere.\
"
})
| 43.965753
| 275
| 0.573687
|
9a05c459b4f951bccfcaada7bbfaa39260b14eb0
| 600
|
py
|
Python
|
Section 3 - Strings/Str Ops - construction.py
|
gitjot/python-for-lccs
|
a8a4ae8847abbc33361f80183c06d57b20523382
|
[
"CC0-1.0"
] | 10
|
2020-02-14T14:28:15.000Z
|
2022-02-02T18:44:11.000Z
|
Section 3 - Strings/Str Ops - construction.py
|
gitjot/python-for-lccs
|
a8a4ae8847abbc33361f80183c06d57b20523382
|
[
"CC0-1.0"
] | null | null | null |
Section 3 - Strings/Str Ops - construction.py
|
gitjot/python-for-lccs
|
a8a4ae8847abbc33361f80183c06d57b20523382
|
[
"CC0-1.0"
] | 8
|
2020-03-25T09:27:42.000Z
|
2021-11-03T15:24:38.000Z
|
# Event: LCCS Python Fundamental Skills Workshop
# Date: May 2018
# Author: Joe English, PDST
# eMail: computerscience@pdst.ie
# Purpose: A program to demonstrate string construction
# Initialise the output string
outStr = "The quick "
# Ask the user for a colour
colour = input("Please enter a colour: ")
# Concatenate the colour to the output
outStr = outStr + colour
outStr = outStr + " fox jumps over the lazy "
# Ask the user for an animal
animal = input("Please enter an animal: ")
outStr = outStr + animal
outStr = outStr + "!"
# Display the output
print( outStr )
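# Sample run (hypothetical input, added for illustration):
#   Please enter a colour: brown
#   Please enter an animal: dog
# prints: The quick brown fox jumps over the lazy dog!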
| 25
| 56
| 0.698333
|
ca2c42b982d7182410ac9b3bef8818bce5499f9a
| 14,710
|
py
|
Python
|
test/benchmark/utils_final.py
|
jjjjohnson/InsightFace_TF
|
2aa73fba06a145e1602ea9cba89ea4e0adc9c85a
|
[
"MIT"
] | 721
|
2018-03-07T10:13:14.000Z
|
2022-03-05T12:05:32.000Z
|
test/benchmark/utils_final.py
|
jjjjohnson/InsightFace_TF
|
2aa73fba06a145e1602ea9cba89ea4e0adc9c85a
|
[
"MIT"
] | 107
|
2018-03-14T07:36:33.000Z
|
2021-12-23T12:46:43.000Z
|
test/benchmark/utils_final.py
|
jjjjohnson/InsightFace_TF
|
2aa73fba06a145e1602ea9cba89ea4e0adc9c85a
|
[
"MIT"
] | 283
|
2018-03-06T02:55:04.000Z
|
2022-02-24T06:51:16.000Z
|
from math import exp
from mxnet import gluon
from mxnet import autograd
from mxnet import nd
from mxnet import image
from mxnet.gluon import nn
import mxnet as mx
import numpy as np
from time import time
import matplotlib.pyplot as plt
import random
class DataLoader(object):
"""similiar to gluon.data.DataLoader, but might be faster.
The main difference this data loader tries to read more exmaples each
time. But the limits are 1) all examples in dataset have the same shape, 2)
data transfomer needs to process multiple examples at each time
"""
def __init__(self, dataset, batch_size, shuffle, transform=None):
self.dataset = dataset
self.batch_size = batch_size
self.shuffle = shuffle
self.transform = transform
def __iter__(self):
data = self.dataset[:]
X = data[0]
y = nd.array(data[1])
n = X.shape[0]
if self.shuffle:
idx = np.arange(n)
np.random.shuffle(idx)
X = nd.array(X.asnumpy()[idx])
y = nd.array(y.asnumpy()[idx])
for i in range(n // self.batch_size):
if self.transform is not None:
yield self.transform(X[i * self.batch_size:(i + 1) * self.batch_size],
y[i * self.batch_size:(i + 1) * self.batch_size])
else:
yield (X[i * self.batch_size:(i + 1) * self.batch_size],
y[i * self.batch_size:(i + 1) * self.batch_size])
def __len__(self):
return len(self.dataset) // self.batch_size
def load_data_fashion_mnist(batch_size, resize=None, root="~/.mxnet/datasets/fashion-mnist"):
"""download the fashion mnist dataest and then load into memory"""
def transform_mnist(data, label):
# Transform a batch of examples.
if resize:
n = data.shape[0]
new_data = nd.zeros((n, resize, resize, data.shape[3]))
for i in range(n):
new_data[i] = image.imresize(data[i], resize, resize)
data = new_data
# change data from batch x height x width x channel to batch x channel x height x width
return nd.transpose(data.astype('float32'), (0, 3, 1, 2)) / 255, label.astype('float32')
mnist_train = gluon.data.vision.FashionMNIST(root=root, train=True, transform=None)
mnist_test = gluon.data.vision.FashionMNIST(root=root, train=False, transform=None)
# Transform later to avoid memory explosion.
train_data = DataLoader(mnist_train, batch_size, shuffle=True, transform=transform_mnist)
test_data = DataLoader(mnist_test, batch_size, shuffle=False, transform=transform_mnist)
return (train_data, test_data)
def load_data_mnist(batch_size, resize=None, root="~/.mxnet/datasets/mnist"):
"""download the fashion mnist dataest and then load into memory"""
def transform_mnist(data, label):
# Transform a batch of examples.
if resize:
n = data.shape[0]
new_data = nd.zeros((n, resize, resize, data.shape[3]))
for i in range(n):
new_data[i] = image.imresize(data[i], resize, resize)
data = new_data
# change data from batch x height x width x channel to batch x channel x height x width
return nd.transpose(data.astype('float32'), (0, 3, 1, 2)) / 255, label.astype('float32')
mnist_train = gluon.data.vision.MNIST(root=root, train=True, transform=None)
mnist_test = gluon.data.vision.MNIST(root=root, train=False, transform=None)
# Transform later to avoid memory explosion.
train_data = DataLoader(mnist_train, batch_size, shuffle=True, transform=transform_mnist)
test_data = DataLoader(mnist_test, batch_size, shuffle=False, transform=transform_mnist)
return (train_data, test_data)
def try_gpu():
"""If GPU is available, return mx.gpu(0); else return mx.cpu()"""
try:
ctx = mx.gpu()
_ = nd.array([0], ctx=ctx)
except:
ctx = mx.cpu()
return ctx
def try_all_gpus():
"""Return all available GPUs, or [mx.gpu()] if there is no GPU"""
ctx_list = []
try:
for i in range(16):
ctx = mx.gpu(i)
_ = nd.array([0], ctx=ctx)
ctx_list.append(ctx)
except:
pass
if not ctx_list:
ctx_list = [mx.cpu()]
return ctx_list
def SGD(params, lr):
for param in params:
param[:] = param - lr * param.grad
def accuracy(output, label):
return nd.mean(output.argmax(axis=1) == label).asscalar()
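# Illustrative note (not in the original file): for output rows
# [[0.1, 0.9], [0.8, 0.2]] and label [1, 0], both argmax predictions match,
# so accuracy(...) returns 1.0.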
def _get_batch(batch, ctx):
"""return data and label on ctx"""
if isinstance(batch, mx.io.DataBatch):
data = batch.data[0]
label = batch.label[0]
else:
data, label = batch
return (gluon.utils.split_and_load(data, ctx),
gluon.utils.split_and_load(label, ctx),
data.shape[0])
def evaluate_accuracy(data_iterator, net, ctx=[mx.cpu()]):
if isinstance(ctx, mx.Context):
ctx = [ctx]
acc = nd.array([0])
n = 0.
if isinstance(data_iterator, mx.io.MXDataIter):
data_iterator.reset()
for batch in data_iterator:
data, label, batch_size = _get_batch(batch, ctx)
for X, y in zip(data, label):
acc += nd.sum(net(X).argmax(axis=1) == y).copyto(mx.cpu())
n += y.size
acc.wait_to_read() # don't push too many operators into backend
return acc.asscalar() / n
def train(train_data, test_data, net, loss, trainer, ctx, num_epochs, print_batches=None):
"""Train a network"""
print("Start training on ", ctx)
if isinstance(ctx, mx.Context):
ctx = [ctx]
for epoch in range(num_epochs):
train_loss, train_acc, n, m = 0.0, 0.0, 0.0, 0.0
if isinstance(train_data, mx.io.MXDataIter):
train_data.reset()
start = time()
for i, batch in enumerate(train_data):
data, label, batch_size = _get_batch(batch, ctx)
losses = []
with autograd.record():
outputs = [net(X) for X in data]
losses = [loss(yhat, y) for yhat, y in zip(outputs, label)]
for l in losses:
l.backward()
train_acc += sum([(yhat.argmax(axis=1) == y).sum().asscalar()
for yhat, y in zip(outputs, label)])
train_loss += sum([l.sum().asscalar() for l in losses])
trainer.step(batch_size)
n += batch_size
m += sum([y.size for y in label])
            if print_batches and (i + 1) % print_batches == 0:
                print("Batch %d. Loss: %f, Train acc %f" % (
                    i + 1, train_loss / n, train_acc / m
                ))
test_acc = evaluate_accuracy(test_data, net, ctx)
print("Epoch %d. Loss: %.3f, Train acc %.2f, Test acc %.2f, Time %.1f sec" % (
epoch, train_loss / n, train_acc / m, test_acc, time() - start
))
class Residual(nn.HybridBlock):
def __init__(self, channels, same_shape=True, **kwargs):
super(Residual, self).__init__(**kwargs)
self.same_shape = same_shape
with self.name_scope():
strides = 1 if same_shape else 2
self.conv1 = nn.Conv2D(channels, kernel_size=3, padding=1,
strides=strides)
self.bn1 = nn.BatchNorm()
self.conv2 = nn.Conv2D(channels, kernel_size=3, padding=1)
self.bn2 = nn.BatchNorm()
if not same_shape:
self.conv3 = nn.Conv2D(channels, kernel_size=1,
strides=strides)
def hybrid_forward(self, F, x):
out = F.relu(self.bn1(self.conv1(x)))
out = self.bn2(self.conv2(out))
if not self.same_shape:
x = self.conv3(x)
return F.relu(out + x)
def resnet18(num_classes):
net = nn.HybridSequential()
with net.name_scope():
net.add(
nn.BatchNorm(),
nn.Conv2D(64, kernel_size=3, strides=1),
nn.MaxPool2D(pool_size=3, strides=2),
Residual(64),
Residual(64),
Residual(128, same_shape=False),
Residual(128),
Residual(256, same_shape=False),
Residual(256),
nn.GlobalAvgPool2D(),
nn.Dense(num_classes)
)
return net
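# Hypothetical usage sketch (added for illustration; names assume this module):
#   net = resnet18(num_classes=10)
#   net.initialize(ctx=try_gpu())
#   out = net(nd.random.uniform(shape=(1, 1, 28, 28)))  # out.shape == (1, 10)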
def show_images(imgs, nrows, ncols, figsize=None):
"""plot a list of images"""
if not figsize:
figsize = (ncols, nrows)
_, figs = plt.subplots(nrows, ncols, figsize=figsize)
for i in range(nrows):
for j in range(ncols):
figs[i][j].imshow(imgs[i * ncols + j].asnumpy())
figs[i][j].axes.get_xaxis().set_visible(False)
figs[i][j].axes.get_yaxis().set_visible(False)
plt.show()
def data_iter_random(corpus_indices, batch_size, num_steps, ctx=None):
"""Sample mini-batches in a random order from sequential data."""
# Subtract 1 because label indices are corresponding input indices + 1.
num_examples = (len(corpus_indices) - 1) // num_steps
epoch_size = num_examples // batch_size
# Randomize samples.
example_indices = list(range(num_examples))
random.shuffle(example_indices)
def _data(pos):
return corpus_indices[pos: pos + num_steps]
for i in range(epoch_size):
# Read batch_size random samples each time.
i = i * batch_size
batch_indices = example_indices[i: i + batch_size]
data = nd.array(
[_data(j * num_steps) for j in batch_indices], ctx=ctx)
label = nd.array(
[_data(j * num_steps + 1) for j in batch_indices], ctx=ctx)
yield data, label
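# Worked example (illustrative comment): with len(corpus_indices) == 10,
# num_steps == 3 and batch_size == 2, num_examples == (10 - 1) // 3 == 3 and
# epoch_size == 3 // 2 == 1, so a single (data, label) pair of shape (2, 3)
# is yielded per epoch.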
def data_iter_consecutive(corpus_indices, batch_size, num_steps, ctx=None):
"""Sample mini-batches in a consecutive order from sequential data."""
corpus_indices = nd.array(corpus_indices, ctx=ctx)
data_len = len(corpus_indices)
batch_len = data_len // batch_size
indices = corpus_indices[0: batch_size * batch_len].reshape((
batch_size, batch_len))
# Subtract 1 because label indices are corresponding input indices + 1.
epoch_size = (batch_len - 1) // num_steps
for i in range(epoch_size):
i = i * num_steps
data = indices[:, i: i + num_steps]
label = indices[:, i + 1: i + num_steps + 1]
yield data, label
def grad_clipping(params, clipping_norm, ctx):
"""Gradient clipping."""
if clipping_norm is not None:
norm = nd.array([0.0], ctx)
for p in params:
norm += nd.sum(p.grad ** 2)
norm = nd.sqrt(norm).asscalar()
if norm > clipping_norm:
for p in params:
p.grad[:] *= clipping_norm / norm
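# Illustrative note (not in the original file): if the global gradient norm is
# 10 and clipping_norm is 5, every gradient is scaled by 5 / 10 == 0.5, so the
# rescaled global norm equals clipping_norm exactly.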
def predict_rnn(rnn, prefix, num_chars, params, hidden_dim, ctx, idx_to_char,
char_to_idx, get_inputs, is_lstm=False):
"""Predict the next chars given the prefix."""
prefix = prefix.lower()
state_h = nd.zeros(shape=(1, hidden_dim), ctx=ctx)
if is_lstm:
state_c = nd.zeros(shape=(1, hidden_dim), ctx=ctx)
output = [char_to_idx[prefix[0]]]
for i in range(num_chars + len(prefix)):
X = nd.array([output[-1]], ctx=ctx)
if is_lstm:
Y, state_h, state_c = rnn(get_inputs(X), state_h, state_c, *params)
else:
Y, state_h = rnn(get_inputs(X), state_h, *params)
if i < len(prefix) - 1:
next_input = char_to_idx[prefix[i + 1]]
else:
next_input = int(Y[0].argmax(axis=1).asscalar())
output.append(next_input)
return ''.join([idx_to_char[i] for i in output])
def train_and_predict_rnn(rnn, is_random_iter, epochs, num_steps, hidden_dim,
learning_rate, clipping_norm, batch_size,
pred_period, pred_len, seqs, get_params, get_inputs,
ctx, corpus_indices, idx_to_char, char_to_idx,
is_lstm=False):
"""Train an RNN model and predict the next item in the sequence."""
if is_random_iter:
data_iter = data_iter_random
else:
data_iter = data_iter_consecutive
params = get_params()
softmax_cross_entropy = gluon.loss.SoftmaxCrossEntropyLoss()
for e in range(1, epochs + 1):
# If consecutive sampling is used, in the same epoch, the hidden state
# is initialized only at the beginning of the epoch.
if not is_random_iter:
state_h = nd.zeros(shape=(batch_size, hidden_dim), ctx=ctx)
if is_lstm:
state_c = nd.zeros(shape=(batch_size, hidden_dim), ctx=ctx)
train_loss, num_examples = 0, 0
for data, label in data_iter(corpus_indices, batch_size, num_steps,
ctx):
# If random sampling is used, the hidden state has to be
# initialized for each mini-batch.
if is_random_iter:
state_h = nd.zeros(shape=(batch_size, hidden_dim), ctx=ctx)
if is_lstm:
state_c = nd.zeros(shape=(batch_size, hidden_dim), ctx=ctx)
with autograd.record():
# outputs shape: (batch_size, vocab_size)
if is_lstm:
outputs, state_h, state_c = rnn(get_inputs(data), state_h,
state_c, *params)
else:
outputs, state_h = rnn(get_inputs(data), state_h, *params)
# Let t_ib_j be the j-th element of the mini-batch at time i.
# label shape: (batch_size * num_steps)
# label = [t_0b_0, t_0b_1, ..., t_1b_0, t_1b_1, ..., ].
label = label.T.reshape((-1,))
# Concatenate outputs:
# shape: (batch_size * num_steps, vocab_size).
outputs = nd.concat(*outputs, dim=0)
# Now outputs and label are aligned.
loss = softmax_cross_entropy(outputs, label)
loss.backward()
grad_clipping(params, clipping_norm, ctx)
SGD(params, learning_rate)
train_loss += nd.sum(loss).asscalar()
num_examples += loss.size
if e % pred_period == 0:
print("Epoch %d. Training perplexity %f" % (e,
exp(train_loss / num_examples)))
for seq in seqs:
print(' - ', predict_rnn(rnn, seq, pred_len, params,
hidden_dim, ctx, idx_to_char, char_to_idx, get_inputs,
is_lstm))
print()
| 38.207792
| 96
| 0.584908
|
0eb4b0913659bdf35a80909206c5b0ffef803e3e
| 242
|
py
|
Python
|
{{cookiecutter.project_slug}}/backend/app/app/core/celery_app.py
|
sam-mosleh/full-stack-fastapi-postgresql
|
d2274eb6da91ecee2c54bb128251b804c5ba6a1d
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_slug}}/backend/app/app/core/celery_app.py
|
sam-mosleh/full-stack-fastapi-postgresql
|
d2274eb6da91ecee2c54bb128251b804c5ba6a1d
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_slug}}/backend/app/app/core/celery_app.py
|
sam-mosleh/full-stack-fastapi-postgresql
|
d2274eb6da91ecee2c54bb128251b804c5ba6a1d
|
[
"MIT"
] | null | null | null |
from celery import Celery
from app.core.config import settings
celery_app = Celery(
"worker", broker=settings.CELERY_REDIS_DSN, backend=settings.CELERY_REDIS_DSN
)
celery_app.conf.task_routes = {"app.worker.test_celery": "main-queue"}
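# Hypothetical usage sketch (not part of the original file):
#   celery_app.send_task("app.worker.test_celery", args=["hello"])
# dispatches the task onto "main-queue" per the routing table above.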
| 24.2
| 81
| 0.785124
|
afc3fa21e5ead07a8ed3ecb7eb5bbf5a04acec5f
| 20
|
py
|
Python
|
BLSeg/blseg/model/fcn/__init__.py
|
ForrestPi/semanticSegmentation
|
1e5519279e2a9574f09eaf91439138b74b0f860c
|
[
"MIT"
] | 7
|
2020-04-06T10:25:30.000Z
|
2021-02-24T14:51:22.000Z
|
BLSeg/blseg/model/fcn/__init__.py
|
ForrestPi/semanticSegmentation
|
1e5519279e2a9574f09eaf91439138b74b0f860c
|
[
"MIT"
] | null | null | null |
BLSeg/blseg/model/fcn/__init__.py
|
ForrestPi/semanticSegmentation
|
1e5519279e2a9574f09eaf91439138b74b0f860c
|
[
"MIT"
] | 2
|
2020-04-08T14:43:21.000Z
|
2020-12-11T03:03:37.000Z
|
from .fcn import FCN
| 20
| 20
| 0.8
|
a2926304c9e1f4ed56c86494fb0cf6f64d13105a
| 16,797
|
py
|
Python
|
src/compas_rhino/artists/meshartist.py
|
ioanna21/compas
|
a8ab31b84744c74efb59249cd9a78d1a08bc4856
|
[
"MIT"
] | null | null | null |
src/compas_rhino/artists/meshartist.py
|
ioanna21/compas
|
a8ab31b84744c74efb59249cd9a78d1a08bc4856
|
[
"MIT"
] | null | null | null |
src/compas_rhino/artists/meshartist.py
|
ioanna21/compas
|
a8ab31b84744c74efb59249cd9a78d1a08bc4856
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from functools import partial
import compas_rhino
from compas_rhino.artists._artist import BaseArtist
from compas.utilities import color_to_colordict
from compas.utilities import pairwise
from compas.geometry import add_vectors
from compas.geometry import scale_vector
from compas.geometry import centroid_polygon
from compas.geometry import centroid_points
colordict = partial(color_to_colordict, colorformat='rgb', normalize=False)
__all__ = ['MeshArtist']
class MeshArtist(BaseArtist):
"""A mesh artist defines functionality for visualising COMPAS meshes in Rhino.
Parameters
----------
mesh : :class:`compas.datastructures.Mesh`
A COMPAS mesh.
layer : str, optional
The name of the layer that will contain the mesh.
Attributes
----------
mesh : :class:`compas.datastructures.Mesh`
The COMPAS mesh associated with the artist.
layer : str
The layer in which the mesh should be contained.
color_vertices : 3-tuple
Default color of the vertices.
color_edges : 3-tuple
Default color of the edges.
color_faces : 3-tuple
Default color of the faces.
Examples
--------
.. code-block:: python
import compas
from compas.datastructures import Mesh
from compas_rhino.artists import MeshArtist
mesh = Mesh.from_obj(compas.get('faces.obj'))
artist = MeshArtist(mesh, layer='COMPAS::MeshArtist')
artist.clear_layer()
artist.draw_faces(join_faces=True)
artist.draw_vertices(color={key: '#ff0000' for key in mesh.vertices_on_boundary()})
artist.draw_edges()
artist.redraw()
"""
def __init__(self, mesh, layer=None):
super(MeshArtist, self).__init__()
self._mesh = None
self._vertex_xyz = None
self.mesh = mesh
self.layer = layer
self.color_vertices = (255, 255, 255)
self.color_edges = (0, 0, 0)
self.color_faces = (0, 0, 0)
@property
def mesh(self):
return self._mesh
@mesh.setter
def mesh(self, mesh):
self._mesh = mesh
self._vertex_xyz = None
@property
def vertex_xyz(self):
"""dict:
The view coordinates of the mesh vertices.
The view coordinates default to the actual mesh coordinates.
"""
if not self._vertex_xyz:
self._vertex_xyz = {vertex: self.mesh.vertex_attributes(vertex, 'xyz') for vertex in self.mesh.vertices()}
return self._vertex_xyz
@vertex_xyz.setter
def vertex_xyz(self, vertex_xyz):
self._vertex_xyz = vertex_xyz
# ==========================================================================
# clear
# ==========================================================================
def clear_by_name(self):
"""Clear all objects in the "namespace" of the associated mesh."""
guids = compas_rhino.get_objects(name="{}.*".format(self.mesh.name))
compas_rhino.delete_objects(guids, purge=True)
def clear_layer(self):
"""Clear the main layer of the artist."""
if self.layer:
compas_rhino.clear_layer(self.layer)
# ==========================================================================
# draw
# ==========================================================================
def draw(self):
"""Draw the mesh using the chosen visualisation settings.
Returns
-------
list
The GUIDs of the created Rhino objects.
"""
guids = self.draw_vertices()
guids += self.draw_faces()
guids += self.draw_edges()
return guids
def draw_mesh(self, color=(0, 0, 0), disjoint=False):
"""Draw the mesh as a consolidated RhinoMesh.
Parameters
----------
color : tuple, optional
The color of the mesh.
Default is black, ``(0, 0, 0)``.
disjoint : bool, optional
Draw the faces of the mesh with disjoint vertices.
Default is ``False``.
Returns
-------
list
The GUIDs of the created Rhino objects.
Notes
-----
The mesh should be a valid Rhino Mesh object, which means it should have only triangular or quadrilateral faces.
Faces with more than 4 vertices will be triangulated on-the-fly.
"""
vertex_index = self.mesh.key_index()
vertex_xyz = self.vertex_xyz
vertices = [vertex_xyz[vertex] for vertex in self.mesh.vertices()]
faces = [[vertex_index[vertex] for vertex in self.mesh.face_vertices(face)] for face in self.mesh.faces()]
new_faces = []
for face in faces:
f = len(face)
if f == 3:
new_faces.append(face + face[-1:])
elif f == 4:
new_faces.append(face)
elif f > 4:
centroid = len(vertices)
vertices.append(centroid_polygon([vertices[index] for index in face]))
for a, b in pairwise(face + face[0:1]):
new_faces.append([centroid, a, b, b])
else:
continue
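        # Note (comment added for illustration): faces with more than four
        # vertices are fan-triangulated above by appending their centroid as a
        # new vertex and emitting degenerate quads [centroid, a, b, b], which
        # Rhino renders as triangles.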
layer = self.layer
name = "{}".format(self.mesh.name)
guid = compas_rhino.draw_mesh(vertices, new_faces, layer=layer, name=name, color=color, disjoint=disjoint)
return [guid]
def draw_vertices(self, vertices=None, color=None):
"""Draw a selection of vertices.
Parameters
----------
vertices : list
A selection of vertices to draw.
Default is ``None``, in which case all vertices are drawn.
color : tuple or dict of tuple, optional
            The color specification for the vertices.
The default is white, ``(255, 255, 255)``.
Returns
-------
list
The GUIDs of the created Rhino objects.
"""
vertices = vertices or list(self.mesh.vertices())
vertex_xyz = self.vertex_xyz
vertex_color = colordict(color, vertices, default=self.color_vertices)
points = []
for vertex in vertices:
points.append({
'pos': vertex_xyz[vertex],
'name': "{}.vertex.{}".format(self.mesh.name, vertex),
'color': vertex_color[vertex]})
return compas_rhino.draw_points(points, layer=self.layer, clear=False, redraw=False)
def draw_faces(self, faces=None, color=None, join_faces=False):
"""Draw a selection of faces.
Parameters
----------
faces : list, optional
A selection of faces to draw.
The default is ``None``, in which case all faces are drawn.
color : tuple or dict of tuple, optional
            The color specification for the faces.
The default color is black ``(0, 0, 0)``.
join_faces : bool, optional
Join the faces into 1 mesh.
Default is ``False``, in which case the faces are drawn as individual meshes.
Returns
-------
list
The GUIDs of the created Rhino objects.
"""
faces = faces or list(self.mesh.faces())
vertex_xyz = self.vertex_xyz
face_color = colordict(color, faces, default=self.color_faces)
facets = []
for face in faces:
facets.append({
'points': [vertex_xyz[vertex] for vertex in self.mesh.face_vertices(face)],
'name': "{}.face.{}".format(self.mesh.name, face),
'color': face_color[face]})
guids = compas_rhino.draw_faces(facets, layer=self.layer, clear=False, redraw=False)
if not join_faces:
return guids
guid = compas_rhino.rs.JoinMeshes(guids, delete_input=True)
compas_rhino.rs.ObjectLayer(guid, self.layer)
compas_rhino.rs.ObjectName(guid, '{}'.format(self.mesh.name))
if color:
compas_rhino.rs.ObjectColor(guid, color)
return [guid]
def draw_edges(self, edges=None, color=None):
"""Draw a selection of edges.
Parameters
----------
edges : list, optional
A selection of edges to draw.
The default is ``None``, in which case all edges are drawn.
color : tuple or dict of tuple, optional
            The color specification for the edges.
The default color is black, ``(0, 0, 0)``.
Returns
-------
list
The GUIDs of the created Rhino objects.
"""
edges = edges or list(self.mesh.edges())
vertex_xyz = self.vertex_xyz
edge_color = colordict(color, edges, default=self.color_edges)
lines = []
for edge in edges:
lines.append({
'start': vertex_xyz[edge[0]],
'end': vertex_xyz[edge[1]],
'color': edge_color[edge],
'name': "{}.edge.{}-{}".format(self.mesh.name, *edge)})
return compas_rhino.draw_lines(lines, layer=self.layer, clear=False, redraw=False)
# ==========================================================================
# draw normals
# ==========================================================================
def draw_vertexnormals(self, vertices=None, color=(0, 255, 0), scale=1.0):
"""Draw the normals at the vertices of the mesh.
Parameters
----------
vertices : list, optional
A selection of vertex normals to draw.
Default is to draw all vertex normals.
color : tuple, optional
The color specification of the normal vectors.
The default color is green, ``(0, 255, 0)``.
scale : float, optional
Scale factor for the vertex normals.
Default is ``1.0``.
Returns
-------
list
The GUIDs of the created Rhino objects.
"""
vertex_xyz = self.vertex_xyz
vertices = vertices or list(self.mesh.vertices())
lines = []
for vertex in vertices:
a = vertex_xyz[vertex]
n = self.mesh.vertex_normal(vertex)
b = add_vectors(a, scale_vector(n, scale))
lines.append({
'start': a,
'end': b,
'color': color,
'name': "{}.vertexnormal.{}".format(self.mesh.name, vertex),
'arrow': 'end'})
return compas_rhino.draw_lines(lines, layer=self.layer, clear=False, redraw=False)
def draw_facenormals(self, faces=None, color=(0, 255, 255), scale=1.0):
"""Draw the normals of the faces.
Parameters
----------
faces : list, optional
A selection of face normals to draw.
Default is to draw all face normals.
color : tuple, optional
The color specification of the normal vectors.
The default color is cyan, ``(0, 255, 255)``.
scale : float, optional
Scale factor for the face normals.
Default is ``1.0``.
Returns
-------
list
The GUIDs of the created Rhino objects.
"""
vertex_xyz = self.vertex_xyz
faces = faces or list(self.mesh.faces())
lines = []
for face in faces:
a = centroid_points([vertex_xyz[vertex] for vertex in self.mesh.face_vertices(face)])
n = self.mesh.face_normal(face)
b = add_vectors(a, scale_vector(n, scale))
lines.append({
'start': a,
'end': b,
'name': "{}.facenormal.{}".format(self.mesh.name, face),
'color': color,
'arrow': 'end'})
return compas_rhino.draw_lines(lines, layer=self.layer, clear=False, redraw=False)
# ==========================================================================
# draw labels
# ==========================================================================
def draw_vertexlabels(self, text=None, color=None):
"""Draw labels for a selection vertices.
Parameters
----------
text : dict, optional
A dictionary of vertex labels as vertex-text pairs.
The default value is ``None``, in which case every vertex will be labelled with its key.
color : tuple or dict of tuple, optional
            The color specification of the labels.
The default color is the same as the default vertex color.
Returns
-------
list
The GUIDs of the created Rhino objects.
"""
if not text or text == 'key':
vertex_text = {vertex: str(vertex) for vertex in self.mesh.vertices()}
elif text == 'index':
vertex_text = {vertex: str(index) for index, vertex in enumerate(self.mesh.vertices())}
elif isinstance(text, dict):
vertex_text = text
else:
raise NotImplementedError
vertex_xyz = self.vertex_xyz
vertex_color = colordict(color, vertex_text.keys(), default=self.color_vertices)
labels = []
for vertex in vertex_text:
labels.append({
'pos': vertex_xyz[vertex],
'name': "{}.vertexlabel.{}".format(self.mesh.name, vertex),
'color': vertex_color[vertex],
'text': vertex_text[vertex]})
return compas_rhino.draw_labels(labels, layer=self.layer, clear=False, redraw=False)
def draw_facelabels(self, text=None, color=None):
"""Draw labels for a selection of faces.
Parameters
----------
text : dict, optional
A dictionary of face labels as face-text pairs.
The default value is ``None``, in which case every face will be labelled with its key.
color : tuple or dict of tuple, optional
            The color specification of the labels.
The default color is the same as the default face color.
Returns
-------
list
The GUIDs of the created Rhino objects.
"""
if not text or text == 'key':
face_text = {face: str(face) for face in self.mesh.faces()}
elif text == 'index':
face_text = {face: str(index) for index, face in enumerate(self.mesh.faces())}
elif isinstance(text, dict):
face_text = text
else:
raise NotImplementedError
vertex_xyz = self.vertex_xyz
face_color = colordict(color, face_text.keys(), default=self.color_faces)
labels = []
for face in face_text:
labels.append({
'pos': centroid_points([vertex_xyz[vertex] for vertex in self.mesh.face_vertices(face)]),
'name': "{}.facelabel.{}".format(self.mesh.name, face),
'color': face_color[face],
'text': face_text[face]})
return compas_rhino.draw_labels(labels, layer=self.layer, clear=False, redraw=False)
def draw_edgelabels(self, text=None, color=None):
"""Draw labels for a selection of edges.
Parameters
----------
text : dict, optional
A dictionary of edge labels as edge-text pairs.
The default value is ``None``, in which case every edge will be labelled with its key.
color : tuple or dict of tuple, optional
            The color specification of the labels.
The default color is the same as the default color for edges.
Returns
-------
list
The GUIDs of the created Rhino objects.
"""
if text is None:
edge_text = {(u, v): "{}-{}".format(u, v) for u, v in self.mesh.edges()}
elif isinstance(text, dict):
edge_text = text
else:
raise NotImplementedError
vertex_xyz = self.vertex_xyz
edge_color = colordict(color, edge_text.keys(), default=self.color_edges)
labels = []
for edge in edge_text:
labels.append({
'pos': centroid_points([vertex_xyz[edge[0]], vertex_xyz[edge[1]]]),
'name': "{}.edgelabel.{}-{}".format(self.mesh.name, *edge),
'color': edge_color[edge],
'text': edge_text[edge]})
return compas_rhino.draw_labels(labels, layer=self.layer, clear=False, redraw=False)
# ==============================================================================
# Main
# ==============================================================================
if __name__ == "__main__":
pass
| 35.66242
| 120
| 0.549146
|
56f12778945c2c01fec8aa352125d3bba26b3e5d
| 4,493
|
py
|
Python
|
tools/run_net.py
|
XXSSOTA/SlowFast
|
c5f5ff68d18fc01289a3777f91e102d680367c4c
|
[
"Apache-2.0"
] | null | null | null |
tools/run_net.py
|
XXSSOTA/SlowFast
|
c5f5ff68d18fc01289a3777f91e102d680367c4c
|
[
"Apache-2.0"
] | null | null | null |
tools/run_net.py
|
XXSSOTA/SlowFast
|
c5f5ff68d18fc01289a3777f91e102d680367c4c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
"""Wrapper to train and test a video classification model."""
import argparse
import sys
import torch
import slowfast.utils.checkpoint as cu
import slowfast.utils.multiprocessing as mpu
from slowfast.config.defaults import get_cfg
from test_net import test
from train_net import train
def parse_args():
"""
Parse the following arguments for the video training and testing pipeline.
Args:
        shard_id (int): shard id for the current machine. Starts from 0 to
            num_shards - 1. If a single machine is used, then set shard id to 0.
        num_shards (int): number of shards used by the job.
        init_method (str): initialization method to launch the job with multiple
            devices. Options include TCP or shared file-system for
            initialization. Details can be found in
            https://pytorch.org/docs/stable/distributed.html#tcp-initialization
        cfg (str): path to the config file.
        opts (argument): provide additional options from the command line; they
            overwrite the config loaded from file.
"""
parser = argparse.ArgumentParser(
description="Provide SlowFast video training and testing pipeline."
)
parser.add_argument(
"--shard_id",
help="The shard id of current node, Starts from 0 to num_shards - 1",
default=0,
type=int,
)
parser.add_argument(
"--num_shards",
help="Number of shards using by the job",
default=1,
type=int,
)
parser.add_argument(
"--init_method",
help="Initialization method, includes TCP or shared file-system",
default="tcp://localhost:9999",
type=str,
)
parser.add_argument(
"--cfg",
dest="cfg_file",
help="Path to the config file",
# default="configs/Kinetics/SLOWFAST_4x16_R50.yaml",
default="../configs/Kinetics/C2D_8x8_R50.yaml",
type=str,
)
parser.add_argument(
"opts",
help="See slowfast/config/defaults.py for all options",
default=None,
nargs=argparse.REMAINDER,
)
if len(sys.argv) == 1:
parser.print_help()
return parser.parse_args()
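# Example invocation (hypothetical paths, added for illustration):
#   python tools/run_net.py --cfg configs/Kinetics/C2D_8x8_R50.yaml \
#       --num_shards 1 --shard_id 0 NUM_GPUS 1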
def load_config(args):
"""
    Given the arguments, load and initialize the configs.
Args:
args (argument): arguments includes `shard_id`, `num_shards`,
`init_method`, `cfg_file`, and `opts`.
"""
# Setup cfg.
cfg = get_cfg()
# Load config from cfg.
if args.cfg_file is not None:
cfg.merge_from_file(args.cfg_file)
# Load config from command line, overwrite config from opts.
if args.opts is not None:
cfg.merge_from_list(args.opts)
# Inherit parameters from args.
if hasattr(args, "num_shards") and hasattr(args, "shard_id"):
cfg.NUM_SHARDS = args.num_shards
cfg.SHARD_ID = args.shard_id
if hasattr(args, "rng_seed"):
cfg.RNG_SEED = args.rng_seed
if hasattr(args, "output_dir"):
cfg.OUTPUT_DIR = args.output_dir
# Create the checkpoint dir.
cu.make_checkpoint_dir(cfg.OUTPUT_DIR)
return cfg
def main():
"""
Main function to spawn the train and test process.
"""
args = parse_args()
cfg = load_config(args)
# Perform training.
if cfg.TRAIN.ENABLE:
if cfg.NUM_GPUS > 1:
torch.multiprocessing.spawn(
mpu.run,
nprocs=cfg.NUM_GPUS,
args=(
cfg.NUM_GPUS,
train,
args.init_method,
cfg.SHARD_ID,
cfg.NUM_SHARDS,
cfg.DIST_BACKEND,
cfg,
),
daemon=False,
)
else:
train(cfg=cfg)
# Perform multi-clip testing.
if cfg.TEST.ENABLE:
if cfg.NUM_GPUS > 1:
torch.multiprocessing.spawn(
mpu.run,
nprocs=cfg.NUM_GPUS,
args=(
cfg.NUM_GPUS,
test,
args.init_method,
cfg.SHARD_ID,
cfg.NUM_SHARDS,
cfg.DIST_BACKEND,
cfg,
),
daemon=False,
)
else:
test(cfg=cfg)
if __name__ == "__main__":
main()
| 29.366013
| 80
| 0.57623
|
d6e9cf8b71eaa9fce8af2eed0448a1a3a6652d3b
| 253
|
py
|
Python
|
HackerRank/TwoArrays.py
|
kokuraxc/play-ground
|
48b5291f3cca117e0cd0a17bf9255ec4dc1a5cdd
|
[
"MIT"
] | null | null | null |
HackerRank/TwoArrays.py
|
kokuraxc/play-ground
|
48b5291f3cca117e0cd0a17bf9255ec4dc1a5cdd
|
[
"MIT"
] | null | null | null |
HackerRank/TwoArrays.py
|
kokuraxc/play-ground
|
48b5291f3cca117e0cd0a17bf9255ec4dc1a5cdd
|
[
"MIT"
] | null | null | null |
# https://www.hackerrank.com/challenges/two-arrays/
# Complete the twoArrays function below.
def twoArrays(k, A, B):
A.sort()
B.sort(reverse=True)
C = zip(A, B)
for c in C:
if sum(c) < k:
return 'NO'
return 'YES'
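# Worked example (comment added for illustration): twoArrays(10, [2, 1, 3], [7, 8, 9])
# pairs sorted A = [1, 2, 3] with reverse-sorted B = [9, 8, 7]; every pair sums
# to exactly 10, so no pair falls below k and 'YES' is returned.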
| 23
| 51
| 0.577075
|
948ac478296bf87ed917abeb3eb6c8e641df4a67
| 10,636
|
py
|
Python
|
sdk/python/pulumi_azure_native/devtestlab/v20180915/get_virtual_machine_schedule.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/devtestlab/v20180915/get_virtual_machine_schedule.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/devtestlab/v20180915/get_virtual_machine_schedule.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetVirtualMachineScheduleResult',
'AwaitableGetVirtualMachineScheduleResult',
'get_virtual_machine_schedule',
]
@pulumi.output_type
class GetVirtualMachineScheduleResult:
"""
A schedule.
"""
def __init__(__self__, created_date=None, daily_recurrence=None, hourly_recurrence=None, id=None, location=None, name=None, notification_settings=None, provisioning_state=None, status=None, tags=None, target_resource_id=None, task_type=None, time_zone_id=None, type=None, unique_identifier=None, weekly_recurrence=None):
if created_date and not isinstance(created_date, str):
raise TypeError("Expected argument 'created_date' to be a str")
pulumi.set(__self__, "created_date", created_date)
if daily_recurrence and not isinstance(daily_recurrence, dict):
raise TypeError("Expected argument 'daily_recurrence' to be a dict")
pulumi.set(__self__, "daily_recurrence", daily_recurrence)
if hourly_recurrence and not isinstance(hourly_recurrence, dict):
raise TypeError("Expected argument 'hourly_recurrence' to be a dict")
pulumi.set(__self__, "hourly_recurrence", hourly_recurrence)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if notification_settings and not isinstance(notification_settings, dict):
raise TypeError("Expected argument 'notification_settings' to be a dict")
pulumi.set(__self__, "notification_settings", notification_settings)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if target_resource_id and not isinstance(target_resource_id, str):
raise TypeError("Expected argument 'target_resource_id' to be a str")
pulumi.set(__self__, "target_resource_id", target_resource_id)
if task_type and not isinstance(task_type, str):
raise TypeError("Expected argument 'task_type' to be a str")
pulumi.set(__self__, "task_type", task_type)
if time_zone_id and not isinstance(time_zone_id, str):
raise TypeError("Expected argument 'time_zone_id' to be a str")
pulumi.set(__self__, "time_zone_id", time_zone_id)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if unique_identifier and not isinstance(unique_identifier, str):
raise TypeError("Expected argument 'unique_identifier' to be a str")
pulumi.set(__self__, "unique_identifier", unique_identifier)
if weekly_recurrence and not isinstance(weekly_recurrence, dict):
raise TypeError("Expected argument 'weekly_recurrence' to be a dict")
pulumi.set(__self__, "weekly_recurrence", weekly_recurrence)
@property
@pulumi.getter(name="createdDate")
def created_date(self) -> str:
"""
The creation date of the schedule.
"""
return pulumi.get(self, "created_date")
@property
@pulumi.getter(name="dailyRecurrence")
def daily_recurrence(self) -> Optional['outputs.DayDetailsResponse']:
"""
If the schedule will occur once each day of the week, specify the daily recurrence.
"""
return pulumi.get(self, "daily_recurrence")
@property
@pulumi.getter(name="hourlyRecurrence")
def hourly_recurrence(self) -> Optional['outputs.HourDetailsResponse']:
"""
If the schedule will occur multiple times a day, specify the hourly recurrence.
"""
return pulumi.get(self, "hourly_recurrence")
@property
@pulumi.getter
def id(self) -> str:
"""
The identifier of the resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
The location of the resource.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="notificationSettings")
def notification_settings(self) -> Optional['outputs.NotificationSettingsResponse']:
"""
Notification settings.
"""
return pulumi.get(self, "notification_settings")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The provisioning status of the resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def status(self) -> Optional[str]:
"""
The status of the schedule (i.e. Enabled, Disabled)
"""
return pulumi.get(self, "status")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
The tags of the resource.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="targetResourceId")
def target_resource_id(self) -> Optional[str]:
"""
The resource ID to which the schedule belongs
"""
return pulumi.get(self, "target_resource_id")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> Optional[str]:
"""
The task type of the schedule (e.g. LabVmsShutdownTask, LabVmAutoStart).
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter(name="timeZoneId")
def time_zone_id(self) -> Optional[str]:
"""
The time zone ID (e.g. Pacific Standard time).
"""
return pulumi.get(self, "time_zone_id")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="uniqueIdentifier")
def unique_identifier(self) -> str:
"""
The unique immutable identifier of a resource (Guid).
"""
return pulumi.get(self, "unique_identifier")
@property
@pulumi.getter(name="weeklyRecurrence")
def weekly_recurrence(self) -> Optional['outputs.WeekDetailsResponse']:
"""
If the schedule will occur only some days of the week, specify the weekly recurrence.
"""
return pulumi.get(self, "weekly_recurrence")
class AwaitableGetVirtualMachineScheduleResult(GetVirtualMachineScheduleResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetVirtualMachineScheduleResult(
created_date=self.created_date,
daily_recurrence=self.daily_recurrence,
hourly_recurrence=self.hourly_recurrence,
id=self.id,
location=self.location,
name=self.name,
notification_settings=self.notification_settings,
provisioning_state=self.provisioning_state,
status=self.status,
tags=self.tags,
target_resource_id=self.target_resource_id,
task_type=self.task_type,
time_zone_id=self.time_zone_id,
type=self.type,
unique_identifier=self.unique_identifier,
weekly_recurrence=self.weekly_recurrence)
def get_virtual_machine_schedule(expand: Optional[str] = None,
lab_name: Optional[str] = None,
name: Optional[str] = None,
resource_group_name: Optional[str] = None,
virtual_machine_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetVirtualMachineScheduleResult:
"""
A schedule.
:param str expand: Specify the $expand query. Example: 'properties($select=status)'
:param str lab_name: The name of the lab.
:param str name: The name of the schedule.
:param str resource_group_name: The name of the resource group.
:param str virtual_machine_name: The name of the virtual machine.
"""
__args__ = dict()
__args__['expand'] = expand
__args__['labName'] = lab_name
__args__['name'] = name
__args__['resourceGroupName'] = resource_group_name
__args__['virtualMachineName'] = virtual_machine_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:devtestlab/v20180915:getVirtualMachineSchedule', __args__, opts=opts, typ=GetVirtualMachineScheduleResult).value
return AwaitableGetVirtualMachineScheduleResult(
created_date=__ret__.created_date,
daily_recurrence=__ret__.daily_recurrence,
hourly_recurrence=__ret__.hourly_recurrence,
id=__ret__.id,
location=__ret__.location,
name=__ret__.name,
notification_settings=__ret__.notification_settings,
provisioning_state=__ret__.provisioning_state,
status=__ret__.status,
tags=__ret__.tags,
target_resource_id=__ret__.target_resource_id,
task_type=__ret__.task_type,
time_zone_id=__ret__.time_zone_id,
type=__ret__.type,
unique_identifier=__ret__.unique_identifier,
weekly_recurrence=__ret__.weekly_recurrence)
| 39.247232
| 324
| 0.655604
|
958c976c02fd791e5db62bcb010be01a3f1a255d
| 3,143
|
py
|
Python
|
clictest/cmd/api.py
|
arulkumarkandasamy/clictest
|
3e62a11eee8e0ef4e0c3ac1090e52b0bfccce59e
|
[
"Apache-2.0"
] | null | null | null |
clictest/cmd/api.py
|
arulkumarkandasamy/clictest
|
3e62a11eee8e0ef4e0c3ac1090e52b0bfccce59e
|
[
"Apache-2.0"
] | null | null | null |
clictest/cmd/api.py
|
arulkumarkandasamy/clictest
|
3e62a11eee8e0ef4e0c3ac1090e52b0bfccce59e
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Clictest API Server
"""
import os
import sys
import eventlet
from oslo_utils import encodeutils
# Monkey patch socket, time, select, threads
eventlet.patcher.monkey_patch(all=False, socket=True, time=True,
select=True, thread=True, os=True)
# If ../clictest/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'clictest', '__init__.py')):
sys.path.insert(0, possible_topdir)
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
import osprofiler.notifier
import osprofiler.web
from clictest.common import config
from clictest.common import exception
from clictest.common import wsgi
from clictest import notifier
CONF = cfg.CONF
CONF.import_group("profiler", "clictest.common.wsgi")
logging.register_options(CONF)
KNOWN_EXCEPTIONS = (RuntimeError,
exception.WorkerCreationFailure)
def fail(e):
global KNOWN_EXCEPTIONS
return_code = KNOWN_EXCEPTIONS.index(type(e)) + 1
sys.stderr.write("ERROR: %s\n" % encodeutils.exception_to_unicode(e))
sys.exit(return_code)
def main():
try:
config.parse_args()
config.set_config_defaults()
wsgi.set_eventlet_hub()
logging.setup(CONF, 'clictest')
notifier.set_defaults()
if cfg.CONF.profiler.enabled:
_notifier = osprofiler.notifier.create("Messaging",
oslo_messaging, {},
notifier.get_transport(),
"clictest", "api",
cfg.CONF.bind_host)
osprofiler.notifier.set(_notifier)
osprofiler.web.enable(cfg.CONF.profiler.hmac_keys)
else:
osprofiler.web.disable()
server = wsgi.Server()
server.start(config.load_paste_app('clictest-api'), default_port=8292)
server.wait()
except KNOWN_EXCEPTIONS as e:
fail(e)
if __name__ == '__main__':
main()
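# Hypothetical invocation sketch (assuming a standard oslo.config setup):
#   clictest-api --config-file /etc/clictest/clictest-api.conf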
| 32.739583
| 78
| 0.650016
|
8faaa838139be229462d68986b7d873344540044
| 538
|
py
|
Python
|
Algo and DSA/LeetCode-Solutions-master/Python/maximum-sum-circular-subarray.py
|
Sourav692/FAANG-Interview-Preparation
|
f523e5c94d582328b3edc449ea16ac6ab28cdc81
|
[
"Unlicense"
] | 3,269
|
2018-10-12T01:29:40.000Z
|
2022-03-31T17:58:41.000Z
|
Algo and DSA/LeetCode-Solutions-master/Python/maximum-sum-circular-subarray.py
|
Sourav692/FAANG-Interview-Preparation
|
f523e5c94d582328b3edc449ea16ac6ab28cdc81
|
[
"Unlicense"
] | 53
|
2018-12-16T22:54:20.000Z
|
2022-02-25T08:31:20.000Z
|
Algo and DSA/LeetCode-Solutions-master/Python/maximum-sum-circular-subarray.py
|
Sourav692/FAANG-Interview-Preparation
|
f523e5c94d582328b3edc449ea16ac6ab28cdc81
|
[
"Unlicense"
] | 1,236
|
2018-10-12T02:51:40.000Z
|
2022-03-30T13:30:37.000Z
|
# Time: O(n)
# Space: O(1)
class Solution(object):
def maxSubarraySumCircular(self, A):
"""
:type A: List[int]
:rtype: int
"""
        # Run Kadane's algorithm for max and min simultaneously: the best
        # circular (wrapping) subarray equals total - (min contiguous sum).
        total, max_sum, cur_max, min_sum, cur_min = 0, -float("inf"), 0, float("inf"), 0
        for a in A:
            cur_max = max(cur_max+a, a)
            max_sum = max(max_sum, cur_max)
            cur_min = min(cur_min+a, a)
            min_sum = min(min_sum, cur_min)
            total += a
        # If max_sum < 0 all elements are negative and total - min_sum would
        # describe the empty subarray (sum 0), so fall back to max_sum.
        return max(max_sum, total-min_sum) if max_sum >= 0 else max_sum
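# Sanity checks (hypothetical inputs): wrapping [5, -3, 5] gives 5 + 5 = 10;
# an all-negative array falls back to the plain Kadane maximum.
if __name__ == "__main__":
    assert Solution().maxSubarraySumCircular([5, -3, 5]) == 10
    assert Solution().maxSubarraySumCircular([-2, -3, -1]) == -1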
| 28.315789
| 88
| 0.524164
|
65da8eb744102af0e0788356fc81cf42c270588b
| 210
|
py
|
Python
|
Boilermake2018/Lib/site-packages/chatterbot/corpus.py
|
TejPatel98/voice_your_professional_email
|
9cc48f7bcd6576a6962711755e5d5d485832128c
|
[
"CC0-1.0"
] | 9
|
2017-11-14T07:13:19.000Z
|
2021-05-24T12:11:58.000Z
|
Boilermake2018/Lib/site-packages/chatterbot/corpus.py
|
TejPatel98/voice_your_professional_email
|
9cc48f7bcd6576a6962711755e5d5d485832128c
|
[
"CC0-1.0"
] | 2
|
2017-12-06T07:40:08.000Z
|
2017-12-06T07:42:43.000Z
|
Boilermake2018/Lib/site-packages/chatterbot/corpus.py
|
TejPatel98/voice_your_professional_email
|
9cc48f7bcd6576a6962711755e5d5d485832128c
|
[
"CC0-1.0"
] | 7
|
2017-11-15T11:11:33.000Z
|
2017-12-14T10:26:37.000Z
|
"""
Seamlessly import the external chatterbot corpus module.
View the corpus on GitHub at https://github.com/gunthercox/chatterbot-corpus
"""
from chatterbot_corpus import Corpus
__all__ = (
'Corpus',
)
| 17.5
| 76
| 0.752381
|
35c87d709fcc8816f3124e09c776fd3c93cce646
| 1,203
|
py
|
Python
|
HuberyBlog/apps/blog/migrations/0010_auto_20190626_1034.py
|
SomnambulistOfChina/ChineseSomnambulist
|
dc6efcb4ea1bc02f8999cd78bebfd648253631a6
|
[
"Apache-2.0"
] | 5
|
2019-05-21T08:26:18.000Z
|
2021-07-20T11:32:49.000Z
|
HuberyBlog/apps/blog/migrations/0010_auto_20190626_1034.py
|
SomnambulistOfChina/ChineseSomnambulist
|
dc6efcb4ea1bc02f8999cd78bebfd648253631a6
|
[
"Apache-2.0"
] | null | null | null |
HuberyBlog/apps/blog/migrations/0010_auto_20190626_1034.py
|
SomnambulistOfChina/ChineseSomnambulist
|
dc6efcb4ea1bc02f8999cd78bebfd648253631a6
|
[
"Apache-2.0"
] | 2
|
2019-07-20T08:35:04.000Z
|
2020-02-29T07:34:42.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2019-06-26 10:34
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0009_visitview_ip_address'),
]
operations = [
migrations.AlterField(
model_name='blog',
name='title',
field=models.CharField(max_length=128, verbose_name='标题'),
),
migrations.AlterField(
model_name='category',
name='name',
field=models.CharField(blank=True, max_length=64, null=True, unique=True, verbose_name='分类名称'),
),
migrations.AlterField(
model_name='tag',
name='name',
field=models.CharField(max_length=32, unique=True, verbose_name='标签'),
),
migrations.AlterField(
model_name='web',
name='name',
field=models.CharField(max_length=64, verbose_name='网站名字'),
),
migrations.AlterField(
model_name='webcategory',
name='name',
field=models.CharField(max_length=64, verbose_name='网站类别'),
),
]
| 29.341463
| 107
| 0.57606
|
fca4d734de9f902a51130729235354819fdd74c2
| 32,996
|
py
|
Python
|
src/ismn/interface.py
|
wpreimes/ismn
|
379c53a85e9e47b7eeceb31e84670c5c5290546a
|
[
"MIT"
] | null | null | null |
src/ismn/interface.py
|
wpreimes/ismn
|
379c53a85e9e47b7eeceb31e84670c5c5290546a
|
[
"MIT"
] | 4
|
2020-12-20T13:22:18.000Z
|
2021-02-19T13:38:08.000Z
|
src/ismn/interface.py
|
wpreimes/ismn
|
379c53a85e9e47b7eeceb31e84670c5c5290546a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2021 TU Wien
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from pathlib import Path
from tempfile import gettempdir
import platform
import os
import sys
from typing import Optional, Union
import pandas as pd
from ismn.filecollection import IsmnFileCollection, _load_metadata_df
from ismn.components import *
from ismn.const import *
from ismn.base import IsmnRoot
try:
import cartopy.crs as ccrs
import cartopy.feature as cfeature
if platform.system() == "Darwin":
import matplotlib
matplotlib.use("TkAgg")
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
plotlibs = True
except ImportError:
plotlibs = False
class ISMN_Interface:
"""
Class provides interface to ISMN data downloaded from the ISMN website
https://ismn.earth.
    Upon initialization it collects metadata from all files in
    data_path and saves the metadata in a csv file in the folder
    python_metadata under meta_path (or data_path if no meta_path is defined).
    The first initialization can take some time if the full ISMN archive
    is present in data_path; with parallel=True multiple processes are used.
Parameters
----------
data_path : str or Path
Path to ISMN data to read, either to a zip archive or to the extracted
directory that contains the network folders.
Download data from https://ismn.earth after registration.
meta_path : str or Path
Path where the metadata csv file(s) is / are stored. The actual filename
is defined by the name of data_path and will be generated automatically
if it does not yet exist.
network : str or list, optional (default: None)
Name(s) of network(s) to load. Other data in the data_path will be ignored.
By default or if None is passed, all networks are activated.
If an empty list is passed no networks are activated.
parallel: bool, optional (default: False)
Activate parallel processes to speed up metadata generation.
All available CPUs will be used.
keep_loaded_data : bool, optional (default: False)
Keep data for a file in memory once it is loaded. This makes subsequent
calls of data faster (if e.g. a station is accessed multiple times)
but can fill up memory if multiple networks are loaded.
Raises
------
ISMNError
        if a given network was not found in :attr:`.ISMN_Interface.data_path`
Attributes
----------
climate : collections.OrderedDict
All Climate classes and their descriptions.
collection : NetworkCollection
Contains all loaded networks with stations and sensors.
keep_loaded_data : bool
Switch to keep data in memory after loading (not recommended).
metadata : pandas.DataFrame
        Metadata for active networks, with indices that can also be passed
        to :func:`ismn.interface.ISMN_Interface.read_metadata`
landcover : collections.OrderedDict
All Landcover classes and their descriptions.
parallel : bool
Switch to activate parallel processing where possible.
root : IsmnRoot
ISMN data folder or .zip access
Properties
----------
networks : OrderedDict
Access Networks container from collection directly.
grid : pygeogrids.grid.BasicGrid
Grid from collection that contains all station lats and lons
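    Examples
    --------
    A minimal usage sketch; the archive name below is a hypothetical
    placeholder for a local ISMN download:
    >>> ds = ISMN_Interface("Data_separate_files_2010_2020.zip")  # doctest: +SKIP
    >>> ids = ds.get_dataset_ids("soil_moisture", min_depth=0, max_depth=0.1)  # doctest: +SKIP
    >>> ts = ds.read_ts(ids[0])  # doctest: +SKIP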
"""
def __init__(
self,
data_path,
meta_path=None,
network=None,
parallel=False,
keep_loaded_data=False,
temp_root=gettempdir(),
):
self.climate, self.landcover = KOEPPENGEIGER, LANDCOVER
self.parallel = parallel
self.root = IsmnRoot(data_path)
self.keep_loaded_data = keep_loaded_data
self.activate_network(network=network, meta_path=meta_path, temp_root=temp_root)
def activate_network(
self,
network: Union[list, str] = None,
meta_path: str = None,
temp_root: str = gettempdir(),
):
"""
Load (file) collection for specific networks.
"""
meta_csv_filename = f"{self.root.name}.csv"
if meta_path is None:
meta_path = Path(self.root.root_dir) / "python_metadata"
else:
meta_path = Path(meta_path)
meta_csv_file = meta_path / meta_csv_filename
if network is not None:
network = np.atleast_1d(network)
if not os.path.isfile(meta_csv_file):
self.__file_collection = IsmnFileCollection.build_from_scratch(
self.root,
parallel=self.parallel,
log_path=meta_path,
temp_root=temp_root,
)
self.__file_collection.to_metadata_csv(meta_csv_file)
self.__file_collection = IsmnFileCollection.from_metadata_csv(
self.root, meta_csv_file, network=network
)
metadata = self.__file_collection.metadata_df
metadata.index = range(len(metadata.index))
self.metadata = metadata
networks = self.__collect_networks(network)
self.collection = NetworkCollection(networks)
def __collect_networks(self, network_names: Optional[list] = None) -> list:
"""
Build Networks and fill them with Stations and Sensors and apply
according filehandlers from filelist for data reading.
"""
networks = OrderedDict([])
for f in self.__file_collection.iter_filehandlers(network_names):
nw_name, st_name, instrument = (
f.metadata["network"].val,
f.metadata["station"].val,
f.metadata["instrument"].val,
)
if nw_name not in networks:
networks[nw_name] = Network(nw_name)
if st_name not in networks[nw_name].stations:
networks[nw_name].add_station(
st_name,
f.metadata["longitude"].val,
f.metadata["latitude"].val,
f.metadata["elevation"].val,
)
            # the sensor name is the index in the list
networks[nw_name].stations[st_name].add_sensor(
instrument,
f.metadata["variable"].val,
f.metadata["variable"].depth,
filehandler=f, # todo: remove station meta from sensor
name=None,
keep_loaded_data=self.keep_loaded_data,
)
return list(networks.values())
def __getitem__(self, item):
return self.collection[item]
def __repr__(self):
return (
f"{self.root}\n"
f"with Networks[Stations]:\n"
f"------------------------\n"
f"{self.collection.__repr__(' ')}"
)
@property
def networks(self):
return self.collection.networks
@property
def grid(self):
return self.collection.grid
@deprecated
def list_networks(self) -> np.ndarray:
# get network names from list of active files
return np.array(list(self.networks.keys()))
@deprecated
def list_stations(self, network: str = None) -> np.ndarray:
# get station names for one of the active networks
if network is not None:
if network not in self.networks:
raise ISMNError(
f"Network {network} not found in currently loaded networks."
)
return np.array(list(self.networks[network].stations.keys()))
else:
stations = []
for network in self.networks.values():
stations += list(network.stations.keys())
return np.array(stations)
@deprecated
def list_sensors(self, network: str = None, station: str = None) -> np.ndarray:
# List sensors names for a specific sensor in an active network
sensors = np.array([])
for net in self.networks.values():
if network in [None, net.name]:
for stat in net.stations.values():
if station in [None, stat.name]:
sensors = np.append(
sensors,
# get the objects instead, use .values()?
np.array(list(stat.sensors.keys())),
)
return sensors
def network_for_station(self, stationname, name_only=True):
"""
Find networks that contain a station of the passed name.
Parameters
----------
stationname : str
Station name to search in the active networks.
name_only : bool, optional (default: True)
Returns only the name of the network and not the Network.
Returns
-------
network_names : str or Network or None
Network that contains a station of that name, or None if no such
network exists.
            Prints a warning and uses the FIRST network found if there
            are multiple stations with the same name in different networks.
"""
network_with_station = []
for network in self.networks.values():
if stationname in network.stations.keys():
network_with_station.append(network)
if len(network_with_station) > 1:
warnings.warn("stationname occurs in multiple networks")
if len(network_with_station) == 0:
return None
else:
nw = network_with_station[0]
if name_only:
warnings.warn(
"Future Versions of the package will always return the Network object (same as name_only=False now). "
"You can use Network.name to get the name of a network.",
category=DeprecationWarning,
)
return nw.name
else:
return nw
def stations_that_measure(self, variable, **filter_kwargs):
"""
Goes through all stations and returns those that measure the specified
variable
Parameters
----------
variable : str
variable name, one of:
* soil_moisture
* soil_temperature
* soil_suction
* precipitation
* air_temperature
* field_capacity
* permanent_wilting_point
* plant_available_water
* potential_plant_available_water
* saturation
* silt_fraction
* snow_depth
* sand_fraction
* clay_fraction
* organic_carbon
* snow_water_equivalent
* surface_temperature
* surface_temperature_quality_flag_original
filter_kwargs :
Parameters are used to check all sensors at all stations, only stations
that have at least one matching sensor are returned.
For a description of possible filter kwargs, see
:func:`ismn.components.Sensor.eval`
Yields
-------
ISMN_station : Station
"""
for network in self.networks.values():
for station in network.iter_stations(variable=variable, **filter_kwargs):
yield station
def get_dataset_ids(
self,
variable,
min_depth=0,
max_depth=0.1,
filter_meta_dict=None,
check_only_sensor_depth_from=False,
groupby=None,
):
"""
Yield all sensors for a specific network and/or station and/or
variable and/or depth. The id is defined by the position of the filehandler
in the filelist.
Parameters
----------
variable : str or list[str] or None
Variable(s) to filer out, None to allow all variables.
min_depth : float, optional (default: 0)
Min depth of sensors to search
max_depth : float, optional (default: 0.1)
Max depth of sensors to search
filter_meta_dict: dict, optional (default: None)
Additional metadata keys and values for which the file list is filtered
e.g. {'lc_2010': 10} to filter for a landcover class.
if there are multiple conditions, ALL have to be fulfilled.
e.g. {'lc_2010': 10', 'climate_KG': 'Dfc'})
check_only_sensor_depth_from : bool, optional (default: False)
            Ignores the sensor's depth_to value and only checks if depth_from of
            the sensor is in the passed depth range (e.g. for cosmic ray probes).
groupby : str, optional (default: None)
A metadata field name that is used to group sensors, e.g. network
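        Returns
        -------
        ids : list or dict
            Positions of matching filehandlers in the filelist; a dict of
            id lists keyed by the groupby values if groupby is set.
        Examples
        --------
        A minimal sketch (the landcover filter value below is a hypothetical
        placeholder):
        >>> ids = ds.get_dataset_ids("soil_moisture", min_depth=0, max_depth=0.1,
        ...                          filter_meta_dict={"lc_2010": 130})  # doctest: +SKIP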
"""
if groupby is None:
ids = []
else:
ids = {}
depth = Depth(min_depth, max_depth)
for id, filehandler in enumerate(self.__file_collection.iter_filehandlers()):
eval = filehandler.check_metadata(
variable=variable,
allowed_depth=depth,
filter_meta_dict=filter_meta_dict,
check_only_sensor_depth_from=check_only_sensor_depth_from,
)
if eval:
if groupby is not None:
groupval = filehandler.metadata[groupby].val
if groupval not in ids.keys():
ids[groupval] = []
ids[groupval].append(id)
else:
ids.append(id)
return ids
def read_metadata(self, idx, format="pandas"):
"""
Read only metadata by id as pd.DataFrame.
Parameters
----------
idx : int or list
id of sensor to read, best one of those returned
from :func:`ismn.interface.get_dataset_ids` or one in
:attr:`.ISMN_Interface.metadata`.
format : str, optional (default: 'pandas')
This only affects the return value when a SINGLE idx is passed.
If multiple indices or None is passed, a DataFrame is returned.
- pandas : return metadata as dataframe (Default)
- dict : return metadata as dict (only for single idx)
- obj : return metadata as MetaData object (only for single idx)
Returns
-------
metadata : pd.DataFrame or dict or MetaData
Metadata for the passed index.
"""
idx = np.atleast_1d(idx)
if len(idx) == 1:
filehandler = self.__file_collection.get_filehandler(idx[0])
if format.lower() == "pandas":
return filehandler.metadata.to_pd()
elif format.lower() == "dict":
return filehandler.metadata.to_dict()
elif format.lower() == "obj":
return filehandler.metadata
else:
raise NotImplementedError(f"{format} is not a supported format.")
else:
if format.lower() != "pandas":
warnings.warn(
"Multiple indices passed (or None), return format will be 'pandas'"
)
            dfs = []
            for i in idx:
                # len(idx) != 1 in this branch, so each handler's metadata
                # is collected into one concatenated DataFrame.
                filehandler = self.__file_collection.get_filehandler(i)
                df = filehandler.metadata.to_pd(transpose=True, dropna=False)
                df.index = [i]
                dfs.append(df)
            return pd.concat(dfs, axis=0).dropna(axis=1, how="all")
def read_ts(self, idx, return_meta=False):
"""
Read a time series directly by the filehandler id.
Parameters
----------
idx : int
id of filehandler to read, best one of those returned
by :func:`ismn.interface.ISMN_Interface.get_dataset_ids`
return_meta : bool, optional (default: False)
Also return the metadata for this sensor (as a second return value)
Returns
-------
timeseries : pd.DataFrame
Observation time series
metadata : pd.DataFrame, optional
            All available metadata for that sensor. Only returned when
            `return_meta=True`.
"""
filehandler = self.__file_collection.get_filehandler(idx)
if return_meta:
return filehandler.read_data(), filehandler.metadata.to_pd()
else:
return filehandler.read_data()
def read(self, *args, **kwargs):
# alias of :func:`ismn.interface.ISMN_Interface.read_ts`
return self.read_ts(*args, **kwargs)
def find_nearest_station(self, lon, lat, return_distance=False, max_dist=np.inf):
"""
Finds the nearest station to passed coordinates available in downloaded
data.
Parameters
----------
lon : float
Longitude of point
lat : float
Latitude of point
return_distance : bool, optional (default: False)
if True also distance is returned
max_dist : float, optional (default: np.inf)
Maximum distance allowed. If no station is within this distance
None is returned.
Returns
-------
station : ismn.components.Station
Nearest station object that was found in within the selected distance
distance : float, optional
distance to station in meters, measured in cartesian coordinates and not on
a great circle. Should be OK for small distances
"""
        # TODO: verify behavior when no point lies within max_dist
gpi, d = self.collection.grid.find_nearest_gpi(lon, lat, max_dist=max_dist)
if len(np.atleast_1d(gpi)) == 0:
stat = None
d = None
else:
stat = self.collection.station4gpi(gpi)
if return_distance:
return stat, d
else:
return stat
def plot_station_locations(
self,
variable=None,
min_depth=-np.inf,
max_depth=np.inf,
stats_text=True,
check_only_sensor_depth_from=False,
markersize=1,
text_scalefactor=1,
dpi=300,
filename=None,
ax=None,
):
"""
Plots available stations on a world map in robinson projection.
Parameters
----------
variable : str or list[str], optional (default: None)
Show only stations that measure this/these variable(s), e.g. soil_moisture
If None is passed, no filtering for variable is performed.
min_depth : float, optional (default: -np.inf)
Minimum depth, only stations that have a valid sensor measuring the
passed variable (if one is selected) in this depth range are included.
        max_depth : float, optional (default: np.inf)
See description of min_depth. This is the bottom threshold for the
allowed depth.
        stats_text : bool, optional (default: True)
Include text of net/stat/sens counts in plot.
        check_only_sensor_depth_from : bool, optional (default: False)
            Ignores the sensor's depth_to value and only checks if depth_from of
            the sensor is in the passed depth range (e.g. for cosmic ray probes).
markersize : int, optional (default: 1)
Size of the marker, might depend on the amount of stations you plot.
text_scalefactor : float, optional (default: 1)
Scale factor that is applied to header and legend.
dpi: float, optional (default: 300)
Only applies when figure is saved to file.
Resolution of the output figure.
filename : str or Path, optional (default: None)
Filename where image is stored. If None is passed, no file is created.
ax : plt.axes
Axes object that can be used by cartopy (projection assigned).
Returns
-------
fig: matplotlib.Figure
            created figure instance. If an axes object was given this will be None.
        ax: matplotlib.Axes
used axes instance, can be added to another figure for example.
count : dict
Number of valid sensors and stations that contain at least one valid
sensor and networks that contain at least one valid station.
"""
if filename and ax:
raise ValueError(
"Either pass a filename OR pass ax to use for plot, not both."
)
if not plotlibs:
warnings.warn(
"Could not import all plotting libs, plotting functions not available."
"Please install cartopy and matplotlib."
)
return
data_crs = ccrs.PlateCarree()
if ax is None:
fig, ax = plt.subplots(1, 1)
ax.set_axis_off()
ax = plt.axes(projection=ccrs.Robinson())
else:
fig = None
ax.coastlines(linewidth=0.5)
# show global map
ax.set_global()
ax.add_feature(cfeature.BORDERS, linewidth=0.5, edgecolor="gray")
if not (sys.version_info[0] == 3 and sys.version_info[1] == 4):
ax.add_feature(cfeature.STATES, linewidth=0.5, edgecolor="gray")
colormap = plt.get_cmap("tab20")
else:
colormap = plt.get_cmap("Set1")
all_networks = list(self.networks.keys())
colorsteps = np.arange(0, 1, 1 / float(len(all_networks)))
rect = []
act_networks = []
act_stations = []
iterator = self.collection.iter_sensors(
variable=variable,
depth=Depth(min_depth, max_depth),
filter_meta_dict=None,
check_only_sensor_depth_from=check_only_sensor_depth_from,
)
n_sens = 0
for nw, stat, sens in iterator:
netcolor = colormap(colorsteps[all_networks.index(nw.name)])
if nw.name not in act_networks:
act_networks.append(nw.name)
rect.append(Rectangle((0, 0), 1, 1, fc=netcolor))
if stat.name not in act_stations:
act_stations.append(stat.name)
ax.plot(
stat.lon,
stat.lat,
color=netcolor,
markersize=markersize,
marker="s",
transform=data_crs,
)
n_sens += 1
nrows = 8.0 if len(act_networks) > 8 else len(act_networks)
try:
ncols = int(len(act_networks) / nrows)
except ZeroDivisionError:
ncols = 0
if ncols == 0:
ncols = 1
handles, labels = ax.get_legend_handles_labels()
lgd = ax.legend(handles, labels, loc="lower center", bbox_to_anchor=(0.5, -0.1))
ax.legend(
rect,
act_networks,
loc="upper center",
ncol=ncols,
bbox_to_anchor=(0.5, -0.05),
fontsize=4*text_scalefactor,
)
postfix_depth = (
"when only considering depth_from of the sensor"
if check_only_sensor_depth_from
else ""
)
depth_text = f"between {min_depth} and {max_depth} m \n {postfix_depth}"
feedback = (
f"{n_sens} valid sensors in {len(act_stations)} stations "
f"in {len(act_networks)} networks (of {len(all_networks)} potential networks) \n"
f"for {f'variable {variable}' if variable is not None else 'all variables'} "
f"{depth_text}"
)
if stats_text:
text = ax.text(
0.5,
1.05,
feedback,
transform=ax.transAxes,
                fontsize=5 * text_scalefactor,
horizontalalignment="center",
)
else:
text = None
        if fig:
            fig.set_size_inches([6, 3.5 + (0.25 * text_scalefactor) * nrows])
            if filename is not None:
                fig.savefig(
                    filename,
                    bbox_extra_artists=(lgd, text) if stats_text else (lgd,),
                    bbox_inches='tight',
                    dpi=dpi,
                )
        # Always return the figure handle, axes and counts, as documented.
        counts = (len(act_networks), len(act_stations), n_sens)
        return fig, ax, counts
def get_min_max_obs_timestamps(
self,
variable="soil_moisture",
min_depth=-np.inf,
max_depth=np.inf,
filter_meta_dict=None,
):
"""
Filter the active file list and return the min/max time stamp from ALL
time series that match the passed criteria.
This time period does NOT apply to all time series in the collection
but is the OVERALL earliest and latest timestamp found.
Parameters
----------
variable : str, optional (default: 'soil_moisture')
One of those in :const:`ismn.const.VARIABLE_LUT` or returned by
:func:`ismn.interface.ISMN_Interface.get_variables`:
'soil_moisture', 'soil_temperature', 'soil_suction',
'precipitation', 'air_temperature', 'field_capacity',
'permanent_wilting_point', 'plant_available_water',
'potential_plant_available_water', 'saturation', 'silt_fraction',
'snow_depth', 'sand_fraction', 'clay_fraction', 'organic_carbon',
'snow_water_equivalent', 'surface_temperature',
'surface_temperature_quality_flag_original'
min_depth : float, optional (default: -np.inf)
Only sensors in this depth are considered.
max_depth : float, optional (default: np.inf)
Only sensors in this depth are considered.
filter_meta_dict: dict, optional (default: None)
Additional metadata keys and values for which the file list is filtered
e.g. {'lc_2010': 10} to filter for a landcover class.
if there are multiple conditions, ALL have to be fulfilled.
e.g. {'lc_2010': 10', 'climate_KG': 'Dfc'})
Returns
-------
start_date: datetime.datetime
Earliest time stamp found in all sensors that fulfill the passed
requirements.
end_date: datetime.datetime
Latest time stamp found in all sensors that fulfill the passed
requirements.
"""
t_min = None
t_max = None
for net, stat, sens in self.collection.iter_sensors(
variable=variable,
depth=Depth(min_depth, max_depth),
filter_meta_dict=filter_meta_dict,
):
time_from = pd.Timestamp(sens.metadata["timerange_from"].val)
time_to = pd.Timestamp(sens.metadata["timerange_to"].val)
if t_min is None:
t_min = time_from
if t_max is None:
t_max = time_to
if time_from < t_min:
t_min = time_from
if time_to > t_max:
t_max = time_to
t_min = t_min.to_pydatetime() if t_min is not None else None
t_max = t_max.to_pydatetime() if t_max is not None else None
return t_min, t_max
def get_static_var_vals(
self,
variable="soil_moisture",
min_depth=-np.inf,
max_depth=np.inf,
static_var_name="lc_2010",
) -> dict:
"""
Get unique meta values for the selected static variable in the active
networks.
Parameters
----------
variable : str, optional (default: 'soil_moisture')
One of those in :const:`ismn.const.VARIABLE_LUT` or returned by
:func:`ismn.interface.ISMN_Interface.get_variables`:
'soil_moisture', 'soil_temperature', 'soil_suction',
'precipitation', 'air_temperature', 'field_capacity',
'permanent_wilting_point', 'plant_available_water',
'potential_plant_available_water', 'saturation', 'silt_fraction',
'snow_depth', 'sand_fraction', 'clay_fraction', 'organic_carbon',
'snow_water_equivalent', 'surface_temperature',
'surface_temperature_quality_flag_original'
min_depth : float, optional (default: -np.inf)
Only sensors in this depth are considered.
max_depth : float, optional (default: np.inf)
Only sensors in this depth are considered.
static_var_name : str, optional (default: 'lc_2010')
One of:
'lc_2000', 'lc_2005', 'lc_2010', 'lc_insitu', 'climate_KG',
'climate_insitu'
Returns
-------
vals : dict
Unique values found in static meta and their meanings.
"""
if static_var_name not in CSV_META_TEMPLATE_SURF_VAR.keys():
raise ValueError(
f"{static_var_name} is not in the list of supported variables."
f"Choose one of {list(CSV_META_TEMPLATE_SURF_VAR.keys())}"
)
vals = []
for net in self.networks.values():
for sta in net.stations.values():
for sen in sta.sensors.values():
if sen.eval(variable=variable, depth=Depth(min_depth, max_depth)):
vals.append(sen.filehandler.metadata[static_var_name].val)
val_dict = {}
for val in np.unique(np.array(vals)):
if val in self.climate.keys():
val_dict[val] = self.climate[val]
elif val in self.landcover.values():
for k, v in self.landcover.items():
if v == val:
val_dict[v] = k
return val_dict
def get_landcover_types(
self,
variable: str = "soil_moisture",
min_depth: float = 0,
max_depth: float = 10,
landcover: str = "lc_2010",
) -> dict:
"""
See :func:`ismn.interface.ISMN_Interface.get_static_var_vals`
"""
return self.get_static_var_vals(variable, min_depth, max_depth, landcover)
def get_climate_types(
self,
variable: str = "soil_moisture",
min_depth: float = 0,
max_depth: float = 10,
climate: str = "climate_KG",
) -> dict:
"""
See :func:`ismn.interface.ISMN_Interface.get_static_var_vals`
"""
return self.get_static_var_vals(variable, min_depth, max_depth, climate)
def get_variables(self) -> np.ndarray:
"""
get a list of variables available in the data
"""
all_vars = np.array([])
for _, station in self.collection.iter_stations():
stat_vars = station.get_variables()
if not all(np.isin(stat_vars, all_vars)):
all_vars = np.union1d(stat_vars, all_vars)
return all_vars
def print_landcover_dict(self) -> None:
"""
print all classes provided by the CCI Landcover Classification
"""
print("CCI Landcover Classification")
print("----------------------------")
for key in self.landcover.keys():
print("{:4}: {}".format(key, self.landcover[key]))
def print_climate_dict(self) -> None:
"""
print all classes provided by the Koeppen-Geiger climate Classification
"""
print("KOEPPEN GEIGER Climate Classification")
print("-------------------------------------")
for key in self.climate.keys():
print("{:4}: {}".format(key, self.climate[key]))
def close_files(self):
# close all open filehandlers
self.__file_collection.close()
| 36.419426
| 122
| 0.586798
|
d80dcc141b8fcdff5446bf25eab6ed4d8aeaa148
| 471
|
py
|
Python
|
1_Image_Classification/general_utilities.py
|
mabdulkareem/lav_volume_with_qc
|
762d4d26c49d4ae3626ef4ac7ff2c9836beace2f
|
[
"MIT"
] | null | null | null |
1_Image_Classification/general_utilities.py
|
mabdulkareem/lav_volume_with_qc
|
762d4d26c49d4ae3626ef4ac7ff2c9836beace2f
|
[
"MIT"
] | null | null | null |
1_Image_Classification/general_utilities.py
|
mabdulkareem/lav_volume_with_qc
|
762d4d26c49d4ae3626ef4ac7ff2c9836beace2f
|
[
"MIT"
] | null | null | null |
import os
# A function to create directory
def create_dir(path_of_dir):
try:
        os.makedirs(path_of_dir)  # also creates intermediate (parent) directories as needed
except FileExistsError:
#print("Directory %s already exists" % path_of_dir)
pass
except OSError:
print ("Creation of the directory %s failed" % path_of_dir)
else:
#print ("Successfully created the directory %s " % path_of_dir)
pass
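# Minimal usage sketch (the path below is a hypothetical placeholder):
if __name__ == '__main__':
    create_dir(os.path.join('output', 'nested', 'dirs'))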
| 26.166667
| 92
| 0.647558
|
2d29b37bc9a1a26790ee88b5902951afe4fc4560
| 524
|
py
|
Python
|
sympy/liealgebras/cartan_matrix.py
|
utkarshdeorah/sympy
|
dcdf59bbc6b13ddbc329431adf72fcee294b6389
|
[
"BSD-3-Clause"
] | 8,323
|
2015-01-02T15:51:43.000Z
|
2022-03-31T13:13:19.000Z
|
sympy/liealgebras/cartan_matrix.py
|
utkarshdeorah/sympy
|
dcdf59bbc6b13ddbc329431adf72fcee294b6389
|
[
"BSD-3-Clause"
] | 15,102
|
2015-01-01T01:33:17.000Z
|
2022-03-31T22:53:13.000Z
|
sympy/liealgebras/cartan_matrix.py
|
utkarshdeorah/sympy
|
dcdf59bbc6b13ddbc329431adf72fcee294b6389
|
[
"BSD-3-Clause"
] | 4,490
|
2015-01-01T17:48:07.000Z
|
2022-03-31T17:24:05.000Z
|
from .cartan_type import CartanType
def CartanMatrix(ct):
"""Access the Cartan matrix of a specific Lie algebra
Examples
========
>>> from sympy.liealgebras.cartan_matrix import CartanMatrix
>>> CartanMatrix("A2")
Matrix([
[ 2, -1],
[-1, 2]])
>>> CartanMatrix(['C', 3])
Matrix([
[ 2, -1, 0],
[-1, 2, -1],
[ 0, -2, 2]])
This method works by returning the Cartan matrix
    which corresponds to Cartan type ct.
"""
return CartanType(ct).cartan_matrix()
| 20.153846
| 64
| 0.578244
|
b1b29696f408a828c04e2638e332d7a09a832fed
| 12,053
|
py
|
Python
|
airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py
|
globalprofessionalsearch/airbyte
|
9c76b8af5363e9bfcf2b532b5418440d713c1c6a
|
[
"MIT"
] | null | null | null |
airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py
|
globalprofessionalsearch/airbyte
|
9c76b8af5363e9bfcf2b532b5418440d713c1c6a
|
[
"MIT"
] | 1
|
2021-08-17T06:12:26.000Z
|
2021-08-17T06:12:26.000Z
|
airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py
|
globalprofessionalsearch/airbyte
|
9c76b8af5363e9bfcf2b532b5418440d713c1c6a
|
[
"MIT"
] | null | null | null |
#
# MIT License
#
# Copyright (c) 2020 Airbyte
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import logging
from collections import Counter, defaultdict
from functools import reduce
from typing import Any, List, Mapping, MutableMapping
import pytest
from airbyte_cdk.models import AirbyteMessage, ConnectorSpecification, Status, Type
from docker.errors import ContainerError
from jsonschema import validate
from source_acceptance_test.base import BaseTest
from source_acceptance_test.config import BasicReadTestConfig, ConnectionTestConfig
from source_acceptance_test.utils import ConnectorRunner, SecretDict, serialize, verify_records_schema
from source_acceptance_test.utils.json_schema_helper import JsonSchemaHelper
@pytest.mark.default_timeout(10)
class TestSpec(BaseTest):
def test_match_expected(self, connector_spec: ConnectorSpecification, connector_config: SecretDict, docker_runner: ConnectorRunner):
output = docker_runner.call_spec()
spec_messages = [message for message in output if message.type == Type.SPEC]
assert len(spec_messages) == 1, "Spec message should be emitted exactly once"
if connector_spec:
assert spec_messages[0].spec == connector_spec, "Spec should be equal to the one in spec.json file"
assert docker_runner.env_variables.get("AIRBYTE_ENTRYPOINT"), "AIRBYTE_ENTRYPOINT must be set in dockerfile"
assert docker_runner.env_variables.get("AIRBYTE_ENTRYPOINT") == " ".join(
docker_runner.entry_point
), "env should be equal to space-joined entrypoint"
# Getting rid of technical variables that start with an underscore
config = {key: value for key, value in connector_config.data.items() if not key.startswith("_")}
spec_message_schema = spec_messages[0].spec.connectionSpecification
validate(instance=config, schema=spec_message_schema)
js_helper = JsonSchemaHelper(spec_message_schema)
variants = js_helper.find_variant_paths()
js_helper.validate_variant_paths(variants)
def test_required(self):
"""Check that connector will fail if any required field is missing"""
def test_optional(self):
"""Check that connector can work without any optional field"""
def test_has_secret(self):
"""Check that spec has a secret. Not sure if this should be always the case"""
def test_secret_never_in_the_output(self):
"""This test should be injected into any docker command it needs to know current config and spec"""
@pytest.mark.default_timeout(30)
class TestConnection(BaseTest):
def test_check(self, connector_config, inputs: ConnectionTestConfig, docker_runner: ConnectorRunner):
if inputs.status == ConnectionTestConfig.Status.Succeed:
output = docker_runner.call_check(config=connector_config)
con_messages = [message for message in output if message.type == Type.CONNECTION_STATUS]
assert len(con_messages) == 1, "Connection status message should be emitted exactly once"
assert con_messages[0].connectionStatus.status == Status.SUCCEEDED
elif inputs.status == ConnectionTestConfig.Status.Failed:
output = docker_runner.call_check(config=connector_config)
con_messages = [message for message in output if message.type == Type.CONNECTION_STATUS]
assert len(con_messages) == 1, "Connection status message should be emitted exactly once"
assert con_messages[0].connectionStatus.status == Status.FAILED
elif inputs.status == ConnectionTestConfig.Status.Exception:
with pytest.raises(ContainerError) as err:
docker_runner.call_check(config=connector_config)
assert err.value.exit_status != 0, "Connector should exit with error code"
assert "Traceback" in err.value.stderr.decode("utf-8"), "Connector should print exception"
@pytest.mark.default_timeout(30)
class TestDiscovery(BaseTest):
def test_discover(self, connector_config, docker_runner: ConnectorRunner):
output = docker_runner.call_discover(config=connector_config)
catalog_messages = [message for message in output if message.type == Type.CATALOG]
assert len(catalog_messages) == 1, "Catalog message should be emitted exactly once"
# TODO(sherifnada) return this once an input bug is fixed (test suite currently fails if this file is not provided)
# if catalog:
# for stream1, stream2 in zip(catalog_messages[0].catalog.streams, catalog.streams):
# assert stream1.json_schema == stream2.json_schema, f"Streams: {stream1.name} vs {stream2.name}, stream schemas should match"
# stream1.json_schema = None
# stream2.json_schema = None
# assert stream1.dict() == stream2.dict(), f"Streams {stream1.name} and {stream2.name}, stream configs should match"
def primary_keys_for_records(streams, records):
streams_with_primary_key = [stream for stream in streams if stream.stream.source_defined_primary_key]
for stream in streams_with_primary_key:
stream_records = [r for r in records if r.stream == stream.stream.name]
for stream_record in stream_records:
pk_values = {}
for pk_path in stream.stream.source_defined_primary_key:
pk_value = reduce(lambda data, key: data.get(key) if isinstance(data, dict) else None, pk_path, stream_record.data)
pk_values[tuple(pk_path)] = pk_value
yield pk_values, stream_record
@pytest.mark.default_timeout(5 * 60)
class TestBasicRead(BaseTest):
@staticmethod
def _validate_schema(records, configured_catalog):
"""
Check if data type and structure in records matches the one in json_schema of the stream in catalog
"""
bar = "-" * 80
streams_errors = verify_records_schema(records, configured_catalog)
for stream_name, errors in streams_errors.items():
errors = map(str, errors.values())
str_errors = f"\n{bar}\n".join(errors)
logging.error(f"The {stream_name} stream has the following schema errors:\n{str_errors}")
if streams_errors:
pytest.fail(f"Please check your json_schema in selected streams {tuple(streams_errors.keys())}.")
def _validate_empty_streams(self, records, configured_catalog, allowed_empty_streams):
"""
        Only certain streams are allowed to be empty
"""
counter = Counter(record.stream for record in records)
all_streams = set(stream.stream.name for stream in configured_catalog.streams)
streams_with_records = set(counter.keys())
streams_without_records = all_streams - streams_with_records
streams_without_records = streams_without_records - allowed_empty_streams
assert not streams_without_records, f"All streams should return some records, streams without records: {streams_without_records}"
def _validate_expected_records(self, records, expected_records, flags):
"""
        We expect records from each stream to match expected_records, partially or fully, in exact or arbitrary order.
"""
actual_by_stream = self.group_by_stream(records)
expected_by_stream = self.group_by_stream(expected_records)
for stream_name, expected in expected_by_stream.items():
actual = actual_by_stream.get(stream_name, [])
self.compare_records(
stream_name=stream_name,
actual=actual,
expected=expected,
extra_fields=flags.extra_fields,
exact_order=flags.exact_order,
extra_records=flags.extra_records,
)
def test_read(
self,
connector_config,
configured_catalog,
inputs: BasicReadTestConfig,
expected_records: List[AirbyteMessage],
docker_runner: ConnectorRunner,
):
output = docker_runner.call_read(connector_config, configured_catalog)
records = [message.record for message in output if message.type == Type.RECORD]
assert records, "At least one record should be read using provided catalog"
if inputs.validate_schema:
self._validate_schema(records=records, configured_catalog=configured_catalog)
self._validate_empty_streams(records=records, configured_catalog=configured_catalog, allowed_empty_streams=inputs.empty_streams)
for pks, record in primary_keys_for_records(streams=configured_catalog.streams, records=records):
for pk_path, pk_value in pks.items():
assert pk_value is not None, (
f"Primary key subkeys {repr(pk_path)} " f"have null values or not present in {record.stream} stream records."
)
if expected_records:
self._validate_expected_records(records=records, expected_records=expected_records, flags=inputs.expect_records)
@staticmethod
def remove_extra_fields(record: Any, spec: Any) -> Any:
"""Remove keys from record that spec doesn't have, works recursively"""
if not isinstance(spec, Mapping):
return record
assert isinstance(record, Mapping), "Record or part of it is not a dictionary, but expected record is."
result = {}
for k, v in spec.items():
            assert k in record, "Record or part of it doesn't have an attribute that the expected record has."
result[k] = TestBasicRead.remove_extra_fields(record[k], v)
return result
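    # Small illustration (hypothetical records): extra keys in the actual
    # record are dropped, e.g. remove_extra_fields({"a": 1, "b": 2}, {"a": None}) -> {"a": 1}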
@staticmethod
def compare_records(stream_name, actual, expected, extra_fields, exact_order, extra_records):
"""Compare records using combination of restrictions"""
if exact_order:
for r1, r2 in zip(expected, actual):
if r1 is None:
assert extra_records, f"Stream {stream_name}: There are more records than expected, but extra_records is off"
break
if extra_fields:
r2 = TestBasicRead.remove_extra_fields(r2, r1)
assert r1 == r2, f"Stream {stream_name}: Mismatch of record order or values"
else:
expected = set(map(serialize, expected))
actual = set(map(serialize, actual))
missing_expected = set(expected) - set(actual)
assert not missing_expected, f"Stream {stream_name}: All expected records must be produced"
if not extra_records:
extra_actual = set(actual) - set(expected)
assert not extra_actual, f"Stream {stream_name}: There are more records than expected, but extra_records is off"
@staticmethod
def group_by_stream(records) -> MutableMapping[str, List[MutableMapping]]:
"""Group records by a source stream"""
result = defaultdict(list)
for record in records:
result[record.stream].append(record.data)
return result
| 48.212
| 142
| 0.701817
|
cb7dc36ada218a31a914ec1f6af20a7fe3c4678d
| 5,512
|
py
|
Python
|
grr/gui/plugins/artifact_view_test.py
|
ethicalhackeragnidhra/Grr
|
9ff9178396d9d16575e42dded33627cb09ac3af1
|
[
"Apache-2.0"
] | 1
|
2020-12-18T00:47:19.000Z
|
2020-12-18T00:47:19.000Z
|
grr/gui/plugins/artifact_view_test.py
|
ethicalhackeragnidhra/Grr
|
9ff9178396d9d16575e42dded33627cb09ac3af1
|
[
"Apache-2.0"
] | null | null | null |
grr/gui/plugins/artifact_view_test.py
|
ethicalhackeragnidhra/Grr
|
9ff9178396d9d16575e42dded33627cb09ac3af1
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- mode: python; encoding: utf-8 -*-
"""Test the artifact rendering interface."""
import os
from grr import config
from grr.gui import gui_test_lib
from grr.gui import runtests_test
from grr.lib import artifact
from grr.lib import artifact_registry
from grr.lib import flags
from grr.lib import parsers
from grr.lib.flows.general import collectors
class TestCmdProcessor(parsers.CommandParser):
output_types = ["SoftwarePackage"]
supported_artifacts = ["TestCmdArtifact"]
class TestArtifactRender(gui_test_lib.GRRSeleniumTest):
"""Test the Cron view GUI."""
def _UploadCustomArtifacts(self):
artifact_registry.REGISTRY.ClearRegistry()
test_artifacts_file = os.path.join(config.CONFIG["Test.data_dir"],
"artifacts", "test_artifacts.json")
with open(test_artifacts_file, "rb") as fd:
artifact.UploadArtifactYamlFile(fd.read(), token=self.token)
def _LoadSystemArtifacts(self):
artifact_registry.REGISTRY.ClearRegistry()
test_artifacts_file = os.path.join(config.CONFIG["Test.data_dir"],
"artifacts", "test_artifacts.json")
artifact_registry.REGISTRY.AddFileSource(test_artifacts_file)
def setUp(self):
super(TestArtifactRender, self).setUp()
self.client_id = self.SetupClients(1, system="linux")[0]
self.RequestAndGrantClientApproval(self.client_id)
def testArtifactRendering(self):
self._LoadSystemArtifacts()
self.Open("/")
self.Type("client_query", self.client_id.Basename())
self.Click("client_query_submit")
self.WaitUntilEqual(self.client_id, self.GetText, "css=span[type=subject]")
# Choose client 1
self.Click("css=td:contains('%s')" % self.client_id.Basename())
# First screen should be the Host Information already.
self.WaitUntil(self.IsTextPresent, "Host-0")
self.Click("css=a[grrtarget='client.launchFlows']")
self.Click("css=#_Collectors")
self.assertEqual(collectors.ArtifactCollectorFlow.__name__,
self.GetText("link=ArtifactCollectorFlow"))
self.Click("link=ArtifactCollectorFlow")
self.WaitUntil(self.IsTextPresent, "Artifact list")
self.Click("css=grr-artifacts-list-form button:contains('All Platforms')")
self.Click("css=grr-artifacts-list-form li:contains('Linux')")
    # Check search works. Note that test artifact names are used (see
    # test_data/artifacts/test_artifacts.json for details).
self.WaitUntil(self.IsTextPresent, "TestCmdArtifact")
self.WaitUntil(self.IsTextPresent, "TestFilesArtifact")
self.Type("css=grr-artifacts-list-form input[type=text]", u"Cmd")
self.WaitUntil(self.IsTextPresent, "TestCmdArtifact")
self.WaitUntilNot(self.IsTextPresent, "TestFilesArtifact")
# Check we can add to the list.
self.Click("css=grr-artifacts-list-form tr:contains('TestCmdArtifact')")
self.Click("css=grr-artifacts-list-form button:contains('Add')")
# Selected artifacts should be highlighted in bold.
self.WaitUntil(self.IsElementPresent, "css=grr-artifacts-list-form "
"strong:contains('TestCmdArtifact')")
# Check the artifact description loaded.
self.WaitUntil(self.IsTextPresent, "Test command artifact for dpkg.")
self.WaitUntil(self.IsTextPresent, "TestCmdProcessor")
def testSystemArtifactsAreNotMarkedInStartFlowForm(self):
self._LoadSystemArtifacts()
self.Open("/#/clients/%s/launch-flow" % self.client_id.Basename())
self.Click("css=#_Collectors")
self.Click("link=ArtifactCollectorFlow")
self.WaitUntil(self.IsElementPresent, "css=*:contains('TestCmdArtifact')")
self.WaitUntilNot(self.IsElementPresent,
"css=span[title~='Custom Uploaded Artifact'] > i.fa-user")
def testCustomArtifactsAreMarkedInStartFlowForm(self):
self._UploadCustomArtifacts()
self.Open("/#/clients/%s/launch-flow" % self.client_id.Basename())
self.Click("css=#_Collectors")
self.Click("link=ArtifactCollectorFlow")
self.WaitUntil(self.IsElementPresent, "css=*:contains('TestCmdArtifact') > "
"span[title~='Custom Uploaded Artifact'] > i.fa-user")
def testSystemArtifactsAreNotMarkedInFlowArguments(self):
self._UploadCustomArtifacts()
self.Open("/#/clients/%s/launch-flow" % self.client_id.Basename())
self.Click("css=#_Collectors")
self.Click("link=ArtifactCollectorFlow")
self.DoubleClick(
"css=grr-artifacts-list-form tr:contains('TestCmdArtifact')")
self.Click("css=button.Launch")
self.WaitUntil(self.IsElementPresent,
"css=grr-artifact-name:contains('TestCmdArtifact')")
self.WaitUntilNot(self.IsElementPresent,
"css=span[title~='Custom Uploaded Artifact'] > i.fa-user")
def testCustomArtifactsAreMarkedInFlowArguments(self):
self._UploadCustomArtifacts()
self.Open("/#/clients/%s/launch-flow" % self.client_id.Basename())
self.Click("css=#_Collectors")
self.Click("link=ArtifactCollectorFlow")
self.DoubleClick(
"css=grr-artifacts-list-form tr:contains('TestCmdArtifact')")
self.Click("css=button.Launch")
self.WaitUntil(self.IsElementPresent,
"css=grr-artifact-name:contains('TestCmdArtifact') "
"span[title~='Custom Uploaded Artifact'] > i.fa-user")
def main(argv):
# Run the full test suite
runtests_test.SeleniumTestProgram(argv=argv)
if __name__ == "__main__":
flags.StartMain(main)
| 37.243243
| 80
| 0.708999
|
99d3df1883d3ee9552db979d803c14e254a0c554
| 668
|
py
|
Python
|
website/urls.py
|
HASSANFARYAD/Django-simpleusers
|
5070eb7cbad0412eb94d8c83c822e7939dd44644
|
[
"MIT"
] | null | null | null |
website/urls.py
|
HASSANFARYAD/Django-simpleusers
|
5070eb7cbad0412eb94d8c83c822e7939dd44644
|
[
"MIT"
] | null | null | null |
website/urls.py
|
HASSANFARYAD/Django-simpleusers
|
5070eb7cbad0412eb94d8c83c822e7939dd44644
|
[
"MIT"
] | null | null | null |
from django.urls import path
from . import views
urlpatterns = [
# path('', views.home, name='home'),
path('',views.HomePageView.as_view(), name='home'),
path('detail/<int:id>/', views.detail, name='detail'),
# path('detail/<int:pk>/', views.DetailPageView.as_view, name='detal')
path('search/', views.search, name='search'),
path('contacts/create', views.ContactCreateView.as_view(), name="create"),
path('contacts/update/<int:pk>', views.ContactUpdateView.as_view(), name="update"),
path('contacts/delete/<int:pk>', views.ContactDeleteView.as_view(), name="delete"),
path('signup/', views.SignUpView.as_view(), name="signup"),
]
| 44.533333
| 87
| 0.669162
|
76fffa0ffa6f60dcfd4fad205dbcbdad18295a75
| 2,748
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/network/v20200301/get_virtual_network_gateway_vpnclient_connection_health.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_nextgen/network/v20200301/get_virtual_network_gateway_vpnclient_connection_health.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_nextgen/network/v20200301/get_virtual_network_gateway_vpnclient_connection_health.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetVirtualNetworkGatewayVpnclientConnectionHealthResult',
'AwaitableGetVirtualNetworkGatewayVpnclientConnectionHealthResult',
'get_virtual_network_gateway_vpnclient_connection_health',
]
@pulumi.output_type
class GetVirtualNetworkGatewayVpnclientConnectionHealthResult:
"""
List of virtual network gateway vpn client connection health.
"""
def __init__(__self__, value=None):
if value and not isinstance(value, list):
raise TypeError("Expected argument 'value' to be a list")
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def value(self) -> Optional[Sequence['outputs.VpnClientConnectionHealthDetailResponseResult']]:
"""
List of vpn client connection health.
"""
return pulumi.get(self, "value")
class AwaitableGetVirtualNetworkGatewayVpnclientConnectionHealthResult(GetVirtualNetworkGatewayVpnclientConnectionHealthResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetVirtualNetworkGatewayVpnclientConnectionHealthResult(
value=self.value)
def get_virtual_network_gateway_vpnclient_connection_health(resource_group_name: Optional[str] = None,
virtual_network_gateway_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetVirtualNetworkGatewayVpnclientConnectionHealthResult:
"""
Use this data source to access information about an existing resource.
:param str resource_group_name: The name of the resource group.
:param str virtual_network_gateway_name: The name of the virtual network gateway.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['virtualNetworkGatewayName'] = virtual_network_gateway_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:network/v20200301:getVirtualNetworkGatewayVpnclientConnectionHealth', __args__, opts=opts, typ=GetVirtualNetworkGatewayVpnclientConnectionHealthResult).value
return AwaitableGetVirtualNetworkGatewayVpnclientConnectionHealthResult(
value=__ret__.value)
| 41.636364
| 208
| 0.725619
|
9bc948e95d8f9f889423d879138c7a9201fe095b
| 17,878
|
py
|
Python
|
mod.py
|
Gejfish/MOnika
|
eecc54783187ccc224ba02d0c6d5624efa241146
|
[
"MIT"
] | null | null | null |
mod.py
|
Gejfish/MOnika
|
eecc54783187ccc224ba02d0c6d5624efa241146
|
[
"MIT"
] | null | null | null |
mod.py
|
Gejfish/MOnika
|
eecc54783187ccc224ba02d0c6d5624efa241146
|
[
"MIT"
] | null | null | null |
from discord import channel
from discord.ext import commands
import discord
import cogs
import random
import asyncio
import requests
from discord import File
import json
import os
import traceback
from discord import User
from discord.ext.commands import Bot
class ModCog(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command()
async def serverinfo(self, ctx):
time = str(ctx.message.guild.created_at).split(" ")[0]
osoby = ctx.guild.member_count
guild_id = ctx.guild.id
guild_owner = f"<@!{ctx.guild.owner_id}>"
embed = discord.Embed(colour=0xE657EE)
embed.set_author(name=f'Informacje o serwerze {ctx.guild}')
embed.add_field(name='Właściciel:', value=guild_owner, inline=False)
embed.add_field(name='Id servera:', value=guild_id, inline=False)
embed.add_field(name='Ilość osób:', value=osoby, inline=False)
embed.add_field(name='Ilość botów:', value="Idk", inline=False)
embed.add_field(name='Ilość kanałów:', value=len(ctx.guild.channels), inline=False)
embed.add_field(name='Ilość ról:', value=len(ctx.guild.roles), inline=False)
embed.add_field(name='Ilość emotek:', value=len(ctx.guild.emojis), inline=False)
embed.set_thumbnail(url=ctx.guild.icon_url)
embed.set_footer(text=f'Serwer został stworzony dnia {time}')
await ctx.send(embed=embed)
@commands.command()
async def servers(self, ctx):
activeservers = self.bot.guilds
        if ctx.author.id in (453950321790550016, 327899255249436672):
            for guild in activeservers:
                await ctx.send(guild.name)
        else:
            embed = discord.Embed(title="Nie ma tak łatwo chuju", color=0xE657EE)
            await ctx.send(embed=embed)
@commands.command()
async def kick(self, ctx, member : discord.Member=None, *, reason=None):
        if not member:
            embed = discord.Embed(title="Nie podałeś użytkownika", color=0xE657EE)
            return await ctx.send(embed=embed)
if ctx.author.top_role <= member.top_role:
embed=discord.Embed(title="Brak permisji!", description=f"{ctx.author} Nie masz wystarczajocych permisji", color=0xE657EE)
return await ctx.send(embed=embed)
        if ctx.author.guild_permissions.kick_members:
            await member.kick(reason=reason)
            embed = discord.Embed(title="Kick!", description=f"Wyrzucono użytkownika: {member} powód: `{reason}`.", color=0xE657EE)
            await ctx.send(embed=embed)
if not ctx.author.guild_permissions.kick_members:
embed=discord.Embed(title="Brak permisji!", description=f"{ctx.author} Nie masz wystarczających permisji", color=0xE657EE)
await ctx.send(embed=embed)
@commands.command()
async def unban(self, ctx, *, member):
if ctx.author.guild_permissions.ban_members == False:
e=discord.Embed(title="Nie posiadasz uprawnień aby odbanować użykownika", color=0xE657EE)
return await ctx.send(embed=e)
try:
banned_users = await ctx.guild.bans()
member_name, member_discriminator = member.split('#')
for ban_entry in banned_users:
user = ban_entry.user
if (user.name, user.discriminator) == (member_name, member_discriminator):
await ctx.guild.unban(user)
embed=discord.Embed(title="Odbanowaleś", description=f"{user.mention} Uwu", color=0xE657EE)
await ctx.send(embed=embed)
except:
embed=discord.Embed(title="Nie posiadam uprawnien aby odbanowac", color=0xE657EE)
await ctx.send(embed=embed)
@commands.command()
async def ban(self, ctx, member : discord.Member=None, *, reason=None):
if not ctx.author.guild_permissions.ban_members:
embed=discord.Embed(title="Brak permisji!", description=f"{ctx.author} Nie masz wystarczajocych permisji", color=0xE657EE)
return await ctx.send(embed=embed)
if not member:
embed=discord.Embed(title="Nie podałeś użytkownika", color=0xE657EE)
return await ctx.send(embed=embed)
if ctx.author.top_role <= member.top_role:
embed=discord.Embed(title="Brak permisji!", description=f"{ctx.author} Nie masz wystarczajocych permisji", color=0xE657EE)
return await ctx.send(embed=embed)
        try:
            # ban_members was already verified above, so just attempt the ban
            await member.ban(reason=reason)
            embed = discord.Embed(title="Ban!", description=f"Zbanowano użytkownika powód: `{reason}`.", color=0xE657EE)
            await ctx.send(embed=embed)
        except discord.HTTPException:
            embed = discord.Embed(title="Nie posiadam uprawnień do banowania", color=0xE657EE)
            await ctx.send(embed=embed)
@commands.command(pass_context=True)
async def giverole(self, ctx, user: discord.Member=None, role: discord.Role=None):
if not user:
embed=discord.Embed(title="Nie podałeś użytkownika", color=0xE657EE)
return await ctx.send(embed=embed)
if not role:
embed=discord.Embed(title="Nie podałeś roli", color=0xE657EE)
return await ctx.send(embed=embed)
        if not ctx.author.guild_permissions.administrator:
            embed = discord.Embed(title="Nie masz uprawnień", color=0xE657EE)
            return await ctx.send(embed=embed)
        await user.add_roles(role)
        embed = discord.Embed(title="Dodałeś role:", description=f"{user.name} Dostał role {role.name}", color=0xE657EE)
        await ctx.send(embed=embed)
@commands.command()
async def userinfo(self, ctx, m: discord.Member=None):
if not m:
m = ctx.author
roles = [role for role in m.roles]
embed = discord.Embed(colour=0xE657EE, timestamp=ctx.message.created_at)
embed.set_author(name=f"User Info - {m}")
embed.set_thumbnail(url=m.avatar_url)
embed.set_footer(text=f"Wywołane przez {ctx.author}", icon_url=ctx.author.avatar_url)
embed.add_field(name="ID:", value=m.id)
embed.add_field(name="Nazwa użytkownika na serwerze:", value=m.display_name)
embed.add_field(name="Stworzone w:", value=m.created_at.strftime("%a, %#d %B %Y, %I:%M %p UTC"))
embed.add_field(name="Dołączył w:", value=m.joined_at.strftime("%a, %#d %B %Y, %I:%M %p UTC"))
embed.add_field(name=f"Role ({len(roles)})", value=" ".join([role.mention for role in roles]))
embed.add_field(name="Top role:", value=m.top_role.mention)
embed.add_field(name="Bot?", value=m.bot)
await ctx.send(embed=embed)
@commands.command(aliases=['clean'])
async def clear(self, ctx, *, arg):
if ctx.author.guild_permissions.manage_messages == True:
try:
embed=discord.Embed(title=f"Usunaleś {int(arg)} wiadomości", color=0xE657EE)
await ctx.send(embed=embed)
await asyncio.sleep(0.5)
await ctx.channel.purge(limit=int(arg) + 2)
except ValueError:
if arg == "w chuj":
embed=discord.Embed(title=f"Usunaleś w chuj wiadomosci czyli 100 wiadomości", color=0xE657EE)
await ctx.send(embed=embed)
await asyncio.sleep(0.5)
await ctx.channel.purge(limit=100 + 2)
else:
e=discord.Embed(title="Argument nie jest liczba", color=0xE657EE)
await ctx.send(embed=e)
if not ctx.author.guild_permissions.manage_messages:
embed=discord.Embed(title="Nie masz permisji żeby usuwać wiadmości", color=0xE657EE)
await ctx.send(embed=embed)
@commands.command()
async def mute(self, ctx, user: discord.Member=None, *, reason=None):
if not user:
a=discord.Embed(title="Nie podałeś użytkownika", color=0xE657EE)
return await ctx.send(embed=a)
        if not ctx.author.guild_permissions.mute_members:
            embed = discord.Embed(title="Nie masz wystarczających permisji", color=0xE657EE)
            return await ctx.send(embed=embed)
if ctx.author.top_role <= user.top_role:
embed=discord.Embed(title="Nie masz wystarczających premisji", color=0xE657EE)
return await ctx.send(embed=embed)
try:
            for channel in ctx.guild.channels:
                # Deny sending and reacting via a per-member channel overwrite
                overwrite = channel.overwrites_for(user)
                overwrite.update(send_messages=False, add_reactions=False)
                await channel.set_permissions(user, overwrite=overwrite, reason=reason)
e=discord.Embed(title=f"Zmutowano `{user.name}` przez `{ctx.author.name}`", color=0xE657EE)
await ctx.send(embed=e)
except:
e=discord.Embed(title="Nie posiadam uprawnien aby go zmutowac", color=0xE657EE)
await ctx.send(embed=e)
@commands.command()
async def unmute(self, ctx, user: discord.Member=None, *, reason=None):
if not user:
a=discord.Embed(title="Nie podałeś użytkownika", color=0xE657EE)
return await ctx.send(embed=a)
        if not ctx.author.guild_permissions.mute_members:
            embed = discord.Embed(title="Nie masz wystarczających permisji", color=0xE657EE)
            return await ctx.send(embed=embed)
if ctx.author.top_role <= user.top_role:
embed=discord.Embed(title="Nie masz wystarczających premisji", color=0xE657EE)
return await ctx.send(embed=embed)
try:
            for channel in ctx.guild.channels:
                overwrite = channel.overwrites_for(user)
                overwrite.update(send_messages=None, add_reactions=None)
                # Drop the overwrite entirely once nothing else is set on it
                if overwrite.is_empty():
                    await channel.set_permissions(user, overwrite=None, reason=reason)
                else:
                    await channel.set_permissions(user, overwrite=overwrite, reason=reason)
e=discord.Embed(title=f"Odmutowano `{user.name}` przez `{ctx.author.name}`", color=0xE657EE)
await ctx.send(embed=e)
except:
e=discord.Embed(title="Nie posiadam uprawnien aby go odmutowac", color=0xE657EE)
await ctx.send(embed=e)
@commands.command()
async def warn(self, ctx, member: discord.Member, *, reason="nie podano powodu"):
if ctx.author.guild_permissions.kick_members == False:
embed=discord.Embed(title="Nie masz permisji aby dawać warny", color=0xE657EE)
return await ctx.send(embed=embed)
if ctx.author.top_role <= member.top_role:
embed=discord.Embed(title="Nie możesz dać warna tej osobie", color=0xE657EE)
return await ctx.send(embed=embed)
if ctx.author.guild_permissions.kick_members == True:
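            # warns.json layout (as read and written below):
            # {guild_id: {member_id: {warn_number: reason, ...}, ...}, ...}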
with open("warns.json", "r") as f:
warns = json.load(f)
if not str(ctx.guild.id) in warns:
warns[str(ctx.guild.id)] = {}
if not str(member.id) in warns[str(ctx.guild.id)]:
warns[str(ctx.guild.id)][str(member.id)] = {}
warns[str(ctx.guild.id)][str(member.id)][str(len(warns[str(ctx.guild.id)][str(member.id)]) + 1)] = reason
with open("warns.json", "w") as f:
json.dump(warns, f, indent=4)
embed=discord.Embed(title=f"`{member.name}` dostał ostrzeżenie z powodu `{reason}`", color=0xE657EE)
e=discord.Embed(title=f"Dostałeś ostrzeżenie przez `{ctx.author.name}` na serwerze `{ctx.guild.name}` z powodu `{reason}`", color=0xE657EE)
await ctx.send(embed=embed)
await member.send(embed=e)
@commands.command()
async def warns(self, ctx, member: discord.Member=None):
member = member or ctx.author
with open("warns.json", "r") as f:
warns = json.load(f)
e = discord.Embed(title=f"Warny użytkownika {member.name}:", description="\n".join([f"{warn}. {warns[str(ctx.guild.id)][str(member.id)][warn]}" for warn in warns[str(ctx.guild.id)][str(member.id)]]), color=0xE657EE)
await ctx.send(embed=e)
@commands.command()
async def removewarn(self, ctx, member: discord.Member, _id: str):
if ctx.author.guild_permissions.kick_members == False:
embed=discord.Embed(title="Nie masz permisji aby dawać warny", color=0xE657EE)
return await ctx.send(embed=embed)
if ctx.author.top_role <= member.top_role:
embed=discord.Embed(title="Nie możesz dać warna tej osobie", color=0xE657EE)
return await ctx.send(embed=embed)
if ctx.author.guild_permissions.kick_members == True:
with open("warns.json", "r") as f:
warns = json.load(f)
del warns[str(ctx.guild.id)][str(member.id)][_id]
with open("warns.json", "w") as f:
json.dump(warns, f, indent=4)
embed=discord.Embed(title=f"Usunięto warna o ID `{_id}` użytkownikowi `{member.name}`", color=0xE657EE)
await ctx.send(embed=embed)
@commands.command()
async def clearwarns(self, ctx, member: discord.Member):
if ctx.author.guild_permissions.kick_members == False:
embed=discord.Embed(title="Nie masz permisji aby dawać warny", color=0xE657EE)
return await ctx.send(embed=embed)
if ctx.author.top_role <= member.top_role:
embed=discord.Embed(title="Nie możesz dać warna tej osobie", color=0xE657EE)
return await ctx.send(embed=embed)
if ctx.author.guild_permissions.kick_members == True:
with open("warns.json", "r") as f:
warns = json.load(f)
del warns[str(ctx.guild.id)][str(member.id)]
with open("warns.json", "w") as f:
json.dump(warns, f, indent=4)
embed=discord.Embed(title=f"Użytkownik `{member.name}` został wyczyszczony z warnów", color=0xE657EE)
await ctx.send(embed=embed)
@commands.command()
async def addtodo(self, ctx, *, arg):
arg = arg.replace("@", "@\u200b")
if len(arg) > 100:
return await ctx.send("Wiadomość przekroczyła limit znaków (`limit 100`)")
with open("todo.json", "r") as f:
t = json.load(f)
if not str(ctx.author.id) in t:
t[str(ctx.author.id)] = "Lista rzeczy do zrobienia:"
t[str(ctx.author.id)] = t[str(ctx.author.id)] + "\n- " + arg
with open("todo.json", "w") as f:
json.dump(t, f, indent=4)
embed=discord.Embed(title=f"Dodano `{arg}` do twojego todo", color=0xE657EE)
await ctx.send(embed=embed)
@commands.command()
async def cleartodo(self, ctx):
with open("todo.json", "r") as f:
iu = json.load(f)
iu[str(ctx.author.id)] = "Lista rzeczy do zrobienia:"
with open("todo.json", "w") as f:
json.dump(iu, f, indent=4)
embed=discord.Embed(title="Wyczyszczono twoje todo", color=0xE657EE)
await ctx.send(embed=embed)
@commands.command(description="Usuwa tekst z todo", usage="todo remove (tekst)", aliases=["-", "delete", "rem", "del"])
async def removetodo(self, ctx, *, arg):
arg = arg.replace("@", "@\u200b")
with open("todo.json", "r") as f:
t = json.load(f)
t[str(ctx.author.id)] = t[str(ctx.author.id)].replace("\n- " + arg, "")
with open("todo.json", "w") as f:
json.dump(t, f, indent=4)
embed=discord.Embed(title=f"Usunięto `{arg}` z todo.")
await ctx.send(embed=embed)
@commands.command()
    async def viewtodo(self, ctx, member: discord.Member=None):
try:
m = member or ctx.author
if not member:
with open("todo.json", "r") as f:
iu = json.load(f)
if str(ctx.author.id) not in iu:
ius = "Użytkownik nie posiada todo :C"
else:
ius = iu[str(ctx.author.id)]
e=discord.Embed(title=f"Todo użytkownika {ctx.author.name}", description=ius, colour=0xE657EE)
e.set_thumbnail(url=(m.avatar_url))
return await ctx.send(embed=e)
with open("todo.json", "r") as f:
iu = json.load(f)
if str(member.id) not in iu:
ius = "Użytkownik nie posiada todo :C"
else:
ius = iu[str(member.id)]
e=discord.Embed(title=f"Todo użytkownika {member.name}", description=ius, colour=0xE657EE, timestamp=ctx.message.created_at)
e.set_thumbnail(url=(m.avatar_url))
await ctx.send(embed=e)
except:
await ctx.send(traceback.format_exc())
def setup(bot):
bot.add_cog(ModCog(bot))
print('Mod Gotowe')
| 46.801047
| 224
| 0.595648
|
949d16efd3c645268e3385b286265125d19ceb56
| 4,066
|
py
|
Python
|
examples/joystick_uart/python/JoystickUART.py
|
gdsports/xac_joystick_tinyusb
|
5d2d520adaa0ba21fafdd92468f79154d7753707
|
[
"MIT"
] | 1
|
2021-07-14T10:40:16.000Z
|
2021-07-14T10:40:16.000Z
|
examples/joystick_uart/python/JoystickUART.py
|
gdsports/xac_joystick_tinyusb
|
5d2d520adaa0ba21fafdd92468f79154d7753707
|
[
"MIT"
] | null | null | null |
examples/joystick_uart/python/JoystickUART.py
|
gdsports/xac_joystick_tinyusb
|
5d2d520adaa0ba21fafdd92468f79154d7753707
|
[
"MIT"
] | 1
|
2021-02-13T03:42:20.000Z
|
2021-02-13T03:42:20.000Z
|
#!/usr/bin/python3
"""
Interface to XAC joystick (joystick_uart.ino) via serial UART port.
MIT License
Copyright (c) 2021 gdsports625@gmail.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from struct import pack
import threading
class JoystickUART:
"""Joystick Serial Interface"""
def __init__(self):
self.thread_lock = threading.Lock()
self.ser_port = 0
self.x_axis = 128
self.y_axis = 128
self.my_buttons = 0
def begin(self, serial_port):
"""Start JoystickUART"""
with self.thread_lock:
self.ser_port = serial_port
self.x_axis = 128
self.y_axis = 128
self.my_buttons = 0
self.write()
return
def end(self):
"""End JoystickUART"""
self.ser_port.close()
return
def write(self):
"""Send JoystickUART state"""
self.ser_port.write(pack('<BBBBBBBBBBBB', 2, 9, 2, self.my_buttons, \
self.x_axis, self.y_axis, \
0, 0, 0, 0, 0, 3))
return
def press(self, button_number):
"""Press button 0..7"""
with self.thread_lock:
button_number = button_number & 0x07
self.my_buttons |= (1<<button_number)
self.write()
return
def release(self, button_number):
"""Release button 0..7"""
with self.thread_lock:
button_number = button_number & 0x07
self.my_buttons &= ~(1<<button_number)
self.write()
return
def releaseAll(self):
"""Release all buttons"""
with self.thread_lock:
self.my_buttons = 0
self.write()
return
def buttons(self, buttons):
"""Set all buttons"""
with self.thread_lock:
self.my_buttons = buttons
self.write()
return
def xAxis(self, position):
"""Move left stick X axis 0..128..255"""
with self.thread_lock:
self.x_axis = position
self.write()
return
def yAxis(self, position):
"""Move left stick Y axis 0..128..255"""
with self.thread_lock:
self.y_axis = position
self.write()
return
def main():
""" test JoystickUART class """
import sys
import serial
import time
import random
xac = JoystickUART()
try:
xac.begin(serial.Serial('/dev/ttyS0', 115200, timeout=0))
except:
print("Cannot open /dev/ttyS0")
sys.exit(1)
while True:
# Press and hold every button 0..7
for button in range(0, 8):
xac.press(button)
time.sleep(0.1)
time.sleep(1)
# Release all buttons
xac.releaseAll()
time.sleep(1)
# Press all 8 buttons at the same time
xac.buttons(0xff)
time.sleep(1)
# Release all buttons
xac.releaseAll()
time.sleep(1)
xac.xAxis(random.randint(0, 255))
xac.yAxis(random.randint(0, 255))
if __name__ == "__main__":
main()
| 29.251799
| 78
| 0.614855
|
66be06c047b34b4b0977b55355eaf4ccfc9e4372
| 4,167
|
py
|
Python
|
app/models.py
|
tahoe/janitor
|
b6ce73bddc13c70079bdc7ba4c7a9b3ee0cad0bd
|
[
"Apache-2.0"
] | 52
|
2019-08-14T10:48:26.000Z
|
2022-03-30T18:09:08.000Z
|
app/models.py
|
tahoe/janitor
|
b6ce73bddc13c70079bdc7ba4c7a9b3ee0cad0bd
|
[
"Apache-2.0"
] | 18
|
2019-08-20T04:13:37.000Z
|
2022-01-31T12:40:12.000Z
|
app/models.py
|
tahoe/janitor
|
b6ce73bddc13c70079bdc7ba4c7a9b3ee0cad0bd
|
[
"Apache-2.0"
] | 12
|
2019-08-14T10:49:11.000Z
|
2020-09-02T18:56:34.000Z
|
from datetime import datetime
from flask import current_app
from app import db, ma
from sqlalchemy import Enum
from marshmallow import fields
PROVIDER_TYPES = Enum('transit', 'backbone', 'transport', 'peering', 'facility',
'multi', name='ProviderType')
class Provider(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(128), index=True)
type = db.Column(PROVIDER_TYPES)
email_esc = db.Column(db.VARCHAR(128), nullable=True)
circuits = db.relationship('Circuit', backref='provider', lazy='dynamic')
def __repr__(self):
return f'<Provider {self.name} type: {self.type}>'
class Circuit(db.Model):
id = db.Column(db.Integer, primary_key=True)
provider_cid = db.Column(db.VARCHAR(128), index=True, unique=True)
a_side = db.Column(db.VARCHAR(128), nullable=True)
z_side = db.Column(db.VARCHAR(128), nullable=True)
provider_id = db.Column(db.Integer, db.ForeignKey('provider.id'))
contract_filename = db.Column(db.String(256), default=None, nullable=True)
def __repr__(self):
return f'<Circuit {self.provider_cid}>'
class CircuitSchema(ma.ModelSchema):
class Meta:
model = Circuit
sqla_session = db.session
include_fk = True
maintenances = fields.Nested('CircuitMaintSchema', default=[], many=True)
class CircuitMaintSchema(ma.ModelSchema):
maint_id = fields.Int()
circuit_id = fields.Int()
impact = fields.Str()
date = fields.Date()
class Maintenance(db.Model):
id = db.Column(db.Integer, primary_key=True)
provider_maintenance_id = db.Column(db.String(128), nullable=True)
start = db.Column(db.TIME)
end = db.Column(db.TIME)
timezone = db.Column(db.String(128), nullable=True)
cancelled = db.Column(db.INT, default=0)
rescheduled = db.Column(db.INT, default=0)
rescheduled_id = db.Column(db.Integer, db.ForeignKey('maintenance.id'),
nullable=True)
location = db.Column(db.String(2048), index=True, nullable=True)
reason = db.Column(db.TEXT(), nullable=True)
received_dt = db.Column(db.DateTime)
started = db.Column(db.INT, default=0)
ended = db.Column(db.INT, default=0)
updates = db.relationship('MaintUpdate', backref='maintenance',
lazy='dynamic')
def __repr__(self):
return f'<Maintenance {self.provider_maintenance_id}>'
class MaintCircuit(db.Model):
id = db.Column(db.Integer, primary_key=True)
maint_id = db.Column(db.Integer, db.ForeignKey('maintenance.id'))
circuit_id = db.Column(db.Integer, db.ForeignKey('circuit.id'))
impact = db.Column(db.VARCHAR(128))
date = db.Column(db.DATE)
maintenance = db.relationship("Maintenance", backref="circuits")
circuit = db.relationship("Circuit", backref="maintenances")
class MaintUpdate(db.Model):
id = db.Column(db.Integer, primary_key=True)
maintenance_id = db.Column(db.Integer, db.ForeignKey('maintenance.id'))
comment = db.Column(db.TEXT())
updated = db.Column(db.DateTime, default=datetime.utcnow)
class ApschedulerJobs(db.Model):
id = db.Column(db.VARCHAR(191), primary_key=True)
next_run_time = db.Column(db.FLOAT)
job_state = db.Column(db.BLOB, nullable=False)
class MaintenanceSchema(ma.ModelSchema):
class Meta:
model = Maintenance
sqla_session = db.session
include_fk = True
circuits = fields.Nested('MaintenanceCircuitSchema',
default=[], many=True)
class MaintenanceCircuitSchema(ma.ModelSchema):
class Meta:
model = MaintCircuit
sqla_session = db.session
# include_fk = True
# circuit_id = fields.Int()
# impact = fields.Str()
# date = fields.Date()
#details = fields.Nested('ProviderCircuitSchema', default=[], many=True)
class ProviderSchema(ma.ModelSchema):
class Meta:
model = Provider
sqla_session = db.session
circuits = fields.Nested('ProviderCircuitSchema', default=[], many=True)
class ProviderCircuitSchema(ma.ModelSchema):
id = fields.Int()
provider_cid = fields.Str()
a_side = fields.Str()
z_side = fields.Str()
| 31.097015
| 80
| 0.680106
|
d1925ae362138c79bd7a7a9f0778870b84429742
| 5,961
|
py
|
Python
|
code/test_ntt.py
|
elimsc/stark-anatomy
|
5e316e685a1f87cef1d26590bf1584f6523dc603
|
[
"Apache-2.0"
] | null | null | null |
code/test_ntt.py
|
elimsc/stark-anatomy
|
5e316e685a1f87cef1d26590bf1584f6523dc603
|
[
"Apache-2.0"
] | null | null | null |
code/test_ntt.py
|
elimsc/stark-anatomy
|
5e316e685a1f87cef1d26590bf1584f6523dc603
|
[
"Apache-2.0"
] | null | null | null |
from pyspark import SparkConf, SparkContext
from base.algebra import *
from rdd.rdd_poly import poly_degree
from base.univariate import *
from base.ntt import *
import os
from rdd.rdd_poly import (
ntt1,
rdd_fast_coset_divide,
rdd_fast_coset_evaluate,
rdd_fast_multiply,
rdd_intt,
rdd_ntt,
)
conf = SparkConf().set("spark.driver.memory", "8g").set("spark.executor.memory", "4g")
sc = SparkContext(conf=conf)
def test_ntt():
field = Field.main()
logn = 7
n = 1 << logn
primitive_root = field.primitive_nth_root(n)
coefficients = [field.sample(os.urandom(17)) for i in range(n)]
poly = Polynomial(coefficients)
rdd_cofs = sc.parallelize(list(enumerate(coefficients)))
values = ntt(primitive_root, coefficients)
values1 = ntt1(primitive_root, coefficients)
rdd_value = rdd_ntt(primitive_root, n, rdd_cofs)
values2 = rdd_value.values().collect()
values_again = poly.evaluate_domain(
[primitive_root ^ i for i in range(len(values))]
)
assert values == values_again, "ntt does not compute correct batch-evaluation"
    assert values == values1, "ntt1 does not compute correct batch-evaluation"
assert values == values2, "rdd_ntt does not compute correct batch-evaluation"
def test_intt():
field = Field.main()
logn = 7
n = 1 << logn
primitive_root = field.primitive_nth_root(n)
ninv = FieldElement(n, field).inverse()
values = [field.sample(os.urandom(1)) for i in range(n)]
coeffs = ntt(primitive_root, values)
rdd_coeffs = sc.parallelize(list(enumerate(coeffs)))
values_again = intt(primitive_root, coeffs)
rdd_value = rdd_intt(primitive_root, n, ninv, rdd_coeffs)
values2 = [v for (_, v) in rdd_value.collect()]
assert values == values_again, "inverse ntt is different from forward ntt"
assert values == values2, "rdd_intt dont work"
def test_multiply():
field = Field.main()
logn = 6
n = 1 << logn
primitive_root = field.primitive_nth_root(n)
for trial in range(5):
lhs_degree = int(os.urandom(1)[0]) % (n // 2)
rhs_degree = int(os.urandom(1)[0]) % (n // 2)
lhs = Polynomial([field.sample(os.urandom(17)) for i in range(lhs_degree + 1)])
rhs = Polynomial([field.sample(os.urandom(17)) for i in range(rhs_degree + 1)])
fast_product = fast_multiply(lhs, rhs, primitive_root, n)
slow_product = lhs * rhs
lhs1 = sc.parallelize(list(enumerate(lhs.coefficients)))
rhs1 = sc.parallelize(list(enumerate(rhs.coefficients)))
product1 = rdd_fast_multiply(lhs1, rhs1, primitive_root, n)
assert len(slow_product.coefficients) == product1.count()
assert fast_product == slow_product, "fast product does not equal slow product"
assert slow_product.coefficients == product1.values().collect()
def test_divide():
field = Field.main()
logn = 6
n = 1 << logn
primitive_root = field.primitive_nth_root(n)
for trial in range(10):
lhs_degree = int(os.urandom(1)[0]) % (n // 2)
rhs_degree = int(os.urandom(1)[0]) % (n // 2)
lhs_coffs = [field.sample(os.urandom(17)) for i in range(lhs_degree + 1)]
rhs_coffs = [field.sample(os.urandom(17)) for i in range(rhs_degree + 1)]
lhs = Polynomial(lhs_coffs)
rhs = Polynomial(rhs_coffs)
fast_product = fast_multiply(lhs, rhs, primitive_root, n)
quotient = fast_coset_divide(
fast_product, lhs, field.generator(), primitive_root, n
)
lhs1 = sc.parallelize(list(enumerate(fast_product.coefficients)))
rhs1 = sc.parallelize(list(enumerate(lhs.coefficients)))
quotient1 = rdd_fast_coset_divide(
lhs1, rhs1, field.generator(), primitive_root, n
)
assert quotient1.count() == len(quotient.coefficients)
assert quotient1.values().collect() == quotient.coefficients
assert quotient == rhs, "fast divide does not equal original factor"
def test_interpolate():
field = Field.main()
logn = 9
n = 1 << logn
primitive_root = field.primitive_nth_root(n)
for trial in range(10):
N = sum((1 << (8 * i)) * int(os.urandom(1)[0]) for i in range(8)) % n
if N == 0:
continue
print("N:", N)
values = [field.sample(os.urandom(17)) for i in range(N)]
domain = [field.sample(os.urandom(17)) for i in range(N)]
poly = fast_interpolate(domain, values, primitive_root, n)
print("poly degree:", poly.degree())
values_again = fast_evaluate(poly, domain, primitive_root, n)[0:N]
# values_again = poly.evaluate_domain(domain)
if values != values_again:
print("fast interpolation and evaluation are not inverses")
print("expected:", ",".join(str(c.value) for c in values))
print("observed:", ",".join(str(c.value) for c in values_again))
assert False
print("")
def test_coset_evaluate():
field = Field.main()
logn = 9
n = 1 << logn
primitive_root = field.primitive_nth_root(n)
two = FieldElement(2, field)
domain = [two * (primitive_root ^ i) for i in range(n)]
degree = ((int(os.urandom(1)[0]) * 256 + int(os.urandom(1)[0])) % n) - 1
coefficients = [field.sample(os.urandom(17)) for i in range(degree + 1)]
poly = Polynomial(coefficients)
rdd_poly = sc.parallelize(list(enumerate(coefficients)))
values_fast = fast_coset_evaluate(poly, two, primitive_root, n)
values_traditional = [poly.evaluate(d) for d in domain]
values_rdd = rdd_fast_coset_evaluate(rdd_poly, two, primitive_root, n)
assert all(
vf == vt for (vf, vt) in zip(values_fast, values_traditional)
), "values do not match with traditional evaluations"
assert values_fast == values_rdd.values().collect()
# test_ntt()
# test_intt()
# test_coset_evaluate()
test_divide()
# test_multiply()
sc.stop()
| 31.707447
| 87
| 0.651904
|
8fc7c6785eda64e5a4118b81690eaea44c65b255
| 2,210
|
py
|
Python
|
scattertext/termscoring/CohensD.py
|
mastafaMicrosoft/scattertext
|
6a9b6b85525bc25dec75c4767668881224dd5612
|
[
"Apache-2.0"
] | 1,823
|
2016-07-28T00:25:56.000Z
|
2022-03-30T12:33:57.000Z
|
scattertext/termscoring/CohensD.py
|
mastafaMicrosoft/scattertext
|
6a9b6b85525bc25dec75c4767668881224dd5612
|
[
"Apache-2.0"
] | 92
|
2016-07-28T23:13:20.000Z
|
2022-01-24T03:53:38.000Z
|
scattertext/termscoring/CohensD.py
|
mastafaMicrosoft/scattertext
|
6a9b6b85525bc25dec75c4767668881224dd5612
|
[
"Apache-2.0"
] | 271
|
2016-12-26T12:56:08.000Z
|
2022-03-24T19:35:13.000Z
|
import numpy as np
from scattertext.termscoring.CohensDCalculator import CohensDCalculator
from scattertext.termscoring.CorpusBasedTermScorer import CorpusBasedTermScorer
class CohensD(CorpusBasedTermScorer, CohensDCalculator):
'''
Cohen's d scores
term_scorer = (CohensD(corpus).set_categories('Positive', ['Negative'], ['Plot']))
html = st.produce_frequency_explorer(
corpus,
category='Positive',
not_categories=['Negative'],
neutral_categories=['Plot'],
term_scorer=term_scorer,
metadata=rdf['movie_name'],
grey_threshold=0,
show_neutral=True
)
file_name = 'rotten_fresh_fre.html'
open(file_name, 'wb').write(html.encode('utf-8'))
IFrame(src=file_name, width=1300, height=700)
'''
def _set_scorer_args(self, **kwargs):
pass
def get_scores(self, *args):
return self.get_score_df()['cohens_d']
def get_score_df(self, correction_method=None):
'''
:param correction_method: str or None, correction method from statsmodels.stats.multitest.multipletests
'fdr_bh' is recommended.
:return: pd.DataFrame
'''
# From https://people.kth.se/~lang/Effect_size.pdf
        # Shinichi Nakagawa and Innes C. Cuthill. Effect size, confidence interval and statistical
# significance: a practical guide for biologists. 2007. In Biological Reviews 82.
#
# Modification: when calculating variance, an empty document is added to each set
X = self._get_X().astype(np.float64)
X_doc_len_norm = X / X.sum(axis=1)
X_doc_len_norm[np.isnan(X_doc_len_norm)] = 0
cat_X, ncat_X = self._get_cat_and_ncat(X_doc_len_norm)
orig_cat_X, orig_ncat_X = self._get_cat_and_ncat(X)
score_df = (self
.get_cohens_d_df(cat_X, ncat_X, orig_cat_X, orig_ncat_X, correction_method)
.set_index(np.array(self._get_index())))
return score_df
def get_name(self):
return "Cohen's d"
class HedgesR(CohensD):
def get_scores(self, *args):
return self.get_score_df()['hedges_r']
def get_name(self):
return "Hedge's r"
| 32.985075
| 111
| 0.661086
|
95c52b544e90d4b008be7a901ba97ac70bfa7256
| 3,110
|
py
|
Python
|
MODULES/Collection_ArchiveCollectedData_ArchiveViaCustomMethod.py
|
FunnyWolf/viperpython
|
ba794ee74079285be32191e898daa3e56305c8be
|
[
"BSD-3-Clause"
] | 42
|
2021-01-20T15:30:33.000Z
|
2022-03-31T07:51:11.000Z
|
MODULES/Collection_ArchiveCollectedData_ArchiveViaCustomMethod.py
|
FunnyWolf/viperpython
|
ba794ee74079285be32191e898daa3e56305c8be
|
[
"BSD-3-Clause"
] | 2
|
2021-08-17T00:16:33.000Z
|
2022-02-21T11:37:45.000Z
|
MODULES/Collection_ArchiveCollectedData_ArchiveViaCustomMethod.py
|
FunnyWolf/viperpython
|
ba794ee74079285be32191e898daa3e56305c8be
|
[
"BSD-3-Clause"
] | 28
|
2021-01-22T05:06:39.000Z
|
2022-03-31T03:27:42.000Z
|
# -*- coding: utf-8 -*-
# @File : SimpleRewMsfModule.py
# @Date : 2019/1/11
# @Desc :
from Lib.ModuleAPI import *
class PostModule(PostMSFRawModule):
NAME_ZH = "打包压缩目录并回传"
DESC_ZH = "zip压缩目标指定目录,并将压缩后的文件回传到Viper."
NAME_EN = "Zip directory and send back"
DESC_EN = "Zip compresses the target specified directory, and send the compressed file to Viper."
MODULETYPE = TAG2TYPE.Collection
PLATFORM = ["Windows", "Linux"] # 平台
PERMISSIONS = ["User", "Administrator", "SYSTEM", "Root"] # 所需权限
ATTCK = ["T1560"] # ATTCK向量
REFERENCES = ["https://attack.mitre.org/techniques/T1560/003/"]
README = ["https://www.yuque.com/vipersec/module/nf83mz"]
AUTHOR = "Viper"
REQUIRE_SESSION = True
OPTIONS = register_options([
OptionStr(name='INPUTDIR', tag_zh="压缩目录", desc_zh="需要压缩的目录",
tag_en="Directory", desc_en="Fill in the directory that needs to be compressed", length=24),
OptionInt(name='TIMEOUT',
tag_zh="超时时间", desc_zh="压缩命令超时时间",
tag_en="Time out", desc_en="Compression timeout",
default=60 * 10),
OptionBool(name='GETRESULT', tag_zh="自动回传压缩文件", desc_zh="执行完成压缩后是否自动将文件回传到Viper",
tag_en="Automatically upload compressed files",
desc_en="Whether to automatically upload the file to Viper after the compression is performed",
default=False),
])
def __init__(self, sessionid, ipaddress, custom_param):
super().__init__(sessionid, ipaddress, custom_param)
self.type = "post"
self.mname = "multi/manage/upload_and_exec_api"
self.outfile = None
def check(self):
"""执行前的检查函数"""
session = Session(self._sessionid)
if session.is_windows:
self.set_msf_option("LPATH", "viperzip.exe")
self.set_msf_option("RPATH", "viperzip_viper.exe")
elif session.is_linux:
self.set_msf_option("LPATH", "viperzip")
self.set_msf_option("RPATH", "viperzip_viper")
else:
return False, "模块只支持Windows及Linux原生Session", "This module only supports Meterpreter for Windows and Linux"
inputdir = self.param("INPUTDIR")
self.outfile = f"{self.random_str(8)}.zip"
args = f"-inputdir {inputdir} -outfile {self.outfile}"
self.set_msf_option("ARGS", args)
self.set_msf_option("CLEANUP", True)
self.set_msf_option("TIMEOUT", self.param("TIMEOUT"))
if self.param("GETRESULT"):
self.set_msf_option("RESULTFILE", self.outfile)
return True, None
def callback(self, status, message, data):
if status is not True:
self.log_error("模块执行失败", "Module execution failed")
self.log_error(message, message)
return
self.log_info("模块执行完成", "Module operation completed")
self.log_good("压缩文件:", "Zip file")
self.log_raw(data)
if self.param("GETRESULT"):
self.log_good(f"压缩后文件存放在<文件管理>:{message}", f"Zip file is stored in <Files>: {message}")
| 38.875
| 118
| 0.623794
|
624bc9e1173c9a4f6dc9a5ad6229af27fa4974d5
| 821
|
py
|
Python
|
django_application/TODO/nugget/migrations/0003_auto_20171203_1835.py
|
mcculloughsco/Nugget
|
23516fcd2538c9f4fe907334dda334c706686649
|
[
"Apache-2.0"
] | 1
|
2017-11-05T23:26:29.000Z
|
2017-11-05T23:26:29.000Z
|
django_application/TODO/nugget/migrations/0003_auto_20171203_1835.py
|
mcculloughsco/CompSci326TermProject
|
03623a37fdb0e2235fdf07e23ed764ed5a0d64af
|
[
"Apache-2.0"
] | 3
|
2020-02-12T00:24:15.000Z
|
2021-06-10T20:03:37.000Z
|
django_application/TODO/nugget/migrations/0003_auto_20171203_1835.py
|
mcculloughsco/Nugget
|
23516fcd2538c9f4fe907334dda334c706686649
|
[
"Apache-2.0"
] | 2
|
2017-12-18T17:52:57.000Z
|
2018-03-08T06:42:43.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-03 23:35
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('nugget', '0002_auto_20171203_1624'),
]
operations = [
migrations.AlterField(
model_name='battleinstance',
name='net_coins',
field=models.IntegerField(default=0, help_text='Coins won or lost', verbose_name='Net Coins'),
),
migrations.AlterField(
model_name='battleinstance',
name='winner',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='Winner', to='nugget.Profile', verbose_name='Winner'),
),
]
| 30.407407
| 160
| 0.646772
|
1eae0b40c129f68e80f21ccb5c971ad2e4411952
| 6,580
|
py
|
Python
|
cotyledon/_utils.py
|
y-gupta/cotyledon
|
319faa2673a986733d9a7622bee29e187f2e7391
|
[
"Apache-2.0"
] | 71
|
2016-11-14T15:02:41.000Z
|
2022-02-20T06:41:59.000Z
|
cotyledon/_utils.py
|
y-gupta/cotyledon
|
319faa2673a986733d9a7622bee29e187f2e7391
|
[
"Apache-2.0"
] | 26
|
2016-07-14T13:42:15.000Z
|
2022-03-18T07:03:39.000Z
|
cotyledon/_utils.py
|
y-gupta/cotyledon
|
319faa2673a986733d9a7622bee29e187f2e7391
|
[
"Apache-2.0"
] | 17
|
2016-05-02T15:48:42.000Z
|
2021-11-24T15:26:08.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import contextlib
import errno
import logging
import multiprocessing
import os
import select
import signal
import sys
import threading
import time
if os.name == 'posix':
import fcntl
LOG = logging.getLogger(__name__)
_SIGNAL_TO_NAME = dict((getattr(signal, name), name) for name in dir(signal)
if name.startswith("SIG") and name not in ('SIG_DFL',
'SIG_IGN'))
def signal_to_name(sig):
return _SIGNAL_TO_NAME.get(sig)
def spawn(target, *args, **kwargs):
t = threading.Thread(target=target, args=args, kwargs=kwargs)
t.daemon = True
t.start()
return t
def check_workers(workers, minimum):
if not isinstance(workers, int) or workers < minimum:
raise ValueError("'workers' must be an int >= %d, not: %s (%s)" %
(minimum, workers, type(workers).__name__))
def check_callable(thing, name):
if not hasattr(thing, "__call__"):
raise ValueError("'%s' must be a callable" % name)
def _bootstrap_process(target, *args, **kwargs):
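    # Close file descriptors inherited from the parent (passed via the
    # "fds_to_close" kwarg) before invoking the target callable.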
if "fds_to_close" in kwargs:
for fd in kwargs["fds_to_close"]:
os.close(fd)
del kwargs["fds_to_close"]
target(*args, **kwargs)
def spawn_process(*args, **kwargs):
p = multiprocessing.Process(target=_bootstrap_process,
args=args, kwargs=kwargs)
p.start()
return p
try:
from setproctitle import setproctitle
except ImportError:
def setproctitle(*args, **kwargs):
pass
def get_process_name():
return os.path.basename(sys.argv[0])
def run_hooks(name, hooks, *args, **kwargs):
try:
for hook in hooks:
hook(*args, **kwargs)
except Exception:
LOG.exception("Exception raised during %s hooks" % name)
@contextlib.contextmanager
def exit_on_exception():
try:
yield
except SystemExit as exc:
os._exit(exc.code)
except BaseException:
LOG.exception('Unhandled exception')
os._exit(2)
if os.name == "posix":
SIGALRM = signal.SIGALRM
SIGHUP = signal.SIGHUP
SIGCHLD = signal.SIGCHLD
    SIGBREAK = None
else:
SIGALRM = SIGHUP = None
SIGCHLD = "fake sigchld"
SIGBREAK = signal.SIGBREAK
class SignalManager(object):
def __init__(self):
# Setup signal fd, this allows signal to behave correctly
if os.name == 'posix':
self.signal_pipe_r, self.signal_pipe_w = os.pipe()
self._set_nonblock(self.signal_pipe_r)
self._set_nonblock(self.signal_pipe_w)
signal.set_wakeup_fd(self.signal_pipe_w)
self._signals_received = collections.deque()
signal.signal(signal.SIGINT, signal.SIG_DFL)
if os.name == 'posix':
signal.signal(signal.SIGCHLD, signal.SIG_DFL)
signal.signal(signal.SIGTERM, self._signal_catcher)
signal.signal(signal.SIGALRM, self._signal_catcher)
signal.signal(signal.SIGHUP, self._signal_catcher)
else:
# currently a noop on window...
signal.signal(signal.SIGTERM, self._signal_catcher)
# FIXME(sileht): should allow to catch signal CTRL_BREAK_EVENT,
# but we to create the child process with CREATE_NEW_PROCESS_GROUP
# to make this work, so current this is a noop for later fix
signal.signal(signal.SIGBREAK, self._signal_catcher)
@staticmethod
def _set_nonblock(fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFL, 0)
flags = flags | os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def _signal_catcher(self, sig, frame):
# NOTE(sileht): This is useful only for python < 3.5
# in python >= 3.5 we could read the signal number
# from the wakeup_fd pipe
if sig in (SIGALRM, signal.SIGTERM):
self._signals_received.appendleft(sig)
else:
self._signals_received.append(sig)
def _wait_forever(self):
# Wait forever
while True:
# Check if signals have been received
if os.name == "posix":
self._empty_signal_pipe()
self._run_signal_handlers()
if os.name == "posix":
# NOTE(sileht): we cannot use threading.Event().wait(),
# threading.Thread().join(), or time.sleep() because signals
# can be missed when received by non-main threads
# (https://bugs.python.org/issue5315)
# So we use select.select() alone, we will receive EINTR or
# will read data from signal_r when signal is emitted and
# cpython calls PyErr_CheckSignals() to run signals handlers
# That looks perfect to ensure handlers are run and run in the
# main thread
try:
select.select([self.signal_pipe_r], [], [])
except select.error as e:
if e.args[0] != errno.EINTR:
raise
else:
# NOTE(sileht): here we do only best effort
# and wake the loop periodically, set_wakeup_fd
# doesn't work on non posix platform so
# 1 seconds have been picked with the advice of a dice.
time.sleep(1)
# NOTE(sileht): We emulate SIGCHLD, _service_manager
# will just check often for dead child
self._signals_received.append(SIGCHLD)
def _empty_signal_pipe(self):
try:
            while len(os.read(self.signal_pipe_r, 4096)) == 4096:
pass
except (IOError, OSError):
pass
def _run_signal_handlers(self):
while True:
try:
sig = self._signals_received.popleft()
except IndexError:
return
self._on_signal_received(sig)
def _on_signal_received(self, sig):
pass
| 32.254902
| 78
| 0.61155
|
f059463dbe53f795df6517266a09a69fb7006548
| 3,164
|
py
|
Python
|
settings.py
|
Manojkumar-Siva/serversideprocessing
|
1efc8ae9d979cbce3937482e87e9248adec6390d
|
[
"BSD-3-Clause"
] | null | null | null |
settings.py
|
Manojkumar-Siva/serversideprocessing
|
1efc8ae9d979cbce3937482e87e9248adec6390d
|
[
"BSD-3-Clause"
] | null | null | null |
settings.py
|
Manojkumar-Siva/serversideprocessing
|
1efc8ae9d979cbce3937482e87e9248adec6390d
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Django settings for calculations project.
Generated by 'django-admin startproject' using Django 3.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'glxvc-l^zz5=w%%+@1ji)@0y&_auyj33+gpyz9wofk9@-(zdqi'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'mathapp'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'calculations.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'calculations.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'static')
]
| 25.111111
| 91
| 0.696271
|
5aed3bebc0a26782312adde7c4788ded9973d6ee
| 1,693
|
py
|
Python
|
setup.py
|
jvanasco/sqlalchemy-access
|
a2778f3b973e0db4de78a3d7eb6341a29c92786e
|
[
"MIT"
] | null | null | null |
setup.py
|
jvanasco/sqlalchemy-access
|
a2778f3b973e0db4de78a3d7eb6341a29c92786e
|
[
"MIT"
] | 1
|
2020-01-21T15:42:15.000Z
|
2020-01-21T15:42:15.000Z
|
setup.py
|
jvanasco/sqlalchemy-access
|
a2778f3b973e0db4de78a3d7eb6341a29c92786e
|
[
"MIT"
] | null | null | null |
import os
import re
from setuptools import setup, find_packages
v = open(os.path.join(os.path.dirname(__file__), 'sqlalchemy_access', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
v.close()
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(name='sqlalchemy-access',
version=VERSION,
description="MS Access for SQLAlchemy",
long_description=open(readme).read(),
url='https://github.com/sqlalchemy/sqlalchemy-access',
author='Gord Thompson',
author_email='gord@gordthompson.com',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Database :: Front-Ends',
'Operating System :: OS Independent',
],
keywords='SQLAlchemy Microsoft Access',
project_urls={
'Documentation': 'https://github.com/sqlalchemy/sqlalchemy-access/wiki',
'Source': 'https://github.com/sqlalchemy/sqlalchemy-access',
'Tracker': 'https://github.com/sqlalchemy/sqlalchemy-access/issues',
},
packages=find_packages(include=['sqlalchemy_access']),
include_package_data=True,
      install_requires=['SQLAlchemy', 'pyodbc>=4.0.27'],
zip_safe=False,
entry_points={
'sqlalchemy.dialects': [
'access.pyodbc = sqlalchemy_access.pyodbc:AccessDialect_pyodbc',
]
},
)
| 36.021277
| 85
| 0.631424
|
045bf66b8b1b795ac06409ef91522b5dec867e67
| 212
|
py
|
Python
|
dingshi_start.py
|
andyrenpanlong/soubu_app
|
abe56cbb3f21062ec3a8ef256131513484400c27
|
[
"MIT"
] | null | null | null |
dingshi_start.py
|
andyrenpanlong/soubu_app
|
abe56cbb3f21062ec3a8ef256131513484400c27
|
[
"MIT"
] | null | null | null |
dingshi_start.py
|
andyrenpanlong/soubu_app
|
abe56cbb3f21062ec3a8ef256131513484400c27
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from scrapy import cmdline
import time
import os
def start_process():
    while True:
        os.system('python dianpu.py')
        time.sleep(3600)  # re-run the command every hour so the crawl restarts if the process exits
start_process()
| 16.307692
| 44
| 0.693396
|
575b390d9c6c299e3138c652ddc530f162bd7101
| 4,070
|
py
|
Python
|
bigquery/schema_builder.py
|
unravelin/BigQuery-Python
|
a0986fd60f0c2cc84a0fe49af8b4807125e1707d
|
[
"Apache-2.0"
] | 1
|
2021-01-07T23:12:53.000Z
|
2021-01-07T23:12:53.000Z
|
bigquery/schema_builder.py
|
unravelin/BigQuery-Python
|
a0986fd60f0c2cc84a0fe49af8b4807125e1707d
|
[
"Apache-2.0"
] | null | null | null |
bigquery/schema_builder.py
|
unravelin/BigQuery-Python
|
a0986fd60f0c2cc84a0fe49af8b4807125e1707d
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
__author__ = 'Aneil Mallavarapu (http://github.com/aneilbaboo)'
from datetime import datetime
import six
import dateutil.parser
from .errors import InvalidTypeException
def default_timestamp_parser(s):
    try:
        return bool(dateutil.parser.parse(s))
    except (TypeError, ValueError, OverflowError):
        return False
def schema_from_record(record, timestamp_parser=default_timestamp_parser):
"""Generate a BigQuery schema given an example of a record that is to be
inserted into BigQuery.
Parameters
----------
record : dict
Example of a record that is to be inserted into BigQuery
timestamp_parser : function, optional
        Unary function taking a ``str`` and returning a ``bool`` that is
        True if the string represents a date
Returns
-------
Schema: list
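    Examples
    --------
    >>> schema_from_record({"id": 1, "username": "Bob"})
    [{"name": "id", "type": "integer", "mode": "nullable"},
     {"name": "username", "type": "string", "mode": "nullable"}]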
"""
return [describe_field(k, v, timestamp_parser=timestamp_parser)
for k, v in list(record.items())]
def describe_field(k, v, timestamp_parser=default_timestamp_parser):
"""Given a key representing a column name and value representing the value
stored in the column, return a representation of the BigQuery schema
element describing that field. Raise errors if invalid value types are
provided.
Parameters
----------
k : Union[str, unicode]
Key representing the column
v : Union[str, unicode, int, float, datetime, object]
Value mapped to by `k`
Returns
-------
object
Describing the field
Raises
------
Exception
If invalid value types are provided.
Examples
--------
>>> describe_field("username", "Bob")
{"name": "username", "type": "string", "mode": "nullable"}
>>> describe_field("users", [{"username": "Bob"}])
{"name": "users", "type": "record", "mode": "repeated",
"fields": [{"name":"username","type":"string","mode":"nullable"}]}
"""
def bq_schema_field(name, bq_type, mode):
return {"name": name, "type": bq_type, "mode": mode}
if isinstance(v, list):
if len(v) == 0:
raise Exception(
"Can't describe schema because of empty list {0}:[]".format(k))
v = v[0]
mode = "repeated"
else:
mode = "nullable"
bq_type = bigquery_type(v, timestamp_parser=timestamp_parser)
if not bq_type:
raise InvalidTypeException(k, v)
field = bq_schema_field(k, bq_type, mode)
if bq_type == "record":
try:
field['fields'] = schema_from_record(v, timestamp_parser)
except InvalidTypeException as e:
# recursively construct the key causing the error
raise InvalidTypeException("%s.%s" % (k, e.key), e.value)
return field
def bigquery_type(o, timestamp_parser=default_timestamp_parser):
"""Given a value, return the matching BigQuery type of that value. Must be
one of str/unicode/int/float/datetime/record, where record is a dict
containing value which have matching BigQuery types.
Parameters
----------
o : object
A Python object
    timestamp_parser : function, optional
        Unary function taking a ``str`` and returning a ``bool`` that is
        True if the string represents a date
Returns
-------
Union[str, None]
Name of the corresponding BigQuery type for `o`, or None if no type
could be found
Examples
--------
>>> bigquery_type("abc")
"string"
>>> bigquery_type(123)
"integer"
"""
t = type(o)
if t == int:
return "integer"
elif (t == six.binary_type and six.PY2) or t == six.text_type:
if timestamp_parser and timestamp_parser(o):
return "timestamp"
else:
return "string"
elif t == float:
return "float"
elif t == bool:
return "boolean"
elif t == dict:
return "record"
elif t == datetime:
return "timestamp"
else:
return None # failed to find a type
| 27.876712
| 79
| 0.616708
|
345363793e139b6a1edc6171353b3c2dd6be0a42
| 929
|
py
|
Python
|
tests/settings.py
|
PeterRistCMS/django-flexisettings
|
235c8d4fe391ee6bacd65ed2814f4f54599fd6b9
|
[
"BSD-3-Clause"
] | 8
|
2015-05-23T20:15:49.000Z
|
2019-12-19T22:14:42.000Z
|
tests/settings.py
|
PeterRistCMS/django-flexisettings
|
235c8d4fe391ee6bacd65ed2814f4f54599fd6b9
|
[
"BSD-3-Clause"
] | 5
|
2015-03-10T10:57:31.000Z
|
2019-11-19T03:53:36.000Z
|
tests/settings.py
|
PeterRistCMS/django-flexisettings
|
235c8d4fe391ee6bacd65ed2814f4f54599fd6b9
|
[
"BSD-3-Clause"
] | 3
|
2015-08-26T17:08:03.000Z
|
2022-01-14T16:45:12.000Z
|
import unittest2 as unittest
from tests.base import BaseTestCase
import os
class SettingsTestCase(BaseTestCase):
def setUp(self):
super(SettingsTestCase, self).setUp()
os.environ.setdefault(self.envvar,
"%s.settings" % self.test_project)
def test_run_env(self):
"""Test running environment lookup"""
import flexisettings.settings
self.assertEqual(flexisettings.settings.FLEXI_RUN_ENV, 't')
def test_debug_settings(self):
"""Test proxyfied lookup without evaluation (settings.DEBUG)"""
import flexisettings.settings
self.assertTrue(flexisettings.settings.DEBUG)
def test_security_settings(self):
"""Test proxyfied lookup with evaluation (settings.SECRET_KEY)"""
import flexisettings.settings
self.assertEqual(flexisettings.settings.SECRET_KEY, self.secret_key)
if __name__ == '__main__':
unittest.main()
| 32.034483
| 76
| 0.706136
|
e1b78fc155613bef230c76f182cbd13959bef3bd
| 5,386
|
py
|
Python
|
jamf/models/computer_content_caching_data_migration_error.py
|
jensenbox/python-jamf
|
85213085b1064a00375a7aa7df5e33c19f5178eb
|
[
"RSA-MD"
] | 1
|
2021-04-20T15:28:57.000Z
|
2021-04-20T15:28:57.000Z
|
jamf/models/computer_content_caching_data_migration_error.py
|
jensenbox/python-jamf
|
85213085b1064a00375a7aa7df5e33c19f5178eb
|
[
"RSA-MD"
] | null | null | null |
jamf/models/computer_content_caching_data_migration_error.py
|
jensenbox/python-jamf
|
85213085b1064a00375a7aa7df5e33c19f5178eb
|
[
"RSA-MD"
] | null | null | null |
# coding: utf-8
"""
Jamf Pro API
    ## Overview This is a sample Jamf Pro server which allows for usage without any authentication. The Jamf Pro environment which supports the Try it Out functionality does not run the current beta version of Jamf Pro, thus any newly added endpoints will result in an error and should be used solely for documentation purposes. # noqa: E501
The version of the OpenAPI document: 10.25.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from jamf.configuration import Configuration
class ComputerContentCachingDataMigrationError(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'code': 'int',
'domain': 'str',
'user_info': 'list[ComputerContentCachingDataMigrationErrorUserInfo]'
}
attribute_map = {
'code': 'code',
'domain': 'domain',
'user_info': 'userInfo'
}
def __init__(self, code=None, domain=None, user_info=None, local_vars_configuration=None): # noqa: E501
"""ComputerContentCachingDataMigrationError - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._code = None
self._domain = None
self._user_info = None
self.discriminator = None
if code is not None:
self.code = code
if domain is not None:
self.domain = domain
if user_info is not None:
self.user_info = user_info
@property
def code(self):
"""Gets the code of this ComputerContentCachingDataMigrationError. # noqa: E501
:return: The code of this ComputerContentCachingDataMigrationError. # noqa: E501
:rtype: int
"""
return self._code
@code.setter
def code(self, code):
"""Sets the code of this ComputerContentCachingDataMigrationError.
:param code: The code of this ComputerContentCachingDataMigrationError. # noqa: E501
:type code: int
"""
self._code = code
@property
def domain(self):
"""Gets the domain of this ComputerContentCachingDataMigrationError. # noqa: E501
:return: The domain of this ComputerContentCachingDataMigrationError. # noqa: E501
:rtype: str
"""
return self._domain
@domain.setter
def domain(self, domain):
"""Sets the domain of this ComputerContentCachingDataMigrationError.
:param domain: The domain of this ComputerContentCachingDataMigrationError. # noqa: E501
:type domain: str
"""
self._domain = domain
@property
def user_info(self):
"""Gets the user_info of this ComputerContentCachingDataMigrationError. # noqa: E501
:return: The user_info of this ComputerContentCachingDataMigrationError. # noqa: E501
:rtype: list[ComputerContentCachingDataMigrationErrorUserInfo]
"""
return self._user_info
@user_info.setter
def user_info(self, user_info):
"""Sets the user_info of this ComputerContentCachingDataMigrationError.
:param user_info: The user_info of this ComputerContentCachingDataMigrationError. # noqa: E501
:type user_info: list[ComputerContentCachingDataMigrationErrorUserInfo]
"""
self._user_info = user_info
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ComputerContentCachingDataMigrationError):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ComputerContentCachingDataMigrationError):
return True
return self.to_dict() != other.to_dict()
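# Usage sketch (illustrative; the domain string is a hypothetical value):
if __name__ == "__main__":
    err = ComputerContentCachingDataMigrationError(code=5, domain="com.example.cache")
    print(err.to_dict())  # {'code': 5, 'domain': 'com.example.cache', 'user_info': None}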
| 31.132948
| 342
| 0.624025
|
358a100138e4e5b19db3e52b16f1a77cf21ce492
| 2,752
|
py
|
Python
|
example/blog_cache.py
|
bearword/bearword_blog
|
4d2a8de1e8658d70b9eb0aba60d2859a7f2399dd
|
[
"MIT"
] | 2
|
2018-04-13T05:51:23.000Z
|
2018-04-13T06:02:23.000Z
|
example/blog_cache.py
|
lnkdel/Flask-Blogging
|
55e95f4f2d7d48b0b370e1decdefb4b5b2552cab
|
[
"MIT"
] | null | null | null |
example/blog_cache.py
|
lnkdel/Flask-Blogging
|
55e95f4f2d7d48b0b370e1decdefb4b5b2552cab
|
[
"MIT"
] | null | null | null |
"""
This example demonstrates the use of caches
"""
from flask import Flask, render_template_string, redirect, current_app
from sqlalchemy import create_engine, MetaData
from flask_login import UserMixin, LoginManager, login_user, logout_user, current_user
from flask_blogging import SQLAStorage, BloggingEngine
from flask_principal import identity_changed, Identity, AnonymousIdentity, identity_loaded, \
UserNeed, RoleNeed
from flask_cache import Cache
app = Flask(__name__)
app.config["SECRET_KEY"] = "secret" # for WTF-forms and login
app.config["BLOGGING_URL_PREFIX"] = "/blog"
app.config["BLOGGING_DISQUS_SITENAME"] = "test"
app.config["BLOGGING_SITEURL"] = "http://localhost:8000"
app.config["BLOGGING_SITENAME"] = "My Site"
app.config["BLOGGING_PERMISSIONS"] = False  # disable blogger permission checks
app.config["CACHE_TYPE"] = "simple"
# create cache
cache = Cache(app)
# extensions
engine = create_engine('sqlite:////tmp/blog.db')
meta = MetaData()
sql_storage = SQLAStorage(engine, metadata=meta)
blog_engine = BloggingEngine(app, sql_storage, cache=cache)
login_manager = LoginManager(app)
meta.create_all(bind=engine)
class User(UserMixin):
def __init__(self, user_id):
self.id = user_id
def get_name(self):
return "Paul Dirac" # typically the user's name
@identity_loaded.connect_via(app)
def on_identity_loaded(sender, identity):
identity.user = current_user
if hasattr(current_user, "id"):
identity.provides.add(UserNeed(current_user.id))
identity.provides.add(RoleNeed("blogger"))
@login_manager.user_loader
@blog_engine.user_loader
def load_user(user_id):
return User(user_id)
index_template = """
<!DOCTYPE html>
<html>
<head> </head>
<body>
{% if current_user.is_authenticated %}
<a href="/logout/"> Logout </a>
{% else %}
<a href="/login/"> Login </a>
{% endif %}
  <a href="/blog/"> Blog </a>
  <a href="/blog/sitemap.xml">Sitemap</a>
  <a href="/blog/feeds/all.atom.xml">ATOM</a>
</body>
</html>
"""
@app.route("/")
def index():
return render_template_string(index_template)
@app.route("/login/")
def login():
user = User("testuser")
login_user(user)
# notify the change of role
identity_changed.send(current_app._get_current_object(),
identity=Identity("testuser"))
return redirect("/blog")
@app.route("/logout/")
def logout():
logout_user()
# notify the change of role
identity_changed.send(current_app._get_current_object(),
identity=AnonymousIdentity())
return redirect("/")
if __name__ == "__main__":
app.run(debug=True, port=8000, use_reloader=True)
| 28.371134
| 93
| 0.69186
|
1fa2c2b052c18d20f856b07940aaf7697b502fc7
| 845
|
py
|
Python
|
ubxlib/ubx_mon_ver.py
|
albard00/ubxlib
|
cfad25461e680bbb527e4ce74e6b699da6065bea
|
[
"MIT"
] | null | null | null |
ubxlib/ubx_mon_ver.py
|
albard00/ubxlib
|
cfad25461e680bbb527e4ce74e6b699da6065bea
|
[
"MIT"
] | null | null | null |
ubxlib/ubx_mon_ver.py
|
albard00/ubxlib
|
cfad25461e680bbb527e4ce74e6b699da6065bea
|
[
"MIT"
] | null | null | null |
from ubxlib.cid import UbxCID
from ubxlib.frame import UbxFrame
from ubxlib.types import Fields, CH
class UbxMonVer_(UbxFrame):
CID = UbxCID(UbxCID.CLASS_MON, 0x04)
NAME = 'UBX-MON-VER'
class UbxMonVerPoll(UbxMonVer_):
NAME = UbxMonVer_.NAME + '-POLL'
def __init__(self):
super().__init__()
class UbxMonVer(UbxMonVer_):
def __init__(self):
super().__init__()
# fields defined in unpack as they are dynamic
def unpack(self):
# Dynamically build fields based on message length
self.f = Fields()
self.f.add(CH(30, 'swVersion'))
self.f.add(CH(10, 'hwVersion'))
extra_length = len(self.data) - 40
extra_info = int(extra_length / 30)
for i in range(extra_info):
self.f.add(CH(30, f'extension_{i}'))
super().unpack()
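# Usage sketch (illustrative): constructing the poll frame that requests
# UBX-MON-VER from the receiver; actually sending it requires a ubxlib
# transport layer, which is outside this module.
if __name__ == '__main__':
    poll = UbxMonVerPoll()
    print(poll.NAME, poll.CID)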
| 23.472222
| 58
| 0.627219
|
2c208efc5d75ccc7d6f040189a61c6fc5bc39c1e
| 718
|
py
|
Python
|
redcmd/autocomp/completer/google_suggest.py
|
amol9/redcmd
|
34086eb42ec6acc16dbd2b3ea530898a6a287639
|
[
"MIT"
] | 1
|
2015-10-26T19:38:28.000Z
|
2015-10-26T19:38:28.000Z
|
redcmd/autocomp/completer/google_suggest.py
|
amol9/redcmd
|
34086eb42ec6acc16dbd2b3ea530898a6a287639
|
[
"MIT"
] | null | null | null |
redcmd/autocomp/completer/google_suggest.py
|
amol9/redcmd
|
34086eb42ec6acc16dbd2b3ea530898a6a287639
|
[
"MIT"
] | null | null | null |
import json
from six.moves.urllib.parse import urlencode
from redlib.api.http import HttpRequest, RequestOptions, GlobalOptions
from .base import Completer
from ... import const
class GoogleSuggest(Completer):
def __init__(self):
self._g_opt = GlobalOptions(cache_dir=const.autocomp_cache_dir, timeout=3, cache_timeout='15s')
def complete(self, term):
        r_opt = RequestOptions(headers={'User-Agent': 'Mozilla 51.0'})
        http = HttpRequest(self._g_opt)
        url = "http://suggestqueries.google.com/complete/search?client=firefox&" + urlencode({'q': term})
j = http.get(url, r_opt)
js = json.loads(j)
return [i.encode('ascii', 'ignore') for i in js[1]]
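# Usage sketch (illustrative; performs a live HTTP request via redlib, so
# network connectivity is required):
if __name__ == '__main__':
    completer = GoogleSuggest()
    for suggestion in completer.complete('python'):
        print(suggestion)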
| 27.615385
| 104
| 0.681058
|
62193ae5987296cca001b6056497412a5b923dc9
| 2,848
|
py
|
Python
|
matlab_ext/external/spfpm_py3/demo.py
|
zaqwes8811/coordinator-tasks
|
7f63fdf613eff5d441a3c2c7b52d2a3d02d9736a
|
[
"MIT"
] | null | null | null |
matlab_ext/external/spfpm_py3/demo.py
|
zaqwes8811/coordinator-tasks
|
7f63fdf613eff5d441a3c2c7b52d2a3d02d9736a
|
[
"MIT"
] | 15
|
2015-03-07T12:46:41.000Z
|
2015-04-11T09:08:36.000Z
|
matlab_ext/external/spfpm_py3/demo.py
|
zaqwes8811/micro-apps
|
7f63fdf613eff5d441a3c2c7b52d2a3d02d9736a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# Demonstration of Simple Python Fixed-Point Module
# (C)Copyright 2006-2014, RW Penney
import time
try:
import matplotlib, numpy
matplotlib.use('qt4agg')
import matplotlib.pyplot as plt
HAVE_MATPLOTLIB = True
except ImportError:
HAVE_MATPLOTLIB = False
import FixedPoint
def basicDemo():
"""Basic demonstration of roots & exponents at various accuracies"""
for resolution in [8, 32, 80, 274]:
family = FixedPoint.FXfamily(resolution)
val = 2
print('=== {0} bits ==='.format(resolution))
rt = FixedPoint.FXnum(val, family).sqrt()
print('sqrt(' + str(val) + ')~ ' + str(rt))
print('sqrt(' + str(val) + ')^2 ~ ' + str(rt * rt))
print('exp(1) ~ ' + str(family.exp1))
print()
def overflowDemo():
"""Illustrate how finite range limits calculation of exponents"""
res = 20
print('=== {0}-bit fractional part ==='.format(res))
for intsize in [4, 8, 16, 32]:
family = FixedPoint.FXfamily(res, intsize)
x = FixedPoint.FXnum(0.0, family)
step = 0.1
while True:
try:
ex = x.exp()
except FixedPoint.FXoverflowError:
print('{0:2d}-bit integer part: exp(x) overflows near x={1:.3g}'.format(intsize, float(x)))
break
x += step
print()
def speedDemo():
"""calculate indicative speed of floating-point operations"""
print('=== speed test ===')
for res, count in [ (16, 10000), (32, 10000), (64, 10000), (128, 10000), (256, 10000), (512, 10000) ]:
fam = FixedPoint.FXfamily(res)
x = FixedPoint.FXnum(0.5, fam)
lmb = FixedPoint.FXnum(3.6, fam)
one = FixedPoint.FXnum(1.0, fam)
        t0 = time.perf_counter()  # time.clock() was removed in Python 3.8
        for i in range(0, count):
            # use logistic-map in chaotic region:
            x = lmb * x * (one - x)
        t1 = time.perf_counter()
ops = count * 3
Dt = t1 - t0
print('{0} {1}-bit operations in {2:.2f}s ~ {3:.2g} FLOPS'.format(ops, res, Dt, (ops / Dt)))
def plotDemo():
"""Plot graph of approximations to Pi"""
pi_true = FixedPoint.FXfamily(200).pi
b_min, b_max = 8, 25
pipoints = []
for res in range(b_min, b_max+1):
val = 4 * FixedPoint.FXnum(1, FixedPoint.FXfamily(res)).atan()
pipoints.append([res, val])
pipoints = numpy.array(pipoints)
truepoints = numpy.array([[b_min, pi_true], [b_max, pi_true]])
plt.xlabel('bits')
plt.ylabel('$4 tan^{-1}1$')
plt.xlim([b_min, b_max])
plt.ylim([3.13, 3.16])
plt.grid(True)
for arr in (truepoints, pipoints):
plt.plot(arr[:,0], arr[:,1])
plt.show()
if __name__ == "__main__":
basicDemo()
overflowDemo()
speedDemo()
if HAVE_MATPLOTLIB:
plotDemo()
# vim: set ts=4 sw=4 et:
| 28.48
| 107
| 0.569171
|
07fda1d7129991d1cbc865d4288324c2d7740e2c
| 815
|
py
|
Python
|
213. House Robber II.py
|
MapleLove2014/leetcode
|
135c79ebe98815d0e38280edfadaba90e677aff5
|
[
"Apache-2.0"
] | 1
|
2020-12-04T07:38:16.000Z
|
2020-12-04T07:38:16.000Z
|
213. House Robber II.py
|
MapleLove2014/leetcode
|
135c79ebe98815d0e38280edfadaba90e677aff5
|
[
"Apache-2.0"
] | null | null | null |
213. House Robber II.py
|
MapleLove2014/leetcode
|
135c79ebe98815d0e38280edfadaba90e677aff5
|
[
"Apache-2.0"
] | null | null | null |
class Solution:
    # Houses are arranged in a circle, so the first and last house can never
    # both be robbed; evaluate both choices (rob house 0 or skip it) and
    # take the better result.
    def rob(self, nums):
        if len(nums) == 1:
            return nums[0]
        return self.doRob(nums)
def doRob(self, nums):
return max(self.robbing(0, nums, True, 0, 0), self.robbing(0, nums, False, 0, 0))
    # value1/value2 carry the best totals ending at the previous and the
    # one-before-previous house; robFirst records whether house 0 was robbed.
    def robbing(self, i, nums, robFirst, value1, value2):
if i == 0:
return self.robbing(i + 1, nums, robFirst, nums[i] if robFirst else 0, value2)
if i == len(nums) - 1:
return max(value1, value2 + (0 if robFirst else nums[i]))
if i == 1:
return self.robbing(i + 1, nums, robFirst, value1 if robFirst else max(value1, value2 + nums[i]), value1)
return self.robbing(i + 1, nums, robFirst, max(value1, value2 + nums[i]), value1)
s = Solution()
print(s.rob([2, 3, 2]) == 3)
print(s.rob([1,2,3,1]) == 4)
| 38.809524
| 117
| 0.568098
|
e4050adfe51197549ad92444896d3f681a8a6ad8
| 6,398
|
py
|
Python
|
deepbiosphere/scripts/maxent_inference.py
|
moiexpositoalonsolab/deepbiosphere
|
a12e59c40d2c29b5428e4969ef8c3a0cb457e387
|
[
"MIT"
] | null | null | null |
deepbiosphere/scripts/maxent_inference.py
|
moiexpositoalonsolab/deepbiosphere
|
a12e59c40d2c29b5428e4969ef8c3a0cb457e387
|
[
"MIT"
] | null | null | null |
deepbiosphere/scripts/maxent_inference.py
|
moiexpositoalonsolab/deepbiosphere
|
a12e59c40d2c29b5428e4969ef8c3a0cb457e387
|
[
"MIT"
] | null | null | null |
import time
import glob
import rasterio
import numpy as np
import rpy2.robjects as robjects
from rpy2.robjects.packages import importr
import deepbiosphere.scripts.GEOCLEF_Run as run
import deepbiosphere.scripts.GEOCLEF_Utils as utils
import deepbiosphere.scripts.GEOCLEF_Dataset as dataset
from deepbiosphere.scripts import GEOCLEF_Config as config
from deepbiosphere.scripts.GEOCLEF_Config import paths, Run_Params
from deepbiosphere.scripts.GEOCLEF_Run import setup_dataset, setup_model, setup_loss
def maxent_inference(base_dir, params, num_species):
print("getting data")
# TODO: make sure dataframe has all the info it needs for plotting
obs = dataset.get_gbif_observations(base_dir, params.params.organism, params.params.region, params.params.observation, params.params.threshold, num_species)
obs.fillna('nan', inplace=True)
if 'species' not in obs.columns:
        obs = utils.add_taxon_metadata(base_dir, obs, params.params.organism)
    dset = run.setup_dataset(params.params.observation, params.base_dir, params.params.organism, params.params.region, params.params.normalize, params.params.no_altitude, params.params.dataset, params.params.threshold, num_species=num_species)
train_samp, test_samp, idxs = run.better_split_train_test(dset)
# load in tiffs as rasters
# TODO: make cleaner
rasnames = f"{paths.DBS_DIR}occurrences/MaxentResults_All/*.tif"
files = glob.glob(rasnames)
all_ras = []
for file in files:
src = rasterio.open(file)
specname = file.split('/')[-1].split('_Proj')[0].replace('_', ' ')
temp = src.read().squeeze()
nodata = temp.min()
# trick: convert nan value points to 0 probability!
# if nodata < 0.0:
# print("setting nan value for {} from {} to 0.0".format(specname, nodata, " to 0.0"))
temp[temp == nodata] = 0.0
all_ras.append((specname, src.transform, temp))
print("extracting predictions")
tick = time.time()
# so I think the negative values are because the rasters are only fit around the species range, but to certify that what I'll do is plot the rasters + the offending point with geopandas
maxent_pred = np.full([len(obs), dset.num_specs], np.nan)
maxent_gen = np.full([len(obs), dset.num_gens], np.nan)
maxent_fam = np.full([len(obs), dset.num_fams], np.nan)
sp_2_gen = utils.dict_from_columns(obs, 'species', 'genus')
sp_2_fam = utils.dict_from_columns(obs, 'species', 'family')
to_iterate = dset.obs[:, dataset.lat_lon_idx].tolist()
# TODO: rewrite this order so it's faster
# loop over rasters, not indices because you can
# order species in order expected for file
# need (rasters, spec_name_same)
for spec, trans, raster in all_ras:
spc_idx = dset.spec_dict[spec]
gen_idx = dset.gen_dict[sp_2_gen[spec]]
fam_idx = dset.fam_dict[sp_2_fam[spec]]
for i, (lat,lon) in enumerate(to_iterate):
# print("valid idx? ", spc_idx)
x, y = dataset.latlon_2_idx(trans, (lat, lon))
if x < 0 or y < 0 or x >= raster.shape[0] or y >= raster.shape[1]:
# this means that maxent predicted no probability in this area, so the raster is cut off for this region
# so can go ahead and say 0 probability
maxent_pred[i, spc_idx] = 0.0
maxent_gen[i, gen_idx] = 0.0
maxent_fam[i, fam_idx] = 0.0
else:
# convert species to genus, family
maxent_pred[i, spc_idx] = raster[x,y]
maxent_gen[i, gen_idx] = raster[x,y]
maxent_fam[i, fam_idx] = raster[x,y]
tock = time.time()
print("extracting predictions took {} minutes".format((tock-tick)/60))
# check how many nans are left, if reasonable amount then just convert to 0.0
    num_nans = maxent_pred[np.isnan(maxent_pred)]
print("num nans is ", num_nans.shape)
# convert to pandas dataframe and save in the correct location
print('saving data')
tick = time.time()
to_transfer = ['lat', 'lon', 'region', 'city', 'NA_L3NAME', 'US_L3NAME', 'NA_L2NAME', 'NA_L1NAME', 'test']
inv_gen = {v: k for k, v in dset.gen_dict.items()}
inv_fam = {v: k for k, v in dset.fam_dict.items()}
df_spec_cols = [dset.inv_spec[i] for i in range(dset.num_specs)]
df_gen_cols = [inv_gen[i] for i in range(dset.num_gens)]
df_fam_cols = [inv_fam[i] for i in range(dset.num_fams)]
df_spec = utils.numpy_2_df(maxent_pred, df_spec_cols, obs, to_transfer)
df_gen = utils.numpy_2_df(maxent_gen, df_gen_cols, obs, to_transfer)
df_fam = utils.numpy_2_df(maxent_fam, df_fam_cols, obs, to_transfer)
pth_spec = config.build_inference_path(base_dir, params.params.model, params.params.loss, params.params.exp_id, 'species', num_species)
pth_gen = config.build_inference_path(base_dir, params.params.model, params.params.loss, params.params.exp_id, 'genus', num_species)
pth_fam = config.build_inference_path(base_dir, params.params.model, params.params.loss, params.params.exp_id, 'family', num_species)
df_spec.to_csv(pth_spec)
df_gen.to_csv(pth_gen)
df_fam.to_csv(pth_fam)
tock = time.time()
print("took {} minutes to save data".format((tock-tick)/60))
# TODO: see if can embed R into this and run the maxent??
# yes! can use rpy2!
def train_maxent():
# 1. get
# robjects.r('install.packages("rJava")')
to_import = ['devtools', 'rJava', 'dismo', 'raster', 'foreach', 'doParallel', 'sp']
# install.packages("rJava")
# devtools::install_github("s-u/rJava")
# install.packages("dismo")
pckgs = {}
for imp in to_import:
pckgs[imp] = importr(imp)
print(pckgs)
if __name__ == "__main__":
    # note: suppress_warnings() is a context manager; calling it bare has no effect
    np.testing.suppress_warnings()
args = ['base_dir', 'num_species', 'observation', 'organism', 'region', 'exp_id', 'seed', 'normalize', 'dataset', 'threshold', 'model', 'load_from_config', 'loss', 'no_alt']
ARGS = config.parse_known_args(args)
config.setup_main_dirs(ARGS.base_dir)
params = config.Run_Params(ARGS.base_dir, ARGS)
# TODO: make sure you can only set model to be maxent here
# train_maxent()
maxent_inference(ARGS.base_dir, params, ARGS.num_species)
| 46.362319
| 242
| 0.672398
|
2092176a80ef75a89c50c0ea3ade65082dee8c81
| 7,818
|
py
|
Python
|
pyupdater/vendor/PyInstaller/depend/dylib.py
|
rsumner31/PyUpdater1
|
d9658000472e57453267ee8fa174ae914dd8d33c
|
[
"BSD-2-Clause"
] | null | null | null |
pyupdater/vendor/PyInstaller/depend/dylib.py
|
rsumner31/PyUpdater1
|
d9658000472e57453267ee8fa174ae914dd8d33c
|
[
"BSD-2-Clause"
] | null | null | null |
pyupdater/vendor/PyInstaller/depend/dylib.py
|
rsumner31/PyUpdater1
|
d9658000472e57453267ee8fa174ae914dd8d33c
|
[
"BSD-2-Clause"
] | null | null | null |
#-----------------------------------------------------------------------------
# Copyright (c) 2013, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
"""
Utilities for manipulating dynamic libraries.
"""
__all__ = ['exclude_list', 'include_list', 'include_library']
import os
import re
from PyInstaller.compat import is_win, is_unix, is_aix, is_darwin
import PyInstaller.log as logging
logger = logging.getLogger(__name__)
_BOOTLOADER_FNAMES = set(['run', 'run_d', 'runw', 'runw_d'])
# Regex excludes
# Ignoring some system libraries speeds up the packaging process
_excludes = {}
# Regex includes - overrides excludes.
# Include list is used only to override specific libraries
# from exclude list.
_includes = {}
_win_includes = {
# DLLs are from 'Microsoft Visual C++ 2010 Redistributable Package'.
# http://msdn.microsoft.com/en-us/library/8kche8ah(v=vs.100).aspx
#
    # Python 3.3 and 3.4 use Visual Studio C++ 2010 for Windows builds;
    # python33.dll depends on msvcr100.dll.
#
# Visual Studio C++ 2010 does not need Assembly manifests anymore and
# uses C++ runtime libraries the old way - pointing to C:\Windows\System32.
# It is necessary to allow inclusion of these libraries from C:\Windows\System32.
r'atl100.dll$': 1,
r'msvcr100.dll$': 1,
r'msvcp100.dll$': 1,
r'mfc100.dll$': 1,
r'mfc100u.dll$': 1,
r'mfcmifc80.dll$': 1,
r'mfcm100.dll$': 1,
r'mfcm100u.dll$': 1,
}
_win_excludes = {
# MS assembly excludes
r'^Microsoft\.Windows\.Common-Controls$': 1,
}
_unix_excludes = {
r'/libc\.so\..*': 1,
r'/libdl\.so\..*': 1,
r'/libm\.so\..*': 1,
r'/libpthread\.so\..*': 1,
r'/librt\.so\..*': 1,
r'/libthread_db\.so\..*': 1,
# glibc regex excludes.
r'/ld-linux\.so\..*': 1,
r'/libBrokenLocale\.so\..*': 1,
r'/libanl\.so\..*': 1,
r'/libcidn\.so\..*': 1,
r'/libcrypt\.so\..*': 1,
r'/libnsl\.so\..*': 1,
r'/libnss_compat.*\.so\..*': 1,
r'/libnss_dns.*\.so\..*': 1,
r'/libnss_files.*\.so\..*': 1,
r'/libnss_hesiod.*\.so\..*': 1,
r'/libnss_nis.*\.so\..*': 1,
r'/libnss_nisplus.*\.so\..*': 1,
r'/libresolv\.so\..*': 1,
r'/libutil\.so\..*': 1,
# libGL can reference some hw specific libraries (like nvidia libs).
r'/libGL\..*': 1,
# libxcb-dri changes ABI frequently (e.g.: between Ubuntu LTS releases) and is usually installed
# as dependency of the graphics stack anyway. No need to bundle it.
r'/libxcb\.so\..*': 1,
r'/libxcb-dri.*\.so\..*': 1,
}
_aix_excludes = {
r'/libbz2\.a': 1,
r'/libc\.a': 1,
r'/libC\.a': 1,
r'/libcrypt\.a': 1,
r'/libdl\.a': 1,
r'/libintl\.a': 1,
r'/libpthreads\.a': 1,
    r'/librt\.a': 1,
r'/librtl\.a': 1,
r'/libz\.a': 1,
}
if is_win:
_includes = _win_includes
_excludes = _win_excludes
from PyInstaller.utils import winutils
sep = '[%s]' % re.escape(os.sep + os.altsep)
# Exclude everything from the Windows directory by default.
windir = re.escape(winutils.get_windows_dir())
_excludes['^%s%s' % (windir, sep)] = 1
# Allow pythonNN.dll, pythoncomNN.dll, pywintypesNN.dll
_includes[r'%spy(?:thon(?:com(?:loader)?)?|wintypes)\d+\.dll$' % sep] = 1
elif is_aix:
# The exclude list for AIX differs from other *nix platforms.
_excludes = _aix_excludes
elif is_unix:
# Common excludes for *nix platforms -- except AIX.
_excludes = _unix_excludes
class ExcludeList(object):
def __init__(self):
self.regex = re.compile('|'.join(_excludes.keys()), re.I)
def search(self, libname):
# Running re.search() on '' regex never returns None.
if _excludes:
return self.regex.search(libname)
else:
return False
class IncludeList(object):
def __init__(self):
self.regex = re.compile('|'.join(_includes.keys()), re.I)
def search(self, libname):
# Running re.search() on '' regex never returns None.
if _includes:
return self.regex.search(libname)
else:
return False
exclude_list = ExcludeList()
include_list = IncludeList()
if is_darwin:
# On Mac use macholib to decide if a binary is a system one.
from PyInstaller.lib.macholib import util
class MacExcludeList(object):
def search(self, libname):
return util.in_system_path(libname)
exclude_list = MacExcludeList()
def include_library(libname):
"""
Check if a dynamic library should be included with application or not.
"""
    # During the configuration phase the exclude/include lists are None, so
    # this check is skipped and the library gets included.
    if exclude_list:
        if exclude_list.search(libname) and not include_list.search(libname):
            # Library is excluded and is not overridden by the include list,
            # so it should be excluded.
return False
else:
# Include library
return True
else:
# By default include library.
return True
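# Usage sketch (illustrative; results depend on the platform's exclude list):
#   include_library('/lib/x86_64-linux-gnu/libm.so.6')  # False on Linux (excluded)
#   include_library('/opt/myapp/libfoo.so')             # True (included)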
def mac_set_relative_dylib_deps(libname, distname):
"""
On Mac OS X set relative paths to dynamic library dependencies
of `libname`.
    Relative paths make it possible to avoid the DYLD_LIBRARY_PATH
    environment variable, which has some known issues; relative paths are a
    more flexible mechanism.
    The current location of dependent libraries is derived from the location
    of the library itself (paths start with '@loader_path').
    'distname' is the path of the library relative to the dist directory of
    the frozen executable. We need it to determine how many directory levels
    '@loader_path' must climb for binaries not located directly in the dist
    directory. E.g. qt4 plugins are not in the same directory as Qt*.dylib
    files; without using '@loader_path/../..' for qt plugins,
    Mac OS X would not be able to resolve their shared library
    dependencies and the plugins would not be loaded.
"""
from PyInstaller.lib.macholib import util
from PyInstaller.lib.macholib.MachO import MachO
# Ignore bootloader otherwise PyInstaller fails with exception like
# 'ValueError: total_size > low_offset (288 > 0)'
if os.path.basename(libname) in _BOOTLOADER_FNAMES:
return
# Determine how many directories up is the directory with shared
# dynamic libraries. '../'
# E.g. ./qt4_plugins/images/ -> ./../../
parent_dir = ''
# Check if distname is not only base filename.
if os.path.dirname(distname):
parent_level = len(os.path.dirname(distname).split(os.sep))
parent_dir = parent_level * (os.pardir + os.sep)
def match_func(pth):
"""
        System libraries keep their absolute paths unchanged.
"""
# Match non system dynamic libraries.
if not util.in_system_path(pth):
            # Use a relative path to dependent dynamic libraries, based on
            # the location of the executable.
return os.path.join('@loader_path', parent_dir,
os.path.basename(pth))
# Rewrite mach headers with @loader_path.
dll = MachO(libname)
dll.rewriteLoadCommands(match_func)
# Write changes into file.
# Write code is based on macholib example.
try:
f = open(dll.filename, 'rb+')
for header in dll.headers:
f.seek(0)
dll.write(f)
f.seek(0, 2)
f.flush()
f.close()
except Exception:
pass
| 30.779528
| 100
| 0.619468
|
a592a7e232617a9cb7891f617f1c18ea018e5e11
| 1,439
|
py
|
Python
|
vespene/config/workers.py
|
Conan-Kudo/vespene
|
9e9977523f45586e1326ccd77d8cc0cb10591a07
|
[
"Apache-2.0"
] | 680
|
2018-10-29T12:12:10.000Z
|
2019-04-27T09:52:58.000Z
|
vespene/config/workers.py
|
Conan-Kudo/vespene
|
9e9977523f45586e1326ccd77d8cc0cb10591a07
|
[
"Apache-2.0"
] | 110
|
2018-10-29T12:33:34.000Z
|
2019-02-14T02:31:43.000Z
|
vespene/config/workers.py
|
Conan-Kudo/vespene
|
9e9977523f45586e1326ccd77d8cc0cb10591a07
|
[
"Apache-2.0"
] | 92
|
2018-10-29T12:21:12.000Z
|
2019-06-08T11:08:08.000Z
|
# Copyright 2018, Michael DeHaan LLC
# License: Apache License Version 2.0
# ---------------------------------------------------------------------------
# workers.py - configuration related to worker setup. This file *CAN* be
# different per worker.
# ---------------------------------------------------------------------------
BUILD_ROOT = "/tmp/vespene/buildroot/"
# ---------------------------------------------------------------------------
# all of these settings deal with serving up the buildroot.
# to disable file serving through Django, set FILESERVING_ENABLED to False
FILESERVING_ENABLED = True
FILESERVING_PORT = 8000
# leave this blank and the system will try to figure this out;
# the setup scripts will usually set this to `hostname`, though if
# unset the registration code will run `hostname` itself
FILESERVING_HOSTNAME = ""
FILESERVING_URL="/srv"
# if you disable fileserving but are using triggers to copy build roots
# to some other location (perhaps NFS served up by a web server or an FTP
# server), you can set FILESERVING_ENABLED to False and the following pattern
# will be used instead to generate web links in the main GUI. If this pattern
# is set, the links to the built-in fileserver will NOT be rendered, but this
# alone does not turn off the fileserver; to do that, also set
# FILESERVING_ENABLED to False.
# BUILDROOT_WEB_LINK = "http://build-fileserver.example.com/builds/{{ build.id }}"
BUILDROOT_WEB_LINK = ""
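# Example (hypothetical values): publish links to an external file server
# instead of the built-in one:
#
# FILESERVING_ENABLED = False
# BUILDROOT_WEB_LINK = "http://builds.example.com/buildroots/{{ build.id }}"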
| 41.114286
| 86
| 0.642113
|
d87230ccc90f10393a6490ed64c3efc261608dbe
| 934
|
py
|
Python
|
tetris/blockmesh/geometry.py
|
gabrielbdsantos/tetris
|
78a173bd31079a932e061e5b8fdb49541cb96ae0
|
[
"MIT"
] | null | null | null |
tetris/blockmesh/geometry.py
|
gabrielbdsantos/tetris
|
78a173bd31079a932e061e5b8fdb49541cb96ae0
|
[
"MIT"
] | null | null | null |
tetris/blockmesh/geometry.py
|
gabrielbdsantos/tetris
|
78a173bd31079a932e061e5b8fdb49541cb96ae0
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""Provide common geometry types."""
from __future__ import annotations
from abc import abstractmethod
from tetris.typing import BlockMeshElement
class Geometry(BlockMeshElement):
"""Base class for geometry objects."""
def __init__(self, name: str) -> None:
self.name = name
    @property
    @abstractmethod
    def type(self) -> str:
        """Define the geometry type."""
...
def __eq__(self, other: Geometry) -> bool:
"""Check whether two geometries are 'equal'."""
return self.name == other.name
class TriSurfaceMesh(Geometry):
"""Create a geometry based on a surface file."""
def __init__(self, name: str, file: str) -> None:
super().__init__(name)
self.file = file
@property
def type(self) -> str:
return "triSurfaceMesh"
def write(self) -> str:
return f'{self.name} {{ type {self.type}; file "{self.file}"; }}'
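# Usage sketch (illustrative; the file path is hypothetical):
if __name__ == "__main__":
    surface = TriSurfaceMesh("wing", "constant/triSurface/wing.stl")
    # -> wing { type triSurfaceMesh; file "constant/triSurface/wing.stl"; }
    print(surface.write())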
| 23.35
| 73
| 0.625268
|
8de6cb96ff6963c9bf1b20f02fea027aeaa7122c
| 6,096
|
py
|
Python
|
skabase/tests/test_utils.py
|
adityadangeska/lmc-base-classes
|
a3dada19b27fcc889546d754ef94986c55da5acc
|
[
"BSD-3-Clause"
] | 1
|
2019-05-31T09:47:31.000Z
|
2019-05-31T09:47:31.000Z
|
skabase/tests/test_utils.py
|
adityadangeska/lmc-base-classes
|
a3dada19b27fcc889546d754ef94986c55da5acc
|
[
"BSD-3-Clause"
] | null | null | null |
skabase/tests/test_utils.py
|
adityadangeska/lmc-base-classes
|
a3dada19b27fcc889546d754ef94986c55da5acc
|
[
"BSD-3-Clause"
] | null | null | null |
"""Tests for skabase.utils."""
import json
import pytest
from skabase.utils import get_groups_from_json
from skabase.utils import GroupDefinitionsError
TEST_GROUPS = {
# Valid groups
'basic_no_subgroups': {
'group_name': 'g1',
'devices': ['my/dev/1'],
},
'basic_empty_subgroups': {
'group_name': 'g2',
'devices': ['my/dev/2'],
'subgroups': []
},
'dual_level': {
'group_name': 'g3',
'subgroups': [
{'group_name': 'g3-1',
'devices': ['my/dev/3-1']}
]
},
'multi_level': {
'group_name': 'data_centre_1',
'devices': ['dc1/aircon/1', 'dc1/aircon/2'],
'subgroups': [
{'group_name': 'racks',
'subgroups': [
{'group_name': 'rackA',
'devices': ['dc1/server/1', 'dc1/server/2',
'dc1/switch/A', 'dc1/pdu/rackA']},
{'group_name': 'rackB',
'devices': ['dc1/server/3', 'dc1/server/4',
'dc1/switch/B', 'dc1/pdu/rackB'],
'subgroups': []},
]},
]
},
# Invalid groups (bad keys)
'bk1_bad_keys': {
},
'bk2_bad_keys': {
'group_name': 'bk2',
'bad_devices_key': ['my/dev/01', 'my/dev/02']
},
'bk3_bad_keys': {
'group_name': 'bk3',
'bad_subgroups_key': []
},
'bk4_bad_keys': {
'bad_group_name_key': 'bk4',
'devices': ['my/dev/41']
},
'bk5_bad_nested_keys': {
'group_name': 'bk5',
'subgroups': [
{'group_name': 'bk5-1',
'bad_devices_key': ['my/dev/3-1']}
]
},
'bk6_bad_nested_keys': {
'group_name': 'bk6',
'subgroups': [
{'bad_group_name_key': 'bk6-1',
'devices': ['my/dev/3-1']}
]
},
# Invalid groups (bad values)
'bv1_bad_device_names': {
'group_name': 'bv1',
        'devices': [r'my\dev-11']
},
'bv2_bad_device_names': {
'group_name': 'bv2',
'devices': ['1', '2', 'bad']
},
'bv3_bad_device_names': {
'group_name': 'bv3',
'devices': [' ']
},
'bv4_bad_subgroups_value': {
'group_name': 'bv4',
'subgroups': [' ']
},
'bv5_bad_nested_device_names': {
'group_name': 'bv5',
'subgroups': [
{'group_name': 'bv5-1',
             'devices': [r'my\dev-11']}
]
},
}
VALID_GROUP_KEYS = [
('basic_no_subgroups', ),
('basic_no_subgroups', 'basic_empty_subgroups', ),
('basic_no_subgroups', 'basic_empty_subgroups', 'dual_level', ),
('basic_no_subgroups', 'basic_empty_subgroups', 'dual_level', 'multi_level'),
]
BAD_GROUP_KEYS = [
('bk1_bad_keys', ),
('bk2_bad_keys', ),
('bk3_bad_keys', ),
('bk4_bad_keys', ),
('bk5_bad_nested_keys', ),
('bk6_bad_nested_keys', ),
('bv1_bad_device_names', ),
('bv2_bad_device_names', ),
('bv3_bad_device_names', ),
('bv4_bad_subgroups_value', ),
('bv5_bad_nested_device_names', ),
# Include a valid group, g2 with an invalid group
('basic_no_subgroups', 'bk1_bad_keys', ),
]
def _jsonify_group_configs(group_configs):
"""Returns list of JSON definitions for groups."""
definitions = []
for group_config in group_configs:
definitions.append(json.dumps(group_config))
return definitions
def _get_group_configs_from_keys(group_keys):
"""Provides list of group configs based on keys for TEST_GROUPS."""
group_configs = []
for group_key in group_keys:
group_config = TEST_GROUPS[group_key]
group_configs.append(group_config)
return group_configs
def _group_id_name(keys):
"""Helper function to give tests nicer names."""
return ','.join(keys)
@pytest.fixture(scope="module", params=VALID_GROUP_KEYS, ids=_group_id_name)
def valid_group_configs(request):
"""Provides valid lists of groups configs, one at a time."""
return _get_group_configs_from_keys(request.param)
@pytest.fixture(scope="module", params=BAD_GROUP_KEYS, ids=_group_id_name)
def bad_group_configs(request):
"""Provides bad lists of groups configs, one at a time."""
return _get_group_configs_from_keys(request.param)
def test_get_groups_from_json_empty_list():
groups = get_groups_from_json([])
assert groups == {}
# empty or whitespace strings should also be ignored
groups = get_groups_from_json([''])
assert groups == {}
groups = get_groups_from_json([' ', '', ' '])
assert groups == {}
def _validate_group(definition, group):
"""Compare groups test definition dict to actual tango.Group."""
expected_group_name = definition['group_name'] # key must exist
expected_devices = definition.get('devices', []) # key may exist
expected_subgroups = definition.get('subgroups', []) # key may exist
    print("Checking group:", expected_group_name, group)
assert group is not None
assert expected_group_name == group.get_name()
device_list = group.get_device_list(forward=False)
assert expected_devices == list(device_list)
for expected_subgroup in expected_subgroups:
        print("\tsubgroup def", expected_subgroup)
subgroup = group.get_group(expected_subgroup['group_name'])
assert subgroup is not None
# recurse the tree
_validate_group(expected_subgroup, subgroup)
def test_get_groups_from_json_valid(valid_group_configs):
json_definitions = _jsonify_group_configs(valid_group_configs)
groups = get_groups_from_json(json_definitions)
# Check result
assert len(groups) == len(valid_group_configs)
for group_config in valid_group_configs:
name = group_config['group_name']
group = groups[name]
_validate_group(group_config, group)
def test_get_groups_from_json_invalid(bad_group_configs):
json_definitions = _jsonify_group_configs(bad_group_configs)
with pytest.raises(GroupDefinitionsError):
get_groups_from_json(json_definitions)
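# Usage sketch (illustrative; requires a PyTango environment so that
# tango.Group objects can be created):
#   definitions = [json.dumps(TEST_GROUPS['basic_no_subgroups'])]
#   groups = get_groups_from_json(definitions)
#   assert groups['g1'].get_name() == 'g1'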
| 29.736585
| 81
| 0.607612
|