hexsha
stringlengths 40
40
| size
int64 3
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
972
| max_stars_repo_name
stringlengths 6
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
972
| max_issues_repo_name
stringlengths 6
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
972
| max_forks_repo_name
stringlengths 6
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 3
1.03M
| avg_line_length
float64 1.13
941k
| max_line_length
int64 2
941k
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3d7f5e8d8639c19a9696b7c473c369489767b548
| 2,869
|
py
|
Python
|
tensorflow/tools/docs/base_dir.py
|
nkgwer/tensorflow
|
9cfb4b13c4639f741ec6b92e24c77d294a44c2d5
|
[
"Apache-2.0"
] | 190,993
|
2015-11-09T13:17:30.000Z
|
2022-03-31T23:05:27.000Z
|
tensorflow/tools/docs/base_dir.py
|
govl-psb/tensorflow-1
|
60028072a1c3b4376e145b6fea8e4ccd3324377f
|
[
"Apache-2.0"
] | 48,461
|
2015-11-09T14:21:11.000Z
|
2022-03-31T23:17:33.000Z
|
tensorflow/tools/docs/base_dir.py
|
govl-psb/tensorflow-1
|
60028072a1c3b4376e145b6fea8e4ccd3324377f
|
[
"Apache-2.0"
] | 104,981
|
2015-11-09T13:40:17.000Z
|
2022-03-31T19:51:54.000Z
|
# Lint as: python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Opensource base_dir configuration for tensorflow doc-generator."""
import distutils
from os import path
import keras_preprocessing
import tensorboard
import tensorflow as tf
import tensorflow_estimator
try:
import keras # pylint: disable=g-import-not-at-top
except ImportError:
pass
def get_base_dirs_and_prefixes(code_url_prefix):
  """Returns the base_dirs and code_prefixes for OSS TensorFlow api gen.

  Args:
    code_url_prefix: URL prefix used to link the core `tensorflow` package's
      generated docs back to its source files.

  Returns:
    A `(base_dirs, code_url_prefixes)` pair: the package directories the doc
    generator should walk, and the matching source-link URL prefixes, in the
    same order.
  """
  # Import the submodule explicitly: a bare `import distutils` does not
  # guarantee that `distutils.version` has been loaded.
  from distutils.version import LooseVersion  # pylint: disable=g-import-not-at-top

  base_dir = path.dirname(tf.__file__)
  # Parse the version once; LooseVersion supports comparison against a raw
  # version string such as "2.6".
  tf_version = LooseVersion(tf.__version__)

  if tf_version >= "2.6":
    # From TF 2.6 on, Keras ships as a separate `keras` package, so the
    # module-level best-effort `import keras` must have succeeded here.
    base_dirs = [
        base_dir,
        path.dirname(keras.__file__),
        path.dirname(keras_preprocessing.__file__),
        path.dirname(tensorboard.__file__),
        path.dirname(tensorflow_estimator.__file__),
    ]
    code_url_prefixes = (
        code_url_prefix,
        f"https://github.com/keras-team/keras/tree/v{keras.__version__}/keras",
        f"https://github.com/keras-team/keras-preprocessing/tree/{keras_preprocessing.__version__}/keras_preprocessing",
        f"https://github.com/tensorflow/tensorboard/tree/{tensorboard.__version__}/tensorboard",
        "https://github.com/tensorflow/estimator/tree/master/tensorflow_estimator",
    )
  else:
    if tf_version >= "2.2":
      tf_base = base_dir
    else:
      # Before 2.2 the package sources live under the sibling
      # `tensorflow_core` directory, not under `tensorflow` itself.
      tf_base = path.normpath(path.join(base_dir, "../tensorflow_core"))
    base_dirs = [
        tf_base,
        path.dirname(keras_preprocessing.__file__),
        path.dirname(tensorboard.__file__),
        path.dirname(tensorflow_estimator.__file__),
    ]
    code_url_prefixes = (
        code_url_prefix,
        f"https://github.com/keras-team/keras-preprocessing/tree/{keras_preprocessing.__version__}/keras_preprocessing",
        f"https://github.com/tensorflow/tensorboard/tree/{tensorboard.__version__}/tensorboard",
        "https://github.com/tensorflow/estimator/tree/master/tensorflow_estimator",
    )
  return base_dirs, code_url_prefixes
| 38.253333
| 120
| 0.709655
|
05f0e133c6f431bd82040d7dd69722dd46ccd0ce
| 5,009
|
py
|
Python
|
explore.py
|
airKlizz/passage-ordering
|
f63b993dfd5b7e6475e7fb8950c23c3f22951979
|
[
"MIT"
] | 1
|
2021-09-06T09:44:10.000Z
|
2021-09-06T09:44:10.000Z
|
explore.py
|
airKlizz/passage-ordering
|
f63b993dfd5b7e6475e7fb8950c23c3f22951979
|
[
"MIT"
] | null | null | null |
explore.py
|
airKlizz/passage-ordering
|
f63b993dfd5b7e6475e7fb8950c23c3f22951979
|
[
"MIT"
] | null | null | null |
from pathlib import Path
import pandas as pd
import numpy as np
from datasets import load_dataset
# Path of the dataset loading script for the VIST passage-ordering task.
DATASET_PATH = "dataset/vist_ordering.py"
# Directory holding one CSV of per-example results for each model run.
RESULTS_PATH = "results/vist"
def pretty(csv_path):
    """Normalize a results CSV file name into a lowercase snake_case key."""
    stem = csv_path.name.replace(".csv", "")
    return stem.replace(" ", "_").lower()
class Explorer(object):
    """Interactive explorer for passage-ordering results.

    Loads the test split of the ordering dataset plus one results CSV per
    model run, and offers helpers to aggregate, compare, and display them.
    """

    def __init__(self, dataset_path, results_path):
        self.dataset = load_dataset(dataset_path, split="test")
        results_path = Path(results_path)
        # One DataFrame per CSV in the results directory, keyed by the
        # normalized ("pretty") file name.
        self.dfs = {pretty(csv_path): pd.read_csv(csv_path) for csv_path in results_path.glob("*.csv")}

    def get_results_key(self, key):
        """Return (mean tau, mean pmr) for the run named *key*."""
        df = self.dfs[key]
        return df["tau"].to_numpy().mean(), df["pmr"].to_numpy().mean()

    def get_results_keys(self):
        """Return a DataFrame of mean tau / pmr, indexed by run name."""
        results = {
            "tau": {name: df["tau"].to_numpy().mean() for name, df in self.dfs.items()},
            "pmr": {name: df["pmr"].to_numpy().mean() for name, df in self.dfs.items()},
        }
        return pd.DataFrame(data=results)

    def get_comparison(self):
        """Pairwise per-example tau difference statistics between all runs.

        Returns:
            Two square numpy arrays (means, stds) whose [i][j] entries hold
            the mean / std of |tau_i - tau_j| over examples, in dict
            iteration order of self.dfs.
        """
        comparison = {
            name1: {
                name2: {
                    "mean": np.abs(df1["tau"].to_numpy() - df2["tau"].to_numpy()).mean(),
                    "std": np.abs(df1["tau"].to_numpy() - df2["tau"].to_numpy()).std(),
                }
                for name2, df2 in self.dfs.items()
                if name2  # skip empty run names (as the original did)
            }
            for name1, df1 in self.dfs.items()
            if name1
        }
        return (
            np.array([[v2["mean"] for v2 in v1.values()] for v1 in comparison.values()]),
            np.array([[v2["std"] for v2 in v1.values()] for v1 in comparison.values()]),
        )

    def split_idxs(self, keys=("bart_simple", "bart_multi")):
        """Split example indices by which of two runs wins on tau.

        The default is a tuple (not a list) to avoid the shared
        mutable-default-argument pitfall; callers may still pass a list.

        Returns:
            (idxs where keys[0] wins, idxs where keys[1] wins, tied idxs)
        """
        assert len(keys) == 2, "split_idxs can compare only 2 keys"
        idxs1 = []
        idxs2 = []
        idxs3 = []
        for (idx, row1), (idx2, row2) in zip(self.dfs[keys[0]].iterrows(), self.dfs[keys[1]].iterrows()):
            assert idx == idx2
            if row1["tau"] > row2["tau"]:
                idxs1.append(idx)
            elif row1["tau"] < row2["tau"]:
                idxs2.append(idx)
            else:
                idxs3.append(idx)
        return idxs1, idxs2, idxs3

    def display_examples(self, idxs, keys=("bart_simple", "bart_multi")):
        """Interactively print the examples at *idxs*; entering 'q' quits."""
        for idx in idxs:
            self.display_example(idx, keys)
            i = input("Press Enter to continue...")
            if i == "q":
                break

    def display_example(self, idx, keys=("bart_simple", "bart_multi")):
        """Print one example's shuffled passages, gold order, and predictions."""
        example = self.dataset[idx]
        print("\nPassages to order:")
        for p in example["shuffled_sentences"]:
            print(p)
        print()
        print("Gold order: {}".format(example["label"]))
        for key, df in self.dfs.items():
            if key not in keys:
                continue
            row = df.iloc[idx]
            print(f"\n{key} prediction:")
            print("---\nKendall's tau {:.4f} - PMR {:.4f}\n---".format(row["tau"], row["pmr"]))
            # predicted_order is stored as a stringified list, e.g. "[2, 0, 1]".
            print(row["predicted_order"].strip("][").split(", "))
        print("~~~~~~~~~~")
"""
def order(example, simple=simple, multi=multi):
sentences = example["shuffled_sentences"]
simple_prediction = simple(sentences)
multi_prediction = multi(sentences)
simple_kendall = kendall.compute(predictions=[simple_prediction], references=[example["label"]])["tau"]
multi_kendall = kendall.compute(predictions=[multi_prediction], references=[example["label"]])["tau"]
simple_pmr = pmr.compute(predictions=[simple_prediction], references=[example["label"]])["pmr"]
multi_pmr = pmr.compute(predictions=[multi_prediction], references=[example["label"]])["pmr"]
return {"tau": [simple_kendall, multi_kendall], "pmr": [simple_pmr, multi_pmr]}, simple_prediction, multi_prediction, simple_kendall != multi_kendall
def display(example, simple=simple, multi=multi):
results, simple_prediction, multi_prediction, to_display = order(example, simple, multi)
if not to_display:
return
print("\n\n----------")
print("Sentences to order:")
for s in example["shuffled_sentences"]: print(s)
print("")
print(" \t| Simple \t| Multi")
simple_tau = results["tau"][0]
multi_tau = results["tau"][1]
simple_pmr = results["pmr"][0]
multi_pmr = results["pmr"][1]
print(f"Kendall Tau \t| {simple_tau:.4f} \t| {multi_tau:.4f}")
print(f"PMR \t| {simple_pmr:.4f} \t| {simple_pmr:.4f}")
print()
print("Simple order:")
for l in simple_prediction: print(example["shuffled_sentences"][l])
print()
print("Multi order:")
for l in multi_prediction: print(example["shuffled_sentences"][l])
print()
print("Gold order:")
for l in example["label"]: print(example["shuffled_sentences"][l])
for i in range(10):
display(dataset[i])
"""
| 37.103704
| 153
| 0.578159
|
c951a999cd5942f1f000221702f0ebd6ef59b5df
| 1,443
|
py
|
Python
|
pushbullet/device.py
|
fossabot/pushbullet.py
|
dc04a851bc130bbad55192a645504bfbf140978a
|
[
"MIT"
] | null | null | null |
pushbullet/device.py
|
fossabot/pushbullet.py
|
dc04a851bc130bbad55192a645504bfbf140978a
|
[
"MIT"
] | null | null | null |
pushbullet/device.py
|
fossabot/pushbullet.py
|
dc04a851bc130bbad55192a645504bfbf140978a
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
import warnings
from .helpers import use_appropriate_encoding
class Device(object):
    """One device registered on a Pushbullet account; pushes made through
    this object are targeted at the device."""

    # Attributes copied verbatim from the device-info payload.
    _INFO_ATTRIBUTES = ("push_token", "app_version", "fingerprint", "created",
                        "modified", "active", "nickname", "generated_nickname",
                        "manufacturer", "icon", "model", "has_sms",
                        "key_fingerprint")

    def __init__(self, account, device_info):
        self._account = account
        self.device_iden = device_info.get("iden")
        # Fall back to the generic "system" icon when none is provided.
        # (This mutates the caller's dict, matching the original behavior.)
        if not device_info.get("icon"):
            device_info["icon"] = "system"
        for attribute in self._INFO_ATTRIBUTES:
            setattr(self, attribute, device_info.get(attribute))

    def push_note(self, title, body):
        """Push a plain note (title + body) to this device."""
        return self._push({"type": "note", "title": title, "body": body})

    def push_link(self, title, url, body=None):
        """Push a link, with optional body text, to this device."""
        return self._push({"type": "link", "title": title, "url": url, "body": body})

    def push_file(self, file_name, file_url, file_type, body=None, title=None):
        """Push an already-uploaded file to this device via the account."""
        return self._account.push_file(file_name, file_url, file_type, body=body, title=title, device=self)

    def _push(self, data):
        # Target the push at this device before delegating to the account.
        data["device_iden"] = self.device_iden
        return self._account._push(data)

    @use_appropriate_encoding
    def __str__(self):
        label = self.nickname
        if not label:
            label = "nameless (iden: {})".format(self.device_iden)
        return "Device('{0}')".format(label)

    def __repr__(self):
        return self.__str__()
| 35.195122
| 107
| 0.634096
|
eca653847a45c2c69a690510f874d813abfc47e5
| 3,083
|
py
|
Python
|
20192801_05_bipartiterG_Visualisierung.py
|
lisakressin/Kulturelle-Muster-Lehrliteratur
|
ed6c92b9a453e840f3cb428e3d37c84f2795eb86
|
[
"MIT"
] | null | null | null |
20192801_05_bipartiterG_Visualisierung.py
|
lisakressin/Kulturelle-Muster-Lehrliteratur
|
ed6c92b9a453e840f3cb428e3d37c84f2795eb86
|
[
"MIT"
] | null | null | null |
20192801_05_bipartiterG_Visualisierung.py
|
lisakressin/Kulturelle-Muster-Lehrliteratur
|
ed6c92b9a453e840f3cb428e3d37c84f2795eb86
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 17 14:37:08 2018
@author: KressinL

This script visualizes properties of the bipartite graph and its structure.
(Comments translated from German; code unchanged.)
"""
import os
import pandas as pd
import numpy as np
import networkx as nx
from networkx.algorithms import bipartite as bi
from matplotlib import pyplot as plt
import pickle
import random
# Load the previously pickled bipartite graph B from the working directory.
with open('B', 'rb') as B_file:
    B = pickle.load(B_file)
# Split the two node types (syllabi = "plaene", references = "lit") into two variables.
plaene = [x for x,y in B.nodes(data=True) if y['bipartite']==0]
lit = [x for x,y in B.nodes(data=True) if y['bipartite']==1]
# Build the weighted projected graph of the syllabi (course plans) ...
planB = bi.weighted_projected_graph(B, plaene,ratio=False)
# ... and of the references.
graphR = bi.weighted_projected_graph(B, lit,ratio=False)
# =============================================================================
# Illustrative artificial example matrices for presenting
# co-citation and bibliographic coupling
# =============================================================================
# Build the weighted adjacency matrix of the syllabi.
MplanBgew = nx.adjacency_matrix(planB, weight='weight')
MplanBgew = pd.DataFrame(MplanBgew.todense())
# Label the matrix axes with course titles ("Kurstitel").
MplanBgew.columns = [y for x,y in list(planB.nodes(data='Kurstitel'))]
MplanBgew.index = [y for x,y in list(planB.nodes(data='Kurstitel'))]
# Arbitrary example excerpt for display.
mbspplan = MplanBgew.iloc[23:27, 23:27]
mbspplan.to_csv("20192801_Besp_AM_Plaene.csv", header=True, encoding='utf-8-sig', index=True)
# For articles, tables are prepared each time with the following site:
# http://www.tablesgenerator.com/latex_tables, so that they can then be
# inserted into LaTeX documents.
# The same again for the references and their matrix.
Mref = nx.adjacency_matrix(graphR, weight='weight')
Mref = pd.DataFrame(Mref.todense())
Mref.columns = lit
Mref.index = lit
mbspref = Mref.iloc[54:58, 54:58]
mbspref.to_csv("20192801_Besp_AM_Ref.csv", header=True, encoding='utf-8-sig', index=True)
# =============================================================================
# Draw the bipartite graph
# =============================================================================
pos = dict()
pos.update( (n, (1, i + 100 + i*2)) for i, n in enumerate(plaene) ) # put nodes from X at x=1
pos.update( (n, (2, i)) for i, n in enumerate(lit) ) # put nodes from Y at x=2
plt.figure(figsize=(10,10))
nodes1 = nx.draw_networkx_nodes(B, pos=pos, nodelist= plaene, node_color = 'y', node_shape = 'h', node_size = 100)
nodes2 = nx.draw_networkx_nodes(B, pos=pos, nodelist= lit, node_color = 'b', node_shape ='o', node_size = 100)
nodes1.set_edgecolor('k')
nodes2.set_edgecolor('k')
nodes1.set_linewidth(0.1)
nodes2.set_linewidth(0.1)
nx.draw_networkx_edges(B, pos=pos, width=0.05)
plt.axis('off')
plt.savefig('20192801_bipartiter_Graph.pdf')
plt.show()
| 40.565789
| 115
| 0.651962
|
bbf29fab8462393be19c2ef46f330e8a43501acd
| 2,939
|
py
|
Python
|
cmake/test/src/test_custom_target.py
|
SSteve/fprime
|
12c478bd79c2c4ba2d9f9e634e47f8b6557c54a8
|
[
"Apache-2.0"
] | 2
|
2021-09-24T23:28:33.000Z
|
2022-03-06T08:42:02.000Z
|
cmake/test/src/test_custom_target.py
|
SSteve/fprime
|
12c478bd79c2c4ba2d9f9e634e47f8b6557c54a8
|
[
"Apache-2.0"
] | 42
|
2021-06-10T23:31:10.000Z
|
2021-06-25T00:35:31.000Z
|
cmake/test/src/test_custom_target.py
|
SSteve/fprime
|
12c478bd79c2c4ba2d9f9e634e47f8b6557c54a8
|
[
"Apache-2.0"
] | 1
|
2021-02-23T17:10:44.000Z
|
2021-02-23T17:10:44.000Z
|
####
# test_basic.py:
#
# Basic CMake tests.
#
####
import os
import platform
import cmake
# Test a normal build, with the ref executable and static libraries
BUILD_DIR = os.path.join(os.path.dirname(__file__), "..", "data", "custom-make-targets")

# Module names whose "<name>_is_awesome" marker outputs are expected under the
# "awesome" directory once the custom make target has run.
_AWESOME_MODULES = [
    "everything",
    "Drv_BlockDriver",
    "Svc_FileUplink",
    "Svc_CmdSequencer",
    "Fw_Time",
    "Svc_FileManager",
    "Svc_PolyDb",
    "Fw_FilePacket",
    "Svc_Ping",
    "Fw_Types",
    "Utils_Hash",
    "Fw_Port",
    "Svc_RateGroupDriver",
    "Svc_FatalHandler",
    "Svc_TlmChan",
    "Fw_Obj",
    "Fw_Prm",
    "Svc_ActiveRateGroup",
    "Svc_Cycle",
    "Svc_Health",
    "Svc_LinuxTime",
    "Fw_Com",
    "Drv_DataTypes",
    "Svc_ComLogger",
    "Fw_SerializableFile",
    "Fw_Buffer",
    "Fw_Comp",
    "Svc_CmdDispatcher",
    "Fw_Log",
    "Fw_Tlm",
    "Os",
    "Svc_Sched",
    "Svc_Seq",
    "Svc_WatchDog",
    "Svc_AssertFatalAdapter",
    "Svc_PolyIf",
    "Svc_FileDownlink",
    "Fw_ComFile",
    "Svc_Time",
    "Fw_Cfg",
    "Svc_Fatal",
    "Svc_ActiveLogger",
    "Svc_BufferManager",
    "Fw_Cmd",
    "CFDP_Checksum",
    "Svc_PrmDb",
    "Svc_PassiveConsoleTextLogger",
]

EXPECTED = [os.path.join("awesome", module + "_is_awesome") for module in _AWESOME_MODULES]

TARGETS = ["awesome"]
cmake.register_test(__name__, "custom-target")
| 45.921875
| 88
| 0.710786
|
e5445fbf735cbb7c34a73a56a71e5f1b94d6fc56
| 77
|
py
|
Python
|
hello_world.py
|
amey-git/PythoNetworkTest
|
de74a050bcb3a0265b15416bad43bf9e4367b20c
|
[
"Apache-2.0"
] | null | null | null |
hello_world.py
|
amey-git/PythoNetworkTest
|
de74a050bcb3a0265b15416bad43bf9e4367b20c
|
[
"Apache-2.0"
] | null | null | null |
hello_world.py
|
amey-git/PythoNetworkTest
|
de74a050bcb3a0265b15416bad43bf9e4367b20c
|
[
"Apache-2.0"
] | null | null | null |
# NOTE(review): the original file used Python 2 print statements and was also
# syntactically invalid (unterminated string literal on the first line, and a
# missing colon after the `for` statement). Rewritten as valid Python 3
# producing the evidently intended output.
print("Hello World !")
print("Something Else ..")
for i in range(10):
    print(i)
| 11
| 25
| 0.675325
|
da54cfbc94692ea03e842fe308b816252c6e090c
| 6,782
|
py
|
Python
|
test/programytest/aiml_tests/bot_tests/test_bot_aiml.py
|
ItsPhant/program-y
|
c2b211fcaf8cedc7d6d95a8ea9470a913efa1622
|
[
"MIT"
] | null | null | null |
test/programytest/aiml_tests/bot_tests/test_bot_aiml.py
|
ItsPhant/program-y
|
c2b211fcaf8cedc7d6d95a8ea9470a913efa1622
|
[
"MIT"
] | null | null | null |
test/programytest/aiml_tests/bot_tests/test_bot_aiml.py
|
ItsPhant/program-y
|
c2b211fcaf8cedc7d6d95a8ea9470a913efa1622
|
[
"MIT"
] | 1
|
2020-02-21T17:58:05.000Z
|
2020-02-21T17:58:05.000Z
|
import unittest
import os
from programytest.aiml_tests.client import TestClient
from programy.config.sections.brain.file import BrainFileConfiguration
class BasicTestClient(TestClient):
    """Test client that loads its AIML files from this test's directory."""

    def __init__(self):
        super(BasicTestClient, self).__init__()

    def load_configuration(self, arguments):
        super(BasicTestClient, self).load_configuration(arguments)
        # Point the brain's AIML file loader at the directory containing this test.
        self.configuration.brain_configuration.files.aiml_files._files = [os.path.dirname(__file__)]
class BotAIMLTests(unittest.TestCase):
    """Checks that every configured bot property is served back verbatim
    through the "BOT PROPERTY ..." AIML patterns."""

    @classmethod
    def setUpClass(cls):
        BotAIMLTests.test_client = BasicTestClient()
        BotAIMLTests.test_client.bot.brain.properties.load_from_text("""
url:http://www.keithsterling.com/aiml
name:KeiffBot 1.0
firstname:Keiff
middlename:AIML
lastname:BoT
fullname:KeiffBot
email:info@keiffbot.org
gender:male
botmaster:Keith Sterling
organization:keithsterling.com
version:0.0.1
birthplace:Edinburgh, Scotland
job:mobile virtual assistant
species:robot
birthday:September 9th
birthdate:September 9th, 2016
sign:Virgo
logo:<img src="http://www.keithsterling.com/aiml/logo.png" width="128"/>
religion:Atheist
default-get:unknown
default-property:unknown
default-map:unknown
learn-filename:learn.aiml
""")

    def _assert_bot_property(self, question, expected):
        """Ask *question* of the shared bot and assert it replies *expected*."""
        response = BotAIMLTests.test_client.bot.ask_question("test", question)
        self.assertIsNotNone(response)
        self.assertEqual(response, expected)

    def test_bot_property_xxx(self):
        # Unknown property names fall back to the configured default-property.
        self._assert_bot_property("BOT PROPERTY XXX", "unknown")

    def test_bot_property_url(self):
        self._assert_bot_property("BOT PROPERTY URL", "http://www.keithsterling.com/aiml")

    def test_bot_property_name(self):
        self._assert_bot_property("BOT PROPERTY NAME", "KeiffBot 1.0")

    def test_bot_property_firstname(self):
        self._assert_bot_property("BOT PROPERTY FIRSTNAME", "Keiff")

    def test_bot_property_middlename(self):
        self._assert_bot_property("BOT PROPERTY MIDDLENAME", "AIML")

    def test_bot_property_lastname(self):
        self._assert_bot_property("BOT PROPERTY LASTNAME", "BoT")

    def test_bot_property_email(self):
        self._assert_bot_property("BOT PROPERTY EMAIL", "info@keiffbot.org")

    def test_bot_property_gender(self):
        self._assert_bot_property("BOT PROPERTY GENDER", "male")

    def test_bot_property_botmaster(self):
        self._assert_bot_property("BOT PROPERTY BOTMASTER", "Keith Sterling")

    def test_bot_property_organisation(self):
        self._assert_bot_property("BOT PROPERTY ORGANISATION", "keithsterling.com")

    def test_bot_property_version(self):
        self._assert_bot_property("BOT PROPERTY VERSION", "0.0.1")

    def test_bot_property_birthplace(self):
        self._assert_bot_property("BOT PROPERTY BIRTHPLACE", "Edinburgh, Scotland")

    def test_bot_property_birthday(self):
        self._assert_bot_property("BOT PROPERTY BIRTHDAY", "September 9th")

    def test_bot_property_sign(self):
        self._assert_bot_property("BOT PROPERTY SIGN", "Virgo")

    def test_bot_property_birthdate(self):
        self._assert_bot_property("BOT PROPERTY BIRTHDATE", "September 9th, 2016")

    def test_bot_property_job(self):
        self._assert_bot_property("BOT PROPERTY JOB", "mobile virtual assistant")

    def test_bot_property_species(self):
        self._assert_bot_property("BOT PROPERTY SPECIES", "robot")

    def test_bot_property_religion(self):
        # The AIML layer wraps the raw "Atheist" property in a sentence.
        self._assert_bot_property("BOT PROPERTY RELIGION", "No religion, I am an Atheist")

    def test_bot_property_logo(self):
        self._assert_bot_property(
            "BOT PROPERTY LOGO",
            '<img src="http://www.keithsterling.com/aiml/logo.png" width="128"/>',
        )

    def test_bot_property_default_get(self):
        self._assert_bot_property("BOT PROPERTY DEFAULT GET", "unknown")

    def test_bot_property_default_map(self):
        self._assert_bot_property("BOT PROPERTY DEFAULT MAP", "unknown")

    def test_bot_property_default_property(self):
        self._assert_bot_property("BOT PROPERTY DEFAULT PROPERTY", "unknown")

    def test_bot_property_default_learn_filename(self):
        self._assert_bot_property("BOT PROPERTY LEARN FILENAME", "learn.aiml")
| 42.3875
| 105
| 0.699794
|
f633b45cc48532d8d2ab2ab19013ebc3e44c2073
| 570
|
py
|
Python
|
toolkit/core/message.py
|
RVitalicS/DonorKit
|
e10d376089b8d037f4820616d25aac89642d23da
|
[
"Apache-2.0"
] | 4
|
2022-02-14T19:40:01.000Z
|
2022-03-19T14:14:01.000Z
|
toolkit/core/message.py
|
RVitalicS/Maya-to-USD
|
e10d376089b8d037f4820616d25aac89642d23da
|
[
"Apache-2.0"
] | null | null | null |
toolkit/core/message.py
|
RVitalicS/Maya-to-USD
|
e10d376089b8d037f4820616d25aac89642d23da
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import os
def defaultDefinition(name, module, mode=None):
    """Print a notice that an action function is not defined.

    Arguments:
        name (str): name of the missing action function
        module (str): path of the module file (typically the caller's
            ``__file__``); only its directory is shown
        mode (str, optional): host-application flavor controlling the
            message prefix — "katana", "maya", or None for the generic form
    """
    thisdir = os.path.dirname(module)

    # Per-host message prefixes; unknown/None modes use the generic form.
    prefixes = {
        "katana": "[INFO actions.{}]: ",
        "maya": "# [actions.{}] INFO: ",
    }
    template = prefixes.get(mode, "INFO <actions.{}>: ")
    template += 'function is not defined in directory: "{}"'

    print(template.format(name, thisdir))
| 20.357143
| 63
| 0.559649
|
4e2798b3befd692d2fe01e880fb99c3d79aaf29e
| 685
|
py
|
Python
|
app/core/migrations/0002_tag.py
|
sorwarduet/recipe-app-api
|
ab13680369a693e5bf9d864328384f5781722cc2
|
[
"MIT"
] | null | null | null |
app/core/migrations/0002_tag.py
|
sorwarduet/recipe-app-api
|
ab13680369a693e5bf9d864328384f5781722cc2
|
[
"MIT"
] | null | null | null |
app/core/migrations/0002_tag.py
|
sorwarduet/recipe-app-api
|
ab13680369a693e5bf9d864328384f5781722cc2
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.2 on 2021-06-07 08:20
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration creating the ``Tag`` model (name + owning user)."""

    # Must apply after the initial schema, which defines the user model's table.
    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Tag',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                # Deleting the owning user cascades and removes their tags.
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 28.541667
| 118
| 0.617518
|
67d31f34e5fbca0bdd51878b4add251e96e1a6c0
| 1,870
|
py
|
Python
|
libraries/botbuilder-dialogs/setup.py
|
victor-kironde/botbuilder-python
|
e893d9b036d7cf33cf9c9afd1405450c354cdbcd
|
[
"MIT"
] | null | null | null |
libraries/botbuilder-dialogs/setup.py
|
victor-kironde/botbuilder-python
|
e893d9b036d7cf33cf9c9afd1405450c354cdbcd
|
[
"MIT"
] | null | null | null |
libraries/botbuilder-dialogs/setup.py
|
victor-kironde/botbuilder-python
|
e893d9b036d7cf33cf9c9afd1405450c354cdbcd
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import os
from setuptools import setup

# Runtime dependencies of botbuilder-dialogs.
REQUIRES = [
    "regex<=2019.08.19",
    "recognizers-text-date-time>=1.0.2a1",
    "recognizers-text-number-with-unit>=1.0.2a1",
    "recognizers-text-number>=1.0.2a1",
    "recognizers-text>=1.0.2a1",
    "recognizers-text-choice>=1.0.2a1",
    "babel==2.7.0",
    "botbuilder-schema>=4.10.0",
    "botframework-connector>=4.10.0",
    "botbuilder-core>=4.10.0",
]
# Dependencies needed only to run the test suite.
TEST_REQUIRES = ["aiounittest==1.3.0"]

root = os.path.abspath(os.path.dirname(__file__))

# Read package metadata (__title__, __version__, ...) from about.py without
# importing the package itself (which would require its dependencies).
with open(os.path.join(root, "botbuilder", "dialogs", "about.py")) as f:
    package_info = {}
    info = f.read()
    exec(info, package_info)

with open(os.path.join(root, "README.rst"), encoding="utf-8") as f:
    long_description = f.read()

setup(
    name=package_info["__title__"],
    version=package_info["__version__"],
    url=package_info["__uri__"],
    author=package_info["__author__"],
    description=package_info["__description__"],
    keywords=["BotBuilderDialogs", "bots", "ai", "botframework", "botbuilder"],
    long_description=long_description,
    long_description_content_type="text/x-rst",
    license=package_info["__license__"],
    packages=[
        "botbuilder.dialogs",
        "botbuilder.dialogs.prompts",
        "botbuilder.dialogs.choices",
        "botbuilder.dialogs.skills",
    ],
    # NOTE(review): test requirements are folded into install_requires, so
    # they are installed for all users — presumably intentional; verify.
    install_requires=REQUIRES + TEST_REQUIRES,
    tests_require=TEST_REQUIRES,
    include_package_data=True,
    classifiers=[
        "Programming Language :: Python :: 3.7",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Development Status :: 5 - Production/Stable",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
)
| 31.166667
| 79
| 0.662567
|
2e0c453c8b657c6582489d41dc079824ff100d0e
| 27,661
|
py
|
Python
|
tests/slack_sdk/models/test_elements.py
|
personalcomputer/python-slack-sdk
|
0ed767296963fa9255005db95fef358f8acb0a30
|
[
"MIT"
] | 492
|
2020-10-29T02:09:04.000Z
|
2022-03-31T19:28:01.000Z
|
tests/slack_sdk/models/test_elements.py
|
personalcomputer/python-slack-sdk
|
0ed767296963fa9255005db95fef358f8acb0a30
|
[
"MIT"
] | 387
|
2020-10-27T06:36:37.000Z
|
2022-03-29T08:46:52.000Z
|
tests/slack_sdk/models/test_elements.py
|
personalcomputer/python-slack-sdk
|
0ed767296963fa9255005db95fef358f8acb0a30
|
[
"MIT"
] | 165
|
2020-10-28T22:05:04.000Z
|
2022-03-28T08:03:00.000Z
|
import unittest
from slack_sdk.errors import SlackObjectFormationError
from slack_sdk.models.blocks import (
ButtonElement,
DatePickerElement,
TimePickerElement,
ExternalDataSelectElement,
ImageElement,
LinkButtonElement,
UserSelectElement,
StaticSelectElement,
CheckboxesElement,
StaticMultiSelectElement,
ExternalDataMultiSelectElement,
UserMultiSelectElement,
ConversationMultiSelectElement,
ChannelMultiSelectElement,
OverflowMenuElement,
PlainTextInputElement,
RadioButtonsElement,
ConversationSelectElement,
ChannelSelectElement,
ConfirmObject,
Option,
InputInteractiveElement,
InteractiveElement,
)
from . import STRING_3001_CHARS, STRING_301_CHARS
# -------------------------------------------------
# Interactive Elements
# -------------------------------------------------
class InteractiveElementConstructionTests(unittest.TestCase):
    """Round-trip tests: constructing an element from a dict keeps every property.

    NOTE(review): this class was originally also named
    ``InteractiveElementTests`` and was silently shadowed (and therefore never
    run) by the later class of the same name; renamed so unittest discovers
    and runs it. Locals renamed from ``input`` to avoid shadowing the builtin.
    """

    def test_with_interactive_element(self):
        payload = {
            "type": "plain_text_input",
            "action_id": "plain_input",
            "placeholder": {"type": "plain_text", "text": "Enter some plain text"},
        }
        # Any properties should not be lost
        self.assertDictEqual(payload, InteractiveElement(**payload).to_dict())

    def test_with_input_interactive_element(self):
        payload = {
            "type": "plain_text_input",
            "action_id": "plain_input",
            "placeholder": {"type": "plain_text", "text": "Enter some plain text"},
        }
        # Any properties should not be lost
        self.assertDictEqual(payload, InputInteractiveElement(**payload).to_dict())
class InteractiveElementTests(unittest.TestCase):
    """Validation tests for constraints shared by interactive elements."""

    def test_action_id(self):
        # An over-long action_id (301 chars) must be rejected at serialization time.
        with self.assertRaises(SlackObjectFormationError):
            ButtonElement(
                text="click me!", action_id=STRING_301_CHARS, value="clickable button"
            ).to_dict()
class ButtonElementTests(unittest.TestCase):
    """Serialization and validation tests for ButtonElement.

    Locals originally named ``input`` are renamed to ``expected`` so they no
    longer shadow the builtin; behavior is unchanged.
    """

    def test_document_1(self):
        expected = {
            "type": "button",
            "text": {"type": "plain_text", "text": "Click Me"},
            "value": "click_me_123",
            "action_id": "button",
        }
        self.assertDictEqual(expected, ButtonElement(**expected).to_dict())

    def test_document_2(self):
        expected = {
            "type": "button",
            "text": {"type": "plain_text", "text": "Save"},
            "style": "primary",
            "value": "click_me_123",
            "action_id": "button",
        }
        self.assertDictEqual(expected, ButtonElement(**expected).to_dict())

    def test_document_3(self):
        # A URL-only button serializes identically whether built as a plain
        # ButtonElement or as a LinkButtonElement.
        expected = {
            "type": "button",
            "text": {"type": "plain_text", "text": "Link Button"},
            "url": "https://api.slack.com/block-kit",
        }
        self.assertDictEqual(expected, ButtonElement(**expected).to_dict())
        self.assertDictEqual(expected, LinkButtonElement(**expected).to_dict())

    def test_json(self):
        self.assertDictEqual(
            {
                "text": {"emoji": True, "text": "button text", "type": "plain_text"},
                "action_id": "some_button",
                "value": "button_123",
                "type": "button",
            },
            ButtonElement(
                text="button text", action_id="some_button", value="button_123"
            ).to_dict(),
        )
        confirm = ConfirmObject(title="really?", text="are you sure?")
        self.assertDictEqual(
            {
                "text": {"emoji": True, "text": "button text", "type": "plain_text"},
                "action_id": "some_button",
                "value": "button_123",
                "type": "button",
                "style": "primary",
                "confirm": confirm.to_dict(),
            },
            ButtonElement(
                text="button text",
                action_id="some_button",
                value="button_123",
                style="primary",
                confirm=confirm,
            ).to_dict(),
        )

    def test_text_length(self):
        # Over-long button text (301 chars) must be rejected.
        with self.assertRaises(SlackObjectFormationError):
            ButtonElement(
                text=STRING_301_CHARS, action_id="button", value="click_me"
            ).to_dict()

    def test_value_length(self):
        # Over-long button value (3001 chars) must be rejected.
        with self.assertRaises(SlackObjectFormationError):
            ButtonElement(
                text="Button", action_id="button", value=STRING_3001_CHARS
            ).to_dict()

    def test_invalid_style(self):
        # Only the documented style names are accepted.
        with self.assertRaises(SlackObjectFormationError):
            ButtonElement(
                text="Button", action_id="button", value="button", style="invalid"
            ).to_dict()
class LinkButtonElementTests(unittest.TestCase):
    """Tests for LinkButtonElement (a button that opens a URL)."""

    def test_json(self):
        # Serialize a keyword-constructed element and compare field by field.
        element = LinkButtonElement(
            action_id="test", text="button text", url="http://google.com"
        )
        expected = {
            "text": {"emoji": True, "text": "button text", "type": "plain_text"},
            "url": "http://google.com",
            "type": "button",
            "action_id": element.action_id,
        }
        self.assertDictEqual(expected, element.to_dict())

    def test_url_length(self):
        # An over-long (3001-char) URL must fail serialization.
        with self.assertRaises(SlackObjectFormationError):
            LinkButtonElement(text="Button", url=STRING_3001_CHARS).to_dict()
# -------------------------------------------------
# Checkboxes
# -------------------------------------------------
class CheckboxesElementTests(unittest.TestCase):
    """Tests for the checkboxes block element."""

    def test_document(self):
        # The sample payload from the official docs must round-trip unchanged.
        payload = {
            "type": "checkboxes",
            "action_id": "this_is_an_action_id",
            "initial_options": [
                {"value": "A1", "text": {"type": "plain_text", "text": "Checkbox 1"}}
            ],
            "options": [
                {"value": "A1", "text": {"type": "plain_text", "text": "Checkbox 1"}},
                {"value": "A2", "text": {"type": "plain_text", "text": "Checkbox 2"}},
            ],
        }
        self.assertDictEqual(payload, CheckboxesElement(**payload).to_dict())
# -------------------------------------------------
# DatePicker
# -------------------------------------------------
class DatePickerElementTests(unittest.TestCase):
    """Tests for the datepicker block element."""
    def test_document(self):
        """Docs sample (with a confirm dialog) must round-trip unchanged."""
        input = {
            "type": "datepicker",
            "action_id": "datepicker123",
            "initial_date": "1990-04-28",
            "placeholder": {"type": "plain_text", "text": "Select a date"},
            "confirm": {
                "title": {"type": "plain_text", "text": "Are you sure?"},
                "text": {
                    "type": "mrkdwn",
                    "text": "Wouldn't you prefer a good game of _chess_?",
                },
                "confirm": {"type": "plain_text", "text": "Do it"},
                "deny": {"type": "plain_text", "text": "Stop, I've changed my mind!"},
            },
        }
        self.assertDictEqual(input, DatePickerElement(**input).to_dict())
    def test_json(self):
        """Every yyyy-mm-dd combination must serialize back unchanged.

        Bug fix: the month loop used ``range(1, 12)``, which silently skipped
        December; ``range(1, 13)`` covers all twelve months. The day loop is
        kept at 1..30 as before (it already emits non-existent dates such as
        2020-02-30, so validation appears to be format-level — TODO confirm
        before extending it to day 31).
        """
        for month in range(1, 13):
            for day in range(1, 31):
                date = f"2020-{month:02}-{day:02}"
                self.assertDictEqual(
                    {
                        "action_id": "datepicker-action",
                        "initial_date": date,
                        "placeholder": {
                            "emoji": True,
                            "text": "Select a date",
                            "type": "plain_text",
                        },
                        "type": "datepicker",
                    },
                    DatePickerElement(
                        action_id="datepicker-action",
                        placeholder="Select a date",
                        initial_date=date,
                    ).to_dict(),
                )
    def test_issue_623(self):
        """Regression: placeholder may be None or omitted without raising."""
        elem = DatePickerElement(action_id="1", placeholder=None)
        elem.to_dict()  # no exception
        elem = DatePickerElement(action_id="1")
        elem.to_dict()  # no exception
        # An over-long placeholder must still be rejected.
        with self.assertRaises(SlackObjectFormationError):
            elem = DatePickerElement(action_id="1", placeholder="12345" * 100)
            elem.to_dict()
# -------------------------------------------------
# TimePicker
# -------------------------------------------------
class TimePickerElementTests(unittest.TestCase):
    """Tests for the timepicker block element."""
    def test_document(self):
        """The sample payload from the official docs must round-trip unchanged."""
        input = {
            "type": "timepicker",
            "action_id": "timepicker123",
            "initial_time": "11:40",
            "placeholder": {
                "type": "plain_text",
                "text": "Select a time",
            },
        }
        self.assertDictEqual(input, TimePickerElement(**input).to_dict())
    def test_json(self):
        """Every valid HH:MM value must serialize back unchanged.

        Bug fix: the loops used ``range(0, 23)`` / ``range(0, 59)``, stopping
        at 22:58 and never exercising the boundary values 23:xx and xx:59,
        which are valid times. Now covers the full 00:00-23:59 range.
        """
        for hour in range(0, 24):
            for minute in range(0, 60):
                time = f"{hour:02}:{minute:02}"
                self.assertDictEqual(
                    {
                        "action_id": "timepicker123",
                        "initial_time": time,
                        "placeholder": {
                            "emoji": True,
                            "type": "plain_text",
                            "text": "Select a time",
                        },
                        "type": "timepicker",
                    },
                    TimePickerElement(
                        action_id="timepicker123",
                        placeholder="Select a time",
                        initial_time=time,
                    ).to_dict(),
                )
        # 25:00 is not a valid HH:MM time and must be rejected.
        with self.assertRaises(SlackObjectFormationError):
            TimePickerElement(
                action_id="timepicker123",
                placeholder="Select a time",
                initial_time="25:00",
            ).to_dict()
# -------------------------------------------------
# Image
# -------------------------------------------------
class ImageElementTests(unittest.TestCase):
    """Tests for the (non-interactive) image block element."""

    def test_document(self):
        # The sample payload from the official docs must round-trip unchanged.
        payload = {
            "type": "image",
            "image_url": "http://placekitten.com/700/500",
            "alt_text": "Multiple cute kittens",
        }
        self.assertDictEqual(payload, ImageElement(**payload).to_dict())

    def test_json(self):
        # Keyword construction produces the same dict as the raw payload.
        element = ImageElement(
            image_url="http://google.com", alt_text="not really an image"
        )
        expected = {
            "image_url": "http://google.com",
            "alt_text": "not really an image",
            "type": "image",
        }
        self.assertDictEqual(expected, element.to_dict())

    def test_image_url_length(self):
        # An over-long image URL must fail serialization.
        with self.assertRaises(SlackObjectFormationError):
            ImageElement(image_url=STRING_3001_CHARS, alt_text="text").to_dict()

    def test_alt_text_length(self):
        # Over-long alt text must fail serialization.
        with self.assertRaises(SlackObjectFormationError):
            ImageElement(
                image_url="http://google.com", alt_text=STRING_3001_CHARS
            ).to_dict()
# -------------------------------------------------
# Static Select
# -------------------------------------------------
class StaticMultiSelectElementTests(unittest.TestCase):
    """Tests for the multi-select element with a static option list."""
    # Show full diffs on assertion failure; the payloads are deeply nested.
    maxDiff = None
    def test_document(self):
        """The sample payload from the official docs must round-trip unchanged."""
        input = {
            "action_id": "text1234",
            "type": "multi_static_select",
            "placeholder": {"type": "plain_text", "text": "Select items"},
            "options": [
                {
                    "text": {"type": "plain_text", "text": "*this is plain_text text*"},
                    "value": "value-0",
                },
                {
                    "text": {"type": "plain_text", "text": "*this is plain_text text*"},
                    "value": "value-1",
                },
                {
                    "text": {"type": "plain_text", "text": "*this is plain_text text*"},
                    "value": "value-2",
                },
            ],
            "max_selected_items": 1,
        }
        self.assertDictEqual(input, StaticMultiSelectElement(**input).to_dict())
class StaticSelectElementTests(unittest.TestCase):
    """Tests for the single static select element (flat options and option groups)."""
    # Show full diffs on assertion failure; the payloads are deeply nested.
    maxDiff = None
    def test_document_options(self):
        """Docs sample using a flat "options" list must round-trip unchanged."""
        input = {
            "action_id": "text1234",
            "type": "static_select",
            "placeholder": {"type": "plain_text", "text": "Select an item"},
            "options": [
                {
                    "text": {"type": "plain_text", "text": "*this is plain_text text*"},
                    "value": "value-0",
                },
                {
                    "text": {"type": "plain_text", "text": "*this is plain_text text*"},
                    "value": "value-1",
                },
                {
                    "text": {"type": "plain_text", "text": "*this is plain_text text*"},
                    "value": "value-2",
                },
            ],
        }
        self.assertDictEqual(input, StaticSelectElement(**input).to_dict())
    def test_document_option_groups(self):
        """Docs sample using "option_groups" must round-trip unchanged."""
        input = {
            "action_id": "text1234",
            "type": "static_select",
            "placeholder": {"type": "plain_text", "text": "Select an item"},
            "option_groups": [
                {
                    "label": {"type": "plain_text", "text": "Group 1"},
                    "options": [
                        {
                            "text": {
                                "type": "plain_text",
                                "text": "*this is plain_text text*",
                            },
                            "value": "value-0",
                        },
                        {
                            "text": {
                                "type": "plain_text",
                                "text": "*this is plain_text text*",
                            },
                            "value": "value-1",
                        },
                        {
                            "text": {
                                "type": "plain_text",
                                "text": "*this is plain_text text*",
                            },
                            "value": "value-2",
                        },
                    ],
                },
                {
                    "label": {"type": "plain_text", "text": "Group 2"},
                    "options": [
                        {
                            "text": {
                                "type": "plain_text",
                                "text": "*this is plain_text text*",
                            },
                            "value": "value-3",
                        }
                    ],
                },
            ],
        }
        self.assertDictEqual(input, StaticSelectElement(**input).to_dict())
    # Shared fixtures for the tests below. Defined mid-class (after the two
    # docs-driven tests) — legal, since the class body executes top to bottom
    # before any test method runs.
    option_one = Option.from_single_value("one")
    option_two = Option.from_single_value("two")
    options = [option_one, option_two, Option.from_single_value("three")]
    def test_json(self):
        """Keyword construction serializes options/initial_option/confirm correctly."""
        dict_options = []
        for o in self.options:
            dict_options.append(o.to_dict())
        self.assertDictEqual(
            {
                "placeholder": {
                    "emoji": True,
                    "text": "selectedValue",
                    "type": "plain_text",
                },
                "action_id": "dropdown",
                "options": dict_options,
                "initial_option": self.option_two.to_dict(),
                "type": "static_select",
            },
            StaticSelectElement(
                placeholder="selectedValue",
                action_id="dropdown",
                options=self.options,
                initial_option=self.option_two,
            ).to_dict(),
        )
        # With a confirm object: note ConfirmObject serializes in "block" style here.
        self.assertDictEqual(
            {
                "placeholder": {
                    "emoji": True,
                    "text": "selectedValue",
                    "type": "plain_text",
                },
                "action_id": "dropdown",
                "options": dict_options,
                "confirm": ConfirmObject(title="title", text="text").to_dict("block"),
                "type": "static_select",
            },
            StaticSelectElement(
                placeholder="selectedValue",
                action_id="dropdown",
                options=self.options,
                confirm=ConfirmObject(title="title", text="text"),
            ).to_dict(),
        )
    def test_options_length(self):
        """A 101-option list must be rejected at serialization time."""
        with self.assertRaises(SlackObjectFormationError):
            StaticSelectElement(
                placeholder="select",
                action_id="selector",
                options=[self.option_one] * 101,
            ).to_dict()
# -------------------------------------------------
# External Data Source Select
# -------------------------------------------------
class ExternalDataMultiSelectElementTests(unittest.TestCase):
    """Tests for the multi-select element backed by an external data source."""

    # Show full diffs on assertion failure.
    maxDiff = None

    def test_document(self):
        payload = {
            "action_id": "text1234",
            "type": "multi_external_select",
            "placeholder": {"type": "plain_text", "text": "Select items"},
            "min_query_length": 3,
        }
        self.assertDictEqual(payload, ExternalDataMultiSelectElement(**payload).to_dict())

    def test_document_initial_options(self):
        payload = {
            "action_id": "text1234",
            "type": "multi_external_select",
            "placeholder": {"type": "plain_text", "text": "Select items"},
            "initial_options": [
                {
                    "text": {"type": "plain_text", "text": "The default channel"},
                    "value": "C1234567890",
                }
            ],
            "min_query_length": 0,
            "max_selected_items": 1,
        }
        self.assertDictEqual(payload, ExternalDataMultiSelectElement(**payload).to_dict())
class ExternalDataSelectElementTests(unittest.TestCase):
    """Tests for the single select element backed by an external data source."""
    # Show full diffs on assertion failure; the payloads are deeply nested.
    maxDiff = None
    def test_document_1(self):
        """Minimal docs sample must round-trip unchanged."""
        input = {
            "action_id": "text1234",
            "type": "external_select",
            "placeholder": {"type": "plain_text", "text": "Select an item"},
            "min_query_length": 3,
        }
        self.assertDictEqual(input, ExternalDataSelectElement(**input).to_dict())
    def test_document_2(self):
        """Docs sample with initial_option and a confirm dialog must round-trip."""
        input = {
            "action_id": "text1234",
            "type": "external_select",
            "placeholder": {"type": "plain_text", "text": "Select an item"},
            "initial_option": {
                "text": {"type": "plain_text", "text": "The default channel"},
                "value": "C1234567890",
            },
            "confirm": {
                "title": {"type": "plain_text", "text": "Are you sure?"},
                "text": {
                    "type": "mrkdwn",
                    "text": "Wouldn't you prefer a good game of _chess_?",
                },
                "confirm": {"type": "plain_text", "text": "Do it"},
                "deny": {"type": "plain_text", "text": "Stop, I've changed my mind!"},
            },
            "min_query_length": 3,
        }
        self.assertDictEqual(input, ExternalDataSelectElement(**input).to_dict())
    def test_json(self):
        """Keyword construction serializes placeholder, min_query_length and confirm."""
        self.assertDictEqual(
            {
                "placeholder": {
                    "emoji": True,
                    "text": "selectedValue",
                    "type": "plain_text",
                },
                "action_id": "dropdown",
                "min_query_length": 5,
                "type": "external_select",
            },
            ExternalDataSelectElement(
                placeholder="selectedValue", action_id="dropdown", min_query_length=5
            ).to_dict(),
        )
        # With a confirm object: note ConfirmObject serializes in "block" style here.
        self.assertDictEqual(
            {
                "placeholder": {
                    "emoji": True,
                    "text": "selectedValue",
                    "type": "plain_text",
                },
                "action_id": "dropdown",
                "confirm": ConfirmObject(title="title", text="text").to_dict("block"),
                "type": "external_select",
            },
            ExternalDataSelectElement(
                placeholder="selectedValue",
                action_id="dropdown",
                confirm=ConfirmObject(title="title", text="text"),
            ).to_dict(),
        )
# -------------------------------------------------
# Users Select
# -------------------------------------------------
class UserSelectMultiElementTests(unittest.TestCase):
    """Tests for the multi-user select element."""

    def test_document(self):
        # The sample payload from the official docs must round-trip unchanged.
        payload = {
            "action_id": "text1234",
            "type": "multi_users_select",
            "placeholder": {"type": "plain_text", "text": "Select users"},
            "initial_users": ["U123", "U234"],
            "max_selected_items": 1,
        }
        self.assertDictEqual(payload, UserMultiSelectElement(**payload).to_dict())
class UserSelectElementTests(unittest.TestCase):
    """Tests for the single-user select element."""

    def test_document(self):
        # The sample payload from the official docs must round-trip unchanged.
        payload = {
            "action_id": "text1234",
            "type": "users_select",
            "placeholder": {"type": "plain_text", "text": "Select an item"},
            "initial_user": "U123",
        }
        self.assertDictEqual(payload, UserSelectElement(**payload).to_dict())

    def test_json(self):
        # A plain-string placeholder is expanded into a plain_text object.
        element = UserSelectElement(
            placeholder="abc",
            action_id="a-123",
            initial_user="U123",
        )
        expected = {
            "action_id": "a-123",
            "type": "users_select",
            "initial_user": "U123",
            "placeholder": {
                "type": "plain_text",
                "text": "abc",
                "emoji": True,
            },
        }
        self.assertDictEqual(expected, element.to_dict())
# -------------------------------------------------
# Conversations Select
# -------------------------------------------------
class ConversationSelectMultiElementTests(unittest.TestCase):
    """Tests for the multi-conversation select element."""

    def test_document(self):
        # Payload with selection filter and defaulting flags must round-trip unchanged.
        payload = {
            "action_id": "text1234",
            "type": "multi_conversations_select",
            "placeholder": {"type": "plain_text", "text": "Select conversations"},
            "initial_conversations": ["C123", "C234"],
            "max_selected_items": 2,
            "default_to_current_conversation": True,
            "filter": {"include": ["public", "mpim"], "exclude_bot_users": True},
        }
        self.assertDictEqual(payload, ConversationMultiSelectElement(**payload).to_dict())
class ConversationSelectElementTests(unittest.TestCase):
    """Tests for the single-conversation select element."""

    def test_document(self):
        # Payload with response_url_enabled and a filter must round-trip unchanged.
        payload = {
            "action_id": "text1234",
            "type": "conversations_select",
            "placeholder": {"type": "plain_text", "text": "Select an item"},
            "initial_conversation": "C123",
            "response_url_enabled": True,
            "default_to_current_conversation": True,
            "filter": {"include": ["public", "mpim"], "exclude_bot_users": True},
        }
        self.assertDictEqual(payload, ConversationSelectElement(**payload).to_dict())
# -------------------------------------------------
# Channels Select
# -------------------------------------------------
class ChannelSelectMultiElementTests(unittest.TestCase):
    """Tests for the multi-channel select element."""

    def test_document(self):
        # The sample payload from the official docs must round-trip unchanged.
        payload = {
            "action_id": "text1234",
            "type": "multi_channels_select",
            "placeholder": {"type": "plain_text", "text": "Select channels"},
            "initial_channels": ["C123", "C234"],
            "max_selected_items": 2,
        }
        self.assertDictEqual(payload, ChannelMultiSelectElement(**payload).to_dict())
class ChannelSelectElementTests(unittest.TestCase):
    """Tests for the single-channel select element."""

    def test_document(self):
        # The sample payload from the official docs must round-trip unchanged.
        payload = {
            "action_id": "text1234",
            "type": "channels_select",
            "placeholder": {"type": "plain_text", "text": "Select an item"},
            "response_url_enabled": True,
            "initial_channel": "C123",
        }
        self.assertDictEqual(payload, ChannelSelectElement(**payload).to_dict())
# -------------------------------------------------
# Overflow Menu Select
# -------------------------------------------------
class OverflowMenuElementTests(unittest.TestCase):
    """Tests for the overflow-menu element (the "..." context menu)."""
    def test_document(self):
        """Docs sample, including a URL-only option, must round-trip unchanged."""
        input = {
            "type": "overflow",
            "options": [
                {
                    "text": {"type": "plain_text", "text": "*this is plain_text text*"},
                    "value": "value-0",
                },
                {
                    "text": {"type": "plain_text", "text": "*this is plain_text text*"},
                    "value": "value-1",
                },
                {
                    "text": {"type": "plain_text", "text": "*this is plain_text text*"},
                    "value": "value-2",
                },
                {
                    "text": {"type": "plain_text", "text": "*this is plain_text text*"},
                    "value": "value-3",
                },
                {
                    "text": {"type": "plain_text", "text": "*this is plain_text text*"},
                    # Overflow options may carry a URL instead of a value:
                    # https://api.slack.com/reference/block-kit/composition-objects#option
                    "url": "https://www.example.com",
                },
            ],
            "action_id": "overflow",
        }
        self.assertDictEqual(input, OverflowMenuElement(**input).to_dict())
# -------------------------------------------------
# Input
# -------------------------------------------------
class PlainTextInputElementTests(unittest.TestCase):
    """Tests for the plain-text input element."""

    def test_document_1(self):
        # Minimal payload: type, action_id, placeholder.
        payload = {
            "type": "plain_text_input",
            "action_id": "plain_input",
            "placeholder": {"type": "plain_text", "text": "Enter some plain text"},
        }
        self.assertDictEqual(payload, PlainTextInputElement(**payload).to_dict())

    def test_document_2(self):
        # Full payload with initial value, multiline flag, and length bounds.
        payload = {
            "type": "plain_text_input",
            "action_id": "plain_input",
            "placeholder": {"type": "plain_text", "text": "Enter some plain text"},
            "initial_value": "TODO",
            "multiline": True,
            "min_length": 1,
            "max_length": 10,
        }
        self.assertDictEqual(payload, PlainTextInputElement(**payload).to_dict())

    def test_document_3(self):
        # Payload carrying a dispatch_action_config must round-trip unchanged.
        payload = {
            "type": "plain_text_input",
            "multiline": True,
            "dispatch_action_config": {"trigger_actions_on": ["on_character_entered"]},
        }
        self.assertDictEqual(payload, PlainTextInputElement(**payload).to_dict())
# -------------------------------------------------
# Radio Buttons
# -------------------------------------------------
class RadioButtonsElementTest(unittest.TestCase):
    """Tests for the radio-buttons block element."""
    def test_document(self):
        """Sample payload must round-trip unchanged.

        Bug fix: the literal previously contained the "initial_option" key
        twice (first Radio 1, then Radio 2). Python keeps only the last
        duplicate in a dict literal, so the first entry was dead text; only
        the effective (Radio 2) entry is kept. Runtime behavior is unchanged.
        """
        input = {
            "type": "radio_buttons",
            "action_id": "this_is_an_action_id",
            "options": [
                {"value": "A1", "text": {"type": "plain_text", "text": "Radio 1"}},
                {"value": "A2", "text": {"type": "plain_text", "text": "Radio 2"}},
            ],
            "initial_option": {
                "value": "A2",
                "text": {"type": "plain_text", "text": "Radio 2"},
            },
        }
        self.assertDictEqual(input, RadioButtonsElement(**input).to_dict())
| 34.662907
| 90
| 0.464698
|
8b12375c95c20d04dc19e77526ccade9788b7f1f
| 2,643
|
py
|
Python
|
conanfile.py
|
bincrafters/conan-libui
|
df2afe0574b081754d77b701e867cd30a0b77085
|
[
"MIT"
] | 1
|
2019-01-07T21:23:09.000Z
|
2019-01-07T21:23:09.000Z
|
conanfile.py
|
bincrafters/conan-libui
|
df2afe0574b081754d77b701e867cd30a0b77085
|
[
"MIT"
] | null | null | null |
conanfile.py
|
bincrafters/conan-libui
|
df2afe0574b081754d77b701e867cd30a0b77085
|
[
"MIT"
] | 2
|
2019-05-15T05:15:06.000Z
|
2020-09-11T22:18:27.000Z
|
from conans import ConanFile, CMake, tools
from conans.tools import os_info
import os
class libuiConan(ConanFile):
    """Conan recipe that fetches, builds and packages libui, a C library that
    wraps each platform's native GUI toolkit."""
    name = "libui"
    version = "0.4.1"
    description = "Simple and portable GUI library in C that uses the native GUI technologies of each platform it supports."
    topics = ("conan", "libui", "ui", "gui")
    url = "https://github.com/bincrafters/conan-libui"
    homepage = "https://github.com/andlabs/libui"
    license = "MIT"
    # The recipe ships its own CMakeLists.txt wrapper alongside the sources.
    exports_sources = ["CMakeLists.txt"]
    generators = "cmake", "pkg_config"
    settings = "os", "arch", "compiler", "build_type"
    options = {"shared": [True, False], "fPIC": [True, False]}
    default_options = {"shared": False, "fPIC": True}
    # Folder names used for the unpacked sources and the out-of-source build.
    _source_subfolder = "source_subfolder"
    _build_subfolder = "build_subfolder"
    def config_options(self):
        # fPIC has no meaning on Windows, so remove the option there.
        if self.settings.os == 'Windows':
            del self.options.fPIC
    def source(self):
        # Upstream tags this release "alpha4.1" even though the recipe
        # version is "0.4.1"; the tarball is verified against a pinned sha256.
        url_version = "alpha4.1"
        tools.get(
            "{0}/archive/{1}.tar.gz".format(self.homepage, url_version),
            sha256="f51a9e20e9f9a4c0bce1571ee37f203f42de33e3ac7359a6aac87a54798e8716")
        extracted_dir = self.name + "-" + url_version
        os.rename(extracted_dir, self._source_subfolder)
    def requirements(self):
        # NOTE(review): GTK is declared unconditionally, even for Windows/macOS
        # builds — presumably only linked on Linux; confirm before changing.
        self.requires("gtk/3.24.24")
    def _configure_cmake(self):
        # Out-of-source CMake configuration in _build_subfolder.
        cmake = CMake(self)
        cmake.configure(build_folder=self._build_subfolder)
        return cmake
    def build(self):
        cmake = self._configure_cmake()
        cmake.build()
    def package(self):
        # License and public headers first, then every built artifact by extension.
        self.copy(pattern="LICENSE", dst="licenses", src=self._source_subfolder)
        self.copy(pattern="*.h", dst="include", src=self._source_subfolder)
        self.copy(pattern="*.dll", dst="bin", keep_path=False)
        self.copy(pattern="*.lib", dst="lib", keep_path=False)
        self.copy(pattern="*.a", dst="lib", keep_path=False)
        self.copy(pattern="*.so*", dst="lib", keep_path=False)
        self.copy(pattern="*.dylib", dst="lib", keep_path=False)
    def package_info(self):
        self.cpp_info.libs = tools.collect_libs(self)
        # On Windows consumers must also link the Win32 system libraries libui uses.
        if self.settings.os == "Windows":
            self.cpp_info.libs.extend(
                [
                    "user32",
                    "kernel32",
                    "gdi32",
                    "comctl32",
                    "msimg32",
                    "comdlg32",
                    "d2d1",
                    "dwrite",
                    "ole32",
                    "oleaut32",
                    "oleacc",
                    "uuid",
                    "windowscodecs",
                ]
            )
| 35.24
| 124
| 0.567159
|
f416d12ad255057f1e8f9e89b79bc6908c19e89b
| 1,027
|
py
|
Python
|
utest/test_format_cookie.py
|
leeuwe/robotframework-browser
|
34877baafe39048e83d63a194e31adad11d520cd
|
[
"Apache-2.0"
] | 219
|
2020-06-19T07:59:45.000Z
|
2022-03-26T22:29:04.000Z
|
utest/test_format_cookie.py
|
leeuwe/robotframework-browser
|
34877baafe39048e83d63a194e31adad11d520cd
|
[
"Apache-2.0"
] | 777
|
2020-06-18T10:29:32.000Z
|
2022-03-30T15:05:48.000Z
|
utest/test_format_cookie.py
|
leeuwe/robotframework-browser
|
34877baafe39048e83d63a194e31adad11d520cd
|
[
"Apache-2.0"
] | 61
|
2020-06-18T11:06:29.000Z
|
2022-03-25T23:48:57.000Z
|
import sys
from datetime import datetime
import pytest
from Browser.keywords import Cookie
@pytest.fixture
def cookie():
    """Cookie keyword object under test; None stands in for the library
    context, which these formatting helpers don't appear to need —
    TODO confirm against the Cookie constructor."""
    return Cookie(None)
def test_one_cookie_as_string(cookie: Cookie):
    """A single cookie renders as 'name=value' with no trailing separator."""
    result = cookie._format_cookies_as_string(
        [{"name": "tidii", "value": 1111, "expires": -1}]
    )
    assert result == "tidii=1111"
def test_many_cookies_as_string(cookie: Cookie):
    """Multiple cookies are joined with '; ' in their original order."""
    cookie_dicts = [
        {"name": "tidii", "value": 1111, "httpOnly": False},
        {"name": "foo", "value": "bar", "httpOnly": True},
    ]
    assert cookie._format_cookies_as_string(cookie_dicts) == "tidii=1111; foo=bar"
def test_as_dot_dict(cookie: Cookie):
    """Dot-dict formatting exposes cookie fields as attributes."""
    first = cookie._format_cookies_as_dot_dict(
        [{"name": "tidii", "value": 1111, "expires": -1}]
    )[0]
    assert first.name == "tidii"
    assert first.value == 1111
    # On Windows `expires` ends up as None — presumably because negative
    # epoch timestamps can't be converted there.
    if sys.platform == "win32":
        assert first.expires is None
    else:
        assert first.expires == datetime.fromtimestamp(-1)
| 25.675
| 64
| 0.652386
|
879689ac39ed4775480cf21cc1f4e660db86c2ff
| 3,646
|
py
|
Python
|
dataset_scripts/utils.py
|
contec-korong/r3det-on-mmdetection
|
4a78a0b3330d0fcb9c017a5c97d06a92cf85ebac
|
[
"Apache-2.0"
] | null | null | null |
dataset_scripts/utils.py
|
contec-korong/r3det-on-mmdetection
|
4a78a0b3330d0fcb9c017a5c97d06a92cf85ebac
|
[
"Apache-2.0"
] | null | null | null |
dataset_scripts/utils.py
|
contec-korong/r3det-on-mmdetection
|
4a78a0b3330d0fcb9c017a5c97d06a92cf85ebac
|
[
"Apache-2.0"
] | null | null | null |
import json
import math
import os
import shutil
from multiprocessing import Pool
# Class-name -> COCO category id mapping used when relabeling geojson features.
coco_class = {'crane':'0', 'container': '1', 'small_ship':'2', 'middle_ship':'3', 'large_ship':'4'}
# Length thresholds (same unit as calculate_distance's output) between ship size classes.
ship_size = {'large_ship':100, 'middle_ship':50}
def ship_Division(geojsonpath: str):
    """Reclassify every 'ship' feature in each geojson file under ``geojsonpath``
    into small/middle/large based on its bounding-box side length, rewrite each
    file in place, then print per-class counts."""
    counts = {'large_ship': 0, 'middle_ship': 0, 'small_ship': 0}
    for name in os.listdir(geojsonpath):
        json_file_path = os.path.join(geojsonpath, name)
        with open(json_file_path, 'rt', encoding='UTF8') as json_file:
            json_data = json.load(json_file)
        for feature in json_data['features']:
            props = feature['properties']
            if props['type_name'] != 'ship':
                continue
            # Side length of the rotated box decides the size class.
            dist = calculate_distance(props['object_imcoords'].split(','))
            if dist > ship_size['large_ship']:
                label = 'large_ship'
            elif dist >= ship_size['middle_ship']:
                label = 'middle_ship'
            else:
                label = 'small_ship'
            counts[label] += 1
            props['type_name'] = label
            props['type_id'] = coco_class[label]
        # Every file is rewritten, even when it contained no ships.
        # NOTE(review): the file is read as UTF-8 but written with the platform
        # default encoding — confirm whether that asymmetry is intentional.
        with open(json_file_path, 'w') as f:
            f.write(json.dumps(json_data, indent=4))
    print('large ship :', counts['large_ship'])
    print('middle ship :', counts['middle_ship'])
    print('small ship :', counts['small_ship'])
def calculate_distance(obj_coord, scale=0.55):
    """Return the scaled length of one side of a rotated bounding box.

    Generalization: the previously hard-coded 0.55 factor is now a keyword
    parameter with the same default, so existing callers are unaffected.

    Args:
        obj_coord: flat sequence of 8 corner coordinates
            (x1, y1, x2, y2, x3, y3, x4, y4); items may be strings.
        scale: multiplier applied to the raw pixel distance (default 0.55,
            the original hard-coded factor).

    Returns:
        Euclidean distance between corner 1 (indices 0/1) and corner 4
        (indices 6/7), multiplied by ``scale``.
    """
    x1, y1 = float(obj_coord[0]), float(obj_coord[1])
    x4, y4 = float(obj_coord[6]), float(obj_coord[7])
    return math.hypot(x1 - x4, y1 - y4) * scale
def GetFileFromThisRootDir(dir, ext=None):
    """Recursively collect file paths under ``dir``.

    Fixes: identity comparison (``is not None``) replaces ``!= None`` per
    PEP 8, and the extension is only computed when a filter is active.

    Args:
        dir: root directory to walk (name kept for API compatibility even
            though it shadows the builtin).
        ext: optional container of extensions *without* the leading dot;
            when given, only matching files are returned.

    Returns:
        List of file paths rooted at ``dir``.
    """
    allfiles = []
    needExtFilter = ext is not None
    for root, dirs, files in os.walk(dir):
        for filespath in files:
            filepath = os.path.join(root, filespath)
            if needExtFilter:
                # splitext keeps the dot; [1:] strips it to match entries in `ext`.
                extension = os.path.splitext(filepath)[1][1:]
                if extension in ext:
                    allfiles.append(filepath)
            else:
                allfiles.append(filepath)
    return allfiles
def get_basename(fullname):
    """Return the file name of *fullname* without directory or extension."""
    stem = os.path.splitext(fullname)[0]
    return os.path.basename(stem)
def get_extent(fullname):
    """Return the extension of *fullname*, including the leading dot ('' if none)."""
    return os.path.splitext(fullname)[1]
def single_copy(src_dst_tuple):
    """Copy one file; the argument is a (source_path, destination_path) pair
    so the function can be used directly with Pool.map."""
    src, dst = src_dst_tuple
    shutil.copyfile(src, dst)
def filecopy(srcpath, dstpath, num_process=32):
    """Copy every file under ``srcpath`` into the flat directory ``dstpath``
    using a pool of worker processes.

    Bug fixes:
      * ``util.GetFileFromThisRootDir`` referenced a module that is never
        imported here (NameError at call time); the local helper is called
        directly.
      * the worker pool is now closed and joined so processes cannot leak.
    """
    filelist = GetFileFromThisRootDir(srcpath)
    name_pairs = []
    for file in filelist:
        basename = os.path.basename(file.strip())
        dstname = os.path.join(dstpath, basename)
        name_pairs.append((file, dstname))
    pool = Pool(num_process)
    try:
        pool.map(single_copy, name_pairs)
    finally:
        pool.close()
        pool.join()
def singel_move(src_dst_tuple):
    """Move one file; the argument is a (source_path, destination_path) pair
    so the function can be used directly with Pool.map.

    NOTE(review): the name looks like a typo for ``single_move`` but is kept
    because other code references it by this spelling.
    """
    src, dst = src_dst_tuple
    shutil.move(src, dst)
def filemove(srcpath, dstpath, num_process=32):
    """Move every file under ``srcpath`` into the flat directory ``dstpath``
    using a pool of worker processes.

    Bug fixes:
      * ``pool.map(filemove, name_pairs)`` dispatched back into this function
        with tuple arguments (wrong signature, runaway recursion) instead of
        the single-file worker; it now maps ``singel_move`` over the pairs.
      * ``util.GetFileFromThisRootDir`` referenced an unimported module; the
        local helper is called directly.
      * the worker pool is now closed and joined so processes cannot leak.
    """
    filelist = GetFileFromThisRootDir(srcpath)
    name_pairs = []
    for file in filelist:
        basename = os.path.basename(file.strip())
        dstname = os.path.join(dstpath, basename)
        name_pairs.append((file, dstname))
    pool = Pool(num_process)
    try:
        pool.map(singel_move, name_pairs)
    finally:
        pool.close()
        pool.join()
def getnamelist(srcpath, dstfile):
    """Write the extension-less basename of every file under ``srcpath``
    to ``dstfile``, one per line.

    Bug fix: ``util.GetFileFromThisRootDir`` referenced a module that is
    never imported here; the local helper is called directly.
    """
    filelist = GetFileFromThisRootDir(srcpath)
    with open(dstfile, 'w') as f_out:
        for file in filelist:
            basename = get_basename(file)
            f_out.write(basename + '\n')
| 33.145455
| 99
| 0.611081
|
3b92a3cbcf54199c396411c75c490a193c8c637e
| 10,252
|
py
|
Python
|
retired modules/vnx_reporter.py
|
ghzwireless/control
|
81525249962f840e2d286cdd87dee0f15f20fadb
|
[
"MIT"
] | 293
|
2015-01-01T12:33:12.000Z
|
2022-03-29T23:50:48.000Z
|
retired modules/vnx_reporter.py
|
riddhishikha15/SysAdminBoard
|
3e69c1fa194881c01e2424b0b313c8bb636a91b0
|
[
"MIT"
] | 7
|
2015-08-05T12:55:23.000Z
|
2019-08-28T20:50:01.000Z
|
retired modules/vnx_reporter.py
|
riddhishikha15/SysAdminBoard
|
3e69c1fa194881c01e2424b0b313c8bb636a91b0
|
[
"MIT"
] | 81
|
2015-01-21T03:12:26.000Z
|
2021-10-05T12:26:00.000Z
|
#!/usr/bin/env python
"""emc_vnx_reporter - Uses Mechanize to grab VNX perf data from a VNX Reporter web site.
The VNX Reporter web site has a CSV export on the main page that includes raw data for 6 hours with samples every
5 minutes. We login to the
# Requires Mechanize
# pip install mechanize
"""
from __future__ import division # So division of integers will result in float
from mechanize import Browser
import mechanize
import csv
import StringIO
import time
import json
import operator
__author__ = 'scott@flakshack.com (Scott Vintinner)'
from credentials import VNX_REPORTER_USERNAME # Login info now stored in credentials.py
from credentials import VNX_REPORTER_PASSWORD # Login info now stored in credentials.py
#=================================SETTINGS======================================
VNX_REPORTER_WEBSERVER = "http://vnx-reporter:58080/VNX-MR"
VNX_BLOCK_IO_CSV = "http://vnx-reporter:58080/VNX-MR/report.csv?report&select=0-1-4a44114c-4786a14f-b36fc7de-4c614edb-4a3e733b-85cf6d&display=0&mode=nrx&statistics=none&lower=0.0&upper=&type=3&period=0&durationType=l&duration=2h&itz=America%2FNew_York"
VNX_BLOCK_READ_STR = '"Timestamp","Read IOPS (IO/s)"'
VNX_BLOCK_WRITE_STR = '"Timestamp","Write IOPS (IO/s)"'
VNX_FILE_IO_CSV = "http://vnx-reporter:58080/VNX-MR/report.csv?report&select=0-1-4a44114c-8f267c5c-70bffb61-2809341b-b53a5bb-b0b3e008-80928810&display=0&mode=stk&statistics=none&lower=0.0&upper=&type=3&period=0&durationType=l&duration=2h&itz=America%2FNew_York"
VNX_FILE_READ_STR = '"Timestamp","ReadRequests, server_2 (Nb/s)"'
VNX_FILE_WRITE_STR = '"Timestamp","WriteRequests, server_2 (Nb/s)"'
VNX_TOP_LUN_CSV = 'http://vnx-reporter:58080/VNX-MR/report.csv?report&select=0-1-c2b16891-3f6d1f89-49fa3b2c-a3b0f8f9-a541c5e5-4a3e733b-d777d0dd-11adc2b1-98a6d95&display=0&mode=srt&statistics=none&lower=0.0&upper=&type=3&period=0&durationType=l&duration=5m&itz=America%2FNew_York&d-2692206-s=8&d-2692206-o=1&d-2692206-p=1'
VNX_TOP_LUN_START = '"Array","LUN","Availability (%)","Storage Group","Storage Pool","Storage Pool type","RAID Level","Capacity (GB)","IOPS","Bandwidth (MB/s)","Utilization (%)","Service Time (ms)","Response Time (ms)","Queue Length"'
# How many rows to keep from the top-LUN report.
TOP_LUNS_TO_RETURN = 6
# Polling interval in seconds — presumably consumed by the monitor scheduler;
# confirm against the code that drives generate_json().
SAMPLE_INTERVAL = 120
GRAPH_TITLE = "EMC VNX Operations per second"
#===============================================================================
class MonitorJSON:
    """Mutable holder handed to monitor threads so the current JSON payload
    can be read back by the caller."""
    def __init__(self):
        # Starts empty; filled with serialized results by the monitor code.
        self.json = ""
def output_message(message):
    """Wrap *message* in the dashboard's JSON error envelope and return it."""
    return json.dumps({"error": message}, indent=4)
def monkeypatch_mechanize():
    """Work-around for a mechanize 0.2.5 bug. See: https://github.com/jjlee/mechanize/pull/58"""
    # Only patch affected versions; 0.2.6+ contains the upstream fix.
    if mechanize.__version__ < (0, 2, 6):
        from mechanize._form import SubmitControl, ScalarControl
        # Replacement SubmitControl.__init__ taken from the upstream pull request.
        def __init__(self, type, name, attrs, index=None):
            ScalarControl.__init__(self, type, name, attrs, index)
            # IE5 defaults SUBMIT value to "Submit Query"; Firebird 0.6 leaves it
            # blank, Konqueror 3.1 defaults to "Submit". HTML spec. doesn't seem
            # to define this.
            if self.value is None:
                if self.disabled:
                    # Per the upstream patch: toggle `disabled` off around the
                    # assignment so the empty value can be written.
                    self.disabled = False
                    self.value = ""
                    self.disabled = True
                else:
                    self.value = ""
            self.readonly = True
        SubmitControl.__init__ = __init__
# Mechanize cannot read forms when the submit button is disabled (as happens on this page). To work around this
# issue, we apply the patch listed above.
monkeypatch_mechanize()
def _parse_rate_column(section_text):
    """Parse one CSV section of (timestamp, rate) rows into a list of ints.

    Each row's second column holds a float rate; rows are consumed until the
    first blank row, which delimits sections in VNX-Reporter exports.
    """
    datapoints = []
    for row in csv.reader(StringIO.StringIO(section_text)):
        if len(row) <= 1:  # blank row terminates the section
            break
        datapoints.append(int(float(row[1])))
    return datapoints

def _split_read_write(perf_data, read_header, write_header):
    """Split a VNX-Reporter CSV export into its read and write sections.

    The export contains two sections, each introduced by a known header line;
    the section order is not guaranteed, so locate both headers and slice
    accordingly.  Returns (read_section, write_section).
    """
    read_start = perf_data.find(read_header)
    write_start = perf_data.find(write_header)
    if read_start > write_start:  # write section comes first
        write_data = perf_data[write_start + len(write_header) + 1:read_start - 1]
        read_data = perf_data[read_start + len(read_header) + 1:]
    else:  # read section comes first
        read_data = perf_data[read_start + len(read_header) + 1:write_start - 1]
        write_data = perf_data[write_start + len(write_header) + 1:]
    return read_data, write_data

def generate_json(vnx_monitor):
    """Connect to the VNX-Reporter web server, parse block/file I/O rates and
    the top-LUN report, and store the resulting JSON in vnx_monitor.json.

    On any failure vnx_monitor.json is set to a JSON error message (the
    previous version discarded the formatted error, leaving stale data).
    """
    try:
        # Create Mechanize browser and log in.  The login form has no name,
        # so locate it by its id attribute.
        browser = Browser()
        browser.open(VNX_REPORTER_WEBSERVER)
        for form in browser.forms():
            if form.attrs['id'] == 'login-form':
                browser.form = form
                break
        browser["j_username"] = VNX_REPORTER_USERNAME
        browser["j_password"] = VNX_REPORTER_PASSWORD
        browser.submit()
        # ========================== BLOCK (SAN) DATA ==========================
        perf_data = browser.open(VNX_BLOCK_IO_CSV).read()
        read_data, write_data = _split_read_write(perf_data, VNX_BLOCK_READ_STR, VNX_BLOCK_WRITE_STR)
        output = {"block_read": _parse_rate_column(read_data),
                  "block_write": _parse_rate_column(write_data)}
        # ========================== FILE (NAS) DATA ===========================
        perf_data = browser.open(VNX_FILE_IO_CSV).read()
        read_data, write_data = _split_read_write(perf_data, VNX_FILE_READ_STR, VNX_FILE_WRITE_STR)
        output["file_read"] = _parse_rate_column(read_data)
        output["file_write"] = _parse_rate_column(write_data)
        # ============================ TOP LUNS ================================
        raw_data = browser.open(VNX_TOP_LUN_CSV).read()
        start_location = raw_data.find(VNX_TOP_LUN_START)
        # Strip the header and any preamble; keep only the CSV data rows.
        lun_data = raw_data[start_location + len(VNX_TOP_LUN_START) + 1:]
        datapoints = []
        for row in csv.reader(StringIO.StringIO(lun_data)):
            if len(row) > 12:  # skip blank/short rows
                # A recently deleted LUN may appear with blank numeric fields.
                try:
                    datapoints.append({
                        "name": row[1],
                        "iops": round(float(row[8]), 2),
                        "bandwidth": round(float(row[9]), 2),
                        "utilization": round(float(row[10]), 2),
                        "response_time": round(float(row[12]), 2)
                    })
                except Exception:
                    datapoints.append({
                        "name": row[1] + "Error",
                        "iops": 0,
                        "bandwidth": 0,
                        "utilization": 0,
                        "response_time": 0
                    })
        # Sort by IOPS (descending) and keep only the busiest LUNs.
        datapoints.sort(key=operator.itemgetter('iops'), reverse=True)
        output["top_luns"] = datapoints[:TOP_LUNS_TO_RETURN]
        # Publish the payload for the dashboard thread to pick up.
        vnx_monitor.json = json.dumps(output)
    except Exception as error:
        # Fix: publish the formatted error (was discarded before, leaving
        # stale data) and avoid the Py2-only `error.message` attribute.
        vnx_monitor.json = output_message(str(error))
    if __debug__:
        print(vnx_monitor.json)
# If you run this module by itself, it will instantiate the MonitorJSON class
# and poll the VNX reporter forever, printing the JSON on each iteration
# (printing happens inside generate_json when __debug__ is true).
if __name__ == '__main__':
    monitor = MonitorJSON()
    while True:
        generate_json(monitor)
        # Wait SAMPLE_INTERVAL seconds before the next poll.
        time.sleep(SAMPLE_INTERVAL)
| 42.016393
| 333
| 0.609345
|
18a17805724b1c74a8cd70ae5373003ce317050f
| 1,988
|
py
|
Python
|
src/mwparserfromhell/nodes/__init__.py
|
odidev/mwparserfromhell
|
dcf7ba4e79e8c606f6380846b4ef9b5116b3c942
|
[
"MIT"
] | null | null | null |
src/mwparserfromhell/nodes/__init__.py
|
odidev/mwparserfromhell
|
dcf7ba4e79e8c606f6380846b4ef9b5116b3c942
|
[
"MIT"
] | null | null | null |
src/mwparserfromhell/nodes/__init__.py
|
odidev/mwparserfromhell
|
dcf7ba4e79e8c606f6380846b4ef9b5116b3c942
|
[
"MIT"
] | null | null | null |
# Copyright (C) 2012-2020 Ben Kurtovic <ben.kurtovic@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
This package contains :class:`.Wikicode` "nodes", which represent a single unit
of wikitext, such as a Template, an HTML tag, a Heading, or plain text. The
node "tree" is far from flat, as most types can contain additional
:class:`.Wikicode` types within them - and with that, more nodes. For example,
the name of a :class:`.Template` is a :class:`.Wikicode` object that can
contain text or more templates.
"""
from . import extras
from ._base import Node
from .text import Text
from .argument import Argument
from .comment import Comment
from .external_link import ExternalLink
from .heading import Heading
from .html_entity import HTMLEntity
from .tag import Tag
from .template import Template
from .wikilink import Wikilink
# Explicit public API of the nodes package (``extras`` is imported above but
# not re-exported here).
__all__ = ["Argument", "Comment", "ExternalLink", "HTMLEntity", "Heading",
           "Node", "Tag", "Template", "Text", "Wikilink"]
| 45.181818
| 79
| 0.764085
|
c79f8c21d782a63dd36ffa6170deee1fd2af61fd
| 7,605
|
py
|
Python
|
EBRN.py
|
alilajevardi/Embedded-Block-Residual-Network
|
f7da00e9b8a7ab491eaa90c627cf8bf2c0aaa648
|
[
"MIT"
] | 12
|
2020-06-27T07:23:07.000Z
|
2021-11-22T07:37:59.000Z
|
EBRN.py
|
alilajevardi/Embedded-Block-Residual-Network
|
f7da00e9b8a7ab491eaa90c627cf8bf2c0aaa648
|
[
"MIT"
] | 3
|
2020-06-27T05:30:19.000Z
|
2021-05-07T07:12:16.000Z
|
EBRN.py
|
alilajevardi/Embedded-Block-Residual-Network
|
f7da00e9b8a7ab491eaa90c627cf8bf2c0aaa648
|
[
"MIT"
] | 3
|
2020-09-30T17:31:28.000Z
|
2021-04-27T02:56:29.000Z
|
from tensorflow.keras.layers import PReLU, Subtract, Add, Concatenate
from tensorflow.keras.layers import Input, Conv2D, Conv2DTranspose, Lambda
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam, schedules
from tensorflow.keras.losses import mean_squared_error, mean_absolute_error
from tensorflow.keras.initializers import VarianceScaling
class EBRNClass:
    """Builder for an Embedded Block Residual Network (EBRN) super-resolution model."""
    def __init__(self, leaning_rate_dict, fine_tuning=False):
        """
        Construct the model class.
        :param leaning_rate_dict: dict with keys 'epoch' (schedule boundaries)
            and 'lr' (learning-rate values) for the piecewise-constant decay
        :param fine_tuning: Boolean. train a model for first time (False) or
            fine tuning (True) of an already trained model
        """
        self.learning_rate_change = leaning_rate_dict
        # Piecewise-constant schedule: the learning rate switches to the next
        # value in 'lr' at each boundary listed in 'epoch'.
        self.lr_schedule = schedules.PiecewiseConstantDecay(boundaries=self.learning_rate_change['epoch'],
                                                            values=self.learning_rate_change['lr'])
        self.fine_tuning=fine_tuning
    def FeatureExt(self, input_data):
        """
        The first part of EBR Network, extract features from input image.
        :param input_data: input image batch
        :return: tf object (feature maps)
        """
        x = input_data
        f = 256
        for i in range(3):
            # NOTE(review): each Conv2D is given its own PReLU layer instance
            # as its `activation` callable — confirm the PReLU parameters are
            # tracked/trained as intended by the Keras version in use.
            x = Conv2D(filters=f, kernel_size=3, padding='same', activation=PReLU(),
                       kernel_initializer=VarianceScaling(scale=2.0, mode="fan_in",
                                                          distribution="untruncated_normal"),
                       name='FE_C{}'.format(str(i + 1)))(x)
            # Only the first convolution is wide (256 filters); the rest use 64.
            f = 64
        return x
    def BRModule(self, input_data, BRM_x, scale=4):
        """
        A single Block Residual Module (BRM).
        :param input_data: tf object
        :param BRM_x: index of BRM, x sub-index in the paper
        :param scale: magnifying scale factor
        :return: two tf objects for upper (super resolved) and lower (back-projected) flows
        """
        # Upsample to the target resolution with a transposed convolution.
        x1 = Conv2DTranspose(filters=64, kernel_size=scale, strides=scale, padding='valid', activation=PReLU(),
                             kernel_initializer=VarianceScaling(scale=2.0, mode="fan_in",
                                                                distribution="untruncated_normal"),
                             name='BRM{}_CT'.format(str(BRM_x)))(input_data)
        xup = x1
        # Refine the super-resolved (upper) flow with three 3x3 convolutions.
        for i in range(3):
            xup = Conv2D(filters=64, kernel_size=3, padding='same', activation=PReLU(),
                         kernel_initializer=VarianceScaling(scale=2.0, mode="fan_in",
                                                            distribution="untruncated_normal"),
                         name='BRM{}_C{}_u'.format(str(BRM_x), str(i + 1)))(xup)
        # Back-project: downsample the upsampled features and subtract from the
        # input to obtain the residual this block failed to reconstruct.
        x2 = Conv2D(filters=64, kernel_size=scale, strides=scale, padding='valid', activation=PReLU(),
                    kernel_initializer=VarianceScaling(scale=2.0, mode="fan_in",
                                                       distribution="untruncated_normal"),
                    name='BRM{}_C{}_b'.format(str(BRM_x), str(1)))(x1)
        x2 = Subtract(name='BRM{}_S_b'.format(str(BRM_x)))([input_data, x2])
        xdn = x2
        for i in range(3):
            x2 = Conv2D(filters=64, kernel_size=3, padding='same', activation=PReLU(),
                        kernel_initializer=VarianceScaling(scale=2.0, mode="fan_in",
                                                           distribution="untruncated_normal"),
                        name='BRM{}_C{}_b'.format(str(BRM_x), str(i + 2)))(x2)
        # Skip connection around the residual refinement convolutions.
        xdn = Add(name='BRM{}_A_b'.format(str(BRM_x)))([xdn, x2])
        return xup, xdn  # xup: SR flow in upper line; xdn: residual flow in bottom line
    def EmbeddedBR(self, input_data, n_blocks, scale):
        """
        Combination of n BRMs.
        :param input_data: tf object
        :param n_blocks: number of BRM in network
        :param scale: magnifying scale factor
        :return: tf object
        """
        x1 = []
        x2 = []
        # For the first BRM, data comes from the feature extraction layer.
        xdn = input_data
        # Execute all block residual modules (BRMs), feeding each block's
        # residual (xdn) into the next block.
        for i in range(0, n_blocks):
            xup, xdn = self.BRModule(xdn, BRM_x=i + 1, scale=scale)
            x1.append(xup)
            x2.append(xdn)
        # NOTE(review): x2 (the residual flows) is collected but never used
        # after this point — confirm whether that is intentional.
        # Add output of one BRM with output of its upper BRM then apply Conv2D,
        # walking from the deepest block back toward the first.
        for i in range(n_blocks - 1, 0, -1):
            x = Add(name='BRM{}_A_BRM{}'.format(str(i + 1), str(i)))([x1[i], x1[i - 1]])
            x1[i - 1] = Conv2D(filters=64, kernel_size=3, padding='same', activation=PReLU(),
                               kernel_initializer=VarianceScaling(scale=2.0, mode="fan_in",
                                                                  distribution="untruncated_normal"),
                               name='BRM{}_C'.format(str(i)))(x)
        # Concatenate all (fused) BRM outputs along the channel axis.
        xup = x1[n_blocks - 1]
        for i in range(n_blocks - 2, -1, -1):
            xup = Concatenate(axis=-1, name='BRM{}_BRM{}_Co'.format(str(i + 2), str(i + 1)))([x1[i], xup])
        return xup
    def Reconstruct(self, input_data):
        """
        The last part of the network: reconstruct the final image.
        :param input_data: tf object
        :return: batch of super resolution images (3 channels)
        """
        # Reconstruction layer: collapse the concatenated features to RGB.
        x = Conv2D(filters=3, kernel_size=3, padding='same', activation=PReLU(),
                   kernel_initializer=VarianceScaling(scale=2.0, mode="fan_in",
                                                      distribution="untruncated_normal"),
                   name='Rec_C')(input_data)
        return x
    @staticmethod
    def normalize_01(img):
        """
        Normalise pixel values to the range of 0 to 1 (from 0 to 255).
        :param img: image array with values in [0, 255]
        :return: normalised image array in [0, 1]
        """
        return img / 255.0
    @staticmethod
    def denormalize_0255(img):
        """
        Denormalise pixel values back to the range of 0 to 255.
        :param img: normalised image array in [0, 1]
        :return: image array scaled back to [0, 255]
        """
        return img * 255
    def create_model(self, number_of_blocks, scale_factor, LR_img_size, channel=3):
        """
        Compile the complete network as a keras model.
        :param number_of_blocks: number of BRM units
        :param scale_factor: magnifying scale factor
        :param LR_img_size: size of input low res image, normally 64
        :param channel: number of image channels; PNG image in RGB mode has 3 channels
        :return: compiled keras model
        """
        input_LR = Input(shape=(LR_img_size, LR_img_size, channel), name='input_LR')
        x = Lambda(self.normalize_01)(input_LR)
        x = self.FeatureExt(x)
        x = self.EmbeddedBR(x, number_of_blocks, scale=scale_factor)
        x = self.Reconstruct(x)
        output_HR = Lambda(self.denormalize_0255, name='output_img')(x)
        model = Model(inputs=input_LR, outputs=output_HR, name='EBR_Net')
        # First-time training uses MAE loss (MSE tracked as a metric);
        # fine-tuning switches to MSE loss with MAE tracked instead.
        if not self.fine_tuning:
            model.compile(optimizer=Adam(learning_rate=self.lr_schedule, epsilon=1e-08),
                          loss=mean_absolute_error,
                          metrics={'output_img': ['mse', 'accuracy']})
        else:
            model.compile(optimizer=Adam(learning_rate=self.lr_schedule, epsilon=1e-08),
                          loss=mean_squared_error,
                          metrics={'output_img': ['mae', 'accuracy']})
        return model
| 44.215116
| 111
| 0.571335
|
295a03d9c9e61479b62ddc0871e8226422e580cb
| 12,804
|
py
|
Python
|
util.py
|
tailintalent/causal
|
0648b432a396e5bfe3a7c340963731c826a216c6
|
[
"MIT"
] | 17
|
2020-01-10T18:20:52.000Z
|
2022-01-20T03:00:48.000Z
|
util.py
|
tailintalent/causal
|
0648b432a396e5bfe3a7c340963731c826a216c6
|
[
"MIT"
] | 2
|
2021-08-09T14:29:49.000Z
|
2022-01-24T05:20:03.000Z
|
util.py
|
tailintalent/causal
|
0648b432a396e5bfe3a7c340963731c826a216c6
|
[
"MIT"
] | 3
|
2020-01-21T07:33:27.000Z
|
2021-05-23T15:37:54.000Z
|
import numpy as np
import matplotlib.pylab as plt
import torch
from copy import deepcopy
def plot_matrices(
    matrix_list,
    shape = None,
    images_per_row = 10,
    scale_limit = None,
    figsize = (20, 8),
    x_axis_list = None,
    filename = None,
    title = None,
    subtitles = (),
    highlight_bad_values = True,
    plt = None,
    pdf = None,
    ):
    """Plot each matrix in matrix_list as a grayscale image on one figure.

    :param matrix_list: sequence of 2D (or reshapeable) arrays to display.
    :param shape: optional (rows, cols) to reshape every matrix to.
    :param images_per_row: number of subplots per figure row.
    :param scale_limit: (vmin, vmax) tuple, "auto" to use the global min/max
        over all matrices, or None for per-image scaling.
    :param figsize: matplotlib figure size.
    :param x_axis_list: optional per-image extra text appended to the x label.
    :param filename: if given, save the figure to this path.
    :param title: optional figure suptitle.
    :param subtitles: optional per-image subplot titles (default changed from
        the mutable [] to an equivalent immutable ()).
    :param highlight_bad_values: mask NaN/Inf entries and render them in red.
    :param plt: optional pyplot module to draw with; imported lazily if None
        (fix: the old code unconditionally re-imported pyplot, shadowing and
        ignoring this parameter).
    :param pdf: optional PdfPages object; if given, the figure is appended to
        it and closed instead of shown.
    """
    import matplotlib
    if plt is None:
        from matplotlib import pyplot as plt
    fig = plt.figure(figsize = figsize)
    fig.set_canvas(plt.gcf().canvas)
    if title is not None:
        fig.suptitle(title, fontsize = 18, horizontalalignment = 'left', x=0.1)
    num_matrixs = len(matrix_list)
    # add_subplot requires integer grid dimensions (fix: was a float).
    rows = int(np.ceil(num_matrixs / float(images_per_row)))
    try:
        matrix_list_reshaped = np.reshape(np.array(matrix_list), (-1, shape[0], shape[1])) \
            if shape is not None else np.array(matrix_list)
    except Exception:
        # Ragged input (matrices of differing shapes): keep the original list.
        matrix_list_reshaped = matrix_list
    if scale_limit == "auto":
        # Use a common color scale spanning all matrices.
        scale_min = np.inf
        scale_max = -np.inf
        for matrix in matrix_list:
            scale_min = min(scale_min, np.min(matrix))
            scale_max = max(scale_max, np.max(matrix))
        scale_limit = (scale_min, scale_max)
    for i in range(len(matrix_list)):
        ax = fig.add_subplot(rows, images_per_row, i + 1)
        image = matrix_list_reshaped[i].astype(float)
        if len(image.shape) == 1:
            image = np.expand_dims(image, 1)
        cmap = matplotlib.cm.binary
        if highlight_bad_values:
            # Mask NaN/Inf so they render in translucent red.
            cmap.set_bad('red', alpha = 0.2)
            bad = np.any(np.array([np.isnan(image), np.isinf(image)]), axis = 0)
            image = np.ma.array(image, mask = bad)
        if scale_limit is None:
            ax.matshow(image, cmap = cmap)
        else:
            assert len(scale_limit) == 2, "scale_limit should be a 2-tuple!"
            ax.matshow(image, cmap = cmap, vmin = scale_limit[0], vmax = scale_limit[1])
        if len(subtitles) > 0:
            ax.set_title(subtitles[i])
        try:
            xlabel = "({0:.4f},{1:.4f})\nshape: ({2}, {3})".format(np.min(image), np.max(image), image.shape[0], image.shape[1])
            if x_axis_list is not None:
                xlabel += "\n{0}".format(x_axis_list[i])
            plt.xlabel(xlabel)
        except Exception:
            # Labeling is best-effort (e.g. fully-masked images have no min).
            pass
        plt.xticks(np.array([]))
        plt.yticks(np.array([]))
    if filename is not None:
        plt.tight_layout()
        plt.savefig(filename)
    if pdf is not None:
        pdf.savefig()  # saves the current figure into a pdf page
        plt.close()
    else:
        plt.show()
    if scale_limit is not None:
        print("scale_limit: ({0:.6f}, {1:.6f})".format(scale_limit[0], scale_limit[1]))
    print()
class Early_Stopping(object):
    """Track a monitored metric and signal when it has stopped improving.

    :param patience: number of consecutive non-improving updates tolerated
        before stopping is signalled; None disables early stopping entirely.
    :param epsilon: minimum change that counts as an improvement.
    :param mode: "min" if lower values are better, "max" if higher are better.
    """
    def __init__(self, patience = 100, epsilon = 0, mode = "min"):
        self.patience = patience
        self.epsilon = epsilon
        # Bug fix: the constructor previously hard-coded self.mode = "min",
        # silently ignoring the mode argument.
        self.mode = mode
        self.best_value = None
        self.wait = 0

    def monitor(self, value):
        """Record *value*; return True once patience has been exhausted."""
        to_stop = False
        if self.patience is not None:
            if self.best_value is None:
                # First observation initializes the baseline.
                self.best_value = value
                self.wait = 0
            else:
                improved = (self.mode == "min" and value < self.best_value - self.epsilon) or \
                           (self.mode == "max" and value > self.best_value + self.epsilon)
                if improved:
                    self.best_value = value
                    self.wait = 0
                else:
                    if self.wait >= self.patience:
                        to_stop = True
                    else:
                        self.wait += 1
        return to_stop
def record_data(data_record_dict, data_list, key_list):
    """Append each value in data_list to the list stored under the matching
    key of data_record_dict, creating the list on first use."""
    assert len(data_list) == len(key_list), "the data_list and key_list should have the same length!"
    for key, value in zip(key_list, data_list):
        data_record_dict.setdefault(key, []).append(value)
def make_dir(filename):
    """Ensure the directory containing *filename* exists, creating it (and
    any missing parents) when necessary."""
    import os
    import errno
    directory = os.path.dirname(filename)
    if os.path.exists(directory):
        return
    print("directory {0} does not exist, created.".format(directory))
    try:
        os.makedirs(directory)
    except OSError as exc:
        # Another process may have created it between the existence check and
        # makedirs; only a genuine failure is propagated.
        if exc.errno != errno.EEXIST:
            print(exc)
            raise
def norm(matrix, noise_mode, mode = "L1"):
    """Compute a scalar regularization norm of a tensor.

    :param matrix: torch tensor to measure.
    :param noise_mode: if "permutation", entries are first clamped to
        [1e-6, 1] (keeps the "info" norm finite).
    :param mode: "L1", "L2", "info", or a tuple
        ("order" | "exp_order" | "log_order", p).
    :return: scalar torch tensor.
    :raises Exception: if mode is not recognized.
    """
    # Clamp the matrix if noise_mode is "permutation":
    if noise_mode == "permutation":
        matrix = matrix.clamp(1e-6, 1)
    if mode == "L1":
        return matrix.abs().mean()
    elif mode == "L2":
        # The epsilon keeps the gradient finite when the mean is exactly zero.
        return torch.sqrt((matrix ** 2).mean() + 1e-9)
    elif mode == "info":
        return torch.log2(1 + 1 / matrix ** 2).sum() / 2
    elif isinstance(mode, tuple):
        if mode[0] == "order":
            order = mode[1]
            return (matrix.abs() ** order).mean()
        elif mode[0] == "exp_order":
            order = mode[1]
            return (torch.exp(matrix.abs() ** order)).mean()
        elif mode[0] == "log_order":
            order = mode[1]
            return (torch.log(matrix.abs() ** order + 1e-9)).mean()
        else:
            # Fix: a bare `raise` here had no active exception to re-raise.
            raise Exception("mode {0} not recognized!".format(mode))
    else:
        # Fix: the message previously never interpolated the mode value.
        raise Exception("mode {0} not recognized!".format(mode))
def train_test_split(X, y, test_size = 0.1):
    """Randomly split the paired tensors X and y into train and test subsets.

    4D inputs are first flattened along their two leading dimensions.  With
    test_size None, no split is performed and both subsets are the full data.
    Returns ((X_train, y_train), (X_test, y_test)).
    """
    import torch
    if len(X.shape) == 4:
        X = X.view(-1, *X.shape[2:])
        y = y.view(-1, *y.shape[2:])
    if test_size is None:
        return (X, y), (X, y)
    num_examples = len(X)
    num_test = int(num_examples * test_size)
    # Sample training indices without replacement; the remainder is the test set.
    train_idx = np.random.choice(range(num_examples), size = num_examples - num_test, replace = False)
    test_idx = set(range(num_examples)) - set(train_idx)
    device = torch.device("cuda" if X.is_cuda else "cpu")
    train_idx = torch.LongTensor(list(train_idx)).to(device)
    test_idx = torch.LongTensor(list(test_idx)).to(device)
    return (X[train_idx], y[train_idx]), (X[test_idx], y[test_idx])
def new_dict(Dict, new_content_dict):
    """Return a deep copy of Dict updated with new_content_dict, leaving the
    original dictionary untouched."""
    from copy import deepcopy
    updated = deepcopy(Dict)
    updated.update(new_content_dict)
    return updated
def format_list(List, interval = "\t", decimals = None):
    """Join the elements of List into one string separated by *interval*,
    optionally formatting each element with the given number of decimals."""
    if decimals is None:
        items = ["{0}".format(element) for element in List]
    else:
        items = ["{0:.{1}f}".format(element, decimals) for element in List]
    return interval.join(items)
def sort_two_lists(list1, list2, reverse = False):
    """Sort list1 and list2 together, ordered by the values of list1.

    Returns deep copies of both lists; ties keep their relative order
    (Python's sort is stable).  Empty inputs yield ([], []).
    """
    from operator import itemgetter
    pairs = sorted(zip(deepcopy(list1), deepcopy(list2)), key=itemgetter(0), reverse=reverse)
    if not pairs:
        return [], []
    sorted1, sorted2 = (list(column) for column in zip(*pairs))
    return deepcopy(sorted1), deepcopy(sorted2)
def get_args(arg, arg_id = 1, type = "str"):
    """Return *arg* when running inside IPython/Jupyter, otherwise read
    sys.argv[arg_id] and coerce it to the requested type.

    :param arg: fallback/default value (used in notebooks or when argv is
        missing/unparsable).
    :param arg_id: position in sys.argv to read when run as a script.
    :param type: one of "str", "int", "float", "bool", "eval", "tuple".
    """
    try:
        # Probe for an IPython kernel; raises NameError outside notebooks.
        get_ipython().run_line_magic('matplotlib', 'inline')
        arg_return = arg
    except:
        import sys
        try:
            arg_return = sys.argv[arg_id]
            # NOTE(review): "bool"/"eval"/"tuple" use eval() on a command-line
            # string — do not pass untrusted input through these modes.
            if type == "int":
                arg_return = int(arg_return)
            elif type == "float":
                arg_return = float(arg_return)
            elif type == "bool":
                arg_return = eval(arg_return)
            elif type == "eval":
                arg_return = eval(arg_return)
            elif type == "tuple":
                # NOTE(review): this branch evals the string only when it does
                # NOT start with "(" or "[" and hand-parses it otherwise —
                # confirm the condition is not inverted.
                if arg_return[0] not in ["(", "["]:
                    arg_return = eval(arg_return)
                else:
                    splitted = arg_return[1:-1].split(",")
                    List = []
                    for item in splitted:
                        # Elements that fail to eval are kept as raw strings.
                        try:
                            item = eval(item)
                        except:
                            pass
                        List.append(item)
                    arg_return = tuple(List)
            elif type == "str":
                pass
            else:
                raise Exception("type {0} not recognized!".format(type))
        except:
            # raise
            # Any failure (missing argv entry, bad parse) falls back to the default.
            arg_return = arg
    return arg_return
def normalize_tensor(X, y, normalize):
    """Normalize the paired tensors X and y (numpy arrays or torch tensors).

    Statistics are computed from X and y jointly (concatenated along axis -2),
    so both are scaled by the same transform.

    :param X: input tensor, at least 3-dimensional.
    :param y: target tensor, at least 3-dimensional.
    :param normalize: normalization scheme:
        0 - no-op;
        1 - global zero-mean/unit-std over all elements;
        2 - per-feature (last axis) zero-mean/unit-std;
        3 - per-feature min-max scaling to [0, 1];
        4 - numpy only: zero-mean X and zero-mean/unit-std y per (K, N) slice
            (the torch branch is a no-op — see NOTE below).
    :return: the normalized (X, y) pair.
    :raises ValueError: for an unsupported `normalize` value or tensor rank
        (fix: these paths previously executed a bare `raise` with no active
        exception, producing an unrelated RuntimeError).
    """
    import torch
    if isinstance(X, np.ndarray):
        assert len(X.shape) >= 3
        assert len(y.shape) >= 3
        XY = np.concatenate([X, y], -2)
        if normalize == 0:
            pass
        elif normalize == 1:
            mean = XY.mean()
            std = XY.std()
            X = (X - mean) / std
            y = (y - mean) / std
        elif normalize == 2:
            shape = tuple(XY.shape)
            N = shape[-1]
            XY_reshape = XY.reshape(-1, N)
            if len(shape) == 3:
                mean = XY_reshape.mean(0).reshape(1, 1, N)
                std = XY_reshape.std(0).reshape(1, 1, N)
            elif len(shape) == 4:
                mean = XY_reshape.mean(0).reshape(1, 1, 1, N)
                std = XY_reshape.std(0).reshape(1, 1, 1, N)
            else:
                raise ValueError("normalize == 2 supports only 3D or 4D tensors, got shape {0}".format(shape))
            # NOTE(review): unlike the torch branch below, no epsilon is added
            # to std here — a constant feature divides by zero. Confirm intent.
            X = (X - mean) / std
            y = (y - mean) / std
        elif normalize == 3:
            N = XY.shape[-1]
            X_new = []
            Y_new = []
            for i in range(N):
                xy_ele = XY[...,i]
                x_ele = X[...,i]
                y_ele = y[...,i]
                xy_max = xy_ele.max()
                xy_min = xy_ele.min()
                x_new = (x_ele - xy_min) / (xy_max - xy_min)
                y_new = (y_ele - xy_min) / (xy_max - xy_min)
                X_new.append(x_new)
                Y_new.append(y_new)
            X = np.stack(X_new, -1)
            y = np.stack(Y_new, -1)
        elif normalize == 4:
            # Make each (..., K, N) slice of X zero mean:
            X_shape = X.shape
            X = X.reshape(-1, *X_shape[-2:])
            X = X - X.mean(0, keepdims = True)
            X = X.reshape(*X_shape)
            # Make each (..., K, N) slice of y zero mean and unit std:
            y_shape = y.shape
            y = y.reshape(-1, *y_shape[-2:])
            y = (y - y.mean(0, keepdims = True)) / y.std(0, keepdims = True)
            y = y.reshape(*y_shape)
        else:
            raise ValueError("normalize == {0} is not supported".format(normalize))
    else:
        assert len(X.size()) >= 3
        assert len(y.size()) >= 3
        XY = torch.cat([X, y], -2)
        if normalize == 0:
            pass
        elif normalize == 1:
            mean = XY.mean()
            std = XY.std()
            X = (X - mean) / std
            y = (y - mean) / std
        elif normalize == 2:
            shape = tuple(XY.shape)
            N = shape[-1]
            XY_reshape = XY.reshape(-1, N)
            if len(shape) == 3:
                mean = XY_reshape.mean(0).view(1, 1, N)
                std = XY_reshape.std(0).view(1, 1, N)
            elif len(shape) == 4:
                mean = XY_reshape.mean(0).view(1, 1, 1, N)
                std = XY_reshape.std(0).view(1, 1, 1, N)
            else:
                raise ValueError("normalize == 2 supports only 3D or 4D tensors, got shape {0}".format(shape))
            X = (X - mean) / (std + 1e-9)
            y = (y - mean) / (std + 1e-9)
        elif normalize == 3:
            N = XY.shape[-1]
            X_new = []
            Y_new = []
            for i in range(N):
                xy_ele = XY[...,i]
                x_ele = X[...,i]
                y_ele = y[...,i]
                xy_max = xy_ele.max()
                xy_min = xy_ele.min()
                x_new = (x_ele - xy_min) / (xy_max - xy_min)
                y_new = (y_ele - xy_min) / (xy_max - xy_min)
                X_new.append(x_new)
                Y_new.append(y_new)
            X = torch.stack(X_new, -1)
            y = torch.stack(Y_new, -1)
        elif normalize == 4:
            # NOTE(review): intentionally(?) a no-op for torch tensors — the
            # numpy branch performs per-slice centering here. Confirm whether
            # the torch implementation was deliberately disabled.
            pass
        else:
            raise ValueError("normalize == {0} is not supported".format(normalize))
    return X, y
| 34.793478
| 128
| 0.505467
|
ac4f8a9bcb5991d76e00fba5e4bff19748c29e30
| 648
|
py
|
Python
|
user_program/old/u2p2_scan_devices.py
|
DaveRichmond/USB4VC
|
ed3c2b1e0f44186c8d43ff9dbe1e93a258819376
|
[
"MIT"
] | 78
|
2022-02-07T16:48:11.000Z
|
2022-03-31T12:25:35.000Z
|
user_program/old/u2p2_scan_devices.py
|
DaveRichmond/USB4VC
|
ed3c2b1e0f44186c8d43ff9dbe1e93a258819376
|
[
"MIT"
] | 1
|
2022-02-26T20:16:08.000Z
|
2022-02-26T20:24:04.000Z
|
user_program/old/u2p2_scan_devices.py
|
DaveRichmond/USB4VC
|
ed3c2b1e0f44186c8d43ff9dbe1e93a258819376
|
[
"MIT"
] | 1
|
2022-02-24T03:34:15.000Z
|
2022-02-24T03:34:15.000Z
|
import os
# Path to the kernel's input-device listing; fall back to a local test file
# when running on Windows (os.name == 'nt').
dev_path = '/proc/bus/input/devices'
if 'nt' in os.name:
    dev_path = 'devices.txt'
dev_file = open(dev_path)
file_content = dev_file.readlines()
dev_file.close()
current_line_num = 0
max_lines = len(file_content)
# Each device block starts with an "I: " line and ends at the first blank
# line; print every block surrounded by start/end markers.
while current_line_num < max_lines:
    this_line = file_content[current_line_num]
    if this_line.startswith('I: '):
        print("-------- New block! --------")
        # A lone newline (length 1) marks the end of the current block.
        while len(this_line) != 1:
            current_line_num += 1
            if current_line_num >= max_lines:
                print("-------- EOF --------")
                break
            this_line = file_content[current_line_num]
            print(this_line)
        print("-------- Block end --------")
    current_line_num += 1
| 21.6
| 45
| 0.657407
|
59ee4e78037b97ac523a5525eea685fef473b5fd
| 2,267
|
py
|
Python
|
volttron/platform/agent/base_market_agent/offer.py
|
gnmerritt/volttron
|
ebfbf62bab77d46fd3e8d6aaca1fc4f33932ccf3
|
[
"Apache-2.0"
] | 1
|
2021-04-06T21:28:38.000Z
|
2021-04-06T21:28:38.000Z
|
volttron/platform/agent/base_market_agent/offer.py
|
gnmerritt/volttron
|
ebfbf62bab77d46fd3e8d6aaca1fc4f33932ccf3
|
[
"Apache-2.0"
] | null | null | null |
volttron/platform/agent/base_market_agent/offer.py
|
gnmerritt/volttron
|
ebfbf62bab77d46fd3e8d6aaca1fc4f33932ccf3
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
#
# Copyright 2019, Battelle Memorial Institute.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This material was prepared as an account of work sponsored by an agency of
# the United States Government. Neither the United States Government nor the
# United States Department of Energy, nor Battelle, nor any of their
# employees, nor any jurisdiction or organization that has cooperated in the
# development of these materials, makes any warranty, express or
# implied, or assumes any legal liability or responsibility for the accuracy,
# completeness, or usefulness or any information, apparatus, product,
# software, or process disclosed, or represents that its use would not infringe
# privately owned rights. Reference herein to any specific commercial product,
# process, or service by trade name, trademark, manufacturer, or otherwise
# does not necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# Battelle Memorial Institute. The views and opinions of authors expressed
# herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY operated by
# BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
# }}}
class Offer(object):
    """A market offer: a BUY or SELL of a commodity priced by a curve."""

    BUY = 'BUY'
    SELL = 'SELL'

    def __init__(self, offer_type, commodity, curve):
        """Capture the offer's type, commodity and price curve."""
        # Single name-mangled tuple keeps the fields effectively read-only.
        self.__fields = (offer_type, commodity, curve)

    def type(self):
        """Return Offer.BUY or Offer.SELL."""
        return self.__fields[0]

    def commodity(self):
        """Return the commodity this offer concerns."""
        return self.__fields[1]

    def curve(self):
        """Return the price curve backing this offer."""
        return self.__fields[2]
| 39.77193
| 79
| 0.751654
|
7fbe21f7fe0c5695ccf9c207ef6bc58b3435d1f1
| 8,552
|
py
|
Python
|
kge/graphics/animator.py
|
Fredkiss3/kge
|
389d5ab21ecb6dc1a25dd9f98245ba5938a5d253
|
[
"CC0-1.0"
] | 4
|
2020-03-17T02:15:10.000Z
|
2021-06-29T13:34:40.000Z
|
kge/graphics/animator.py
|
Fredkiss3/kge
|
389d5ab21ecb6dc1a25dd9f98245ba5938a5d253
|
[
"CC0-1.0"
] | 4
|
2020-05-23T05:47:30.000Z
|
2022-01-13T02:15:35.000Z
|
kge/graphics/animator.py
|
Fredkiss3/kge
|
389d5ab21ecb6dc1a25dd9f98245ba5938a5d253
|
[
"CC0-1.0"
] | null | null | null |
from typing import Optional, Dict, Any, Tuple, Set, Callable, Union
import kge
from kge.core.component import BaseComponent
from kge.core.constants import ALWAYS
from kge.core.events import Event, AnimChanged, Init
from kge.graphics.animation import Animation
from kge.utils.condition import Condition
class AnimState(object):
    """
    An animation state holding the value of an animation.

    Instances are interned per Animation via ``_cache``: constructing an
    AnimState for the same animation twice yields the same object, so state
    identity comparisons in Animator are reliable.
    """
    # Cache of AnimState instances keyed by Animation (None is a valid key
    # for the "any animation" state).
    _cache = {}
    def __new__(cls, anim: Animation = None):
        # Return the cached instance for this animation if one exists.
        # NOTE(review): __init__ still runs on every construction, re-assigning
        # self.animation on cached instances — harmless as long as the key and
        # the attribute stay the same object.
        try:
            return cls._cache[anim]
        except KeyError:
            inst = super().__new__(cls)
            cls._cache[anim] = inst
            return inst
    def __init__(self, anim: Animation = None):
        self.animation = anim
    def __repr__(self):
        if self.animation is None:
            return "AnimState(animation=ANY)"
        return f"AnimState(animation={self.animation})"
    def __next__(self):
        """
        Advance the state's animation by one step.

        Returns True when the underlying animation has finished (always False
        for the ANY state, which has no animation).
        """
        if self.animation is not None:
            self.animation.update()
            return self.animation.finished
        return False
    def __enter__(self):
        # Entering the state restarts its animation from the beginning.
        if self.animation is not None:
            self.animation.restart()
    def __exit__(self, *_, **__):
        pass
    def exit(self):
        # Explicitly stop the animation when leaving the state.
        if self.animation is not None:
            self.animation.stop()
class Transition(object):
    """
    A transition between two animations.

    ``previous`` may be None for transitions that can fire from any state.
    """
    def __init__(self, prev: Optional[Animation], next: Animation):
        self.next = next
        self.previous = prev
    def __repr__(self):
        return f"Transition between {self.previous} and {self.next}"
class Animator(BaseComponent):
"""
A component that controls which animation to play
"""
@property
def entity(self) -> 'kge.Entity':
return self._entity
@entity.setter
def entity(self, e: 'kge.Entity'):
if isinstance(e, kge.Entity):
if e.getComponent(kind=Animator) is not None:
raise AttributeError(f"There is already another Animator component attached to '{e}'")
# set entity
self._entity = e
next: Optional[AnimState]
current: AnimState
def __init__(self, *animations: Animation):
self._entity = None
super().__init__()
if not len(animations):
raise ValueError("Your animator should have at least one animation in it !")
self.current = AnimState(animations[0])
self.next = None # type: Optional[AnimState]
self.animations = {anim.name: AnimState(anim) for anim in animations} # type: Dict[str, AnimState]
self.transitions = {} # type: Dict[Tuple[AnimState, Condition], Transition]
self._fields = {} # type: Dict[str, Any]
self._conditions = {ALWAYS.prop: set()} # type: Dict[str, Set[Condition]]
def add_field(self, name: str, default_value: Any = None):
if not name in self._fields:
self._fields[name] = default_value
self._conditions[name] = set()
else:
raise ValueError("This field is already in the animator")
def __setitem__(self, key, value):
"""
Set a field
"""
self._fields[key] = value
# Check all conditions to get a valid condition
for cd in self._conditions[key]:
# If from ANY, then check condition
t = self.transitions.get((ANY, cd), False)
if t:
if cd.resolve(self):
state = AnimState(t.next)
if self.next != state:
self.next = state
return
else:
t = self.transitions.get((self.current, cd), False)
if cd.resolve(self) and t:
# Set current animation
state = AnimState(t.next)
if self.next != state:
self.next = state
return
def update(self, dispatch: Callable[[Event], None]):
if self.next is not None:
# Dispatch Animation Changed
if self.next is not None:
dispatch(
AnimChanged(previous=self.current.animation, next=self.next.animation, entity=self.entity)
)
with self.next:
pass
self.current = self.next
self.next = None
# update the current animation
finished = next(self.current)
if finished:
t = self.transitions.get((self.current, ALWAYS), None)
if t is not None:
# Dispatch Animation Changed
dispatch(
AnimChanged(previous=self.current.animation, next=t.next, entity=self.entity)
)
state = AnimState(t.next)
if self.current != state:
# Exit (Stop the animation)
if state is not None:
with state:
pass
self.current = state
def __getitem__(self, item):
    """Return the value of field *item*.

    :raises AttributeError: when no such field exists (mirrors Python's
        default attribute-lookup behaviour)
    """
    if item in self._fields:
        return self._fields[item]
    raise AttributeError(f"No such field in animator '{item}'")
def __getattr__(self, attr):
    """Expose animator fields as attributes (e.g. ``animator.speed``).

    Delegates to ``__getitem__``; Python only calls this hook when normal
    attribute lookup has already failed.

    :raises AttributeError: when no such field exists
    """
    try:
        return self.__getitem__(attr)
    # BUG FIX: __getitem__ raises AttributeError (never KeyError), so the
    # original `except KeyError` clause was dead code and this wrapper's
    # re-raise never ran. Catch both so the clause actually applies; the
    # resulting message is unchanged.
    except (KeyError, AttributeError):
        raise AttributeError(f"No such field in animator '{attr}'") from None
def __repr__(self):
    """Debug representation listing the known animations and the owning entity."""
    return "component Animator({}) of entity '{}'".format(self.animations, self.entity)
def on_init(self, ev: Init, dispatch):
    """
    Event hook invoked when the component is initialised.

    Intended to start the animations; not implemented yet (TODO).
    """
def add_transition(self, from_: Union[Animation, AnimState], to: Animation, forward_condition: Condition = ALWAYS,
                   back_condition: Condition = None):
    """
    Add a transition between two animations.

    :param from_: the animation where the transition begin
    :param to: the animation where the transition should go next
    :param forward_condition: the condition to fulfill in order to switch to the next animation
    :param back_condition: the condition to fulfill in order to switch back to the previous animation

    Usage :
        >>> animator.add_transition(
        >>>     from_=idle, to=run,
        >>>     forward_condition=Condition(is_running=True),
        >>>     back_condition=Condition(is_running=False)
        >>> )
    """
    if forward_condition != ALWAYS and (not forward_condition.prop in self._conditions):
        raise KeyError(f"The condition {forward_condition} is not in animator")
    # Add conditions
    self._conditions[forward_condition.prop].add(forward_condition)
    if back_condition is not None:
        if not back_condition.prop in self._conditions:
            raise KeyError(f"The condition {back_condition} is not in animator")
        else:
            # NOTE(review): this re-adds forward_condition (a no-op on a
            # set). It looks like a copy-paste slip that was probably meant
            # to register back_condition — which is otherwise only
            # registered below when from_ != ANY. Confirm intent.
            self._conditions[forward_condition.prop].add(forward_condition)
    # Create transitions for the animations states
    if from_ == ANY:
        # Transition valid from any state
        self.transitions[(ANY, forward_condition)] = Transition(None, to)
    else:
        if not from_.name in self.animations:
            raise AttributeError(f"the animation '{from_.name}' is not in animator")
        else:
            self.transitions[(self.animations[from_.name], forward_condition)] = Transition(from_, to)
    if not to.name in self.animations:
        raise AttributeError(f"the animation '{to}' is not in animator")
    # Register the reverse transition (to -> from_) when requested
    if back_condition is not None and from_ != ANY:
        self.transitions[(self.animations[to.name], back_condition)] = Transition(to, from_)
        self._conditions[back_condition.prop].add(back_condition)
# Sentinel state used to register transitions valid from *any* animation.
ANY = AnimState(None)

if __name__ == '__main__':
    # Small smoke-test / usage demo.
    from types import SimpleNamespace

    from kge.utils.condition import C
    c1 = C(speed__gt=.1)
    c2 = C(speed__lt=1)
    c3 = C(is_jumping=True)
    # BUG FIX: bare `object()` instances have no __dict__, so attribute
    # assignment on them raises AttributeError; use a SimpleNamespace.
    o = SimpleNamespace()
    o.speed = 1
    o.jumping = True
    idle = Animation(o, [])
    run = Animation(o, [])
    animator = Animator(idle, run)
| 33.147287
| 119
| 0.56326
|
13df136b853fb96c17e92504ccf459781694a138
| 587
|
py
|
Python
|
nestedtensor/__init__.py
|
swolchok/nestedtensor
|
3300e3bc42394ab4bb226cef8acc631012a72ef0
|
[
"BSD-3-Clause"
] | 229
|
2019-10-23T22:05:35.000Z
|
2022-03-23T00:42:43.000Z
|
nestedtensor/__init__.py
|
swolchok/nestedtensor
|
3300e3bc42394ab4bb226cef8acc631012a72ef0
|
[
"BSD-3-Clause"
] | 110
|
2019-11-07T19:58:00.000Z
|
2022-02-22T21:29:45.000Z
|
nestedtensor/__init__.py
|
swolchok/nestedtensor
|
3300e3bc42394ab4bb226cef8acc631012a72ef0
|
[
"BSD-3-Clause"
] | 25
|
2019-10-28T15:01:21.000Z
|
2022-02-22T21:23:41.000Z
|
import torch
from .nested.creation import as_nested_tensor
from .nested.creation import nested_tensor
from .nested.masking import nested_tensor_from_tensor_mask
from .nested.masking import nested_tensor_from_padded_tensor
from .nested.nested import NestedTensor
from .nested.nested import to_nested_tensor
from .nested.nested import transpose_nchw_nhwc
from .nested.nested import transpose_nhwc_nchw
from .nested.fuser import fuse_conv_bn
from .nested.fuser import fuse_conv_relu
from .nested.fuser import fuse_conv_add_relu
from . import nested
from . import _C
from . import nn
| 25.521739
| 60
| 0.846678
|
c4d4a6936a8780a85257bbf35db4e0c3b9f39968
| 72
|
py
|
Python
|
BasicOperations/01_01_PyQt4/OpenDesktop.py
|
UpSea/midProjects
|
ed6086e74f68b1b89f725abe0b270e67cf8993a8
|
[
"MIT"
] | 1
|
2018-07-02T13:54:49.000Z
|
2018-07-02T13:54:49.000Z
|
BasicOperations/01_01_PyQt4/OpenDesktop.py
|
UpSea/midProjects
|
ed6086e74f68b1b89f725abe0b270e67cf8993a8
|
[
"MIT"
] | null | null | null |
BasicOperations/01_01_PyQt4/OpenDesktop.py
|
UpSea/midProjects
|
ed6086e74f68b1b89f725abe0b270e67cf8993a8
|
[
"MIT"
] | 3
|
2016-05-28T15:13:02.000Z
|
2021-04-10T06:04:25.000Z
|
# Open the strategy folder with the OS default file browser.
# NOTE(review): relies on QtGui/QtCore and `strategyFolder` being defined
# by the hosting environment — confirm before running standalone.
QtGui.QDesktopServices.openUrl(QtCore.QUrl('file:///' + strategyFolder))
| 72
| 72
| 0.777778
|
024503ecb6d01684e6f4efca5f2fa20a56501337
| 2,476
|
py
|
Python
|
main.py
|
MarkusAmshove/vcard-to-doist
|
0f6c9b08ede8d4c15f48019f6abb5957074047ac
|
[
"MIT"
] | null | null | null |
main.py
|
MarkusAmshove/vcard-to-doist
|
0f6c9b08ede8d4c15f48019f6abb5957074047ac
|
[
"MIT"
] | null | null | null |
main.py
|
MarkusAmshove/vcard-to-doist
|
0f6c9b08ede8d4c15f48019f6abb5957074047ac
|
[
"MIT"
] | null | null | null |
import argparse
import vobject
from dateutil.parser import parse
from todoist.api import TodoistAPI
class Contact:
    """A named contact together with their birthday."""

    def __init__(self, name, bday):
        # Display name and birthday (a datetime/date-like object).
        self.name = name
        self.bday = bday

    def pretty_print(self):
        """Dump all attributes to stdout (debug helper)."""
        print(vars(self))

    def format_date(self):
        """Return the birthday formatted as 'DD Month', e.g. '05 March'."""
        return self.bday.strftime('%d %B')
def read_contacts(filepath):
    """Parse a vCard file and return a list of Contacts that have a birthday.

    Cards lacking either a BDAY or an N (structured name) property are
    skipped silently.
    """
    with open(filepath) as handle:
        components = vobject.readComponents(handle.read())
    contacts = []
    for card in components:
        # Only cards carrying both a birthday and a structured name are usable.
        if not (hasattr(card, 'bday') and hasattr(card, 'n')):
            continue
        birthday = parse(card.bday.value)
        full_name = f"{card.n.value.given} {card.n.value.family}"
        contacts.append(Contact(full_name, birthday))
    return contacts
def find_project_id(api, projectname):
    """Return the id of the Todoist project called *projectname*.

    :raises Exception: when no project with that name exists
    """
    matching_ids = (p['id'] for p in api.projects.all() if p['name'] == projectname)
    for project_id in matching_ids:
        return project_id
    raise Exception(f"Project with name {projectname} could not be found")
def find_item(api, projectid, name):
    """Return the id of the first task in the project whose content
    contains *name*, or None when no such task exists."""
    for task in api.projects.get_data(projectid)['items']:
        if name in task['content']:
            return task['id']
    return None
def sync_tasks(apikey, projectname, contacts):
    """Create or refresh a recurring birthday-reminder task per contact.

    Tasks live in the Todoist project *projectname*; an existing task whose
    content contains the contact's name is updated instead of duplicated.
    Commits are flushed every 60 queued actions — presumably to stay under
    the Todoist sync API's command batch limit; confirm the exact limit.
    """
    api = TodoistAPI(apikey)
    api.sync()
    project_id = find_project_id(api, projectname)
    print(f"Found project {projectname} with id {project_id}")
    actions_done = 0
    for contact in contacts:
        # Flush the queued commands before the batch grows too large.
        if actions_done >= 60:
            api.commit()
            actions_done = 0
        # Recurring due date: fires every year on the contact's birthday.
        due_date = {"string": f"every {contact.format_date()}", "lang": "en", "is_recurring": True}
        item_id = find_item(api, project_id, contact.name)
        if item_id is None:
            api.items.add(f"{contact.name} gratulieren", project_id=project_id, due=due_date, priority=4)
        else:
            item = api.items.get_by_id(item_id)
            item.update(due=due_date)
        actions_done = actions_done + 1
    api.commit()
# Command-line entry point: read contacts from a vCard file and sync
# recurring birthday reminders into the given Todoist project.
if __name__ == '__main__':
    argparser = argparse.ArgumentParser()
    argparser.add_argument('projectname', type=str, help="Todoist Project name to put reminders in")
    argparser.add_argument('file', type=str, help="Path to vcard file")
    argparser.add_argument('apikey', type=str, help="Todoist API Key")
    args = argparser.parse_args()
    contacts = read_contacts(args.file)
    sync_tasks(args.apikey, args.projectname, contacts)
| 30.195122
| 105
| 0.626414
|
6bd78b6f943092929d7fd648469f9b9bbd98a23e
| 26,362
|
py
|
Python
|
VaxDesign.py
|
sarisabban/VexDesign
|
52c8a0ee14b37bdc13d64542a02b4a125ec787d7
|
[
"MIT"
] | 9
|
2019-10-21T06:03:06.000Z
|
2021-04-19T10:35:26.000Z
|
VaxDesign.py
|
sarisabban/VexDesign
|
52c8a0ee14b37bdc13d64542a02b4a125ec787d7
|
[
"MIT"
] | null | null | null |
VaxDesign.py
|
sarisabban/VexDesign
|
52c8a0ee14b37bdc13d64542a02b4a125ec787d7
|
[
"MIT"
] | 6
|
2020-03-14T16:43:17.000Z
|
2020-09-22T18:25:04.000Z
|
#!/usr/bin/ python3
'''
MIT License
Copyright (c) 2017 Sari Sabban
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
# Standard library
import argparse
import datetime
import glob
import json
import os
import random
import re
import subprocess
import sys
import time
import urllib.request

# Third-party
import bs4
import numpy
import requests
import Bio.PDB
from pyrosetta import *
from pyrosetta.toolbox import *
print('\x1b[32m\n ██╗ ██╗ █████╗ ██╗ ██╗\n ██║ ██║██╔══██╗╚██╗██╔╝\n ██║ ██║███████║ ╚███╔╝ \n ╚██╗ ██╔╝██╔══██║ ██╔██╗ \n ╚████╔╝ ██║ ██║██╔╝ ██╗\n ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝\n \n ██████╗ ███████╗███████╗██╗ ██████╗ ███╗ ██╗\n ██╔══██╗██╔════╝██╔════╝██║██╔════╝ ████╗ ██║\n ██║ ██║█████╗ ███████╗██║██║ ███╗██╔██╗ ██║\n ██║ ██║██╔══╝ ╚════██║██║██║ ██║██║╚██╗██║\n ██████╔╝███████╗███████║██║╚██████╔╝██║ ╚████║\n ╚═════╝ ╚══════╝╚══════╝╚═╝ ╚═════╝ ╚═╝ ╚═══╝\n\x1b[35m\n ╔═╗┬ ┬┌┬┐┌─┐ ╦ ╦┌─┐┌─┐┌─┐┬┌┐┌┌─┐ ╔╦╗┌─┐┌─┐┬┌─┐┌┐┌\n ╠═╣│ │ │ │ │ ╚╗╔╝├─┤│ │ ││││├┤ ║║├┤ └─┐││ ┬│││\n ╩ ╩└─┘ ┴ └─┘ ╚╝ ┴ ┴└─┘└─┘┴┘└┘└─┘ ═╩╝└─┘└─┘┴└─┘┘└┘\n\x1b[33m\nAuthored by Sari Sabban on 31-May-2017\nsari.sabban@gmail.com\nSource at https://sarisabban.github.io/VaxDesign/\n\x1b[36m--------------------------------------------------------\x1b[0m')
# Initialise PyRosetta: quiet output plus extra rotamer-sampling flags.
init('-out:level 0 -no_his_his_pairE -extrachi_cutoff 1 -multi_cool_annealer 10 -ex1 -ex2 -use_input_sc')
print('\x1b[36m--------------------------------------------------------\x1b[0m')
# Command-line interface: each flag selects one pipeline stage (or the full
# protocol); the flags' extra values are consumed positionally via sys.argv
# inside main().
parser = argparse.ArgumentParser(description='A script that autonomously designs a vaccine\nAuthored by Sari Sabban on 31-May-2017 (sari.sabban@gmail.com)\nhttps://sarisabban.github.io/VaxDesign/')
parser.add_argument('-s', '--scaffold', nargs='+', metavar='', help='search for scaffolds')
parser.add_argument('-p', '--protocol', nargs='+', metavar='', help='Run full protocol')
parser.add_argument('-m', '--motif', nargs='+', metavar='', help='Isolate motif')
parser.add_argument('-r', '--receptor', nargs='+', metavar='', help='Isolate receptor')
parser.add_argument('-g', '--graft', nargs='+', metavar='', help='Graft motif onto scaffold')
parser.add_argument('-f', '--ffd', nargs='+', metavar='', help='FunFolDes protocol')
parser.add_argument('-d', '--design', nargs='+', metavar='', help='Sequence design the structure around the motif')
parser.add_argument('-F', '--fragments', nargs='+', metavar='', help='Generate and analyse fragments')
args = parser.parse_args()
def Motif(PDB_ID, Chain, Motif_From, Motif_To):
    '''
    Download a protein structure from RCSB and isolate a user-defined
    motif from it, writing the renumbered result to motif.pdb.

    PDB_ID     : RCSB identifier of the protein to download
    Chain      : chain carrying the motif
    Motif_From : first residue number of the motif (inclusive)
    Motif_To   : last residue number of the motif (inclusive)
    '''
    #Get the protein
    os.system('wget -q http://www.rcsb.org/pdb/files/{}.pdb'.format(PDB_ID))
    pdb = open('{}.pdb'.format(PDB_ID), 'r')
    #Isolate the motif
    Motif = open('motif.pdb', 'w')
    # count : running atom serial number for the output file
    # num   : running residue number for the output file
    # AA2   : residue-number field of the previous line (detects residue change)
    count = 0
    num = 0
    AA2 = None
    for line in pdb:
        if not line.startswith('ATOM'):
            continue
        if not line.split()[4] == Chain:
            continue
        try:
            if int(Motif_From) <= int(line.split()[5]) <= int(Motif_To):
                count += 1
                # line[23:27] is the residue sequence-number column
                AA1 = line[23:27]
                if not AA1 == AA2:
                    num += 1
                # Rebuild the ATOM record: renumber atoms/residues from 1
                # and force the chain ID to 'A'
                final_line = line[:7]+\
                    '{:4d}'.format(count)+\
                    line[11:17]+\
                    line[17:21]+\
                    'A'+\
                    '{:4d}'.format(num)+\
                    line[26:]
                AA2 = AA1
                Motif.write(final_line)
        except:
            # NOTE(review): the bare except silently skips any unparsable
            # line (e.g. non-numeric residue fields) — confirm intended.
            continue
    Motif.close()
def Receptor(PDB_ID, Chain):
    '''
    Isolate a single chain from a downloaded .pdb file.

    Writes the ATOM records of the requested chain to receptor.pdb, then
    deletes the source <PDB_ID>.pdb file to keep the working directory
    clean.

    PDB_ID : base name (without extension) of the .pdb file in the cwd
    Chain  : chain identifier (5th whitespace field of the ATOM record)
    '''
    # BUG FIX: use context managers — the original never closed the input
    # file handle. Also guard against blank lines, which made the original
    # raise IndexError on linesplit[0].
    with open('{}.pdb'.format(PDB_ID), 'r') as pdb, \
            open('receptor.pdb', 'w') as receptor:
        for line in pdb:
            linesplit = line.split()
            # Keep only ATOM records belonging to the requested chain
            if linesplit and linesplit[0] == 'ATOM' and linesplit[4] == Chain:
                receptor.write(line)
    #Keep working directory clean, remove the protein's original file
    os.remove('{}.pdb'.format(PDB_ID))
def Graft(receptor, motif, scaffold):
    '''
    Grafts a motif onto a protein scaffold structure.

    receptor : path to the receptor .pdb file (context structure)
    motif    : path to the motif .pdb file
    scaffold : a PyRosetta Pose of the scaffold (mutated in place)

    Generates grafted.pdb and returns a tuple: [0] is the residue number
    where the motif starts and [1] where it ends.
    '''
    #Setup motif hotspots: every motif residue is a hotspot ("1:2:...:N")
    motifpose = pose_from_pdb(motif)
    spots = list()
    for resi in range(motifpose.total_residue()):
        spots.append(str(resi+1))
    hotspots = ':'.join(spots)
    #Setup score function
    scorefxn = get_fa_scorefxn()
    #Setup filters
    FLTR = rosetta.protocols.simple_filters.PackStatFilter()
    #Setup grafting mover
    graft = pyrosetta.rosetta.protocols.motif_grafting.movers.MotifGraftMover()
    graft.init_parameters(
        receptor,   # context_structure
        motif,      # motif_structure
        1.0,        # RMSD_tolerance
        2.0,        # NC_points_RMSD_tolerance
        0.5,        # gp_r_clash_atom_scale
        5,          # clash_score_cutoff
        1,          # min_fragment_size
        '0:0',      # combinatory_fragment_size_delta
        '0:0',      # max_fragment_replacement_size_delta
        'ALA',      # clash_test_residue
        hotspots,   # hotspots
        True,       # full_motif_bb_alignment
        False,      # allow_independent_alignment_per_fragment
        True,       # graft_only_hotspots_by_replacement
        False,      # only_allow_if_N_point_match_aa_identity
        False,      # only_allow_if_C_point_match_aa_identity
        True,       # revert_graft_to_native_sequence
        False,      # allow_repeat_same_graft_output
        1.0,        # output_cluster_tolerance
        FLTR)       # output_filter
    graft.apply(scaffold)
    # Relax the grafted structure before extracting it
    relax = pyrosetta.rosetta.protocols.relax.FastRelax()
    relax.set_scorefxn(scorefxn)
    relax.apply(scaffold)
    scaffold.dump_pdb('temp.pdb')
    #Extract grafted structure (chain B of the dumped complex)
    pdb = open('temp.pdb', 'r')
    Structure = open('temp2.pdb', 'w')
    for line in pdb:
        linesplit = line.split()
        if linesplit != []:
            if linesplit[0] == 'ATOM':
                if linesplit[4] == 'B':
                    Structure.write(line)
    Structure.close()
    #Keep working directory clean
    os.remove('temp.pdb')
    #Renumber .pdb file starting at 1 (same scheme as Motif())
    newpdb = open('temp2.pdb', 'r')
    thenewfile = open('grafted.pdb', 'w')
    count = 0
    num = 0
    AA2 = None
    for line in newpdb:
        count += 1
        AA1 = line[23:27]
        if not AA1 == AA2:
            num += 1
        final_line = line[:7]+\
            '{:4d}'.format(count)+\
            line[11:17]+\
            line[17:21]+\
            'A'+\
            '{:4d}'.format(num)+\
            line[26:]
        AA2 = AA1
        thenewfile.write(final_line)
    thenewfile.close()
    os.remove('temp2.pdb')
    #Identify start and finish residue number of inserted motif by
    #locating the motif sequence within the grafted sequence
    motifpose = pose_from_pdb('motif.pdb')
    graftpose = pose_from_pdb('grafted.pdb')
    MOTIF = motifpose.sequence()
    GRAFT = graftpose.sequence()
    start = GRAFT.index(MOTIF) + 1
    finish = start + len(MOTIF) - 1
    return((start, finish))
def Fragments(filename, username):
    '''
    Submits the pose to the Robetta server (http://www.robetta.org) for
    fragment generation that are used for the Abinitio folding simulation.
    Then measures the RMSD for each fragment at each position and chooses
    the lowest RMSD. Then averages out the lowest RMSDs. Then plots the
    lowest RMSD fragment for each positon. Generates the 3-mer file, the
    9-mer file, the PsiPred file, the RMSD vs Position PDF plot with the
    averaged fragment RMSD printed in the plot. Returns the average RMSD.
    '''
    pose = pose_from_pdb(filename)
    sequence = pose.sequence()
    # Submit the sequence to the Robetta fragment server
    web = requests.get('http://robetta.bakerlab.org/fragmentsubmit.jsp')
    payload = { 'UserName': username,
                'Email': '',
                'Notes': '{}'.format(filename.split('.')[0]),
                'Sequence': sequence,
                'Fasta': '',
                'Code': '',
                'ChemicalShifts': '',
                'NoeConstraints': '',
                'DipolarConstraints': '',
                'type': 'submit'}
    session = requests.session()
    response = session.post('http://robetta.bakerlab.org/fragmentsubmit.jsp', data=payload, files=dict(foo='bar'))
    # Scrape the job id from the response page
    for line in response:
        line = line.decode()
        if '<a href="fragmentqueue.jsp?id=' in line:
            JobID = re.findall('<a href="(fragmentqueue.jsp\?id=[0-9].*)">', line)
    ID = JobID[0].split('=')[-1]
    JobURL = 'http://old.robetta.org/fragmentqueue.jsp?id=' + ID
    print('\u001b[32m[+] Fragments submitted to Robetta.org server with JOB ID: {}\u001b[0m'.format(str(ID)))
    # Poll the queue page until the job is complete
    while True:
        Job = urllib.request.urlopen(JobURL)
        jobdata = bs4.BeautifulSoup(Job, 'lxml')
        status = jobdata.find('td', string='Status: ').find_next().text
        if status == 'Complete':
            print(datetime.datetime.now().strftime('%d %B %Y @ %H:%M'),\
                'Status:', '\u001b[32m{}\u001b[0m'.format(status))
            break
        elif status == 'Active':
            print(datetime.datetime.now().strftime('%d %B %Y @ %H:%M'),\
                'Status:', '\u001b[33m{}\u001b[0m'.format(status))
            time.sleep(180)
        else:
            print(datetime.datetime.now().strftime('%d %B %Y @ %H:%M'),\
                'Status:', '\u001b[31m{}\u001b[0m'.format(status))
            time.sleep(300)
            continue
    # Save the sequence next to the downloaded fragment files
    sequence = pose.sequence()
    fasta = open('structure.fasta', 'w')
    fasta.write(sequence)
    fasta.close()
    time.sleep(1)
    print('\u001b[36m[+] Downloading 3mer fragment file ...\u001b[0m')
    os.system('wget -q http://old.robetta.org/downloads/fragments/'+str(ID)+'/aat000_03_05.200_v1_3')
    print('\u001b[36m[+] Downloading 9mer fragment file ...\u001b[0m')
    os.system('wget -q http://old.robetta.org/downloads/fragments/'+str(ID)+'/aat000_09_05.200_v1_3')
    print('\u001b[36m[+] Downloading PSIPRED file ...\u001b[0m')
    os.system('wget -q http://old.robetta.org/downloads/fragments/'+str(ID)+'/t000_.psipred_ss2')
    os.rename('aat000_03_05.200_v1_3', 'frags.200.3mers')
    os.rename('aat000_09_05.200_v1_3', 'frags.200.9mers')
    os.rename('t000_.psipred_ss2', 'pre.psipred.ss2')
    # Read the sequence length out of the 9-mer fragment file
    frag = open('frags.200.9mers', 'r')
    data = open('RMSDvsPosition.dat', 'w')
    AVG = []
    for line in frag:
        if line.lstrip().startswith('position:'):
            line = line.split()
            size = line[1]
    frag.close()
    print('\u001b[36m[+] Analysing fragment quality...\u001b[0m')
    # For every position, apply each 9-mer fragment to a copy of the pose
    # and keep the lowest CA-RMSD against the original
    for i in range(1, int(size)+1):
        rmsd = []
        pose_copy = pyrosetta.Pose()
        pose_copy.assign(pose)
        frames = pyrosetta.rosetta.core.fragment.FrameList()
        fragset = pyrosetta.rosetta.core.fragment.ConstantLengthFragSet(9)
        fragset.read_fragment_file('frags.200.9mers')
        fragset.frames(i, frames)
        movemap = MoveMap()
        movemap.set_bb(True)
        for frame in frames:
            for frag_num in range(1, frame.nr_frags()+1):
                frame.apply(movemap, frag_num, pose_copy)
                RMSD = rosetta.core.scoring.CA_rmsd(pose, pose_copy)
                rmsd.append(RMSD)
                lowest = min(rmsd)
                pose_copy.assign(pose)
        AVG.append(lowest)
        data.write(str(i)+'\t'+str(lowest)+'\n')
    data.close()
    Average_RMSD = sum(AVG) / len(AVG)
    # Plot lowest-RMSD-per-position with gnuplot
    gnuplot = open('gnuplot_sets', 'w')
    gnuplot.write("""
reset\n
set terminal postscript\n
set output './plot_frag.pdf'\n
set encoding iso_8859_1\n
set term post eps enh color\n
set xlabel 'Position'\n
set ylabel 'RMSD (\\305)'\n
set yrange [0:]\n
set xrange [0:]\n
set xtics auto\n
set xtics rotate\n
set grid front\n
unset grid\n
set title 'Fragment Quality'\n
set key off\n
set boxwidth 0.5\n
set style fill solid\n
set label 'Average RMSD = {}' at graph 0.01 , graph 0.95 tc lt 7 font 'curior 12'\n
plot 'RMSDvsPosition.dat' with boxes\n
exit
""".format(str(round(Average_RMSD, 3))))
    gnuplot.close()
    os.system('gnuplot < gnuplot_sets')
    os.remove('gnuplot_sets')
    os.remove('RMSDvsPosition.dat')
    print('\u001b[34m[+] Fragment average RMSD:\u001b[0m {}'.format(round(Average_RMSD, 3)))
    return(Average_RMSD)
def ScaffoldSearch(Protein, RecChain, Chain, Motif_From, Motif_To, Directory):
    '''
    Search a scaffold database for structures onto which the motif can be
    successfully grafted; successful candidates are copied to ./Scaffolds.

    Protein    : RCSB PDB ID of the protein carrying the motif
    RecChain   : receptor chain
    Chain      : chain carrying the motif
    Motif_From : first residue of the motif
    Motif_To   : last residue of the motif
    Directory  : path to the directory of scaffold .pdb files
    '''
    # Motif() writes motif.pdb as a side effect and returns None, so the
    # original `hotspots = Motif(...)` binding was meaningless.
    Motif(Protein, Chain, Motif_From, Motif_To)
    Receptor(Protein, RecChain)
    os.mkdir('Scaffolds')
    database = os.listdir(Directory)
    os.chdir(Directory)
    for scaffold in database:
        try:
            # BUG FIX: the original called the undefined name MotifGraft(),
            # and its NameError was silently swallowed by the bare except —
            # so no scaffold could ever be accepted. Call the real Graft()
            # mover instead (it raises when grafting fails).
            Graft('../receptor.pdb', '../motif.pdb', pose_from_pdb(scaffold))
            os.system('cp {} ../Scaffolds'.format(scaffold))
        except Exception:
            continue
    os.remove('../receptor.pdb')
    os.remove('../motif.pdb')
class RosettaDesign(object):
    '''
    Sequence-design helper: builds layer-aware resfiles for a structure
    (surface / boundary / core, classified via DSSP solvent accessibility)
    and runs fixed-backbone, flexible-backbone, or surface-only design
    while keeping a user-defined motif untouched.
    '''
    def __init__(self, filename, Motif_From, Motif_To):
        ''' Generate the resfile '''
        # Maximum accessible surface area per residue (Wilke scale),
        # used to normalise DSSP relative accessibility into absolute SASA.
        AminoAcid = {   'A':129, 'P':159, 'N':195, 'H':224,
                        'V':174, 'Y':263, 'C':167, 'K':236,
                        'I':197, 'F':240, 'Q':225, 'S':155,
                        'L':201, 'W':285, 'E':223, 'T':172,
                        'M':224, 'R':274, 'G':104, 'D':193}
        self.filename = filename
        parser = Bio.PDB.PDBParser()
        structure = parser.get_structure('{}'.format(filename), filename)
        dssp = Bio.PDB.DSSP(structure[0], filename, acc_array='Wilke')
        sasalist = []
        for aa in dssp:
            # Classify burial: C = core, B = boundary, S = surface
            sasa = AminoAcid[aa[1]]*aa[3]
            if sasa <= 25: sasa = 'C'
            elif 25 < sasa < 40: sasa = 'B'
            elif sasa >= 40: sasa = 'S'
            # Collapse DSSP secondary structure into H / S / L
            if aa[2] == 'G' or aa[2] == 'H' or aa[2] == 'I': ss = 'H'
            elif aa[2] == 'B' or aa[2] == 'E': ss = 'S'
            elif aa[2] == 'S' or aa[2] == 'T' or aa[2] == '-': ss = 'L'
            # (residue number, residue, secondary structure, burial layer)
            sasalist.append((aa[0], aa[1], ss, sasa))
        # .resfile: allowed amino acids per position, by layer and SS
        resfile = open('.resfile', 'a')
        resfile.write('NATRO\nSTART\n')
        for n, r, a, s in sasalist:
            if s == 'S' and a == 'L': line = '{} A PIKAA PGNQSTDERKH\n'.format(n)
            elif s == 'S' and a == 'H': line = '{} A PIKAA EHKQR\n'.format(n)
            elif s == 'S' and a == 'S': line = '{} A PIKAA DEGHKNPQRST\n'.format(n)
            elif s == 'B' and a == 'L': line = '{} A PIKAA ADEFGHIKLMNPQRSTVWY\n'.format(n)
            elif s == 'B' and a == 'H': line = '{} A PIKAA ADEHIKLMNQRSTVWY\n'.format(n)
            elif s == 'B' and a == 'S': line = '{} A PIKAA DEFHIKLMNQRSTVWY\n'.format(n)
            elif s == 'C' and a == 'L': line = '{} A PIKAA AFGILMPVWY\n'.format(n)
            elif s == 'C' and a == 'H': line = '{} A PIKAA AFILMVWY\n'.format(n)
            elif s == 'C' and a == 'S': line = '{} A PIKAA FILMVWY\n'.format(n)
            resfile.write(line)
        resfile.close()
        # .resfile2: same as .resfile minus the motif positions (kept NATRO)
        resfile = open('.resfile', 'r')
        resfile2 = open('.resfile2', 'a')
        resfile2.write('NATRO\nSTART\n')
        next(resfile)
        next(resfile)
        for res in resfile:
            if not int(Motif_From) <= int(res.split()[0]) <= int(Motif_To):
                resfile2.write(res)
        resfile2.close()
        self.SASA = sasalist
        # aa_composition file
        with open('.comp', 'w') as comp:
            comp.write("""
PENALTY_DEFINITION
PROPERTIES AROMATIC
NOT_PROPERTIES POLAR CHARGED
FRACTION 0.1
PENALTIES 100 0 100
DELTA_START -1
DELTA_END 1
BEFORE_FUNCTION CONSTANT
AFTER_FUNCTION CONSTANT
END_PENALTY_DEFINITION
""")
        # netcharge file
        with open('.charge', 'w') as comp:
            comp.write("""
DESIRED_CHARGE 0
PENALTIES_CHARGE_RANGE -1 1
PENALTIES 10 0 10
BEFORE_FUNCTION QUADRATIC
AFTER_FUNCTION QUADRATIC
""")
        # Attach composition/charge constraints to the working pose
        self.pose = pose_from_pdb(self.filename)
        comp = pyrosetta.rosetta.protocols.aa_composition.AddCompositionConstraintMover()
        comp.create_constraint_from_file('.comp')
        comp.apply(self.pose)
        charge = pyrosetta.rosetta.protocols.aa_composition.AddNetChargeConstraintMover()
        charge.create_constraint_from_file('.charge')
        charge.apply(self.pose)
        self.starting_pose = Pose()
        self.starting_pose.assign(self.pose)
        self.scorefxn = get_fa_scorefxn()
        # scorefxn_G: design score function with extra guidance terms
        self.scorefxn_G = get_fa_scorefxn()
        AAcomp      = pyrosetta.rosetta.core.scoring.ScoreType.aa_composition
        NETq        = pyrosetta.rosetta.core.scoring.ScoreType.netcharge
        AArep       = pyrosetta.rosetta.core.scoring.ScoreType.aa_repeat
        ASPpen      = pyrosetta.rosetta.core.scoring.ScoreType.aspartimide_penalty
        HBnet       = pyrosetta.rosetta.core.scoring.ScoreType.hbnet
        MHCep       = pyrosetta.rosetta.core.scoring.ScoreType.mhc_epitope
        VOIDpen     = pyrosetta.rosetta.core.scoring.ScoreType.voids_penalty
        ABurUnsatPen= pyrosetta.rosetta.core.scoring.ScoreType.approximate_buried_unsat_penalty
        BurUnsatPen = pyrosetta.rosetta.core.scoring.ScoreType.buried_unsatisfied_penalty
        #self.scorefxn_G.set_weight(AAcomp, 1.00)
        #self.scorefxn_G.set_weight(NETq, 1.00)
        #self.scorefxn_G.set_weight(HBnet, 1.00)
        #self.scorefxn_G.set_weight(VOIDpen, 0.10)
        self.scorefxn_G.set_weight(AArep, 1.00)
        self.scorefxn_G.set_weight(ASPpen, 1.00)
        self.scorefxn_G.set_weight(MHCep, 0.00)
        self.scorefxn_G.set_weight(BurUnsatPen, 1.00)
        self.scorefxn_G.set_weight(ABurUnsatPen, 5.00)
        self.relax = pyrosetta.rosetta.protocols.relax.FastRelax()
        self.relax.set_scorefxn(self.scorefxn)
    def __del__(self):
        ''' Remove the resfile '''
        # NOTE(review): raises OSError if a file is already gone — relies
        # on __init__ having completed; confirm acceptable at interpreter
        # shutdown.
        os.remove('.resfile')
        os.remove('.resfile2')
        os.remove('.comp')
        os.remove('.charge')
        for f in glob.glob('f[il]xbb.fasc'): os.remove(f)
    def choose(self):
        ''' Choose the lowest scoring structure '''
        try: scorefile = open('fixbb.fasc', 'r')
        except: scorefile = open('flxbb.fasc', 'r')
        score = 0
        name = None
        for line in scorefile:
            # NOTE(review): requires `json` imported at module level — the
            # visible import block of this file does not include it.
            line = json.loads(line)
            score2 = line.get('total_score')
            if score2 < score:
                score = score2
                name = line.get('decoy')
        os.system('mv {} structure.pdb'.format(name))
        for f in glob.glob('f[il]xbb_*'): os.remove(f)
    def fixbb_motif(self):
        '''
        Applies RosettaDesign with a fixed back bone to
        change the structure's amino acids (one layer at a
        time - like in the Design_Layer method) except for
        a desired continuous motif sequence while maintaining
        the same backbone
        '''
        pose = pose_from_pdb(self.filename)
        starting_pose = Pose()
        starting_pose.assign(pose)
        scorefxn = get_fa_scorefxn()
        relax = pyrosetta.rosetta.protocols.relax.FastRelax()
        relax.set_scorefxn(scorefxn)
        # .resfile2 excludes the motif, so motif residues stay fixed
        resfile = rosetta.core.pack.task.operation.ReadResfile('.resfile2')
        task = pyrosetta.rosetta.core.pack.task.TaskFactory()
        task.push_back(resfile)
        # Fixed backbone: only side chains move
        movemap = MoveMap()
        movemap.set_bb(False)
        movemap.set_chi(True)
        fixbb = pyrosetta.rosetta.protocols.denovo_design.movers.FastDesign()
        fixbb.set_task_factory(task)
        fixbb.set_movemap(movemap)
        fixbb.set_scorefxn(scorefxn)
        job = PyJobDistributor('fixbb', 1, scorefxn)
        job.native_pose = starting_pose
        while not job.job_complete:
            pose.assign(starting_pose)
            fixbb.apply(pose)
            relax.apply(pose)
            job.output_decoy(pose)
    def flxbb_motif(self):
        '''
        Applies RosettaDesign with a flexible back bone to
        change the structure's amino acids (one layer at a
        time - like in the Design_Layer method) except for
        a desired continuous motif sequence while maintaining
        the same backbone
        '''
        pose = pose_from_pdb(self.filename)
        starting_pose = Pose()
        starting_pose.assign(pose)
        scorefxn = get_fa_scorefxn()
        relax = pyrosetta.rosetta.protocols.relax.FastRelax()
        relax.set_scorefxn(scorefxn)
        resfile = rosetta.core.pack.task.operation.ReadResfile('.resfile2')
        task = pyrosetta.rosetta.core.pack.task.TaskFactory()
        task.push_back(resfile)
        # Flexible backbone: backbone and side chains both move
        movemap = MoveMap()
        movemap.set_bb(True)
        movemap.set_chi(True)
        flxbb = pyrosetta.rosetta.protocols.denovo_design.movers.FastDesign()
        flxbb.set_task_factory(task)
        flxbb.set_movemap(movemap)
        flxbb.set_scorefxn(scorefxn)
        job = PyJobDistributor('flxbb', 1, scorefxn)
        job.native_pose = starting_pose
        while not job.job_complete:
            pose.assign(starting_pose)
            flxbb.apply(pose)
            relax.apply(pose)
            job.output_decoy(pose)
    def surf(self, motif_list):
        '''
        Applies RosettaDesign with a fixed backbone to change only the
        structure's surface amino acids except for the desired motif
        '''
        pose = pose_from_pdb(self.filename)
        starting_pose = Pose()
        starting_pose.assign(pose)
        scorefxn = get_fa_scorefxn()
        relax = pyrosetta.rosetta.protocols.relax.FastRelax()
        relax.set_scorefxn(scorefxn)
        packtask = standard_packer_task(pose)
        pyrosetta.rosetta.core.pack.task.parse_resfile(pose, packtask, '.resfile')
        # Freeze the motif plus every non-surface (core/boundary) residue
        Motif = motif_list
        for s in self.SASA:
            if s[3] != 'S': Motif.append(s[0])
        for aa in Motif: packtask.temporarily_set_pack_residue(int(aa), False)
        fixbb = pyrosetta.rosetta.protocols.minimization_packing.PackRotamersMover(scorefxn, packtask, 10)
        job = PyJobDistributor('fixbb', 1, scorefxn)
        job.native_pose = starting_pose
        while not job.job_complete:
            pose.assign(starting_pose)
            fixbb.apply(pose)
            relax.apply(pose)
            job.output_decoy(pose)
def FFL(Motif, Scaffold, Motif_From, Motif_To, username):
    '''
    Placeholder for the Fold From Loop (FunFolDes) protocol.

    All arguments are accepted for interface compatibility but unused;
    the protocol is not yet exposed through PyRosetta.
    '''
    message = '\x1b[31m[-] Fold From Loop is not yet fully available in PyRosetta\x1b[0m'
    print(message)
def protocol(Protein, RChain, Chain, Motif_from, Motif_to, Scaffold, Choice, UserName):
    '''
    Run the full vaccine-design pipeline: import the scaffold, isolate the
    motif and receptor, graft the motif onto the scaffold, then run 20
    rounds of sequence design (fixbb / flxbb / surface, per *Choice*) each
    followed by Robetta fragment generation; every round's outputs go into
    a numbered directory under ./Vaccine.
    '''
    #0. Make directory
    os.makedirs('Vaccine', exist_ok=True)
    os.chdir('Vaccine')
    print('\x1b[32m[+] Project directory created\x1b[0m')
    #1. Import scaffold
    os.system('mv ../{} .'.format(Scaffold))
    pose = pose_from_pdb(Scaffold)
    print('\x1b[32m[+] Imported scaffold\x1b[0m')
    #2. Isolate motif
    Motif(Protein, Chain, Motif_from, Motif_to)
    print('\x1b[32m[+] Isolated motif\x1b[0m')
    #3. Isolate receptor
    Receptor(Protein, RChain)
    print('\x1b[32m[+] Isolated receptor\x1b[0m')
    #4. Graft motif onto scaffold
    print('\x1b[33m[+] Grafting...\x1b[0m')
    MotifPosition = Graft('receptor.pdb', 'motif.pdb', pose)
    print('\x1b[32m[+] Grafted motif at positions: {} to {}\x1b[0m'.format(MotifPosition[0], MotifPosition[1]))
    #5. Fold From Loop
    #FFD('motif.pdb', 'grafted.pdb', MotifPosition, UserName)
    #print('\x1b[32m[+] Fold From Loop completed\x1b[0m')
    #6. RosettaDesign the structure around the motif and 7. generate fragments
    RD = RosettaDesign('grafted.pdb', MotifPosition[0], MotifPosition[1])
    for i in range(1, 21):
        if Choice == 'fixbb':
            print('\x1b[33m[+] Fixbb designing...\x1b[0m')
            RD.fixbb_motif()
        elif Choice == 'flxbb':
            print('\x1b[33m[+] Flxbb designing...\x1b[0m')
            RD.flxbb_motif()
        elif Choice == 'surface':
            print('\x1b[33m[+] Surface designing...\x1b[0m')
            # BUG FIX: the original reused loop variable `i` for the motif
            # positions, clobbering the outer design counter used to name
            # the output directory below.
            motiflist = list(range(MotifPosition[0], MotifPosition[1]+1))
            RD.surf(motiflist)
        # BUG FIX: the original `for f in glob.glob(...): f = f` left `f`
        # undefined (NameError) when no design output file existed.
        designs = glob.glob('f[il]xbb_0.pdb')
        if designs:
            os.system('mv {} structure.pdb'.format(designs[0]))
        Fragments('structure.pdb', UserName)
        # Archive this round's results in a numbered directory
        os.system('mkdir {}'.format(str(i)))
        os.system('mv structure.pdb structure.fasta {}'.format(str(i)))
        os.system('mv plot_frag.pdf {}'.format(str(i)))
        os.system('mv pre.psipred.ss2 frags.200.3mers frags.200.9mers {}'.format(str(i)))
    print('\x1b[32m[+++] Vaccine structure completed\x1b[0m')
def main():
    '''
    Dispatch on the CLI flag chosen by the user; each branch reads its
    extra arguments positionally from sys.argv (argparse only selects the
    mode, it does not bind the values).
    '''
    if args.scaffold: # Search for scaffolds
        print('\x1b[33m[.] Searching for scaffolds...\x1b[0m')
        ScaffoldSearch( sys.argv[2],    # PDB ID
                        sys.argv[3],    # Receptor chain
                        sys.argv[4],    # Motif chain
                        sys.argv[5],    # Motif from
                        sys.argv[6],    # Motif to
                        sys.argv[7])    # Directory of scaffolds
        print('\x1b[32m[+] Search completed\x1b[0m')
    elif args.protocol: # Run full protocol
        protocol(   sys.argv[2],    # PDB ID
                    sys.argv[3],    # Receptor chain
                    sys.argv[4],    # Motif chain
                    sys.argv[5],    # Motif from
                    sys.argv[6],    # Motif to
                    sys.argv[7],    # Scaffold PDB file name
                    sys.argv[8],    # RosettaDesign choice
                    sys.argv[9])    # Robetta server username
    elif args.motif: # Isolate motif
        Motif(  sys.argv[2],    # PDB ID
                sys.argv[3],    # Motif chain
                sys.argv[4],    # Motif from
                sys.argv[5])    # Motif to
        os.remove('{}.pdb'.format(sys.argv[2]))
        print('\x1b[32m[+] Motif isolated\x1b[0m')
    elif args.receptor: # Isolate receptor
        RCSB = 'http://www.rcsb.org/pdb/files'
        os.system('wget -q {}/{}.pdb'.format(RCSB, sys.argv[2]))
        Receptor(   sys.argv[2],    # PDB ID
                    sys.argv[3])    # Receptor chain
        print('\x1b[32m[+] Receptor isolated\x1b[0m')
    elif args.graft: # Graft motif onto scaffold
        print('\x1b[33m[.] Grafting...\x1b[0m')
        pose = pose_from_pdb(   sys.argv[4])    # Scaffold PDB file name
        MotifPosition = Graft(  sys.argv[2],    # Receptor PDB file name
                                sys.argv[3],    # Motif PDB file name
                                pose)
        print('\x1b[32m[+] Grafted motif onto scaffold between positions: {} and {}\x1b[0m'.format(MotifPosition[0], MotifPosition[1]))
    elif args.ffd: # Fold From Loop
        FFL(    sys.argv[2],    # Motif PDB file name
                sys.argv[3],    # Scaffold PDB file name
                sys.argv[4],    # Motif on scaffold from
                sys.argv[5],    # Motif on scaffold to
                sys.argv[6])    # Robetta server username
        #print('\x1b[32m[+] Fold From Loop completed\x1b[0m')
    elif args.design: # Sequence design the structure around the motif
        if sys.argv[2] == 'fixbb': # Choice
            print('\x1b[33m[.] Fixbb designing...\x1b[0m')
            RD = RosettaDesign( sys.argv[3],    # Scaffold PDB file name
                                sys.argv[4],    # Motif on scaffold from
                                sys.argv[5])    # Motif on scaffold to
            RD.fixbb_motif()
            print('\x1b[32m[+] Design complete\x1b[0m')
        elif sys.argv[2] == 'flxbb': # Choice
            print('\x1b[33m[.] Flxbb designing...\x1b[0m')
            RD = RosettaDesign( sys.argv[3],    # Scaffold PDB file name
                                sys.argv[4],    # Motif on scaffold from
                                sys.argv[5])    # Motif on scaffold to
            RD.flxbb_motif()
            print('\x1b[32m[+] Design complete\x1b[0m')
        elif sys.argv[2] == 'surface': # Choice
            print('\x1b[33m[.] Surface designing...\x1b[0m')
            # NOTE(review): RosettaDesign.__init__ takes three arguments —
            # this one-argument call looks like it would raise TypeError;
            # confirm against the intended surface-design entry point.
            RD = RosettaDesign( sys.argv[3])    # Scaffold PDB file name
            RD.surf(    sys.argv[4:])   # Motif amino acid list
            print('\x1b[32m[+] Design complete\x1b[0m')
    elif args.fragments: # Generate fragments
        Fragments(  sys.argv[2],    # Filename
                    sys.argv[3])    # Username
        print('\x1b[32m[+] Fragments generated\x1b[0m')

if __name__ == '__main__': main()
| 39.28763
| 895
| 0.654768
|
645d752108bb592b0a6562acd01c200e6cb77f6b
| 13,915
|
py
|
Python
|
Code & Design/UV-Field Functions/surface_code.py
|
ytakzk/Gradual-Assemblies
|
48579e7a2d73d51b95a685fc1b757024c96011d4
|
[
"MIT"
] | null | null | null |
Code & Design/UV-Field Functions/surface_code.py
|
ytakzk/Gradual-Assemblies
|
48579e7a2d73d51b95a685fc1b757024c96011d4
|
[
"MIT"
] | null | null | null |
Code & Design/UV-Field Functions/surface_code.py
|
ytakzk/Gradual-Assemblies
|
48579e7a2d73d51b95a685fc1b757024c96011d4
|
[
"MIT"
] | null | null | null |
# surface code
__author__ = "Wenqian"
__version__ = "2019.02.03"
import Rhino.Geometry as rg
import scriptcontext as sc
import math as ma
import rhinoscriptsyntax as rs
import copy
u_count
v_count
class BeamNet(object):
    """
    Generate beam axis lines, their orientation planes and dowel connection
    lines from a surface (Grasshopper / RhinoCommon script class).

    NOTE(review): the beam/dowel counts are read from the component-level
    globals ``u_count`` and ``v_count`` inside get_uv(); they are not
    constructor arguments.
    """
    def __init__(self,srf,rebuild_srf,beam_shift,curvature_factor_u,curvature_factor_v,offset_x,offset_y,seam_crv,seam_side,edge_side):
        """
        Attributes:
            srf: base surface (rg.Surface)
            rebuild_srf: bool, rebuild extracted iso-curves after sampling
            beam_shift: per-interval parametric shift for beam end points
            curvature_factor_u/v: weights for curvature-based regrading
            offset_x/y: in-plane offsets used to place dowel end points
            seam_crv: list of seam guide curves
            seam_side: list of sides (0 = first row, 1 = last row) per seam curve
            edge_side: list of naked-edge sides (0/1) that get an extra dowel
        """
        self.srf = srf
        # Store the flag: the original read a same-named Grasshopper global
        # from inside get_uv() and ignored this constructor argument.
        self.rebuild_srf = rebuild_srf
        #self.beam_gap = beam_gap
        #self.beam_average = beam_average_length
        self.c_factor_u = curvature_factor_u*1000
        self.c_factor_v = curvature_factor_v*10000
        self.u_shift = beam_shift
        self.offset_x = offset_x
        self.offset_y = offset_y
        # get beams
        self.u_crvs = []
        self.v_crvs = []
        self.beam_crvs = []#nested: one list of rg.Line per u-row
        self.beam_planes = []#nested: matching orientation planes
        self.get_uv()
        #self.u_crvs = self.redraw_u_crvs_based_on_curvature()#apply if want to adjust beam length due to curvature
        self.get_beam()
        # get dowels
        # specially deal with seam situation and edge situation
        self.seam_crv_temp = seam_crv #it's a list
        self.seam_side = seam_side #it's a list
        self.seam_flag = False
        self.seam_row_index = []
        self.seam_crv = []
        if len(self.seam_side) != 0:
            self.seam_flag = True
            for crv, side in zip(self.seam_crv_temp,self.seam_side):
                # side 0 -> first beam row, side 1 -> last beam row
                if side == 0:
                    self.seam_row_index.append(0)
                    self.seam_crv.append(crv)
                if side == 1:
                    self.seam_row_index.append(len(self.beam_crvs)-1)
                    self.seam_crv.append(crv)
        self.edge_side = edge_side #it's a list
        self.edge_flag = False
        self.edge_row_index = []
        if len(self.edge_side) != 0:
            self.edge_flag = True
            for side in self.edge_side:
                if side == 0:
                    self.edge_row_index.append(0)
                if side == 1:
                    self.edge_row_index.append(len(self.beam_crvs)-1)
        self.end_dowel_crvs = []#flattened list of dowel rg.Line
        self.get_triple_connections()
        if self.seam_flag:
            self.get_seam_connections()
    def get_uv(self):
        """Sample u/v iso-curves and derive average beam/dowel lengths."""
        # Average the surface length over six v-sections to report beam sizing.
        total_length= 0
        for i in range(6):
            total_length += self.srf.IsoCurve(1, 0.2 * i).GetLength()
        #self.u_count = int(total_length/6/self.beam_average)
        self.u_count = u_count # Grasshopper component input (global)
        self.beam_length = total_length/6/self.u_count
        # get v_count
        total_length= 0
        for i in range(6):
            total_length += self.srf.IsoCurve(0, 0.2 * i).GetLength()
        #self.v_count = int(total_length/6/self.beam_gap)
        self.v_count = v_count # Grasshopper component input (global)
        self.dowel_length = total_length/6/self.v_count
        # get v_crvs -- float() guards against IronPython 2 integer division
        # (the script uses Py2 `print` syntax elsewhere), where v/self.v_count
        # would truncate to 0 for every v < v_count and collapse all the
        # iso-curves onto the surface border.
        for v in range(self.v_count+1):
            v_crv = self.srf.IsoCurve(1,float(v)/self.v_count)
            if self.rebuild_srf:
                v_crv = v_crv.Rebuild(50,3,True)
            self.v_crvs.append(v_crv)
        # get u_crvs
        for u in range(self.u_count+1):
            u_crv = self.srf.IsoCurve(0,float(u)/self.u_count)
            if self.rebuild_srf:
                u_crv = u_crv.Rebuild(50,3,True)
            self.u_crvs.append(u_crv)
    def regrading(self, crvs, count,factor):
        """Redistribute `count` parameters along each curve, biased by curvature.

        Returns a tuple (nested parameter lists, nested sample points):
        parameters cluster where curvature (scaled by `factor`) is high.
        """
        crvs_new_params = []#nested
        current_points_nested = []#nested
        for curves in crvs:
            curve_domain = curves.Domain
            curve_domain_length = curve_domain[1] - curve_domain[0]
            range_value = curve_domain_length /count
            # Sample curvature magnitude at evenly spaced parameters.
            # NOTE(review): local_t starts at 0, which assumes the curve
            # domain starts at 0 (the script normalises domains to 0..1).
            domain_list = []
            temp_vector_length_list = []
            for i in range (count + 1):
                local_t = i * range_value
                domain_list.append(local_t)
                local_curvature_vec = rg.Vector3d(curves.CurvatureAt(local_t))
                vector_length = local_curvature_vec.Length
                temp_vector_length_list.append(vector_length)
            # Build a graph curve of (parameter, scaled cumulative curvature)
            # and divide it evenly; the x-coordinates of the division points
            # are the regraded parameters.  (A dead partial-sums list that was
            # filled but never read has been removed.)
            temp_point_list = []
            c = 0
            previous_value = 0
            for local_vec_l in (temp_vector_length_list):
                x_value = domain_list[c]
                y_value = previous_value * factor
                local_pt = rg.Point3d(x_value, y_value, 0)
                temp_point_list.append(local_pt)
                previous_value += local_vec_l
                c += 1
            curvature_crv = rg.Curve.CreateControlPointCurve(temp_point_list, 10)
            temp_points_curvature_crv = curvature_crv.DivideByCount(count, True)
            t_vals = [curvature_crv.PointAt(pt).X for pt in temp_points_curvature_crv]
            current_points = [curves.PointAt(t_val) for t_val in t_vals]
            current_points_nested.append(current_points)
            crvs_new_params.append(t_vals)
        return crvs_new_params, current_points_nested
    def flip_matrix(self, nested_list):
        """Transpose a rectangular nested list (rows <-> columns)."""
        flipped_nested_list = []
        for i in range(len(nested_list[0])):
            new_list = []
            for j in range(len(nested_list)):
                new_list.append(nested_list[j][i])
            flipped_nested_list.append(new_list)
        return flipped_nested_list
    def redraw_u_crvs_based_on_curvature(self):
        """Optional pass: rebuild u-curves through curvature-regraded points on the v-curves."""
        pts_on_v_crvs = self.regrading(self.v_crvs,self.u_count,self.c_factor_v)[1] #regrading beam length based on curvature
        pts_on_u_crvs = self.flip_matrix(pts_on_v_crvs)
        u_crvs_redraw = []
        for pts in pts_on_u_crvs:
            new_u_crv = rg.Curve.CreateControlPointCurve(pts,10)
            u_crvs_redraw.append(new_u_crv.Rebuild(100,3,True))
        return u_crvs_redraw
    def get_plane(self,crv):
        """Orientation plane at a beam's midpoint: x along the beam, z along the surface normal."""
        center_pt = crv.PointAt(0.5)
        # Call ClosestPoint once instead of twice; it returns (success, u, v).
        closest = self.srf.ClosestPoint(center_pt)
        norm = self.srf.NormalAt(closest[1],closest[2])
        xDirection = crv.PointAt(1) - crv.PointAt(0)
        return rg.Plane(center_pt, xDirection,norm)
    def get_beam(self):
        """Create beam lines (and their planes) between consecutive u-curves."""
        u_crvs_new_params = self.regrading(self.u_crvs,self.v_count,self.c_factor_u)[0] #regrading beam position based on curvature
        for c in range(self.u_count):
            line_start_params = u_crvs_new_params[c]
            line_end_params = u_crvs_new_params[c+1]
            beam_crvs = []
            beam_planes = []
            #line_start_params = self.u_crvs[c].DivideByCount(self.v_count,True)
            #line_end_params = self.u_crvs[c+1].DivideByCount(self.v_count,True)
            for i,p in enumerate(line_start_params[:-1]):
                # Shift the parameter within its interval; (c%2+1) is 1 or 2,
                # so alternate rows get double the shift and stagger.
                # NOTE(review): the end point reuses `p` from the *start*
                # curve rather than line_end_params[i] -- confirm this is
                # intentional before changing it.
                line_start_pt=self.u_crvs[c].PointAt(p+(line_start_params[i+1]-p)*self.u_shift*(c%2+1))
                line_end_pt=self.u_crvs[c+1].PointAt(p+(line_end_params[i+1]-p)*self.u_shift*(c%2+1))
                beam_crv = rg.Line(line_start_pt,line_end_pt)
                beam_crvs.append(beam_crv)
                beam_planes.append(self.get_plane(beam_crv))
            self.beam_crvs.append(beam_crvs)
            self.beam_planes.append(beam_planes)
    def get_rotate_connection_points(self,pt,plane):
        """Return 4 cross points around `pt`: +y, +x, -y, -x offsets in `plane`."""
        # NOTE(review): plane.XAxis/YAxis return value copies in RhinoCommon,
        # so these Unitize() calls do not mutate the plane; the axes of a
        # valid rg.Plane are unit length already.
        plane.XAxis.Unitize()
        plane.YAxis.Unitize()
        p0 = rg.Point3d.Add(pt,plane.YAxis*self.offset_y)
        p1 = rg.Point3d.Add(pt,plane.XAxis*self.offset_x)
        p2 = rg.Point3d.Add(pt,plane.YAxis*-self.offset_y)
        p3 = rg.Point3d.Add(pt,plane.XAxis*-self.offset_x)
        return p0,p1,p2,p3
    def get_a_single_dowel_end_connection(self, crv_top, plane_top, flag_top, crv_bottom, plane_bottom, flag_bottom):
        """Dowel line between matching offset points at one end (flag 0 or 1) of two beams."""
        pts_top = self.get_rotate_connection_points(crv_top.PointAt(flag_top),plane_top)
        pts_down = self.get_rotate_connection_points(crv_bottom.PointAt(flag_bottom),plane_bottom)
        if flag_top == 1:
            return rg.Line(pts_top[3],pts_down[2])
        else:
            return rg.Line(pts_top[1],pts_down[0])
    def add_edge_dowel(self, top_crv, top_pln, top_flag, bottom_crv, bottom_pln, bottom_flag):
        """Extra dowel for beams on a naked edge, inset 30 units from the flipped beam ends."""
        beam_top_flip = copy.deepcopy(top_crv)
        beam_top_flip.Flip()
        beam_top_flip = rg.Line(beam_top_flip.PointAtLength(30),beam_top_flip.PointAtLength(beam_top_flip.Length-30))
        beam_bottom_flip = copy.deepcopy(bottom_crv)
        beam_bottom_flip.Flip()
        beam_bottom_flip = rg.Line(beam_bottom_flip.PointAtLength(30),beam_bottom_flip.PointAtLength(beam_bottom_flip.Length-30))
        return self.get_a_single_dowel_end_connection(beam_top_flip,top_pln,top_flag,beam_bottom_flip,bottom_pln,bottom_flag)
    def get_triple_connections(self):
        """Create end dowels joining each beam to the next beam in its row."""
        # NOTE(review): every call below passes planes_top[i] for both beams;
        # planes_bottom is computed but unused -- confirm whether that is
        # intentional (parallel dowel frames) before changing it.
        for j, (crvs, planes) in enumerate(zip(self.beam_crvs,self.beam_planes)):
            # generate connections in each u row
            beams_top = crvs[:-1]
            planes_top = planes[:-1]
            beams_bottom = crvs[1:]
            planes_bottom = planes[1:]
            # for the seam row, get dowels only on the inner-side end of the beam
            if j in self.seam_row_index:
                for index in self.seam_row_index:
                    if j == index:
                        if j == 0:
                            for i in range(len(beams_top)):
                                self.end_dowel_crvs.append(self.get_a_single_dowel_end_connection(beams_top[i],planes_top[i],1,beams_bottom[i],planes_top[i],1))
                        elif j == len(self.beam_crvs)-1:
                            for i in range(len(beams_top)):
                                self.end_dowel_crvs.append(self.get_a_single_dowel_end_connection(beams_top[i],planes_top[i],0,beams_bottom[i],planes_top[i],0))
            # for other rows, get dowels on both ends of the beam
            else:
                for i in range(len(beams_top)):
                    self.end_dowel_crvs.append(self.get_a_single_dowel_end_connection(beams_top[i],planes_top[i],0,beams_bottom[i],planes_top[i],0))
                    self.end_dowel_crvs.append(self.get_a_single_dowel_end_connection(beams_top[i],planes_top[i],1,beams_bottom[i],planes_top[i],1))
            # for edge rows (naked edges), add an extra inset dowel
            if j in self.edge_row_index:
                for index in self.edge_row_index:
                    if j == index:
                        if j == 0:
                            for i in range(len(beams_top)):
                                self.end_dowel_crvs.append(self.add_edge_dowel(beams_top[i],planes_top[i],1,beams_bottom[i],planes_top[i],1))
                        elif j ==len(self.beam_crvs)-1:
                            for i in range(len(beams_top)):
                                self.end_dowel_crvs.append(self.add_edge_dowel(beams_top[i],planes_top[i],0,beams_bottom[i],planes_top[i],0))
    def get_seam_connections(self):
        """Create dowels along seam rows, oriented consistently with the seam guide curve."""
        for i in range(len(self.seam_side)):
            crvs = copy.deepcopy(self.beam_crvs[self.seam_row_index[i]])
            plns = copy.deepcopy(self.beam_planes[self.seam_row_index[i]])
            # detect if the dowel direction must be reversed; print() form
            # keeps the debug output and also parses under Python 3
            # (the original Py2-only `print i` blocked any 3.x interpreter).
            print(i)
            if self.seam_crv[i].ClosestPoint(crvs[0].PointAt(0),20000)[1]>self.seam_crv[i].ClosestPoint(crvs[-1].PointAt(0),20000)[1]:
                crvs.reverse()
                plns.reverse()
                plns_flip = [rg.Plane(pln.Origin,pln.XAxis,pln.YAxis*-1) for pln in plns]
                plns = plns_flip
            # generate connections
            beams_top = crvs[:-1]
            planes_top = plns[:-1]
            beams_bottom = crvs[1:]
            planes_bottom = plns[1:]
            for j in range(len(beams_top)):
                self.end_dowel_crvs.append(self.get_a_single_dowel_end_connection(beams_top[j],planes_top[j],self.seam_side[i],beams_bottom[j],planes_top[j],self.seam_side[i]))
# --- component body: normalise the surface, classify seams/edges, build the net ---
# Reparameterise both directions to 0..1 so iso-curve parameters are normalised.
srf.SetDomain(0, rg.Interval(0,1))
srf.SetDomain(1, rg.Interval(0,1))
# Midpoints of the two u-boundary curves act as probes for side detection.
u_0_crv = srf.IsoCurve(0,0)
u_0_crv.Domain = rg.Interval(0,1)
test_pt_0 = u_0_crv.PointAt(0.5)
u_1_crv = srf.IsoCurve(0,1)
u_1_crv.Domain = rg.Interval(0,1)
test_pt_1 = u_1_crv.PointAt(0.5)
# Classify each seam guide curve: side 0 if it lies near the first boundary,
# side 1 if near the second (a curve may register on both).
seam_crv = []
seam_side = []
for c in seam_crvs:
    for side, probe in enumerate((test_pt_0, test_pt_1)):
        if c.ClosestPoint(probe,100)[0]:
            seam_crv.append(c)
            seam_side.append(side)
# Same classification for naked-edge guide curves (sides only, no curve kept).
edge_side = []
for c in edge_crvs:
    for side, probe in enumerate((test_pt_0, test_pt_1)):
        if c.ClosestPoint(probe,100)[0]:
            edge_side.append(side)
# Build the beam network from the component inputs.
test_beams = BeamNet(srf,rebuild_srf,beam_shift,curvature_factor_u,curvature_factor_v,offset_x,offset_y,seam_crv,seam_side,edge_side)
# --- outputs ---
beam_curves = [crv for row in test_beams.beam_crvs for crv in row]
beam_planes = [pln for row in test_beams.beam_planes for pln in row]
dowel_crvs = list(test_beams.end_dowel_crvs)
# First and last columns of the transposed beam grid for assembly.
flipped_crvs = test_beams.flip_matrix(test_beams.beam_crvs)
flipped_plns = test_beams.flip_matrix(test_beams.beam_planes)
assembly_beam_crvs = flipped_crvs[0] + flipped_crvs[-1]
assembly_beam_plns = flipped_plns[0] + flipped_plns[-1]
temp = ['beam average length:',test_beams.beam_length,'dowl average lengh:',test_beams.dowel_length]
| 41.290801
| 176
| 0.627237
|
7d6cdc590947c567d79b245a1da76e82971773ad
| 767
|
py
|
Python
|
changes/api/serializer/models/change.py
|
vault-the/changes
|
37e23c3141b75e4785cf398d015e3dbca41bdd56
|
[
"Apache-2.0"
] | 443
|
2015-01-03T16:28:39.000Z
|
2021-04-26T16:39:46.000Z
|
changes/api/serializer/models/change.py
|
vault-the/changes
|
37e23c3141b75e4785cf398d015e3dbca41bdd56
|
[
"Apache-2.0"
] | 12
|
2015-07-30T19:07:16.000Z
|
2016-11-07T23:11:21.000Z
|
changes/api/serializer/models/change.py
|
vault-the/changes
|
37e23c3141b75e4785cf398d015e3dbca41bdd56
|
[
"Apache-2.0"
] | 47
|
2015-01-09T10:04:00.000Z
|
2020-11-18T17:58:19.000Z
|
from changes.api.serializer import Crumbler, register
from changes.models.change import Change
from changes.utils.http import build_web_uri
@register(Change)
class ChangeCrumbler(Crumbler):
    def crumble(self, instance, attrs):
        """Serialize a Change model into the plain dict used by API responses."""
        change_id = instance.id.hex
        data = {
            'id': change_id,
            'name': instance.label,
            'project': instance.project,
            'author': instance.author,
            'message': instance.message,
            'link': build_web_uri('/changes/%s/' % (change_id,)),
            'dateCreated': instance.date_created.isoformat(),
            'dateModified': instance.date_modified.isoformat(),
        }
        # `last_job` is only present when the query eagerly attached it.
        if hasattr(instance, 'last_job'):
            data['lastBuild'] = instance.last_job
        return data
| 34.863636
| 71
| 0.623207
|
e30b1cbbf4d4d43ba971eb6341d9b4e2ddbd20b5
| 880
|
py
|
Python
|
Deep-Learning-In-Python/Module-2/weight-change-manual.py
|
vishwesh5/Datacamp-Courses
|
f074ec25e373c3d1d2edb1629c5568001aeadec1
|
[
"MIT"
] | null | null | null |
Deep-Learning-In-Python/Module-2/weight-change-manual.py
|
vishwesh5/Datacamp-Courses
|
f074ec25e373c3d1d2edb1629c5568001aeadec1
|
[
"MIT"
] | null | null | null |
Deep-Learning-In-Python/Module-2/weight-change-manual.py
|
vishwesh5/Datacamp-Courses
|
f074ec25e373c3d1d2edb1629c5568001aeadec1
|
[
"MIT"
] | null | null | null |
# DataCamp "Deep Learning in Python" exercise: demonstrate how changing a
# single weight drives the network's prediction error to zero.
# NOTE(review): `np` and `predict_with_network` are supplied by the exercise
# environment; this snippet is not runnable stand-alone.
# The data point you will make a prediction for
input_data = np.array([0, 3])
# Sample weights
weights_0 = {'node_0': [2, 1],
'node_1': [1, 2],
'output': [1, 1]
}
# The actual target value, used to calculate the error
target_actual = 3
# Make prediction using original weights
model_output_0 = predict_with_network(input_data, weights_0)
# Calculate error: error_0
error_0 = model_output_0 - target_actual
# Create weights that cause the network to make perfect prediction (3): weights_1
weights_1 = {'node_0': [2, 1],
'node_1': [1, 2],
'output': [1, 0]
}
# Make prediction using new weights: model_output_1
model_output_1 = predict_with_network(input_data, weights_1)
# Calculate error: error_1
error_1 = model_output_1 - target_actual
# Print error_0 and error_1 (the second should be 0 for a perfect prediction)
print(error_0)
print(error_1)
| 25.142857
| 81
| 0.677273
|
065d9be3370aca32997c5e405854a7bf09a2af74
| 936
|
py
|
Python
|
tests/test_pipe_decorator.py
|
alexpeits/straw
|
55bbbbf2b2bdb617ea889f27aa72ec0455166ebd
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_pipe_decorator.py
|
alexpeits/straw
|
55bbbbf2b2bdb617ea889f27aa72ec0455166ebd
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_pipe_decorator.py
|
alexpeits/straw
|
55bbbbf2b2bdb617ea889f27aa72ec0455166ebd
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
from straw.straw import pipe
@pipe
def cat(pipe_in, *args):
    """Start a pipeline: join the positional arguments with single spaces (piped input is ignored)."""
    joined = ' '.join(args)
    return joined
@pipe
def upper(pipe_in):
    """Uppercase the string received from the previous pipeline stage."""
    result = pipe_in.upper()
    return result
@pipe
def join(pipe_in, delimiter='-'):
    """Replace whitespace runs in the piped-in string with `delimiter`."""
    words = pipe_in.split()
    return delimiter.join(words)
class TestPipeDecorator(unittest.TestCase):
    def test_one(self):
        """A single downstream stage transforms the source output."""
        result = cat('asdf') | upper()
        self.assertEqual(result, 'ASDF')
        result = cat('one two') | join()
        self.assertEqual(result, 'one-two')
        result = cat('one two') | join(delimiter='*')
        self.assertEqual(result, 'one*two')
    def test_multiple(self):
        """Stages compose left to right."""
        result = cat('one two') | upper() | join()
        self.assertEqual(result, 'ONE-TWO')
    def test_string_first(self):
        """A plain string can seed the pipeline instead of cat()."""
        result = 'one two' | upper()
        self.assertEqual(result, 'ONE TWO')
        result = 'one two' | upper() | join(delimiter='*')
        self.assertEqual(result, 'ONE*TWO')
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 20.8
| 55
| 0.592949
|
503777aaf812376ea6e7ceb34e778f50e5490ab4
| 2,020
|
py
|
Python
|
test/functional/p2p_pos_doublespend.py
|
NEWBITALGO/BitalGO
|
f8f5e126a7c808f72ff85b32e28c09a45fe684ca
|
[
"MIT"
] | 2
|
2020-02-11T16:45:37.000Z
|
2020-04-16T15:54:38.000Z
|
test/functional/p2p_pos_doublespend.py
|
NEWBITALGO/BitalGO
|
f8f5e126a7c808f72ff85b32e28c09a45fe684ca
|
[
"MIT"
] | 1
|
2019-07-09T00:08:54.000Z
|
2019-07-09T00:08:54.000Z
|
test/functional/p2p_pos_doublespend.py
|
NEWBITALGO/BitalGO
|
f8f5e126a7c808f72ff85b32e28c09a45fe684ca
|
[
"MIT"
] | 5
|
2019-07-26T22:59:20.000Z
|
2020-10-15T00:33:19.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2019 The BitalGo Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Covers the scenario of a valid PoS block with a valid coinstake transaction where the
coinstake input prevout is double spent in one of the other transactions in the same block.
'''
from time import sleep
from fake_stake.base_test import ALG_FakeStakeTest
class PoSDoubleSpend(ALG_FakeStakeTest):
    def run_test(self):
        """Spam PoS blocks whose coinstake prevout is double-spent, first on the main chain, then on a fork."""
        self.description = "Covers the scenario of a valid PoS block with a valid coinstake transaction where the coinstake input prevout is double spent in one of the other transactions in the same block."
        self.init_test()
        INITIAL_MINED_BLOCKS = 300
        FORK_DEPTH = 30
        self.NUM_BLOCKS = 3
        # 1) mine the base chain
        self.log.info("Mining %d blocks.." % INITIAL_MINED_BLOCKS)
        self.node.generate(INITIAL_MINED_BLOCKS)
        # 2) gather the stakeable prevouts produced by mining
        self.log.info("Collecting all unspent coins which we generated from mining...")
        staking_utxo_list = self.node.listunspent()
        # 3) spam double-spend blocks on the main chain
        self.log.info("-- Main chain blocks first")
        self.test_spam("Main", staking_utxo_list, fDoubleSpend=True)
        sleep(2)
        # 4) mine buffer blocks so the fork starts deeper
        self.log.info("Mining %d more blocks..." % FORK_DEPTH)
        self.node.generate(FORK_DEPTH)
        sleep(2)
        # 5) repeat the spam on a forked chain at random heights
        self.log.info("-- Forked chain blocks now")
        err_msgs = self.test_spam("Forked", staking_utxo_list, fRandomHeight=True, randomRange=FORK_DEPTH, fDoubleSpend=True)
        if err_msgs:
            self.log.error("result: " + " | ".join(err_msgs))
            raise AssertionError("TEST FAILED")
        self.log.info("%s PASSED" % self.__class__.__name__)
# Framework entry point: run this fake-stake scenario directly.
if __name__ == '__main__':
    PoSDoubleSpend().main()
| 36.727273
| 206
| 0.683663
|
2abd98c5adcebf649277b941c0537d2c5e24214e
| 2,266
|
py
|
Python
|
DSA/Python/src/dsa/lib/math/nums/three_sum.py
|
JackieMa000/problems
|
c521558830a0bbf67f94109af92d7be4397d0a43
|
[
"BSD-3-Clause"
] | null | null | null |
DSA/Python/src/dsa/lib/math/nums/three_sum.py
|
JackieMa000/problems
|
c521558830a0bbf67f94109af92d7be4397d0a43
|
[
"BSD-3-Clause"
] | 1
|
2020-10-23T04:06:56.000Z
|
2020-10-23T04:06:56.000Z
|
DSA/Python/src/dsa/lib/math/nums/three_sum.py
|
JackieMa000/problems
|
c521558830a0bbf67f94109af92d7be4397d0a43
|
[
"BSD-3-Clause"
] | null | null | null |
from typing import List
# LeetCode15
class ThreeSum:
    """LeetCode 15: find all unique triplets in `nums` that sum to zero."""

    def threeSum(self, nums: List[int]) -> List[List[int]]:
        """Public entry point; dispatches to the two-pointer variant."""
        return self.threeSum1(nums)

    def threeSum1(self, nums: List[int]) -> List[List[int]]:
        """Sort + two pointers, O(n^2) time, O(1) extra space.

        After sorting, fix the smallest element and walk `lo`/`hi` pointers
        toward each other; duplicate values are skipped on all three
        positions so each triplet appears once.  Note: sorts `nums` in place.
        """
        nums.sort()
        size = len(nums)
        triplets: List[List[int]] = []
        for i in range(size - 2):
            # skip duplicate anchors
            if i >= 1 and nums[i] == nums[i - 1]:
                continue
            lo, hi = i + 1, size - 1
            while lo < hi:
                total = nums[i] + nums[lo] + nums[hi]
                if total < 0:
                    lo += 1
                elif total > 0:
                    hi -= 1
                else:
                    # advance past duplicates before recording the triplet
                    while lo < hi and nums[lo] == nums[lo + 1]:
                        lo += 1
                    while lo < hi and nums[hi] == nums[hi - 1]:
                        hi -= 1
                    triplets.append([nums[i], nums[lo], nums[hi]])
                    lo += 1
                    hi -= 1
        return triplets

    def threeSum2(self, nums: List[int]) -> List[List[int]]:
        """Sort + hash set, O(n^2) time, O(n) extra space.

        For each anchor a = nums[i], scan the suffix keeping a set of seen
        values; b = -(a + c) found in the set means (a, b, c) sums to zero.
        Sorting makes each stored tuple canonical, so the result set
        deduplicates automatically.  Note: sorts `nums` in place.
        """
        nums.sort()
        size = len(nums)
        found = set()
        for i in range(size - 2):
            seen = set()
            for j in range(i + 1, size):
                complement = -(nums[i] + nums[j])
                if complement in seen:
                    found.add((nums[i], complement, nums[j]))
                else:
                    seen.add(nums[j])
        return [list(t) for t in found]
| 31.041096
| 85
| 0.419682
|
0f347299c44057670f20272e2e1ee0b1151fcd1d
| 6,247
|
py
|
Python
|
tests/testflows/rbac/tests/privileges/system/merges.py
|
mcspring/ClickHouse
|
08f713f177f950c2f675c2c75d1261c91066888c
|
[
"Apache-2.0"
] | 18
|
2021-05-29T01:12:33.000Z
|
2021-11-18T12:34:48.000Z
|
tests/testflows/rbac/tests/privileges/system/merges.py
|
mcspring/ClickHouse
|
08f713f177f950c2f675c2c75d1261c91066888c
|
[
"Apache-2.0"
] | null | null | null |
tests/testflows/rbac/tests/privileges/system/merges.py
|
mcspring/ClickHouse
|
08f713f177f950c2f675c2c75d1261c91066888c
|
[
"Apache-2.0"
] | 2
|
2021-07-13T06:42:45.000Z
|
2021-07-21T13:47:22.000Z
|
from testflows.core import *
from testflows.asserts import error
from rbac.requirements import *
from rbac.helper.common import *
import rbac.helper.errors as errors
@TestSuite
def privileges_granted_directly(self, node=None):
"""Check that a user is able to execute `SYSTEM MERGES` commands if and only if
the privilege has been granted directly.
"""
user_name = f"user_{getuid()}"
if node is None:
node = self.context.node
with user(node, f"{user_name}"):
table_name = f"table_name_{getuid()}"
Suite(run=check_privilege, flags=TE,
examples=Examples("privilege on grant_target_name user_name table_name", [
tuple(list(row)+[user_name,user_name,table_name]) for row in check_privilege.examples
], args=Args(name="check privilege={privilege}", format_name=True)))
@TestSuite
def privileges_granted_via_role(self, node=None):
    """Check that a user is able to execute `SYSTEM MERGES` commands if and only if
    the privilege has been granted via role.
    """
    # Fresh user and role names per run so tests are isolated.
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Both context managers create and later drop their objects.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        table_name = f"table_name_{getuid()}"
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        # Same example matrix as the direct-grant suite, but privileges are
        # granted to the role (grant_target_name = role_name).
        Suite(run=check_privilege, flags=TE,
            examples=Examples("privilege on grant_target_name user_name table_name", [
                tuple(list(row)+[role_name,user_name,table_name]) for row in check_privilege.examples
            ], args=Args(name="check privilege={privilege}", format_name=True)))
@TestOutline(Suite)
@Examples("privilege on",[
    ("SYSTEM", "*.*"),
    ("SYSTEM MERGES", "table"),
    ("SYSTEM STOP MERGES", "table"),
    ("SYSTEM START MERGES", "table"),
    ("START MERGES", "table"),
    ("STOP MERGES", "table"),
])
def check_privilege(self, privilege, on, grant_target_name, user_name, table_name, node=None):
    """Run checks for commands that require SYSTEM MERGES privilege.
    """
    if node is None:
        node = self.context.node
    # Exercise both the START and STOP scenarios with identical arguments.
    for scenario in (start_merges, stop_merges):
        Suite(test=scenario, setup=instrument_clickhouse_server_log)(privilege=privilege, on=on, grant_target_name=grant_target_name, user_name=user_name, table_name=table_name)
@TestSuite
def start_merges(self, privilege, on, grant_target_name, user_name, table_name, node=None):
    """Check that user is only able to execute `SYSTEM START MERGES` when they have privilege.
    """
    # Expected failure for the unprivileged cases.
    exitcode, message = errors.not_enough_privileges(name=user_name)
    if node is None:
        node = self.context.node
    # Substitute the concrete table name into the ON clause placeholder.
    on = on.replace("table", f"{table_name}")
    with table(node, table_name):
        # No grant: the statement must be rejected.
        with Scenario("SYSTEM START MERGES without privilege"):
            with When("I check the user can't start merges"):
                node.query(f"SYSTEM START MERGES {table_name}", settings = [("user", f"{user_name}")],
                    exitcode=exitcode, message=message)
        # Granted: the statement must succeed.
        with Scenario("SYSTEM START MERGES with privilege"):
            with When(f"I grant {privilege} on the table"):
                node.query(f"GRANT {privilege} ON {on} TO {grant_target_name}")
            with Then("I check the user can start merges"):
                node.query(f"SYSTEM START MERGES {table_name}", settings = [("user", f"{user_name}")])
        # Granted then revoked: the statement must be rejected again.
        with Scenario("SYSTEM START MERGES with revoked privilege"):
            with When(f"I grant {privilege} on the table"):
                node.query(f"GRANT {privilege} ON {on} TO {grant_target_name}")
            with And(f"I revoke {privilege} on the table"):
                node.query(f"REVOKE {privilege} ON {on} FROM {grant_target_name}")
            with Then("I check the user can't start merges"):
                node.query(f"SYSTEM START MERGES {table_name}", settings = [("user", f"{user_name}")],
                    exitcode=exitcode, message=message)
@TestSuite
def stop_merges(self, privilege, on, grant_target_name, user_name, table_name, node=None):
    """Check that user is only able to execute `SYSTEM STOP MERGES` when they have privilege.
    """
    # Expected failure for the unprivileged cases.
    exitcode, message = errors.not_enough_privileges(name=user_name)
    if node is None:
        node = self.context.node
    # Substitute the concrete table name into the ON clause placeholder.
    on = on.replace("table", f"{table_name}")
    with table(node, table_name):
        # No grant: the statement must be rejected.
        with Scenario("SYSTEM STOP MERGES without privilege"):
            with When("I check the user can't stop merges"):
                node.query(f"SYSTEM STOP MERGES {table_name}", settings = [("user", f"{user_name}")],
                    exitcode=exitcode, message=message)
        # Granted: the statement must succeed.
        with Scenario("SYSTEM STOP MERGES with privilege"):
            with When(f"I grant {privilege} on the table"):
                node.query(f"GRANT {privilege} ON {on} TO {grant_target_name}")
            with Then("I check the user can stop merges"):
                node.query(f"SYSTEM STOP MERGES {table_name}", settings = [("user", f"{user_name}")])
        # Granted then revoked: the statement must be rejected again.
        with Scenario("SYSTEM STOP MERGES with revoked privilege"):
            with When(f"I grant {privilege} on the table"):
                node.query(f"GRANT {privilege} ON {on} TO {grant_target_name}")
            with And(f"I revoke {privilege} on the table"):
                node.query(f"REVOKE {privilege} ON {on} FROM {grant_target_name}")
            with Then("I check the user can't stop merges"):
                node.query(f"SYSTEM STOP MERGES {table_name}", settings = [("user", f"{user_name}")],
                    exitcode=exitcode, message=message)
@TestFeature
@Name("system merges")
@Requirements(
    RQ_SRS_006_RBAC_Privileges_System_Merges("1.0"),
)
def feature(self, node="clickhouse1"):
    """Check the RBAC functionality of SYSTEM MERGES.
    """
    self.context.node = self.context.cluster.node(node)
    # Run both grant paths (direct grant, grant via role) as sub-suites.
    for suite in (privileges_granted_directly, privileges_granted_via_role):
        Suite(run=suite, setup=instrument_clickhouse_server_log)
| 41.370861
| 177
| 0.657756
|
b271eb6a5d97e2bbbb8ef0b472416d253d917534
| 8,260
|
py
|
Python
|
pytket/tests/strategies.py
|
NewGitter2017/tket
|
6ff81af26280770bf2ca80bfb2140e8fa98182aa
|
[
"Apache-2.0"
] | null | null | null |
pytket/tests/strategies.py
|
NewGitter2017/tket
|
6ff81af26280770bf2ca80bfb2140e8fa98182aa
|
[
"Apache-2.0"
] | null | null | null |
pytket/tests/strategies.py
|
NewGitter2017/tket
|
6ff81af26280770bf2ca80bfb2140e8fa98182aa
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019-2021 Cambridge Quantum Computing
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar
from collections import Counter
import hypothesis.strategies as st
from hypothesis.extra.numpy import arrays
from hypothesis.strategies import * # for reexport
from hypothesis.strategies._internal import SearchStrategy
from hypothesis.extra.numpy import arrays
from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar
import numpy as np
import re
from pytket import Circuit, Qubit, Bit
from pytket._tket.circuit import BasisOrder, Node, OpType # type: ignore
from pytket._tket.routing import Architecture # type: ignore
from pytket.pauli import Pauli, QubitPauliString # type: ignore
from pytket.utils import QubitPauliOperator
from pytket.utils.results import KwargTypes
from pytket.utils.outcomearray import OutcomeArray
from pytket.backends.backendresult import BackendResult
from pytket.backends.backendinfo import BackendInfo
# Strategy producing a single classical bit value.
binary_digits = st.sampled_from((0, 1))
# Strategy for unsigned 32-bit register indices.  The original expression
# `1 << 32 - 1` binds as `1 << 31` because `-` has higher precedence than
# `<<`; the name indicates the intended bound is the uint32 maximum 2**32 - 1.
uint32 = st.integers(min_value=1, max_value=(1 << 32) - 1)
# Register names: a lowercase letter followed by word characters.
reg_name_regex = re.compile("[a-z][a-zA-Z0-9_]*")
@st.composite
def qubits(
    draw: Callable,
    name: SearchStrategy[str] = st.from_regex(reg_name_regex, fullmatch=True),
    index: SearchStrategy[int] = uint32,
) -> Bit:
    """Strategy producing a Qubit with a drawn register name and index."""
    reg_name = draw(name)
    reg_index = draw(index)
    return Qubit(reg_name, reg_index)
@st.composite
def circuits(
    draw: Callable[[SearchStrategy[Any]], Any],
    n_qubits: SearchStrategy[int] = st.integers(min_value=0, max_value=4),
    n_bits: SearchStrategy[int] = st.integers(min_value=0, max_value=4),
    depth: SearchStrategy[int] = st.integers(min_value=1, max_value=8),
    name: SearchStrategy[str] = st.text(min_size=0, max_size=6),
    phase: SearchStrategy[float] = st.floats(min_value=-2.0, max_value=2.0),
) -> Circuit:
    """Strategy producing small random Circuits built from Rx/H (and CH/CX
    when more than one qubit is available)."""
    total_qubits = draw(n_qubits)
    total_bits = draw(n_bits)
    circuit = Circuit(total_qubits, total_bits, name=draw(name))
    circuit.add_phase(draw(phase))
    # With no qubits there is nothing to apply gates to.
    if total_qubits == 0:
        return circuit
    for _ in range(draw(depth)):
        # Candidate gate constructors; two-qubit gates need >= 2 qubits.
        gates = [circuit.Rx, circuit.H]
        if total_qubits > 1:
            gates.extend([circuit.CH, circuit.CX])
        gate = draw(st.sampled_from(gates))
        control = draw(st.integers(min_value=0, max_value=total_qubits - 1))
        if gate in (circuit.CH, circuit.CX):
            # Target must differ from control.
            target = draw(
                st.integers(min_value=0, max_value=total_qubits - 1).filter(
                    lambda x: x != control
                )
            )
            gate(control, target)
        if gate == circuit.Rx:
            # Rotation angle in half-turns.
            angle = draw(st.floats(min_value=-2.0, max_value=2.0))
            gate(angle, control)
        if gate == circuit.H:
            gate(control)
    return circuit
@st.composite
def outcomearrays(
    draw: Callable[[SearchStrategy[Any]], Any],
    rows: SearchStrategy[int] = st.integers(min_value=1, max_value=100),
    cols: SearchStrategy[int] = st.integers(min_value=0, max_value=10),
) -> OutcomeArray:
    """Strategy producing OutcomeArray shot tables of random uint8 rows."""
    row = draw(rows)
    col = draw(cols)
    array = draw(arrays(np.uint8, shape=(row, col)))
    # Bit width must be consistent with the byte columns: between
    # (col-1)*8 + 1 and col*8 bits, or 0 when there are no columns.
    width = (
        draw(st.integers(min_value=(col - 1) * 8 + 1, max_value=col * 8)) if col else 0
    )
    return OutcomeArray(array, width=width)
@st.composite
def outcome_counts(
    draw: Callable[[SearchStrategy[Any]], Any],
) -> Counter:
    """Strategy producing a Counter mapping OutcomeArray outcomes to counts."""
    width = draw(st.integers(min_value=0, max_value=20))
    # Ceiling division: bytes needed to hold `width` bits.
    cols = -(-width // 8)
    ar_strat = arrays(np.uint8, shape=(1, cols))
    drawn_arrays = draw(st.lists(ar_strat, min_size=1, max_size=20))
    outcomes = (OutcomeArray(ar, width) for ar in drawn_arrays)
    countstrat = st.integers(min_value=1, max_value=100)
    # One positive count per drawn outcome row.
    count_vals = [draw(countstrat) for i in range(len(drawn_arrays))]
    return Counter(dict(zip(outcomes, count_vals)))
def _gen_unitid(uidtype, index):  # type: ignore
    """Build a UnitID of the given type (Bit/Qubit) named "uid" with `index`."""
    return uidtype(index=index, name="uid")
@st.composite
def architecture(
    draw: Callable[[SearchStrategy[Any]], Any],
) -> Architecture:
    """Strategy producing small random Architecture connectivity graphs."""
    n_nodes = draw(st.integers(min_value=4, max_value=15))
    # `n_edges` was drawn but never used in the original; its name and range
    # indicate it was meant to cap the edge list, so wire it in as max_size.
    n_edges = draw(st.integers(min_value=1, max_value=n_nodes))
    vertex = st.integers(min_value=0, max_value=n_nodes - 1)
    # An edge is a pair of distinct vertex indices.
    edge = st.lists(vertex, min_size=2, max_size=2, unique=True)
    edges = st.lists(edge, max_size=n_edges)
    return Architecture(draw(edges))
@st.composite
def backendinfo(
    draw: Callable[[SearchStrategy[Any]], Any],
) -> BackendInfo:
    """Strategy producing BackendInfo objects with random device metadata."""
    # NOTE(review): the 6..54 range selects a contiguous slice of the OpType
    # enum values presumed to be gate types -- confirm against the enum.
    optypes = [OpType(i) for i in range(6, 54)]
    name = draw(st.text(min_size=1, max_size=30))
    device_name = draw(st.text(min_size=1, max_size=30))
    version = draw(st.text(min_size=1, max_size=5))
    # hardware constraints
    arc = draw(architecture())
    gate_set = draw(st.sets(st.sampled_from(optypes)))
    supports_fast_feedforward = draw(st.booleans())
    supports_reset = draw(st.booleans())
    supports_midcircuit_measurement = draw(st.booleans())
    return BackendInfo(
        name,
        device_name,
        version,
        arc,
        gate_set,
        supports_fast_feedforward,
        supports_reset,
        supports_midcircuit_measurement,
    )
@st.composite
def backendresults(
    draw: Callable[[SearchStrategy[Any]], Any],
) -> BackendResult:
    """Strategy producing one BackendResult of a randomly chosen flavour:
    measured (shots or counts) or statevector-like (state, density matrix
    or unitary)."""
    uid_indexes = st.lists(st.integers(min_value=0, max_value=3))
    measured = draw(st.booleans())
    if measured:
        bit_strat = uid_indexes.map(lambda x: _gen_unitid(Bit, x)) # type: ignore
        # NOTE: `shots` is first a drawn bool, then rebound to the drawn
        # OutcomeArray inside the branch.
        shots = draw(st.booleans())
        if shots:
            ar_strat = outcomearrays()
            shots = draw(ar_strat)
            # One classical bit per column of the shot table.
            cbits = draw(
                st.lists(
                    bit_strat, min_size=shots.width, max_size=shots.width, unique=True
                )
            )
            return BackendResult(c_bits=cbits, shots=shots)
        counts = draw(outcome_counts())
        # All outcomes in the Counter share the same width; peek at one.
        width = next(counts.elements()).width
        cbits = draw(st.lists(bit_strat, unique=True, min_size=width, max_size=width))
        return BackendResult(c_bits=cbits, counts=counts)
    qbstrat = uid_indexes.map(lambda x: _gen_unitid(Qubit, x)) # type: ignore
    qubits_list = draw(st.lists(qbstrat, unique=True, min_size=1, max_size=6))
    n_qb = len(qubits_list)
    # Hilbert-space dimension 2**n_qb.
    dims = 1 << n_qb
    state = draw(st.booleans())
    if state:
        # Column statevector of shape (dims, 1).
        state_ar = draw(
            arrays(
                complex,
                shape=(dims, 1),
                elements=st.complex_numbers(allow_nan=False),
            )
        )
        return BackendResult(q_bits=qubits_list, state=state_ar)
    density_matrix = draw(st.booleans())
    if density_matrix:
        density_matrix_ar = draw(
            arrays(
                complex,
                shape=(dims, dims),
                elements=st.complex_numbers(allow_nan=False),
            )
        )
        return BackendResult(q_bits=qubits_list, density_matrix=density_matrix_ar)
    # Fallback flavour: a (dims, dims) unitary-shaped matrix.
    unitary_ar = draw(
        arrays(
            complex, shape=(dims, dims), elements=st.complex_numbers(allow_nan=False)
        )
    )
    return BackendResult(q_bits=qubits_list, unitary=unitary_ar)
@st.composite
def qubitpaulistrings(
    draw: Callable[[SearchStrategy[Any]], Any],
) -> QubitPauliString:
    """Hypothesis strategy for a QubitPauliString over a unique qubit set."""
    chosen_qubits = draw(st.lists(qubits(), unique=True))
    n_terms = len(chosen_qubits)
    # One Pauli operator per chosen qubit, drawn after the qubits so the
    # list lengths always match.
    paulis = draw(
        st.lists(
            st.sampled_from([Pauli.I, Pauli.X, Pauli.Y, Pauli.Z]),
            min_size=n_terms,
            max_size=n_terms,
        )
    )
    return QubitPauliString(chosen_qubits, paulis)
@st.composite
def qubitpaulioperators(
    draw: Callable[[SearchStrategy[Any]], Any],
) -> QubitPauliOperator:
    """Hypothesis strategy for a QubitPauliOperator with up to 10 terms."""
    # Coefficients are bounded away from zero so terms are non-trivial.
    coefficient_strategy = st.complex_numbers(min_magnitude=0.5, max_magnitude=3)
    term_map = draw(
        st.dictionaries(qubitpaulistrings(), coefficient_strategy, max_size=10)
    )
    return QubitPauliOperator(term_map)
| 34.273859
| 87
| 0.667554
|
f46639ab28aeab5cbbcac4ec8a8d74c8a7119094
| 219
|
py
|
Python
|
intxeger/__init__.py
|
moreati/IntXeger
|
dc56eaaf2b87f839f230133b8a62eb8a08db20de
|
[
"MIT"
] | 4
|
2021-03-09T02:36:18.000Z
|
2022-01-17T06:58:55.000Z
|
intxeger/__init__.py
|
moreati/IntXeger
|
dc56eaaf2b87f839f230133b8a62eb8a08db20de
|
[
"MIT"
] | 5
|
2021-03-08T10:33:04.000Z
|
2021-10-14T17:52:43.000Z
|
intxeger/__init__.py
|
moreati/IntXeger
|
dc56eaaf2b87f839f230133b8a62eb8a08db20de
|
[
"MIT"
] | 2
|
2021-10-07T19:09:36.000Z
|
2022-01-08T08:13:44.000Z
|
"""
IntXeger is a Python library for generating strings from regular expressions.
"""
__author__ = "Kevin Alex Zhang"
__email__ = "kevz@mit.edu"
__version__ = "0.1.1"
from .intxeger import build
__all__ = ("build",)
| 18.25
| 77
| 0.721461
|
7eb70d16040f534391c4f89c778b1fbfae10e822
| 91
|
py
|
Python
|
vmail-manager.py
|
domrim/vmail-manager
|
6ee4a8761d31cd9f35d3bf8f6ec08c049d9563ba
|
[
"MIT"
] | null | null | null |
vmail-manager.py
|
domrim/vmail-manager
|
6ee4a8761d31cd9f35d3bf8f6ec08c049d9563ba
|
[
"MIT"
] | null | null | null |
vmail-manager.py
|
domrim/vmail-manager
|
6ee4a8761d31cd9f35d3bf8f6ec08c049d9563ba
|
[
"MIT"
] | 1
|
2020-10-11T11:02:59.000Z
|
2020-10-11T11:02:59.000Z
|
#!/usr/bin/env python3
# Thin launcher: all logic lives in the vmail_manager package; this file
# only invokes its command-line entry point when executed as a script.
from vmail_manager import cli
if __name__ == "__main__":
    cli()
| 15.166667
| 29
| 0.692308
|
7d689f4c7a4ab6430845f4c212ab50f31cc2e5cf
| 1,203
|
py
|
Python
|
compiler/datasheet/add_db.py
|
im-world/OpenRAM
|
f66aac3264598eeae31225c62b6a4af52412d407
|
[
"BSD-3-Clause"
] | 335
|
2018-03-13T21:05:22.000Z
|
2022-03-30T07:53:25.000Z
|
compiler/datasheet/add_db.py
|
im-world/OpenRAM
|
f66aac3264598eeae31225c62b6a4af52412d407
|
[
"BSD-3-Clause"
] | 87
|
2018-03-06T00:55:51.000Z
|
2022-03-30T19:38:29.000Z
|
compiler/datasheet/add_db.py
|
im-world/OpenRAM
|
f66aac3264598eeae31225c62b6a4af52412d407
|
[
"BSD-3-Clause"
] | 95
|
2018-03-14T16:22:55.000Z
|
2022-03-24T00:34:37.000Z
|
# See LICENSE for licensing information.
#
# Copyright (c) 2016-2021 Regents of the University of California and The Board
# of Regents for the Oklahoma Agricultural and Mechanical College
# (acting for and on behalf of Oklahoma State University)
# All rights reserved.
#
from pathlib import Path
import glob
import os
import sys
# This is the path to the directory you would like to search.
# The directory is searched recursively for .html datasheet files.
path_to_files = sys.argv[1]  # first CLI argument: root of the search tree
def get_file_tree(path):
    """Recursively collect every ``*.html`` file below *path*.

    Returns a list of ``pathlib.Path`` objects, in ``rglob`` order.
    """
    root = Path(path)
    return list(root.rglob("*.html"))
def parse_html(file, comment):
    """Activate a commented-out section of an HTML file in place.

    Removes the opening ``<!--<comment>`` and closing ``<comment>-->``
    markers, leaving the HTML between them live.
    """
    opening = '<!--' + comment
    closing = comment + '-->'
    with open(file, 'r') as handle:
        contents = handle.read()
    # Strip both markers, then rewrite the file with the uncommented text.
    contents = contents.replace(opening, "").replace(closing, "")
    with open(file, 'w') as handle:
        handle.write(contents)
def uncomment(comments, datasheets=None):
    """Uncomment tagged sections in datasheets that opt in via marker files.

    For each datasheet, every tag in *comments* whose matching ``*<tag>``
    file exists in the datasheet's directory is stripped via parse_html().

    Args:
        comments: list of comment tags to look for (e.g. ['.db']).
        datasheets: iterable of datasheet paths; defaults to the module-level
            ``datasheet_list`` built from the command-line path (kept for
            backward compatibility with the original global-based call).
    """
    if datasheets is None:
        datasheets = datasheet_list
    for datasheet in datasheets:
        # Hoisted out of the inner loop: the directory is per-datasheet.
        sheet_dir = os.path.dirname(datasheet)
        for comment in comments:
            # Only uncomment when a sibling marker file (e.g. foo.db) exists.
            if glob.glob(sheet_dir + '/*' + comment):
                parse_html(datasheet, comment)
# Build the datasheet list once, then strip '.db' comment markers from any
# datasheet whose directory contains a matching marker file.
datasheet_list = get_file_tree(path_to_files)
comments = ['.db']
uncomment(comments)
| 24.55102
| 79
| 0.688279
|
e1cf850d2b620e5c2e024db496b7a2a354bc65d5
| 6,988
|
py
|
Python
|
gui/QTabFPC.py
|
JLSirvent/bws-calibration-analysis
|
b2f129e31974c16d7498e105a075b43bfece92c9
|
[
"MIT"
] | null | null | null |
gui/QTabFPC.py
|
JLSirvent/bws-calibration-analysis
|
b2f129e31974c16d7498e105a075b43bfece92c9
|
[
"MIT"
] | null | null | null |
gui/QTabFPC.py
|
JLSirvent/bws-calibration-analysis
|
b2f129e31974c16d7498e105a075b43bfece92c9
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------
# Copyright (c) <2017> <Lionel Garcia>
# BE-BI-PM, CERN (European Organization for Nuclear Research)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# --------------------------------------------------------------------------
#
# Not fully documented
from __future__ import unicode_literals
import numpy as np
import matplotlib
from PyQt5 import QtCore
from PyQt5.QtWidgets import QStackedWidget, QWidget, QVBoxLayout
from matplotlib.backends.backend_qt5 import NavigationToolbar2QT as NavigationToolbar
from gui.mplCanvas import mplCanvas
class QTabFPC(QWidget):
    """Tab widget hosting the FPC summary plot plus its navigation toolbar.

    Holds the IN/OUT scan data (times, angular positions, laser-crossing
    occurrences) as plain attributes; actualise_ax() copies them into the
    embedded `plot` canvas and triggers a redraw.
    """
    def __init__(self, parent=None):
        super(QTabFPC, self).__init__(parent=None)
        self.main_widget = QStackedWidget(self)
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        main_layout = QVBoxLayout(self.main_widget)
        # Matplotlib canvas plus its standard pan/zoom toolbar.
        self.plot = plot(self.main_widget, width=6.5, height=6, dpi=100)
        self.navi_toolbar = NavigationToolbar(self.plot, self)
        main_layout.addWidget(self.navi_toolbar)
        main_layout.addWidget(self.plot)
        # Placeholder data until the setters below are fed real scans.
        self.timeAIN = [0, 1]
        self.timeBIN = [0, 1]
        self.timeAOUT = [0, 1]
        self.timeBOUT = [0, 1]
        self.posAIN = [0, 1]
        self.posBIN = [0, 1]
        self.posAOUT = [0, 1]
        self.posBOUT = [0, 1]
        self.occIN = [0, 1]
        self.occOUT = [0, 1]
        self.focus = 0
        self.setLayout(main_layout)
    # --- setters: stage scan data for the next actualise_ax() call ---
    def set_x_IN_A(self, timeAIN):
        self.timeAIN = timeAIN
    def set_x_OUT_A(self, timeAOUT):
        self.timeAOUT = timeAOUT
    def set_x_IN_B(self, timeBIN):
        self.timeBIN = timeBIN
    def set_x_OUT_B(self, timeBOUT):
        self.timeBOUT = timeBOUT
    def set_y_IN_A(self, posAIN):
        self.posAIN = posAIN
    def set_y_IN_B(self, posBIN):
        self.posBIN = posBIN
    def set_y_OUT_A(self, posAOUT):
        self.posAOUT = posAOUT
    def set_y_OUT_B(self, posBOUT):
        self.posBOUT = posBOUT
    def set_t1(self, occIN):
        self.occIN = occIN
    def set_t2(self, occOUT):
        self.occOUT = occOUT
    def actualise_ax(self):
        """Push the staged data into the canvas and redraw from scratch."""
        self.plot.fig.clear()
        self.plot.timeAIN = self.timeAIN
        self.plot.timeAOUT = self.timeAOUT
        self.plot.timeBIN = self.timeBIN
        self.plot.timeBOUT = self.timeBOUT
        self.plot.posAIN = self.posAIN
        self.plot.posBIN = self.posBIN
        self.plot.posAOUT = self.posAOUT
        self.plot.posBOUT = self.posBOUT
        self.plot.occIN = self.occIN
        self.plot.occOUT = self.occOUT
        self.plot.compute_initial_figure()
        self.plot.draw()
    def wait(self):
        # Intentional no-op placeholder.
        pass
class plot(mplCanvas):
    """Canvas drawing the 2x2 FPC summary figure (IN on top, OUT below).

    Left column: position sampled at a fixed time per scan; right column:
    angular position at laser crossing. Data attributes are assigned
    externally (by QTabFPC.actualise_ax) before compute_initial_figure runs.
    """
    def __init__(self, parent, width, height, dpi):
        # Placeholder scan data; real values are numpy object arrays of
        # per-scan traces — assumed, set by the owning tab (TODO confirm).
        self.timeAIN = [0, 1]
        self.timeBIN = [0, 1]
        self.timeAOUT = [0, 1]
        self.timeBOUT = [0, 1]
        self.posAIN = [0, 1]
        self.posBIN = [0, 1]
        self.posAOUT = [0, 1]
        self.posBOUT = [0, 1]
        self.occIN = [0, 1]
        self.occOUT = [0, 1]
        self.ax1 = 0
        self.foc_marker = 0
        self.color = 0
        self.focus = 0
        super(plot, self).__init__(parent, width, height, dpi)

    def compute_initial_figure(self):
        """Rebuild all four sub-plots from the current data attributes."""
        try:
            self.fig.clear()
            ax1 = self.fig.add_subplot(221)
            ax2 = self.fig.add_subplot(222)
            ax3 = self.fig.add_subplot(223)
            ax4 = self.fig.add_subplot(224)
            self.fig.tight_layout()
            # Pass 1 = IN scans, pass 2 = OUT scans (different sample times).
            for direction in range(1, 3):
                if direction == 1:
                    title = 'IN'
                    ax_time = ax1
                    ax_laser = ax2
                    timeA = self.timeAIN
                    timeB = self.timeBIN
                    PosA = self.posAIN
                    PosB = self.posBIN
                    Occ = self.occIN
                    timems = 30
                else:
                    title = 'OUT'
                    ax_time = ax3
                    ax_laser = ax4
                    timeA = self.timeAOUT
                    timeB = self.timeBOUT
                    PosA = self.posAOUT
                    PosB = self.posBOUT
                    Occ = self.occOUT
                    timems = 340
                pos_at_timeA = []
                pos_at_timeB = []
                # Position of each scan at the first sample past `timems`.
                # (FIX: loop variables no longer shadow the outer loop index;
                # bare `except:` narrowed to `except Exception`.)
                for idx in range(0, PosA.size):
                    try:
                        pos_at_timeA.append(PosA[idx][(np.where(timeA[idx] > timems / 1000)[0][0])])
                    except Exception:
                        print("Error RDS_A")
                for idx in range(0, PosB.size):
                    try:
                        pos_at_timeB.append(PosB[idx][(np.where(timeB[idx] > timems / 1000)[0][0])])
                    except Exception:
                        print("Error RDS_B")
                # rad -> mrad for display.
                ax_time.plot(1e3 * np.asarray(pos_at_timeA), 'ob')
                ax_time.plot(1e3 * np.asarray(pos_at_timeB), 'or')
                ax_time.set_title('Position at a given time ' + title + ' ' + str(timems) + 'ms', loc='left')
                ax_time.set_ylabel('Angular Position [mrad]')
                ax_time.set_xlabel('Scan Number')
                # Plain (non-offset) tick labels for the laser-crossing plot.
                y_formatter = matplotlib.ticker.ScalarFormatter(useOffset=False)
                ax_laser.yaxis.set_major_formatter(y_formatter)
                ax_laser.plot(1e3 * Occ, 'ob')
                ax_laser.set_title('Angular position on Laser Crossing ' + title, loc='left')
                ax_laser.set_ylabel('Angular Position [mrad]')
                ax_laser.set_xlabel('Scan Number')
        except Exception:
            print("Error FPC!")
| 32.351852
| 109
| 0.560819
|
5529a0a89b5b8696a931ff06ea22833d7fefdfbd
| 911
|
py
|
Python
|
functions/sorted.py
|
onrcayci/python3-notes
|
64f6708553a54a2b85641690e5e90444a0a68737
|
[
"MIT"
] | null | null | null |
functions/sorted.py
|
onrcayci/python3-notes
|
64f6708553a54a2b85641690e5e90444a0a68737
|
[
"MIT"
] | null | null | null |
functions/sorted.py
|
onrcayci/python3-notes
|
64f6708553a54a2b85641690e5e90444a0a68737
|
[
"MIT"
] | null | null | null |
"""
sorted
Return a new sorted list from the items in iterable.
"""
nums = [6, 1, 8, 2]
sorted(nums) # [1, 2, 6, 8]
print(nums) # [6, 1, 8, 2]
# for dictionaries, you have to pass in how they will be sorted.
users = [
{"username": "samuel", "tweets": ["I love cake", "I love pie", "hello world!"]},
{"username": "katie", "tweets": ["I love my cat"]},
{"username": "jeff", "tweets": []},
{"username": "bob123", "tweets": []},
{"username": "doggo_luvr", "tweets": ["dogs are the best", "I'm hungry"]},
{"username": "guitar_gal", "tweets": []}
]
sorted(users, key=len) # the list will be sorted depending on the length of the dictionaries
sorted(users, key=lambda user: user['username']) # the list is sorted in an alphabetical order of their username
sorted(users, key=lambda user: len(user['tweets'])) # the list is sorted from the shortest tweets to the longest
| 37.958333
| 121
| 0.621295
|
0dc605a646526daa32c5fdd82eb48b16fa4276a8
| 1,996
|
py
|
Python
|
regression_template.py
|
anatu/ML_working
|
e2de26ab07de0c55862de8045e48cfb904a76b5a
|
[
"MIT"
] | null | null | null |
regression_template.py
|
anatu/ML_working
|
e2de26ab07de0c55862de8045e48cfb904a76b5a
|
[
"MIT"
] | null | null | null |
regression_template.py
|
anatu/ML_working
|
e2de26ab07de0c55862de8045e48cfb904a76b5a
|
[
"MIT"
] | null | null | null |
# POLYNOMIAL REGRESSION
# Data Preprocessing Template

# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# FIX: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import LabelEncoder, OneHotEncoder
from sklearn.preprocessing import PolynomialFeatures
import statsmodels.formula.api as sm

# PREPROCESSING
######################################################################################
# Importing the dataset
dataset = pd.read_csv(r'Position_Salaries.csv')
print(dataset.columns)

# Specify the set of predictor variables to use, and the independent variable.
# 1:2 keeps X two-dimensional (a matrix), as scikit-learn estimators require.
X = dataset.iloc[:, 1:2].values
y = dataset.iloc[:, -1].values

# Splitting the dataset into the Training set and Test set
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)

# Feature Scaling (left disabled in this template)
"""from sklearn.preprocessing import StandardScaler
sc_X = StandardScaler()
X_train = sc_X.fit_transform(X_train)
X_test = sc_X.transform(X_test)
sc_y = StandardScaler()
y_train = sc_y.fit_transform(y_train)"""

# MODELLING
######################################################################################
# Fitting the Regressor to the dataset
# MAKE YOUR REGRESSOR HERE

# Predicting an individual result with the regressor (interpolate or extrapolate)
# FIX: predict expects a 2-D array of samples, not a bare scalar.
y_pred = regressor.predict([[6.5]])

# Visualize the regression results
plt.scatter(X, y, color = 'r')
plt.plot(X, regressor.predict(X))
plt.title("Title here")
plt.xlabel("X-axis label here")
plt.ylabel("Y-axis label here")
plt.show()

# Visualize with high-res curve
X_grid = np.arange(min(X), max(X), 0.1)
X_grid = X_grid.reshape((len(X_grid), 1))
plt.scatter(X, y, color = 'r')
# FIX: plot the dense grid, not the raw X, or the curve is not high-res.
plt.plot(X_grid, regressor.predict(X_grid))
plt.title("Title here")
plt.xlabel("X-axis label here")
plt.ylabel("Y-axis label here")
plt.show()
| 23.761905
| 89
| 0.663828
|
2c32864ce865fb2a2787d7a9dece0222f1b4e52d
| 685
|
py
|
Python
|
14 - Dictionary/Ex_91.py
|
o-Ian/Practice-Python
|
1e4b2d0788e70006096a53a7cf038db3148ba4b7
|
[
"MIT"
] | 4
|
2021-04-23T18:07:58.000Z
|
2021-05-12T11:38:14.000Z
|
14 - Dictionary/Ex_91.py
|
o-Ian/Practice-Python
|
1e4b2d0788e70006096a53a7cf038db3148ba4b7
|
[
"MIT"
] | null | null | null |
14 - Dictionary/Ex_91.py
|
o-Ian/Practice-Python
|
1e4b2d0788e70006096a53a7cf038db3148ba4b7
|
[
"MIT"
] | null | null | null |
from random import randint
from operator import itemgetter

cont = 1
# One d6 roll per player, created in fixed player order (same randint order
# as before, so the random stream is unchanged).
jogadores = {f'jogador{n}': randint(1, 6) for n in range(1, 5)}

# Announce each player's roll.
for nome, dado in jogadores.items():
    print(f'O {nome} tirou {dado}.')

print('O ranking dos jogadores é: ')

# Rolls in descending order, and the (player, roll) pairs ranked the same way.
valores = sorted(jogadores.values(), reverse=True)
lista = sorted(jogadores.items(), key=lambda par: par[1], reverse=True)

for posicao, (nome, dado) in enumerate(lista, start=1):
    print(f'{posicao}° lugar: {nome} tirou {dado}')

'''for c in valores:
    for k, v in jogadores.items():
        if c == v and k not in lista:
            print(f'{cont}° lugar: {k} com {v}.')
            lista.append(k)
            cont += 1'''
| 38.055556
| 120
| 0.605839
|
b3c63a704bae00ad9343f1c3909c3eaaa4d147e5
| 1,186
|
py
|
Python
|
BazelExtensions/acknowledgement_merger.py
|
woshimaliang/PodToBUILD
|
3b0e2a069c010413d730582bf1a4aac2480fa834
|
[
"Apache-2.0"
] | null | null | null |
BazelExtensions/acknowledgement_merger.py
|
woshimaliang/PodToBUILD
|
3b0e2a069c010413d730582bf1a4aac2480fa834
|
[
"Apache-2.0"
] | null | null | null |
BazelExtensions/acknowledgement_merger.py
|
woshimaliang/PodToBUILD
|
3b0e2a069c010413d730582bf1a4aac2480fa834
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Merge acknowledgement plist fragments, de-duplicating licenses by "Title".
#   --merge     write the merged fragment list (intermediate plist array)
#   --finalize  wrap merged fragments in a PreferenceSpecifiers dict
import sys
import plistlib

if len(sys.argv) < 3:
    # FIX: the flags are spelled --merge / --finalize below, so the usage
    # message must advertise them that way.
    print("Usage <--merge|--finalize> output_file [inputs]")
    sys.exit(0)

output = sys.argv[2]
action = sys.argv[1]

merged_fragments = []
seen_licenses = set()
# argv[0..2] are the script name, action flag and output path; the rest are
# input plist files.
for arg in sys.argv[3:]:
    with open(arg, 'rb') as f:
        input_plist = plistlib.load(f, fmt=plistlib.FMT_XML)
    if not input_plist:
        continue
    # Inputs may be a single fragment dict or an array of fragments.
    fragments = input_plist if isinstance(input_plist, list) else [input_plist]
    for fragment in fragments:
        # We only want to insert a given software license 1 time
        title = fragment.get("Title")
        if title in seen_licenses:
            continue
        seen_licenses.add(title)
        merged_fragments.append(fragment)

if action == "--finalize":
    out_plist = {
        "StringsTable": "Acknowledgements",
        "PreferenceSpecifiers": merged_fragments
    }
    with open(output, 'wb') as f:
        plistlib.dump(out_plist, f, fmt=plistlib.FMT_XML)
elif action == "--merge":
    with open(output, 'wb') as f:
        plistlib.dump(merged_fragments, f, fmt=plistlib.FMT_XML)
| 28.926829
| 83
| 0.625632
|
216c0504ab2520653d05c11f7e68a9b8ea55f9d6
| 13,652
|
py
|
Python
|
metalibm-master/metalibm_core/core/ml_optimization_engine.py
|
JeffreySarnoff/UsingSollya
|
bc559b7cfe1b4ce7eb8934d41ddca2670d0e839a
|
[
"MIT"
] | 27
|
2018-03-12T16:49:36.000Z
|
2021-12-15T06:53:55.000Z
|
metalibm-master/metalibm_core/core/ml_optimization_engine.py
|
JeffreySarnoff/UsingSollya
|
bc559b7cfe1b4ce7eb8934d41ddca2670d0e839a
|
[
"MIT"
] | 57
|
2018-03-12T16:49:56.000Z
|
2021-03-04T15:25:39.000Z
|
metalibm-master/metalibm_core/core/ml_optimization_engine.py
|
JeffreySarnoff/UsingSollya
|
bc559b7cfe1b4ce7eb8934d41ddca2670d0e839a
|
[
"MIT"
] | 4
|
2018-03-12T15:40:22.000Z
|
2018-11-28T14:34:54.000Z
|
# -*- coding: utf-8 -*-
###############################################################################
# This file is part of metalibm (https://github.com/kalray/metalibm)
###############################################################################
# MIT License
#
# Copyright (c) 2018 Kalray
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
###############################################################################
# created: Mar 20th, 2014
# last-modified: Mar 7th, 2018
#
# author(s): Nicolas Brunie (nicolas.brunie@kalray.eu)
###############################################################################
import sys
from sollya import inf, sup
from ..utility.log_report import Log
from .ml_operations import *
from .ml_hdl_operations import *
from .ml_formats import *
import metalibm_core.opt.p_function_std as p_function_std
import metalibm_core.opt.p_function_typing as p_function_typing
# high verbosity log-level for optimization engine
LOG_LEVEL_OPT_ENG_VERBOSE = Log.LogLevel("OptEngVerbose")
# Type-escalation table:
#   operation class -> { predicate(result_type) -> { predicate(op_type) ->
#   (lambda op: escalated format) } }
# Used to lift operand formats (e.g. fixed-point operands of a float-result
# Addition are escalated to the operand's own precision).
type_escalation = {
    Addition: {
        lambda result_type: isinstance(result_type, ML_FP_Format): {
            lambda op_type: isinstance(op_type, ML_Fixed_Format):
                lambda op: op.get_precision(),
        },
    },
    Multiplication: {
        lambda result_type: isinstance(result_type, ML_FP_Format): {
            lambda op_type: isinstance(op_type, ML_Fixed_Format):
                lambda op: op.get_precision(),
            lambda op_type: isinstance(op_type, ML_FP_Format):
                lambda op: op.get_precision(),
        },
    },
    FusedMultiplyAdd: {
        lambda result_type: isinstance(result_type, ML_FP_Format): {
            lambda op_type: isinstance(op_type, ML_Fixed_Format):
                lambda op: op.get_precision(),
            lambda op_type: isinstance(op_type, ML_FP_Format):
                lambda op: op.get_precision(),
        },
    },
    ExponentInsertion: {
        # scalar-only escalation: the exponent operand becomes an integer
        # format wide enough for the floating-point target.
        lambda result_type: not isinstance(result_type, ML_VectorFormat) : {
            lambda op_type: isinstance(op_type, ML_FP_Format):
                lambda op: {32: ML_Int32, 64: ML_Int64}[op.get_precision().get_bit_size()],
            lambda op_type: isinstance(op_type, ML_Fixed_Format):
                lambda op: ML_Int32,
        },
    },
}
# Table of transformation rule to translate an operation into its exact (no rounding error) counterpart
# Layout: operation class -> { None -> { predicate(optree, exact_format) ->
#   handler(opt_eng, optree, exact_format) } }; every handler below simply
# swaps the node's precision to the exact format.
exactify_rule = {
    Constant: {
        None: {
            # only untyped constants are swapped to the exact format
            lambda optree, exact_format: optree.get_precision() is None:
                lambda opt_eng, optree, exact_format: opt_eng.swap_format(optree, exact_format),
        },
    },
    Division: {
        None: {
            lambda optree, exact_format: True:
                lambda opt_eng, optree, exact_format: opt_eng.swap_format(optree, exact_format),
        },
    },
    Addition: {
        None: {
            lambda optree, exact_format: True:
                lambda opt_eng, optree, exact_format: opt_eng.swap_format(optree, exact_format),
        },
    },
    Multiplication: {
        None: {
            lambda optree, exact_format: True:
                lambda opt_eng, optree, exact_format: opt_eng.swap_format(optree, exact_format),
        },
    },
    Subtraction: {
        None: {
            lambda optree, exact_format: True:
                lambda opt_eng, optree, exact_format: opt_eng.swap_format(optree, exact_format),
        },
    },
    FusedMultiplyAdd: {
        None: {
            lambda optree, exact_format: True:
                lambda opt_eng, optree, exact_format: opt_eng.swap_format(optree, exact_format),
        },
    },
}
class OptimizationEngine(object):
    """ backend (precision instantiation and optimization passes) class

        Drives the standard Metalibm middle-end pipeline: FMA fusion,
        abstract/concrete precision instantiation, sub-expression sharing,
        FP-exception silencing, processor-support checking and fast-path
        factorization. """
    def __init__(self, processor, default_integer_format = ML_Int32, default_fp_precision = ML_Binary32, change_handle = True, dot_product_enabled = False, default_boolean_precision = ML_Int32):
        """ store the target processor and the default formats used when
            precision instantiation has no better information """
        self.processor = processor
        self.default_integer_format = default_integer_format
        self.default_fp_precision = default_fp_precision
        self.change_handle = change_handle
        self.dot_product_enabled = dot_product_enabled
        self.default_boolean_precision = default_boolean_precision

    def set_dot_product_enabled(self, dot_product_enabled):
        self.dot_product_enabled = dot_product_enabled

    def get_dot_product_enabled(self):
        return self.dot_product_enabled

    def copy_optree(self, optree, copy_map = None):
        """ return a copy of <optree>, sharing already-copied nodes via <copy_map> """
        copy_map = {} if copy_map is None else copy_map
        return optree.copy(copy_map)

    def get_default_fp_precision(self, optree):
        return self.default_fp_precision

    def get_boolean_format(self, optree):
        """ return boolean format to use for optree """
        return self.default_boolean_precision

    def set_boolean_format(self, new_boolean_format):
        self.default_boolean_precision = new_boolean_format

    def merge_abstract_format(self, optree, args, default_precision = None):
        """ merging input format in multi-ary operation to determined result format """
        return p_function_typing.merge_ops_abstract_format(optree, args, default_precision)

    def instantiate_abstract_precision(self, optree, default_precision=None,
                                       memoization_map=None):
        """ recursively determine an abstract precision for each node """
        return p_function_typing.instantiate_abstract_precision(
            optree, default_precision, memoization_map)

    def simplify_fp_context(self, optree):
        """ factorize exception clearing and rounding mode changes accross
            connected DAG of floating-point operations
            (FIX: was declared without <self>, so it could not be called as
            an instance method; body is still a stub) """
        def is_fp_op(_optree):
            return isinstance(_optree.get_precision(), ML_FP_Format)
        if is_fp_op(optree):
            pass

    def instantiate_precision(self, optree, default_precision=None, memoization_map=None):
        """ instantiate final precisions and insert required conversions
            if the operation is not supported """
        memoization_map = memoization_map or {}
        return p_function_typing.instantiate_precision(
            optree, default_precision, memoization_map, backend=self
        )

    def cb_parent_tagging(self, optree, parent_block = None):
        """ tries to factorize subexpression sharing between branch of ConditionBlock """
        if isinstance(optree, ConditionBlock):
            optree.parent = parent_block
            for op in optree.inputs:
                self.cb_parent_tagging(op, parent_block = optree)
        elif not isinstance(optree, ML_LeafNode):
            for op in optree.inputs:
                self.cb_parent_tagging(op, parent_block = parent_block)

    def subexpression_sharing(self, optree):
        return p_function_std.subexpression_sharing(optree)

    def extract_fast_path(self, optree):
        """ extracting fast path (most likely execution path leading
            to a Return operation) from <optree>, or None if no likely
            path can be determined """
        if isinstance(optree, ConditionBlock):
            cond = optree.inputs[0]
            likely = cond.get_likely()
            if likely:
                return self.extract_fast_path(optree.inputs[1])
            elif likely == False and len(optree.inputs) >= 3:
                return self.extract_fast_path(optree.inputs[2])
            else:
                return None
        elif isinstance(optree, Statement):
            for sub_stat in optree.inputs:
                ss_fast_path = self.extract_fast_path(sub_stat)
                # FIX: identity test against None instead of equality
                if ss_fast_path is not None:
                    return ss_fast_path
            return None
        elif isinstance(optree, Return):
            return optree.inputs[0]
        else:
            return None

    def factorize_fast_path(self, optree):
        """ extract <optree>'s fast path and add it to be pre-computed at
            the start of <optree> computation """
        fast_path = self.extract_fast_path(optree)
        if fast_path is None:
            return
        elif isinstance(optree, Statement):
            optree.push(fast_path)
        else:
            Log.report(Log.Error, "unsupported root for fast path factorization")

    def fuse_multiply_add(self, optree, silence = False, memoization = None):
        return p_function_std.fuse_multiply_add(
            optree, silence, memoization, self.change_handle,
            self.dot_product_enabled)

    def silence_fp_operations(self, optree, force = False, memoization_map = None):
        return p_function_std.silence_fp_operations(optree, force, memoization_map)

    def register_nodes_by_tag(self, optree, node_map = None):
        """ build a map tag->optree
            (FIX: mutable default argument replaced by a per-call dict) """
        node_map = {} if node_map is None else node_map
        # registering node if tag is defined
        if optree.get_tag() is not None:
            node_map[optree.get_tag()] = optree
        # processing extra_inputs list
        for op in optree.get_extra_inputs():
            self.register_nodes_by_tag(op, node_map)
        # processing inputs list for non ML_LeafNode optree
        if not isinstance(optree, ML_LeafNode):
            for op in optree.inputs:
                self.register_nodes_by_tag(op, node_map)

    def recursive_swap_format(self, optree, old_format, new_format, memoization_map = None):
        """ recursively replace <old_format> by <new_format> in <optree> """
        memoization_map = {} if memoization_map is None else memoization_map
        if optree in memoization_map:
            return
        if optree.get_precision() is old_format:
            optree.set_precision(new_format)
        memoization_map[optree] = optree
        for node in optree.get_inputs() + optree.get_extra_inputs():
            # FIX: propagate the memoization map; it was previously rebuilt
            # at every level, defeating memoization on shared sub-graphs
            self.recursive_swap_format(node, old_format, new_format, memoization_map)

    def check_processor_support(self, optree, memoization_map = None, debug = False, language = C_Code):
        """ check that every node of <optree> is supported by the target
            processor (FIX: <langage> typo raised NameError; mutable default
            argument removed) """
        memoization_map = {} if memoization_map is None else memoization_map
        return p_function_std.PassCheckProcessorSupport.check_processor_support(self.processor, optree, memoization_map, debug, language)

    def swap_format(self, optree, new_format):
        """ set <optree>'s precision to <new_format> and return the node """
        optree.set_precision(new_format)
        return optree

    def exactify(self, optree, exact_format = ML_Exact, memoization_map = None):
        """ recursively process <optree> according to table exactify_rule
            to translete each node into is exact counterpart (no rounding error)
            , generally by setting its precision to <exact_format>
            (FIX: mutable default argument replaced by a per-call dict) """
        memoization_map = {} if memoization_map is None else memoization_map
        if optree in memoization_map:
            return memoization_map[optree]
        if not isinstance(optree, ML_LeafNode):
            for inp in optree.inputs:
                self.exactify(inp, exact_format, memoization_map)
            for inp in optree.get_extra_inputs():
                self.exactify(inp, exact_format, memoization_map)
        if optree.__class__ in exactify_rule:
            for cond in exactify_rule[optree.__class__][None]:
                if cond(optree, exact_format):
                    new_optree = exactify_rule[optree.__class__][None][cond](self, optree, exact_format)
                    memoization_map[optree] = new_optree
                    return new_optree
        memoization_map[optree] = optree
        return optree

    def static_vectorization(self, optree):
        # placeholder: no static vectorization implemented yet
        pass

    def optimization_process(self, pre_scheme, default_precision, copy = False, fuse_fma = True, subexpression_sharing = True, silence_fp_operations = True, factorize_fast_path = True, language = C_Code):
        """ run the full optimization pipeline on <pre_scheme> and return the
            optimized scheme """
        # copying when required
        scheme = pre_scheme if not copy else pre_scheme.copy({})
        if fuse_fma:
            Log.report(Log.Info, "Fusing FMA")
        scheme_post_fma = scheme if not fuse_fma else self.fuse_multiply_add(scheme, silence = silence_fp_operations)

        Log.report(Log.Info, "Infering types")
        self.instantiate_abstract_precision(scheme_post_fma, None)
        Log.report(Log.Info, "Instantiating precisions")
        self.instantiate_precision(scheme_post_fma, default_precision)

        if subexpression_sharing:
            Log.report(Log.Info, "Sharing sub-expressions")
            self.subexpression_sharing(scheme_post_fma)

        if silence_fp_operations:
            Log.report(Log.Info, "Silencing exceptions in internal fp operations")
            self.silence_fp_operations(scheme_post_fma)

        Log.report(Log.Info, "Checking processor support")
        self.check_processor_support(scheme_post_fma, memoization_map = {}, language = language)

        if factorize_fast_path:
            Log.report(Log.Info, "Factorizing fast path")
            self.factorize_fast_path(scheme_post_fma)
        return scheme_post_fma
| 39.005714
| 204
| 0.652432
|
3bd00b0944ac34b3ae67492007ee6e91fcbe210e
| 2,239
|
py
|
Python
|
BeyondChaos/sillyclowns.py
|
Crimdahl/BeyondChaosRandomizer
|
04f10fec2f017f1581f5d43f5951c457312349e3
|
[
"MIT"
] | 6
|
2021-07-02T05:07:45.000Z
|
2022-03-02T00:26:40.000Z
|
BeyondChaos/sillyclowns.py
|
Crimdahl/BeyondChaosRandomizer
|
04f10fec2f017f1581f5d43f5951c457312349e3
|
[
"MIT"
] | 19
|
2021-02-12T23:12:35.000Z
|
2022-03-24T23:36:36.000Z
|
BeyondChaos/sillyclowns.py
|
Crimdahl/BeyondChaosRandomizer
|
04f10fec2f017f1581f5d43f5951c457312349e3
|
[
"MIT"
] | 23
|
2021-02-20T02:19:39.000Z
|
2022-01-14T16:28:20.000Z
|
from dialoguemanager import get_dialogue, set_dialogue
from utils import open_mei_fallback, Substitution, utilrandom as random, PASSWORDS_TABLE, POEMS_TABLE
POEM_PAGE_BREAK = "<wait 390 frames><wait 1 frame><page>"
def randomize_poem(fout):
    """Pick a random poem from POEMS_TABLE, install it as dialogue 0x9FC and
    patch the ROM so the scene waits long enough to display it.

    Poems in the table are separated by '---' lines; blank lines inside a
    poem become page breaks. `wait` counts the number of timed pauses so the
    scene delay can be sized to the poem.
    """
    poems = []
    with open_mei_fallback(POEMS_TABLE, encoding='utf8') as poems_file:
        current_poem = []
        page_break = False
        wait = 0
        for line in poems_file:
            # Strip comments and surrounding whitespace.
            line = line.split('#')[0].strip()
            if not line:
                # Blank line inside a poem marks a pending page break.
                if current_poem:
                    page_break = True
                continue
            if line.startswith("---") and current_poem:
                # End of poem: append a final pause and store (text, pauses).
                current_poem.append("<wait 390 frames><wait 1 frame>")
                wait += 1
                poems.append(("".join(current_poem), wait))
                current_poem = []
                page_break = False
                wait = 0
                continue
            if page_break:
                current_poem.append(POEM_PAGE_BREAK)
                wait += 1
                page_break = False
            elif current_poem:
                current_poem.append("<line>")
            current_poem.append(line)
    # NOTE(review): a trailing poem not followed by '---' is discarded.
    if not poems:
        return
    text, wait = random.choice(poems)
    set_dialogue(0x9FC, text)
    # 26 frames per pause plus a small margin, capped at one byte.
    wait = min(wait * 26 + 2, 255)
    # Adjust wait to be long enough for the poem
    wait_sub = Substitution()
    wait_sub.set_location(0xC401D)
    wait_sub.bytestring = bytes([0xB5, wait])
    wait_sub.write(fout)
def randomize_passwords():
    """Swap the three canonical clown passwords in dialogue 0xE0 for random
    alternatives drawn from PASSWORDS_TABLE (three '------'-separated groups).
    """
    groups = [[], [], []]
    with open_mei_fallback(PASSWORDS_TABLE) as table:
        group_index = 0
        for raw_line in table:
            entry = raw_line.split('#')[0].strip()
            if not entry:
                continue
            # A separator line advances to the next password group.
            if entry.startswith("------") and group_index < len(groups) - 1:
                group_index += 1
                continue
            groups[group_index].append(entry)
    # Only rewrite the dialogue when every group has at least one candidate.
    if all(groups):
        dialogue = get_dialogue(0xE0)
        for placeholder, pool in zip(("Rose bud", "Courage", "Failure"), groups):
            dialogue = dialogue.replace(placeholder, random.choice(pool))
        set_dialogue(0xE0, dialogue)
| 29.077922
| 101
| 0.558285
|
e6f676a92385c52e93b2b7f231312a53be1ef755
| 2,107
|
py
|
Python
|
Python/Diversified_Portfolio.py
|
thefool76/hacktoberfest2021
|
237751e17a4fc325ded29fca013fb9f5853cd27c
|
[
"CC0-1.0"
] | 448
|
2021-10-01T04:24:14.000Z
|
2022-03-06T14:34:20.000Z
|
Python/Diversified_Portfolio.py
|
Chanaka-Madushan-Herath/hacktoberfest2021
|
8473df9e058ccb6049720dd372342e0ea60f0e59
|
[
"CC0-1.0"
] | 282
|
2021-10-01T04:29:06.000Z
|
2022-03-07T12:42:57.000Z
|
Python/Diversified_Portfolio.py
|
Chanaka-Madushan-Herath/hacktoberfest2021
|
8473df9e058ccb6049720dd372342e0ea60f0e59
|
[
"CC0-1.0"
] | 1,807
|
2021-10-01T04:24:02.000Z
|
2022-03-28T04:51:25.000Z
|
import quandl
import pandas as pd
import matplotlib.pyplot as plt

# Stock Price starts from 2012 and ends at 2017 stock price
start_date = pd.to_datetime("2012-01-01")
end_date = pd.to_datetime("2017-01-01")

# Getting Apple, Cisco, IBM, Amazon adjusted-close prices (column .11).
aapl = quandl.get("WIKI/AAPL.11",start_date=start_date,end_date=end_date)
cisco = quandl.get("WIKI/CSCO.11",start_date=start_date,end_date=end_date)
# FIX: previously fetched WIKI/AAPL.11 again, so "IBM" was Apple data.
ibm = quandl.get("WIKI/IBM.11",start_date=start_date,end_date=end_date)
amzn = quandl.get("WIKI/AMZN.11",start_date=start_date,end_date=end_date)

# Getting input from user on how much they want to allocate to the 4 stocks
# for the diversified portfolio (weights, e.g. ".3,.4,.2,.1").
inputstring = input("Enter How much You want to allocate in the 4 stocks [Apple,Cisco,IBM,Amazon] ex. .3,.4,.2,.1")
ins = list(map(float,inputstring.split(',')))

# Normalized return: price relative to the first day in the window.
for stock in (aapl,cisco,ibm,amzn):
    stock['Normed Return']=stock['Adj. Close']/stock.iloc[0]['Adj. Close']

# Weight each stock's normalized return by its allocation fraction.
for stock,aloc in zip((aapl,cisco,ibm,amzn),ins):
    stock['Allocation']=stock['Normed Return']*aloc

# Total initial investment in the diversified portfolio.
money = int(input("How Much Inital Total Money you want to invest in the diversified portfolio"))

# Position value per stock = allocation-weighted return * invested money.
for stock in (aapl,cisco,ibm,amzn):
    stock['Total Pos']=stock['Allocation']*money

# Combine all positions into one DataFrame and add the portfolio total.
everything = [aapl['Total Pos'],cisco['Total Pos'],ibm['Total Pos'],amzn['Total Pos']]
portfolio_val = pd.concat(everything,axis=1)
portfolio_val.columns=['Apple Pos.','Cisco Pos.','IBM Pos.','Amazon Pos.']
portfolio_val['Total Pos.']=portfolio_val.sum(axis=1)

# Plotting All Portfolio stock Positions
portfolio_val.plot(figsize=(10,8))
plt.title("All Portfolio Pos.")
plt.show()

# Plotting Total Portfolio stock Positions
portfolio_val['Total Pos.'].plot(figsize=(10,8))
plt.title("Total Portfolio Pos.")
plt.show()
| 43.895833
| 115
| 0.756051
|
08d3ba6eb8f1b454b6c6f706d26b0d2efc937719
| 3,574
|
py
|
Python
|
recipe/app/settings.py
|
paccy-ops/react-recipe-app
|
b98717dd3b66754e032a03157385917689de2e5f
|
[
"MIT"
] | null | null | null |
recipe/app/settings.py
|
paccy-ops/react-recipe-app
|
b98717dd3b66754e032a03157385917689de2e5f
|
[
"MIT"
] | null | null | null |
recipe/app/settings.py
|
paccy-ops/react-recipe-app
|
b98717dd3b66754e032a03157385917689de2e5f
|
[
"MIT"
] | null | null | null |
"""
Django settings for app project.
Generated by 'django-admin startproject' using Django 3.2.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-_d7j^(zhkdj(y3ohh0azvo7dx*l(xf4*qt_q4yca5cotnd&mae'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ["*"]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'core.apps.CoreConfig',
'user.apps.UserConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': BASE_DIR / 'db.sqlite3',
# }
# }
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'Recipe',
'USER': 'pacifique',
'PASSWORD': 'Zmlscw==',
'HOST': 'db',
'PORT': 5432,
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
AUTH_USER_MODEL = 'core.USER'
| 25.528571
| 91
| 0.682708
|
718019f3e5945c4e4b92a41239782ff133f5aaf8
| 13,955
|
py
|
Python
|
kala.py
|
delanym/kala
|
b4e760d4def03198aacf182d279c5c97ddb04a01
|
[
"BSD-3-Clause"
] | null | null | null |
kala.py
|
delanym/kala
|
b4e760d4def03198aacf182d279c5c97ddb04a01
|
[
"BSD-3-Clause"
] | null | null | null |
kala.py
|
delanym/kala
|
b4e760d4def03198aacf182d279c5c97ddb04a01
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import os
import wx
import data
class Kala(wx.App):
    """wx application object; creates and shows the single main frame."""
    def OnInit(self):
        """wx.App hook called at startup; must return True to continue."""
        self.frame = KalaFrame(None, title="Kala - Revised Astrology")
        self.SetTopWindow(self.frame)
        self.frame.SetWindowStyle(wx.MAXIMIZE)
        self.frame.Show()
        return True
class KalaFrame(wx.Frame):
    """Main application window: menu bar, status bar, and menu handlers."""

    # BUG FIX: `pos` and `size` previously defaulted to mutable lists
    # ([0,0], [700,300]); tuples are the safe, equivalent defaults.
    def __init__(self, parent, id=wx.ID_ANY, title="", pos=(0, 0), size=(700, 300), style=wx.DEFAULT_FRAME_STYLE, name="KalaFrame"):
        super(KalaFrame, self).__init__(parent, id, title, pos, size, style, name)
        icon = wx.Icon("./kala.ico", wx.BITMAP_TYPE_ICO)
        self.SetIcon(icon)
        # Event IDs
        self.ID_Exit = 0
        self.ID_NewPerson, self.ID_NewEvent = range(1,3)
        self.ID_Signs, self.ID_Nakshatras, self.ID_Animals, self.ID_Tithis = range(11,15)
        #self.ID_Private, self.ID_Public =
        # Menus
        self.mfile = wx.Menu()
        self.mchronicle = wx.Menu()
        self.mview = wx.Menu()
        self.mfile.Append(self.ID_Exit, "Exit")
        self.mchronicle.Append(self.ID_NewPerson, "New Person")
        self.mchronicle.Append(self.ID_NewEvent, "New Event")
        self.mview.Append(self.ID_Signs, "Signs")
        self.mview.Append(self.ID_Nakshatras, "Nakshatras")
        self.mview.Append(self.ID_Animals, "Animals")
        self.mview.Append(self.ID_Tithis, "Tithis")
        menubar = wx.MenuBar()
        menubar.Append(self.mfile, "File")
        menubar.Append(self.mchronicle, "Chronicle")
        menubar.Append(self.mview, "View")
        self.SetMenuBar(menubar)
        # Wire each menu item to its handler.
        self.Bind(wx.EVT_MENU, self.onExit, id=self.ID_Exit)
        self.Bind(wx.EVT_MENU, self.onNewPerson, id=self.ID_NewPerson)
        self.Bind(wx.EVT_MENU, self.onNewEvent, id=self.ID_NewEvent)
        self.Bind(wx.EVT_MENU, self.onSigns, id=self.ID_Signs)
        self.Bind(wx.EVT_MENU, self.onNakshatras, id=self.ID_Nakshatras)
        self.Bind(wx.EVT_MENU, self.onAnimals, id=self.ID_Animals)
        self.Bind(wx.EVT_MENU, self.onTithis, id=self.ID_Tithis)
        # Layout
        self.CreateStatusBar()

    def onExit(self, event):
        """File > Exit: close the whole application."""
        self.Destroy()

    def onNewPerson(self, event):
        """Chronicle > New Person: show the person dialog modally."""
        dlg = PersonDialog(self)
        dlg.ShowModal()
        dlg.Destroy()

    def onNewEvent(self, event):
        """Chronicle > New Event: show the event dialog modally."""
        dlg = EventDialog(self)
        dlg.ShowModal()
        dlg.Destroy()

    # View menu handlers -- not implemented yet.
    def onSigns(self, event):
        pass

    def onNakshatras(self, event):
        pass

    def onAnimals(self, event):
        pass

    def onTithis(self, event):
        pass
class PersonDialog(wx.Dialog):
    """Modal dialog for creating a new person record."""

    def __init__(self, parent, title="Create New Person", style=wx.DEFAULT_DIALOG_STYLE):
        # BUG FIX: the `style` argument was accepted but silently ignored;
        # it is now forwarded to wx.Dialog. The default value is unchanged.
        super(PersonDialog, self).__init__(parent, title=title, style=style)
        self.name = wx.TextCtrl(self)
        self.public = wx.RadioButton(self, label="Public", style=wx.RB_GROUP)
        self.private = wx.RadioButton(self, label="Private")
        self.url = wx.TextCtrl(self)
        self.getURL = wx.Button(self, label="Get Details")
        self.notes = wx.TextCtrl(self, style=wx.TE_MULTILINE)
        self.addEvent = wx.Button(self, label="Add Event")
        self.accept = wx.Button(self, wx.ID_OK, "Accept")
        self.cancel = wx.Button(self, wx.ID_CANCEL, "Cancel")
        # Grid layout: name row, public/private row, URL row, notes, buttons.
        sizer = wx.GridBagSizer(vgap=8, hgap=8)
        sizer.Add(wx.StaticText(self, label="Name"), (0, 1))
        sizer.Add(self.name, (0, 2), (1, 14), wx.EXPAND)
        sizer.Add(self.public, (1, 2), (1, 1))
        sizer.Add(self.private, (1, 3), (1, 1))
        sizer.Add(wx.StaticText(self, label="Astrodienst URL"), (2, 1))
        sizer.Add(self.url, (2, 2), (1, 12), wx.EXPAND)
        sizer.Add(self.getURL, (2, 14))
        sizer.Add(wx.StaticText(self, label="Notes"), (3, 1))
        sizer.Add(self.notes, (3, 2), (6, 14), wx.EXPAND)
        sizer.Add(self.addEvent, (9, 2))
        sizer.Add(self.accept, (9, 3))
        sizer.Add(self.cancel, (9, 4))
        mvsizer = wx.BoxSizer(wx.VERTICAL)
        mvsizer.Add(sizer, 0, wx.GROW|wx.ALL, 10)
        self.SetSizer(mvsizer)
        mvsizer.Fit(self)
        self.name.SetFocus()
        self.Bind(wx.EVT_RADIOBUTTON, self.onPrivate, id=self.private.GetId())
        self.Bind(wx.EVT_RADIOBUTTON, self.onPublic, id=self.public.GetId())
        self.Bind(wx.EVT_BUTTON, self.onAccept, id=wx.ID_OK)
        self.Bind(wx.EVT_BUTTON, parent.onNewEvent, id=self.addEvent.GetId())

    def onPrivate(self, event):
        """Private record: Astrodienst URL fields are irrelevant, disable them."""
        self.url.Enable(False)
        self.getURL.Enable(False)

    def onPublic(self, event):
        """Public record: re-enable the Astrodienst URL fields."""
        self.url.Enable(True)
        self.getURL.Enable(True)

    def onAccept(self, event):
        """Accept button: dismiss the dialog."""
        self.Destroy()
class EventDialog(wx.Dialog):
    """Modal dialog for creating a new event, with a Point/Range time notebook."""

    def __init__(self, parent, title="Create New Event", style=wx.DEFAULT_DIALOG_STYLE):
        # BUG FIX: the `style` argument was accepted but silently ignored;
        # it is now forwarded to wx.Dialog. The default value is unchanged.
        super(EventDialog, self).__init__(parent, title=title, style=style)
        self.event = wx.ComboBox(self, choices=["Born", "Died", "Personal Event", "Historical Event", "Astronomical / Astrological"])
        self.event.SetStringSelection("Born")
        self.description = wx.TextCtrl(self, style=wx.TE_MULTILINE)
        self.addPoint = wx.Button(self, label="Add Point")
        self.addRange = wx.Button(self, label="Add Range")
        self.timesNotebook = Times(self)
        # Layout
        msizer = wx.BoxSizer(wx.VERTICAL)
        gbsizer = wx.GridBagSizer(vgap=8, hgap=8)
        gbsizer.Add(wx.StaticText(self, label="Type"), (0, 1))
        gbsizer.Add(self.event, (0, 2), (1, 3))
        gbsizer.Add(wx.StaticText(self, label="Description"), (1, 1))
        gbsizer.Add(self.description, (1, 2), (4, 13), wx.EXPAND)
        gbsizer.Add(self.addPoint, (5, 2))
        gbsizer.Add(self.addRange, (5, 3))
        msizer.Add(gbsizer, 0, wx.GROW|wx.ALL, 10)
        msizer.Add(self.timesNotebook, 0, wx.GROW|wx.ALL, 10)
        self.SetSizer(msizer)
        msizer.Fit(self)
        self.description.SetFocus()
class Times(wx.Notebook):
    """Notebook holding the two time-entry panels: "Point" and "Range"."""

    def __init__(self, parent):
        super(Times, self).__init__(parent)
        # Build both panels, then register each one as a notebook tab.
        self.ptime = PointPanel(self)
        self.rtime = RangePanel(self)
        for panel, label in ((self.ptime, "Point"), (self.rtime, "Range")):
            self.AddPage(panel, label)
class PointPanel(wx.Panel):
    """Entry form for a single point in time (source, calendar, location, date)."""

    def __init__(self, parent):
        super(PointPanel, self).__init__(parent)
        self.source = wx.TextCtrl(self, size=(300, 48), style=wx.TE_MULTILINE)
        self.calendar = wx.ComboBox(self, size=(150,-1), choices=["Julian", "Gregorian"])
        self.julianDayUT = wx.TextCtrl(self, size=(150,-1))
        self.method = wx.ComboBox(self, size=(150,-1), choices=["Local Mean", "Zone", "No location"])
        self.zone = wx.ComboBox(self, size=(75,-1), choices=data.ZONES)
        self.delta = wx.TextCtrl(self, size=(50,-1))
        self.longitude = wx.ComboBox(self, size=(150,-1), choices=["Decimal", "Degree"])
        self.longdecdeg = wx.TextCtrl(self, size=(50,-1))
        self.longmin = wx.TextCtrl(self, size=(50,-1))
        self.year = wx.TextCtrl(self, size=(50,-1))
        self.month = wx.TextCtrl(self, size=(50,-1))
        self.day = wx.TextCtrl(self, size=(50,-1))
        self.hour = wx.TextCtrl(self, size=(50,-1))
        self.minute = wx.TextCtrl(self, size=(50,-1))
        self.margin = wx.TextCtrl(self, size=(50,-1))
        self.calendar.SetStringSelection("Gregorian")
        self.method.SetStringSelection("Zone")
        # BUG FIX: these three lines previously *assigned* the tooltip string
        # to the SetToolTipString attribute (clobbering the bound method)
        # instead of calling it, so no tooltip was ever shown.
        self.delta.SetToolTipString("Daylight saving is usually +1 and wartime +2.")
        self.margin.SetToolTipString("If you don't know the time enter 12. If you don't know the time or location enter 24.")
        self.longdecdeg.SetToolTipString("Negative value for western hemisphere.")
        # Layout
        flags = wx.SizerFlags(1)
        flags.Expand().Border(wx.ALL, 2)
        vsizer = wx.BoxSizer(wx.VERTICAL)
        hsizer = wx.BoxSizer(wx.HORIZONTAL)
        hsizer.Add(wx.StaticText(self, label="Source", size=(120,-1)), flag=wx.ALIGN_RIGHT|wx.ALIGN_CENTRE_VERTICAL)
        hsizer.Add(self.source, wx.EXPAND)
        vsizer.AddF(hsizer, flags)
        hsizer = wx.BoxSizer(wx.HORIZONTAL)
        hsizer.Add(wx.StaticText(self, label="Calendar", size=(120,-1)), flag=wx.ALIGN_RIGHT|wx.ALIGN_CENTRE_VERTICAL)
        hsizer.Add(self.calendar)
        hsizer.Add(self.julianDayUT)
        vsizer.AddF(hsizer, flags)
        hsizer = wx.BoxSizer(wx.HORIZONTAL)
        hsizer.Add(wx.StaticText(self, label="Method", size=(120,-1)), flag=wx.ALIGN_RIGHT|wx.ALIGN_CENTRE_VERTICAL)
        hsizer.Add(self.method)
        hsizer.Add(self.zone)
        hsizer.Add(wx.StaticText(self, label="Daylight"), flag=wx.ALIGN_RIGHT|wx.ALIGN_CENTRE_VERTICAL)
        hsizer.Add(self.delta)
        vsizer.AddF(hsizer, flags)
        hsizer = wx.BoxSizer(wx.HORIZONTAL)
        hsizer.Add(wx.StaticText(self, label="Longitude", size=(120,-1)), flag=wx.ALIGN_RIGHT|wx.ALIGN_CENTRE_VERTICAL)
        hsizer.Add(self.longitude)
        hsizer.Add(wx.StaticText(self, label="Degree"), flag=wx.ALIGN_RIGHT|wx.ALIGN_CENTRE_VERTICAL)
        hsizer.Add(self.longdecdeg)
        hsizer.Add(wx.StaticText(self, label="Minutes"), flag=wx.ALIGN_RIGHT|wx.ALIGN_CENTRE_VERTICAL)
        hsizer.Add(self.longmin)
        vsizer.AddF(hsizer, flags)
        prsizer = wx.BoxSizer(wx.HORIZONTAL)
        gbsizer = wx.GridBagSizer(vgap=2, hgap=4)
        gbsizer.Add(wx.StaticText(self, label="Year"), (1, 1))
        gbsizer.Add(wx.StaticText(self, label="Month"), (1, 2))
        gbsizer.Add(wx.StaticText(self, label="Day"), (1, 3))
        gbsizer.Add(self.year, (2, 1), (1, 1))
        gbsizer.Add(self.month, (2, 2), (1, 1))
        gbsizer.Add(self.day, (2, 3), (1, 1))
        gbsizer.Add(wx.StaticText(self, label="Hour"), (3, 1))
        gbsizer.Add(wx.StaticText(self, label="Minute"), (3, 2))
        gbsizer.Add(wx.StaticText(self, label="Margin"), (3, 4))
        gbsizer.Add(self.hour, (4, 1), (1, 1))
        gbsizer.Add(self.minute, (4, 2), (1, 1))
        gbsizer.Add(self.margin, (4, 4), (1, 1))
        prsizer.Add(gbsizer)
        # BUG FIX: this previously re-added `hsizer` (already added above),
        # which wx rejects, while `prsizer` -- the date/time grid built just
        # above -- was never added to the panel at all.
        vsizer.AddF(prsizer, flags)
        #vsizer.Add(tsizer, 0, wx.GROW|wx.ALL, 10)
        self.SetSizer(vsizer)
        vsizer.Fit(self)
class RangePanel(wx.Panel):
    """Entry form for a time *range*: two identical date/time grids (start and
    end), plus shared calendar, calibration and source fields."""
    def __init__(self, parent):
        super(RangePanel, self).__init__(parent)
        self.calendar = wx.ComboBox(self, choices=["Julian", "Gregorian"])
        self.calendar.SetStringSelection("Gregorian")
        self.calibration = wx.ComboBox(self, choices=["Local Mean", "Zone"])
        self.calibration.SetStringSelection("Zone")
        self.source = wx.TextCtrl(self, style=wx.TE_MULTILINE)
        # Start-of-range fields (suffix 1).
        self.year1 = wx.TextCtrl(self, size=(50,-1))
        self.month1 = wx.TextCtrl(self, size=(50,-1))
        self.day1 = wx.TextCtrl(self, size=(50,-1))
        self.hour1 = wx.TextCtrl(self, size=(50,-1))
        self.minute1 = wx.TextCtrl(self, size=(50,-1))
        self.delta1 = wx.TextCtrl(self, size=(50,-1))
        self.margin1 = wx.TextCtrl(self, size=(50,-1))
        # End-of-range fields (suffix 2).
        self.year2 = wx.TextCtrl(self, size=(50,-1))
        self.month2 = wx.TextCtrl(self, size=(50,-1))
        self.day2 = wx.TextCtrl(self, size=(50,-1))
        self.hour2 = wx.TextCtrl(self, size=(50,-1))
        self.minute2 = wx.TextCtrl(self, size=(50,-1))
        self.delta2 = wx.TextCtrl(self, size=(50,-1))
        self.margin2 = wx.TextCtrl(self, size=(50,-1))
        # Top section: calendar / calibration / source.
        tsizer = wx.BoxSizer(wx.VERTICAL)
        gbsizer = wx.GridBagSizer(vgap=8, hgap=8)
        gbsizer.Add(wx.StaticText(self, label="Calendar"), (0, 1))
        gbsizer.Add(self.calendar, (0, 2), (1, 3))
        gbsizer.Add(wx.StaticText(self, label="Calibration"), (1, 1))
        gbsizer.Add(self.calibration, (1, 2), (1, 3))
        gbsizer.Add(wx.StaticText(self, label="Source"), (2, 1))
        gbsizer.Add(self.source, (2, 2), (3, 15), wx.EXPAND)
        tsizer.Add(gbsizer)
        # Two side-by-side grids: left = range start, right = range end.
        prsizer = wx.BoxSizer(wx.HORIZONTAL)
        gbsizer = wx.GridBagSizer(vgap=2, hgap=4)
        gbsizer.Add(wx.StaticText(self, label="Year"), (1, 1))
        gbsizer.Add(wx.StaticText(self, label="Month"), (1, 2))
        gbsizer.Add(wx.StaticText(self, label="Day"), (1, 3))
        gbsizer.Add(self.year1, (2, 1), (1, 1))
        gbsizer.Add(self.month1, (2, 2), (1, 1))
        gbsizer.Add(self.day1, (2, 3), (1, 1))
        gbsizer.Add(wx.StaticText(self, label="Hour"), (3, 1))
        gbsizer.Add(wx.StaticText(self, label="Minute"), (3, 2))
        gbsizer.Add(self.hour1, (4, 1), (1, 1))
        gbsizer.Add(self.minute1, (4, 2), (1, 1))
        gbsizer.Add(wx.StaticText(self, label="Delta"), (5, 1))
        gbsizer.Add(wx.StaticText(self, label="Margin"), (5, 2))
        gbsizer.Add(self.delta1, (6, 1), (1, 1))
        gbsizer.Add(self.margin1, (6, 2), (1, 1))
        prsizer.Add(gbsizer)
        gbsizer = wx.GridBagSizer(vgap=2, hgap=4)
        gbsizer.Add(wx.StaticText(self, label="Year"), (1, 1))
        gbsizer.Add(wx.StaticText(self, label="Month"), (1, 2))
        gbsizer.Add(wx.StaticText(self, label="Day"), (1, 3))
        gbsizer.Add(self.year2, (2, 1), (1, 1))
        gbsizer.Add(self.month2, (2, 2), (1, 1))
        gbsizer.Add(self.day2, (2, 3), (1, 1))
        gbsizer.Add(wx.StaticText(self, label="Hour"), (3, 1))
        gbsizer.Add(wx.StaticText(self, label="Minute"), (3, 2))
        gbsizer.Add(self.hour2, (4, 1), (1, 1))
        gbsizer.Add(self.minute2, (4, 2), (1, 1))
        gbsizer.Add(wx.StaticText(self, label="Delta"), (5, 1))
        gbsizer.Add(wx.StaticText(self, label="Margin"), (5, 2))
        gbsizer.Add(self.delta2, (6, 1), (1, 1))
        gbsizer.Add(self.margin2, (6, 2), (1, 1))
        prsizer.Add(gbsizer)
        tsizer.Add(prsizer)
        msizer = wx.BoxSizer(wx.VERTICAL)
        msizer.Add(tsizer, 0, wx.GROW|wx.ALL, 10)
        self.SetSizer(msizer)
        msizer.Fit(self)
if __name__ == "__main__":
app = Kala(False)
app.MainLoop()
| 40.216138
| 133
| 0.607668
|
bf083cb71b7736203d02473137a90d7b5acff945
| 2,108
|
py
|
Python
|
blog_project/blog/views.py
|
AhteshamSid/Django-Blog-Application
|
f7bdba7632e58dd337e97a8b25a9c7b372fd5bde
|
[
"MIT"
] | null | null | null |
blog_project/blog/views.py
|
AhteshamSid/Django-Blog-Application
|
f7bdba7632e58dd337e97a8b25a9c7b372fd5bde
|
[
"MIT"
] | null | null | null |
blog_project/blog/views.py
|
AhteshamSid/Django-Blog-Application
|
f7bdba7632e58dd337e97a8b25a9c7b372fd5bde
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, get_object_or_404
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from django.contrib.auth.models import User
from django.views.generic import ListView, DetailView, CreateView, UpdateView, DeleteView
from .models import Post
class PostListView(ListView):
    """Home page: paginated list of all posts, newest first (3 per page)."""
    model = Post
    template_name = 'blog/home.html'
    context_object_name = 'posts'
    ordering = ['-date_posted']
    paginate_by = 3
class UserPostListView(ListView):
    """Paginated list of the posts written by one user (``username`` URL kwarg)."""
    model = Post
    template_name = 'blog/user_posts.html'
    context_object_name = 'posts'
    paginate_by = 5
    def get_queryset(self):
        """Return the selected user's posts newest-first; 404 on unknown username."""
        user = get_object_or_404(User, username=self.kwargs.get('username'))
        return Post.objects.filter(author=user).order_by('-date_posted')
class PostDetailView(DetailView):
    """Single-post detail page; the post is exposed to the template as ``post``."""
    model = Post
    template_name = 'blog/details.html'
    context_object_name = 'post'
class PostCreateView(LoginRequiredMixin, CreateView):
    """Create a new post; requires login."""
    model = Post
    template_name = 'blog/create.html'
    fields = ['title', 'content']
    def form_valid(self, form):
        """Stamp the logged-in user as the post's author before saving."""
        form.instance.author = self.request.user
        return super().form_valid(form)
class PostUpdateView(LoginRequiredMixin, UserPassesTestMixin, UpdateView):
    """Edit an existing post; only its author may update it."""
    model = Post
    template_name = 'blog/create.html'
    fields = ['title', 'content']

    def form_valid(self, form):
        """Stamp the logged-in user as the post's author before saving."""
        form.instance.author = self.request.user
        return super().form_valid(form)

    def test_func(self):
        """UserPassesTestMixin hook: allow only the post's author.

        Simplified from an ``if/else`` that returned ``True``/``False``.
        """
        return self.request.user == self.get_object().author
class PostDeleteView(LoginRequiredMixin, UserPassesTestMixin, DeleteView):
    """Delete a post; only its author may delete it. Redirects to the home page."""
    model = Post
    template_name = 'blog/delete.html'
    success_url = '/'

    def test_func(self):
        """UserPassesTestMixin hook: allow only the post's author.

        Simplified from an ``if/else`` that returned ``True``/``False``.
        """
        return self.request.user == self.get_object().author
def about(request):
    """Render the static About page."""
    return render(request, 'blog/about.html')
| 28.106667
| 90
| 0.661765
|
e3403f54ccf37485bd4b55171806f9724b18cda7
| 8,173
|
py
|
Python
|
tools/hardware/killerbee_msfrelay.py
|
lmrosa/metasploit-framework
|
984ff284acdb6ad67c9ffe1849cc963499ab335e
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 9
|
2019-07-12T16:50:13.000Z
|
2022-02-19T02:43:56.000Z
|
tools/hardware/killerbee_msfrelay.py
|
Netw0rkLan/metasploit-framework
|
b5700beee4b99ddd14ae656293dd48638a00a640
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 4
|
2020-12-28T19:35:25.000Z
|
2020-12-30T11:12:24.000Z
|
tools/hardware/killerbee_msfrelay.py
|
Netw0rkLan/metasploit-framework
|
b5700beee4b99ddd14ae656293dd48638a00a640
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 7
|
2016-01-05T18:56:09.000Z
|
2021-01-08T19:41:17.000Z
|
#!/usr/bin/env python
# KillerBee Metasploit relay server
import re
import os
import sys
import cmd
import time
import json
import base64
import socket
import threading
import pkg_resources # Used to get killerbee version
from BaseHTTPServer import BaseHTTPRequestHandler,HTTPServer
from urlparse import parse_qs,urlparse
from killerbee import *
last_errors = 0
starttime = 0
packets_sent = 0
last_sent = 0
username = None
password = None
kb = None
class MSFHandler(BaseHTTPRequestHandler):
    """HTTP handler exposing the module-level KillerBee device (``kb``) as a
    Metasploit hardware-bridge REST API (Python 2 / BaseHTTPServer)."""
    def status(self):
        """Build the /status payload: device inventory and capabilities."""
        status = {}
        hw_versions = []
        fw_version = pkg_resources.get_distribution("killerbee").version
        device_names = []
        for dev in kbutils.devlist():
            hw_versions.append(dev[2])
            device_names.append(dev[1])
        if len(hw_versions) > 0:
            status["operational"] = 1
        else:
            status["operational"] = 0
        status["hw_specialty"] = { "zigbee": True }
        # TODO: We should check firmware before reporting transmit capabilities
        status["hw_capabilities"] = { "transmit": True}
        status["last_10_errors"] = last_errors
        status["api_version"] = "0.0.3"
        status["fw_version"] = fw_version
        if len(hw_versions) == 1:
            status["hw_version"] = hw_versions[0]
            status["device_name"] = device_names[0]
        elif len(hw_versions) > 1:
            status["hw_version"] = ', '.join(hw_versions)
            status["device_name"] = ', '.join(device_names)
        else:
            status["hw_version"] = "Not Supported"
        return status
    def statistics(self):
        """Build the /statistics payload (uptime and packet counters)."""
        global packets_sent
        stats = {}
        stats["uptime"] = int(time.time()) - starttime
        stats["packet_stats"] = packets_sent
        stats["last_request"] = last_sent
        stats["voltage"] = "0.0v"
        return stats
    def datetime(self):
        # NOTE(review): "sytem_datetime" looks like a typo for
        # "system_datetime", but the key is part of the wire API that
        # Metasploit parses -- confirm before renaming.
        return { "sytem_datetime": int(time.time()) }
    def timezone(self):
        """Report the local timezone abbreviation."""
        return { "system_timezone": time.strftime("%Z") }
    def set_channel(self, args):
        """Tune the radio to the channel in the ``chan`` query argument."""
        if not "chan" in args:
            return self.not_supported()
        chan = int(args["chan"][0])
        kb.set_channel(chan)
        return { "success": True }
    def inject(self, args):
        """Transmit a raw frame supplied base64url-encoded in ``data``."""
        global packets_sent
        if not "data" in args:
            return self.not_supported()
        try:
            kb.inject(base64.urlsafe_b64decode(args["data"][0]))
            packets_sent+=1
        except Exception, e:
            print("ERROR: Unable to inject packet: {0}".format(e))
            return { "success": False }
        return { "success": True }
    def recv(self):
        """Return the next sniffed frame (base64url) or {} when none waiting."""
        pkt = kb.pnext()
        if pkt != None and pkt[1]:
            return {"data": base64.urlsafe_b64encode(pkt[0]), "valid_crc": pkt[1], "rssi": pkt[2] }
        return {}
    def sniffer_off(self):
        kb.sniffer_off()
        return {"success": True }
    def sniffer_on(self):
        kb.sniffer_on()
        return {"success": True }
    def supported_devices(self):
        """List the device strings of all attached KillerBee-compatible radios."""
        devices = []
        for dev in kbutils.devlist():
            devices.append(dev[0])
        return { "devices": devices }
    def not_supported(self):
        return { "status": "not supported" }
    def send(self, data, resp=200):
        """Serialize ``data`` as JSON and write the HTTP response."""
        self.send_response(resp)
        self.send_header('Content-type','application/json')
        self.end_headers()
        self.wfile.write(json.dumps(data))
        return
    def do_AUTHHEAD(self):
        """Send a 401 challenge requesting HTTP Basic credentials."""
        self.send_response(401)
        self.send_header('WWW-Authenticate', 'Basic realm=\"Killerbee MSF Relay\"')
        self.send_header('Content-type', 'text/html')
        self.end_headers()
        self.wfile.write("Please Authenticate")
    def do_GET(self):
        """Route GET requests: optional Basic auth, then dispatch by path."""
        # Module-level password of None means --noauth was given.
        if not password == None:
            if self.headers.getheader('Authorization') == None:
                print("Did not authenticate")
                self.do_AUTHHEAD()
                return
            if not self.headers.getheader('Authorization') == 'Basic '+base64.b64encode(username + ":" + password):
                print("Bad Authentication")
                self.do_AUTHHEAD()
                return
        url = urlparse(self.path)
        args = parse_qs(url.query)
        if self.path=="/status":
            self.send(self.status())
        elif self.path=="/statistics":
            self.send(self.statistics())
        elif self.path=="/settings/datetime":
            self.send(self.datetime())
        elif self.path=="/settings/timezone":
            self.send(self.timezone())
        elif self.path=="/zigbee/supported_devices":
            self.send(self.supported_devices())
        elif self.path.startswith("/zigbee/"):
            # Per-device endpoints: /zigbee/<device-id>/<action>.
            re_dev = re.compile("/zigbee/([\d\w:]+)/")
            m = re_dev.match(self.path)
            if m:
                dev = m.group(1)
                if self.path.find("/set_channel?") > -1:
                    self.send(self.set_channel(args))
                elif self.path.find("/inject?") > -1:
                    self.send(self.inject(args))
                elif self.path.find("/recv") > -1:
                    self.send(self.recv())
                elif self.path.find("/sniffer_off") > -1:
                    self.send(self.sniffer_off())
                elif self.path.find("/sniffer_on") > -1:
                    self.send(self.sniffer_on())
                else:
                    self.send(self.not_supported(), 404)
            else:
                self.send(self.not_supported(), 404)
        else:
            self.send(self.not_supported(), 404)
        return
class Killerbee_MSFRelay(cmd.Cmd):
    """Runs the relay HTTP server; keeps serving until Ctrl-C."""
    intro = """
    KillerBee Metasploit Relay
    """
    def __init__(self, ip='0.0.0.0', port=8080):
        cmd.Cmd.__init__(self)
        self._ip = ip
        self._port = port
        self._sock = None
        self._pause = False
        self.start()

    def start(self):
        """Serve HTTP requests forever; restart the server on unexpected errors."""
        # BUG FIX: `starttime` was previously assigned as a function local, so
        # the module-level baseline read by MSFHandler.statistics() stayed 0
        # and the reported uptime was the full epoch time.
        global starttime
        self._go = True
        while self._go:
            # serve the NIC port
            try:
                self._sock = HTTPServer((self._ip, self._port), MSFHandler)
                starttime = int(time.time())
                print("KillerBee MSFRelay running.")
                self._sock.serve_forever()
            except KeyboardInterrupt:
                self._sock.socket.close()
                self._go = False
            except:
                # Deliberate best-effort keep-alive: report and retry.
                sys.excepthook(*sys.exc_info())
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--iface', '--dev', action='store', dest='devstring')
parser.add_argument('-u', '--user', default="msf_relay", help='HTTP Username', type=str)
parser.add_argument('-p', '--password', default="rfcat_relaypass", help='HTTP Password', type=str)
parser.add_argument('-P', '--Port', default=8080, type=int)
parser.add_argument('--noauth', default=False, action="store_true", help='Do not require authentication')
parser.add_argument('--localonly', default=False, action="store_true", help='Listen on localhost only')
ifo = parser.parse_args()
try:
kb = KillerBee(device=ifo.devstring)
except KBInterfaceError as e:
print("Interface Error: {0}".format(e))
sys.exit(-1)
username = ifo.user
password = ifo.password
ip = "0.0.0.0"
port = ifo.Port
if ifo.noauth:
username = None
password = None
if ifo.localonly:
host = "127.0.0.1"
wait_msg = False
dev_found = False
while not dev_found:
try:
devs = kbutils.devlist()
if len(devs) > 0:
dev_found = True
elif not wait_msg:
print("Insert KillerBee compatible ZigBee device. (You may need to add permissions)")
wait_msg = True
except KeyboardInterrupt:
sys.exit()
except:
if not wait_msg:
print("Insert KillerBee compatible ZigBee device. (You may need to add permissions)")
wait_msg = True
beerelay = Killerbee_MSFRelay(ip, port)
import atexit
atexit.register(cleanupInteractiveAtExit)
| 32.43254
| 115
| 0.56809
|
31f2374614883055319ff1d8b89bcc895024eb04
| 4,300
|
py
|
Python
|
src/probability_util_test.py
|
Strilanc/honeycomb_threshold
|
d71737d3b4fb8878e856f8bd66b9632cc7078159
|
[
"Apache-2.0"
] | 5
|
2021-07-23T05:33:18.000Z
|
2022-01-27T00:59:40.000Z
|
src/probability_util_test.py
|
Strilanc/honeycomb_threshold
|
d71737d3b4fb8878e856f8bd66b9632cc7078159
|
[
"Apache-2.0"
] | 1
|
2021-08-03T20:58:26.000Z
|
2021-08-08T17:13:11.000Z
|
src/probability_util_test.py
|
Strilanc/honeycomb_threshold
|
d71737d3b4fb8878e856f8bd66b9632cc7078159
|
[
"Apache-2.0"
] | 1
|
2022-01-30T11:05:19.000Z
|
2022-01-30T11:05:19.000Z
|
import math
from typing import Union
import numpy as np
import pytest
from probability_util import binary_search, log_binomial, log_factorial, least_squares_output_range, least_squares_slope_range, \
binary_intercept, least_squares_through_point
@pytest.mark.parametrize(
    "arg,result",
    {
        0: 0,
        1: 0,
        2: math.log(2),
        3: math.log(2) + math.log(3),
        # These values were taken from wolfram alpha:
        10: 15.1044125730755152952257093292510,
        100: 363.73937555556349014407999336965,
        1000: 5912.128178488163348878130886725,
        10000: 82108.9278368143534553850300635,
        100000: 1051299.2218991218651292781082,
    }.items(),
)
def test_log_factorial(arg, result):
    """log_factorial(n) should match reference ln(n!) values to 1e-5 rel. tol."""
    np.testing.assert_allclose(log_factorial(arg), result, rtol=1e-5)
@pytest.mark.parametrize(
    "n,p,hits,result",
    [
        (1, 0.5, 0, np.log(0.5)),
        (1, 0.5, 1, np.log(0.5)),
        (1, 0.1, 0, np.log(0.9)),
        (1, 0.1, 1, np.log(0.1)),
        (2, [0, 1, 0.1, 0.5], 0, [0, -np.inf, np.log(0.9 ** 2), np.log(0.25)]),
        (2, [0, 1, 0.1, 0.5], 1, [-np.inf, -np.inf, np.log(0.1 * 0.9 * 2), np.log(0.5)]),
        (2, [0, 1, 0.1, 0.5], 2, [-np.inf, 0, np.log(0.1 ** 2), np.log(0.25)]),
        # Magic number comes from PDF[BinomialDistribution[10^10, 10^-6], 10000] on wolfram alpha.
        (10 ** 10, 10 ** -6, 10 ** 4, np.log(0.0039893915536591)),
        # Corner cases.
        (1, 0.0, 0, 0),
        (1, 0.0, 1, -np.inf),
        (1, 1.0, 0, -np.inf),
        (1, 1.0, 1, 0),
        # Array broadcast.
        (2, np.array([0.0, 0.5, 1.0]), 0, np.array([0.0, np.log(0.25), -np.inf])),
        (2, np.array([0.0, 0.5, 1.0]), 1, np.array([-np.inf, np.log(0.5), -np.inf])),
        (2, np.array([0.0, 0.5, 1.0]), 2, np.array([-np.inf, np.log(0.25), 0.0])),
    ],
)
def test_log_binomial(
    n: int, p: Union[float, np.ndarray], hits: int, result: Union[float, np.ndarray]
) -> None:
    """log_binomial should match hand-computed log-PMF values, incl. 0/1
    probabilities (giving -inf) and array-valued p broadcasting."""
    np.testing.assert_allclose(log_binomial(n=n, p=p, hits=hits), result, rtol=1e-2)
def test_binary_search():
    """binary_search over f(x)=x^2 should land on the nearest integer argument,
    clamp at min_x for unreachable low targets, and at max_x for huge ones."""
    assert binary_search(func=lambda x: x**2, min_x=0, max_x=10**100, target=100.1) == 10
    assert binary_search(func=lambda x: x**2, min_x=0, max_x=10**100, target=100) == 10
    assert binary_search(func=lambda x: x**2, min_x=0, max_x=10**100, target=99.9) == 10
    assert binary_search(func=lambda x: x**2, min_x=0, max_x=10**100, target=90) == 9
    assert binary_search(func=lambda x: x**2, min_x=0, max_x=10**100, target=92) == 10
    assert binary_search(func=lambda x: x**2, min_x=0, max_x=10**100, target=-100) == 0
    assert binary_search(func=lambda x: x**2, min_x=0, max_x=10**100, target=10**300) == 10**100
def test_least_squares_through_point():
    """Constrained fit must pass through the required point; slope/intercept
    shift accordingly when the point is moved off the data's own line."""
    # (1, 2) lies on the data's exact line y = x + 1, so the fit is that line.
    fit = least_squares_through_point(
        xs=np.array([1, 2, 3]),
        ys=np.array([2, 3, 4]),
        required_x=1,
        required_y=2)
    np.testing.assert_allclose(fit.slope, 1)
    np.testing.assert_allclose(fit.intercept, 1)
    # Forcing the line through (1, 1) tilts it away from the data.
    fit = least_squares_through_point(
        xs=np.array([1, 2, 3]),
        ys=np.array([2, 3, 4]),
        required_x=1,
        required_y=1)
    np.testing.assert_allclose(fit.slope, 1.6, rtol=1e-5)
    np.testing.assert_allclose(fit.intercept, -0.6, atol=1e-5)
def test_binary_intercept():
    """binary_intercept should find x with f(x) ~= target in all four
    combinations of search direction (step sign) and monotonicity."""
    t = binary_intercept(func=lambda x: x**2, start_x=5, step=1, target_y=82.3, atol=0.01)
    assert t > 0 and abs(t**2 - 82.3) <= 0.01
    t = binary_intercept(func=lambda x: -x**2, start_x=5, step=1, target_y=-82.3, atol=0.01)
    assert t > 0 and abs(t**2 - 82.3) <= 0.01
    t = binary_intercept(func=lambda x: x**2, start_x=0, step=-1, target_y=82.3, atol=0.01)
    assert t < 0 and abs(t**2 - 82.3) <= 0.01
    t = binary_intercept(func=lambda x: -x**2, start_x=0, step=-1, target_y=-82.3, atol=0.2)
    assert t < 0 and abs(t**2 - 82.3) <= 0.2
def test_least_squares_output_range():
    """Extrapolated output range at x=100 should bracket the best-fit value.

    BUG FIX: this was previously named ``least_squares_output_range`` (no
    ``test_`` prefix), which both shadowed the imported function under test
    and hid the test from pytest's collection, so it never ran.
    """
    low, high = least_squares_output_range(
        xs=[1, 2, 3],
        ys=[1, 5, 9],
        target_x=100,
        cost_increase=1,
    )
    assert 300 < low < 400 < high < 500
def test_least_squares_slope_range():
    """Best-fit slope of the exact line y = 4x - 3 is 4; the low/high bounds
    from a unit cost increase should bracket it."""
    low, mid, high = least_squares_slope_range(
        xs=[1, 2, 3],
        ys=[1, 5, 9],
        cost_increase=1,
    )
    np.testing.assert_allclose(mid, 4)
    assert 3 < low < 3.5 < mid < 4.5 < high < 5
| 37.068966
| 129
| 0.592791
|
ed7e1a91ccd846701778f70f64ba4c28cb1877b6
| 2,143
|
py
|
Python
|
pytorch_toolbelt/utils/support.py
|
mohitktanwr/toolkits
|
f3acfca5da05cd7ccdd85e8d343d75fa40fb44d9
|
[
"MIT"
] | 1,281
|
2019-03-17T18:32:39.000Z
|
2022-03-31T03:47:22.000Z
|
pytorch_toolbelt/utils/support.py
|
mohitktanwr/toolkits
|
f3acfca5da05cd7ccdd85e8d343d75fa40fb44d9
|
[
"MIT"
] | 28
|
2019-04-05T10:49:25.000Z
|
2022-03-11T10:40:28.000Z
|
pytorch_toolbelt/utils/support.py
|
mohitktanwr/toolkits
|
f3acfca5da05cd7ccdd85e8d343d75fa40fb44d9
|
[
"MIT"
] | 99
|
2019-03-18T08:40:18.000Z
|
2022-03-26T10:52:57.000Z
|
import functools
import inspect
import warnings
# Accept both bytes and str "reasons" (py2/py3-portable string check).
string_types = (type(b""), type(""))

__all__ = ["pytorch_toolbelt_deprecated"]


def pytorch_toolbelt_deprecated(reason):
    """
    Mark a function or class as deprecated.

    A ``DeprecationWarning`` is emitted every time the wrapped callable is
    invoked. Supports both usage forms::

        @pytorch_toolbelt_deprecated("please, use another function")
        def old_function(x, y): ...

        @pytorch_toolbelt_deprecated
        def old_function(x, y): ...

    :param reason: Either a human-readable explanation (str/bytes), or the
        function/class being decorated when used without arguments.
    :return: A decorator (when called with a reason) or the already-wrapped
        function/class (when used bare).
    :raises TypeError: If ``reason`` is neither a string nor a
        function/class.
    """

    def _wrap(func, message):
        # Shared wrapper factory for both usage forms.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Bug fix: use catch_warnings() so the temporary "always" filter
            # does not leak into the caller's global warning configuration.
            # The previous implementation unconditionally reset the
            # DeprecationWarning filter to "default" after each call,
            # clobbering whatever filters the application had installed.
            with warnings.catch_warnings():
                warnings.simplefilter("always", DeprecationWarning)
                warnings.warn(message, category=DeprecationWarning, stacklevel=2)
            return func(*args, **kwargs)

        return wrapper

    if isinstance(reason, string_types):
        # Used as @pytorch_toolbelt_deprecated("reason"): return a decorator.
        def decorator(func):
            kind = "class" if inspect.isclass(func) else "function"
            message = "Call to deprecated {} {} ({}).".format(kind, func.__name__, reason)
            return _wrap(func, message)

        return decorator
    elif inspect.isclass(reason) or inspect.isfunction(reason):
        # Used bare as @pytorch_toolbelt_deprecated: `reason` is the target.
        func = reason
        kind = "class" if inspect.isclass(func) else "function"
        message = "Call to deprecated {} {}.".format(kind, func.__name__)
        return _wrap(func, message)
    else:
        raise TypeError(repr(type(reason)))
| 29.356164
| 110
| 0.575828
|
9d0327d207acf19ee9d7307fb0f1bbae2fad1a74
| 14,491
|
py
|
Python
|
simple_amqp/asyncio.py
|
rudineirk/py-simple-amqp
|
4f12f7ab44a04395e151d933e69ff8ce8436419b
|
[
"MIT"
] | null | null | null |
simple_amqp/asyncio.py
|
rudineirk/py-simple-amqp
|
4f12f7ab44a04395e151d933e69ff8ce8436419b
|
[
"MIT"
] | 1
|
2021-06-01T22:18:26.000Z
|
2021-06-01T22:18:26.000Z
|
simple_amqp/asyncio.py
|
rudineirk/py-simple-amqp
|
4f12f7ab44a04395e151d933e69ff8ce8436419b
|
[
"MIT"
] | null | null | null |
import traceback
from asyncio import ensure_future, get_event_loop, sleep
from aio_pika import ExchangeType as PikaExchangeType
from aio_pika import IncomingMessage as PikaIncomingMessage
from aio_pika import Message as PikaMessage
from aio_pika import connect as pika_connect
from aio_pika.exchange import Exchange
from aio_pika.queue import Queue
from .actions import (
BindConsumer,
BindExchange,
BindQueue,
CreateChannel,
CreateConnection,
DeclareExchange,
DeclareQueue
)
from .base import (
AmqpChannel,
AmqpConnection,
AmqpConnectionNotOpen,
AmqpConsumer,
AmqpStage
)
from .data import AmqpMsg, AmqpParameters
from .log import setup_logger
# Maps the string exchange types carried by DeclareExchange actions onto
# aio_pika's ExchangeType enum values (see _declare_exchange below).
EXCHANGE_TYPES_MAP = {
    'direct': PikaExchangeType.DIRECT,
    'topic': PikaExchangeType.TOPIC,
    'fanout': PikaExchangeType.FANOUT,
}
class AsyncioAmqpConnection(AmqpConnection):
    """asyncio implementation of :class:`AmqpConnection` on top of aio_pika.

    Executes the configured setup stages (connection, channels, exchanges,
    queues, consumer bindings) in order and, when ``auto_reconnect`` is
    enabled, replays them after an unexpected connection loss.
    """

    def __init__(self, params: AmqpParameters, logger=None):
        super().__init__(params)
        self._conn = None
        self._channels = {}
        self._consumers = {}
        self._consumer_queues = {}
        self._queues = {}
        self._exchanges = {}
        self._connected = False
        self._closing = False
        self._auto_reconnect = False
        self.reconnect_delay = 1
        # Bug fix: these were previously initialized under the singular names
        # ``_conn_error_handler`` / ``_consumer_error_handler`` while every
        # read site iterates the plural names, so the first connection or
        # consumer error raised AttributeError instead of being reported.
        self._conn_error_handlers = []
        self._consumer_error_handlers = []
        self.log = logger if logger is not None else setup_logger()

    async def start(self, auto_reconnect=True):
        """Open the connection and run all configured setup stages."""
        self._closing = False
        self._auto_reconnect = auto_reconnect
        await self._run_stages()

    async def run_stage(self, stage: AmqpStage):
        """Run a single stage, registering it first if it is unknown."""
        if stage not in self.stages:
            self.add_stage(stage)
        await self._stage_runner(stage)

    async def stop(self):
        """Cancel all consumers, close channels and shut the connection down."""
        self._connected = False
        self._closing = True
        await self._stop_consuming()
        await self._close_channels()
        await self._close_connection()

    def cancel_consumer(
        self,
        channel: AmqpChannel,
        consumer: AmqpConsumer,
    ):
        """Cancel ``consumer`` on ``channel``; returns the coroutine doing it.

        Bug fix: this used to call ``_cancel_consumer(real_channel, queue,
        consumer.tag)``, which does not match that method's
        ``(channel_number, consumer_tag)`` signature and passed a channel
        object where a channel number is expected.
        """
        return self._cancel_consumer(channel.number, consumer.tag)

    async def publish(self, channel: AmqpChannel, msg: AmqpMsg):
        """Publish ``msg`` on ``channel``.

        Raises ``AmqpConnectionNotOpen`` when the connection is not
        established yet (or has been lost).
        """
        if not self._connected:
            raise AmqpConnectionNotOpen
        self.log.info(
            'publishing message on channel {}'
            .format(channel.number)
        )
        self.log.debug('publishing message: {}'.format(str(msg)))
        real_channel = self._get_channel(channel.number)
        if msg.exchange:
            exchange = self._exchanges[channel.number][msg.exchange]
        else:
            # An empty exchange name addresses the nameless default exchange.
            # (Removed a redundant second _get_channel lookup here.)
            exchange = real_channel.default_exchange
        expiration = msg.expiration
        # Negative expirations are treated as "no expiration".
        if expiration is not None and expiration < 0:
            expiration = None
        pika_msg = PikaMessage(
            body=msg.payload,
            content_type=msg.content_type,
            content_encoding=msg.encoding,
            correlation_id=msg.correlation_id,
            reply_to=msg.reply_to,
            expiration=expiration,
            headers=msg.headers,
        )
        await exchange.publish(
            pika_msg,
            msg.topic,
        )

    async def _run_stages(self):
        """Run every registered stage in order, then mark the link connected."""
        self._connected = False
        for stage in self.stages:
            await self._stage_runner(stage)
        self._connected = True

    async def _stage_runner(self, stage: AmqpStage):
        """Run ``stage``; retry stage zero forever while auto-reconnecting."""
        self.log.info('Starting stage [{}]'.format(stage.name))
        while True:
            exc = None
            try:
                await self._run_actions(stage)
            except Exception as e:
                exc = e
                self.log.error('an error ocurred when processing actions')
                if self._conn_error_handlers:
                    for handler in self._conn_error_handlers:
                        handler(e)
                else:
                    traceback.print_exc()
            if exc is None:
                self.log.info('Stage [{}] done'.format(stage.name))
                return
            elif stage != self._stage_zero or not self._auto_reconnect:
                # Only the initial (connection) stage is retried.
                raise exc
            await sleep(self.reconnect_delay)
            self.log.info('retrying to process actions')

    async def _run_actions(self, stage: AmqpStage):
        """Dispatch each action in ``stage`` to its handler coroutine."""
        for action in stage.actions:
            self.log.debug('action: {}'.format(str(action)))
            if action.TYPE == CreateConnection.TYPE:
                await self._connect(action)
            elif action.TYPE == CreateChannel.TYPE:
                await self._create_channel(action)
            elif action.TYPE == DeclareExchange.TYPE:
                await self._declare_exchange(action)
            elif action.TYPE == DeclareQueue.TYPE:
                await self._declare_queue(action)
            elif action.TYPE == BindExchange.TYPE:
                await self._bind_exchange(action)
            elif action.TYPE == BindQueue.TYPE:
                await self._bind_queue(action)
            elif action.TYPE == BindConsumer.TYPE:
                await self._bind_consumer(action)

    def _get_channel(self, number: int):
        return self._channels[number]

    def _set_channel(self, number: int, channel):
        # Registering a channel also creates its (empty) consumer-tag set.
        self._channels[number] = channel
        self._consumers[number] = set()

    def _remove_channel(self, number):
        self._channels.pop(number)
        self._consumers.pop(number)

    def _clear_channels(self):
        """Forget all per-connection state (used after a disconnect)."""
        self._channels = {}
        self._consumers = {}
        self._consumer_queues = {}
        self._queues = {}
        self._exchanges = {}

    def _get_queue(self, channel_num, queue_name):
        """Return a cached ``Queue`` facade, building a passive one if absent.

        NOTE(review): constructs the aio_pika ``Queue`` directly from the
        channel's private attributes (``_channel``, ``_futures``); this is
        tied to the aio_pika version in use -- confirm on upgrades.
        """
        channel = self._get_channel(channel_num)
        if channel_num not in self._queues:
            self._queues[channel_num] = {}
        if queue_name not in self._queues[channel_num]:
            loop = get_event_loop()
            self._queues[channel_num][queue_name] = Queue(
                loop=loop,
                channel=channel._channel,
                future_store=channel._futures.get_child(),
                name=queue_name,
                durable=False,
                exclusive=False,
                auto_delete=False,
                arguments={},
            )
        return self._queues[channel_num][queue_name]

    def _get_exchange(self, channel_num, exchange_name):
        """Return a cached ``Exchange`` facade, building one if absent.

        NOTE(review): like ``_get_queue`` this reaches into aio_pika channel
        internals; the hard-coded ``type='topic'`` only affects the local
        facade, not the broker-side declaration done in _declare_exchange.
        """
        channel = self._get_channel(channel_num)
        if channel_num not in self._exchanges:
            self._exchanges[channel_num] = {}
        if exchange_name not in self._exchanges[channel_num]:
            loop = get_event_loop()
            self._exchanges[channel_num][exchange_name] = Exchange(
                loop=loop,
                channel=channel._channel,
                publish_method=channel._publish,
                future_store=channel._futures.get_child(),
                name=exchange_name,
                type=EXCHANGE_TYPES_MAP['topic'],
                durable=False,
                internal=False,
                auto_delete=False,
                passive=False,
                arguments={},
            )
        return self._exchanges[channel_num][exchange_name]

    async def _connect(self, action: CreateConnection):
        """Open the AMQP connection described by ``action``."""
        self.log.info('starting connection')
        self._conn = await pika_connect(
            host=action.host,
            port=action.port,
            virtualhost=action.vhost,
            login=action.username,
            password=action.password,
        )
        self._conn.add_close_callback(
            lambda *args: ensure_future(self._on_connection_close(*args))
        )

    async def _stop_consuming(self):
        """Cancel every active consumer on every channel."""
        for channel_number, consumer_tags in self._consumers.items():
            # Iterate a copy: _cancel_consumer mutates the tag set.
            for consumer_tag in list(consumer_tags):
                await self._cancel_consumer(channel_number, consumer_tag)

    async def _cancel_consumer(self, channel_number, consumer_tag):
        """Cancel one consumer and drop its bookkeeping entries."""
        queue = self._consumer_queues[consumer_tag]
        await queue.cancel(consumer_tag=consumer_tag)
        self._consumers[channel_number].remove(consumer_tag)
        self._consumer_queues.pop(consumer_tag)

    async def _close_channels(self):
        for channel in self._channels.values():
            await channel.close()

    async def _close_connection(self):
        self._closing = True
        await self._conn.close()
        self._conn = None

    async def _on_connection_close(self, _):
        """Connection-closed callback: reconnect unless closing on purpose."""
        self.log.info('connection closed')
        self._connected = False
        self._clear_channels()
        if not self._closing and self._auto_reconnect:
            await sleep(self.reconnect_delay)
            ensure_future(self._run_stages())

    async def _create_channel(self, action: CreateChannel):
        """Open channel ``action.number`` and register it."""
        self.log.info('creating channel {}'.format(action.number))
        channel = await self._conn.channel(
            action.number,
            publisher_confirms=False,
        )
        self.log.debug('channel {} opened'.format(action.number))
        self._set_channel(action.number, channel)

    async def _declare_queue(self, action: DeclareQueue):
        """Declare a queue on the broker and cache the returned object."""
        self.log.info('declaring queue {}'.format(action.name))
        channel = self._get_channel(action.channel)
        if action.channel not in self._queues:
            self._queues[action.channel] = {}
        self._queues[action.channel][action.name] = await channel \
            .declare_queue(
                name=action.name,
                durable=action.durable,
                exclusive=action.exclusive,
                auto_delete=action.auto_delete,
                arguments=action.props,
            )
        self.log.debug('queue {} declared'.format(action.name))

    async def _declare_exchange(self, action: DeclareExchange):
        """Declare an exchange on the broker and cache the returned object."""
        self.log.info('declaring exchange {}'.format(action.name))
        channel = self._get_channel(action.channel)
        if action.channel not in self._exchanges:
            self._exchanges[action.channel] = {}
        exchange_type = EXCHANGE_TYPES_MAP[action.type]
        self._exchanges[action.channel][action.name] = await channel \
            .declare_exchange(
                name=action.name,
                type=exchange_type,
                durable=action.durable,
                auto_delete=action.auto_delete,
                internal=action.internal,
                arguments=action.props,
            )
        self.log.debug('exchange {} declared'.format(action.name))

    async def _bind_queue(self, action: BindQueue):
        """Bind a queue to an exchange with the action's routing key."""
        self.log.info('binding queue {} to exchange {}'.format(
            action.queue,
            action.exchange,
        ))
        queue = self._get_queue(action.channel, action.queue)
        exchange = self._get_exchange(action.channel, action.exchange)
        await queue.bind(
            exchange=exchange,
            routing_key=action.routing_key,
            arguments=action.props,
        )
        self.log.debug('bound queue {} to exchange {}'.format(
            action.queue,
            action.exchange,
        ))

    async def _bind_exchange(self, action: BindExchange):
        """Bind a source exchange to a destination exchange.

        NOTE(review): passes ``props=`` where ``_bind_queue`` passes
        ``arguments=``; verify against the aio_pika ``Exchange.bind``
        signature -- this may be a latent keyword mismatch upstream.
        """
        self.log.info('binding exchange {} to exchange {}'.format(
            action.src_exchange,
            action.dst_exchange,
        ))
        src_exchange = self._get_exchange(action.channel, action.src_exchange)
        dst_exchange = self._get_exchange(action.channel, action.dst_exchange)
        await dst_exchange.bind(
            src_exchange,
            routing_key=action.routing_key,
            props=action.props,
        )
        self.log.debug('bound exchange {} to exchange {}'.format(
            action.src_exchange,
            action.dst_exchange,
        ))

    async def _bind_consumer(self, action: BindConsumer):
        """Start consuming ``action.queue``, wrapping messages as AmqpMsg."""
        self.log.info('binding consumer to queue {}'.format(
            action.queue,
        ))
        queue = self._get_queue(action.channel, action.queue)
        self._consumers[action.channel].add(action.tag)
        self._consumer_queues[action.tag] = queue

        def consumer(pika_msg: PikaIncomingMessage):
            headers = pika_msg.headers if pika_msg.headers else {}
            # NOTE(review): assumes correlation_id is bytes when present; a
            # message without one would make ``.decode()`` raise -- confirm
            # against the aio_pika version in use.
            msg = AmqpMsg(
                payload=pika_msg.body,
                content_type=pika_msg.content_type,
                encoding=pika_msg.content_encoding,
                exchange=pika_msg.exchange,
                topic=pika_msg.routing_key,
                correlation_id=pika_msg.correlation_id.decode(),
                reply_to=pika_msg.reply_to,
                expiration=pika_msg.expiration,
                headers=headers,
            )
            ensure_future(self._handle_msg(
                action,
                pika_msg,
                msg,
            ))

        await queue.consume(
            callback=consumer,
            no_ack=action.auto_ack,
            exclusive=action.exclusive,
            consumer_tag=action.tag,
            arguments=action.props,
        )

    async def _handle_msg(
        self,
        action: BindConsumer,
        pika_msg: PikaIncomingMessage,
        msg: AmqpMsg,
    ):
        """Run the consumer callback and ack/nack the message accordingly."""
        self.log.info(
            'received msg {} in queue {} '
            'from exchange {} topic {}'.format(
                pika_msg.delivery_tag,
                action.queue,
                msg.exchange,
                msg.topic,
            )
        )
        self.log.debug('msg received: {}'.format(str(msg)))
        result = True
        try:
            result = await action.callback(msg)
            self.log.debug('msg processed')
        except Exception as e:
            self.log.error('an error occurred when processing message')
            result = False
            if self._consumer_error_handlers:
                for handler in self._consumer_error_handlers:
                    handler(e)
            else:
                traceback.print_exc()
        # Manual acknowledgement mode: ack on success, nack on failure.
        if not action.auto_ack and result:
            self.log.debug(
                'sending ack for message {}'
                .format(pika_msg.delivery_tag)
            )
            pika_msg.ack()
        elif not action.auto_ack:
            self.log.debug(
                'sending nack for message {}'
                .format(pika_msg.delivery_tag)
            )
            pika_msg.nack(requeue=action.nack_requeue)
| 34.176887
| 78
| 0.594645
|
874e739fb61126f078616a57faddbd98e2a1e480
| 8,241
|
py
|
Python
|
sdks/python/apache_beam/options/pipeline_options_validator.py
|
bschell/beam
|
5533acff51cf6157d62a63c60eb3f074f1958df5
|
[
"Apache-2.0"
] | 1
|
2018-12-03T09:37:01.000Z
|
2018-12-03T09:37:01.000Z
|
sdks/python/apache_beam/options/pipeline_options_validator.py
|
bschell/beam
|
5533acff51cf6157d62a63c60eb3f074f1958df5
|
[
"Apache-2.0"
] | 2
|
2018-09-09T16:51:47.000Z
|
2018-09-16T15:55:50.000Z
|
sdks/python/apache_beam/options/pipeline_options_validator.py
|
bschell/beam
|
5533acff51cf6157d62a63c60eb3f074f1958df5
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Pipeline options validator.
For internal use only; no backwards-compatibility guarantees.
"""
from __future__ import absolute_import
import re
from builtins import object
from apache_beam.internal import pickler
from apache_beam.options.pipeline_options import DebugOptions
from apache_beam.options.pipeline_options import GoogleCloudOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.options.pipeline_options import StandardOptions
from apache_beam.options.pipeline_options import TestOptions
from apache_beam.options.pipeline_options import TypeOptions
from apache_beam.options.pipeline_options import WorkerOptions
class PipelineOptionsValidator(object):
  """Validates PipelineOptions.

  Walks the known PipelineOptions subclasses, invoking::

    validate(validator)

  on each one that defines it, and returns the combined list of validation
  error messages.
  """
  # Validator will call validate on these subclasses of PipelineOptions
  OPTIONS = [DebugOptions, GoogleCloudOptions, SetupOptions, StandardOptions,
             TypeOptions, WorkerOptions, TestOptions]
  # Possible validation errors.
  ERR_MISSING_OPTION = 'Missing required option: %s.'
  ERR_MISSING_GCS_PATH = 'Missing GCS path option: %s.'
  ERR_INVALID_GCS_PATH = 'Invalid GCS path (%s), given for the option: %s.'
  ERR_INVALID_GCS_BUCKET = (
      'Invalid GCS bucket (%s), given for the option: %s. See '
      'https://developers.google.com/storage/docs/bucketnaming '
      'for more details.')
  ERR_INVALID_GCS_OBJECT = 'Invalid GCS object (%s), given for the option: %s.'
  ERR_INVALID_JOB_NAME = (
      'Invalid job_name (%s); the name must consist of only the characters '
      '[-a-z0-9], starting with a letter and ending with a letter or number')
  ERR_INVALID_PROJECT_NUMBER = (
      'Invalid Project ID (%s). Please make sure you specified the Project ID, '
      'not project number.')
  ERR_INVALID_PROJECT_ID = (
      'Invalid Project ID (%s). Please make sure you specified the Project ID, '
      'not project description.')
  ERR_INVALID_NOT_POSITIVE = ('Invalid value (%s) for option: %s. Value needs '
                              'to be positive.')
  ERR_INVALID_TEST_MATCHER_TYPE = (
      'Invalid value (%s) for option: %s. Please extend your matcher object '
      'from hamcrest.core.base_matcher.BaseMatcher.')
  ERR_INVALID_TEST_MATCHER_UNPICKLABLE = (
      'Invalid value (%s) for option: %s. Please make sure the test matcher '
      'is unpicklable.')
  # GCS path specific patterns.
  GCS_URI = '(?P<SCHEME>[^:]+)://(?P<BUCKET>[^/]+)(/(?P<OBJECT>.*))?'
  GCS_BUCKET = '^[a-z0-9][-_a-z0-9.]+[a-z0-9]$'
  GCS_SCHEME = 'gs'
  # GoogleCloudOptions specific patterns.
  JOB_PATTERN = '[a-z]([-a-z0-9]*[a-z0-9])?'
  PROJECT_ID_PATTERN = '[a-z][-a-z0-9:.]+[a-z0-9]'
  PROJECT_NUMBER_PATTERN = '[0-9]*'
  ENDPOINT_PATTERN = r'https://[\S]*googleapis\.com[/]?'

  def __init__(self, options, runner):
    self.options = options
    self.runner = runner

  def validate(self):
    """Runs validate on every option class that defines it.

    Returns:
      The combined list of validation error messages from all option
      subclasses in OPTIONS.
    """
    collected = []
    for options_cls in self.OPTIONS:
      # Only classes that define their own validate take part.
      if 'validate' in vars(options_cls):
        view = self.options.view_as(options_cls)
        collected.extend(view.validate(self))
    return collected

  def is_service_runner(self):
    """True if pipeline will execute on the Google Cloud Dataflow service."""
    if self.runner is None:
      return False
    if type(self.runner).__name__ not in ('DataflowRunner',
                                          'TestDataflowRunner'):
      return False
    endpoint = self.options.view_as(GoogleCloudOptions).dataflow_endpoint
    if endpoint is None:
      return False
    return self.is_full_string_match(self.ENDPOINT_PATTERN, endpoint)

  def is_full_string_match(self, pattern, string):
    """Returns True when pattern matches string in its entirety."""
    anchored = '^%s$' % pattern
    return re.search(anchored, string) is not None

  def _validate_error(self, err, *args):
    """Wraps one formatted error message in a single-element list."""
    return [err % args]

  def validate_gcs_path(self, view, arg_name):
    """Validates that arg_name on view is a well-formed gs://bucket/object URI."""
    path = getattr(view, arg_name, None)
    if path is None:
      return self._validate_error(self.ERR_MISSING_GCS_PATH, arg_name)
    parsed = re.match(self.GCS_URI, path, re.DOTALL)
    if parsed is None:
      return self._validate_error(self.ERR_INVALID_GCS_PATH, path, arg_name)
    scheme = parsed.group('SCHEME')
    bucket = parsed.group('BUCKET')
    gcs_object = parsed.group('OBJECT')
    if scheme is None or scheme.lower() != self.GCS_SCHEME or bucket is None:
      return self._validate_error(self.ERR_INVALID_GCS_PATH, path, arg_name)
    if not self.is_full_string_match(self.GCS_BUCKET, bucket):
      return self._validate_error(self.ERR_INVALID_GCS_BUCKET, path, arg_name)
    if gcs_object is None or '\n' in gcs_object or '\r' in gcs_object:
      return self._validate_error(self.ERR_INVALID_GCS_OBJECT, path, arg_name)
    return []

  def validate_cloud_options(self, view):
    """Validates the job_name and project arguments."""
    errors = []
    job_name = view.job_name
    if job_name and not self.is_full_string_match(self.JOB_PATTERN, job_name):
      errors += self._validate_error(self.ERR_INVALID_JOB_NAME, job_name)
    project = view.project
    if project is None:
      errors += self._validate_error(self.ERR_MISSING_OPTION, 'project')
    elif self.is_full_string_match(self.PROJECT_NUMBER_PATTERN, project):
      # All-digit values look like a project number, not a project ID.
      errors += self._validate_error(self.ERR_INVALID_PROJECT_NUMBER, project)
    elif not self.is_full_string_match(self.PROJECT_ID_PATTERN, project):
      errors += self._validate_error(self.ERR_INVALID_PROJECT_ID, project)
    return errors

  def validate_optional_argument_positive(self, view, arg_name):
    """Validates that an optional argument (if set) has a positive value."""
    value = getattr(view, arg_name, None)
    if value is None or int(value) > 0:
      return []
    return self._validate_error(self.ERR_INVALID_NOT_POSITIVE, value, arg_name)

  def validate_test_matcher(self, view, arg_name):
    """Validates the on_success_matcher argument, if one is set.

    The matcher must unpickle cleanly and be an instance of
    `hamcrest.core.base_matcher.BaseMatcher`.
    """
    # This is a test only method and requires hamcrest
    from hamcrest.core.base_matcher import BaseMatcher
    pickled_matcher = view.on_success_matcher
    try:
      matcher = pickler.loads(pickled_matcher)
      if isinstance(matcher, BaseMatcher):
        return []
      return self._validate_error(
          self.ERR_INVALID_TEST_MATCHER_TYPE, matcher, arg_name)
    except:  # pylint: disable=bare-except
      return self._validate_error(
          self.ERR_INVALID_TEST_MATCHER_UNPICKLABLE,
          pickled_matcher,
          arg_name)
| 40.2
| 80
| 0.703313
|
3f391b6b3d53201cbc52775a53246a1d23c0b515
| 3,659
|
py
|
Python
|
dnacentersdk/models/validators/v2_2_2_3/jsd_dfda5beca4cc5437876bff366493ebf0.py
|
oboehmer/dnacentersdk
|
25c4e99900640deee91a56aa886874d9cb0ca960
|
[
"MIT"
] | 32
|
2019-09-05T05:16:56.000Z
|
2022-03-22T09:50:38.000Z
|
dnacentersdk/models/validators/v2_2_2_3/jsd_dfda5beca4cc5437876bff366493ebf0.py
|
oboehmer/dnacentersdk
|
25c4e99900640deee91a56aa886874d9cb0ca960
|
[
"MIT"
] | 35
|
2019-09-07T18:58:54.000Z
|
2022-03-24T19:29:36.000Z
|
dnacentersdk/models/validators/v2_2_2_3/jsd_dfda5beca4cc5437876bff366493ebf0.py
|
oboehmer/dnacentersdk
|
25c4e99900640deee91a56aa886874d9cb0ca960
|
[
"MIT"
] | 18
|
2019-09-09T11:07:21.000Z
|
2022-03-25T08:49:59.000Z
|
# -*- coding: utf-8 -*-
"""Cisco DNA Center UpdateEventSubscriptions data model.
Copyright (c) 2019-2021 Cisco Systems.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import fastjsonschema
import json
from dnacentersdk.exceptions import MalformedRequest
from builtins import *
class JSONSchemaValidatorDfda5BecA4Cc5437876BFf366493Ebf0(object):
    """UpdateEventSubscriptions request schema definition."""
    # Auto-generated validator class; the hex suffix in the name encodes the
    # DNA Center API operation id. The JSON schema is compiled once at
    # construction time and reused for every validate() call.
    def __init__(self):
        """Compile the UpdateEventSubscriptions request JSON schema."""
        super(JSONSchemaValidatorDfda5BecA4Cc5437876BFf366493Ebf0, self).__init__()
        # The schema literal is indented to match the surrounding code; the
        # trailing .replace() strips the leading 16-space indentation from
        # every line so json.loads receives clean JSON. Do not re-indent the
        # string literal -- the replace depends on the exact indentation.
        self._validator = fastjsonschema.compile(json.loads(
            '''{
                "$schema": "http://json-schema.org/draft-04/schema#",
                "items": {
                "properties": {
                "description":
                 {
                "type": "string"
                },
                "filter": {
                "properties": {
                "eventIds": {
                "items": {
                "type": "string"
                },
                "type": "array"
                }
                },
                "type": "object"
                },
                "name": {
                "type": "string"
                },
                "subscriptionEndpoints": {
                "items": {
                "properties": {
                "instanceId": {
                "type": "string"
                },
                "subscriptionDetails": {
                "properties": {
                "connectorType": {
                "type": "string"
                },
                "method": {
                "type": "string"
                },
                "name": {
                "type": "string"
                },
                "url": {
                "type": "string"
                }
                },
                "type": "object"
                }
                },
                "type": "object"
                },
                "type": "array"
                },
                "subscriptionId": {
                "type": "string"
                },
                "version": {
                "type": "string"
                }
                },
                "type": "object"
                },
                "type": "array"
            }'''.replace("\n" + ' ' * 16, '')
        ))
    def validate(self, request):
        """Validate ``request`` against the compiled schema.

        Raises:
            MalformedRequest: If the request body does not conform to the
                UpdateEventSubscriptions schema.
        """
        try:
            self._validator(request)
        except fastjsonschema.exceptions.JsonSchemaException as e:
            raise MalformedRequest(
                '{} is invalid. Reason: {}'.format(request, e.message)
            )
| 31.817391
| 83
| 0.510249
|
c160b0400c1e37fad2d7eec6c52e1af023aab265
| 219,087
|
py
|
Python
|
venv/Lib/site-packages/emoji/unicode_codes/it.py
|
miradouro/CursoEmVideo-Python
|
cc7b05a9a4aad8e6ef3b29453d83370094d75e41
|
[
"MIT"
] | 13
|
2020-10-07T04:21:24.000Z
|
2022-01-31T20:36:55.000Z
|
venv/Lib/site-packages/emoji/unicode_codes/it.py
|
miradouro/CursoEmVideo-Python
|
cc7b05a9a4aad8e6ef3b29453d83370094d75e41
|
[
"MIT"
] | 1
|
2021-06-25T15:29:11.000Z
|
2021-06-25T15:29:11.000Z
|
venv/Lib/site-packages/emoji/unicode_codes/it.py
|
miradouro/CursoEmVideo-Python
|
cc7b05a9a4aad8e6ef3b29453d83370094d75e41
|
[
"MIT"
] | 1
|
2021-06-02T20:26:57.000Z
|
2021-06-02T20:26:57.000Z
|
# -*- coding: utf-8 -*-
"""Data literal storing emoji Italian names and Unicode codes."""
__all__ = ['EMOJI_UNICODE_ITALIAN', 'UNICODE_EMOJI_ITALIAN',]
EMOJI_UNICODE_ITALIAN = {
u':faccina_con_un_gran_sorriso:': u'\U0001F600',
u':faccina_con_un_gran_sorriso_e_occhi_spalancati:': u'\U0001F603',
u':faccina_con_sorriso_e_occhi_sorridenti:': u'\U0001F604',
u':faccina_raggiante_con_occhi_felici:': u'\U0001F601',
u':sorriso_a_bocca_aperta_con_occhi_chiusi:': u'\U0001F606',
u':faccina_con_un_gran_sorriso_e_goccia_di_sudore:': u'\U0001F605',
u':ridere_a_crepapelle:': u'\U0001F923',
u':faccina_con_lacrime_di_gioia:': u'\U0001F602',
u':faccina_con_sorriso_accennato:': u'\U0001F642',
u':faccina_sottosopra:': u'\U0001F643',
u':faccina_che_fa_l’occhiolino:': u'\U0001F609',
u':faccina_con_occhi_sorridenti:': u'\U0001F60A',
u':faccina_con_sorriso_e_aureola:': u'\U0001F607',
u':faccina_con_cuoricini:': u'\U0001F970',
u':faccina_con_sorriso_e_occhi_a_cuore:': u'\U0001F60D',
u':colpo_di_fulmine:': u'\U0001F929',
u':faccina_che_manda_un_bacio:': u'\U0001F618',
u':faccina_che_bacia:': u'\U0001F617',
u':faccina_sorridente:': u'\U0000263A\U0000FE0F',
u':faccina_che_bacia_con_occhi_chiusi:': u'\U0001F61A',
u':faccina_che_bacia_con_occhi_sorridenti:': u'\U0001F619',
u':faccina_sorridente_con_lacrima:': u'\U0001F972',
u':faccina_che_si_lecca_i_baffi:': u'\U0001F60B',
u':faccina_che_mostra_la_lingua:': u'\U0001F61B',
u':faccina_che_fa_l’occhiolino_e_mostra_la_lingua:': u'\U0001F61C',
u':faccina_impazzita:': u'\U0001F92A',
u':faccina_con_un_gran_sorriso_che_mostra_la_lingua:': u'\U0001F61D',
u':faccina_avida_di_denaro:': u'\U0001F911',
u':faccina_che_abbraccia:': u'\U0001F917',
u':faccina_con_mano_sulla_bocca:': u'\U0001F92D',
u':faccina_che_zittisce:': u'\U0001F92B',
u':faccina_concentrata:': u'\U0001F914',
u':faccina_con_bocca_con_cerniera:': u'\U0001F910',
u':faccia_con_sopracciglia_alzate:': u'\U0001F928',
u':faccina_neutra:': u'\U0001F610',
u':faccina_inespressiva:': u'\U0001F611',
u':faccina_senza_bocca:': u'\U0001F636',
u':faccina_con_sorrisetto:': u'\U0001F60F',
u':faccina_contrariata:': u'\U0001F612',
u':faccina_con_occhi_al_cielo:': u'\U0001F644',
u':faccina_con_smorfia:': u'\U0001F62C',
u':faccina_bugiarda:': u'\U0001F925',
u':faccina_sollevata:': u'\U0001F60C',
u':faccina_pensierosa:': u'\U0001F614',
u':faccina_assonnata:': u'\U0001F62A',
u':faccina_che_sbava:': u'\U0001F924',
u':faccina_che_dorme:': u'\U0001F634',
u':faccina_con_mascherina:': u'\U0001F637',
u':faccina_con_termometro:': u'\U0001F912',
u':faccina_con_la_testa_bendata:': u'\U0001F915',
u':faccina_nauseata:': u'\U0001F922',
u':faccina_che_vomita:': u'\U0001F92E',
u':faccina_che_starnutisce:': u'\U0001F927',
u':faccina_accaldata:': u'\U0001F975',
u':faccina_congelata:': u'\U0001F976',
u':faccina_stordita:': u'\U0001F974',
u':faccina_frastornata:': u'\U0001F635',
u':testa_che_esplode:': u'\U0001F92F',
u':faccina_con_cappello_da_cowboy:': u'\U0001F920',
u':faccina_che_festeggia:': u'\U0001F973',
u':faccina_travestita:': u'\U0001F978',
u':faccina_con_sorriso_e_occhiali_da_sole:': u'\U0001F60E',
u':faccina_nerd:': u'\U0001F913',
u':faccina_con_monocolo:': u'\U0001F9D0',
u':faccina_confusa:': u'\U0001F615',
u':faccina_preoccupata:': u'\U0001F61F',
u':faccina_leggermente_imbronciata:': u'\U0001F641',
u':faccina_imbronciata:': u'\U00002639\U0000FE0F',
u':faccina_con_bocca_aperta:': u'\U0001F62E',
u':faccina_sorpresa:': u'\U0001F62F',
u':faccina_stupita:': u'\U0001F632',
u':faccina_imbarazzata:': u'\U0001F633',
u':faccina_supplichevole:': u'\U0001F97A',
u':faccina_imbronciata_con_bocca_aperta:': u'\U0001F626',
u':faccina_angosciata:': u'\U0001F627',
u':faccina_impaurita:': u'\U0001F628',
u':faccina_sudata_in_ansia:': u'\U0001F630',
u':faccina_delusa_ma_sollevata:': u'\U0001F625',
u':faccina_che_piange:': u'\U0001F622',
u':faccina_disperata:': u'\U0001F62D',
u':faccina_terrorizzata:': u'\U0001F631',
u':faccina_frustrata:': u'\U0001F616',
u':faccina_perseverante:': u'\U0001F623',
u':faccina_delusa:': u'\U0001F61E',
u':faccina_sudata:': u'\U0001F613',
u':faccina_esausta:': u'\U0001F629',
u':faccina_stanca:': u'\U0001F62B',
u':faccina_che_sbadiglia:': u'\U0001F971',
u':faccina_che_sbuffa:': u'\U0001F624',
u':faccina_accigliata:': u'\U0001F621',
u':faccina_arrabbiata:': u'\U0001F620',
u':faccina_con_simboli_sulla_bocca:': u'\U0001F92C',
u':faccina_con_sorriso_e_corna:': u'\U0001F608',
u':faccina_arrabbiata_con_corna:': u'\U0001F47F',
u':teschio:': u'\U0001F480',
u':teschio_con_ossa_incrociate:': u'\U00002620\U0000FE0F',
u':cacca:': u'\U0001F4A9',
u':faccina_pagliaccio:': u'\U0001F921',
u':orco:': u'\U0001F479',
u':goblin:': u'\U0001F47A',
u':fantasma:': u'\U0001F47B',
u':alieno:': u'\U0001F47D',
u':mostro_alieno:': u'\U0001F47E',
u':faccina_di_robot:': u'\U0001F916',
u':gatto_che_sorride:': u'\U0001F63A',
u':gatto_che_sogghigna:': u'\U0001F638',
u':gatto_con_lacrime_di_gioia:': u'\U0001F639',
u':gatto_innamorato:': u'\U0001F63B',
u':gatto_con_sorriso_sarcastico:': u'\U0001F63C',
u':gatto_che_manda_baci:': u'\U0001F63D',
u':gatto_esterrefatto:': u'\U0001F640',
u':gatto_che_piange:': u'\U0001F63F',
u':gatto_imbronciato:': u'\U0001F63E',
u':non_vedo:': u'\U0001F648',
u':non_sento:': u'\U0001F649',
u':non_parlo:': u'\U0001F64A',
u':impronta_della_bocca:': u'\U0001F48B',
u':lettera_d’amore:': u'\U0001F48C',
u':cuore_con_freccia:': u'\U0001F498',
u':cuore_con_fiocco:': u'\U0001F49D',
u':cuore_che_luccica:': u'\U0001F496',
u':cuore_che_cresce:': u'\U0001F497',
u':cuore_che_batte:': u'\U0001F493',
u':cuori_che_girano:': u'\U0001F49E',
u':due_cuori:': u'\U0001F495',
u':decorazione_con_cuore:': u'\U0001F49F',
u':punto_esclamativo_a_cuore:': u'\U00002763\U0000FE0F',
u':cuore_infranto:': u'\U0001F494',
u':cuore_rosso:': u'\U00002764\U0000FE0F',
u':cuore_arancione:': u'\U0001F9E1',
u':cuore_giallo:': u'\U0001F49B',
u':cuore_verde:': u'\U0001F49A',
u':cuore_azzurro:': u'\U0001F499',
u':cuore_viola:': u'\U0001F49C',
u':cuore_marrone:': u'\U0001F90E',
u':cuore_nero:': u'\U0001F5A4',
u':cuore_bianco:': u'\U0001F90D',
u':100_punti:': u'\U0001F4AF',
u':rabbia:': u'\U0001F4A2',
u':collisione:': u'\U0001F4A5',
u':stella_con_scia:': u'\U0001F4AB',
u':gocce_di_sudore:': u'\U0001F4A6',
u':nuvola_di_polvere:': u'\U0001F4A8',
u':buco:': u'\U0001F573\U0000FE0F',
u':bomba:': u'\U0001F4A3',
u':fumetto:': u'\U0001F4AC',
u':occhio_nel_fumetto:': u'\U0001F441\U0000FE0F\U0000200D\U0001F5E8\U0000FE0F',
u':nuvoletta_nera:': u'\U0001F5E8\U0000FE0F',
u':nuvoletta_rabbia:': u'\U0001F5EF\U0000FE0F',
u':nuvoletta:': u'\U0001F4AD',
u':sonno:': u'\U0001F4A4',
u':mano_che_saluta:': u'\U0001F44B',
u':mano_che_saluta_carnagione_chiara:': u'\U0001F44B\U0001F3FB',
u':mano_che_saluta_carnagione_abbastanza_chiara:': u'\U0001F44B\U0001F3FC',
u':mano_che_saluta_carnagione_olivastra:': u'\U0001F44B\U0001F3FD',
u':mano_che_saluta_carnagione_abbastanza_scura:': u'\U0001F44B\U0001F3FE',
u':mano_che_saluta_carnagione_scura:': u'\U0001F44B\U0001F3FF',
u':dorso_mano_alzata:': u'\U0001F91A',
u':dorso_mano_alzata_carnagione_chiara:': u'\U0001F91A\U0001F3FB',
u':dorso_mano_alzata_carnagione_abbastanza_chiara:': u'\U0001F91A\U0001F3FC',
u':dorso_mano_alzata_carnagione_olivastra:': u'\U0001F91A\U0001F3FD',
u':dorso_mano_alzata_carnagione_abbastanza_scura:': u'\U0001F91A\U0001F3FE',
u':dorso_mano_alzata_carnagione_scura:': u'\U0001F91A\U0001F3FF',
u':mano_aperta:': u'\U0001F590\U0000FE0F',
u':mano_aperta_carnagione_chiara:': u'\U0001F590\U0001F3FB',
u':mano_aperta_carnagione_abbastanza_chiara:': u'\U0001F590\U0001F3FC',
u':mano_aperta_carnagione_olivastra:': u'\U0001F590\U0001F3FD',
u':mano_aperta_carnagione_abbastanza_scura:': u'\U0001F590\U0001F3FE',
u':mano_aperta_carnagione_scura:': u'\U0001F590\U0001F3FF',
u':mano_alzata:': u'\U0000270B',
u':mano_alzata_carnagione_chiara:': u'\U0000270B\U0001F3FB',
u':mano_alzata_carnagione_abbastanza_chiara:': u'\U0000270B\U0001F3FC',
u':mano_alzata_carnagione_olivastra:': u'\U0000270B\U0001F3FD',
u':mano_alzata_carnagione_abbastanza_scura:': u'\U0000270B\U0001F3FE',
u':mano_alzata_carnagione_scura:': u'\U0000270B\U0001F3FF',
u':saluto_vulcaniano:': u'\U0001F596',
u':saluto_vulcaniano_carnagione_chiara:': u'\U0001F596\U0001F3FB',
u':saluto_vulcaniano_carnagione_abbastanza_chiara:': u'\U0001F596\U0001F3FC',
u':saluto_vulcaniano_carnagione_olivastra:': u'\U0001F596\U0001F3FD',
u':saluto_vulcaniano_carnagione_abbastanza_scura:': u'\U0001F596\U0001F3FE',
u':saluto_vulcaniano_carnagione_scura:': u'\U0001F596\U0001F3FF',
u':mano_che_fa_ok:': u'\U0001F44C',
u':mano_che_fa_ok_carnagione_chiara:': u'\U0001F44C\U0001F3FB',
u':mano_che_fa_ok_carnagione_abbastanza_chiara:': u'\U0001F44C\U0001F3FC',
u':mano_che_fa_ok_carnagione_olivastra:': u'\U0001F44C\U0001F3FD',
u':mano_che_fa_ok_carnagione_abbastanza_scura:': u'\U0001F44C\U0001F3FE',
u':mano_che_fa_ok_carnagione_scura:': u'\U0001F44C\U0001F3FF',
u':mano_a_pigna:': u'\U0001F90C',
u':mano_a_pigna_carnagione_chiara:': u'\U0001F90C\U0001F3FB',
u':mano_a_pigna_carnagione_abbastanza_chiara:': u'\U0001F90C\U0001F3FC',
u':mano_a_pigna_carnagione_olivastra:': u'\U0001F90C\U0001F3FD',
u':mano_a_pigna_carnagione_abbastanza_scura:': u'\U0001F90C\U0001F3FE',
u':mano_a_pigna_carnagione_scura:': u'\U0001F90C\U0001F3FF',
u':mano_che_fa_il_gesto_del_pizzico:': u'\U0001F90F',
u':mano_che_fa_il_gesto_del_pizzico_carnagione_chiara:': u'\U0001F90F\U0001F3FB',
u':mano_che_fa_il_gesto_del_pizzico_carnagione_abbastanza_chiara:': u'\U0001F90F\U0001F3FC',
u':mano_che_fa_il_gesto_del_pizzico_carnagione_olivastra:': u'\U0001F90F\U0001F3FD',
u':mano_che_fa_il_gesto_del_pizzico_carnagione_abbastanza_scura:': u'\U0001F90F\U0001F3FE',
u':mano_che_fa_il_gesto_del_pizzico_carnagione_scura:': u'\U0001F90F\U0001F3FF',
u':vittoria:': u'\U0000270C\U0000FE0F',
u':vittoria_carnagione_chiara:': u'\U0000270C\U0001F3FB',
u':vittoria_carnagione_abbastanza_chiara:': u'\U0000270C\U0001F3FC',
u':vittoria_carnagione_olivastra:': u'\U0000270C\U0001F3FD',
u':vittoria_carnagione_abbastanza_scura:': u'\U0000270C\U0001F3FE',
u':vittoria_carnagione_scura:': u'\U0000270C\U0001F3FF',
u':dita_incrociate:': u'\U0001F91E',
u':dita_incrociate_carnagione_chiara:': u'\U0001F91E\U0001F3FB',
u':dita_incrociate_carnagione_abbastanza_chiara:': u'\U0001F91E\U0001F3FC',
u':dita_incrociate_carnagione_olivastra:': u'\U0001F91E\U0001F3FD',
u':dita_incrociate_carnagione_abbastanza_scura:': u'\U0001F91E\U0001F3FE',
u':dita_incrociate_carnagione_scura:': u'\U0001F91E\U0001F3FF',
u':gesto_ti_amo:': u'\U0001F91F',
u':gesto_ti_amo_carnagione_chiara:': u'\U0001F91F\U0001F3FB',
u':gesto_ti_amo_carnagione_abbastanza_chiara:': u'\U0001F91F\U0001F3FC',
u':gesto_ti_amo_carnagione_olivastra:': u'\U0001F91F\U0001F3FD',
u':gesto_ti_amo_carnagione_abbastanza_scura:': u'\U0001F91F\U0001F3FE',
u':gesto_ti_amo_carnagione_scura:': u'\U0001F91F\U0001F3FF',
u':segno_delle_corna:': u'\U0001F918',
u':segno_delle_corna_carnagione_chiara:': u'\U0001F918\U0001F3FB',
u':segno_delle_corna_carnagione_abbastanza_chiara:': u'\U0001F918\U0001F3FC',
u':segno_delle_corna_carnagione_olivastra:': u'\U0001F918\U0001F3FD',
u':segno_delle_corna_carnagione_abbastanza_scura:': u'\U0001F918\U0001F3FE',
u':segno_delle_corna_carnagione_scura:': u'\U0001F918\U0001F3FF',
u':mano_con_gesto_di_chiamata:': u'\U0001F919',
u':mano_con_gesto_di_chiamata_carnagione_chiara:': u'\U0001F919\U0001F3FB',
u':mano_con_gesto_di_chiamata_carnagione_abbastanza_chiara:': u'\U0001F919\U0001F3FC',
u':mano_con_gesto_di_chiamata_carnagione_olivastra:': u'\U0001F919\U0001F3FD',
u':mano_con_gesto_di_chiamata_carnagione_abbastanza_scura:': u'\U0001F919\U0001F3FE',
u':mano_con_gesto_di_chiamata_carnagione_scura:': u'\U0001F919\U0001F3FF',
u':indice_verso_sinistra:': u'\U0001F448',
u':indice_verso_sinistra_carnagione_chiara:': u'\U0001F448\U0001F3FB',
u':indice_verso_sinistra_carnagione_abbastanza_chiara:': u'\U0001F448\U0001F3FC',
u':indice_verso_sinistra_carnagione_olivastra:': u'\U0001F448\U0001F3FD',
u':indice_verso_sinistra_carnagione_abbastanza_scura:': u'\U0001F448\U0001F3FE',
u':indice_verso_sinistra_carnagione_scura:': u'\U0001F448\U0001F3FF',
u':indice_verso_destra:': u'\U0001F449',
u':indice_verso_destra_carnagione_chiara:': u'\U0001F449\U0001F3FB',
u':indice_verso_destra_carnagione_abbastanza_chiara:': u'\U0001F449\U0001F3FC',
u':indice_verso_destra_carnagione_olivastra:': u'\U0001F449\U0001F3FD',
u':indice_verso_destra_carnagione_abbastanza_scura:': u'\U0001F449\U0001F3FE',
u':indice_verso_destra_carnagione_scura:': u'\U0001F449\U0001F3FF',
u':indice_alzato:': u'\U0001F446',
u':indice_alzato_carnagione_chiara:': u'\U0001F446\U0001F3FB',
u':indice_alzato_carnagione_abbastanza_chiara:': u'\U0001F446\U0001F3FC',
u':indice_alzato_carnagione_olivastra:': u'\U0001F446\U0001F3FD',
u':indice_alzato_carnagione_abbastanza_scura:': u'\U0001F446\U0001F3FE',
u':indice_alzato_carnagione_scura:': u'\U0001F446\U0001F3FF',
u':dito_medio:': u'\U0001F595',
u':dito_medio_carnagione_chiara:': u'\U0001F595\U0001F3FB',
u':dito_medio_carnagione_abbastanza_chiara:': u'\U0001F595\U0001F3FC',
u':dito_medio_carnagione_olivastra:': u'\U0001F595\U0001F3FD',
u':dito_medio_carnagione_abbastanza_scura:': u'\U0001F595\U0001F3FE',
u':dito_medio_carnagione_scura:': u'\U0001F595\U0001F3FF',
u':indice_abbassato:': u'\U0001F447',
u':indice_abbassato_carnagione_chiara:': u'\U0001F447\U0001F3FB',
u':indice_abbassato_carnagione_abbastanza_chiara:': u'\U0001F447\U0001F3FC',
u':indice_abbassato_carnagione_olivastra:': u'\U0001F447\U0001F3FD',
u':indice_abbassato_carnagione_abbastanza_scura:': u'\U0001F447\U0001F3FE',
u':indice_abbassato_carnagione_scura:': u'\U0001F447\U0001F3FF',
u':indice_verso_l’alto:': u'\U0000261D\U0000FE0F',
u':indice_verso_l’alto_carnagione_chiara:': u'\U0000261D\U0001F3FB',
u':indice_verso_l’alto_carnagione_abbastanza_chiara:': u'\U0000261D\U0001F3FC',
u':indice_verso_l’alto_carnagione_olivastra:': u'\U0000261D\U0001F3FD',
u':indice_verso_l’alto_carnagione_abbastanza_scura:': u'\U0000261D\U0001F3FE',
u':indice_verso_l’alto_carnagione_scura:': u'\U0000261D\U0001F3FF',
u':pollice_in_su:': u'\U0001F44D',
u':pollice_in_su_carnagione_chiara:': u'\U0001F44D\U0001F3FB',
u':pollice_in_su_carnagione_abbastanza_chiara:': u'\U0001F44D\U0001F3FC',
u':pollice_in_su_carnagione_olivastra:': u'\U0001F44D\U0001F3FD',
u':pollice_in_su_carnagione_abbastanza_scura:': u'\U0001F44D\U0001F3FE',
u':pollice_in_su_carnagione_scura:': u'\U0001F44D\U0001F3FF',
u':pollice_verso:': u'\U0001F44E',
u':pollice_verso_carnagione_chiara:': u'\U0001F44E\U0001F3FB',
u':pollice_verso_carnagione_abbastanza_chiara:': u'\U0001F44E\U0001F3FC',
u':pollice_verso_carnagione_olivastra:': u'\U0001F44E\U0001F3FD',
u':pollice_verso_carnagione_abbastanza_scura:': u'\U0001F44E\U0001F3FE',
u':pollice_verso_carnagione_scura:': u'\U0001F44E\U0001F3FF',
u':pugno:': u'\U0000270A',
u':pugno_carnagione_chiara:': u'\U0000270A\U0001F3FB',
u':pugno_carnagione_abbastanza_chiara:': u'\U0000270A\U0001F3FC',
u':pugno_carnagione_olivastra:': u'\U0000270A\U0001F3FD',
u':pugno_carnagione_abbastanza_scura:': u'\U0000270A\U0001F3FE',
u':pugno_carnagione_scura:': u'\U0000270A\U0001F3FF',
u':pugno_chiuso:': u'\U0001F44A',
u':pugno_chiuso_carnagione_chiara:': u'\U0001F44A\U0001F3FB',
u':pugno_chiuso_carnagione_abbastanza_chiara:': u'\U0001F44A\U0001F3FC',
u':pugno_chiuso_carnagione_olivastra:': u'\U0001F44A\U0001F3FD',
u':pugno_chiuso_carnagione_abbastanza_scura:': u'\U0001F44A\U0001F3FE',
u':pugno_chiuso_carnagione_scura:': u'\U0001F44A\U0001F3FF',
u':pugno_a_sinistra:': u'\U0001F91B',
u':pugno_a_sinistra_carnagione_chiara:': u'\U0001F91B\U0001F3FB',
u':pugno_a_sinistra_carnagione_abbastanza_chiara:': u'\U0001F91B\U0001F3FC',
u':pugno_a_sinistra_carnagione_olivastra:': u'\U0001F91B\U0001F3FD',
u':pugno_a_sinistra_carnagione_abbastanza_scura:': u'\U0001F91B\U0001F3FE',
u':pugno_a_sinistra_carnagione_scura:': u'\U0001F91B\U0001F3FF',
u':pugno_a_destra:': u'\U0001F91C',
u':pugno_a_destra_carnagione_chiara:': u'\U0001F91C\U0001F3FB',
u':pugno_a_destra_carnagione_abbastanza_chiara:': u'\U0001F91C\U0001F3FC',
u':pugno_a_destra_carnagione_olivastra:': u'\U0001F91C\U0001F3FD',
u':pugno_a_destra_carnagione_abbastanza_scura:': u'\U0001F91C\U0001F3FE',
u':pugno_a_destra_carnagione_scura:': u'\U0001F91C\U0001F3FF',
u':mani_che_applaudono:': u'\U0001F44F',
u':mani_che_applaudono_carnagione_chiara:': u'\U0001F44F\U0001F3FB',
u':mani_che_applaudono_carnagione_abbastanza_chiara:': u'\U0001F44F\U0001F3FC',
u':mani_che_applaudono_carnagione_olivastra:': u'\U0001F44F\U0001F3FD',
u':mani_che_applaudono_carnagione_abbastanza_scura:': u'\U0001F44F\U0001F3FE',
u':mani_che_applaudono_carnagione_scura:': u'\U0001F44F\U0001F3FF',
u':mani_alzate:': u'\U0001F64C',
u':mani_alzate_carnagione_chiara:': u'\U0001F64C\U0001F3FB',
u':mani_alzate_carnagione_abbastanza_chiara:': u'\U0001F64C\U0001F3FC',
u':mani_alzate_carnagione_olivastra:': u'\U0001F64C\U0001F3FD',
u':mani_alzate_carnagione_abbastanza_scura:': u'\U0001F64C\U0001F3FE',
u':mani_alzate_carnagione_scura:': u'\U0001F64C\U0001F3FF',
u':mani_aperte:': u'\U0001F450',
u':mani_aperte_carnagione_chiara:': u'\U0001F450\U0001F3FB',
u':mani_aperte_carnagione_abbastanza_chiara:': u'\U0001F450\U0001F3FC',
u':mani_aperte_carnagione_olivastra:': u'\U0001F450\U0001F3FD',
u':mani_aperte_carnagione_abbastanza_scura:': u'\U0001F450\U0001F3FE',
u':mani_aperte_carnagione_scura:': u'\U0001F450\U0001F3FF',
u':mani_unite_in_alto:': u'\U0001F932',
u':mani_unite_in_alto_carnagione_chiara:': u'\U0001F932\U0001F3FB',
u':mani_unite_in_alto_carnagione_abbastanza_chiara:': u'\U0001F932\U0001F3FC',
u':mani_unite_in_alto_carnagione_olivastra:': u'\U0001F932\U0001F3FD',
u':mani_unite_in_alto_carnagione_abbastanza_scura:': u'\U0001F932\U0001F3FE',
u':mani_unite_in_alto_carnagione_scura:': u'\U0001F932\U0001F3FF',
u':stretta_di_mano:': u'\U0001F91D',
u':mani_giunte:': u'\U0001F64F',
u':mani_giunte_carnagione_chiara:': u'\U0001F64F\U0001F3FB',
u':mani_giunte_carnagione_abbastanza_chiara:': u'\U0001F64F\U0001F3FC',
u':mani_giunte_carnagione_olivastra:': u'\U0001F64F\U0001F3FD',
u':mani_giunte_carnagione_abbastanza_scura:': u'\U0001F64F\U0001F3FE',
u':mani_giunte_carnagione_scura:': u'\U0001F64F\U0001F3FF',
u':mano_che_scrive:': u'\U0000270D\U0000FE0F',
u':mano_che_scrive_carnagione_chiara:': u'\U0000270D\U0001F3FB',
u':mano_che_scrive_carnagione_abbastanza_chiara:': u'\U0000270D\U0001F3FC',
u':mano_che_scrive_carnagione_olivastra:': u'\U0000270D\U0001F3FD',
u':mano_che_scrive_carnagione_abbastanza_scura:': u'\U0000270D\U0001F3FE',
u':mano_che_scrive_carnagione_scura:': u'\U0000270D\U0001F3FF',
u':smalto_per_unghie:': u'\U0001F485',
u':smalto_per_unghie_carnagione_chiara:': u'\U0001F485\U0001F3FB',
u':smalto_per_unghie_carnagione_abbastanza_chiara:': u'\U0001F485\U0001F3FC',
u':smalto_per_unghie_carnagione_olivastra:': u'\U0001F485\U0001F3FD',
u':smalto_per_unghie_carnagione_abbastanza_scura:': u'\U0001F485\U0001F3FE',
u':smalto_per_unghie_carnagione_scura:': u'\U0001F485\U0001F3FF',
u':selfie:': u'\U0001F933',
u':selfie_carnagione_chiara:': u'\U0001F933\U0001F3FB',
u':selfie_carnagione_abbastanza_chiara:': u'\U0001F933\U0001F3FC',
u':selfie_carnagione_olivastra:': u'\U0001F933\U0001F3FD',
u':selfie_carnagione_abbastanza_scura:': u'\U0001F933\U0001F3FE',
u':selfie_carnagione_scura:': u'\U0001F933\U0001F3FF',
u':bicipite:': u'\U0001F4AA',
u':bicipite_carnagione_chiara:': u'\U0001F4AA\U0001F3FB',
u':bicipite_carnagione_abbastanza_chiara:': u'\U0001F4AA\U0001F3FC',
u':bicipite_carnagione_olivastra:': u'\U0001F4AA\U0001F3FD',
u':bicipite_carnagione_abbastanza_scura:': u'\U0001F4AA\U0001F3FE',
u':bicipite_carnagione_scura:': u'\U0001F4AA\U0001F3FF',
u':protesi_robotica_per_il_braccio:': u'\U0001F9BE',
u':protesi_robotica_per_la_gamba:': u'\U0001F9BF',
u':gamba:': u'\U0001F9B5',
u':gamba_carnagione_chiara:': u'\U0001F9B5\U0001F3FB',
u':gamba_carnagione_abbastanza_chiara:': u'\U0001F9B5\U0001F3FC',
u':gamba_carnagione_olivastra:': u'\U0001F9B5\U0001F3FD',
u':gamba_carnagione_abbastanza_scura:': u'\U0001F9B5\U0001F3FE',
u':gamba_carnagione_scura:': u'\U0001F9B5\U0001F3FF',
u':piede:': u'\U0001F9B6',
u':piede_carnagione_chiara:': u'\U0001F9B6\U0001F3FB',
u':piede_carnagione_abbastanza_chiara:': u'\U0001F9B6\U0001F3FC',
u':piede_carnagione_olivastra:': u'\U0001F9B6\U0001F3FD',
u':piede_carnagione_abbastanza_scura:': u'\U0001F9B6\U0001F3FE',
u':piede_carnagione_scura:': u'\U0001F9B6\U0001F3FF',
u':orecchio:': u'\U0001F442',
u':orecchio_carnagione_chiara:': u'\U0001F442\U0001F3FB',
u':orecchio_carnagione_abbastanza_chiara:': u'\U0001F442\U0001F3FC',
u':orecchio_carnagione_olivastra:': u'\U0001F442\U0001F3FD',
u':orecchio_carnagione_abbastanza_scura:': u'\U0001F442\U0001F3FE',
u':orecchio_carnagione_scura:': u'\U0001F442\U0001F3FF',
u':orecchio_con_apparecchio_acustico:': u'\U0001F9BB',
u':orecchio_con_apparecchio_acustico_carnagione_chiara:': u'\U0001F9BB\U0001F3FB',
u':orecchio_con_apparecchio_acustico_carnagione_abbastanza_chiara:': u'\U0001F9BB\U0001F3FC',
u':orecchio_con_apparecchio_acustico_carnagione_olivastra:': u'\U0001F9BB\U0001F3FD',
u':orecchio_con_apparecchio_acustico_carnagione_abbastanza_scura:': u'\U0001F9BB\U0001F3FE',
u':orecchio_con_apparecchio_acustico_carnagione_scura:': u'\U0001F9BB\U0001F3FF',
u':naso:': u'\U0001F443',
u':naso_carnagione_chiara:': u'\U0001F443\U0001F3FB',
u':naso_carnagione_abbastanza_chiara:': u'\U0001F443\U0001F3FC',
u':naso_carnagione_olivastra:': u'\U0001F443\U0001F3FD',
u':naso_carnagione_abbastanza_scura:': u'\U0001F443\U0001F3FE',
u':naso_carnagione_scura:': u'\U0001F443\U0001F3FF',
u':cervello:': u'\U0001F9E0',
u':organo_del_cuore:': u'\U0001FAC0',
u':polmoni:': u'\U0001FAC1',
u':dente:': u'\U0001F9B7',
u':osso:': u'\U0001F9B4',
u':occhi:': u'\U0001F440',
u':occhio:': u'\U0001F441\U0000FE0F',
u':lingua:': u'\U0001F445',
u':bocca:': u'\U0001F444',
u':neonato:': u'\U0001F476',
u':neonato_carnagione_chiara:': u'\U0001F476\U0001F3FB',
u':neonato_carnagione_abbastanza_chiara:': u'\U0001F476\U0001F3FC',
u':neonato_carnagione_olivastra:': u'\U0001F476\U0001F3FD',
u':neonato_carnagione_abbastanza_scura:': u'\U0001F476\U0001F3FE',
u':neonato_carnagione_scura:': u'\U0001F476\U0001F3FF',
u':bimbo:': u'\U0001F9D2',
u':bimbo_carnagione_chiara:': u'\U0001F9D2\U0001F3FB',
u':bimbo_carnagione_abbastanza_chiara:': u'\U0001F9D2\U0001F3FC',
u':bimbo_carnagione_olivastra:': u'\U0001F9D2\U0001F3FD',
u':bimbo_carnagione_abbastanza_scura:': u'\U0001F9D2\U0001F3FE',
u':bimbo_carnagione_scura:': u'\U0001F9D2\U0001F3FF',
u':bambino:': u'\U0001F466',
u':bambino_carnagione_chiara:': u'\U0001F466\U0001F3FB',
u':bambino_carnagione_abbastanza_chiara:': u'\U0001F466\U0001F3FC',
u':bambino_carnagione_olivastra:': u'\U0001F466\U0001F3FD',
u':bambino_carnagione_abbastanza_scura:': u'\U0001F466\U0001F3FE',
u':bambino_carnagione_scura:': u'\U0001F466\U0001F3FF',
u':bambina:': u'\U0001F467',
u':bambina_carnagione_chiara:': u'\U0001F467\U0001F3FB',
u':bambina_carnagione_abbastanza_chiara:': u'\U0001F467\U0001F3FC',
u':bambina_carnagione_olivastra:': u'\U0001F467\U0001F3FD',
u':bambina_carnagione_abbastanza_scura:': u'\U0001F467\U0001F3FE',
u':bambina_carnagione_scura:': u'\U0001F467\U0001F3FF',
u':persona:': u'\U0001F9D1',
u':persona_carnagione_chiara:': u'\U0001F9D1\U0001F3FB',
u':persona_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC',
u':persona_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD',
u':persona_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE',
u':persona_carnagione_scura:': u'\U0001F9D1\U0001F3FF',
u':persona_bionda:': u'\U0001F471',
u':persona_bionda_carnagione_chiara:': u'\U0001F471\U0001F3FB',
u':persona_bionda_carnagione_abbastanza_chiara:': u'\U0001F471\U0001F3FC',
u':persona_bionda_carnagione_olivastra:': u'\U0001F471\U0001F3FD',
u':persona_bionda_carnagione_abbastanza_scura:': u'\U0001F471\U0001F3FE',
u':persona_bionda_carnagione_scura:': u'\U0001F471\U0001F3FF',
u':uomo:': u'\U0001F468',
u':uomo_carnagione_chiara:': u'\U0001F468\U0001F3FB',
u':uomo_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC',
u':uomo_carnagione_olivastra:': u'\U0001F468\U0001F3FD',
u':uomo_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE',
u':uomo_carnagione_scura:': u'\U0001F468\U0001F3FF',
u':uomo_con_la_barba:': u'\U0001F9D4',
u':uomo_con_la_barba_carnagione_chiara:': u'\U0001F9D4\U0001F3FB',
u':uomo_con_la_barba_carnagione_abbastanza_chiara:': u'\U0001F9D4\U0001F3FC',
u':uomo_con_la_barba_carnagione_olivastra:': u'\U0001F9D4\U0001F3FD',
u':uomo_con_la_barba_carnagione_abbastanza_scura:': u'\U0001F9D4\U0001F3FE',
u':uomo_con_la_barba_carnagione_scura:': u'\U0001F9D4\U0001F3FF',
u':uomo_capelli_rossi:': u'\U0001F468\U0000200D\U0001F9B0',
u':uomo_carnagione_chiara_e_capelli_rossi:': u'\U0001F468\U0001F3FB\U0000200D\U0001F9B0',
u':uomo_carnagione_abbastanza_chiara_e_capelli_rossi:': u'\U0001F468\U0001F3FC\U0000200D\U0001F9B0',
u':uomo_carnagione_olivastra_e_capelli_rossi:': u'\U0001F468\U0001F3FD\U0000200D\U0001F9B0',
u':uomo_carnagione_abbastanza_scura_e_capelli_rossi:': u'\U0001F468\U0001F3FE\U0000200D\U0001F9B0',
u':uomo_carnagione_scura_e_capelli_rossi:': u'\U0001F468\U0001F3FF\U0000200D\U0001F9B0',
u':uomo_capelli_ricci:': u'\U0001F468\U0000200D\U0001F9B1',
u':uomo_carnagione_chiara_e_capelli_ricci:': u'\U0001F468\U0001F3FB\U0000200D\U0001F9B1',
u':uomo_carnagione_abbastanza_chiara_e_capelli_ricci:': u'\U0001F468\U0001F3FC\U0000200D\U0001F9B1',
u':uomo_carnagione_olivastra_e_capelli_ricci:': u'\U0001F468\U0001F3FD\U0000200D\U0001F9B1',
u':uomo_carnagione_abbastanza_scura_e_capelli_ricci:': u'\U0001F468\U0001F3FE\U0000200D\U0001F9B1',
u':uomo_carnagione_scura_e_capelli_ricci:': u'\U0001F468\U0001F3FF\U0000200D\U0001F9B1',
u':uomo_capelli_bianchi:': u'\U0001F468\U0000200D\U0001F9B3',
u':uomo_carnagione_chiara_e_capelli_bianchi:': u'\U0001F468\U0001F3FB\U0000200D\U0001F9B3',
u':uomo_carnagione_abbastanza_chiara_e_capelli_bianchi:': u'\U0001F468\U0001F3FC\U0000200D\U0001F9B3',
u':uomo_carnagione_olivastra_e_capelli_bianchi:': u'\U0001F468\U0001F3FD\U0000200D\U0001F9B3',
u':uomo_carnagione_abbastanza_scura_e_capelli_bianchi:': u'\U0001F468\U0001F3FE\U0000200D\U0001F9B3',
u':uomo_carnagione_scura_e_capelli_bianchi:': u'\U0001F468\U0001F3FF\U0000200D\U0001F9B3',
u':uomo_calvo:': u'\U0001F468\U0000200D\U0001F9B2',
u':uomo_carnagione_chiara_e_calvo:': u'\U0001F468\U0001F3FB\U0000200D\U0001F9B2',
u':uomo_carnagione_abbastanza_chiara_e_calvo:': u'\U0001F468\U0001F3FC\U0000200D\U0001F9B2',
u':uomo_carnagione_olivastra_e_calvo:': u'\U0001F468\U0001F3FD\U0000200D\U0001F9B2',
u':uomo_carnagione_abbastanza_scura_e_calvo:': u'\U0001F468\U0001F3FE\U0000200D\U0001F9B2',
u':uomo_carnagione_scura_e_calvo:': u'\U0001F468\U0001F3FF\U0000200D\U0001F9B2',
u':donna:': u'\U0001F469',
u':donna_carnagione_chiara:': u'\U0001F469\U0001F3FB',
u':donna_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC',
u':donna_carnagione_olivastra:': u'\U0001F469\U0001F3FD',
u':donna_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE',
u':donna_carnagione_scura:': u'\U0001F469\U0001F3FF',
u':donna_capelli_rossi:': u'\U0001F469\U0000200D\U0001F9B0',
u':donna_carnagione_chiara_e_capelli_rossi:': u'\U0001F469\U0001F3FB\U0000200D\U0001F9B0',
u':donna_carnagione_abbastanza_chiara_e_capelli_rossi:': u'\U0001F469\U0001F3FC\U0000200D\U0001F9B0',
u':donna_carnagione_olivastra_e_capelli_rossi:': u'\U0001F469\U0001F3FD\U0000200D\U0001F9B0',
u':donna_carnagione_abbastanza_scura_e_capelli_rossi:': u'\U0001F469\U0001F3FE\U0000200D\U0001F9B0',
u':donna_carnagione_scura_e_capelli_rossi:': u'\U0001F469\U0001F3FF\U0000200D\U0001F9B0',
u':persona_capelli_rossi:': u'\U0001F9D1\U0000200D\U0001F9B0',
u':persona_carnagione_chiara_e_capelli_rossi:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F9B0',
u':persona_carnagione_abbastanza_chiara_e_capelli_rossi:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F9B0',
u':persona_carnagione_olivastra_e_capelli_rossi:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F9B0',
u':persona_carnagione_abbastanza_scura_e_capelli_rossi:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F9B0',
u':persona_carnagione_scura_e_capelli_rossi:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F9B0',
u':donna_capelli_ricci:': u'\U0001F469\U0000200D\U0001F9B1',
u':donna_carnagione_chiara_e_capelli_ricci:': u'\U0001F469\U0001F3FB\U0000200D\U0001F9B1',
u':donna_carnagione_abbastanza_chiara_e_capelli_ricci:': u'\U0001F469\U0001F3FC\U0000200D\U0001F9B1',
u':donna_carnagione_olivastra_e_capelli_ricci:': u'\U0001F469\U0001F3FD\U0000200D\U0001F9B1',
u':donna_carnagione_abbastanza_scura_e_capelli_ricci:': u'\U0001F469\U0001F3FE\U0000200D\U0001F9B1',
u':donna_carnagione_scura_e_capelli_ricci:': u'\U0001F469\U0001F3FF\U0000200D\U0001F9B1',
u':persona_capelli_ricci:': u'\U0001F9D1\U0000200D\U0001F9B1',
u':persona_carnagione_chiara_e_capelli_ricci:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F9B1',
u':persona_carnagione_abbastanza_chiara_e_capelli_ricci:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F9B1',
u':persona_carnagione_olivastra_e_capelli_ricci:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F9B1',
u':persona_carnagione_abbastanza_scura_e_capelli_ricci:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F9B1',
u':persona_carnagione_scura_e_capelli_ricci:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F9B1',
u':donna_capelli_bianchi:': u'\U0001F469\U0000200D\U0001F9B3',
u':donna_carnagione_chiara_e_capelli_bianchi:': u'\U0001F469\U0001F3FB\U0000200D\U0001F9B3',
u':donna_carnagione_abbastanza_chiara_e_capelli_bianchi:': u'\U0001F469\U0001F3FC\U0000200D\U0001F9B3',
u':donna_carnagione_olivastra_e_capelli_bianchi:': u'\U0001F469\U0001F3FD\U0000200D\U0001F9B3',
u':donna_carnagione_abbastanza_scura_e_capelli_bianchi:': u'\U0001F469\U0001F3FE\U0000200D\U0001F9B3',
u':donna_carnagione_scura_e_capelli_bianchi:': u'\U0001F469\U0001F3FF\U0000200D\U0001F9B3',
u':persona_capelli_bianchi:': u'\U0001F9D1\U0000200D\U0001F9B3',
u':persona_carnagione_chiara_e_capelli_bianchi:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F9B3',
u':persona_carnagione_abbastanza_chiara_e_capelli_bianchi:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F9B3',
u':persona_carnagione_olivastra_e_capelli_bianchi:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F9B3',
u':persona_carnagione_abbastanza_scura_e_capelli_bianchi:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F9B3',
u':persona_carnagione_scura_e_capelli_bianchi:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F9B3',
u':donna_calvo:': u'\U0001F469\U0000200D\U0001F9B2',
u':donna_carnagione_chiara_e_calvo:': u'\U0001F469\U0001F3FB\U0000200D\U0001F9B2',
u':donna_carnagione_abbastanza_chiara_e_calvo:': u'\U0001F469\U0001F3FC\U0000200D\U0001F9B2',
u':donna_carnagione_olivastra_e_calvo:': u'\U0001F469\U0001F3FD\U0000200D\U0001F9B2',
u':donna_carnagione_abbastanza_scura_e_calvo:': u'\U0001F469\U0001F3FE\U0000200D\U0001F9B2',
u':donna_carnagione_scura_e_calvo:': u'\U0001F469\U0001F3FF\U0000200D\U0001F9B2',
u':persona_calvo:': u'\U0001F9D1\U0000200D\U0001F9B2',
u':persona_carnagione_chiara_e_calvo:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F9B2',
u':persona_carnagione_abbastanza_chiara_e_calvo:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F9B2',
u':persona_carnagione_olivastra_e_calvo:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F9B2',
u':persona_carnagione_abbastanza_scura_e_calvo:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F9B2',
u':persona_carnagione_scura_e_calvo:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F9B2',
u':donna_bionda:': u'\U0001F471\U0000200D\U00002640\U0000FE0F',
u':donna_bionda_carnagione_chiara:': u'\U0001F471\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_bionda_carnagione_abbastanza_chiara:': u'\U0001F471\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_bionda_carnagione_olivastra:': u'\U0001F471\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_bionda_carnagione_abbastanza_scura:': u'\U0001F471\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_bionda_carnagione_scura:': u'\U0001F471\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':uomo_biondo:': u'\U0001F471\U0000200D\U00002642\U0000FE0F',
u':uomo_biondo_carnagione_chiara:': u'\U0001F471\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_biondo_carnagione_abbastanza_chiara:': u'\U0001F471\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_biondo_carnagione_olivastra:': u'\U0001F471\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_biondo_carnagione_abbastanza_scura:': u'\U0001F471\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_biondo_carnagione_scura:': u'\U0001F471\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':persona_anziana:': u'\U0001F9D3',
u':persona_anziana_carnagione_chiara:': u'\U0001F9D3\U0001F3FB',
u':persona_anziana_carnagione_abbastanza_chiara:': u'\U0001F9D3\U0001F3FC',
u':persona_anziana_carnagione_olivastra:': u'\U0001F9D3\U0001F3FD',
u':persona_anziana_carnagione_abbastanza_scura:': u'\U0001F9D3\U0001F3FE',
u':persona_anziana_carnagione_scura:': u'\U0001F9D3\U0001F3FF',
u':uomo_anziano:': u'\U0001F474',
u':uomo_anziano_carnagione_chiara:': u'\U0001F474\U0001F3FB',
u':uomo_anziano_carnagione_abbastanza_chiara:': u'\U0001F474\U0001F3FC',
u':uomo_anziano_carnagione_olivastra:': u'\U0001F474\U0001F3FD',
u':uomo_anziano_carnagione_abbastanza_scura:': u'\U0001F474\U0001F3FE',
u':uomo_anziano_carnagione_scura:': u'\U0001F474\U0001F3FF',
u':donna_anziana:': u'\U0001F475',
u':donna_anziana_carnagione_chiara:': u'\U0001F475\U0001F3FB',
u':donna_anziana_carnagione_abbastanza_chiara:': u'\U0001F475\U0001F3FC',
u':donna_anziana_carnagione_olivastra:': u'\U0001F475\U0001F3FD',
u':donna_anziana_carnagione_abbastanza_scura:': u'\U0001F475\U0001F3FE',
u':donna_anziana_carnagione_scura:': u'\U0001F475\U0001F3FF',
u':persona_corrucciata:': u'\U0001F64D',
u':persona_corrucciata_carnagione_chiara:': u'\U0001F64D\U0001F3FB',
u':persona_corrucciata_carnagione_abbastanza_chiara:': u'\U0001F64D\U0001F3FC',
u':persona_corrucciata_carnagione_olivastra:': u'\U0001F64D\U0001F3FD',
u':persona_corrucciata_carnagione_abbastanza_scura:': u'\U0001F64D\U0001F3FE',
u':persona_corrucciata_carnagione_scura:': u'\U0001F64D\U0001F3FF',
u':uomo_corrucciato:': u'\U0001F64D\U0000200D\U00002642\U0000FE0F',
u':uomo_corrucciato_carnagione_chiara:': u'\U0001F64D\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_corrucciato_carnagione_abbastanza_chiara:': u'\U0001F64D\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_corrucciato_carnagione_olivastra:': u'\U0001F64D\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_corrucciato_carnagione_abbastanza_scura:': u'\U0001F64D\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_corrucciato_carnagione_scura:': u'\U0001F64D\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_corrucciata:': u'\U0001F64D\U0000200D\U00002640\U0000FE0F',
u':donna_corrucciata_carnagione_chiara:': u'\U0001F64D\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_corrucciata_carnagione_abbastanza_chiara:': u'\U0001F64D\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_corrucciata_carnagione_olivastra:': u'\U0001F64D\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_corrucciata_carnagione_abbastanza_scura:': u'\U0001F64D\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_corrucciata_carnagione_scura:': u'\U0001F64D\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_imbronciata:': u'\U0001F64E',
u':persona_imbronciata_carnagione_chiara:': u'\U0001F64E\U0001F3FB',
u':persona_imbronciata_carnagione_abbastanza_chiara:': u'\U0001F64E\U0001F3FC',
u':persona_imbronciata_carnagione_olivastra:': u'\U0001F64E\U0001F3FD',
u':persona_imbronciata_carnagione_abbastanza_scura:': u'\U0001F64E\U0001F3FE',
u':persona_imbronciata_carnagione_scura:': u'\U0001F64E\U0001F3FF',
u':uomo_imbronciato:': u'\U0001F64E\U0000200D\U00002642\U0000FE0F',
u':uomo_imbronciato_carnagione_chiara:': u'\U0001F64E\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_imbronciato_carnagione_abbastanza_chiara:': u'\U0001F64E\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_imbronciato_carnagione_olivastra:': u'\U0001F64E\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_imbronciato_carnagione_abbastanza_scura:': u'\U0001F64E\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_imbronciato_carnagione_scura:': u'\U0001F64E\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_imbronciata:': u'\U0001F64E\U0000200D\U00002640\U0000FE0F',
u':donna_imbronciata_carnagione_chiara:': u'\U0001F64E\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_imbronciata_carnagione_abbastanza_chiara:': u'\U0001F64E\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_imbronciata_carnagione_olivastra:': u'\U0001F64E\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_imbronciata_carnagione_abbastanza_scura:': u'\U0001F64E\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_imbronciata_carnagione_scura:': u'\U0001F64E\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_che_fa_segno_di_no:': u'\U0001F645',
u':persona_che_fa_segno_di_no_carnagione_chiara:': u'\U0001F645\U0001F3FB',
u':persona_che_fa_segno_di_no_carnagione_abbastanza_chiara:': u'\U0001F645\U0001F3FC',
u':persona_che_fa_segno_di_no_carnagione_olivastra:': u'\U0001F645\U0001F3FD',
u':persona_che_fa_segno_di_no_carnagione_abbastanza_scura:': u'\U0001F645\U0001F3FE',
u':persona_che_fa_segno_di_no_carnagione_scura:': u'\U0001F645\U0001F3FF',
u':uomo_con_gesto_di_rifiuto:': u'\U0001F645\U0000200D\U00002642\U0000FE0F',
u':uomo_con_gesto_di_rifiuto_carnagione_chiara:': u'\U0001F645\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_con_gesto_di_rifiuto_carnagione_abbastanza_chiara:': u'\U0001F645\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_con_gesto_di_rifiuto_carnagione_olivastra:': u'\U0001F645\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_con_gesto_di_rifiuto_carnagione_abbastanza_scura:': u'\U0001F645\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_con_gesto_di_rifiuto_carnagione_scura:': u'\U0001F645\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_con_gesto_di_rifiuto:': u'\U0001F645\U0000200D\U00002640\U0000FE0F',
u':donna_con_gesto_di_rifiuto_carnagione_chiara:': u'\U0001F645\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_con_gesto_di_rifiuto_carnagione_abbastanza_chiara:': u'\U0001F645\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_con_gesto_di_rifiuto_carnagione_olivastra:': u'\U0001F645\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_con_gesto_di_rifiuto_carnagione_abbastanza_scura:': u'\U0001F645\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_con_gesto_di_rifiuto_carnagione_scura:': u'\U0001F645\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_con_gesto_ok:': u'\U0001F646',
u':persona_con_gesto_ok_carnagione_chiara:': u'\U0001F646\U0001F3FB',
u':persona_con_gesto_ok_carnagione_abbastanza_chiara:': u'\U0001F646\U0001F3FC',
u':persona_con_gesto_ok_carnagione_olivastra:': u'\U0001F646\U0001F3FD',
u':persona_con_gesto_ok_carnagione_abbastanza_scura:': u'\U0001F646\U0001F3FE',
u':persona_con_gesto_ok_carnagione_scura:': u'\U0001F646\U0001F3FF',
u':uomo_con_gesto_ok:': u'\U0001F646\U0000200D\U00002642\U0000FE0F',
u':uomo_con_gesto_ok_carnagione_chiara:': u'\U0001F646\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_con_gesto_ok_carnagione_abbastanza_chiara:': u'\U0001F646\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_con_gesto_ok_carnagione_olivastra:': u'\U0001F646\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_con_gesto_ok_carnagione_abbastanza_scura:': u'\U0001F646\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_con_gesto_ok_carnagione_scura:': u'\U0001F646\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_con_gesto_ok:': u'\U0001F646\U0000200D\U00002640\U0000FE0F',
u':donna_con_gesto_ok_carnagione_chiara:': u'\U0001F646\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_con_gesto_ok_carnagione_abbastanza_chiara:': u'\U0001F646\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_con_gesto_ok_carnagione_olivastra:': u'\U0001F646\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_con_gesto_ok_carnagione_abbastanza_scura:': u'\U0001F646\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_con_gesto_ok_carnagione_scura:': u'\U0001F646\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_al_punto_informazioni:': u'\U0001F481',
u':persona_al_punto_informazioni_carnagione_chiara:': u'\U0001F481\U0001F3FB',
u':persona_al_punto_informazioni_carnagione_abbastanza_chiara:': u'\U0001F481\U0001F3FC',
u':persona_al_punto_informazioni_carnagione_olivastra:': u'\U0001F481\U0001F3FD',
u':persona_al_punto_informazioni_carnagione_abbastanza_scura:': u'\U0001F481\U0001F3FE',
u':persona_al_punto_informazioni_carnagione_scura:': u'\U0001F481\U0001F3FF',
u':uomo_con_suggerimento:': u'\U0001F481\U0000200D\U00002642\U0000FE0F',
u':uomo_con_suggerimento_carnagione_chiara:': u'\U0001F481\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_con_suggerimento_carnagione_abbastanza_chiara:': u'\U0001F481\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_con_suggerimento_carnagione_olivastra:': u'\U0001F481\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_con_suggerimento_carnagione_abbastanza_scura:': u'\U0001F481\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_con_suggerimento_carnagione_scura:': u'\U0001F481\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_con_suggerimento:': u'\U0001F481\U0000200D\U00002640\U0000FE0F',
u':donna_con_suggerimento_carnagione_chiara:': u'\U0001F481\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_con_suggerimento_carnagione_abbastanza_chiara:': u'\U0001F481\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_con_suggerimento_carnagione_olivastra:': u'\U0001F481\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_con_suggerimento_carnagione_abbastanza_scura:': u'\U0001F481\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_con_suggerimento_carnagione_scura:': u'\U0001F481\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_con_mano_alzata:': u'\U0001F64B',
u':persona_con_mano_alzata_carnagione_chiara:': u'\U0001F64B\U0001F3FB',
u':persona_con_mano_alzata_carnagione_abbastanza_chiara:': u'\U0001F64B\U0001F3FC',
u':persona_con_mano_alzata_carnagione_olivastra:': u'\U0001F64B\U0001F3FD',
u':persona_con_mano_alzata_carnagione_abbastanza_scura:': u'\U0001F64B\U0001F3FE',
u':persona_con_mano_alzata_carnagione_scura:': u'\U0001F64B\U0001F3FF',
u':uomo_con_mano_alzata:': u'\U0001F64B\U0000200D\U00002642\U0000FE0F',
u':uomo_con_mano_alzata_carnagione_chiara:': u'\U0001F64B\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_con_mano_alzata_carnagione_abbastanza_chiara:': u'\U0001F64B\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_con_mano_alzata_carnagione_olivastra:': u'\U0001F64B\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_con_mano_alzata_carnagione_abbastanza_scura:': u'\U0001F64B\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_con_mano_alzata_carnagione_scura:': u'\U0001F64B\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_con_mano_alzata:': u'\U0001F64B\U0000200D\U00002640\U0000FE0F',
u':donna_con_mano_alzata_carnagione_chiara:': u'\U0001F64B\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_con_mano_alzata_carnagione_abbastanza_chiara:': u'\U0001F64B\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_con_mano_alzata_carnagione_olivastra:': u'\U0001F64B\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_con_mano_alzata_carnagione_abbastanza_scura:': u'\U0001F64B\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_con_mano_alzata_carnagione_scura:': u'\U0001F64B\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_con_problemi_di_udito:': u'\U0001F9CF',
u':persona_con_problemi_di_udito_carnagione_chiara:': u'\U0001F9CF\U0001F3FB',
u':persona_con_problemi_di_udito_carnagione_abbastanza_chiara:': u'\U0001F9CF\U0001F3FC',
u':persona_con_problemi_di_udito_carnagione_olivastra:': u'\U0001F9CF\U0001F3FD',
u':persona_con_problemi_di_udito_carnagione_abbastanza_scura:': u'\U0001F9CF\U0001F3FE',
u':persona_con_problemi_di_udito_carnagione_scura:': u'\U0001F9CF\U0001F3FF',
u':uomo_con_problemi_di_udito:': u'\U0001F9CF\U0000200D\U00002642\U0000FE0F',
u':uomo_con_problemi_di_udito_carnagione_chiara:': u'\U0001F9CF\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_con_problemi_di_udito_carnagione_abbastanza_chiara:': u'\U0001F9CF\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_con_problemi_di_udito_carnagione_olivastra:': u'\U0001F9CF\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_con_problemi_di_udito_carnagione_abbastanza_scura:': u'\U0001F9CF\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_con_problemi_di_udito_carnagione_scura:': u'\U0001F9CF\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_con_problemi_di_udito:': u'\U0001F9CF\U0000200D\U00002640\U0000FE0F',
u':donna_con_problemi_di_udito_carnagione_chiara:': u'\U0001F9CF\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_con_problemi_di_udito_carnagione_abbastanza_chiara:': u'\U0001F9CF\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_con_problemi_di_udito_carnagione_olivastra:': u'\U0001F9CF\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_con_problemi_di_udito_carnagione_abbastanza_scura:': u'\U0001F9CF\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_con_problemi_di_udito_carnagione_scura:': u'\U0001F9CF\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_che_fa_un_inchino_profondo:': u'\U0001F647',
u':persona_che_fa_un_inchino_profondo_carnagione_chiara:': u'\U0001F647\U0001F3FB',
u':persona_che_fa_un_inchino_profondo_carnagione_abbastanza_chiara:': u'\U0001F647\U0001F3FC',
u':persona_che_fa_un_inchino_profondo_carnagione_olivastra:': u'\U0001F647\U0001F3FD',
u':persona_che_fa_un_inchino_profondo_carnagione_abbastanza_scura:': u'\U0001F647\U0001F3FE',
u':persona_che_fa_un_inchino_profondo_carnagione_scura:': u'\U0001F647\U0001F3FF',
u':uomo_che_fa_inchino_profondo:': u'\U0001F647\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_inchino_profondo_carnagione_chiara:': u'\U0001F647\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_inchino_profondo_carnagione_abbastanza_chiara:': u'\U0001F647\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_inchino_profondo_carnagione_olivastra:': u'\U0001F647\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_inchino_profondo_carnagione_abbastanza_scura:': u'\U0001F647\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_inchino_profondo_carnagione_scura:': u'\U0001F647\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_che_fa_inchino_profondo:': u'\U0001F647\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_inchino_profondo_carnagione_chiara:': u'\U0001F647\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_inchino_profondo_carnagione_abbastanza_chiara:': u'\U0001F647\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_inchino_profondo_carnagione_olivastra:': u'\U0001F647\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_inchino_profondo_carnagione_abbastanza_scura:': u'\U0001F647\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_inchino_profondo_carnagione_scura:': u'\U0001F647\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_esasperata:': u'\U0001F926',
u':persona_esasperata_carnagione_chiara:': u'\U0001F926\U0001F3FB',
u':persona_esasperata_carnagione_abbastanza_chiara:': u'\U0001F926\U0001F3FC',
u':persona_esasperata_carnagione_olivastra:': u'\U0001F926\U0001F3FD',
u':persona_esasperata_carnagione_abbastanza_scura:': u'\U0001F926\U0001F3FE',
u':persona_esasperata_carnagione_scura:': u'\U0001F926\U0001F3FF',
u':uomo_esasperato:': u'\U0001F926\U0000200D\U00002642\U0000FE0F',
u':uomo_esasperato_carnagione_chiara:': u'\U0001F926\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_esasperato_carnagione_abbastanza_chiara:': u'\U0001F926\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_esasperato_carnagione_olivastra:': u'\U0001F926\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_esasperato_carnagione_abbastanza_scura:': u'\U0001F926\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_esasperato_carnagione_scura:': u'\U0001F926\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_esasperata:': u'\U0001F926\U0000200D\U00002640\U0000FE0F',
u':donna_esasperata_carnagione_chiara:': u'\U0001F926\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_esasperata_carnagione_abbastanza_chiara:': u'\U0001F926\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_esasperata_carnagione_olivastra:': u'\U0001F926\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_esasperata_carnagione_abbastanza_scura:': u'\U0001F926\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_esasperata_carnagione_scura:': u'\U0001F926\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_che_scrolla_le_spalle:': u'\U0001F937',
u':persona_che_scrolla_le_spalle_carnagione_chiara:': u'\U0001F937\U0001F3FB',
u':persona_che_scrolla_le_spalle_carnagione_abbastanza_chiara:': u'\U0001F937\U0001F3FC',
u':persona_che_scrolla_le_spalle_carnagione_olivastra:': u'\U0001F937\U0001F3FD',
u':persona_che_scrolla_le_spalle_carnagione_abbastanza_scura:': u'\U0001F937\U0001F3FE',
u':persona_che_scrolla_le_spalle_carnagione_scura:': u'\U0001F937\U0001F3FF',
u':uomo_che_scrolla_le_spalle:': u'\U0001F937\U0000200D\U00002642\U0000FE0F',
u':uomo_che_scrolla_le_spalle_carnagione_chiara:': u'\U0001F937\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_che_scrolla_le_spalle_carnagione_abbastanza_chiara:': u'\U0001F937\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_che_scrolla_le_spalle_carnagione_olivastra:': u'\U0001F937\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_che_scrolla_le_spalle_carnagione_abbastanza_scura:': u'\U0001F937\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_che_scrolla_le_spalle_carnagione_scura:': u'\U0001F937\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_che_scrolla_le_spalle:': u'\U0001F937\U0000200D\U00002640\U0000FE0F',
u':donna_che_scrolla_le_spalle_carnagione_chiara:': u'\U0001F937\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_che_scrolla_le_spalle_carnagione_abbastanza_chiara:': u'\U0001F937\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_che_scrolla_le_spalle_carnagione_olivastra:': u'\U0001F937\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_che_scrolla_le_spalle_carnagione_abbastanza_scura:': u'\U0001F937\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_che_scrolla_le_spalle_carnagione_scura:': u'\U0001F937\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_che_lavora_nella_sanità:': u'\U0001F9D1\U0000200D\U00002695\U0000FE0F',
u':persona_che_lavora_nella_sanità_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U00002695\U0000FE0F',
u':persona_che_lavora_nella_sanità_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U00002695\U0000FE0F',
u':persona_che_lavora_nella_sanità_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U00002695\U0000FE0F',
u':persona_che_lavora_nella_sanità_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U00002695\U0000FE0F',
u':persona_che_lavora_nella_sanità_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U00002695\U0000FE0F',
u':operatore_sanitario:': u'\U0001F468\U0000200D\U00002695\U0000FE0F',
u':operatore_sanitario_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U00002695\U0000FE0F',
u':operatore_sanitario_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U00002695\U0000FE0F',
u':operatore_sanitario_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U00002695\U0000FE0F',
u':operatore_sanitario_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U00002695\U0000FE0F',
u':operatore_sanitario_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U00002695\U0000FE0F',
u':operatrice_sanitaria:': u'\U0001F469\U0000200D\U00002695\U0000FE0F',
u':operatrice_sanitaria_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U00002695\U0000FE0F',
u':operatrice_sanitaria_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U00002695\U0000FE0F',
u':operatrice_sanitaria_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U00002695\U0000FE0F',
u':operatrice_sanitaria_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U00002695\U0000FE0F',
u':operatrice_sanitaria_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U00002695\U0000FE0F',
u':studente:': u'\U0001F9D1\U0000200D\U0001F393',
u':studente_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F393',
u':studente_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F393',
u':studente_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F393',
u':studente_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F393',
u':studente_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F393',
u':studente_maschio:': u'\U0001F468\U0000200D\U0001F393',
u':studente_maschio_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F393',
u':studente_maschio_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F393',
u':studente_maschio_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F393',
u':studente_maschio_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F393',
u':studente_maschio_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F393',
u':studentessa:': u'\U0001F469\U0000200D\U0001F393',
u':studentessa_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F393',
u':studentessa_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F393',
u':studentessa_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F393',
u':studentessa_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F393',
u':studentessa_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F393',
u':insegnante:': u'\U0001F9D1\U0000200D\U0001F3EB',
u':insegnante_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F3EB',
u':insegnante_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F3EB',
u':insegnante_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F3EB',
u':insegnante_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F3EB',
u':insegnante_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F3EB',
u':professore:': u'\U0001F468\U0000200D\U0001F3EB',
u':professore_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F3EB',
u':professore_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F3EB',
u':professore_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F3EB',
u':professore_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F3EB',
u':professore_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F3EB',
u':professoressa:': u'\U0001F469\U0000200D\U0001F3EB',
u':professoressa_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F3EB',
u':professoressa_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F3EB',
u':professoressa_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F3EB',
u':professoressa_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F3EB',
u':professoressa_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F3EB',
u':giudice:': u'\U0001F9D1\U0000200D\U00002696\U0000FE0F',
u':giudice_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U00002696\U0000FE0F',
u':giudice_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U00002696\U0000FE0F',
u':giudice_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U00002696\U0000FE0F',
u':giudice_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U00002696\U0000FE0F',
u':giudice_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U00002696\U0000FE0F',
u':giudice_uomo:': u'\U0001F468\U0000200D\U00002696\U0000FE0F',
u':giudice_uomo_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U00002696\U0000FE0F',
u':giudice_uomo_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U00002696\U0000FE0F',
u':giudice_uomo_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U00002696\U0000FE0F',
u':giudice_uomo_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U00002696\U0000FE0F',
u':giudice_uomo_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U00002696\U0000FE0F',
u':giudice_donna:': u'\U0001F469\U0000200D\U00002696\U0000FE0F',
u':giudice_donna_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U00002696\U0000FE0F',
u':giudice_donna_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U00002696\U0000FE0F',
u':giudice_donna_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U00002696\U0000FE0F',
u':giudice_donna_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U00002696\U0000FE0F',
u':giudice_donna_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U00002696\U0000FE0F',
u':agricoltore:': u'\U0001F9D1\U0000200D\U0001F33E',
u':agricoltore_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F33E',
u':agricoltore_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F33E',
u':agricoltore_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F33E',
u':agricoltore_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F33E',
u':agricoltore_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F33E',
u':contadino:': u'\U0001F468\U0000200D\U0001F33E',
u':contadino_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F33E',
u':contadino_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F33E',
u':contadino_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F33E',
u':contadino_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F33E',
u':contadino_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F33E',
u':contadina:': u'\U0001F469\U0000200D\U0001F33E',
u':contadina_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F33E',
u':contadina_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F33E',
u':contadina_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F33E',
u':contadina_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F33E',
u':contadina_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F33E',
u':persona_che_cucina:': u'\U0001F9D1\U0000200D\U0001F373',
u':persona_che_cucina_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F373',
u':persona_che_cucina_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F373',
u':persona_che_cucina_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F373',
u':persona_che_cucina_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F373',
u':persona_che_cucina_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F373',
u':cuoco:': u'\U0001F468\U0000200D\U0001F373',
u':cuoco_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F373',
u':cuoco_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F373',
u':cuoco_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F373',
u':cuoco_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F373',
u':cuoco_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F373',
u':cuoca:': u'\U0001F469\U0000200D\U0001F373',
u':cuoca_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F373',
u':cuoca_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F373',
u':cuoca_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F373',
u':cuoca_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F373',
u':cuoca_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F373',
u':meccanico:': u'\U0001F9D1\U0000200D\U0001F527',
u':meccanico_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F527',
u':meccanico_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F527',
u':meccanico_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F527',
u':meccanico_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F527',
u':meccanico_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F527',
u':meccanico_uomo:': u'\U0001F468\U0000200D\U0001F527',
u':meccanico_uomo_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F527',
u':meccanico_uomo_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F527',
u':meccanico_uomo_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F527',
u':meccanico_uomo_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F527',
u':meccanico_uomo_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F527',
u':meccanico_donna:': u'\U0001F469\U0000200D\U0001F527',
u':meccanico_donna_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F527',
u':meccanico_donna_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F527',
u':meccanico_donna_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F527',
u':meccanico_donna_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F527',
u':meccanico_donna_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F527',
u':persona_che_lavora_in_fabbrica:': u'\U0001F9D1\U0000200D\U0001F3ED',
u':persona_che_lavora_in_fabbrica_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F3ED',
u':persona_che_lavora_in_fabbrica_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F3ED',
u':persona_che_lavora_in_fabbrica_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F3ED',
u':persona_che_lavora_in_fabbrica_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F3ED',
u':persona_che_lavora_in_fabbrica_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F3ED',
u':operaio:': u'\U0001F468\U0000200D\U0001F3ED',
u':operaio_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F3ED',
u':operaio_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F3ED',
u':operaio_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F3ED',
u':operaio_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F3ED',
u':operaio_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F3ED',
u':operaia:': u'\U0001F469\U0000200D\U0001F3ED',
u':operaia_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F3ED',
u':operaia_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F3ED',
u':operaia_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F3ED',
u':operaia_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F3ED',
u':operaia_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F3ED',
u':persona_che_fa_un_lavoro_d’ufficio:': u'\U0001F9D1\U0000200D\U0001F4BC',
u':persona_che_fa_un_lavoro_d’ufficio_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F4BC',
u':persona_che_fa_un_lavoro_d’ufficio_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F4BC',
u':persona_che_fa_un_lavoro_d’ufficio_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F4BC',
u':persona_che_fa_un_lavoro_d’ufficio_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F4BC',
u':persona_che_fa_un_lavoro_d’ufficio_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F4BC',
u':impiegato:': u'\U0001F468\U0000200D\U0001F4BC',
u':impiegato_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F4BC',
u':impiegato_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F4BC',
u':impiegato_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F4BC',
u':impiegato_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F4BC',
u':impiegato_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F4BC',
u':impiegata:': u'\U0001F469\U0000200D\U0001F4BC',
u':impiegata_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F4BC',
u':impiegata_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F4BC',
u':impiegata_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F4BC',
u':impiegata_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F4BC',
u':impiegata_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F4BC',
u':persona_che_lavora_in_campo_scientifico:': u'\U0001F9D1\U0000200D\U0001F52C',
u':persona_che_lavora_in_campo_scientifico_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F52C',
u':persona_che_lavora_in_campo_scientifico_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F52C',
u':persona_che_lavora_in_campo_scientifico_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F52C',
u':persona_che_lavora_in_campo_scientifico_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F52C',
u':persona_che_lavora_in_campo_scientifico_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F52C',
u':scienziato:': u'\U0001F468\U0000200D\U0001F52C',
u':scienziato_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F52C',
u':scienziato_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F52C',
u':scienziato_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F52C',
u':scienziato_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F52C',
u':scienziato_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F52C',
u':scienziata:': u'\U0001F469\U0000200D\U0001F52C',
u':scienziata_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F52C',
u':scienziata_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F52C',
u':scienziata_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F52C',
u':scienziata_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F52C',
u':scienziata_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F52C',
u':persona_esperta_di_tecnologia:': u'\U0001F9D1\U0000200D\U0001F4BB',
u':persona_esperta_di_tecnologia_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F4BB',
u':persona_esperta_di_tecnologia_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F4BB',
u':persona_esperta_di_tecnologia_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F4BB',
u':persona_esperta_di_tecnologia_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F4BB',
u':persona_esperta_di_tecnologia_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F4BB',
u':tecnologo:': u'\U0001F468\U0000200D\U0001F4BB',
u':tecnologo_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F4BB',
u':tecnologo_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F4BB',
u':tecnologo_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F4BB',
u':tecnologo_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F4BB',
u':tecnologo_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F4BB',
u':tecnologa:': u'\U0001F469\U0000200D\U0001F4BB',
u':tecnologa_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F4BB',
u':tecnologa_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F4BB',
u':tecnologa_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F4BB',
u':tecnologa_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F4BB',
u':tecnologa_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F4BB',
u':cantante:': u'\U0001F9D1\U0000200D\U0001F3A4',
u':cantante_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F3A4',
u':cantante_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F3A4',
u':cantante_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F3A4',
u':cantante_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F3A4',
u':cantante_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F3A4',
u':cantante_uomo:': u'\U0001F468\U0000200D\U0001F3A4',
u':cantante_uomo_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F3A4',
u':cantante_uomo_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F3A4',
u':cantante_uomo_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F3A4',
u':cantante_uomo_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F3A4',
u':cantante_uomo_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F3A4',
u':cantante_donna:': u'\U0001F469\U0000200D\U0001F3A4',
u':cantante_donna_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F3A4',
u':cantante_donna_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F3A4',
u':cantante_donna_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F3A4',
u':cantante_donna_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F3A4',
u':cantante_donna_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F3A4',
u':artista:': u'\U0001F9D1\U0000200D\U0001F3A8',
u':artista_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F3A8',
u':artista_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F3A8',
u':artista_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F3A8',
u':artista_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F3A8',
u':artista_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F3A8',
u':artista_uomo:': u'\U0001F468\U0000200D\U0001F3A8',
u':artista_uomo_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F3A8',
u':artista_uomo_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F3A8',
u':artista_uomo_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F3A8',
u':artista_uomo_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F3A8',
u':artista_uomo_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F3A8',
u':artista_donna:': u'\U0001F469\U0000200D\U0001F3A8',
u':artista_donna_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F3A8',
u':artista_donna_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F3A8',
u':artista_donna_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F3A8',
u':artista_donna_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F3A8',
u':artista_donna_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F3A8',
u':pilota:': u'\U0001F9D1\U0000200D\U00002708\U0000FE0F',
u':pilota_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U00002708\U0000FE0F',
u':pilota_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U00002708\U0000FE0F',
u':pilota_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U00002708\U0000FE0F',
u':pilota_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U00002708\U0000FE0F',
u':pilota_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U00002708\U0000FE0F',
u':pilota_uomo:': u'\U0001F468\U0000200D\U00002708\U0000FE0F',
u':pilota_uomo_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U00002708\U0000FE0F',
u':pilota_uomo_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U00002708\U0000FE0F',
u':pilota_uomo_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U00002708\U0000FE0F',
u':pilota_uomo_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U00002708\U0000FE0F',
u':pilota_uomo_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U00002708\U0000FE0F',
u':pilota_donna:': u'\U0001F469\U0000200D\U00002708\U0000FE0F',
u':pilota_donna_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U00002708\U0000FE0F',
u':pilota_donna_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U00002708\U0000FE0F',
u':pilota_donna_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U00002708\U0000FE0F',
u':pilota_donna_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U00002708\U0000FE0F',
u':pilota_donna_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U00002708\U0000FE0F',
u':astronauta:': u'\U0001F9D1\U0000200D\U0001F680',
u':astronauta_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F680',
u':astronauta_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F680',
u':astronauta_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F680',
u':astronauta_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F680',
u':astronauta_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F680',
u':astronauta_uomo:': u'\U0001F468\U0000200D\U0001F680',
u':astronauta_uomo_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F680',
u':astronauta_uomo_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F680',
u':astronauta_uomo_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F680',
u':astronauta_uomo_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F680',
u':astronauta_uomo_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F680',
u':astronauta_donna:': u'\U0001F469\U0000200D\U0001F680',
u':astronauta_donna_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F680',
u':astronauta_donna_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F680',
u':astronauta_donna_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F680',
u':astronauta_donna_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F680',
u':astronauta_donna_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F680',
u':pompiere:': u'\U0001F9D1\U0000200D\U0001F692',
u':pompiere_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F692',
u':pompiere_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F692',
u':pompiere_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F692',
u':pompiere_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F692',
u':pompiere_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F692',
u':pompiere_uomo:': u'\U0001F468\U0000200D\U0001F692',
u':pompiere_uomo_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F692',
u':pompiere_uomo_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F692',
u':pompiere_uomo_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F692',
u':pompiere_uomo_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F692',
u':pompiere_uomo_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F692',
u':pompiere_donna:': u'\U0001F469\U0000200D\U0001F692',
u':pompiere_donna_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F692',
u':pompiere_donna_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F692',
u':pompiere_donna_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F692',
u':pompiere_donna_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F692',
u':pompiere_donna_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F692',
u':agente_di_polizia:': u'\U0001F46E',
u':agente_di_polizia_carnagione_chiara:': u'\U0001F46E\U0001F3FB',
u':agente_di_polizia_carnagione_abbastanza_chiara:': u'\U0001F46E\U0001F3FC',
u':agente_di_polizia_carnagione_olivastra:': u'\U0001F46E\U0001F3FD',
u':agente_di_polizia_carnagione_abbastanza_scura:': u'\U0001F46E\U0001F3FE',
u':agente_di_polizia_carnagione_scura:': u'\U0001F46E\U0001F3FF',
u':poliziotto_uomo:': u'\U0001F46E\U0000200D\U00002642\U0000FE0F',
u':poliziotto_uomo_carnagione_chiara:': u'\U0001F46E\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':poliziotto_uomo_carnagione_abbastanza_chiara:': u'\U0001F46E\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':poliziotto_uomo_carnagione_olivastra:': u'\U0001F46E\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':poliziotto_uomo_carnagione_abbastanza_scura:': u'\U0001F46E\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':poliziotto_uomo_carnagione_scura:': u'\U0001F46E\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':poliziotta:': u'\U0001F46E\U0000200D\U00002640\U0000FE0F',
u':poliziotta_carnagione_chiara:': u'\U0001F46E\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':poliziotta_carnagione_abbastanza_chiara:': u'\U0001F46E\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':poliziotta_carnagione_olivastra:': u'\U0001F46E\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':poliziotta_carnagione_abbastanza_scura:': u'\U0001F46E\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':poliziotta_carnagione_scura:': u'\U0001F46E\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':detective:': u'\U0001F575\U0000FE0F',
u':detective_carnagione_chiara:': u'\U0001F575\U0001F3FB',
u':detective_carnagione_abbastanza_chiara:': u'\U0001F575\U0001F3FC',
u':detective_carnagione_olivastra:': u'\U0001F575\U0001F3FD',
u':detective_carnagione_abbastanza_scura:': u'\U0001F575\U0001F3FE',
u':detective_carnagione_scura:': u'\U0001F575\U0001F3FF',
u':investigatore:': u'\U0001F575\U0000FE0F\U0000200D\U00002642\U0000FE0F',
u':investigatore_carnagione_chiara:': u'\U0001F575\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':investigatore_carnagione_abbastanza_chiara:': u'\U0001F575\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':investigatore_carnagione_olivastra:': u'\U0001F575\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':investigatore_carnagione_abbastanza_scura:': u'\U0001F575\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':investigatore_carnagione_scura:': u'\U0001F575\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':investigatrice:': u'\U0001F575\U0000FE0F\U0000200D\U00002640\U0000FE0F',
u':investigatrice_carnagione_chiara:': u'\U0001F575\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':investigatrice_carnagione_abbastanza_chiara:': u'\U0001F575\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':investigatrice_carnagione_olivastra:': u'\U0001F575\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':investigatrice_carnagione_abbastanza_scura:': u'\U0001F575\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':investigatrice_carnagione_scura:': u'\U0001F575\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':guardia:': u'\U0001F482',
u':guardia_carnagione_chiara:': u'\U0001F482\U0001F3FB',
u':guardia_carnagione_abbastanza_chiara:': u'\U0001F482\U0001F3FC',
u':guardia_carnagione_olivastra:': u'\U0001F482\U0001F3FD',
u':guardia_carnagione_abbastanza_scura:': u'\U0001F482\U0001F3FE',
u':guardia_carnagione_scura:': u'\U0001F482\U0001F3FF',
u':guardia_uomo:': u'\U0001F482\U0000200D\U00002642\U0000FE0F',
u':guardia_uomo_carnagione_chiara:': u'\U0001F482\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':guardia_uomo_carnagione_abbastanza_chiara:': u'\U0001F482\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':guardia_uomo_carnagione_olivastra:': u'\U0001F482\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':guardia_uomo_carnagione_abbastanza_scura:': u'\U0001F482\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':guardia_uomo_carnagione_scura:': u'\U0001F482\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':guardia_donna:': u'\U0001F482\U0000200D\U00002640\U0000FE0F',
u':guardia_donna_carnagione_chiara:': u'\U0001F482\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':guardia_donna_carnagione_abbastanza_chiara:': u'\U0001F482\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':guardia_donna_carnagione_olivastra:': u'\U0001F482\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':guardia_donna_carnagione_abbastanza_scura:': u'\U0001F482\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':guardia_donna_carnagione_scura:': u'\U0001F482\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':ninja:': u'\U0001F977',
u':ninja_carnagione_chiara:': u'\U0001F977\U0001F3FB',
u':ninja_carnagione_abbastanza_chiara:': u'\U0001F977\U0001F3FC',
u':ninja_carnagione_olivastra:': u'\U0001F977\U0001F3FD',
u':ninja_carnagione_abbastanza_scura:': u'\U0001F977\U0001F3FE',
u':ninja_carnagione_scura:': u'\U0001F977\U0001F3FF',
u':operaio_edile:': u'\U0001F477',
u':operaio_edile_carnagione_chiara:': u'\U0001F477\U0001F3FB',
u':operaio_edile_carnagione_abbastanza_chiara:': u'\U0001F477\U0001F3FC',
u':operaio_edile_carnagione_olivastra:': u'\U0001F477\U0001F3FD',
u':operaio_edile_carnagione_abbastanza_scura:': u'\U0001F477\U0001F3FE',
u':operaio_edile_carnagione_scura:': u'\U0001F477\U0001F3FF',
u':operaio_edile_uomo:': u'\U0001F477\U0000200D\U00002642\U0000FE0F',
u':operaio_edile_uomo_carnagione_chiara:': u'\U0001F477\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':operaio_edile_uomo_carnagione_abbastanza_chiara:': u'\U0001F477\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':operaio_edile_uomo_carnagione_olivastra:': u'\U0001F477\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':operaio_edile_uomo_carnagione_abbastanza_scura:': u'\U0001F477\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':operaio_edile_uomo_carnagione_scura:': u'\U0001F477\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':operaia_edile:': u'\U0001F477\U0000200D\U00002640\U0000FE0F',
u':operaia_edile_carnagione_chiara:': u'\U0001F477\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':operaia_edile_carnagione_abbastanza_chiara:': u'\U0001F477\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':operaia_edile_carnagione_olivastra:': u'\U0001F477\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':operaia_edile_carnagione_abbastanza_scura:': u'\U0001F477\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':operaia_edile_carnagione_scura:': u'\U0001F477\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':principe:': u'\U0001F934',
u':principe_carnagione_chiara:': u'\U0001F934\U0001F3FB',
u':principe_carnagione_abbastanza_chiara:': u'\U0001F934\U0001F3FC',
u':principe_carnagione_olivastra:': u'\U0001F934\U0001F3FD',
u':principe_carnagione_abbastanza_scura:': u'\U0001F934\U0001F3FE',
u':principe_carnagione_scura:': u'\U0001F934\U0001F3FF',
u':principessa:': u'\U0001F478',
u':principessa_carnagione_chiara:': u'\U0001F478\U0001F3FB',
u':principessa_carnagione_abbastanza_chiara:': u'\U0001F478\U0001F3FC',
u':principessa_carnagione_olivastra:': u'\U0001F478\U0001F3FD',
u':principessa_carnagione_abbastanza_scura:': u'\U0001F478\U0001F3FE',
u':principessa_carnagione_scura:': u'\U0001F478\U0001F3FF',
u':persona_con_turbante:': u'\U0001F473',
u':persona_con_turbante_carnagione_chiara:': u'\U0001F473\U0001F3FB',
u':persona_con_turbante_carnagione_abbastanza_chiara:': u'\U0001F473\U0001F3FC',
u':persona_con_turbante_carnagione_olivastra:': u'\U0001F473\U0001F3FD',
u':persona_con_turbante_carnagione_abbastanza_scura:': u'\U0001F473\U0001F3FE',
u':persona_con_turbante_carnagione_scura:': u'\U0001F473\U0001F3FF',
u':uomo_con_turbante:': u'\U0001F473\U0000200D\U00002642\U0000FE0F',
u':uomo_con_turbante_carnagione_chiara:': u'\U0001F473\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_con_turbante_carnagione_abbastanza_chiara:': u'\U0001F473\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_con_turbante_carnagione_olivastra:': u'\U0001F473\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_con_turbante_carnagione_abbastanza_scura:': u'\U0001F473\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_con_turbante_carnagione_scura:': u'\U0001F473\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_con_turbante:': u'\U0001F473\U0000200D\U00002640\U0000FE0F',
u':donna_con_turbante_carnagione_chiara:': u'\U0001F473\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_con_turbante_carnagione_abbastanza_chiara:': u'\U0001F473\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_con_turbante_carnagione_olivastra:': u'\U0001F473\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_con_turbante_carnagione_abbastanza_scura:': u'\U0001F473\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_con_turbante_carnagione_scura:': u'\U0001F473\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':uomo_con_zucchetto_cinese:': u'\U0001F472',
u':uomo_con_zucchetto_cinese_carnagione_chiara:': u'\U0001F472\U0001F3FB',
u':uomo_con_zucchetto_cinese_carnagione_abbastanza_chiara:': u'\U0001F472\U0001F3FC',
u':uomo_con_zucchetto_cinese_carnagione_olivastra:': u'\U0001F472\U0001F3FD',
u':uomo_con_zucchetto_cinese_carnagione_abbastanza_scura:': u'\U0001F472\U0001F3FE',
u':uomo_con_zucchetto_cinese_carnagione_scura:': u'\U0001F472\U0001F3FF',
u':donna_con_velo:': u'\U0001F9D5',
u':donna_con_velo_carnagione_chiara:': u'\U0001F9D5\U0001F3FB',
u':donna_con_velo_carnagione_abbastanza_chiara:': u'\U0001F9D5\U0001F3FC',
u':donna_con_velo_carnagione_olivastra:': u'\U0001F9D5\U0001F3FD',
u':donna_con_velo_carnagione_abbastanza_scura:': u'\U0001F9D5\U0001F3FE',
u':donna_con_velo_carnagione_scura:': u'\U0001F9D5\U0001F3FF',
u':persona_in_smoking:': u'\U0001F935',
u':persona_in_smoking_carnagione_chiara:': u'\U0001F935\U0001F3FB',
u':persona_in_smoking_carnagione_abbastanza_chiara:': u'\U0001F935\U0001F3FC',
u':persona_in_smoking_carnagione_olivastra:': u'\U0001F935\U0001F3FD',
u':persona_in_smoking_carnagione_abbastanza_scura:': u'\U0001F935\U0001F3FE',
u':persona_in_smoking_carnagione_scura:': u'\U0001F935\U0001F3FF',
u':uomo_in_smoking:': u'\U0001F935\U0000200D\U00002642\U0000FE0F',
u':uomo_in_smoking_carnagione_chiara:': u'\U0001F935\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_in_smoking_carnagione_abbastanza_chiara:': u'\U0001F935\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_in_smoking_carnagione_olivastra:': u'\U0001F935\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_in_smoking_carnagione_abbastanza_scura:': u'\U0001F935\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_in_smoking_carnagione_scura:': u'\U0001F935\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_in_smoking:': u'\U0001F935\U0000200D\U00002640\U0000FE0F',
u':donna_in_smoking_carnagione_chiara:': u'\U0001F935\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_in_smoking_carnagione_abbastanza_chiara:': u'\U0001F935\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_in_smoking_carnagione_olivastra:': u'\U0001F935\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_in_smoking_carnagione_abbastanza_scura:': u'\U0001F935\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_in_smoking_carnagione_scura:': u'\U0001F935\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_con_velo:': u'\U0001F470',
u':persona_con_velo_carnagione_chiara:': u'\U0001F470\U0001F3FB',
u':persona_con_velo_carnagione_abbastanza_chiara:': u'\U0001F470\U0001F3FC',
u':persona_con_velo_carnagione_olivastra:': u'\U0001F470\U0001F3FD',
u':persona_con_velo_carnagione_abbastanza_scura:': u'\U0001F470\U0001F3FE',
u':persona_con_velo_carnagione_scura:': u'\U0001F470\U0001F3FF',
u':sposo_con_velo:': u'\U0001F470\U0000200D\U00002642\U0000FE0F',
u':sposo_con_velo_carnagione_chiara:': u'\U0001F470\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':sposo_con_velo_carnagione_abbastanza_chiara:': u'\U0001F470\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':sposo_con_velo_carnagione_olivastra:': u'\U0001F470\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':sposo_con_velo_carnagione_abbastanza_scura:': u'\U0001F470\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':sposo_con_velo_carnagione_scura:': u'\U0001F470\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':sposa_con_velo:': u'\U0001F470\U0000200D\U00002640\U0000FE0F',
u':sposa_con_velo_carnagione_chiara:': u'\U0001F470\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':sposa_con_velo_carnagione_abbastanza_chiara:': u'\U0001F470\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':sposa_con_velo_carnagione_olivastra:': u'\U0001F470\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':sposa_con_velo_carnagione_abbastanza_scura:': u'\U0001F470\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':sposa_con_velo_carnagione_scura:': u'\U0001F470\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':donna_incinta:': u'\U0001F930',
u':donna_incinta_carnagione_chiara:': u'\U0001F930\U0001F3FB',
u':donna_incinta_carnagione_abbastanza_chiara:': u'\U0001F930\U0001F3FC',
u':donna_incinta_carnagione_olivastra:': u'\U0001F930\U0001F3FD',
u':donna_incinta_carnagione_abbastanza_scura:': u'\U0001F930\U0001F3FE',
u':donna_incinta_carnagione_scura:': u'\U0001F930\U0001F3FF',
u':allattare:': u'\U0001F931',
u':allattare_carnagione_chiara:': u'\U0001F931\U0001F3FB',
u':allattare_carnagione_abbastanza_chiara:': u'\U0001F931\U0001F3FC',
u':allattare_carnagione_olivastra:': u'\U0001F931\U0001F3FD',
u':allattare_carnagione_abbastanza_scura:': u'\U0001F931\U0001F3FE',
u':allattare_carnagione_scura:': u'\U0001F931\U0001F3FF',
u':donna_che_allatta:': u'\U0001F469\U0000200D\U0001F37C',
u':donna_che_allatta_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F37C',
u':donna_che_allatta_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F37C',
u':donna_che_allatta_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F37C',
u':donna_che_allatta_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F37C',
u':donna_che_allatta_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F37C',
u':uomo_che_allatta:': u'\U0001F468\U0000200D\U0001F37C',
u':uomo_che_allatta_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F37C',
u':uomo_che_allatta_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F37C',
u':uomo_che_allatta_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F37C',
u':uomo_che_allatta_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F37C',
u':uomo_che_allatta_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F37C',
u':persona_che_allatta:': u'\U0001F9D1\U0000200D\U0001F37C',
u':persona_che_allatta_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F37C',
u':persona_che_allatta_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F37C',
u':persona_che_allatta_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F37C',
u':persona_che_allatta_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F37C',
u':persona_che_allatta_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F37C',
u':angioletto:': u'\U0001F47C',
u':angioletto_carnagione_chiara:': u'\U0001F47C\U0001F3FB',
u':angioletto_carnagione_abbastanza_chiara:': u'\U0001F47C\U0001F3FC',
u':angioletto_carnagione_olivastra:': u'\U0001F47C\U0001F3FD',
u':angioletto_carnagione_abbastanza_scura:': u'\U0001F47C\U0001F3FE',
u':angioletto_carnagione_scura:': u'\U0001F47C\U0001F3FF',
u':babbo_natale:': u'\U0001F385',
u':babbo_natale_carnagione_chiara:': u'\U0001F385\U0001F3FB',
u':babbo_natale_carnagione_abbastanza_chiara:': u'\U0001F385\U0001F3FC',
u':babbo_natale_carnagione_olivastra:': u'\U0001F385\U0001F3FD',
u':babbo_natale_carnagione_abbastanza_scura:': u'\U0001F385\U0001F3FE',
u':babbo_natale_carnagione_scura:': u'\U0001F385\U0001F3FF',
u':mamma_natale:': u'\U0001F936',
u':mamma_natale_carnagione_chiara:': u'\U0001F936\U0001F3FB',
u':mamma_natale_carnagione_abbastanza_chiara:': u'\U0001F936\U0001F3FC',
u':mamma_natale_carnagione_olivastra:': u'\U0001F936\U0001F3FD',
u':mamma_natale_carnagione_abbastanza_scura:': u'\U0001F936\U0001F3FE',
u':mamma_natale_carnagione_scura:': u'\U0001F936\U0001F3FF',
u':santa_claus:': u'\U0001F9D1\U0000200D\U0001F384',
u':santa_claus_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F384',
u':santa_claus_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F384',
u':santa_claus_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F384',
u':santa_claus_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F384',
u':santa_claus_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F384',
u':supereroe:': u'\U0001F9B8',
u':supereroe_carnagione_chiara:': u'\U0001F9B8\U0001F3FB',
u':supereroe_carnagione_abbastanza_chiara:': u'\U0001F9B8\U0001F3FC',
u':supereroe_carnagione_olivastra:': u'\U0001F9B8\U0001F3FD',
u':supereroe_carnagione_abbastanza_scura:': u'\U0001F9B8\U0001F3FE',
u':supereroe_carnagione_scura:': u'\U0001F9B8\U0001F3FF',
u':supereroe_uomo:': u'\U0001F9B8\U0000200D\U00002642\U0000FE0F',
u':supereroe_uomo_carnagione_chiara:': u'\U0001F9B8\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':supereroe_uomo_carnagione_abbastanza_chiara:': u'\U0001F9B8\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':supereroe_uomo_carnagione_olivastra:': u'\U0001F9B8\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':supereroe_uomo_carnagione_abbastanza_scura:': u'\U0001F9B8\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':supereroe_uomo_carnagione_scura:': u'\U0001F9B8\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':supereroina:': u'\U0001F9B8\U0000200D\U00002640\U0000FE0F',
u':supereroina_carnagione_chiara:': u'\U0001F9B8\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':supereroina_carnagione_abbastanza_chiara:': u'\U0001F9B8\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':supereroina_carnagione_olivastra:': u'\U0001F9B8\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':supereroina_carnagione_abbastanza_scura:': u'\U0001F9B8\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':supereroina_carnagione_scura:': u'\U0001F9B8\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':supercattivo:': u'\U0001F9B9',
u':supercattivo_carnagione_chiara:': u'\U0001F9B9\U0001F3FB',
u':supercattivo_carnagione_abbastanza_chiara:': u'\U0001F9B9\U0001F3FC',
u':supercattivo_carnagione_olivastra:': u'\U0001F9B9\U0001F3FD',
u':supercattivo_carnagione_abbastanza_scura:': u'\U0001F9B9\U0001F3FE',
u':supercattivo_carnagione_scura:': u'\U0001F9B9\U0001F3FF',
u':supercattivo_uomo:': u'\U0001F9B9\U0000200D\U00002642\U0000FE0F',
u':supercattivo_uomo_carnagione_chiara:': u'\U0001F9B9\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':supercattivo_uomo_carnagione_abbastanza_chiara:': u'\U0001F9B9\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':supercattivo_uomo_carnagione_olivastra:': u'\U0001F9B9\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':supercattivo_uomo_carnagione_abbastanza_scura:': u'\U0001F9B9\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':supercattivo_uomo_carnagione_scura:': u'\U0001F9B9\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':supercattiva:': u'\U0001F9B9\U0000200D\U00002640\U0000FE0F',
u':supercattiva_carnagione_chiara:': u'\U0001F9B9\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':supercattiva_carnagione_abbastanza_chiara:': u'\U0001F9B9\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':supercattiva_carnagione_olivastra:': u'\U0001F9B9\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':supercattiva_carnagione_abbastanza_scura:': u'\U0001F9B9\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':supercattiva_carnagione_scura:': u'\U0001F9B9\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':mago:': u'\U0001F9D9',
u':mago_carnagione_chiara:': u'\U0001F9D9\U0001F3FB',
u':mago_carnagione_abbastanza_chiara:': u'\U0001F9D9\U0001F3FC',
u':mago_carnagione_olivastra:': u'\U0001F9D9\U0001F3FD',
u':mago_carnagione_abbastanza_scura:': u'\U0001F9D9\U0001F3FE',
u':mago_carnagione_scura:': u'\U0001F9D9\U0001F3FF',
u':mago_uomo:': u'\U0001F9D9\U0000200D\U00002642\U0000FE0F',
u':mago_uomo_carnagione_chiara:': u'\U0001F9D9\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':mago_uomo_carnagione_abbastanza_chiara:': u'\U0001F9D9\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':mago_uomo_carnagione_olivastra:': u'\U0001F9D9\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':mago_uomo_carnagione_abbastanza_scura:': u'\U0001F9D9\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':mago_uomo_carnagione_scura:': u'\U0001F9D9\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':maga:': u'\U0001F9D9\U0000200D\U00002640\U0000FE0F',
u':maga_carnagione_chiara:': u'\U0001F9D9\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':maga_carnagione_abbastanza_chiara:': u'\U0001F9D9\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':maga_carnagione_olivastra:': u'\U0001F9D9\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':maga_carnagione_abbastanza_scura:': u'\U0001F9D9\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':maga_carnagione_scura:': u'\U0001F9D9\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':fata:': u'\U0001F9DA',
u':fata_carnagione_chiara:': u'\U0001F9DA\U0001F3FB',
u':fata_carnagione_abbastanza_chiara:': u'\U0001F9DA\U0001F3FC',
u':fata_carnagione_olivastra:': u'\U0001F9DA\U0001F3FD',
u':fata_carnagione_abbastanza_scura:': u'\U0001F9DA\U0001F3FE',
u':fata_carnagione_scura:': u'\U0001F9DA\U0001F3FF',
u':folletto_alato:': u'\U0001F9DA\U0000200D\U00002642\U0000FE0F',
u':folletto_alato_carnagione_chiara:': u'\U0001F9DA\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':folletto_alato_carnagione_abbastanza_chiara:': u'\U0001F9DA\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':folletto_alato_carnagione_olivastra:': u'\U0001F9DA\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':folletto_alato_carnagione_abbastanza_scura:': u'\U0001F9DA\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':folletto_alato_carnagione_scura:': u'\U0001F9DA\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':fata_donna:': u'\U0001F9DA\U0000200D\U00002640\U0000FE0F',
u':fata_donna_carnagione_chiara:': u'\U0001F9DA\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':fata_donna_carnagione_abbastanza_chiara:': u'\U0001F9DA\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':fata_donna_carnagione_olivastra:': u'\U0001F9DA\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':fata_donna_carnagione_abbastanza_scura:': u'\U0001F9DA\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':fata_donna_carnagione_scura:': u'\U0001F9DA\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':vampiro:': u'\U0001F9DB',
u':vampiro_carnagione_chiara:': u'\U0001F9DB\U0001F3FB',
u':vampiro_carnagione_abbastanza_chiara:': u'\U0001F9DB\U0001F3FC',
u':vampiro_carnagione_olivastra:': u'\U0001F9DB\U0001F3FD',
u':vampiro_carnagione_abbastanza_scura:': u'\U0001F9DB\U0001F3FE',
u':vampiro_carnagione_scura:': u'\U0001F9DB\U0001F3FF',
u':vampiro_uomo:': u'\U0001F9DB\U0000200D\U00002642\U0000FE0F',
u':vampiro_uomo_carnagione_chiara:': u'\U0001F9DB\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':vampiro_uomo_carnagione_abbastanza_chiara:': u'\U0001F9DB\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':vampiro_uomo_carnagione_olivastra:': u'\U0001F9DB\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':vampiro_uomo_carnagione_abbastanza_scura:': u'\U0001F9DB\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':vampiro_uomo_carnagione_scura:': u'\U0001F9DB\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':vampira:': u'\U0001F9DB\U0000200D\U00002640\U0000FE0F',
u':vampira_carnagione_chiara:': u'\U0001F9DB\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':vampira_carnagione_abbastanza_chiara:': u'\U0001F9DB\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':vampira_carnagione_olivastra:': u'\U0001F9DB\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':vampira_carnagione_abbastanza_scura:': u'\U0001F9DB\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':vampira_carnagione_scura:': u'\U0001F9DB\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':sirena:': u'\U0001F9DC',
u':sirena_carnagione_chiara:': u'\U0001F9DC\U0001F3FB',
u':sirena_carnagione_abbastanza_chiara:': u'\U0001F9DC\U0001F3FC',
u':sirena_carnagione_olivastra:': u'\U0001F9DC\U0001F3FD',
u':sirena_carnagione_abbastanza_scura:': u'\U0001F9DC\U0001F3FE',
u':sirena_carnagione_scura:': u'\U0001F9DC\U0001F3FF',
u':tritone:': u'\U0001F9DC\U0000200D\U00002642\U0000FE0F',
u':tritone_carnagione_chiara:': u'\U0001F9DC\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':tritone_carnagione_abbastanza_chiara:': u'\U0001F9DC\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':tritone_carnagione_olivastra:': u'\U0001F9DC\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':tritone_carnagione_abbastanza_scura:': u'\U0001F9DC\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':tritone_carnagione_scura:': u'\U0001F9DC\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':sirena_donna:': u'\U0001F9DC\U0000200D\U00002640\U0000FE0F',
u':sirena_donna_carnagione_chiara:': u'\U0001F9DC\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':sirena_donna_carnagione_abbastanza_chiara:': u'\U0001F9DC\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':sirena_donna_carnagione_olivastra:': u'\U0001F9DC\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':sirena_donna_carnagione_abbastanza_scura:': u'\U0001F9DC\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':sirena_donna_carnagione_scura:': u'\U0001F9DC\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':elfo:': u'\U0001F9DD',
u':elfo_carnagione_chiara:': u'\U0001F9DD\U0001F3FB',
u':elfo_carnagione_abbastanza_chiara:': u'\U0001F9DD\U0001F3FC',
u':elfo_carnagione_olivastra:': u'\U0001F9DD\U0001F3FD',
u':elfo_carnagione_abbastanza_scura:': u'\U0001F9DD\U0001F3FE',
u':elfo_carnagione_scura:': u'\U0001F9DD\U0001F3FF',
u':elfo_uomo:': u'\U0001F9DD\U0000200D\U00002642\U0000FE0F',
u':elfo_uomo_carnagione_chiara:': u'\U0001F9DD\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':elfo_uomo_carnagione_abbastanza_chiara:': u'\U0001F9DD\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':elfo_uomo_carnagione_olivastra:': u'\U0001F9DD\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':elfo_uomo_carnagione_abbastanza_scura:': u'\U0001F9DD\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':elfo_uomo_carnagione_scura:': u'\U0001F9DD\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':elfo_donna:': u'\U0001F9DD\U0000200D\U00002640\U0000FE0F',
u':elfo_donna_carnagione_chiara:': u'\U0001F9DD\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':elfo_donna_carnagione_abbastanza_chiara:': u'\U0001F9DD\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':elfo_donna_carnagione_olivastra:': u'\U0001F9DD\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':elfo_donna_carnagione_abbastanza_scura:': u'\U0001F9DD\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':elfo_donna_carnagione_scura:': u'\U0001F9DD\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':genio:': u'\U0001F9DE',
u':genio_uomo:': u'\U0001F9DE\U0000200D\U00002642\U0000FE0F',
u':genio_donna:': u'\U0001F9DE\U0000200D\U00002640\U0000FE0F',
u':zombie:': u'\U0001F9DF',
u':zombie_uomo:': u'\U0001F9DF\U0000200D\U00002642\U0000FE0F',
u':zombie_donna:': u'\U0001F9DF\U0000200D\U00002640\U0000FE0F',
u':persona_che_riceve_un_massaggio:': u'\U0001F486',
u':persona_che_riceve_un_massaggio_carnagione_chiara:': u'\U0001F486\U0001F3FB',
u':persona_che_riceve_un_massaggio_carnagione_abbastanza_chiara:': u'\U0001F486\U0001F3FC',
u':persona_che_riceve_un_massaggio_carnagione_olivastra:': u'\U0001F486\U0001F3FD',
u':persona_che_riceve_un_massaggio_carnagione_abbastanza_scura:': u'\U0001F486\U0001F3FE',
u':persona_che_riceve_un_massaggio_carnagione_scura:': u'\U0001F486\U0001F3FF',
u':uomo_che_riceve_un_massaggio:': u'\U0001F486\U0000200D\U00002642\U0000FE0F',
u':uomo_che_riceve_un_massaggio_carnagione_chiara:': u'\U0001F486\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_che_riceve_un_massaggio_carnagione_abbastanza_chiara:': u'\U0001F486\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_che_riceve_un_massaggio_carnagione_olivastra:': u'\U0001F486\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_che_riceve_un_massaggio_carnagione_abbastanza_scura:': u'\U0001F486\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_che_riceve_un_massaggio_carnagione_scura:': u'\U0001F486\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_che_riceve_un_massaggio:': u'\U0001F486\U0000200D\U00002640\U0000FE0F',
u':donna_che_riceve_un_massaggio_carnagione_chiara:': u'\U0001F486\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_che_riceve_un_massaggio_carnagione_abbastanza_chiara:': u'\U0001F486\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_che_riceve_un_massaggio_carnagione_olivastra:': u'\U0001F486\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_che_riceve_un_massaggio_carnagione_abbastanza_scura:': u'\U0001F486\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_che_riceve_un_massaggio_carnagione_scura:': u'\U0001F486\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':taglio_di_capelli:': u'\U0001F487',
u':taglio_di_capelli_carnagione_chiara:': u'\U0001F487\U0001F3FB',
u':taglio_di_capelli_carnagione_abbastanza_chiara:': u'\U0001F487\U0001F3FC',
u':taglio_di_capelli_carnagione_olivastra:': u'\U0001F487\U0001F3FD',
u':taglio_di_capelli_carnagione_abbastanza_scura:': u'\U0001F487\U0001F3FE',
u':taglio_di_capelli_carnagione_scura:': u'\U0001F487\U0001F3FF',
u':taglio_di_capelli_per_uomo:': u'\U0001F487\U0000200D\U00002642\U0000FE0F',
u':taglio_di_capelli_per_uomo_carnagione_chiara:': u'\U0001F487\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':taglio_di_capelli_per_uomo_carnagione_abbastanza_chiara:': u'\U0001F487\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':taglio_di_capelli_per_uomo_carnagione_olivastra:': u'\U0001F487\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':taglio_di_capelli_per_uomo_carnagione_abbastanza_scura:': u'\U0001F487\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':taglio_di_capelli_per_uomo_carnagione_scura:': u'\U0001F487\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':taglio_di_capelli_per_donna:': u'\U0001F487\U0000200D\U00002640\U0000FE0F',
u':taglio_di_capelli_per_donna_carnagione_chiara:': u'\U0001F487\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':taglio_di_capelli_per_donna_carnagione_abbastanza_chiara:': u'\U0001F487\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':taglio_di_capelli_per_donna_carnagione_olivastra:': u'\U0001F487\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':taglio_di_capelli_per_donna_carnagione_abbastanza_scura:': u'\U0001F487\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':taglio_di_capelli_per_donna_carnagione_scura:': u'\U0001F487\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':pedone:': u'\U0001F6B6',
u':pedone_carnagione_chiara:': u'\U0001F6B6\U0001F3FB',
u':pedone_carnagione_abbastanza_chiara:': u'\U0001F6B6\U0001F3FC',
u':pedone_carnagione_olivastra:': u'\U0001F6B6\U0001F3FD',
u':pedone_carnagione_abbastanza_scura:': u'\U0001F6B6\U0001F3FE',
u':pedone_carnagione_scura:': u'\U0001F6B6\U0001F3FF',
u':uomo_che_cammina:': u'\U0001F6B6\U0000200D\U00002642\U0000FE0F',
u':uomo_che_cammina_carnagione_chiara:': u'\U0001F6B6\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_che_cammina_carnagione_abbastanza_chiara:': u'\U0001F6B6\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_che_cammina_carnagione_olivastra:': u'\U0001F6B6\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_che_cammina_carnagione_abbastanza_scura:': u'\U0001F6B6\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_che_cammina_carnagione_scura:': u'\U0001F6B6\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_che_cammina:': u'\U0001F6B6\U0000200D\U00002640\U0000FE0F',
u':donna_che_cammina_carnagione_chiara:': u'\U0001F6B6\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_che_cammina_carnagione_abbastanza_chiara:': u'\U0001F6B6\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_che_cammina_carnagione_olivastra:': u'\U0001F6B6\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_che_cammina_carnagione_abbastanza_scura:': u'\U0001F6B6\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_che_cammina_carnagione_scura:': u'\U0001F6B6\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_in_piedi:': u'\U0001F9CD',
u':persona_in_piedi_carnagione_chiara:': u'\U0001F9CD\U0001F3FB',
u':persona_in_piedi_carnagione_abbastanza_chiara:': u'\U0001F9CD\U0001F3FC',
u':persona_in_piedi_carnagione_olivastra:': u'\U0001F9CD\U0001F3FD',
u':persona_in_piedi_carnagione_abbastanza_scura:': u'\U0001F9CD\U0001F3FE',
u':persona_in_piedi_carnagione_scura:': u'\U0001F9CD\U0001F3FF',
u':uomo_in_piedi:': u'\U0001F9CD\U0000200D\U00002642\U0000FE0F',
u':uomo_in_piedi_carnagione_chiara:': u'\U0001F9CD\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_in_piedi_carnagione_abbastanza_chiara:': u'\U0001F9CD\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_in_piedi_carnagione_olivastra:': u'\U0001F9CD\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_in_piedi_carnagione_abbastanza_scura:': u'\U0001F9CD\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_in_piedi_carnagione_scura:': u'\U0001F9CD\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_in_piedi:': u'\U0001F9CD\U0000200D\U00002640\U0000FE0F',
u':donna_in_piedi_carnagione_chiara:': u'\U0001F9CD\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_in_piedi_carnagione_abbastanza_chiara:': u'\U0001F9CD\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_in_piedi_carnagione_olivastra:': u'\U0001F9CD\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_in_piedi_carnagione_abbastanza_scura:': u'\U0001F9CD\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_in_piedi_carnagione_scura:': u'\U0001F9CD\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_inginocchiata:': u'\U0001F9CE',
u':persona_inginocchiata_carnagione_chiara:': u'\U0001F9CE\U0001F3FB',
u':persona_inginocchiata_carnagione_abbastanza_chiara:': u'\U0001F9CE\U0001F3FC',
u':persona_inginocchiata_carnagione_olivastra:': u'\U0001F9CE\U0001F3FD',
u':persona_inginocchiata_carnagione_abbastanza_scura:': u'\U0001F9CE\U0001F3FE',
u':persona_inginocchiata_carnagione_scura:': u'\U0001F9CE\U0001F3FF',
u':uomo_inginocchiato:': u'\U0001F9CE\U0000200D\U00002642\U0000FE0F',
u':uomo_inginocchiato_carnagione_chiara:': u'\U0001F9CE\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_inginocchiato_carnagione_abbastanza_chiara:': u'\U0001F9CE\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_inginocchiato_carnagione_olivastra:': u'\U0001F9CE\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_inginocchiato_carnagione_abbastanza_scura:': u'\U0001F9CE\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_inginocchiato_carnagione_scura:': u'\U0001F9CE\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_inginocchiata:': u'\U0001F9CE\U0000200D\U00002640\U0000FE0F',
u':donna_inginocchiata_carnagione_chiara:': u'\U0001F9CE\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_inginocchiata_carnagione_abbastanza_chiara:': u'\U0001F9CE\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_inginocchiata_carnagione_olivastra:': u'\U0001F9CE\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_inginocchiata_carnagione_abbastanza_scura:': u'\U0001F9CE\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_inginocchiata_carnagione_scura:': u'\U0001F9CE\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_con_bastone_di_orientamento:': u'\U0001F9D1\U0000200D\U0001F9AF',
u':persona_con_bastone_di_orientamento_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F9AF',
u':persona_con_bastone_di_orientamento_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F9AF',
u':persona_con_bastone_di_orientamento_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F9AF',
u':persona_con_bastone_di_orientamento_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F9AF',
u':persona_con_bastone_di_orientamento_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F9AF',
u':uomo_con_bastone_bianco_di_orientamento:': u'\U0001F468\U0000200D\U0001F9AF',
u':uomo_con_bastone_bianco_di_orientamento_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F9AF',
u':uomo_con_bastone_bianco_di_orientamento_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F9AF',
u':uomo_con_bastone_bianco_di_orientamento_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F9AF',
u':uomo_con_bastone_bianco_di_orientamento_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F9AF',
u':uomo_con_bastone_bianco_di_orientamento_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F9AF',
u':donna_con_bastone_bianco_di_orientamento:': u'\U0001F469\U0000200D\U0001F9AF',
u':donna_con_bastone_bianco_di_orientamento_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F9AF',
u':donna_con_bastone_bianco_di_orientamento_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F9AF',
u':donna_con_bastone_bianco_di_orientamento_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F9AF',
u':donna_con_bastone_bianco_di_orientamento_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F9AF',
u':donna_con_bastone_bianco_di_orientamento_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F9AF',
u':persona_su_sedia_a_rotelle_motorizzata:': u'\U0001F9D1\U0000200D\U0001F9BC',
u':persona_su_sedia_a_rotelle_motorizzata_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F9BC',
u':persona_su_sedia_a_rotelle_motorizzata_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F9BC',
u':persona_su_sedia_a_rotelle_motorizzata_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F9BC',
u':persona_su_sedia_a_rotelle_motorizzata_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F9BC',
u':persona_su_sedia_a_rotelle_motorizzata_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F9BC',
u':uomo_su_sedia_a_rotelle_motorizzata:': u'\U0001F468\U0000200D\U0001F9BC',
u':uomo_su_sedia_a_rotelle_motorizzata_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F9BC',
u':uomo_su_sedia_a_rotelle_motorizzata_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F9BC',
u':uomo_su_sedia_a_rotelle_motorizzata_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F9BC',
u':uomo_su_sedia_a_rotelle_motorizzata_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F9BC',
u':uomo_su_sedia_a_rotelle_motorizzata_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F9BC',
u':donna_su_sedia_a_rotelle_motorizzata:': u'\U0001F469\U0000200D\U0001F9BC',
u':donna_su_sedia_a_rotelle_motorizzata_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F9BC',
u':donna_su_sedia_a_rotelle_motorizzata_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F9BC',
u':donna_su_sedia_a_rotelle_motorizzata_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F9BC',
u':donna_su_sedia_a_rotelle_motorizzata_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F9BC',
u':donna_su_sedia_a_rotelle_motorizzata_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F9BC',
u':persona_su_sedia_a_rotelle_manuale:': u'\U0001F9D1\U0000200D\U0001F9BD',
u':persona_su_sedia_a_rotelle_manuale_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F9BD',
u':persona_su_sedia_a_rotelle_manuale_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F9BD',
u':persona_su_sedia_a_rotelle_manuale_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F9BD',
u':persona_su_sedia_a_rotelle_manuale_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F9BD',
u':persona_su_sedia_a_rotelle_manuale_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F9BD',
u':uomo_su_sedia_a_rotelle_manuale:': u'\U0001F468\U0000200D\U0001F9BD',
u':uomo_su_sedia_a_rotelle_manuale_carnagione_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F9BD',
u':uomo_su_sedia_a_rotelle_manuale_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F9BD',
u':uomo_su_sedia_a_rotelle_manuale_carnagione_olivastra:': u'\U0001F468\U0001F3FD\U0000200D\U0001F9BD',
u':uomo_su_sedia_a_rotelle_manuale_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F9BD',
u':uomo_su_sedia_a_rotelle_manuale_carnagione_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F9BD',
u':donna_su_sedia_a_rotelle_manuale:': u'\U0001F469\U0000200D\U0001F9BD',
u':donna_su_sedia_a_rotelle_manuale_carnagione_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F9BD',
u':donna_su_sedia_a_rotelle_manuale_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F9BD',
u':donna_su_sedia_a_rotelle_manuale_carnagione_olivastra:': u'\U0001F469\U0001F3FD\U0000200D\U0001F9BD',
u':donna_su_sedia_a_rotelle_manuale_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F9BD',
u':donna_su_sedia_a_rotelle_manuale_carnagione_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F9BD',
u':persona_che_corre:': u'\U0001F3C3',
u':persona_che_corre_carnagione_chiara:': u'\U0001F3C3\U0001F3FB',
u':persona_che_corre_carnagione_abbastanza_chiara:': u'\U0001F3C3\U0001F3FC',
u':persona_che_corre_carnagione_olivastra:': u'\U0001F3C3\U0001F3FD',
u':persona_che_corre_carnagione_abbastanza_scura:': u'\U0001F3C3\U0001F3FE',
u':persona_che_corre_carnagione_scura:': u'\U0001F3C3\U0001F3FF',
u':uomo_che_corre:': u'\U0001F3C3\U0000200D\U00002642\U0000FE0F',
u':uomo_che_corre_carnagione_chiara:': u'\U0001F3C3\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_che_corre_carnagione_abbastanza_chiara:': u'\U0001F3C3\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_che_corre_carnagione_olivastra:': u'\U0001F3C3\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_che_corre_carnagione_abbastanza_scura:': u'\U0001F3C3\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_che_corre_carnagione_scura:': u'\U0001F3C3\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_che_corre:': u'\U0001F3C3\U0000200D\U00002640\U0000FE0F',
u':donna_che_corre_carnagione_chiara:': u'\U0001F3C3\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_che_corre_carnagione_abbastanza_chiara:': u'\U0001F3C3\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_che_corre_carnagione_olivastra:': u'\U0001F3C3\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_che_corre_carnagione_abbastanza_scura:': u'\U0001F3C3\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_che_corre_carnagione_scura:': u'\U0001F3C3\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':donna_che_balla:': u'\U0001F483',
u':donna_che_balla_carnagione_chiara:': u'\U0001F483\U0001F3FB',
u':donna_che_balla_carnagione_abbastanza_chiara:': u'\U0001F483\U0001F3FC',
u':donna_che_balla_carnagione_olivastra:': u'\U0001F483\U0001F3FD',
u':donna_che_balla_carnagione_abbastanza_scura:': u'\U0001F483\U0001F3FE',
u':donna_che_balla_carnagione_scura:': u'\U0001F483\U0001F3FF',
u':uomo_che_balla:': u'\U0001F57A',
u':uomo_che_balla_carnagione_chiara:': u'\U0001F57A\U0001F3FB',
u':uomo_che_balla_carnagione_abbastanza_chiara:': u'\U0001F57A\U0001F3FC',
u':uomo_che_balla_carnagione_olivastra:': u'\U0001F57A\U0001F3FD',
u':uomo_che_balla_carnagione_abbastanza_scura:': u'\U0001F57A\U0001F3FE',
u':uomo_che_balla_carnagione_scura:': u'\U0001F57A\U0001F3FF',
u':uomo_con_completo_che_levita:': u'\U0001F574\U0000FE0F',
u':uomo_con_completo_che_levita_carnagione_chiara:': u'\U0001F574\U0001F3FB',
u':uomo_con_completo_che_levita_carnagione_abbastanza_chiara:': u'\U0001F574\U0001F3FC',
u':uomo_con_completo_che_levita_carnagione_olivastra:': u'\U0001F574\U0001F3FD',
u':uomo_con_completo_che_levita_carnagione_abbastanza_scura:': u'\U0001F574\U0001F3FE',
u':uomo_con_completo_che_levita_carnagione_scura:': u'\U0001F574\U0001F3FF',
u':persone_con_orecchie_da_coniglio:': u'\U0001F46F',
u':uomini_con_orecchie_da_coniglio:': u'\U0001F46F\U0000200D\U00002642\U0000FE0F',
u':donne_con_orecchie_da_coniglio:': u'\U0001F46F\U0000200D\U00002640\U0000FE0F',
u':persona_in_sauna:': u'\U0001F9D6',
u':persona_in_sauna_carnagione_chiara:': u'\U0001F9D6\U0001F3FB',
u':persona_in_sauna_carnagione_abbastanza_chiara:': u'\U0001F9D6\U0001F3FC',
u':persona_in_sauna_carnagione_olivastra:': u'\U0001F9D6\U0001F3FD',
u':persona_in_sauna_carnagione_abbastanza_scura:': u'\U0001F9D6\U0001F3FE',
u':persona_in_sauna_carnagione_scura:': u'\U0001F9D6\U0001F3FF',
u':uomo_in_sauna:': u'\U0001F9D6\U0000200D\U00002642\U0000FE0F',
u':uomo_in_sauna_carnagione_chiara:': u'\U0001F9D6\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_in_sauna_carnagione_abbastanza_chiara:': u'\U0001F9D6\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_in_sauna_carnagione_olivastra:': u'\U0001F9D6\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_in_sauna_carnagione_abbastanza_scura:': u'\U0001F9D6\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_in_sauna_carnagione_scura:': u'\U0001F9D6\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_in_sauna:': u'\U0001F9D6\U0000200D\U00002640\U0000FE0F',
u':donna_in_sauna_carnagione_chiara:': u'\U0001F9D6\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_in_sauna_carnagione_abbastanza_chiara:': u'\U0001F9D6\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_in_sauna_carnagione_olivastra:': u'\U0001F9D6\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_in_sauna_carnagione_abbastanza_scura:': u'\U0001F9D6\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_in_sauna_carnagione_scura:': u'\U0001F9D6\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_che_scala:': u'\U0001F9D7',
u':persona_che_scala_carnagione_chiara:': u'\U0001F9D7\U0001F3FB',
u':persona_che_scala_carnagione_abbastanza_chiara:': u'\U0001F9D7\U0001F3FC',
u':persona_che_scala_carnagione_olivastra:': u'\U0001F9D7\U0001F3FD',
u':persona_che_scala_carnagione_abbastanza_scura:': u'\U0001F9D7\U0001F3FE',
u':persona_che_scala_carnagione_scura:': u'\U0001F9D7\U0001F3FF',
u':scalatore:': u'\U0001F9D7\U0000200D\U00002642\U0000FE0F',
u':scalatore_carnagione_chiara:': u'\U0001F9D7\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':scalatore_carnagione_abbastanza_chiara:': u'\U0001F9D7\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':scalatore_carnagione_olivastra:': u'\U0001F9D7\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':scalatore_carnagione_abbastanza_scura:': u'\U0001F9D7\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':scalatore_carnagione_scura:': u'\U0001F9D7\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':scalatrice:': u'\U0001F9D7\U0000200D\U00002640\U0000FE0F',
u':scalatrice_carnagione_chiara:': u'\U0001F9D7\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':scalatrice_carnagione_abbastanza_chiara:': u'\U0001F9D7\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':scalatrice_carnagione_olivastra:': u'\U0001F9D7\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':scalatrice_carnagione_abbastanza_scura:': u'\U0001F9D7\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':scalatrice_carnagione_scura:': u'\U0001F9D7\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':schermidore:': u'\U0001F93A',
u':ippica:': u'\U0001F3C7',
u':ippica_carnagione_chiara:': u'\U0001F3C7\U0001F3FB',
u':ippica_carnagione_abbastanza_chiara:': u'\U0001F3C7\U0001F3FC',
u':ippica_carnagione_olivastra:': u'\U0001F3C7\U0001F3FD',
u':ippica_carnagione_abbastanza_scura:': u'\U0001F3C7\U0001F3FE',
u':ippica_carnagione_scura:': u'\U0001F3C7\U0001F3FF',
u':sciatore:': u'\U000026F7\U0000FE0F',
u':persona_sullo_snowboard:': u'\U0001F3C2',
u':persona_sullo_snowboard_carnagione_chiara:': u'\U0001F3C2\U0001F3FB',
u':persona_sullo_snowboard_carnagione_abbastanza_chiara:': u'\U0001F3C2\U0001F3FC',
u':persona_sullo_snowboard_carnagione_olivastra:': u'\U0001F3C2\U0001F3FD',
u':persona_sullo_snowboard_carnagione_abbastanza_scura:': u'\U0001F3C2\U0001F3FE',
u':persona_sullo_snowboard_carnagione_scura:': u'\U0001F3C2\U0001F3FF',
u':persona_che_gioca_a_golf:': u'\U0001F3CC\U0000FE0F',
u':persona_che_gioca_a_golf_carnagione_chiara:': u'\U0001F3CC\U0001F3FB',
u':persona_che_gioca_a_golf_carnagione_abbastanza_chiara:': u'\U0001F3CC\U0001F3FC',
u':persona_che_gioca_a_golf_carnagione_olivastra:': u'\U0001F3CC\U0001F3FD',
u':persona_che_gioca_a_golf_carnagione_abbastanza_scura:': u'\U0001F3CC\U0001F3FE',
u':persona_che_gioca_a_golf_carnagione_scura:': u'\U0001F3CC\U0001F3FF',
u':golfista_uomo:': u'\U0001F3CC\U0000FE0F\U0000200D\U00002642\U0000FE0F',
u':golfista_uomo_carnagione_chiara:': u'\U0001F3CC\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':golfista_uomo_carnagione_abbastanza_chiara:': u'\U0001F3CC\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':golfista_uomo_carnagione_olivastra:': u'\U0001F3CC\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':golfista_uomo_carnagione_abbastanza_scura:': u'\U0001F3CC\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':golfista_uomo_carnagione_scura:': u'\U0001F3CC\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':golfista_donna:': u'\U0001F3CC\U0000FE0F\U0000200D\U00002640\U0000FE0F',
u':golfista_donna_carnagione_chiara:': u'\U0001F3CC\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':golfista_donna_carnagione_abbastanza_chiara:': u'\U0001F3CC\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':golfista_donna_carnagione_olivastra:': u'\U0001F3CC\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':golfista_donna_carnagione_abbastanza_scura:': u'\U0001F3CC\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':golfista_donna_carnagione_scura:': u'\U0001F3CC\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_che_fa_surf:': u'\U0001F3C4',
u':persona_che_fa_surf_carnagione_chiara:': u'\U0001F3C4\U0001F3FB',
u':persona_che_fa_surf_carnagione_abbastanza_chiara:': u'\U0001F3C4\U0001F3FC',
u':persona_che_fa_surf_carnagione_olivastra:': u'\U0001F3C4\U0001F3FD',
u':persona_che_fa_surf_carnagione_abbastanza_scura:': u'\U0001F3C4\U0001F3FE',
u':persona_che_fa_surf_carnagione_scura:': u'\U0001F3C4\U0001F3FF',
u':surfista_uomo:': u'\U0001F3C4\U0000200D\U00002642\U0000FE0F',
u':surfista_uomo_carnagione_chiara:': u'\U0001F3C4\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':surfista_uomo_carnagione_abbastanza_chiara:': u'\U0001F3C4\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':surfista_uomo_carnagione_olivastra:': u'\U0001F3C4\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':surfista_uomo_carnagione_abbastanza_scura:': u'\U0001F3C4\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':surfista_uomo_carnagione_scura:': u'\U0001F3C4\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':surfista_donna:': u'\U0001F3C4\U0000200D\U00002640\U0000FE0F',
u':surfista_donna_carnagione_chiara:': u'\U0001F3C4\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':surfista_donna_carnagione_abbastanza_chiara:': u'\U0001F3C4\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':surfista_donna_carnagione_olivastra:': u'\U0001F3C4\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':surfista_donna_carnagione_abbastanza_scura:': u'\U0001F3C4\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':surfista_donna_carnagione_scura:': u'\U0001F3C4\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_in_barca_a_remi:': u'\U0001F6A3',
u':persona_in_barca_a_remi_carnagione_chiara:': u'\U0001F6A3\U0001F3FB',
u':persona_in_barca_a_remi_carnagione_abbastanza_chiara:': u'\U0001F6A3\U0001F3FC',
u':persona_in_barca_a_remi_carnagione_olivastra:': u'\U0001F6A3\U0001F3FD',
u':persona_in_barca_a_remi_carnagione_abbastanza_scura:': u'\U0001F6A3\U0001F3FE',
u':persona_in_barca_a_remi_carnagione_scura:': u'\U0001F6A3\U0001F3FF',
u':uomo_in_barca_a_remi:': u'\U0001F6A3\U0000200D\U00002642\U0000FE0F',
u':uomo_in_barca_a_remi_carnagione_chiara:': u'\U0001F6A3\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_in_barca_a_remi_carnagione_abbastanza_chiara:': u'\U0001F6A3\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_in_barca_a_remi_carnagione_olivastra:': u'\U0001F6A3\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_in_barca_a_remi_carnagione_abbastanza_scura:': u'\U0001F6A3\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_in_barca_a_remi_carnagione_scura:': u'\U0001F6A3\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_in_barca_a_remi:': u'\U0001F6A3\U0000200D\U00002640\U0000FE0F',
u':donna_in_barca_a_remi_carnagione_chiara:': u'\U0001F6A3\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_in_barca_a_remi_carnagione_abbastanza_chiara:': u'\U0001F6A3\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_in_barca_a_remi_carnagione_olivastra:': u'\U0001F6A3\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_in_barca_a_remi_carnagione_abbastanza_scura:': u'\U0001F6A3\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_in_barca_a_remi_carnagione_scura:': u'\U0001F6A3\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_che_nuota:': u'\U0001F3CA',
u':persona_che_nuota_carnagione_chiara:': u'\U0001F3CA\U0001F3FB',
u':persona_che_nuota_carnagione_abbastanza_chiara:': u'\U0001F3CA\U0001F3FC',
u':persona_che_nuota_carnagione_olivastra:': u'\U0001F3CA\U0001F3FD',
u':persona_che_nuota_carnagione_abbastanza_scura:': u'\U0001F3CA\U0001F3FE',
u':persona_che_nuota_carnagione_scura:': u'\U0001F3CA\U0001F3FF',
u':nuotatore:': u'\U0001F3CA\U0000200D\U00002642\U0000FE0F',
u':nuotatore_carnagione_chiara:': u'\U0001F3CA\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':nuotatore_carnagione_abbastanza_chiara:': u'\U0001F3CA\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':nuotatore_carnagione_olivastra:': u'\U0001F3CA\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':nuotatore_carnagione_abbastanza_scura:': u'\U0001F3CA\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':nuotatore_carnagione_scura:': u'\U0001F3CA\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':nuotatrice:': u'\U0001F3CA\U0000200D\U00002640\U0000FE0F',
u':nuotatrice_carnagione_chiara:': u'\U0001F3CA\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':nuotatrice_carnagione_abbastanza_chiara:': u'\U0001F3CA\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':nuotatrice_carnagione_olivastra:': u'\U0001F3CA\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':nuotatrice_carnagione_abbastanza_scura:': u'\U0001F3CA\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':nuotatrice_carnagione_scura:': u'\U0001F3CA\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_che_fa_rimbalzare_una_palla:': u'\U000026F9\U0000FE0F',
u':persona_che_fa_rimbalzare_una_palla_carnagione_chiara:': u'\U000026F9\U0001F3FB',
u':persona_che_fa_rimbalzare_una_palla_carnagione_abbastanza_chiara:': u'\U000026F9\U0001F3FC',
u':persona_che_fa_rimbalzare_una_palla_carnagione_olivastra:': u'\U000026F9\U0001F3FD',
u':persona_che_fa_rimbalzare_una_palla_carnagione_abbastanza_scura:': u'\U000026F9\U0001F3FE',
u':persona_che_fa_rimbalzare_una_palla_carnagione_scura:': u'\U000026F9\U0001F3FF',
u':uomo_che_fa_rimbalzare_una_palla:': u'\U000026F9\U0000FE0F\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_rimbalzare_una_palla_carnagione_chiara:': u'\U000026F9\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_rimbalzare_una_palla_carnagione_abbastanza_chiara:': u'\U000026F9\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_rimbalzare_una_palla_carnagione_olivastra:': u'\U000026F9\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_rimbalzare_una_palla_carnagione_abbastanza_scura:': u'\U000026F9\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_rimbalzare_una_palla_carnagione_scura:': u'\U000026F9\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_che_fa_rimbalzare_una_palla:': u'\U000026F9\U0000FE0F\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_rimbalzare_una_palla_carnagione_chiara:': u'\U000026F9\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_rimbalzare_una_palla_carnagione_abbastanza_chiara:': u'\U000026F9\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_rimbalzare_una_palla_carnagione_olivastra:': u'\U000026F9\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_rimbalzare_una_palla_carnagione_abbastanza_scura:': u'\U000026F9\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_rimbalzare_una_palla_carnagione_scura:': u'\U000026F9\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_che_solleva_pesi:': u'\U0001F3CB\U0000FE0F',
u':persona_che_solleva_pesi_carnagione_chiara:': u'\U0001F3CB\U0001F3FB',
u':persona_che_solleva_pesi_carnagione_abbastanza_chiara:': u'\U0001F3CB\U0001F3FC',
u':persona_che_solleva_pesi_carnagione_olivastra:': u'\U0001F3CB\U0001F3FD',
u':persona_che_solleva_pesi_carnagione_abbastanza_scura:': u'\U0001F3CB\U0001F3FE',
u':persona_che_solleva_pesi_carnagione_scura:': u'\U0001F3CB\U0001F3FF',
u':uomo_che_solleva_pesi:': u'\U0001F3CB\U0000FE0F\U0000200D\U00002642\U0000FE0F',
u':uomo_che_solleva_pesi_carnagione_chiara:': u'\U0001F3CB\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_che_solleva_pesi_carnagione_abbastanza_chiara:': u'\U0001F3CB\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_che_solleva_pesi_carnagione_olivastra:': u'\U0001F3CB\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_che_solleva_pesi_carnagione_abbastanza_scura:': u'\U0001F3CB\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_che_solleva_pesi_carnagione_scura:': u'\U0001F3CB\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_che_solleva_pesi:': u'\U0001F3CB\U0000FE0F\U0000200D\U00002640\U0000FE0F',
u':donna_che_solleva_pesi_carnagione_chiara:': u'\U0001F3CB\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_che_solleva_pesi_carnagione_abbastanza_chiara:': u'\U0001F3CB\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_che_solleva_pesi_carnagione_olivastra:': u'\U0001F3CB\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_che_solleva_pesi_carnagione_abbastanza_scura:': u'\U0001F3CB\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_che_solleva_pesi_carnagione_scura:': u'\U0001F3CB\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':ciclista:': u'\U0001F6B4',
u':ciclista_carnagione_chiara:': u'\U0001F6B4\U0001F3FB',
u':ciclista_carnagione_abbastanza_chiara:': u'\U0001F6B4\U0001F3FC',
u':ciclista_carnagione_olivastra:': u'\U0001F6B4\U0001F3FD',
u':ciclista_carnagione_abbastanza_scura:': u'\U0001F6B4\U0001F3FE',
u':ciclista_carnagione_scura:': u'\U0001F6B4\U0001F3FF',
u':ciclista_uomo:': u'\U0001F6B4\U0000200D\U00002642\U0000FE0F',
u':ciclista_uomo_carnagione_chiara:': u'\U0001F6B4\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':ciclista_uomo_carnagione_abbastanza_chiara:': u'\U0001F6B4\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':ciclista_uomo_carnagione_olivastra:': u'\U0001F6B4\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':ciclista_uomo_carnagione_abbastanza_scura:': u'\U0001F6B4\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':ciclista_uomo_carnagione_scura:': u'\U0001F6B4\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':ciclista_donna:': u'\U0001F6B4\U0000200D\U00002640\U0000FE0F',
u':ciclista_donna_carnagione_chiara:': u'\U0001F6B4\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':ciclista_donna_carnagione_abbastanza_chiara:': u'\U0001F6B4\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':ciclista_donna_carnagione_olivastra:': u'\U0001F6B4\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':ciclista_donna_carnagione_abbastanza_scura:': u'\U0001F6B4\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':ciclista_donna_carnagione_scura:': u'\U0001F6B4\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':ciclista_di_mountain_bike:': u'\U0001F6B5',
u':ciclista_di_mountain_bike_carnagione_chiara:': u'\U0001F6B5\U0001F3FB',
u':ciclista_di_mountain_bike_carnagione_abbastanza_chiara:': u'\U0001F6B5\U0001F3FC',
u':ciclista_di_mountain_bike_carnagione_olivastra:': u'\U0001F6B5\U0001F3FD',
u':ciclista_di_mountain_bike_carnagione_abbastanza_scura:': u'\U0001F6B5\U0001F3FE',
u':ciclista_di_mountain_bike_carnagione_scura:': u'\U0001F6B5\U0001F3FF',
u':ciclista_uomo_di_mountain_bike:': u'\U0001F6B5\U0000200D\U00002642\U0000FE0F',
u':ciclista_uomo_di_mountain_bike_carnagione_chiara:': u'\U0001F6B5\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':ciclista_uomo_di_mountain_bike_carnagione_abbastanza_chiara:': u'\U0001F6B5\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':ciclista_uomo_di_mountain_bike_carnagione_olivastra:': u'\U0001F6B5\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':ciclista_uomo_di_mountain_bike_carnagione_abbastanza_scura:': u'\U0001F6B5\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':ciclista_uomo_di_mountain_bike_carnagione_scura:': u'\U0001F6B5\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':ciclista_donna_di_mountain_bike:': u'\U0001F6B5\U0000200D\U00002640\U0000FE0F',
u':ciclista_donna_di_mountain_bike_carnagione_chiara:': u'\U0001F6B5\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':ciclista_donna_di_mountain_bike_carnagione_abbastanza_chiara:': u'\U0001F6B5\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':ciclista_donna_di_mountain_bike_carnagione_olivastra:': u'\U0001F6B5\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':ciclista_donna_di_mountain_bike_carnagione_abbastanza_scura:': u'\U0001F6B5\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':ciclista_donna_di_mountain_bike_carnagione_scura:': u'\U0001F6B5\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_che_fa_la_ruota:': u'\U0001F938',
u':persona_che_fa_la_ruota_carnagione_chiara:': u'\U0001F938\U0001F3FB',
u':persona_che_fa_la_ruota_carnagione_abbastanza_chiara:': u'\U0001F938\U0001F3FC',
u':persona_che_fa_la_ruota_carnagione_olivastra:': u'\U0001F938\U0001F3FD',
u':persona_che_fa_la_ruota_carnagione_abbastanza_scura:': u'\U0001F938\U0001F3FE',
u':persona_che_fa_la_ruota_carnagione_scura:': u'\U0001F938\U0001F3FF',
u':uomo_che_fa_la_ruota:': u'\U0001F938\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_la_ruota_carnagione_chiara:': u'\U0001F938\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_la_ruota_carnagione_abbastanza_chiara:': u'\U0001F938\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_la_ruota_carnagione_olivastra:': u'\U0001F938\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_la_ruota_carnagione_abbastanza_scura:': u'\U0001F938\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_che_fa_la_ruota_carnagione_scura:': u'\U0001F938\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_che_fa_la_ruota:': u'\U0001F938\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_la_ruota_carnagione_chiara:': u'\U0001F938\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_la_ruota_carnagione_abbastanza_chiara:': u'\U0001F938\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_la_ruota_carnagione_olivastra:': u'\U0001F938\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_la_ruota_carnagione_abbastanza_scura:': u'\U0001F938\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_che_fa_la_ruota_carnagione_scura:': u'\U0001F938\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persone_che_fanno_la_lotta:': u'\U0001F93C',
u':lottatori:': u'\U0001F93C\U0000200D\U00002642\U0000FE0F',
u':lottatrici:': u'\U0001F93C\U0000200D\U00002640\U0000FE0F',
u':persona_che_gioca_a_pallanuoto:': u'\U0001F93D',
u':persona_che_gioca_a_pallanuoto_carnagione_chiara:': u'\U0001F93D\U0001F3FB',
u':persona_che_gioca_a_pallanuoto_carnagione_abbastanza_chiara:': u'\U0001F93D\U0001F3FC',
u':persona_che_gioca_a_pallanuoto_carnagione_olivastra:': u'\U0001F93D\U0001F3FD',
u':persona_che_gioca_a_pallanuoto_carnagione_abbastanza_scura:': u'\U0001F93D\U0001F3FE',
u':persona_che_gioca_a_pallanuoto_carnagione_scura:': u'\U0001F93D\U0001F3FF',
u':pallanuotista_uomo:': u'\U0001F93D\U0000200D\U00002642\U0000FE0F',
u':pallanuotista_uomo_carnagione_chiara:': u'\U0001F93D\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':pallanuotista_uomo_carnagione_abbastanza_chiara:': u'\U0001F93D\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':pallanuotista_uomo_carnagione_olivastra:': u'\U0001F93D\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':pallanuotista_uomo_carnagione_abbastanza_scura:': u'\U0001F93D\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':pallanuotista_uomo_carnagione_scura:': u'\U0001F93D\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':pallanuotista_donna:': u'\U0001F93D\U0000200D\U00002640\U0000FE0F',
u':pallanuotista_donna_carnagione_chiara:': u'\U0001F93D\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':pallanuotista_donna_carnagione_abbastanza_chiara:': u'\U0001F93D\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':pallanuotista_donna_carnagione_olivastra:': u'\U0001F93D\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':pallanuotista_donna_carnagione_abbastanza_scura:': u'\U0001F93D\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':pallanuotista_donna_carnagione_scura:': u'\U0001F93D\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_che_gioca_a_pallamano:': u'\U0001F93E',
u':persona_che_gioca_a_pallamano_carnagione_chiara:': u'\U0001F93E\U0001F3FB',
u':persona_che_gioca_a_pallamano_carnagione_abbastanza_chiara:': u'\U0001F93E\U0001F3FC',
u':persona_che_gioca_a_pallamano_carnagione_olivastra:': u'\U0001F93E\U0001F3FD',
u':persona_che_gioca_a_pallamano_carnagione_abbastanza_scura:': u'\U0001F93E\U0001F3FE',
u':persona_che_gioca_a_pallamano_carnagione_scura:': u'\U0001F93E\U0001F3FF',
u':pallamanista_uomo:': u'\U0001F93E\U0000200D\U00002642\U0000FE0F',
u':pallamanista_uomo_carnagione_chiara:': u'\U0001F93E\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':pallamanista_uomo_carnagione_abbastanza_chiara:': u'\U0001F93E\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':pallamanista_uomo_carnagione_olivastra:': u'\U0001F93E\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':pallamanista_uomo_carnagione_abbastanza_scura:': u'\U0001F93E\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':pallamanista_uomo_carnagione_scura:': u'\U0001F93E\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':pallamanista_donna:': u'\U0001F93E\U0000200D\U00002640\U0000FE0F',
u':pallamanista_donna_carnagione_chiara:': u'\U0001F93E\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':pallamanista_donna_carnagione_abbastanza_chiara:': u'\U0001F93E\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':pallamanista_donna_carnagione_olivastra:': u'\U0001F93E\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':pallamanista_donna_carnagione_abbastanza_scura:': u'\U0001F93E\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':pallamanista_donna_carnagione_scura:': u'\U0001F93E\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_che_fa_giocoleria:': u'\U0001F939',
u':persona_che_fa_giocoleria_carnagione_chiara:': u'\U0001F939\U0001F3FB',
u':persona_che_fa_giocoleria_carnagione_abbastanza_chiara:': u'\U0001F939\U0001F3FC',
u':persona_che_fa_giocoleria_carnagione_olivastra:': u'\U0001F939\U0001F3FD',
u':persona_che_fa_giocoleria_carnagione_abbastanza_scura:': u'\U0001F939\U0001F3FE',
u':persona_che_fa_giocoleria_carnagione_scura:': u'\U0001F939\U0001F3FF',
u':giocoliere_uomo:': u'\U0001F939\U0000200D\U00002642\U0000FE0F',
u':giocoliere_uomo_carnagione_chiara:': u'\U0001F939\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':giocoliere_uomo_carnagione_abbastanza_chiara:': u'\U0001F939\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':giocoliere_uomo_carnagione_olivastra:': u'\U0001F939\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':giocoliere_uomo_carnagione_abbastanza_scura:': u'\U0001F939\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':giocoliere_uomo_carnagione_scura:': u'\U0001F939\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':giocoliere_donna:': u'\U0001F939\U0000200D\U00002640\U0000FE0F',
u':giocoliere_donna_carnagione_chiara:': u'\U0001F939\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':giocoliere_donna_carnagione_abbastanza_chiara:': u'\U0001F939\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':giocoliere_donna_carnagione_olivastra:': u'\U0001F939\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':giocoliere_donna_carnagione_abbastanza_scura:': u'\U0001F939\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':giocoliere_donna_carnagione_scura:': u'\U0001F939\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_in_posizione_del_loto:': u'\U0001F9D8',
u':persona_in_posizione_del_loto_carnagione_chiara:': u'\U0001F9D8\U0001F3FB',
u':persona_in_posizione_del_loto_carnagione_abbastanza_chiara:': u'\U0001F9D8\U0001F3FC',
u':persona_in_posizione_del_loto_carnagione_olivastra:': u'\U0001F9D8\U0001F3FD',
u':persona_in_posizione_del_loto_carnagione_abbastanza_scura:': u'\U0001F9D8\U0001F3FE',
u':persona_in_posizione_del_loto_carnagione_scura:': u'\U0001F9D8\U0001F3FF',
u':uomo_in_posizione_del_loto:': u'\U0001F9D8\U0000200D\U00002642\U0000FE0F',
u':uomo_in_posizione_del_loto_carnagione_chiara:': u'\U0001F9D8\U0001F3FB\U0000200D\U00002642\U0000FE0F',
u':uomo_in_posizione_del_loto_carnagione_abbastanza_chiara:': u'\U0001F9D8\U0001F3FC\U0000200D\U00002642\U0000FE0F',
u':uomo_in_posizione_del_loto_carnagione_olivastra:': u'\U0001F9D8\U0001F3FD\U0000200D\U00002642\U0000FE0F',
u':uomo_in_posizione_del_loto_carnagione_abbastanza_scura:': u'\U0001F9D8\U0001F3FE\U0000200D\U00002642\U0000FE0F',
u':uomo_in_posizione_del_loto_carnagione_scura:': u'\U0001F9D8\U0001F3FF\U0000200D\U00002642\U0000FE0F',
u':donna_in_posizione_del_loto:': u'\U0001F9D8\U0000200D\U00002640\U0000FE0F',
u':donna_in_posizione_del_loto_carnagione_chiara:': u'\U0001F9D8\U0001F3FB\U0000200D\U00002640\U0000FE0F',
u':donna_in_posizione_del_loto_carnagione_abbastanza_chiara:': u'\U0001F9D8\U0001F3FC\U0000200D\U00002640\U0000FE0F',
u':donna_in_posizione_del_loto_carnagione_olivastra:': u'\U0001F9D8\U0001F3FD\U0000200D\U00002640\U0000FE0F',
u':donna_in_posizione_del_loto_carnagione_abbastanza_scura:': u'\U0001F9D8\U0001F3FE\U0000200D\U00002640\U0000FE0F',
u':donna_in_posizione_del_loto_carnagione_scura:': u'\U0001F9D8\U0001F3FF\U0000200D\U00002640\U0000FE0F',
u':persona_che_fa_il_bagno:': u'\U0001F6C0',
u':persona_che_fa_il_bagno_carnagione_chiara:': u'\U0001F6C0\U0001F3FB',
u':persona_che_fa_il_bagno_carnagione_abbastanza_chiara:': u'\U0001F6C0\U0001F3FC',
u':persona_che_fa_il_bagno_carnagione_olivastra:': u'\U0001F6C0\U0001F3FD',
u':persona_che_fa_il_bagno_carnagione_abbastanza_scura:': u'\U0001F6C0\U0001F3FE',
u':persona_che_fa_il_bagno_carnagione_scura:': u'\U0001F6C0\U0001F3FF',
u':persona_a_letto:': u'\U0001F6CC',
u':persona_a_letto_carnagione_chiara:': u'\U0001F6CC\U0001F3FB',
u':persona_a_letto_carnagione_abbastanza_chiara:': u'\U0001F6CC\U0001F3FC',
u':persona_a_letto_carnagione_olivastra:': u'\U0001F6CC\U0001F3FD',
u':persona_a_letto_carnagione_abbastanza_scura:': u'\U0001F6CC\U0001F3FE',
u':persona_a_letto_carnagione_scura:': u'\U0001F6CC\U0001F3FF',
u':persone_che_si_tengono_per_mano:': u'\U0001F9D1\U0000200D\U0001F91D\U0000200D\U0001F9D1',
u':persone_che_si_tengono_per_mano_carnagione_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FB',
u':persone_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FC',
u':persone_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_olivastra:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FD',
u':persone_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FE',
u':persone_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_scura:': u'\U0001F9D1\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FF',
u':persone_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FB',
u':persone_che_si_tengono_per_mano_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FC',
u':persone_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_olivastra:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FD',
u':persone_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FE',
u':persone_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_scura:': u'\U0001F9D1\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FF',
u':persone_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_chiara:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FB',
u':persone_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FC',
u':persone_che_si_tengono_per_mano_carnagione_olivastra:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FD',
u':persone_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FE',
u':persone_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_scura:': u'\U0001F9D1\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FF',
u':persone_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_chiara:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FB',
u':persone_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FC',
u':persone_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_olivastra:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FD',
u':persone_che_si_tengono_per_mano_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FE',
u':persone_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_scura:': u'\U0001F9D1\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FF',
u':persone_che_si_tengono_per_mano_carnagione_scura_e_carnagione_chiara:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FB',
u':persone_che_si_tengono_per_mano_carnagione_scura_e_carnagione_abbastanza_chiara:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FC',
u':persone_che_si_tengono_per_mano_carnagione_scura_e_carnagione_olivastra:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FD',
u':persone_che_si_tengono_per_mano_carnagione_scura_e_carnagione_abbastanza_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FE',
u':persone_che_si_tengono_per_mano_carnagione_scura:': u'\U0001F9D1\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F9D1\U0001F3FF',
u':due_donne_che_si_tengono_per_mano:': u'\U0001F46D',
u':due_donne_che_si_tengono_per_mano_carnagione_chiara:': u'\U0001F46D\U0001F3FB',
u':due_donne_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FC',
u':due_donne_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_olivastra:': u'\U0001F469\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FD',
u':due_donne_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FE',
u':due_donne_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_scura:': u'\U0001F469\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FF',
u':due_donne_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FB',
u':due_donne_che_si_tengono_per_mano_carnagione_abbastanza_chiara:': u'\U0001F46D\U0001F3FC',
u':due_donne_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_olivastra:': u'\U0001F469\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FD',
u':due_donne_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FE',
u':due_donne_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_scura:': u'\U0001F469\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FF',
u':due_donne_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_chiara:': u'\U0001F469\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FB',
u':due_donne_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FC',
u':due_donne_che_si_tengono_per_mano_carnagione_olivastra:': u'\U0001F46D\U0001F3FD',
u':due_donne_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FE',
u':due_donne_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_scura:': u'\U0001F469\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FF',
u':due_donne_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_chiara:': u'\U0001F469\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FB',
u':due_donne_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FC',
u':due_donne_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_olivastra:': u'\U0001F469\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FD',
u':due_donne_che_si_tengono_per_mano_carnagione_abbastanza_scura:': u'\U0001F46D\U0001F3FE',
u':due_donne_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FF',
u':due_donne_che_si_tengono_per_mano_carnagione_scura_e_carnagione_chiara:': u'\U0001F469\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FB',
u':due_donne_che_si_tengono_per_mano_carnagione_scura_e_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FC',
u':due_donne_che_si_tengono_per_mano_carnagione_scura_e_carnagione_olivastra:': u'\U0001F469\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FD',
u':due_donne_che_si_tengono_per_mano_carnagione_scura_e_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F469\U0001F3FE',
u':due_donne_che_si_tengono_per_mano_carnagione_scura:': u'\U0001F46D\U0001F3FF',
u':uomo_e_donna_che_si_tengono_per_mano:': u'\U0001F46B',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_chiara:': u'\U0001F46B\U0001F3FB',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FC',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_olivastra:': u'\U0001F469\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FD',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FE',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_scura:': u'\U0001F469\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FF',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_chiara:': u'\U0001F469\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FB',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_abbastanza_chiara:': u'\U0001F46B\U0001F3FC',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_olivastra:': u'\U0001F469\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FD',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FE',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_scura:': u'\U0001F469\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FF',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_chiara:': u'\U0001F469\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FB',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FC',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_olivastra:': u'\U0001F46B\U0001F3FD',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FE',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_scura:': u'\U0001F469\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FF',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_chiara:': u'\U0001F469\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FB',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FC',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_olivastra:': u'\U0001F469\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FD',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_abbastanza_scura:': u'\U0001F46B\U0001F3FE',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_scura:': u'\U0001F469\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FF',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_scura_e_carnagione_chiara:': u'\U0001F469\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FB',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_scura_e_carnagione_abbastanza_chiara:': u'\U0001F469\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FC',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_scura_e_carnagione_olivastra:': u'\U0001F469\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FD',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_scura_e_carnagione_abbastanza_scura:': u'\U0001F469\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FE',
u':uomo_e_donna_che_si_tengono_per_mano_carnagione_scura:': u'\U0001F46B\U0001F3FF',
u':due_uomini_che_si_tengono_per_mano:': u'\U0001F46C',
u':due_uomini_che_si_tengono_per_mano_carnagione_chiara:': u'\U0001F46C\U0001F3FB',
u':due_uomini_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FC',
u':due_uomini_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_olivastra:': u'\U0001F468\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FD',
u':due_uomini_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FE',
u':due_uomini_che_si_tengono_per_mano_carnagione_chiara_e_carnagione_scura:': u'\U0001F468\U0001F3FB\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FF',
u':due_uomini_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_chiara:': u'\U0001F468\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FB',
u':due_uomini_che_si_tengono_per_mano_carnagione_abbastanza_chiara:': u'\U0001F46C\U0001F3FC',
u':due_uomini_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_olivastra:': u'\U0001F468\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FD',
u':due_uomini_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FE',
u':due_uomini_che_si_tengono_per_mano_carnagione_abbastanza_chiara_e_carnagione_scura:': u'\U0001F468\U0001F3FC\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FF',
u':due_uomini_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_chiara:': u'\U0001F468\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FB',
u':due_uomini_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FC',
u':due_uomini_che_si_tengono_per_mano_carnagione_olivastra:': u'\U0001F46C\U0001F3FD',
u':due_uomini_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FE',
u':due_uomini_che_si_tengono_per_mano_carnagione_olivastra_e_carnagione_scura:': u'\U0001F468\U0001F3FD\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FF',
u':due_uomini_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_chiara:': u'\U0001F468\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FB',
u':due_uomini_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FC',
u':due_uomini_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_olivastra:': u'\U0001F468\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FD',
u':due_uomini_che_si_tengono_per_mano_carnagione_abbastanza_scura:': u'\U0001F46C\U0001F3FE',
u':due_uomini_che_si_tengono_per_mano_carnagione_abbastanza_scura_e_carnagione_scura:': u'\U0001F468\U0001F3FE\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FF',
u':due_uomini_che_si_tengono_per_mano_carnagione_scura_e_carnagione_chiara:': u'\U0001F468\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FB',
u':due_uomini_che_si_tengono_per_mano_carnagione_scura_e_carnagione_abbastanza_chiara:': u'\U0001F468\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FC',
u':due_uomini_che_si_tengono_per_mano_carnagione_scura_e_carnagione_olivastra:': u'\U0001F468\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FD',
u':due_uomini_che_si_tengono_per_mano_carnagione_scura_e_carnagione_abbastanza_scura:': u'\U0001F468\U0001F3FF\U0000200D\U0001F91D\U0000200D\U0001F468\U0001F3FE',
u':due_uomini_che_si_tengono_per_mano_carnagione_scura:': u'\U0001F46C\U0001F3FF',
u':bacio_tra_coppia:': u'\U0001F48F',
u':bacio_tra_coppia_donna_e_uomo:': u'\U0001F469\U0000200D\U00002764\U0000FE0F\U0000200D\U0001F48B\U0000200D\U0001F468',
u':bacio_tra_coppia_uomo_e_uomo:': u'\U0001F468\U0000200D\U00002764\U0000FE0F\U0000200D\U0001F48B\U0000200D\U0001F468',
u':bacio_tra_coppia_donna_e_donna:': u'\U0001F469\U0000200D\U00002764\U0000FE0F\U0000200D\U0001F48B\U0000200D\U0001F469',
u':coppia_con_cuore:': u'\U0001F491',
u':coppia_con_cuore_donna_e_uomo:': u'\U0001F469\U0000200D\U00002764\U0000FE0F\U0000200D\U0001F468',
u':coppia_con_cuore_uomo_e_uomo:': u'\U0001F468\U0000200D\U00002764\U0000FE0F\U0000200D\U0001F468',
u':coppia_con_cuore_donna_e_donna:': u'\U0001F469\U0000200D\U00002764\U0000FE0F\U0000200D\U0001F469',
u':famiglia:': u'\U0001F46A',
u':famiglia_uomo_donna_e_bambino:': u'\U0001F468\U0000200D\U0001F469\U0000200D\U0001F466',
u':famiglia_uomo_donna_e_bambina:': u'\U0001F468\U0000200D\U0001F469\U0000200D\U0001F467',
u':famiglia_uomo_donna_bambina_e_bambino:': u'\U0001F468\U0000200D\U0001F469\U0000200D\U0001F467\U0000200D\U0001F466',
u':famiglia_uomo_donna_bambino_e_bambino:': u'\U0001F468\U0000200D\U0001F469\U0000200D\U0001F466\U0000200D\U0001F466',
u':famiglia_uomo_donna_bambina_e_bambina:': u'\U0001F468\U0000200D\U0001F469\U0000200D\U0001F467\U0000200D\U0001F467',
u':famiglia_uomo_uomo_e_bambino:': u'\U0001F468\U0000200D\U0001F468\U0000200D\U0001F466',
u':famiglia_uomo_uomo_e_bambina:': u'\U0001F468\U0000200D\U0001F468\U0000200D\U0001F467',
u':famiglia_uomo_uomo_bambina_e_bambino:': u'\U0001F468\U0000200D\U0001F468\U0000200D\U0001F467\U0000200D\U0001F466',
u':famiglia_uomo_uomo_bambino_e_bambino:': u'\U0001F468\U0000200D\U0001F468\U0000200D\U0001F466\U0000200D\U0001F466',
u':famiglia_uomo_uomo_bambina_e_bambina:': u'\U0001F468\U0000200D\U0001F468\U0000200D\U0001F467\U0000200D\U0001F467',
u':famiglia_donna_donna_e_bambino:': u'\U0001F469\U0000200D\U0001F469\U0000200D\U0001F466',
u':famiglia_donna_donna_e_bambina:': u'\U0001F469\U0000200D\U0001F469\U0000200D\U0001F467',
u':famiglia_donna_donna_bambina_e_bambino:': u'\U0001F469\U0000200D\U0001F469\U0000200D\U0001F467\U0000200D\U0001F466',
u':famiglia_donna_donna_bambino_e_bambino:': u'\U0001F469\U0000200D\U0001F469\U0000200D\U0001F466\U0000200D\U0001F466',
u':famiglia_donna_donna_bambina_e_bambina:': u'\U0001F469\U0000200D\U0001F469\U0000200D\U0001F467\U0000200D\U0001F467',
u':famiglia_uomo_e_bambino:': u'\U0001F468\U0000200D\U0001F466',
u':famiglia_uomo_bambino_e_bambino:': u'\U0001F468\U0000200D\U0001F466\U0000200D\U0001F466',
u':famiglia_uomo_e_bambina:': u'\U0001F468\U0000200D\U0001F467',
u':famiglia_uomo_bambina_e_bambino:': u'\U0001F468\U0000200D\U0001F467\U0000200D\U0001F466',
u':famiglia_uomo_bambina_e_bambina:': u'\U0001F468\U0000200D\U0001F467\U0000200D\U0001F467',
u':famiglia_donna_e_bambino:': u'\U0001F469\U0000200D\U0001F466',
u':famiglia_donna_bambino_e_bambino:': u'\U0001F469\U0000200D\U0001F466\U0000200D\U0001F466',
u':famiglia_donna_e_bambina:': u'\U0001F469\U0000200D\U0001F467',
u':famiglia_donna_bambina_e_bambino:': u'\U0001F469\U0000200D\U0001F467\U0000200D\U0001F466',
u':famiglia_donna_bambina_e_bambina:': u'\U0001F469\U0000200D\U0001F467\U0000200D\U0001F467',
u':persona_che_parla:': u'\U0001F5E3\U0000FE0F',
u':profilo_di_persona:': u'\U0001F464',
u':profilo_di_due_persone:': u'\U0001F465',
u':persone_che_si_abbracciano:': u'\U0001FAC2',
u':impronta_di_piedi:': u'\U0001F463',
u':carnagione_chiara:': u'\U0001F3FB',
u':carnagione_abbastanza_chiara:': u'\U0001F3FC',
u':carnagione_olivastra:': u'\U0001F3FD',
u':carnagione_abbastanza_scura:': u'\U0001F3FE',
u':carnagione_scura:': u'\U0001F3FF',
u':capelli_rossi:': u'\U0001F9B0',
u':capelli_ricci:': u'\U0001F9B1',
u':capelli_bianchi:': u'\U0001F9B3',
u':calvo:': u'\U0001F9B2',
u':muso_di_scimmia:': u'\U0001F435',
u':scimmia:': u'\U0001F412',
u':gorilla:': u'\U0001F98D',
u':orangotango:': u'\U0001F9A7',
u':muso_di_cane:': u'\U0001F436',
u':cane:': u'\U0001F415',
u':cane_guida:': u'\U0001F9AE',
u':cane_da_assistenza:': u'\U0001F415\U0000200D\U0001F9BA',
u':barboncino:': u'\U0001F429',
u':lupo:': u'\U0001F43A',
u':volpe:': u'\U0001F98A',
u':procione:': u'\U0001F99D',
u':muso_di_gatto:': u'\U0001F431',
u':gatto:': u'\U0001F408',
u':gatto_nero:': u'\U0001F408\U0000200D\U00002B1B',
u':leone:': u'\U0001F981',
u':muso_di_tigre:': u'\U0001F42F',
u':tigre:': u'\U0001F405',
u':leopardo:': u'\U0001F406',
u':muso_di_cavallo:': u'\U0001F434',
u':cavallo:': u'\U0001F40E',
u':unicorno:': u'\U0001F984',
u':zebra:': u'\U0001F993',
u':cervo:': u'\U0001F98C',
u':bisonte:': u'\U0001F9AC',
u':muso_di_mucca:': u'\U0001F42E',
u':bue:': u'\U0001F402',
u':bufalo_d’acqua:': u'\U0001F403',
u':mucca:': u'\U0001F404',
u':muso_di_maiale:': u'\U0001F437',
u':maiale:': u'\U0001F416',
u':cinghiale:': u'\U0001F417',
u':naso_da_maiale:': u'\U0001F43D',
u':montone:': u'\U0001F40F',
u':pecora:': u'\U0001F411',
u':capra:': u'\U0001F410',
u':dromedario:': u'\U0001F42A',
u':cammello:': u'\U0001F42B',
u':lama:': u'\U0001F999',
u':giraffa:': u'\U0001F992',
u':elefante:': u'\U0001F418',
u':mammut:': u'\U0001F9A3',
u':rinoceronte:': u'\U0001F98F',
u':ippopotamo:': u'\U0001F99B',
u':muso_di_topo:': u'\U0001F42D',
u':topo:': u'\U0001F401',
u':ratto:': u'\U0001F400',
u':criceto:': u'\U0001F439',
u':muso_di_coniglio:': u'\U0001F430',
u':coniglio:': u'\U0001F407',
u':scoiattolo:': u'\U0001F43F\U0000FE0F',
u':castoro:': u'\U0001F9AB',
u':riccio:': u'\U0001F994',
u':pipistrello:': u'\U0001F987',
u':orso:': u'\U0001F43B',
u':orso_polare:': u'\U0001F43B\U0000200D\U00002744\U0000FE0F',
u':koala:': u'\U0001F428',
u':panda:': u'\U0001F43C',
u':bradipo:': u'\U0001F9A5',
u':lontra:': u'\U0001F9A6',
u':puzzola:': u'\U0001F9A8',
u':canguro:': u'\U0001F998',
u':tasso:': u'\U0001F9A1',
u':impronte_di_zampe:': u'\U0001F43E',
u':tacchino:': u'\U0001F983',
u':gallina:': u'\U0001F414',
u':gallo:': u'\U0001F413',
u':pulcino_che_nasce:': u'\U0001F423',
u':pulcino:': u'\U0001F424',
u':pulcino_visto_di_fronte:': u'\U0001F425',
u':uccello:': u'\U0001F426',
u':pinguino:': u'\U0001F427',
u':colomba:': u'\U0001F54A\U0000FE0F',
u':aquila:': u'\U0001F985',
u':anatra:': u'\U0001F986',
u':cigno:': u'\U0001F9A2',
u':gufo:': u'\U0001F989',
u':dodo:': u'\U0001F9A4',
u':piuma:': u'\U0001FAB6',
u':fenicottero:': u'\U0001F9A9',
u':pavone:': u'\U0001F99A',
u':pappagallo:': u'\U0001F99C',
u':rana:': u'\U0001F438',
u':coccodrillo:': u'\U0001F40A',
u':tartaruga:': u'\U0001F422',
u':lucertola:': u'\U0001F98E',
u':serpente:': u'\U0001F40D',
u':testa_di_drago:': u'\U0001F432',
u':drago:': u'\U0001F409',
u':sauropode:': u'\U0001F995',
u':t-rex:': u'\U0001F996',
u':balena_che_spruzza_acqua:': u'\U0001F433',
u':balena:': u'\U0001F40B',
u':delfino:': u'\U0001F42C',
u':foca:': u'\U0001F9AD',
u':pesce:': u'\U0001F41F',
u':pesce_tropicale:': u'\U0001F420',
u':pesce_palla:': u'\U0001F421',
u':squalo:': u'\U0001F988',
u':polpo:': u'\U0001F419',
u':conchiglia:': u'\U0001F41A',
u':lumaca:': u'\U0001F40C',
u':farfalla:': u'\U0001F98B',
u':insetto:': u'\U0001F41B',
u':formica:': u'\U0001F41C',
u':ape:': u'\U0001F41D',
u':coleottero:': u'\U0001FAB2',
u':coccinella:': u'\U0001F41E',
u':grillo:': u'\U0001F997',
u':scarafaggio:': u'\U0001FAB3',
u':ragno:': u'\U0001F577\U0000FE0F',
u':ragnatela:': u'\U0001F578\U0000FE0F',
u':scorpione:': u'\U0001F982',
u':zanzara:': u'\U0001F99F',
u':mosca:': u'\U0001FAB0',
u':verme:': u'\U0001FAB1',
u':microbo:': u'\U0001F9A0',
u':mazzo_di_fiori:': u'\U0001F490',
u':fiore_di_ciliegio:': u'\U0001F338',
u':fiore_bianco:': u'\U0001F4AE',
u':rosetta:': u'\U0001F3F5\U0000FE0F',
u':rosa:': u'\U0001F339',
u':fiore_appassito:': u'\U0001F940',
u':ibisco:': u'\U0001F33A',
u':girasole:': u'\U0001F33B',
u':fiore:': u'\U0001F33C',
u':tulipano:': u'\U0001F337',
u':germoglio:': u'\U0001F331',
u':pianta_in_vaso:': u'\U0001FAB4',
u':albero_sempreverde:': u'\U0001F332',
u':albero_deciduo:': u'\U0001F333',
u':palma:': u'\U0001F334',
u':cactus:': u'\U0001F335',
u':pianta_di_riso:': u'\U0001F33E',
u':pianta:': u'\U0001F33F',
u':trifoglio:': u'\U00002618\U0000FE0F',
u':quadrifoglio:': u'\U0001F340',
u':foglia_d’acero:': u'\U0001F341',
u':foglia_caduta:': u'\U0001F342',
u':foglia_al_vento:': u'\U0001F343',
u':uva:': u'\U0001F347',
u':melone:': u'\U0001F348',
u':anguria:': u'\U0001F349',
u':mandarino:': u'\U0001F34A',
u':limone:': u'\U0001F34B',
u':banana:': u'\U0001F34C',
u':ananas:': u'\U0001F34D',
u':mango:': u'\U0001F96D',
u':mela_rossa:': u'\U0001F34E',
u':mela_verde:': u'\U0001F34F',
u':pera:': u'\U0001F350',
u':pesca:': u'\U0001F351',
u':ciliegie:': u'\U0001F352',
u':fragola:': u'\U0001F353',
u':mirtilli:': u'\U0001FAD0',
u':kiwi:': u'\U0001F95D',
u':pomodoro:': u'\U0001F345',
u':oliva:': u'\U0001FAD2',
u':cocco:': u'\U0001F965',
u':avocado:': u'\U0001F951',
u':melanzana:': u'\U0001F346',
u':patata:': u'\U0001F954',
u':carota:': u'\U0001F955',
u':pannocchia:': u'\U0001F33D',
u':peperoncino:': u'\U0001F336\U0000FE0F',
u':peperone:': u'\U0001FAD1',
u':cetriolo:': u'\U0001F952',
u':verdure_a_foglia:': u'\U0001F96C',
u':broccoli:': u'\U0001F966',
u':aglio:': u'\U0001F9C4',
u':cipolla:': u'\U0001F9C5',
u':fungo:': u'\U0001F344',
u':arachidi:': u'\U0001F95C',
u':castagna:': u'\U0001F330',
u':pane_in_cassetta:': u'\U0001F35E',
u':croissant:': u'\U0001F950',
u':baguette:': u'\U0001F956',
u':focaccia:': u'\U0001FAD3',
u':pretzel:': u'\U0001F968',
u':bagel:': u'\U0001F96F',
u':pancake:': u'\U0001F95E',
u':waffle:': u'\U0001F9C7',
u':fetta_di_formaggio:': u'\U0001F9C0',
u':carne:': u'\U0001F356',
u':coscia_di_pollo:': u'\U0001F357',
u':taglio_di_carne:': u'\U0001F969',
u':pancetta:': u'\U0001F953',
u':hamburger:': u'\U0001F354',
u':patatine:': u'\U0001F35F',
u':pizza:': u'\U0001F355',
u':hot_dog:': u'\U0001F32D',
u':sandwich:': u'\U0001F96A',
u':taco:': u'\U0001F32E',
u':burrito:': u'\U0001F32F',
u':tamal:': u'\U0001FAD4',
u':pita_farcita:': u'\U0001F959',
u':falafel:': u'\U0001F9C6',
u':uovo:': u'\U0001F95A',
u':cucinare:': u'\U0001F373',
u':padella_con_cibo:': u'\U0001F958',
u':ciotola_di_cibo:': u'\U0001F372',
u':fonduta:': u'\U0001FAD5',
u':ciotola_con_cucchiaio:': u'\U0001F963',
u':insalata_verde:': u'\U0001F957',
u':popcorn:': u'\U0001F37F',
u':burro:': u'\U0001F9C8',
u':sale:': u'\U0001F9C2',
u':cibo_in_scatola:': u'\U0001F96B',
u':bento_box:': u'\U0001F371',
u':galletta_di_riso:': u'\U0001F358',
u':onigiri:': u'\U0001F359',
u':riso_bollito:': u'\U0001F35A',
u':riso_al_curry:': u'\U0001F35B',
u':ciotola_fumante:': u'\U0001F35C',
u':spaghetti:': u'\U0001F35D',
u':patata_dolce_arrosto:': u'\U0001F360',
u':oden_giapponese:': u'\U0001F362',
u':sushi:': u'\U0001F363',
u':gambero_fritto:': u'\U0001F364',
u':tortino_di_pesce_a_spirale:': u'\U0001F365',
u':torta_della_luna:': u'\U0001F96E',
u':dolcetto:': u'\U0001F361',
u':raviolo:': u'\U0001F95F',
u':biscotto_della_fortuna:': u'\U0001F960',
u':confezione_da_asporto:': u'\U0001F961',
u':granchio:': u'\U0001F980',
u':aragosta:': u'\U0001F99E',
u':gamberetto:': u'\U0001F990',
u':calamaro:': u'\U0001F991',
u':ostrica:': u'\U0001F9AA',
u':cono_gelato:': u'\U0001F366',
u':granita:': u'\U0001F367',
u':coppa_di_gelato:': u'\U0001F368',
u':ciambella:': u'\U0001F369',
u':biscotto:': u'\U0001F36A',
u':torta_di_compleanno:': u'\U0001F382',
u':fetta_di_torta:': u'\U0001F370',
u':cupcake:': u'\U0001F9C1',
u':torta:': u'\U0001F967',
u':cioccolato:': u'\U0001F36B',
u':caramella:': u'\U0001F36C',
u':lecca_lecca:': u'\U0001F36D',
u':budino:': u'\U0001F36E',
u':barattolo_di_miele:': u'\U0001F36F',
u':biberon:': u'\U0001F37C',
u':bicchiere_di_latte:': u'\U0001F95B',
u':bevanda_calda:': u'\U00002615',
u':teiera:': u'\U0001FAD6',
u':tazza_da_tè_senza_manico:': u'\U0001F375',
u':sake:': u'\U0001F376',
u':bottiglia_stappata:': u'\U0001F37E',
u':bicchiere_di_vino:': u'\U0001F377',
u':cocktail:': u'\U0001F378',
u':cocktail_tropicale:': u'\U0001F379',
u':boccale_di_birra:': u'\U0001F37A',
u':boccali_di_birra:': u'\U0001F37B',
u':brindisi:': u'\U0001F942',
u':bicchiere_tumbler:': u'\U0001F943',
u':bicchiere_con_cannuccia:': u'\U0001F964',
u':bubble_tea:': u'\U0001F9CB',
u':bevanda_monodose:': u'\U0001F9C3',
u':mate:': u'\U0001F9C9',
u':cubetto_di_ghiaccio:': u'\U0001F9CA',
u':bacchette:': u'\U0001F962',
u':piatto_e_posate:': u'\U0001F37D\U0000FE0F',
u':forchetta_e_coltello:': u'\U0001F374',
u':cucchiaio:': u'\U0001F944',
u':coltello_da_cucina:': u'\U0001F52A',
u':anfora:': u'\U0001F3FA',
u':europa_e_africa:': u'\U0001F30D',
u':america:': u'\U0001F30E',
u':asia_e_australia:': u'\U0001F30F',
u':globo_con_meridiani:': u'\U0001F310',
u':mappa_mondiale:': u'\U0001F5FA\U0000FE0F',
u':mappa_del_giappone:': u'\U0001F5FE',
u':bussola:': u'\U0001F9ED',
u':montagna_innevata:': u'\U0001F3D4\U0000FE0F',
u':montagna:': u'\U000026F0\U0000FE0F',
u':vulcano:': u'\U0001F30B',
u':monte_fuji:': u'\U0001F5FB',
u':campeggio:': u'\U0001F3D5\U0000FE0F',
u':spiaggia_con_ombrellone:': u'\U0001F3D6\U0000FE0F',
u':deserto:': u'\U0001F3DC\U0000FE0F',
u':isola_deserta:': u'\U0001F3DD\U0000FE0F',
u':parco_nazionale:': u'\U0001F3DE\U0000FE0F',
u':stadio:': u'\U0001F3DF\U0000FE0F',
u':edificio_classico:': u'\U0001F3DB\U0000FE0F',
u':edificio_in_costruzione:': u'\U0001F3D7\U0000FE0F',
u':mattoni:': u'\U0001F9F1',
u':roccia:': u'\U0001FAA8',
u':legna:': u'\U0001FAB5',
u':capanna:': u'\U0001F6D6',
u':case:': u'\U0001F3D8\U0000FE0F',
u':casa_in_rovina:': u'\U0001F3DA\U0000FE0F',
u':casa:': u'\U0001F3E0',
u':casa_con_giardino:': u'\U0001F3E1',
u':edificio:': u'\U0001F3E2',
u':ufficio_postale_giapponese:': u'\U0001F3E3',
u':ufficio_postale:': u'\U0001F3E4',
u':ospedale:': u'\U0001F3E5',
u':banca:': u'\U0001F3E6',
u':hotel:': u'\U0001F3E8',
u':love_hotel:': u'\U0001F3E9',
u':minimarket:': u'\U0001F3EA',
u':scuola:': u'\U0001F3EB',
u':grande_magazzino:': u'\U0001F3EC',
u':fabbrica:': u'\U0001F3ED',
u':castello_giapponese:': u'\U0001F3EF',
u':castello:': u'\U0001F3F0',
u':chiesa_per_matrimonio:': u'\U0001F492',
u':torre_di_tokyo:': u'\U0001F5FC',
u':statua_della_libertà:': u'\U0001F5FD',
u':chiesa:': u'\U000026EA',
u':moschea:': u'\U0001F54C',
u':tempio_indù:': u'\U0001F6D5',
u':sinagoga:': u'\U0001F54D',
u':santuario_shintoista:': u'\U000026E9\U0000FE0F',
u':kaaba:': u'\U0001F54B',
u':fontana:': u'\U000026F2',
u':tenda:': u'\U000026FA',
u':nebbioso:': u'\U0001F301',
u':notte_stellata:': u'\U0001F303',
u':paesaggio_urbano:': u'\U0001F3D9\U0000FE0F',
u':alba_sulle_montagne:': u'\U0001F304',
u':alba:': u'\U0001F305',
u':città_al_tramonto:': u'\U0001F306',
u':tramonto:': u'\U0001F307',
u':ponte_di_notte:': u'\U0001F309',
u':sorgenti_termali:': u'\U00002668\U0000FE0F',
u':cavallo_da_giostra:': u'\U0001F3A0',
u':ruota_panoramica:': u'\U0001F3A1',
u':montagne_russe:': u'\U0001F3A2',
u':barbiere:': u'\U0001F488',
u':circo:': u'\U0001F3AA',
u':locomotiva:': u'\U0001F682',
u':vagone:': u'\U0001F683',
u':treno_alta_velocità:': u'\U0001F684',
u':treno_alta_velocità_punta_arrotondata:': u'\U0001F685',
u':treno:': u'\U0001F686',
u':metropolitana:': u'\U0001F687',
u':metrotranvia:': u'\U0001F688',
u':stazione:': u'\U0001F689',
u':tram:': u'\U0001F68A',
u':monorotaia:': u'\U0001F69D',
u':ferrovia_di_montagna:': u'\U0001F69E',
u':vagone_del_tram:': u'\U0001F68B',
u':bus:': u'\U0001F68C',
u':bus_in_arrivo:': u'\U0001F68D',
u':filobus:': u'\U0001F68E',
u':pulmino:': u'\U0001F690',
u':ambulanza:': u'\U0001F691',
u':camion_dei_pompieri:': u'\U0001F692',
u':macchina_della_polizia:': u'\U0001F693',
u':macchina_della_polizia_in_arrivo:': u'\U0001F694',
u':taxi:': u'\U0001F695',
u':taxi_in_arrivo:': u'\U0001F696',
u':auto:': u'\U0001F697',
u':auto_in_arrivo:': u'\U0001F698',
u':suv:': u'\U0001F699',
u':pickup:': u'\U0001F6FB',
u':camion:': u'\U0001F69A',
u':autoarticolato:': u'\U0001F69B',
u':trattore:': u'\U0001F69C',
u':auto_da_corsa:': u'\U0001F3CE\U0000FE0F',
u':motocicletta:': u'\U0001F3CD\U0000FE0F',
u':scooter:': u'\U0001F6F5',
u':sedia_a_rotelle_manuale:': u'\U0001F9BD',
u':sedia_a_rotelle_motorizzata:': u'\U0001F9BC',
u':risciò_a_motore:': u'\U0001F6FA',
u':bicicletta:': u'\U0001F6B2',
u':monopattino:': u'\U0001F6F4',
u':skateboard:': u'\U0001F6F9',
u':pattini_a_rotelle:': u'\U0001F6FC',
u':fermata_dell’autobus:': u'\U0001F68F',
u':autostrada:': u'\U0001F6E3\U0000FE0F',
u':binari:': u'\U0001F6E4\U0000FE0F',
u':barile_di_petrolio:': u'\U0001F6E2\U0000FE0F',
u':stazione_di_servizio:': u'\U000026FD',
u':lampeggiante:': u'\U0001F6A8',
u':semaforo_orizzontale:': u'\U0001F6A5',
u':semaforo_verticale:': u'\U0001F6A6',
u':segnale_di_stop:': u'\U0001F6D1',
u':area_cantiere:': u'\U0001F6A7',
u':ancora:': u'\U00002693',
u':barca_a_vela:': u'\U000026F5',
u':canoa:': u'\U0001F6F6',
u':motoscafo:': u'\U0001F6A4',
u':nave_da_crociera:': u'\U0001F6F3\U0000FE0F',
u':traghetto:': u'\U000026F4\U0000FE0F',
u':barca_a_motore:': u'\U0001F6E5\U0000FE0F',
u':nave:': u'\U0001F6A2',
u':aeroplano:': u'\U00002708\U0000FE0F',
u':aereo_turistico:': u'\U0001F6E9\U0000FE0F',
u':decollo:': u'\U0001F6EB',
u':atterraggio:': u'\U0001F6EC',
u':paracadute:': u'\U0001FA82',
u':sedile:': u'\U0001F4BA',
u':elicottero:': u'\U0001F681',
u':funivia:': u'\U0001F69F',
u':funicolare:': u'\U0001F6A0',
u':funivia_aerea:': u'\U0001F6A1',
u':satellite:': u'\U0001F6F0\U0000FE0F',
u':razzo:': u'\U0001F680',
u':disco_volante:': u'\U0001F6F8',
u':campanello_da_hotel:': u'\U0001F6CE\U0000FE0F',
u':valigia:': u'\U0001F9F3',
u':clessidra:': u'\U0000231B',
u':clessidra_che_scorre:': u'\U000023F3',
u':orologio:': u'\U0000231A',
u':sveglia:': u'\U000023F0',
u':cronometro:': u'\U000023F1\U0000FE0F',
u':timer:': u'\U000023F2\U0000FE0F',
u':orologio_da_mensola:': u'\U0001F570\U0000FE0F',
u':ore_dodici:': u'\U0001F55B',
u':ore_dodici_e_mezza:': u'\U0001F567',
u':ore_una:': u'\U0001F550',
u':ore_una_e_mezza:': u'\U0001F55C',
u':ore_due:': u'\U0001F551',
u':ore_due_e_mezza:': u'\U0001F55D',
u':ore_tre:': u'\U0001F552',
u':ore_tre_e_mezza:': u'\U0001F55E',
u':ore_quattro:': u'\U0001F553',
u':ore_quattro_e_mezza:': u'\U0001F55F',
u':ore_cinque:': u'\U0001F554',
u':ore_cinque_e_mezza:': u'\U0001F560',
u':ore_sei:': u'\U0001F555',
u':ore_sei_e_mezza:': u'\U0001F561',
u':ore_sette:': u'\U0001F556',
u':ore_sette_e_mezza:': u'\U0001F562',
u':ore_otto:': u'\U0001F557',
u':ore_otto_e_mezza:': u'\U0001F563',
u':ore_nove:': u'\U0001F558',
u':ore_nove_e_mezza:': u'\U0001F564',
u':ore_dieci:': u'\U0001F559',
u':ore_dieci_e_mezza:': u'\U0001F565',
u':ore_undici:': u'\U0001F55A',
u':ore_undici_e_mezza:': u'\U0001F566',
u':luna_nuova:': u'\U0001F311',
u':luna_crescente:': u'\U0001F312',
u':primo_quarto_di_luna:': u'\U0001F313',
u':luna_gibbosa_crescente:': u'\U0001F314',
u':luna_piena:': u'\U0001F315',
u':luna_gibbosa_calante:': u'\U0001F316',
u':ultimo_quarto_di_luna:': u'\U0001F317',
u':luna_calante:': u'\U0001F318',
u':spicchio_di_luna:': u'\U0001F319',
u':faccina_luna_nuova:': u'\U0001F31A',
u':faccina_primo_quarto_di_luna:': u'\U0001F31B',
u':faccina_ultimo_quarto_di_luna:': u'\U0001F31C',
u':termometro:': u'\U0001F321\U0000FE0F',
u':sole:': u'\U00002600\U0000FE0F',
u':faccina_luna_piena:': u'\U0001F31D',
u':faccina_sole:': u'\U0001F31E',
u':pianeta_con_satellite:': u'\U0001FA90',
u':stella:': u'\U00002B50',
u':stella_che_brilla:': u'\U0001F31F',
u':stella_cadente:': u'\U0001F320',
u':via_lattea:': u'\U0001F30C',
u':nuvola:': u'\U00002601\U0000FE0F',
u':sole_coperto:': u'\U000026C5',
u':temporale:': u'\U000026C8\U0000FE0F',
u':parzialmente_nuvoloso:': u'\U0001F324\U0000FE0F',
u':molto_nuvoloso:': u'\U0001F325\U0000FE0F',
u':sole_coperto_e_pioggia:': u'\U0001F326\U0000FE0F',
u':pioggia:': u'\U0001F327\U0000FE0F',
u':neve:': u'\U0001F328\U0000FE0F',
u':fulmini:': u'\U0001F329\U0000FE0F',
u':tornado:': u'\U0001F32A\U0000FE0F',
u':nebbia:': u'\U0001F32B\U0000FE0F',
u':vento:': u'\U0001F32C\U0000FE0F',
u':ciclone:': u'\U0001F300',
u':arcobaleno:': u'\U0001F308',
u':ombrello_chiuso:': u'\U0001F302',
u':ombrello:': u'\U00002602\U0000FE0F',
u':ombrello_con_gocce_di_pioggia:': u'\U00002614',
u':ombrellone:': u'\U000026F1\U0000FE0F',
u':alta_tensione:': u'\U000026A1',
u':fiocco_di_neve:': u'\U00002744\U0000FE0F',
u':pupazzo_di_neve:': u'\U00002603\U0000FE0F',
u':pupazzo_di_neve_senza_neve:': u'\U000026C4',
u':cometa:': u'\U00002604\U0000FE0F',
u':fuoco:': u'\U0001F525',
u':goccia:': u'\U0001F4A7',
u':onda:': u'\U0001F30A',
u':zucca_di_halloween:': u'\U0001F383',
u':albero_di_natale:': u'\U0001F384',
u':fuochi_d’artificio:': u'\U0001F386',
u':stellina_scintillante:': u'\U0001F387',
u':petardo:': u'\U0001F9E8',
u':stelline:': u'\U00002728',
u':palloncino:': u'\U0001F388',
u':spara_coriandoli:': u'\U0001F389',
u':coriandoli:': u'\U0001F38A',
u':albero_decorato:': u'\U0001F38B',
u':bambù_decorato:': u'\U0001F38D',
u':bambole_giapponesi:': u'\U0001F38E',
u':aquilone_a_forma_di_carpa:': u'\U0001F38F',
u':campana_al_vento:': u'\U0001F390',
u':contemplazione_della_luna:': u'\U0001F391',
u':busta_rossa:': u'\U0001F9E7',
u':fiocco:': u'\U0001F380',
u':regalo:': u'\U0001F381',
u':nastro:': u'\U0001F397\U0000FE0F',
u':biglietto_d’ingresso:': u'\U0001F39F\U0000FE0F',
u':biglietto:': u'\U0001F3AB',
u':medaglia_militare:': u'\U0001F396\U0000FE0F',
u':coppa:': u'\U0001F3C6',
u':medaglia_sportiva:': u'\U0001F3C5',
u':medaglia_d’oro:': u'\U0001F947',
u':medaglia_d’argento:': u'\U0001F948',
u':medaglia_di_bronzo:': u'\U0001F949',
u':pallone_da_calcio:': u'\U000026BD',
u':palla_da_baseball:': u'\U000026BE',
u':palla_da_softball:': u'\U0001F94E',
u':palla_da_pallacanestro:': u'\U0001F3C0',
u':palla_da_pallavolo:': u'\U0001F3D0',
u':football_americano:': u'\U0001F3C8',
u':pallone_da_rugby:': u'\U0001F3C9',
u':tennis:': u'\U0001F3BE',
u':frisbee:': u'\U0001F94F',
u':bowling:': u'\U0001F3B3',
u':cricket:': u'\U0001F3CF',
u':hockey_su_prato:': u'\U0001F3D1',
u':hockey_su_ghiaccio:': u'\U0001F3D2',
u':lacrosse:': u'\U0001F94D',
u':ping_pong:': u'\U0001F3D3',
u':badminton:': u'\U0001F3F8',
u':guantone_da_pugilato:': u'\U0001F94A',
u':kimono_per_arti_marziali:': u'\U0001F94B',
u':porta_da_calcio:': u'\U0001F945',
u':bandiera_in_buca:': u'\U000026F3',
u':pattinaggio_su_ghiaccio:': u'\U000026F8\U0000FE0F',
u':canna_da_pesca:': u'\U0001F3A3',
u':maschera_da_sub:': u'\U0001F93F',
u':maglia_da_corsa:': u'\U0001F3BD',
u':sci:': u'\U0001F3BF',
u':slitta:': u'\U0001F6F7',
u':stone_da_curling:': u'\U0001F94C',
u':bersaglio:': u'\U0001F3AF',
u':yo-yo:': u'\U0001FA80',
u':aquilone:': u'\U0001FA81',
u':palla_da_biliardo:': u'\U0001F3B1',
u':sfera_di_cristallo:': u'\U0001F52E',
u':bacchetta_magica:': u'\U0001FA84',
u':occhio_di_allah:': u'\U0001F9FF',
u':gamepad:': u'\U0001F3AE',
u':joystick:': u'\U0001F579\U0000FE0F',
u':slot_machine:': u'\U0001F3B0',
u':dado:': u'\U0001F3B2',
u':pezzo_di_puzzle:': u'\U0001F9E9',
u':orsetto:': u'\U0001F9F8',
u':pentolaccia:': u'\U0001FA85',
u':matrioska:': u'\U0001FA86',
u':picche:': u'\U00002660\U0000FE0F',
u':cuori:': u'\U00002665\U0000FE0F',
u':quadri:': u'\U00002666\U0000FE0F',
u':fiori:': u'\U00002663\U0000FE0F',
u':pedina_degli_scacchi:': u'\U0000265F\U0000FE0F',
u':jolly:': u'\U0001F0CF',
u':tessera_mahjong:': u'\U0001F004',
u':carta_da_gioco:': u'\U0001F3B4',
u':maschere:': u'\U0001F3AD',
u':quadro_incorniciato:': u'\U0001F5BC\U0000FE0F',
u':tavolozza:': u'\U0001F3A8',
u':filo:': u'\U0001F9F5',
u':ago_da_cucito:': u'\U0001FAA1',
u':gomitolo:': u'\U0001F9F6',
u':nodo:': u'\U0001FAA2',
u':occhiali_da_vista:': u'\U0001F453',
u':occhiali_da_sole:': u'\U0001F576\U0000FE0F',
u':maschera:': u'\U0001F97D',
u':camice:': u'\U0001F97C',
u':gilet_di_sicurezza:': u'\U0001F9BA',
u':cravatta:': u'\U0001F454',
u':t-shirt:': u'\U0001F455',
u':jeans:': u'\U0001F456',
u':sciarpa:': u'\U0001F9E3',
u':guanti:': u'\U0001F9E4',
u':cappotto:': u'\U0001F9E5',
u':calzini:': u'\U0001F9E6',
u':vestito:': u'\U0001F457',
u':kimono:': u'\U0001F458',
u':sari:': u'\U0001F97B',
u':costume_da_bagno_intero:': u'\U0001FA71',
u':slip:': u'\U0001FA72',
u':pantaloncini:': u'\U0001FA73',
u':bikini:': u'\U0001F459',
u':maglietta_da_donna:': u'\U0001F45A',
u':borsellino:': u'\U0001F45B',
u':borsa:': u'\U0001F45C',
u':pochette:': u'\U0001F45D',
u':borse_della_spesa:': u'\U0001F6CD\U0000FE0F',
u':zaino:': u'\U0001F392',
u':infradito:': u'\U0001FA74',
u':scarpa_da_uomo:': u'\U0001F45E',
u':scarpa_sportiva:': u'\U0001F45F',
u':stivale_da_trekking:': u'\U0001F97E',
u':ballerina:': u'\U0001F97F',
u':scarpa_con_il_tacco:': u'\U0001F460',
u':sandalo_da_donna:': u'\U0001F461',
u':scarpette_da_ballerina:': u'\U0001FA70',
u':stivale_da_donna:': u'\U0001F462',
u':corona:': u'\U0001F451',
u':cappello_da_donna:': u'\U0001F452',
u':cilindro:': u'\U0001F3A9',
u':cappello_da_laureato:': u'\U0001F393',
u':cappello_con_visiera:': u'\U0001F9E2',
u':elmetto_militare:': u'\U0001FA96',
u':elmetto_con_croce_bianca:': u'\U000026D1\U0000FE0F',
u':rosario:': u'\U0001F4FF',
u':rossetto:': u'\U0001F484',
u':anello:': u'\U0001F48D',
u':gemma:': u'\U0001F48E',
u':altoparlante_disattivato:': u'\U0001F507',
u':altoparlante_a_volume_basso:': u'\U0001F508',
u':altoparlante_a_volume_intermedio:': u'\U0001F509',
u':altoparlante_a_volume_alto:': u'\U0001F50A',
u':altoparlante:': u'\U0001F4E2',
u':megafono:': u'\U0001F4E3',
u':corno_postale:': u'\U0001F4EF',
u':campana:': u'\U0001F514',
u':campana_sbarrata:': u'\U0001F515',
u':pentagramma:': u'\U0001F3BC',
u':nota_musicale:': u'\U0001F3B5',
u':note_musicali:': u'\U0001F3B6',
u':microfono_radiofonico:': u'\U0001F399\U0000FE0F',
u':cursore_del_volume:': u'\U0001F39A\U0000FE0F',
u':manopole_di_controllo:': u'\U0001F39B\U0000FE0F',
u':microfono:': u'\U0001F3A4',
u':cuffie:': u'\U0001F3A7',
u':radio:': u'\U0001F4FB',
u':sassofono:': u'\U0001F3B7',
u':fisarmonica:': u'\U0001FA97',
u':chitarra:': u'\U0001F3B8',
u':piano:': u'\U0001F3B9',
u':tromba:': u'\U0001F3BA',
u':violino:': u'\U0001F3BB',
u':banjo:': u'\U0001FA95',
u':tamburo:': u'\U0001F941',
u':tamburo_lungo:': u'\U0001FA98',
u':telefono_cellulare:': u'\U0001F4F1',
u':telefono_cellulare_con_freccia:': u'\U0001F4F2',
u':telefono_fisso:': u'\U0000260E\U0000FE0F',
u':cornetta:': u'\U0001F4DE',
u':cercapersone:': u'\U0001F4DF',
u':fax:': u'\U0001F4E0',
u':batteria:': u'\U0001F50B',
u':spina_elettrica:': u'\U0001F50C',
u':computer_portatile:': u'\U0001F4BB',
u':computer_fisso:': u'\U0001F5A5\U0000FE0F',
u':stampante:': u'\U0001F5A8\U0000FE0F',
u':tastiera:': u'\U00002328\U0000FE0F',
u':mouse:': u'\U0001F5B1\U0000FE0F',
u':trackball:': u'\U0001F5B2\U0000FE0F',
u':minidisc:': u'\U0001F4BD',
u':floppy_disc:': u'\U0001F4BE',
u':disco_ottico:': u'\U0001F4BF',
u':dvd:': u'\U0001F4C0',
u':abaco:': u'\U0001F9EE',
u':cinepresa:': u'\U0001F3A5',
u':pellicola_cinematografica:': u'\U0001F39E\U0000FE0F',
u':proiettore_cinematografico:': u'\U0001F4FD\U0000FE0F',
u':ciak:': u'\U0001F3AC',
u':televisore:': u'\U0001F4FA',
u':fotocamera:': u'\U0001F4F7',
u':fotocamera_con_flash:': u'\U0001F4F8',
u':videocamera:': u'\U0001F4F9',
u':videocassetta:': u'\U0001F4FC',
u':lente_di_ingrandimento_rivolta_a_sinistra:': u'\U0001F50D',
u':lente_di_ingrandimento_rivolta_a_destra:': u'\U0001F50E',
u':candela:': u'\U0001F56F\U0000FE0F',
u':lampadina:': u'\U0001F4A1',
u':torcia:': u'\U0001F526',
u':lanterna_rossa:': u'\U0001F3EE',
u':diya:': u'\U0001FA94',
u':quaderno_con_copertina_decorata:': u'\U0001F4D4',
u':libro_chiuso:': u'\U0001F4D5',
u':libro_aperto:': u'\U0001F4D6',
u':libro_verde:': u'\U0001F4D7',
u':libro_blu:': u'\U0001F4D8',
u':libro_arancione:': u'\U0001F4D9',
u':libri:': u'\U0001F4DA',
u':quaderno:': u'\U0001F4D3',
u':quaderno_ad_anelli:': u'\U0001F4D2',
u':pagina_con_piega:': u'\U0001F4C3',
u':pergamena:': u'\U0001F4DC',
u':pagina:': u'\U0001F4C4',
u':quotidiano:': u'\U0001F4F0',
u':quotidiano_arrotolato:': u'\U0001F5DE\U0000FE0F',
u':etichette_segnalibro:': u'\U0001F4D1',
u':segnalibro:': u'\U0001F516',
u':etichetta:': u'\U0001F3F7\U0000FE0F',
u':sacco_di_soldi:': u'\U0001F4B0',
u':moneta:': u'\U0001FA99',
u':banconota_yen:': u'\U0001F4B4',
u':banconota_dollaro:': u'\U0001F4B5',
u':banconota_euro:': u'\U0001F4B6',
u':banconota_sterlina:': u'\U0001F4B7',
u':soldi_con_le_ali:': u'\U0001F4B8',
u':carta_di_credito:': u'\U0001F4B3',
u':scontrino:': u'\U0001F9FE',
u':grafico_ascendente:': u'\U0001F4B9',
u':busta:': u'\U00002709\U0000FE0F',
u':e-mail:': u'\U0001F4E7',
u':posta_in_arrivo:': u'\U0001F4E8',
u':posta_in_uscita:': u'\U0001F4E9',
u':posta_inviata:': u'\U0001F4E4',
u':posta_ricevuta:': u'\U0001F4E5',
u':pacco:': u'\U0001F4E6',
u':cassetta_postale_chiusa_bandierina_alzata:': u'\U0001F4EB',
u':cassetta_postale_chiusa_bandierina_abbassata:': u'\U0001F4EA',
u':cassetta_postale_aperta_bandierina_alzata:': u'\U0001F4EC',
u':cassetta_postale_con_bandierina_abbassata:': u'\U0001F4ED',
u':cassetta_delle_lettere:': u'\U0001F4EE',
u':urna_elettorale_con_scheda:': u'\U0001F5F3\U0000FE0F',
u':matita:': u'\U0000270F\U0000FE0F',
u':pennino:': u'\U00002712\U0000FE0F',
u':penna_stilografica:': u'\U0001F58B\U0000FE0F',
u':penna_a_sfera:': u'\U0001F58A\U0000FE0F',
u':pennello:': u'\U0001F58C\U0000FE0F',
u':pastello_a_cera:': u'\U0001F58D\U0000FE0F',
u':memo:': u'\U0001F4DD',
u':valigetta_24_ore:': u'\U0001F4BC',
u':cartella_file:': u'\U0001F4C1',
u':cartella_aperta:': u'\U0001F4C2',
u':divisori_per_schedario:': u'\U0001F5C2\U0000FE0F',
u':calendario:': u'\U0001F4C5',
u':calendario_a_strappo:': u'\U0001F4C6',
u':blocco_per_appunti_con_spirale:': u'\U0001F5D2\U0000FE0F',
u':calendario_a_spirale:': u'\U0001F5D3\U0000FE0F',
u':schedario:': u'\U0001F4C7',
u':grafico_con_andamento_positivo:': u'\U0001F4C8',
u':grafico_con_andamento_negativo:': u'\U0001F4C9',
u':grafico_a_barre:': u'\U0001F4CA',
u':portablocco:': u'\U0001F4CB',
u':puntina:': u'\U0001F4CC',
u':puntina_arrotondata:': u'\U0001F4CD',
u':graffetta:': u'\U0001F4CE',
u':graffette_attaccate:': u'\U0001F587\U0000FE0F',
u':righello:': u'\U0001F4CF',
u':squadra:': u'\U0001F4D0',
u':forbici:': u'\U00002702\U0000FE0F',
u':schedario_da_tavolo:': u'\U0001F5C3\U0000FE0F',
u':schedario_da_ufficio:': u'\U0001F5C4\U0000FE0F',
u':cestino:': u'\U0001F5D1\U0000FE0F',
u':lucchetto_chiuso:': u'\U0001F512',
u':lucchetto_aperto:': u'\U0001F513',
u':lucchetto_con_penna_stilo:': u'\U0001F50F',
u':lucchetto_chiuso_con_chiave:': u'\U0001F510',
u':chiave:': u'\U0001F511',
u':chiave_antica:': u'\U0001F5DD\U0000FE0F',
u':martello:': u'\U0001F528',
u':ascia:': u'\U0001FA93',
u':piccone:': u'\U000026CF\U0000FE0F',
u':piccone_e_martello:': u'\U00002692\U0000FE0F',
u':martello_e_chiave_inglese:': u'\U0001F6E0\U0000FE0F',
u':pugnale:': u'\U0001F5E1\U0000FE0F',
u':spade_incrociate:': u'\U00002694\U0000FE0F',
u':pistola:': u'\U0001F52B',
u':boomerang:': u'\U0001FA83',
u':arco_e_freccia:': u'\U0001F3F9',
u':scudo:': u'\U0001F6E1\U0000FE0F',
u':sega_da_falegname:': u'\U0001FA9A',
u':chiave_inglese:': u'\U0001F527',
u':cacciavite:': u'\U0001FA9B',
u':vite_e_bullone:': u'\U0001F529',
u':ingranaggio:': u'\U00002699\U0000FE0F',
u':morsetto:': u'\U0001F5DC\U0000FE0F',
u':bilancia_a_doppio_piatto:': u'\U00002696\U0000FE0F',
u':bastone_per_non_vedenti:': u'\U0001F9AF',
u':collegamento:': u'\U0001F517',
u':catene:': u'\U000026D3\U0000FE0F',
u':gancio:': u'\U0001FA9D',
u':cassetta_degli_attrezzi:': u'\U0001F9F0',
u':calamita:': u'\U0001F9F2',
u':scala:': u'\U0001FA9C',
u':alambicco:': u'\U00002697\U0000FE0F',
u':provetta:': u'\U0001F9EA',
u':piastra_di_petri:': u'\U0001F9EB',
u':dna:': u'\U0001F9EC',
u':microscopio:': u'\U0001F52C',
u':telescopio:': u'\U0001F52D',
u':antenna_satellitare:': u'\U0001F4E1',
u':siringa:': u'\U0001F489',
u':goccia_di_sangue:': u'\U0001FA78',
u':pillola:': u'\U0001F48A',
u':cerotto:': u'\U0001FA79',
u':stetoscopio:': u'\U0001FA7A',
u':porta:': u'\U0001F6AA',
u':ascensore:': u'\U0001F6D7',
u':specchio:': u'\U0001FA9E',
u':finestra:': u'\U0001FA9F',
u':letto:': u'\U0001F6CF\U0000FE0F',
u':divano_e_lampada:': u'\U0001F6CB\U0000FE0F',
u':sedia:': u'\U0001FA91',
u':toilette:': u'\U0001F6BD',
u':sturalavandini:': u'\U0001FAA0',
u':doccia:': u'\U0001F6BF',
u':vasca:': u'\U0001F6C1',
u':trappola_per_topi:': u'\U0001FAA4',
u':rasoio:': u'\U0001FA92',
u':flacone:': u'\U0001F9F4',
u':spilla_da_balia:': u'\U0001F9F7',
u':scopa:': u'\U0001F9F9',
u':cesto:': u'\U0001F9FA',
u':rotolo_di_carta_igienica:': u'\U0001F9FB',
u':secchio:': u'\U0001FAA3',
u':sapone:': u'\U0001F9FC',
u':spazzolino_da_denti:': u'\U0001FAA5',
u':spugna:': u'\U0001F9FD',
u':estintore:': u'\U0001F9EF',
u':carrello:': u'\U0001F6D2',
u':sigaretta:': u'\U0001F6AC',
u':bara:': u'\U000026B0\U0000FE0F',
u':lapide:': u'\U0001FAA6',
u':urna_funeraria:': u'\U000026B1\U0000FE0F',
u':moai:': u'\U0001F5FF',
u':cartello:': u'\U0001FAA7',
u':simbolo_dello_sportello_bancomat:': u'\U0001F3E7',
u':simbolo_per_la_raccolta_dei_rifiuti:': u'\U0001F6AE',
u':acqua_potabile:': u'\U0001F6B0',
u':simbolo_della_disabilità:': u'\U0000267F',
u':simbolo_del_bagno_degli_uomini:': u'\U0001F6B9',
u':simbolo_del_bagno_delle_donne:': u'\U0001F6BA',
u':simbolo_dei_servizi_igienici:': u'\U0001F6BB',
u':simbolo_con_immagine_di_bambino:': u'\U0001F6BC',
u':simbolo_del_wc:': u'\U0001F6BE',
u':simbolo_del_controllo_passaporti:': u'\U0001F6C2',
u':simbolo_della_dogana:': u'\U0001F6C3',
u':simbolo_del_ritiro_bagagli:': u'\U0001F6C4',
u':simbolo_del_deposito_bagagli:': u'\U0001F6C5',
u':simbolo_di_avviso:': u'\U000026A0\U0000FE0F',
u':attraversamento_bambini:': u'\U0001F6B8',
u':segnale_di_divieto_di_accesso:': u'\U000026D4',
u':segnale_di_divieto:': u'\U0001F6AB',
u':segnale_di_divieto_di_transito_delle_biciclette:': u'\U0001F6B3',
u':segnale_di_vietato_fumare:': u'\U0001F6AD',
u':simbolo_del_divieto_di_gettare_rifiuti:': u'\U0001F6AF',
u':simbolo_di_acqua_non_potabile:': u'\U0001F6B1',
u':simbolo_del_divieto_di_transito_pedoni:': u'\U0001F6B7',
u':simbolo_che_vieta_l’utilizzo_dei_telefoni_cellulari:': u'\U0001F4F5',
u':simbolo_di_divieto_ai_minorenni:': u'\U0001F51E',
u':simbolo_della_radioattività:': u'\U00002622\U0000FE0F',
u':simbolo_del_rischio_biologico:': u'\U00002623\U0000FE0F',
u':freccia_rivolta_verso_l’alto:': u'\U00002B06\U0000FE0F',
u':freccia_rivolta_verso_destra_che_punta_in_alto:': u'\U00002197\U0000FE0F',
u':freccia_rivolta_verso_destra:': u'\U000027A1\U0000FE0F',
u':freccia_in_basso_a_destra:': u'\U00002198\U0000FE0F',
u':freccia_rivolta_verso_il_basso:': u'\U00002B07\U0000FE0F',
u':freccia_in_basso_a_sinistra:': u'\U00002199\U0000FE0F',
u':freccia_rivolta_a_sinistra:': u'\U00002B05\U0000FE0F',
u':freccia_in_alto_a_sinistra:': u'\U00002196\U0000FE0F',
u':freccia_su-giù:': u'\U00002195\U0000FE0F',
u':freccia_sinistra-destra:': u'\U00002194\U0000FE0F',
u':freccia_curva_a_sinistra:': u'\U000021A9\U0000FE0F',
u':freccia_curva_a_destra:': u'\U000021AA\U0000FE0F',
u':freccia_curva_in_alto:': u'\U00002934\U0000FE0F',
u':freccia_curva_in_basso:': u'\U00002935\U0000FE0F',
u':frecce_verticali_che_ruotano_in_senso_orario:': u'\U0001F503',
u':pulsante_con_frecce_che_ruotano_in_senso_antiorario:': u'\U0001F504',
u':freccia_back:': u'\U0001F519',
u':freccia_end:': u'\U0001F51A',
u':freccia_on:': u'\U0001F51B',
u':freccia_soon:': u'\U0001F51C',
u':freccia_top:': u'\U0001F51D',
u':luogo_di_culto:': u'\U0001F6D0',
u':simbolo_dell’atomo:': u'\U0000269B\U0000FE0F',
u':om:': u'\U0001F549\U0000FE0F',
u':stella_di_david:': u'\U00002721\U0000FE0F',
u':ruota_del_dharma:': u'\U00002638\U0000FE0F',
u':yin_e_yang:': u'\U0000262F\U0000FE0F',
u':croce_latina:': u'\U0000271D\U0000FE0F',
u':croce_ortodossa:': u'\U00002626\U0000FE0F',
u':mezzaluna_e_stella:': u'\U0000262A\U0000FE0F',
u':simbolo_della_pace:': u'\U0000262E\U0000FE0F',
u':menorah:': u'\U0001F54E',
u':stella_a_sei_punte:': u'\U0001F52F',
u':segno_zodiacale_dell’ariete:': u'\U00002648',
u':segno_zodiacale_del_toro:': u'\U00002649',
u':segno_zodiacale_dei_gemelli:': u'\U0000264A',
u':segno_zodiacale_del_cancro:': u'\U0000264B',
u':segno_zodiacale_del_leone:': u'\U0000264C',
u':segno_zodiacale_della_vergine:': u'\U0000264D',
u':segno_zodiacale_della_bilancia:': u'\U0000264E',
u':segno_zodiacale_dello_scorpione:': u'\U0000264F',
u':segno_zodiacale_del_saggitario:': u'\U00002650',
u':segno_zodiacale_del_capricorno:': u'\U00002651',
u':segno_zodiacale_dell’acquario:': u'\U00002652',
u':segno_zodiacale_dei_pesci:': u'\U00002653',
u':segno_zodiacale_dell’ofiuco:': u'\U000026CE',
u':pulsante_di_riproduzione_casuale:': u'\U0001F500',
u':pulsante_di_ripetizione_della_riproduzione:': u'\U0001F501',
u':pulsante_di_ripetizione_della_riproduzione_di_una_traccia:': u'\U0001F502',
u':pulsante_di_riproduzione:': u'\U000025B6\U0000FE0F',
u':pulsante_di_avanzamento_rapido:': u'\U000023E9',
u':traccia_successiva:': u'\U000023ED\U0000FE0F',
u':pulsante_riproduci_pausa:': u'\U000023EF\U0000FE0F',
u':pulsante_di_riavvolgimento:': u'\U000025C0\U0000FE0F',
u':pulsante_di_riavvolgimento_rapido:': u'\U000023EA',
u':pulsante_traccia_precedente:': u'\U000023EE\U0000FE0F',
u':pulsante_a_triangolo_rivolto_verso_l’alto:': u'\U0001F53C',
u':pulsante_doppia_freccia_in_alto:': u'\U000023EB',
u':pulsante_a_triangolo_rivolto_verso_il_basso:': u'\U0001F53D',
u':doppia_freccia_in_basso:': u'\U000023EC',
u':pulsante_pausa:': u'\U000023F8\U0000FE0F',
u':pulsante_stop:': u'\U000023F9\U0000FE0F',
u':pulsante_di_registrazione:': u'\U000023FA\U0000FE0F',
u':pulsante_di_espulsione:': u'\U000023CF\U0000FE0F',
u':simbolo_del_cinema:': u'\U0001F3A6',
u':luminosità_bassa:': u'\U0001F505',
u':luminosità_elevata:': u'\U0001F506',
u':segnale_cellulare:': u'\U0001F4F6',
u':modalità_vibrazione:': u'\U0001F4F3',
u':cellulare_spento:': u'\U0001F4F4',
u':simbolo_genere_femminile:': u'\U00002640\U0000FE0F',
u':simbolo_genere_maschile:': u'\U00002642\U0000FE0F',
u':simbolo_transgender:': u'\U000026A7\U0000FE0F',
u':segno_moltiplicazione:': u'\U00002716\U0000FE0F',
u':simbolo_dell’addizione:': u'\U00002795',
u':simbolo_della_sottrazione:': u'\U00002796',
u':simbolo_della_divisione:': u'\U00002797',
u':simbolo_dell’infinito:': u'\U0000267E\U0000FE0F',
u':doppio_punto_esclamativo:': u'\U0000203C\U0000FE0F',
u':punto_esclamativo_e_interrogativo:': u'\U00002049\U0000FE0F',
u':punto_interrogativo_rosso:': u'\U00002753',
u':punto_interrogativo_bianco:': u'\U00002754',
u':punto_esclamativo_bianco:': u'\U00002755',
u':punto_esclamativo_rosso:': u'\U00002757',
u':trattino_ondulato:': u'\U00003030\U0000FE0F',
u':cambio_valuta:': u'\U0001F4B1',
u':dollaro:': u'\U0001F4B2',
u':simbolo_della_medicina:': u'\U00002695\U0000FE0F',
u':simbolo_del_riciclaggio:': u'\U0000267B\U0000FE0F',
u':giglio_araldico:': u'\U0000269C\U0000FE0F',
u':tridente:': u'\U0001F531',
u':tesserino_per_nome:': u'\U0001F4DB',
u':ideogramma_giapponese_di_“principiante”:': u'\U0001F530',
u':cerchio:': u'\U00002B55',
u':segno_di_spunta:': u'\U00002705',
u':riquadro_con_spunta:': u'\U00002611\U0000FE0F',
u':segno_di_spunta_nero:': u'\U00002714\U0000FE0F',
u':croce:': u'\U0000274C',
u':croce_con_quadrato:': u'\U0000274E',
u':occhiello:': u'\U000027B0',
u':doppio_occhiello:': u'\U000027BF',
u':simbolo_di_alternanza_delle_parti:': u'\U0000303D\U0000FE0F',
u':asterisco:': u'\U00002733\U0000FE0F',
u':stella_stilizzata:': u'\U00002734\U0000FE0F',
u':scintilla_stilizzata:': u'\U00002747\U0000FE0F',
u':copyright:': u'\U000000A9\U0000FE0F',
u':marchio_registrato:': u'\U000000AE\U0000FE0F',
u':marchio:': u'\U00002122\U0000FE0F',
    u':tasto_#:': u'\U00000023\U0000FE0F\U000020E3',
u':tasto_*:': u'\U0000002A\U0000FE0F\U000020E3',
u':tasto_0:': u'\U00000030\U0000FE0F\U000020E3',
u':tasto_1:': u'\U00000031\U0000FE0F\U000020E3',
u':tasto_2:': u'\U00000032\U0000FE0F\U000020E3',
u':tasto_3:': u'\U00000033\U0000FE0F\U000020E3',
u':tasto_4:': u'\U00000034\U0000FE0F\U000020E3',
u':tasto_5:': u'\U00000035\U0000FE0F\U000020E3',
u':tasto_6:': u'\U00000036\U0000FE0F\U000020E3',
u':tasto_7:': u'\U00000037\U0000FE0F\U000020E3',
u':tasto_8:': u'\U00000038\U0000FE0F\U000020E3',
u':tasto_9:': u'\U00000039\U0000FE0F\U000020E3',
u':tasto_10:': u'\U0001F51F',
u':pulsante_con_lettere_latine_maiuscole:': u'\U0001F520',
u':pulsante_con_lettere_latine_minuscole:': u'\U0001F521',
u':pulsante_con_numeri:': u'\U0001F522',
u':pulsante_con_simboli:': u'\U0001F523',
u':pulsante_con_lettere_latine:': u'\U0001F524',
u':gruppo_sanguigno_a:': u'\U0001F170\U0000FE0F',
u':gruppo_sanguigno_ab:': u'\U0001F18E',
u':gruppo_sanguigno_b:': u'\U0001F171\U0000FE0F',
u':pulsante_cl:': u'\U0001F191',
u':pulsante_cool:': u'\U0001F192',
u':pulsante_free:': u'\U0001F193',
u':punto_informazioni:': u'\U00002139\U0000FE0F',
u':pulsante_id:': u'\U0001F194',
u':pulsante_m_cerchiata:': u'\U000024C2\U0000FE0F',
u':pulsante_new:': u'\U0001F195',
u':pulsante_ng:': u'\U0001F196',
u':gruppo_sanguigno_0:': u'\U0001F17E\U0000FE0F',
u':pulsante_ok:': u'\U0001F197',
u':pulsante_p:': u'\U0001F17F\U0000FE0F',
u':pulsante_sos:': u'\U0001F198',
u':pulsante_up!:': u'\U0001F199',
u':pulsante_vs:': u'\U0001F19A',
u':ideogramma_giapponese_per_“qui”:': u'\U0001F201',
u':ideogramma_giapponese_di_“costo_del_servizio”:': u'\U0001F202\U0000FE0F',
u':ideogramma_giapponese_di_“importo_mensile”:': u'\U0001F237\U0000FE0F',
u':ideogramma_giapponese_di_“a_pagamento”:': u'\U0001F236',
u':ideogramma_giapponese_di_“riservato”:': u'\U0001F22F',
u':ideogramma_giapponese_di_“occasione”:': u'\U0001F250',
u':ideogramma_giapponese_di_“sconto”:': u'\U0001F239',
u':ideogramma_giapponese_di_“gratis”:': u'\U0001F21A',
u':ideogramma_giapponese_di_“proibito”:': u'\U0001F232',
u':ideogramma_giapponese_di_“accettabile”:': u'\U0001F251',
u':ideogramma_giapponese_di_“candidatura”:': u'\U0001F238',
u':ideogramma_giapponese_di_“voto_di_sufficienza”:': u'\U0001F234',
u':ideogramma_giapponese_di_“posto_libero”:': u'\U0001F233',
u':ideogramma_giapponese_di_“congratulazioni”:': u'\U00003297\U0000FE0F',
u':ideogramma_giapponese_di_“segreto”:': u'\U00003299\U0000FE0F',
u':ideogramma_giapponese_di_“aperto_al_pubblico”:': u'\U0001F23A',
u':ideogramma_giapponese_di_“nessun_posto_libero”:': u'\U0001F235',
u':cerchio_rosso:': u'\U0001F534',
u':cerchio_arancione:': u'\U0001F7E0',
u':cerchio_giallo:': u'\U0001F7E1',
u':cerchio_verde:': u'\U0001F7E2',
u':cerchio_blu:': u'\U0001F535',
u':cerchio_viola:': u'\U0001F7E3',
u':cerchio_marrone:': u'\U0001F7E4',
u':cerchio_nero:': u'\U000026AB',
u':cerchio_bianco:': u'\U000026AA',
u':quadrato_rosso:': u'\U0001F7E5',
u':quadrato_arancione:': u'\U0001F7E7',
u':quadrato_giallo:': u'\U0001F7E8',
u':quadrato_verde:': u'\U0001F7E9',
u':quadrato_blu:': u'\U0001F7E6',
u':quadrato_viola:': u'\U0001F7EA',
u':quadrato_marrone:': u'\U0001F7EB',
u':quadrato_nero_grande:': u'\U00002B1B',
u':quadrato_bianco_grande:': u'\U00002B1C',
u':quadrato_nero_medio:': u'\U000025FC\U0000FE0F',
u':quadrato_bianco_medio:': u'\U000025FB\U0000FE0F',
u':quadrato_nero_medio-piccolo:': u'\U000025FE',
u':quadrato_bianco_medio-piccolo:': u'\U000025FD',
u':quadrato_nero_piccolo:': u'\U000025AA\U0000FE0F',
u':quadrato_bianco_piccolo:': u'\U000025AB\U0000FE0F',
u':rombo_arancione_grande:': u'\U0001F536',
u':rombo_blu_grande:': u'\U0001F537',
u':rombo_arancione_piccolo:': u'\U0001F538',
u':rombo_blu_piccolo:': u'\U0001F539',
u':triangolo_rosso_con_punta_verso_l’alto:': u'\U0001F53A',
u':triangolo_rosso_con_punta_verso_il_basso:': u'\U0001F53B',
u':petalo_di_fiore:': u'\U0001F4A0',
u':pulsante_rotondo:': u'\U0001F518',
u':tasto_quadrato_nero_con_bordo_bianco:': u'\U0001F533',
u':tasto_quadrato_bianco_con_bordo_nero:': u'\U0001F532',
u':bandiera_a_scacchi:': u'\U0001F3C1',
u':bandierina_rossa:': u'\U0001F6A9',
u':bandiere_del_giappone_incrociate:': u'\U0001F38C',
u':bandiera_nera:': u'\U0001F3F4',
u':bandiera_bianca:': u'\U0001F3F3\U0000FE0F',
u':bandiera_arcobaleno:': u'\U0001F3F3\U0000FE0F\U0000200D\U0001F308',
u':bandiera_transgender:': u'\U0001F3F3\U0000FE0F\U0000200D\U000026A7\U0000FE0F',
u':bandiera_dei_pirati:': u'\U0001F3F4\U0000200D\U00002620\U0000FE0F',
u':bandiera_isola_ascensione:': u'\U0001F1E6\U0001F1E8',
u':bandiera_andorra:': u'\U0001F1E6\U0001F1E9',
u':bandiera_emirati_arabi_uniti:': u'\U0001F1E6\U0001F1EA',
u':bandiera_afghanistan:': u'\U0001F1E6\U0001F1EB',
u':bandiera_antigua_e_barbuda:': u'\U0001F1E6\U0001F1EC',
u':bandiera_anguilla:': u'\U0001F1E6\U0001F1EE',
u':bandiera_albania:': u'\U0001F1E6\U0001F1F1',
u':bandiera_armenia:': u'\U0001F1E6\U0001F1F2',
u':bandiera_angola:': u'\U0001F1E6\U0001F1F4',
u':bandiera_antartide:': u'\U0001F1E6\U0001F1F6',
u':bandiera_argentina:': u'\U0001F1E6\U0001F1F7',
u':bandiera_samoa_americane:': u'\U0001F1E6\U0001F1F8',
u':bandiera_austria:': u'\U0001F1E6\U0001F1F9',
u':bandiera_australia:': u'\U0001F1E6\U0001F1FA',
u':bandiera_aruba:': u'\U0001F1E6\U0001F1FC',
u':bandiera_isole_åland:': u'\U0001F1E6\U0001F1FD',
u':bandiera_azerbaigian:': u'\U0001F1E6\U0001F1FF',
u':bandiera_bosnia_ed_erzegovina:': u'\U0001F1E7\U0001F1E6',
u':bandiera_barbados:': u'\U0001F1E7\U0001F1E7',
u':bandiera_bangladesh:': u'\U0001F1E7\U0001F1E9',
u':bandiera_belgio:': u'\U0001F1E7\U0001F1EA',
u':bandiera_burkina_faso:': u'\U0001F1E7\U0001F1EB',
u':bandiera_bulgaria:': u'\U0001F1E7\U0001F1EC',
u':bandiera_bahrein:': u'\U0001F1E7\U0001F1ED',
u':bandiera_burundi:': u'\U0001F1E7\U0001F1EE',
u':bandiera_benin:': u'\U0001F1E7\U0001F1EF',
u':bandiera_saint-barthélemy:': u'\U0001F1E7\U0001F1F1',
u':bandiera_bermuda:': u'\U0001F1E7\U0001F1F2',
u':bandiera_brunei:': u'\U0001F1E7\U0001F1F3',
u':bandiera_bolivia:': u'\U0001F1E7\U0001F1F4',
u':bandiera_caraibi_olandesi:': u'\U0001F1E7\U0001F1F6',
u':bandiera_brasile:': u'\U0001F1E7\U0001F1F7',
u':bandiera_bahamas:': u'\U0001F1E7\U0001F1F8',
u':bandiera_bhutan:': u'\U0001F1E7\U0001F1F9',
u':bandiera_isola_bouvet:': u'\U0001F1E7\U0001F1FB',
u':bandiera_botswana:': u'\U0001F1E7\U0001F1FC',
u':bandiera_bielorussia:': u'\U0001F1E7\U0001F1FE',
u':bandiera_belize:': u'\U0001F1E7\U0001F1FF',
u':bandiera_canada:': u'\U0001F1E8\U0001F1E6',
u':bandiera_isole_cocos_(keeling):': u'\U0001F1E8\U0001F1E8',
u':bandiera_congo_–_kinshasa:': u'\U0001F1E8\U0001F1E9',
u':bandiera_repubblica_centrafricana:': u'\U0001F1E8\U0001F1EB',
u':bandiera_congo-brazzaville:': u'\U0001F1E8\U0001F1EC',
u':bandiera_svizzera:': u'\U0001F1E8\U0001F1ED',
u':bandiera_costa_d’avorio:': u'\U0001F1E8\U0001F1EE',
u':bandiera_isole_cook:': u'\U0001F1E8\U0001F1F0',
u':bandiera_cile:': u'\U0001F1E8\U0001F1F1',
u':bandiera_camerun:': u'\U0001F1E8\U0001F1F2',
u':bandiera_cina:': u'\U0001F1E8\U0001F1F3',
u':bandiera_colombia:': u'\U0001F1E8\U0001F1F4',
u':bandiera_isola_di_clipperton:': u'\U0001F1E8\U0001F1F5',
u':bandiera_costa_rica:': u'\U0001F1E8\U0001F1F7',
u':bandiera_cuba:': u'\U0001F1E8\U0001F1FA',
u':bandiera_capo_verde:': u'\U0001F1E8\U0001F1FB',
u':bandiera_curaçao:': u'\U0001F1E8\U0001F1FC',
u':bandiera_isola_christmas:': u'\U0001F1E8\U0001F1FD',
u':bandiera_cipro:': u'\U0001F1E8\U0001F1FE',
u':bandiera_cechia:': u'\U0001F1E8\U0001F1FF',
u':bandiera_germania:': u'\U0001F1E9\U0001F1EA',
u':bandiera_diego_garcia:': u'\U0001F1E9\U0001F1EC',
u':bandiera_gibuti:': u'\U0001F1E9\U0001F1EF',
u':bandiera_danimarca:': u'\U0001F1E9\U0001F1F0',
u':bandiera_dominica:': u'\U0001F1E9\U0001F1F2',
u':bandiera_repubblica_dominicana:': u'\U0001F1E9\U0001F1F4',
u':bandiera_algeria:': u'\U0001F1E9\U0001F1FF',
u':bandiera_ceuta_e_melilla:': u'\U0001F1EA\U0001F1E6',
u':bandiera_ecuador:': u'\U0001F1EA\U0001F1E8',
u':bandiera_estonia:': u'\U0001F1EA\U0001F1EA',
u':bandiera_egitto:': u'\U0001F1EA\U0001F1EC',
u':bandiera_sahara_occidentale:': u'\U0001F1EA\U0001F1ED',
u':bandiera_eritrea:': u'\U0001F1EA\U0001F1F7',
u':bandiera_spagna:': u'\U0001F1EA\U0001F1F8',
u':bandiera_etiopia:': u'\U0001F1EA\U0001F1F9',
u':bandiera_unione_europea:': u'\U0001F1EA\U0001F1FA',
u':bandiera_finlandia:': u'\U0001F1EB\U0001F1EE',
u':bandiera_figi:': u'\U0001F1EB\U0001F1EF',
u':bandiera_isole_falkland:': u'\U0001F1EB\U0001F1F0',
u':bandiera_micronesia:': u'\U0001F1EB\U0001F1F2',
u':bandiera_isole_fær_øer:': u'\U0001F1EB\U0001F1F4',
u':bandiera_francia:': u'\U0001F1EB\U0001F1F7',
u':bandiera_gabon:': u'\U0001F1EC\U0001F1E6',
u':bandiera_regno_unito:': u'\U0001F1EC\U0001F1E7',
u':bandiera_grenada:': u'\U0001F1EC\U0001F1E9',
u':bandiera_georgia:': u'\U0001F1EC\U0001F1EA',
u':bandiera_guyana_francese:': u'\U0001F1EC\U0001F1EB',
u':bandiera_guernsey:': u'\U0001F1EC\U0001F1EC',
u':bandiera_ghana:': u'\U0001F1EC\U0001F1ED',
u':bandiera_gibilterra:': u'\U0001F1EC\U0001F1EE',
u':bandiera_groenlandia:': u'\U0001F1EC\U0001F1F1',
u':bandiera_gambia:': u'\U0001F1EC\U0001F1F2',
u':bandiera_guinea:': u'\U0001F1EC\U0001F1F3',
u':bandiera_guadalupa:': u'\U0001F1EC\U0001F1F5',
u':bandiera_guinea_equatoriale:': u'\U0001F1EC\U0001F1F6',
u':bandiera_grecia:': u'\U0001F1EC\U0001F1F7',
u':bandiera_georgia_del_sud_e_sandwich_australi:': u'\U0001F1EC\U0001F1F8',
u':bandiera_guatemala:': u'\U0001F1EC\U0001F1F9',
u':bandiera_guam:': u'\U0001F1EC\U0001F1FA',
u':bandiera_guinea-bissau:': u'\U0001F1EC\U0001F1FC',
u':bandiera_guyana:': u'\U0001F1EC\U0001F1FE',
u':bandiera_ras_di_hong_kong:': u'\U0001F1ED\U0001F1F0',
u':bandiera_isole_heard_e_mcdonald:': u'\U0001F1ED\U0001F1F2',
u':bandiera_honduras:': u'\U0001F1ED\U0001F1F3',
u':bandiera_croazia:': u'\U0001F1ED\U0001F1F7',
u':bandiera_haiti:': u'\U0001F1ED\U0001F1F9',
u':bandiera_ungheria:': u'\U0001F1ED\U0001F1FA',
u':bandiera_isole_canarie:': u'\U0001F1EE\U0001F1E8',
u':bandiera_indonesia:': u'\U0001F1EE\U0001F1E9',
u':bandiera_irlanda:': u'\U0001F1EE\U0001F1EA',
u':bandiera_israele:': u'\U0001F1EE\U0001F1F1',
u':bandiera_isola_di_man:': u'\U0001F1EE\U0001F1F2',
u':bandiera_india:': u'\U0001F1EE\U0001F1F3',
u':bandiera_territorio_britannico_dell’oceano_indiano:': u'\U0001F1EE\U0001F1F4',
u':bandiera_iraq:': u'\U0001F1EE\U0001F1F6',
u':bandiera_iran:': u'\U0001F1EE\U0001F1F7',
u':bandiera_islanda:': u'\U0001F1EE\U0001F1F8',
u':bandiera_italia:': u'\U0001F1EE\U0001F1F9',
u':bandiera_jersey:': u'\U0001F1EF\U0001F1EA',
u':bandiera_giamaica:': u'\U0001F1EF\U0001F1F2',
u':bandiera_giordania:': u'\U0001F1EF\U0001F1F4',
u':bandiera_giappone:': u'\U0001F1EF\U0001F1F5',
u':bandiera_kenya:': u'\U0001F1F0\U0001F1EA',
u':bandiera_kirghizistan:': u'\U0001F1F0\U0001F1EC',
u':bandiera_cambogia:': u'\U0001F1F0\U0001F1ED',
u':bandiera_kiribati:': u'\U0001F1F0\U0001F1EE',
u':bandiera_comore:': u'\U0001F1F0\U0001F1F2',
u':bandiera_saint_kitts_e_nevis:': u'\U0001F1F0\U0001F1F3',
u':bandiera_corea_del_nord:': u'\U0001F1F0\U0001F1F5',
u':bandiera_corea_del_sud:': u'\U0001F1F0\U0001F1F7',
u':bandiera_kuwait:': u'\U0001F1F0\U0001F1FC',
u':bandiera_isole_cayman:': u'\U0001F1F0\U0001F1FE',
u':bandiera_kazakistan:': u'\U0001F1F0\U0001F1FF',
u':bandiera_laos:': u'\U0001F1F1\U0001F1E6',
u':bandiera_libano:': u'\U0001F1F1\U0001F1E7',
u':bandiera_saint_lucia:': u'\U0001F1F1\U0001F1E8',
u':bandiera_liechtenstein:': u'\U0001F1F1\U0001F1EE',
u':bandiera_sri_lanka:': u'\U0001F1F1\U0001F1F0',
u':bandiera_liberia:': u'\U0001F1F1\U0001F1F7',
u':bandiera_lesotho:': u'\U0001F1F1\U0001F1F8',
u':bandiera_lituania:': u'\U0001F1F1\U0001F1F9',
u':bandiera_lussemburgo:': u'\U0001F1F1\U0001F1FA',
u':bandiera_lettonia:': u'\U0001F1F1\U0001F1FB',
u':bandiera_libia:': u'\U0001F1F1\U0001F1FE',
u':bandiera_marocco:': u'\U0001F1F2\U0001F1E6',
u':bandiera_monaco:': u'\U0001F1F2\U0001F1E8',
u':bandiera_moldavia:': u'\U0001F1F2\U0001F1E9',
u':bandiera_montenegro:': u'\U0001F1F2\U0001F1EA',
u':bandiera_saint_martin:': u'\U0001F1F2\U0001F1EB',
u':bandiera_madagascar:': u'\U0001F1F2\U0001F1EC',
u':bandiera_isole_marshall:': u'\U0001F1F2\U0001F1ED',
u':bandiera_macedonia_del_nord:': u'\U0001F1F2\U0001F1F0',
u':bandiera_mali:': u'\U0001F1F2\U0001F1F1',
u':bandiera_myanmar_(birmania):': u'\U0001F1F2\U0001F1F2',
u':bandiera_mongolia:': u'\U0001F1F2\U0001F1F3',
u':bandiera_ras_di_macao:': u'\U0001F1F2\U0001F1F4',
u':bandiera_isole_marianne_settentrionali:': u'\U0001F1F2\U0001F1F5',
u':bandiera_martinica:': u'\U0001F1F2\U0001F1F6',
u':bandiera_mauritania:': u'\U0001F1F2\U0001F1F7',
u':bandiera_montserrat:': u'\U0001F1F2\U0001F1F8',
u':bandiera_malta:': u'\U0001F1F2\U0001F1F9',
u':bandiera_mauritius:': u'\U0001F1F2\U0001F1FA',
u':bandiera_maldive:': u'\U0001F1F2\U0001F1FB',
u':bandiera_malawi:': u'\U0001F1F2\U0001F1FC',
u':bandiera_messico:': u'\U0001F1F2\U0001F1FD',
u':bandiera_malaysia:': u'\U0001F1F2\U0001F1FE',
u':bandiera_mozambico:': u'\U0001F1F2\U0001F1FF',
u':bandiera_namibia:': u'\U0001F1F3\U0001F1E6',
u':bandiera_nuova_caledonia:': u'\U0001F1F3\U0001F1E8',
u':bandiera_niger:': u'\U0001F1F3\U0001F1EA',
u':bandiera_isola_norfolk:': u'\U0001F1F3\U0001F1EB',
u':bandiera_nigeria:': u'\U0001F1F3\U0001F1EC',
u':bandiera_nicaragua:': u'\U0001F1F3\U0001F1EE',
u':bandiera_paesi_bassi:': u'\U0001F1F3\U0001F1F1',
u':bandiera_norvegia:': u'\U0001F1F3\U0001F1F4',
u':bandiera_nepal:': u'\U0001F1F3\U0001F1F5',
u':bandiera_nauru:': u'\U0001F1F3\U0001F1F7',
u':bandiera_niue:': u'\U0001F1F3\U0001F1FA',
u':bandiera_nuova_zelanda:': u'\U0001F1F3\U0001F1FF',
u':bandiera_oman:': u'\U0001F1F4\U0001F1F2',
u':bandiera_panamá:': u'\U0001F1F5\U0001F1E6',
u':bandiera_perù:': u'\U0001F1F5\U0001F1EA',
u':bandiera_polinesia_francese:': u'\U0001F1F5\U0001F1EB',
u':bandiera_papua_nuova_guinea:': u'\U0001F1F5\U0001F1EC',
u':bandiera_filippine:': u'\U0001F1F5\U0001F1ED',
u':bandiera_pakistan:': u'\U0001F1F5\U0001F1F0',
u':bandiera_polonia:': u'\U0001F1F5\U0001F1F1',
u':bandiera_saint-pierre_e_miquelon:': u'\U0001F1F5\U0001F1F2',
u':bandiera_isole_pitcairn:': u'\U0001F1F5\U0001F1F3',
u':bandiera_portorico:': u'\U0001F1F5\U0001F1F7',
u':bandiera_territori_palestinesi:': u'\U0001F1F5\U0001F1F8',
u':bandiera_portogallo:': u'\U0001F1F5\U0001F1F9',
u':bandiera_palau:': u'\U0001F1F5\U0001F1FC',
u':bandiera_paraguay:': u'\U0001F1F5\U0001F1FE',
u':bandiera_qatar:': u'\U0001F1F6\U0001F1E6',
u':bandiera_riunione:': u'\U0001F1F7\U0001F1EA',
u':bandiera_romania:': u'\U0001F1F7\U0001F1F4',
u':bandiera_serbia:': u'\U0001F1F7\U0001F1F8',
u':bandiera_russia:': u'\U0001F1F7\U0001F1FA',
u':bandiera_ruanda:': u'\U0001F1F7\U0001F1FC',
u':bandiera_arabia_saudita:': u'\U0001F1F8\U0001F1E6',
u':bandiera_isole_salomone:': u'\U0001F1F8\U0001F1E7',
u':bandiera_seychelles:': u'\U0001F1F8\U0001F1E8',
u':bandiera_sudan:': u'\U0001F1F8\U0001F1E9',
u':bandiera_svezia:': u'\U0001F1F8\U0001F1EA',
u':bandiera_singapore:': u'\U0001F1F8\U0001F1EC',
u':bandiera_sant’elena:': u'\U0001F1F8\U0001F1ED',
u':bandiera_slovenia:': u'\U0001F1F8\U0001F1EE',
u':bandiera_svalbard_e_jan_mayen:': u'\U0001F1F8\U0001F1EF',
u':bandiera_slovacchia:': u'\U0001F1F8\U0001F1F0',
u':bandiera_sierra_leone:': u'\U0001F1F8\U0001F1F1',
u':bandiera_san_marino:': u'\U0001F1F8\U0001F1F2',
u':bandiera_senegal:': u'\U0001F1F8\U0001F1F3',
u':bandiera_somalia:': u'\U0001F1F8\U0001F1F4',
u':bandiera_suriname:': u'\U0001F1F8\U0001F1F7',
u':bandiera_sud_sudan:': u'\U0001F1F8\U0001F1F8',
u':bandiera_são_tomé_e_príncipe:': u'\U0001F1F8\U0001F1F9',
u':bandiera_el_salvador:': u'\U0001F1F8\U0001F1FB',
u':bandiera_sint_maarten:': u'\U0001F1F8\U0001F1FD',
u':bandiera_siria:': u'\U0001F1F8\U0001F1FE',
u':bandiera_swaziland:': u'\U0001F1F8\U0001F1FF',
u':bandiera_tristan_da_cunha:': u'\U0001F1F9\U0001F1E6',
u':bandiera_isole_turks_e_caicos:': u'\U0001F1F9\U0001F1E8',
u':bandiera_ciad:': u'\U0001F1F9\U0001F1E9',
u':bandiera_terre_australi_francesi:': u'\U0001F1F9\U0001F1EB',
u':bandiera_togo:': u'\U0001F1F9\U0001F1EC',
u':bandiera_thailandia:': u'\U0001F1F9\U0001F1ED',
u':bandiera_tagikistan:': u'\U0001F1F9\U0001F1EF',
u':bandiera_tokelau:': u'\U0001F1F9\U0001F1F0',
u':bandiera_timor_est:': u'\U0001F1F9\U0001F1F1',
u':bandiera_turkmenistan:': u'\U0001F1F9\U0001F1F2',
u':bandiera_tunisia:': u'\U0001F1F9\U0001F1F3',
u':bandiera_tonga:': u'\U0001F1F9\U0001F1F4',
u':bandiera_turchia:': u'\U0001F1F9\U0001F1F7',
u':bandiera_trinidad_e_tobago:': u'\U0001F1F9\U0001F1F9',
u':bandiera_tuvalu:': u'\U0001F1F9\U0001F1FB',
u':bandiera_taiwan:': u'\U0001F1F9\U0001F1FC',
u':bandiera_tanzania:': u'\U0001F1F9\U0001F1FF',
u':bandiera_ucraina:': u'\U0001F1FA\U0001F1E6',
u':bandiera_uganda:': u'\U0001F1FA\U0001F1EC',
u':bandiera_altre_isole_americane_del_pacifico:': u'\U0001F1FA\U0001F1F2',
u':bandiera_nazioni_unite:': u'\U0001F1FA\U0001F1F3',
u':bandiera_stati_uniti:': u'\U0001F1FA\U0001F1F8',
u':bandiera_uruguay:': u'\U0001F1FA\U0001F1FE',
u':bandiera_uzbekistan:': u'\U0001F1FA\U0001F1FF',
u':bandiera_città_del_vaticano:': u'\U0001F1FB\U0001F1E6',
u':bandiera_saint_vincent_e_grenadine:': u'\U0001F1FB\U0001F1E8',
u':bandiera_venezuela:': u'\U0001F1FB\U0001F1EA',
u':bandiera_isole_vergini_britanniche:': u'\U0001F1FB\U0001F1EC',
u':bandiera_isole_vergini_americane:': u'\U0001F1FB\U0001F1EE',
u':bandiera_vietnam:': u'\U0001F1FB\U0001F1F3',
u':bandiera_vanuatu:': u'\U0001F1FB\U0001F1FA',
u':bandiera_wallis_e_futuna:': u'\U0001F1FC\U0001F1EB',
u':bandiera_samoa:': u'\U0001F1FC\U0001F1F8',
u':bandiera_kosovo:': u'\U0001F1FD\U0001F1F0',
u':bandiera_yemen:': u'\U0001F1FE\U0001F1EA',
u':bandiera_mayotte:': u'\U0001F1FE\U0001F1F9',
u':bandiera_sudafrica:': u'\U0001F1FF\U0001F1E6',
u':bandiera_zambia:': u'\U0001F1FF\U0001F1F2',
u':bandiera_zimbabwe:': u'\U0001F1FF\U0001F1FC',
u':bandiera_inghilterra:': u'\U0001F3F4\U000E0067\U000E0062\U000E0065\U000E006E\U000E0067\U000E007F',
u':bandiera_scozia:': u'\U0001F3F4\U000E0067\U000E0062\U000E0073\U000E0063\U000E0074\U000E007F',
u':bandiera_galles:': u'\U0001F3F4\U000E0067\U000E0062\U000E0077\U000E006C\U000E0073\U000E007F'
}
# Reverse lookup table: maps each emoji codepoint sequence back to its Italian
# ``:alias:`` name.  If several aliases map to the same sequence, the one
# appearing last in EMOJI_UNICODE_ITALIAN wins (plain dict-comprehension
# overwrite semantics).
UNICODE_EMOJI_ITALIAN = {v: k for k, v in EMOJI_UNICODE_ITALIAN.items()}
| 66.069662
| 177
| 0.794511
|
e2f34bf334785e2fefa9d85187ec63a6a827788c
| 1,086
|
py
|
Python
|
poznaj/stories/filters.py
|
KlubJagiellonski/poznaj-wroclaw-backend
|
c1e50fb43e5f54a16bf42c70ec890a02063cd58e
|
[
"Apache-2.0"
] | 6
|
2017-01-31T09:17:46.000Z
|
2020-05-28T09:27:21.000Z
|
poznaj/stories/filters.py
|
KlubJagiellonski/poznaj-wroclaw-backend
|
c1e50fb43e5f54a16bf42c70ec890a02063cd58e
|
[
"Apache-2.0"
] | 64
|
2017-01-21T14:04:16.000Z
|
2019-06-17T12:29:07.000Z
|
poznaj/stories/filters.py
|
KlubJagiellonski/poznaj-wroclaw-backend
|
c1e50fb43e5f54a16bf42c70ec890a02063cd58e
|
[
"Apache-2.0"
] | 5
|
2017-01-21T14:18:31.000Z
|
2018-06-28T16:41:16.000Z
|
from django.contrib.gis.geos import GEOSGeometry
from rest_framework import filters
from rest_framework.exceptions import ValidationError
WRONG_LAT_LONG_TEXT = 'Provide float for latitude and longitude'
class FirstPointFilter(filters.BaseFilterBackend):
    """Order a queryset by distance from a point given in query parameters.

    Reads the optional ``lat`` and ``long`` query parameters; when both
    are present, annotates each row with its distance from that point
    (measured against the ``first_point__geom`` field) and orders the
    queryset ascending by that distance.  Otherwise the queryset is
    returned unchanged.
    """

    def filter_queryset(self, request, queryset, view):
        """Apply distance ordering when both coordinates are supplied."""
        latitude = request.query_params.get('lat', None)
        longitude = request.query_params.get('long', None)
        if latitude and longitude:
            point = self.create_point(latitude, longitude)
            return queryset.distance(point, field_name='first_point__geom').order_by('distance')
        # Without both coordinates, leave the queryset untouched.
        return queryset

    @staticmethod
    def create_point(latitude, longitude):
        """Build a WGS84 (srid=4326) point; note WKT order is ``(long lat)``.

        :raises ValidationError: if either coordinate cannot be parsed as
            a float.
        """
        try:
            point = GEOSGeometry(
                'POINT({long} {lat})'.format(long=float(longitude), lat=float(latitude)), srid=4326
            )
        except ValueError:
            raise ValidationError(WRONG_LAT_LONG_TEXT)
        return point

    def to_html(self, request, queryset, view):
        # Bug fix: the example previously read "?lat=10&long10" (missing "=").
        return 'To order by distance use ?lat=10&long=10 in url'
| 33.9375
| 99
| 0.687845
|
7d60cd29a9b36480c0b919d035f1169291d1e5c7
| 2,471
|
py
|
Python
|
deeppavlov/models/preprocessors/sanitizer.py
|
xbodx/DeepPavlov
|
4b60bf162df4294b8b0db3b72786cdd699c674fa
|
[
"Apache-2.0"
] | 5,893
|
2018-02-01T18:13:20.000Z
|
2022-03-31T19:22:21.000Z
|
deeppavlov/models/preprocessors/sanitizer.py
|
xbodx/DeepPavlov
|
4b60bf162df4294b8b0db3b72786cdd699c674fa
|
[
"Apache-2.0"
] | 749
|
2018-01-31T11:36:02.000Z
|
2022-03-30T07:24:22.000Z
|
deeppavlov/models/preprocessors/sanitizer.py
|
xbodx/DeepPavlov
|
4b60bf162df4294b8b0db3b72786cdd699c674fa
|
[
"Apache-2.0"
] | 1,155
|
2018-02-01T10:52:15.000Z
|
2022-03-29T02:12:15.000Z
|
# Copyright 2017 Neural Networks and Deep Learning lab, MIPT
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import unicodedata
from deeppavlov.core.common.registry import register
from deeppavlov.core.models.component import Component
@register('sanitizer')
class Sanitizer(Component):
    """Remove all combining characters like diacritical marks from tokens

    Args:
        diacritical: whether to remove diacritical signs or not
            diacritical signs are something like hats and stress marks
        nums: whether to replace all digits with 1 or not
    """

    def __init__(self,
                 diacritical: bool = True,
                 nums: bool = False,
                 *args, **kwargs) -> None:
        self.diacritical = diacritical
        self.nums = nums
        # Translation table mapping every Unicode combining codepoint to None,
        # so str.translate() deletes those marks after NFD decomposition.
        self.combining_characters = dict.fromkeys([c for c in range(sys.maxunicode)
                                                   if unicodedata.combining(chr(c))])

    def filter_diacritical(self, tokens_batch):
        """Takes batch of tokens and returns the batch with sanitized tokens"""
        sanitized_batch = []
        for utterance in tokens_batch:
            sanitized_utterance = []
            for token in utterance:
                # NFD splits each precomposed character into its base
                # character plus combining marks; translate() then drops
                # the marks.
                token = unicodedata.normalize('NFD', token)
                sanitized_utterance.append(token.translate(self.combining_characters))
            sanitized_batch.append(sanitized_utterance)
        return sanitized_batch

    def replace_nums(self, tokens_batch):
        """Replace every ASCII digit in every token with the character '1'."""
        sanitized_batch = []
        for utterance in tokens_batch:
            sanitized_batch.append([re.sub('[0-9]', '1', token) for token in utterance])
        return sanitized_batch

    def __call__(self, tokens_batch, **kwargs):
        # BUG FIX: the original tested ``self.filter_diacritical`` -- a bound
        # method, which is always truthy -- so diacritics were stripped even
        # when the component was configured with diacritical=False.  Test the
        # configuration flag instead.
        if self.diacritical:
            tokens_batch = self.filter_diacritical(tokens_batch)
        if self.nums:
            tokens_batch = self.replace_nums(tokens_batch)
        return tokens_batch
| 38.015385
| 88
| 0.672602
|
82cdefea57a257341e43bea54720b6871f611aa9
| 1,155
|
py
|
Python
|
python/tests/structural/test_flyweight.py
|
harkhuang/designpatterns
|
dfd6623976410882753913498158dcb0ea70c1d2
|
[
"Apache-2.0"
] | null | null | null |
python/tests/structural/test_flyweight.py
|
harkhuang/designpatterns
|
dfd6623976410882753913498158dcb0ea70c1d2
|
[
"Apache-2.0"
] | null | null | null |
python/tests/structural/test_flyweight.py
|
harkhuang/designpatterns
|
dfd6623976410882753913498158dcb0ea70c1d2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from patterns.structural.flyweight import Card
class TestCard(unittest.TestCase):
    """Verify the flyweight behaviour of :class:`Card`."""

    def test_instances_shall_reference_same_object(self):
        # Equal value + suit must yield the very same (shared) instance.
        first = Card('9', 'h')
        second = Card('9', 'h')
        self.assertEqual(first, second)
        self.assertEqual(id(first), id(second))

    def test_instances_with_different_suit(self):
        """
        shall reference different objects
        """
        first = Card('9', 'a')
        second = Card('9', 'b')
        self.assertNotEqual(id(first), id(second))

    def test_instances_with_different_values(self):
        """
        shall reference different objects
        """
        first = Card('9', 'h')
        second = Card('A', 'h')
        self.assertNotEqual(id(first), id(second))

    def test_instances_shall_share_additional_attributes(self):
        # An attribute set on one flyweight must be visible via any alias.
        expected_attribute_name = 'attr'
        expected_attribute_value = 'value of attr'
        shared = Card('9', 'h')
        shared.attr = expected_attribute_value
        alias = Card('9', 'h')
        self.assertEqual(hasattr(alias, expected_attribute_name), True)
        self.assertEqual(alias.attr, expected_attribute_value)
| 30.394737
| 68
| 0.608658
|
afcda748cc6c23e976f96da7c8cdcf5ee0fbcaa3
| 21,717
|
py
|
Python
|
python/smqtk/representation/key_value/postgres.py
|
joshanderson-kw/SMQTK
|
594e7c733fe7f4e514a1a08a7343293a883a41fc
|
[
"BSD-3-Clause"
] | 82
|
2015-01-07T15:33:29.000Z
|
2021-08-11T18:34:05.000Z
|
python/smqtk/representation/key_value/postgres.py
|
joshanderson-kw/SMQTK
|
594e7c733fe7f4e514a1a08a7343293a883a41fc
|
[
"BSD-3-Clause"
] | 230
|
2015-04-08T14:36:51.000Z
|
2022-03-14T17:55:30.000Z
|
python/smqtk/representation/key_value/postgres.py
|
joshanderson-kw/SMQTK
|
594e7c733fe7f4e514a1a08a7343293a883a41fc
|
[
"BSD-3-Clause"
] | 65
|
2015-01-04T15:00:16.000Z
|
2021-11-19T18:09:11.000Z
|
import logging
import multiprocessing
import pickle
from typing import Hashable, Set
from smqtk.representation.key_value import KeyValueStore, NO_DEFAULT_VALUE
from smqtk.utils.postgres import norm_psql_cmd_string, PsqlConnectionHelper
try:
import psycopg2 # type: ignore
import psycopg2.extras # type: ignore
except ImportError as ex:
logging.getLogger(__name__)\
.warning("Failed to import psycopg2: %s", str(ex))
psycopg2 = None
PSQL_TABLE_CREATE_RLOCK = multiprocessing.RLock()
class PostgresKeyValueStore (KeyValueStore):
"""
PostgreSQL-backed key-value storage.
"""
    class SqlTemplates (object):
        """
        Container for static PostgreSQL queries used by the containing class.

        Each template is pre-normalized via ``norm_psql_cmd_string`` and
        expects ``str.format`` substitution of identifiers (table/column
        names).  Runtime values are always bound separately as psycopg2
        query parameters (``%(...)s``) so they are properly escaped.
        """

        # Create the backing table if it does not exist; keys are the
        # primary key, both columns store pickled bytes (BYTEA).
        UPSERT_TABLE_TMPL = norm_psql_cmd_string("""
            CREATE TABLE IF NOT EXISTS {table_name:s} (
              {key_col:s} BYTEA NOT NULL,
              {value_col:s} BYTEA NOT NULL,
              PRIMARY KEY ({key_col:s})
            );
        """)

        # Select an arbitrary expression over the whole table.
        SELECT_TMPL = norm_psql_cmd_string("""
            SELECT {query:s} FROM {table_name:s};
        """)

        # Select rows whose key column LIKE-matches a bound pattern.
        SELECT_LIKE_TMPL = norm_psql_cmd_string("""
            SELECT {query:s}
            FROM {table_name:s}
            WHERE {key_col:s} LIKE %(key_like)s
        """)

        # Select rows whose key is a member of a bound tuple of keys.
        SELECT_MANY_TMPL = norm_psql_cmd_string("""
            SELECT {query:s}
            FROM {table_name:s}
            WHERE {key_col:s} IN %(key_tuple)s
        """)

        # Insert a key/value pair, replacing the value on key conflict.
        UPSERT_TMPL = norm_psql_cmd_string("""
            INSERT INTO {table_name:s} ({key_col:s}, {value_col:s})
                VALUES (%(key)s, %(val)s)
                ON CONFLICT ({key_col:s})
                    DO UPDATE
                        SET {value_col:s} = EXCLUDED.{value_col:s}
        """)

        # Delete rows whose key column LIKE-matches a bound pattern.
        DELETE_LIKE_TMPL = norm_psql_cmd_string("""
            DELETE FROM {table_name:s}
            WHERE {key_col:s} LIKE %(key_like)s
        """)

        # Delete every row in the table.
        DELETE_ALL = norm_psql_cmd_string("""
            DELETE FROM {table_name:s}
        """)
    @classmethod
    def is_usable(cls):
        """Report whether this implementation can be used.

        Usable only when the optional ``psycopg2`` dependency imported
        successfully (the module-level name is set to None on import
        failure).
        """
        return psycopg2 is not None
def __init__(self, table_name="data_set",
key_col='key', value_col='value', db_name='postgres',
db_host=None, db_port=None, db_user=None, db_pass=None,
batch_size=1000, pickle_protocol=-1,
read_only=False, create_table=True):
"""
Initialize a PostgreSQL-backed data set instance.
:param table_name: Name of the table to use.
:type table_name: str
:param key_col: Name of the column containing the UUID signatures.
:type key_col: str
:param value_col: Name of the table column that will contain
serialized elements.
:type value_col: str
:param db_name: The name of the database to connect to.
:type db_name: str
:param db_host: Host address of the Postgres server. If None, we
assume the server is on the local machine and use the UNIX socket.
This might be a required field on Windows machines (not tested yet).
:type db_host: str | None
:param db_port: Port the Postgres server is exposed on. If None, we
assume the default port (5423).
:type db_port: int | None
:param db_user: Postgres user to connect as. If None, postgres
defaults to using the current accessing user account name on the
operating system.
:type db_user: str | None
:param db_pass: Password for the user we're connecting as. This may be
None if no password is to be used.
:type db_pass: str | None
:param batch_size: For queries that handle sending or
receiving many queries at a time, batch queries based on this size.
If this is None, then no batching occurs.
The advantage of batching is that it reduces the memory impact for
queries dealing with a very large number of elements (don't have to
store the full query for all elements in RAM), but the transaction
will be some amount slower due to splitting the query into multiple
transactions.
:type batch_size: int | None
:param pickle_protocol: Pickling protocol to use. We will use -1 by
default (latest version, probably binary).
:type pickle_protocol: int
:param read_only: Only allow read actions against this index.
Modification actions will throw a ReadOnlyError exceptions.
:type read_only: bool
:param create_table: If this instance should try to create the storing
table before actions are performed against it when not set to be
read-only. If the configured user does not have sufficient
permissions to create the table and it does not currently exist, an
exception will be raised.
:type create_table: bool
"""
super(PostgresKeyValueStore, self).__init__()
self._table_name = table_name
self._key_col = key_col
self._value_col = value_col
self._batch_size = batch_size
self._pickle_protocol = pickle_protocol
self._read_only = bool(read_only)
self._create_table = create_table
# Checking parameters where necessary
if self._batch_size is not None:
self._batch_size = int(self._batch_size)
assert self._batch_size > 0, \
"A given batch size must be greater than 0 in size " \
"(given: %d)." % self._batch_size
assert -1 <= self._pickle_protocol <= 2, \
("Given pickle protocol is not in the known valid range [-1, 2]. "
"Given: %s." % self._pickle_protocol)
# helper structure for SQL operations.
self._psql_helper = PsqlConnectionHelper(
db_name, db_host, db_port, db_user, db_pass,
itersize=batch_size,
table_upsert_lock=PSQL_TABLE_CREATE_RLOCK,
)
# Only set table upsert if not read-only.
if not self._read_only and self._create_table:
# NOT read-only, so allow table upsert.
self._psql_helper.set_table_upsert_sql(
self.SqlTemplates.UPSERT_TABLE_TMPL.format(
table_name=self._table_name,
key_col=self._key_col,
value_col=self._value_col
)
)
@staticmethod
def _py_to_bin(k):
"""
Convert a python hashable value into psycopg2.Binary via pickle.
:param k: Python object instance to be converted into a
``psycopg2.Binary`` instance via ``pickle`` serialization.
:type k: object
:return: ``psycopg2.Binary`` buffer instance to use for insertion into
or query against a table.
:rtype: psycopg2.Binary
"""
return psycopg2.Binary(pickle.dumps(k))
@staticmethod
def _bin_to_py(b):
"""
Un-"translate" psycopg2.Binary value (buffer) to a python type.
:param b: ``psycopg2.Binary`` buffer instance as retrieved from a
PostgreSQL query.
:type b: psycopg2.Binary
:return: Python object instance as loaded via pickle from the given
``psycopg2.Binary`` buffer.
:rtype: object
"""
return pickle.loads(bytes(b))
def get_config(self):
"""
Return a JSON-compliant dictionary that could be passed to this class's
``from_config`` method to produce an instance with identical
configuration.
In the common case, this involves naming the keys of the dictionary
based on the initialization argument names as if it were to be passed
to the constructor via dictionary expansion.
:return: JSON type compliant configuration dictionary.
:rtype: dict
"""
return {
"table_name": self._table_name,
"key_col": self._key_col,
"value_col": self._value_col,
"db_name": self._psql_helper.db_name,
"db_host": self._psql_helper.db_host,
"db_port": self._psql_helper.db_port,
"db_user": self._psql_helper.db_user,
"db_pass": self._psql_helper.db_pass,
"batch_size": self._batch_size,
"pickle_protocol": self._pickle_protocol,
"read_only": self._read_only,
"create_table": self._create_table,
}
def __repr__(self):
"""
Return representative string for this class.
:return: Representative string for this class.
:rtype: str
"""
return super(PostgresKeyValueStore, self).__repr__() \
% ("table_name: %s, key_col: %s, value_col: %s, "
"db_name: %s, db_host: %s, db_port: %s, db_user: %s, "
"db_pass: %s, batch_size: %d, pickle_protocol: %d, "
"read_only: %s, create_table: %s"
% (self._table_name, self._key_col, self._value_col,
self._psql_helper.db_name, self._psql_helper.db_host,
self._psql_helper.db_port, self._psql_helper.db_user,
self._psql_helper.db_pass, self._batch_size,
self._pickle_protocol, self._read_only, self._create_table))
def count(self):
"""
:return: The number of key-value relationships in this store.
:rtype: int | long
"""
def cb(cur):
cur.execute(self.SqlTemplates.SELECT_TMPL.format(
query='count(%s)' % self._key_col,
table_name=self._table_name,
))
return list(self._psql_helper.single_execute(
cb, yield_result_rows=True
))[0][0]
def keys(self):
"""
:return: Iterator over keys in this store.
:rtype: collections.abc.Iterator[collections.abc.Hashable]
"""
def cb(cur):
cur.execute(self.SqlTemplates.SELECT_TMPL.format(
query=self._key_col,
table_name=self._table_name,
))
# We can use a named cursor because this is a select statement as well
# as server table size may be large.
for r in self._psql_helper.single_execute(cb, yield_result_rows=True,
named=True):
# Convert from buffer -> string -> python
yield self._bin_to_py(r[0])
def values(self):
"""
:return: Iterator over values in this store. Values are not guaranteed
to be in any particular order.
:rtype: collections.abc.Iterator[object]
"""
def cb(cur):
cur.execute(self.SqlTemplates.SELECT_TMPL.format(
query=self._value_col,
table_name=self._table_name,
))
for r in self._psql_helper.single_execute(cb, yield_result_rows=True,
named=True):
# Convert from buffer -> string -> python
yield self._bin_to_py(r[0])
def is_read_only(self):
"""
:return: True if this instance is read-only and False if it is not.
:rtype: bool
"""
return self._read_only
def has(self, key):
"""
Check if this store has a value for the given key.
:param key: Key to check for a value for.
:type key: collections.abc.Hashable
:return: If this store has a value for the given key.
:rtype: bool
"""
super(PostgresKeyValueStore, self).has(key)
# Try to select based on given key value. If any rows are returned,
# there is clearly a key that matches.
q = self.SqlTemplates.SELECT_LIKE_TMPL.format(
query='true',
table_name=self._table_name,
key_col=self._key_col,
)
def cb(cur):
cur.execute(q, {'key_like': self._py_to_bin(key)})
return bool(list(self._psql_helper.single_execute(
cb, yield_result_rows=True
)))
def add(self, key, value):
"""
Add a key-value pair to this store.
:param key: Key for the value. Must be hashable.
:type key: collections.abc.Hashable
:param value: Python object to store.
:type value: object
:raises ReadOnlyError: If this instance is marked as read-only.
:return: Self.
:rtype: KeyValueStore
"""
super(PostgresKeyValueStore, self).add(key, value)
q = self.SqlTemplates.UPSERT_TMPL.format(
table_name=self._table_name,
key_col=self._key_col,
value_col=self._value_col,
)
v = {
'key': self._py_to_bin(key),
'val': self._py_to_bin(value),
}
def cb(cur):
cur.execute(q, v)
list(self._psql_helper.single_execute(cb))
return self
def add_many(self, d):
"""
Add multiple key-value pairs at a time into this store as represented in
the provided dictionary `d`.
:param d: Dictionary of key-value pairs to add to this store.
:type d: dict[collections.abc.Hashable, object]
:return: Self.
:rtype: KeyValueStore
"""
super(PostgresKeyValueStore, self).add_many(d)
q = self.SqlTemplates.UPSERT_TMPL.format(
table_name=self._table_name,
key_col=self._key_col,
value_col=self._value_col,
)
# Iterator over transformed inputs into values for statement.
def val_iter():
for key, val in d.items():
yield {
'key': self._py_to_bin(key),
'val': self._py_to_bin(val)
}
def cb(cur, v_batch):
psycopg2.extras.execute_batch(cur, q, v_batch,
page_size=self._batch_size)
list(self._psql_helper.batch_execute(val_iter(), cb, self._batch_size))
return self
def remove(self, key):
"""
Remove a single key-value entry.
:param key: Key to remove.
:type key: collections.abc.Hashable
:raises ReadOnlyError: If this instance is marked as read-only.
:raises KeyError: The given key is not present in this store and no
default value given.
:return: Self.
:rtype: KeyValueStore
"""
super(PostgresKeyValueStore, self).remove(key)
if key not in self:
raise KeyError(key)
q = self.SqlTemplates.DELETE_LIKE_TMPL.format(
table_name=self._table_name,
key_col=self._key_col,
)
v = dict(
key_like=self._py_to_bin(key)
)
def cb(cursor):
cursor.execute(q, v)
list(self._psql_helper.single_execute(cb))
return self
def _check_contained_keys(self, keys):
"""
Check if the table contains the following keys.
:param set keys: Keys to check for.
:return: An set of keys NOT present in the table.
:rtype: set[collections.abc.Hashable]
"""
def key_like_iter():
for k_ in keys:
yield self._py_to_bin(k_)
has_many_q = self.SqlTemplates.SELECT_MANY_TMPL.format(
query=self._key_col,
table_name=self._table_name,
key_col=self._key_col,
)
# Keys found in table
matched_keys: Set[Hashable] = set()
def cb(cursor, batch):
cursor.execute(has_many_q, {'key_tuple': tuple(batch)})
matched_keys.update(self._bin_to_py(r[0]) for r in cursor)
list(self._psql_helper.batch_execute(key_like_iter(), cb,
self._batch_size))
return keys - matched_keys
def remove_many(self, keys):
"""
Remove multiple keys and associated values.
:param keys: Iterable of keys to remove. If this is empty this method
does nothing.
:type keys: collections.abc.Iterable[collections.abc.Hashable]
:raises ReadOnlyError: If this instance is marked as read-only.
:raises KeyError: The given key is not present in this store and no
default value given. The store is not modified if any key is
invalid.
:return: Self.
:rtype: KeyValueStore
"""
super(PostgresKeyValueStore, self).remove_many(keys)
keys = set(keys)
# Check that all keys requested for removal are contained in our table
# before attempting to remove any of them.
key_diff = self._check_contained_keys(keys)
# If we're trying to remove a key not in our table, appropriately raise
# a KeyError.
if key_diff:
if len(key_diff) == 1:
raise KeyError(list(key_diff)[0])
else:
raise KeyError(key_diff)
# Proceed with removal
def key_like_iter():
""" Iterator over query value sets. """
for k_ in keys:
yield self._py_to_bin(k_)
del_q = self.SqlTemplates.DELETE_LIKE_TMPL.format(
table_name=self._table_name,
key_col=self._key_col,
)
def del_cb(cursor, v_batch):
# Execute the query with a list of value dicts.
psycopg2.extras.execute_batch(cursor, del_q,
[{'key_like': k} for k in v_batch],
page_size=self._batch_size)
list(self._psql_helper.batch_execute(key_like_iter(), del_cb,
self._batch_size))
return self
def get(self, key, default=NO_DEFAULT_VALUE):
"""
Get the value for the given key.
*NOTE:* **Implementing sub-classes are responsible for raising a
``KeyError`` where appropriate.**
:param key: Key to get the value of.
:type key: collections.abc.Hashable
:param default: Optional default value if the given key is not present
in this store. This may be any value except for the
``NO_DEFAULT_VALUE`` constant (custom anonymous class instance).
:type default: object
:raises KeyError: The given key is not present in this store and no
default value given.
:return: Deserialized python object stored for the given key.
:rtype: object
"""
q = self.SqlTemplates.SELECT_LIKE_TMPL.format(
query=self._value_col,
table_name=self._table_name,
key_col=self._key_col,
)
v = {'key_like': self._py_to_bin(key)}
def cb(cur):
cur.execute(q, v)
rows = list(self._psql_helper.single_execute(
cb, yield_result_rows=True
))
# If no rows and no default, raise KeyError.
if len(rows) == 0:
if default is NO_DEFAULT_VALUE:
raise KeyError(key)
else:
return default
return self._bin_to_py(rows[0][0])
def get_many(self, keys, default=NO_DEFAULT_VALUE):
"""
Get the values for the given keys.
*NOTE:* **Implementing sub-classes are responsible for raising a
``KeyError`` where appropriate.**
:param keys: The keys for which associated values are requested.
:type keys: collections.abc.Iterable[collections.abc.Hashable]
:param default: Optional default value if a given key is not present
in this store. This may be any value except for the
``NO_DEFAULT_VALUE`` constant (custom anonymous class instance).
:type default: object
:raises KeyError: A given key is not present in this store and no
default value given.
:return: Iterable of deserialized python objects stored for the given
keys in the order that the corresponding keys were provided.
:rtype: collections.abc.Iterable
"""
sql_command_string = self.SqlTemplates.SELECT_MANY_TMPL.format(
query=', '.join((self._key_col, self._value_col)),
table_name=self._table_name,
key_col=self._key_col
)
keys = list(keys)
sql_keys = tuple(self._py_to_bin(key_) for key_ in keys)
sql_variables = {'key_tuple': sql_keys}
def postgres_callback(cursor):
cursor.execute(sql_command_string, sql_variables)
retrieved_dict = {
self._bin_to_py(row_[0]): self._bin_to_py(row_[1])
for row_ in self._psql_helper.single_execute(
postgres_callback, yield_result_rows=True
)
}
if default is NO_DEFAULT_VALUE:
for key_ in keys:
yield retrieved_dict[key_]
else:
for key_ in keys:
yield retrieved_dict.get(key_, default)
def clear(self):
"""
Clear this key-value store.
*NOTE:* **Implementing sub-classes should call this super-method. This
super method should not be considered a critical section for thread
safety.**
:raises ReadOnlyError: If this instance is marked as read-only.
"""
q = self.SqlTemplates.DELETE_ALL.format(table_name=self._table_name)
def cb(cur):
cur.execute(q)
list(self._psql_helper.single_execute(cb))
| 33.985915
| 80
| 0.588341
|
1e7b6a7675215459ae82f91ea5178fd7cfe13741
| 808
|
py
|
Python
|
pandas/pandas2.py
|
changeui99/python-study
|
a0226ff2361d5a08ea0661d23b7af0a4feab4876
|
[
"MIT"
] | null | null | null |
pandas/pandas2.py
|
changeui99/python-study
|
a0226ff2361d5a08ea0661d23b7af0a4feab4876
|
[
"MIT"
] | null | null | null |
pandas/pandas2.py
|
changeui99/python-study
|
a0226ff2361d5a08ea0661d23b7af0a4feab4876
|
[
"MIT"
] | null | null | null |
import pandas as pd
import numpy as np
if __name__ == "__main__":
    # --- Series arithmetic: values align on index labels; labels present in
    # only one operand yield NaN.
    first = pd.Series(data=[1, 2, 3, 4], index=['a', 'b', 'c', 'd'])
    second = pd.Series(data=[6, 3, 2, 1], index=['d', 'c', 'b', 'e'])
    print(first)
    print(second)
    print(first + second)
    print(first * second)
    print(first ** second)

    # --- Boolean selection on values and on the index.
    s = pd.Series(np.arange(10), np.arange(10) + 1)
    print(s[s >= 3])
    print(s[s.index > 5])
    print(s[(s.index > 5) & (s <= 8)])

    # --- Mutation, insertion, drop and slicing.
    s = pd.Series(np.arange(100, 105), ['a', 'c', 'b', 'd', 'e'])
    print(s)
    s['a'] = 200
    s['k'] = 300
    print(s)
    dropped = s.drop('k')  # set inplace as true to apply on s directly
    print(s)
    print(dropped)
    positional = pd.Series(np.arange(100, 105))
    print(positional[1:3])  # integer index: position-based, end-exclusive
    print(s[1:3])           # label index + int slice: positional
    print(s['c':'d'])       # label slice: end-inclusive
| 23.085714
| 66
| 0.502475
|
f9f51aa656665def07f8e0ffbd1dc1555e1ff00e
| 2,558
|
py
|
Python
|
flexget/plugins/sites/horriblesubs.py
|
davidcollom/Flexget
|
cd763e04afdf6da8f1673dd567a42d55d4cb3b6c
|
[
"MIT"
] | null | null | null |
flexget/plugins/sites/horriblesubs.py
|
davidcollom/Flexget
|
cd763e04afdf6da8f1673dd567a42d55d4cb3b6c
|
[
"MIT"
] | null | null | null |
flexget/plugins/sites/horriblesubs.py
|
davidcollom/Flexget
|
cd763e04afdf6da8f1673dd567a42d55d4cb3b6c
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import logging
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.utils.cached_input import cached
from flexget.utils.requests import RequestException
from flexget.utils.soup import get_soup
log = logging.getLogger('horriblesubs')
class HorribleSubs(object):
    """
    Give latest horriblesubs releases
    """
    # Plugin config is a simple on/off boolean.
    schema = {'type': 'boolean'}

    @staticmethod
    def horrible_entries(requests, page_url):
        """Scrape a HorribleSubs listing page into a list of Entry objects."""
        entries = []
        try:
            soup = get_soup(requests.get(page_url).content)
        except RequestException as e:
            # Best-effort: log and return what we have (nothing) on failure.
            log.error('HorribleSubs request failed: %s', e)
            return entries
        for label_cell in soup.findAll('td', attrs={'class': 'dl-label'}):
            title = '[HorribleSubs] {0}'.format(str(label_cell.find('i').string))
            log.debug('Found title `{0}`'.format(title))
            urls = []
            for link_span in label_cell.parent.findAll(
                    'span', attrs={'class': 'dl-link'}):
                # skip non torrent based links
                if 'hs-ddl-link' in link_span.parent.attrs['class']:
                    continue
                url = str(link_span.find('a').attrs['href'])
                log.debug('Found url `{0}`'.format(url))
                urls.append(url)
            # Push magnet links to the back of the list (iterate over a copy
            # while mutating the original) -- a bit hacky.
            for url in urls[:]:
                if url.startswith('magnet'):
                    urls.remove(url)
                    urls.append(url)
            entries.append(Entry(title=title, url=urls[0], urls=urls))
        return entries

    @cached('horriblesubs')
    def on_task_input(self, task, config):
        """Input entry point: return latest releases when enabled."""
        if not config:
            return
        return HorribleSubs.horrible_entries(
            task.requests, 'http://horriblesubs.info/lib/latest.php')

    def search(self, task, entry, config):
        """Search entry point: query the site once per search string."""
        if not config:
            return
        entries = []
        for search_string in entry.get('search_strings', [entry['title']]):
            log.debug('Searching `{0}`'.format(search_string))
            found = HorribleSubs.horrible_entries(
                task.requests,
                'http://horriblesubs.info/lib/search.php?value={0}'.format(
                    search_string))
            entries.extend(found)
        return entries
@event('plugin.register')
def register_plugin():
    # Register under the 'search' group so the class serves both as an input
    # plugin ('horriblesubs') and as a search source; api_ver=2 selects the
    # modern plugin API.
    plugin.register(HorribleSubs, 'horriblesubs', groups=['search'], api_ver=2)
| 35.041096
| 105
| 0.60907
|
c92df652ce6ef87d64d4639e180b26d1712e8f4b
| 473
|
py
|
Python
|
uni_ticket/migrations/0169_alter_ticketcategory_footer_text.py
|
biotech2021/uniTicket
|
8c441eac18e67a983e158326b1c4b82f00f1f1ef
|
[
"Apache-2.0"
] | 15
|
2019-09-06T06:47:08.000Z
|
2022-01-17T06:39:54.000Z
|
uni_ticket/migrations/0169_alter_ticketcategory_footer_text.py
|
biotech2021/uniTicket
|
8c441eac18e67a983e158326b1c4b82f00f1f1ef
|
[
"Apache-2.0"
] | 69
|
2019-09-06T12:03:19.000Z
|
2022-03-26T14:30:53.000Z
|
uni_ticket/migrations/0169_alter_ticketcategory_footer_text.py
|
biotech2021/uniTicket
|
8c441eac18e67a983e158326b1c4b82f00f1f1ef
|
[
"Apache-2.0"
] | 13
|
2019-09-11T10:54:20.000Z
|
2021-11-23T09:09:19.000Z
|
# Generated by Django 3.2.7 on 2021-11-05 11:52
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: redefines `footer_text` on
    # `ticketcategory` (free text shown in the printable document footer).

    dependencies = [
        ('uni_ticket', '0168_ticketcategory_footer_text'),
    ]

    operations = [
        migrations.AlterField(
            model_name='ticketcategory',
            name='footer_text',
            # NOTE(review): 'stmapabile' appears to be a typo of the Italian
            # 'stampabile' ("printable"); fixing it requires a NEW migration
            # rather than editing this (likely already applied) one.
            field=models.TextField(blank=True, null=True, verbose_name='Testo in calce per versione stmapabile'),
        ),
    ]
| 24.894737
| 113
| 0.64482
|
49c8e1d843cb51ae9efab3b23f861be2d0eb33e1
| 112
|
py
|
Python
|
ex_025.py
|
antonioravila/Exercicios-CEV-Python
|
aef19b7872b90b8ad40b0b1b54f7e063832181e3
|
[
"MIT"
] | null | null | null |
ex_025.py
|
antonioravila/Exercicios-CEV-Python
|
aef19b7872b90b8ad40b0b1b54f7e063832181e3
|
[
"MIT"
] | null | null | null |
ex_025.py
|
antonioravila/Exercicios-CEV-Python
|
aef19b7872b90b8ad40b0b1b54f7e063832181e3
|
[
"MIT"
] | null | null | null |
# Read the full name and check whether "Silva" is one of its words.
# (The original built `str(list).lower()` and substring-searched the list's
# repr, which also matched names like "Dasilva"; exact word membership on the
# lower-cased, split name avoids those false positives.)
nome = input("Insira seu nome completo: ").strip().lower().split()
print(f'Seu nome tem Silva? {"silva" in nome}')
| 37.333333
| 63
| 0.669643
|
a85fe97daa15ecd3f9e48bd9a6e69d68e176cc02
| 31,436
|
py
|
Python
|
common/policy/policy_test_helper.py
|
atsgen/tf-test
|
2748fcd81491450c75dadc71849d2a1c11061029
|
[
"Apache-2.0"
] | 5
|
2020-09-29T00:36:57.000Z
|
2022-02-16T06:51:32.000Z
|
common/policy/policy_test_helper.py
|
atsgen/tf-test
|
2748fcd81491450c75dadc71849d2a1c11061029
|
[
"Apache-2.0"
] | 27
|
2019-11-02T02:18:34.000Z
|
2022-02-24T18:49:08.000Z
|
common/policy/policy_test_helper.py
|
atsgen/tf-test
|
2748fcd81491450c75dadc71849d2a1c11061029
|
[
"Apache-2.0"
] | 20
|
2019-11-28T16:02:25.000Z
|
2022-01-06T05:56:58.000Z
|
from __future__ import print_function
from builtins import str
from builtins import range
import os
import copy
import traceback
from vnc_api.vnc_api import *
from vnc_api.gen.resource_test import *
from quantum_test import *
from nova_test import *
from policy_test import *
from vn_test import *
import string
def comp_rules_from_policy_to_system(self):
    """ Comparing Policy rule to system rule(agent) .

    For each VM of the selected project: collect the policies attached to
    every virtual network of the VM from the API server, translate and
    aggregate their rules into the expected agent ACE format, then compare
    the expected ACEs against what the vrouter agents actually report.

    :return: (result, msg) -- overall boolean result and the per-compute-node
        error map from the last comparison (the initial ``[]`` if nothing
        was compared).
    """
    # Initializing the connections to quantum/api/nova/agent fixtures from self
    self.connections = ContrailConnections(self.project_inputs, self.logger)
    self.agent_inspect = self.connections.agent_inspect
    self.quantum_h = self.connections.quantum_h
    self.nova_h = self.connections.nova_h
    self.orch = self.connections.orch
    self.api_s_inspect = self.connections.api_server_inspect
    self.logger = self.inputs.logger
    self.project_name = self.project_inputs.project_name
    result = True
    msg = []
    #
    # Step 1 :Get all projects
    project_names, project_ids, project_domains = get_project_list(self)
    for pr in range(len(project_names)):
        pro_vm_list = None
        if self.project_name == project_names[pr]:
            # Step 2:Check VMs are exist for selected project
            pro_vm_list = self.orch.get_vm_list(project_id=project_ids[pr])
        else:
            pro_vm_list = None
        if pro_vm_list is not None:
            # Arragenging all VM's
            vm_list = []
            old_vn = ''
            for i in range(len(pro_vm_list)):
                vm = str(pro_vm_list[i].name)
                vm_list.append(vm)
            # Step 2:Verify quantum rules for each VM.
            for vm in range(len(vm_list)):
                policys_list = []
                vn_list = []
                # Step 3 :Get All VNs of selected VM
                vns_of_vm = self.orch.get_networks_of_vm(pro_vm_list[vm])
                for i in range(len(vns_of_vm)):
                    vn_obj = str(vns_of_vm[i])
                    vn_list.append(vn_obj)
                # Verifying the quntum rules for each VN
                for vn in vn_list:
                    # old_vn guards against re-checking a VN just processed.
                    if old_vn != vn:
                        # step 4:Get the policys associated with vn from API
                        # server
                        policys_list = self.api_s_inspect.get_cs_vn_policys(
                            project=project_names[pr],
                            domain=project_domains[pr],
                            vn=vn,
                            refresh=True)
                        if policys_list == []:
                            break
                        else:
                            pass
                        self.logger.debug("Order of the policy's list:%s" %
                                          (policys_list))
                        user_rules_tx = {}
                        rules_by_vn = {}
                        rules_by_vn[vn] = []
                        # Step 5 :Aggregating all attached policys rules for
                        # each network.
                        for policy in policys_list:
                            # Get the rules from quantum client
                            policy_detail = self.vnc_lib.network_policy_read(
                                fq_name=[u'default-domain',
                                         str(project_names[pr]), str(policy)])
                            self.logger.debug(
                                "%s, %s, %s, %s, %s" %
                                (policy_detail,
                                 policys_list,
                                 project_names,
                                 pro_vm_list,
                                 vn_list))
                            # Total no of rules for each policy
                            list_of_rules = policy_detail.network_policy_entries.exportDict()
                            list_of_rules = list_of_rules['PolicyEntriesType']['policy_rule']
                            no_of_rules = []
                            # Expand rules whose src/dst address is a *policy*
                            # reference into one rule per VN attached to that
                            # policy (self.topo.policy_vn maps policy -> VNs).
                            for each_rule in list_of_rules:
                                if ((each_rule['dst_addresses'][0]
                                     ['network_policy'] is not None) and
                                    (each_rule['src_addresses'][0]
                                     ['network_policy'] is not None)):
                                    # Both ends are policy refs: cartesian
                                    # expansion over both policies' VNs.
                                    dst_pol = str(each_rule['dst_addresses'][0]
                                                  ['network_policy'])
                                    src_pol = str(each_rule['src_addresses'][0]
                                                  ['network_policy'])
                                    for each_vn in self.topo.policy_vn[
                                            dst_pol.split(':')[2]]:
                                        new_rule = copy.deepcopy(each_rule)
                                        new_fqn = [project_domains[pr],
                                                   project_names[pr], each_vn]
                                        new_vnfqn = ':'.join(new_fqn)
                                        new_rule['dst_addresses'][0][
                                            'virtual_network'] = new_vnfqn
                                        new_rule['dst_addresses'][0][
                                            'network_policy'] = None
                                        for srcvn in self.topo.policy_vn[
                                                src_pol.split(':')[2]]:
                                            new_rule2 = copy.deepcopy(new_rule)
                                            new_fqns = [project_domains[pr],
                                                        project_names[pr], srcvn]
                                            new_vnfqns = ':'.join(new_fqns)
                                            new_rule2['src_addresses'][0][
                                                'virtual_network'] = new_vnfqns
                                            new_rule2['src_addresses'][0][
                                                'network_policy'] = None
                                            no_of_rules.append(new_rule2)
                                elif ((each_rule['dst_addresses'][0][
                                        'network_policy'] is not None) and
                                      (each_rule['src_addresses'][0][
                                        'network_policy'] is None)):
                                    # Only the destination is a policy ref.
                                    dst_pol = str(each_rule['dst_addresses'][0][
                                        'network_policy'])
                                    for each_vn in self.topo.policy_vn[
                                            dst_pol.split(':')[2]]:
                                        new_rule = copy.deepcopy(each_rule)
                                        new_fqn = [project_domains[pr],
                                                   project_names[pr], each_vn]
                                        new_vnfqn = ':'.join(new_fqn)
                                        new_rule['dst_addresses'][0][
                                            'virtual_network'] = new_vnfqn
                                        new_rule['dst_addresses'][0][
                                            'network_policy'] = None
                                        no_of_rules.append(new_rule)
                                elif ((each_rule['dst_addresses'][0][
                                        'network_policy'] is None) and
                                      (each_rule['src_addresses'][0][
                                        'network_policy'] is not None)):
                                    # Only the source is a policy ref.
                                    src_pol = str(each_rule['src_addresses'][0][
                                        'network_policy'])
                                    for srcvn in self.topo.policy_vn[
                                            src_pol.split(':')[2]]:
                                        new_rule = copy.deepcopy(each_rule)
                                        new_fqn = [project_domains[pr],
                                                   project_names[pr], srcvn]
                                        new_vnfqn = ':'.join(new_fqn)
                                        new_rule['src_addresses'][0][
                                            'virtual_network'] = new_vnfqn
                                        new_rule['src_addresses'][0][
                                            'network_policy'] = None
                                        no_of_rules.append(new_rule)
                                else:
                                    # Plain VN-to-VN rule: keep as-is.
                                    no_of_rules.append(each_rule)
                            # Traslation of quantum rules to ACES
                            fq_name = [project_domains[pr],
                                       project_names[pr], vn]
                            fq_vn = ':'.join(fq_name)
                            self.logger.debug(
                                "Traslation of quantum rules to ACES format")
                            updated_quantum_rules, uni_rule = tx_quantum_rules_to_aces(
                                no_of_rules, fq_vn)
                            user_rules_tx[policy] = updated_quantum_rules
                            # Step 5b: Aggregate rules by network
                            self.logger.debug("vn is %s, vn_policy is %s" %
                                              (vn, policy))
                            rules_by_vn[vn] += user_rules_tx[policy]
                        # Step 6:Remove the duplicate rules if the multilple
                        # policies have same rule
                        rules_by_vn[vn] = trim_duplicate_rules(rules_by_vn[vn])
                        # Step 7:Translate quantum- ACEs to system format and
                        # update ACE IDs
                        if rules_by_vn[vn] != []:
                            rules_by_vn[vn] = tx_quntum_def_aces_to_system(
                                fq_vn, rules_by_vn[vn], uni_rule)
                            rules_by_vn[vn] = policy_test_utils.update_rule_ace_id(
                                rules_by_vn[vn])
                        self.logger.debug("VN: %s, expected ACE's is " % (vn))
                        for r in rules_by_vn[vn]:
                            self.logger.debug("%s" %
                                              (json.dumps(r, sort_keys=True)))
                        # end building VN ACE's from user rules
                        # Step 8:Get actual from vna in compute nodes [referred
                        # as cn] and compare with quntum rules and update the
                        # result
                        rules_by_all_vn = rules_by_vn[vn]
                        project_name = project_names[pr]
                        # NOTE(review): result/msg are overwritten on every
                        # VN; only the last comparison's outcome survives.
                        result, msg = comp_user_rules_to_system_rules(
                            self, vn, rules_by_all_vn, policys_list,
                            pro_vm_list, vm_list, vm, project_name)
                        self.logger.debug(
                            "Verify policy rules for other vn if it is present")
                        old_vn = vn
                    else:
                        pass
        else:
            self.logger.debug(
                "Skipping the policy rule comparison since VM's are not exist for selected project:%s" %
                (project_names[pr]))
    self.logger.info(
        "Policy rules comparison with system for all Virtual networks are done")
    return (result, msg)
# end comp_rules_from_policy_to_system
def get_project_list(self):
    """Return (names, uuids, domains) for the user-created projects.

    Reads the project list from the API server and filters out the built-in
    projects ('default-project', 'invisible_to_admin', 'service') as well as
    any project whose name starts with 'vpc'.
    """
    all_projects = self.api_s_inspect.get_cs_domain()['domain']['projects']
    names, uuids, domains = [], [], []
    builtin_projects = ('default-project', 'invisible_to_admin', 'service')
    for entry in all_projects:
        domain = str(entry['to'][0])
        name = str(entry['to'][1])
        uuid = str(entry['uuid'])
        # Skip system projects and auto-created vpc* projects.
        if name in builtin_projects or name.startswith('vpc'):
            continue
        names.append(name)
        uuids.append(uuid)
        domains.append(domain)
    return (names, uuids, domains)
def tx_quantum_rules_to_aces(no_of_rules, fq_vn):
    ''' Generating the quantum rules to aces

    Translate API/quantum policy rule dicts into the intermediate ACE dict
    format used for comparison against agent rules.

    :param no_of_rules: List of policy rule dicts (direction, protocol,
        src/dst addresses and ports, action_list).
    :param fq_vn: Fully-qualified name of the local VN; substituted for the
        special 'local' network token.
    :return: (user_rules_tx, uni_rule) where user_rules_tx is the translated
        rule list and uni_rule is always an empty dict here (kept for the
        caller's unpacking).
    '''
    total_rules = len(no_of_rules)
    user_rules_tx = []
    uni_rule = {}
    # step 1: Getting all tuples list from quantum rules :
    for i in range(total_rules):
        temp_rule = {}
        temp_rule['direction'] = str(no_of_rules[i]['direction'])
        temp_rule['proto_l'] = str(no_of_rules[i]['protocol'])
        dest = str(no_of_rules[i]['dst_addresses'][0]['virtual_network'])
        if dest == 'any':
            temp_rule['dst'] = 'any'
        elif dest == 'local':
            # 'local' resolves to the VN under test.
            temp_rule['dst'] = fq_vn
        else:
            # dst_ntw=string.split(dest,':')
            # temp_rule['dst']=dst_ntw[2]
            temp_rule['dst'] = dest
        temp_rule['simple_action'] = str(
            no_of_rules[i]['action_list']['simple_action'])
        temp_rule['action_l'] = [
            no_of_rules[i]['action_list']]
        source_addr = str(
            no_of_rules[i]['src_addresses'][0]['virtual_network'])
        if source_addr == 'any':
            temp_rule['src'] = 'any'
        elif source_addr == 'local':
            temp_rule['src'] = fq_vn
        else:
            # src_addr=string.split(source_addr,':')
            # temp_rule['src']=src_addr[2]
            temp_rule['src'] = source_addr
        # Ports of (-1, -1) mean "any port".
        if ((no_of_rules[i]['src_ports'][0]['start_port']) == -1
                and (no_of_rules[i]['src_ports'][0]['end_port']) == -1):
            temp_rule['src_port_l'] = {'max': '65535', 'min': '0'}
        else:
            a = str(no_of_rules[i]['src_ports'][0]['start_port'])
            b = str(no_of_rules[i]['src_ports'][0]['end_port'])
            # NOTE(review): 'max' receives start_port and 'min' receives
            # end_port -- this looks swapped relative to the any-port case
            # above; confirm against the agent rule format before changing.
            temp_rule['src_port_l'] = {'max': a, 'min': b}
        if ((no_of_rules[i]['dst_ports'][0]['start_port']) == -
                1 and (no_of_rules[i]['dst_ports'][0]['end_port']) == -
                1):
            temp_rule['dst_port_l'] = {'max': '65535', 'min': '0'}
        else:
            a = str(no_of_rules[i]['dst_ports'][0]['start_port'])
            b = str(no_of_rules[i]['dst_ports'][0]['end_port'])
            # NOTE(review): same apparent min/max swap as for src ports.
            temp_rule['dst_port_l'] = {'max': a, 'min': b}
        user_rules_tx.append(temp_rule)
    # step 2 :protocol value mapping
    for rule in user_rules_tx:
        if rule['proto_l'] == 'any':
            rule['proto_l'] = {'max': '255', 'min': '0'}
        else:
            rule['proto_l'] = {'max': str(rule['proto_l']),
                               'min': str(rule['proto_l'])}
    # step 3: expanding rules if bidir rule
    # for all pass rule don't expand
    for rule in user_rules_tx:
        if (rule['direction'] == '<>' and
            (rule['proto_l']['max'] != '255' or rule['proto_l']['min'] != '0' or
             rule['src'] != 'any' or rule['dst'] != 'any' or
             rule['src_port_l']['max'] != '65535' or
             rule['src_port_l']['min'] != '0' or
             rule['dst_port_l']['max'] != '65535' or
             rule['dst_port_l']['min'] != '0')):
            rule['direction'] = '>'
            pos = user_rules_tx.index(rule)
            new_rule = copy.deepcopy(rule)
            # update newly copied rule: swap address/ports & insert
            # NOTE(review): inserting into user_rules_tx while iterating it;
            # the inserted reverse rule has direction '>' so it is not
            # re-expanded, but it IS visited by this loop.
            new_rule['src'], new_rule['dst'] = new_rule['dst'], new_rule['src']
            new_rule['src_port_l'], new_rule['dst_port_l'] = new_rule[
                'dst_port_l'], new_rule['src_port_l']
            if 'action_l' in new_rule and 'apply_service' in new_rule['action_l'][0]:
                # Service chains apply only on the VN-under-test side.
                if rule['src'] == fq_vn:
                    new_rule['action_l'][0]['apply_service'] = []
                if rule['dst'] == fq_vn:
                    user_rules_tx[pos]['action_l'][0]['apply_service'] = []
            user_rules_tx.insert(pos + 1, new_rule)
    return (user_rules_tx, uni_rule)
# end of tx_quantum_rules_to_aces
def trim_duplicate_rules(rules_by_vn):
    """Drop rules that duplicate an earlier rule's match 5-tuple.

    Two rules are considered duplicates when they agree on src, dst,
    src_port_l, dst_port_l and proto_l (action fields are ignored); the
    first occurrence wins. The original implementation popped elements from
    the list it was enumerating, which skipped neighbours of removed items
    and, due to an inverted equality guard, kept fully-identical rules while
    removing ones that differed only in non-match fields.

    :param rules_by_vn: List of ACE rule dicts.
    :return: New list with only the first rule per 5-tuple, original order
        preserved.
    """
    seen_tuples = []  # port/proto values are dicts (unhashable) -> list scan
    unique_rules = []
    for rule in rules_by_vn:
        signature = (rule['src'], rule['dst'], rule['src_port_l'],
                     rule['dst_port_l'], rule['proto_l'])
        if signature in seen_tuples:
            continue
        seen_tuples.append(signature)
        unique_rules.append(rule)
    return unique_rules
# end of trim_duplicate_rules
def comp_user_rules_to_system_rules(
        self,
        vn,
        rules_by_all_vn,
        policy,
        all_vms,
        vm_list,
        vm,
        project_name):
    """Compare expected ACEs for *vn* against each compute node's agent.

    :param vn: VN name under test.
    :param rules_by_all_vn: Expected (translated) ACE list for the VN.
    :param policy: Policies attached to the VN (used for logging only).
    :param all_vms: VM objects of the project; all_vms[vm] selects the VM.
    :param vm_list: VM names; vm_list[vm] selects the name (logging).
    :param vm: Index into all_vms/vm_list of the VM being checked.
    :param project_name: Project the VN belongs to.
    :return: (result, err_msg) -- False plus {compute: {vn: msg}} on any
        mismatch, otherwise (True, {}).
    """
    # Step 1:Get actual from vna in compute nodes [referred as cn]
    result = True
    cn_vna_rules_by_vn = {}  # {'vn1':[{...}, {..}], 'vn2': [{..}]}
    err_msg = {}  # To capture error {compute: {vn: error_msg}}
    for compNode in self.inputs.compute_ips:
        self.logger.debug("Verify rules expected in CN if VN-VM in CN")
        self.logger.debug("CN: %s, Check for expected data" % (compNode))
        inspect_h = self.agent_inspect[compNode]
        # Only compare on nodes actually hosting the VM (tap intf present).
        got_vm_name = inspect_h.get_vna_tap_interface_by_vm(
            str(all_vms[vm].id))
        if got_vm_name:
            print("checking for vn %s in compute %s" % (vn, compNode))
            vn_fq_name = inspect_h.get_vna_vn(
                vn_name=vn, project=project_name)['name']
            vna_acl = inspect_h.get_vna_acl_by_vn(vn_fq_name)
            if vna_acl:
                cn_vna_rules_by_vn[vn] = vna_acl['entries']  # system_rules
            else:
                cn_vna_rules_by_vn[vn] = []
            # compare with test input & assert on failure
            ret = policy_test_utils.compare_rules_list(
                rules_by_all_vn, cn_vna_rules_by_vn[vn],
                logger=self.logger)
            # A truthy ret indicates a mismatch report from the comparator.
            if ret:
                result = ret['state']
                msg = ret['msg']
                err_msg[compNode] = {vn: msg}
                self.logger.error("CN: %s, VN: %s, test result not expected, \
                msg: %s" % (compNode, vn, msg))
                self.logger.debug("expected rules: ")
                for r in rules_by_all_vn:
                    self.logger.debug(r)
                self.logger.debug("actual rules from system: ")
                for r in cn_vna_rules_by_vn[vn]:
                    self.logger.debug(r)
                result = False
            else:
                self.logger.debug(
                    "CN: %s, VN: %s, result of expected rules check passed" %
                    (compNode, vn))
                self.logger.info(
                    "Validated the rules for VM:%s with attached policy:%s and vn:%s " %
                    (vm_list[vm], policy, vn))
        else:
            pass
    return (result, err_msg)
# end of comp_user_rules_to_system_rules
def tx_quntum_def_aces_to_system(test_vn, user_rules_tx, uni_rule):
    '''convert ACEs derived from user rules to system format:
    1. For every user rule, add deny rule; skip adding duplicates
    2. For non-empty policy, add permit-all at the end
    3. add ace_id, rule_type
    4. Update VN to FQDN format
    5. remove direction and simple_action fields @end..

    :param test_vn: Fully-qualified name of the VN under test.
    :param user_rules_tx: Translated user rules (mutated and returned).
    :param uni_rule: Extra deny rule to append for unidirectional cases
        (falsy dict when not applicable).
    :return: user_rules_tx in system/agent-comparable form.
    '''
    if user_rules_tx == []:
        return user_rules_tx
    # Wildcard protocol/port template used as the base for all synthesized
    # allow-all / deny-all entries below.
    any_proto_port_rule = {
        'direction': '>', 'proto_l': {'max': '255', 'min': '0'},
        'src_port_l': {'max': '65535', 'min': '0'},
        'dst_port_l': {'max': '65535', 'min': '0'}}
    # step 0: check & build allow_all for local VN if rules are defined in
    # policy
    test_vn_allow_all_rule = copy.copy(any_proto_port_rule)
    test_vn_allow_all_rule['simple_action'] = 'pass'
    test_vn_allow_all_rule['action_l'] = ['pass']
    test_vn_allow_all_rule['src'], test_vn_allow_all_rule[
        'dst'] = test_vn, test_vn
    # check the rule for any protocol with same network exist and for deny
    # rule
    test_vn_deny_all_rule = copy.copy(any_proto_port_rule)
    test_vn_deny_all_rule['simple_action'] = 'deny'
    test_vn_deny_all_rule['action_l'] = ['deny']
    test_vn_deny_all_rule['src'], test_vn_deny_all_rule[
        'dst'] = test_vn, test_vn
    # step 1: check & add permit-all rule for same VN but not for 'any'
    # network
    last_rule = copy.copy(any_proto_port_rule)
    last_rule['simple_action'] = 'pass'
    last_rule['action_l'] = [{'simple_action': 'pass', 'gateway_name': None,
                              'apply_service': [], 'mirror_to': None,
                              'assign_routing_instance': None,
                              'log': False, 'alert': False,
                              'qos_action': None}]
    last_rule['src'], last_rule['dst'] = 'any', 'any'
    # check any rule exist in policy :
    # If an explicit any/any rule exists, everything after it is shadowed;
    # final_user_rule keeps only the rules up to and including it.
    final_user_rule = get_any_rule_if_exist(last_rule, user_rules_tx)
    # step 2: check & add deny_all for every user-created rule
    system_added_rules = []
    for rule in user_rules_tx:
        pos = len(user_rules_tx)
        new_rule = copy.deepcopy(rule)
        new_rule['proto_l'] = {'max': '255', 'min':
                               '0'}
        new_rule['direction'] = '>'
        new_rule['src_port_l'], new_rule['dst_port_l'] = {
            'max': '65535', 'min': '0'}, {'max': '65535', 'min': '0'}
        new_rule['simple_action'] = 'deny'
        new_rule['action_l'] = ['deny']
        system_added_rules.append(new_rule)
    # step to check any one of the rule is any protocol and source and dst ntw
    # is test vn then check for the duplicate rules
    final_any_rules = get_any_rule_if_src_dst_same_ntw_exist(
        test_vn_allow_all_rule, test_vn_deny_all_rule, user_rules_tx)
    if final_any_rules:
        user_rules_tx = final_any_rules
    else:
        pass
    # Skip adding rules if they already exist...
    # print json.dumps(system_added_rules, sort_keys=True)
    if not policy_test_utils.check_rule_in_rules(
            test_vn_allow_all_rule,
            user_rules_tx):
        user_rules_tx.append(test_vn_allow_all_rule)
    for rule in system_added_rules:
        if not policy_test_utils.check_rule_in_rules(rule, user_rules_tx):
            user_rules_tx.append(rule)
    # step 3: check & add permit-all rule for same VN but not for 'any'
    # network
    last_rule = copy.copy(any_proto_port_rule)
    last_rule['simple_action'], last_rule['action_l'] = 'pass', ['pass']
    last_rule['src'], last_rule['dst'] = 'any', 'any'
    # if rule is unidirectional then append the deny rule if src and dst is
    # different
    if uni_rule:
        user_rules_tx.append(uni_rule)
    else:
        pass
    # if the first rule is not 'any rule ' then append the last rule defined
    # above.
    # NOTE(review): any_rule_flag reflects only the LAST rule examined, not
    # the first as the comment above says -- confirm the intent.
    for rule in user_rules_tx:
        any_rule_flag = True
        if ((rule['src'] == 'any') and (rule['dst'] == 'any')):
            any_rule_flag = False
        else:
            pass
    if any_rule_flag:
        user_rules_tx.append(last_rule)
    else:
        pass
    # triming the duplicate rules
    user_rules_tx = policy_test_utils.remove_dup_rules(user_rules_tx)
    # triming the protocol with any option for rest of the fileds
    tcp_any_rule = {
        'proto_l': {
            'max': 'tcp', 'min': 'tcp'}, 'src': 'any', 'dst': 'any', 'src_port_l': {
            'max': '65535', 'min': '0'}, 'dst_port_l': {
            'max': '65535', 'min': '0'}}
    udp_any_rule = {
        'proto_l': {
            'max': 'udp', 'min': 'udp'}, 'src': 'any', 'dst': 'any', 'src_port_l': {
            'max': '65535', 'min': '0'}, 'dst_port_l': {
            'max': '65535', 'min': '0'}}
    icmp_any_rule = {
        'proto_l': {
            'max': 'icmp', 'min': 'icmp'}, 'src': 'any', 'dst': 'any', 'src_port_l': {
            'max': '65535', 'min': '0'}, 'dst_port_l': {
            'max': '65535', 'min': '0'}}
    icmp_match, index_icmp = check_5tuple_in_rules(
        icmp_any_rule, user_rules_tx)
    tcp_match, index_tcp = check_5tuple_in_rules(tcp_any_rule, user_rules_tx)
    udp_match, index_udp = check_5tuple_in_rules(udp_any_rule, user_rules_tx)
    # Rules after a protocol-wide any/any rule for the same protocol are
    # shadowed; drop them.
    if icmp_match:
        for rule in user_rules_tx[index_icmp + 1:len(user_rules_tx)]:
            if rule['proto_l'] == {'max': 'icmp', 'min': 'icmp'}:
                user_rules_tx.remove(rule)
            else:
                pass
    if tcp_match:
        for rule in user_rules_tx[index_tcp + 1:len(user_rules_tx)]:
            if rule['proto_l'] == {'max': 'tcp', 'min': 'tcp'}:
                user_rules_tx.remove(rule)
            else:
                pass
    if udp_match:
        for rule in user_rules_tx[index_udp + 1:len(user_rules_tx)]:
            if rule['proto_l'] == {'max': 'udp', 'min': 'udp'}:
                user_rules_tx.remove(rule)
            else:
                pass
    # if any rule is exist the it will execute
    if final_user_rule:
        user_rules_tx = final_user_rule
    else:
        pass
    # step 4: add ace_id, type, src to all rules
    for rule in user_rules_tx:
        # NOTE(review): list.index finds the first EQUAL rule, so duplicate
        # rules would share an ace_id (remove_dup_rules above mitigates).
        rule['ace_id'] = str(user_rules_tx.index(rule) + 1)
        rule['rule_type'] = 'Terminal'  # currently checking policy aces only
        # if rule['src'] != 'any' :
        # m = re.match(r"(\S+):(\S+):(\S+)", rule['src'])
        # if not m: rule['src'] = ':'.join(self.inputs.project_fq_name) + ':' + rule['src']
        # if rule['dst'] != 'any':
        # m = re.match(r"(\S+):(\S+):(\S+)", rule['dst'])
        # if not m: rule['dst'] = ':'.join(self.inputs.project_fq_name) + ':' + rule['dst']
        # NOTE(review): if 'direction' is missing, the bare except's
        # `continue` also skips the 'simple_action' delete below;
        # rule.pop('direction', None) would be safer.
        try:
            del rule['direction']
        except:
            continue
        try:
            del rule['simple_action']
        except:
            continue
    return user_rules_tx
# end tx_user_def_aces_to_system
def get_any_rule_if_exist(all_rule, user_rules_tx):
    """Return the rules up to and including the first match of *all_rule*.

    When *all_rule* is not present in *user_rules_tx* at all, an empty list
    is returned (callers treat that as "no truncation needed").
    """
    truncated = []
    if not policy_test_utils.check_rule_in_rules(all_rule, user_rules_tx):
        return truncated
    for candidate in user_rules_tx:
        truncated.append(candidate)
        # Everything after the any/any rule is shadowed; stop here.
        if candidate == all_rule:
            break
    return truncated
# end get_any_rule_if_exist
def get_any_rule_if_src_dst_same_ntw_exist(
        test_vn_allow_all_rule,
        test_vn_deny_all_rule,
        user_rules_tx):
    """Truncate the rule list at the first VN-wide allow-all or deny-all.

    Returns the rules up to and including the first occurrence of either
    sentinel rule; an empty list when neither sentinel is present.
    """
    truncated = []
    sentinel_present = (
        policy_test_utils.check_rule_in_rules(
            test_vn_allow_all_rule, user_rules_tx) or
        policy_test_utils.check_rule_in_rules(
            test_vn_deny_all_rule, user_rules_tx))
    if not sentinel_present:
        return truncated
    for candidate in user_rules_tx:
        truncated.append(candidate)
        # Rules after a same-VN allow-all/deny-all are shadowed; stop.
        if candidate == test_vn_allow_all_rule or \
                candidate == test_vn_deny_all_rule:
            break
    return truncated
# end get_any_rule_if_src_dst_same_ntw_exist
def check_5tuple_in_rules(rule, rules):
    '''Check if the 5-tuple of the given rule exists in the given rule-set.

    Compares only proto_l, src, dst, src_port_l and dst_port_l; any other
    keys (e.g. action fields) are ignored.

    :param rule: Rule dict whose 5-tuple is searched for.
    :param rules: List of rule dicts to search.
    :return: (match, index) -- match is True with the position of the first
        5-tuple match, otherwise False with the index of the last rule
        examined (-1 for an empty list). The original implementation raised
        NameError on an empty list and re-scanned with list.index(), which
        could report the wrong position when equal rules were duplicated.
    '''
    match_keys = ('proto_l', 'src', 'dst', 'src_port_l', 'dst_port_l')
    match = False
    idx = -1
    for idx, candidate in enumerate(rules):
        match = all(candidate[k] == rule[k] for k in match_keys)
        if match:
            break
    return (match, idx)
# end check_5tuple_in_rules
    def _create_n_policy_n_rules(self, number_of_policy, valid_rules, number_of_dummy_rules, option='api', verify=True):
        ''' Create n number of policy & n number of rules
        created policy will be policy1,policy2,policy3...policyn so on

        Each policy gets number_of_dummy_rules auto-generated udp deny
        rules followed by the caller-supplied valid_rules appended at the
        end.  Returns the list of created policy objects.

        Sample rules_list:
        src_ports and dst_ports : can be 'any'/tuple/list as shown below
        protocol : 'any' or a string representing a protocol number : ICMP(1), TCP(6), UDP(17)
        simple_action : pass/deny
        source_network/dest_network : VN name
        rules= [
            {
               'direction'     : '<>', 'simple_action' : 'pass',
               'protocol'      : 'any',
               'source_network': vn1_name,
               'src_ports'     : 'any',
               'src_ports'     : (10,100),
               'dest_network'  : vn1_name,
               'dst_ports'     : [100,10],
             },
                ]
        '''
        # NOTE(review): the ``option`` parameter is immediately overwritten
        # here, so callers cannot actually select anything else -- confirm
        # whether other values were ever meant to be supported.
        option='api'
        # Seed ports for the generated dummy rules; each dummy rule gets
        # the next (x, x)/(y, y) port pair.
        x = 80
        y = 80
        rules_list = []
        policy_name = 'policy'
        self.logger.debug('Creating %d dummy rules' % (number_of_dummy_rules))
        total_policy = number_of_policy
        rules = [
            {
                'direction': '<>', 'simple_action': 'deny',
                'protocol': 'udp', 'src_ports': (x, x),
                'dst_ports': (y, y),
                'source_network': 'any',
                'dest_network': 'any',
            },
        ]
        # Generate the dummy rules, bumping the port pair each iteration so
        # every dummy rule is distinct.
        while len(rules_list) < number_of_dummy_rules:
            rules_list.append(rules[0])
            x += 1
            y += 1
            rules = [
                {
                    'direction': '<>', 'simple_action': 'deny',
                    'protocol': 'udp', 'src_ports': (x, x),
                    'dst_ports': (y, y),
                    'source_network': 'any',
                    'dest_network': 'any',
                },
            ]
        # end while
        # append valid rule at the end
        self.logger.debug('Appending %d valid rules to end of the rule list' %
                          (len(valid_rules)))
        for rule in valid_rules:
            rules_list.append(rule)
        self.logger.debug('Using policy fixture to create %d policy with %d rules' %
                          (number_of_policy, len(rules_list)))
        number_of_policy += 1
        policy_objs_list = []
        for i in range(1, number_of_policy):
            if i > 1:
                # For every policy after the first, shift the dummy rules'
                # port ranges so policies do not share identical rules.
                for j in range(0, number_of_dummy_rules):
                    rules_list[j]['src_ports'] = (rules_list[j]['src_ports'][0]+(number_of_dummy_rules + 5) , rules_list[j]['src_ports'][1]+(number_of_dummy_rules + 5))
                    rules_list[j]['dst_ports'] = (rules_list[j]['dst_ports'][0]+(number_of_dummy_rules + 5) , rules_list[j]['dst_ports'][1]+(number_of_dummy_rules + 5))
            try:
                policy_fixture = self.useFixture(
                    PolicyFixture(policy_name=policy_name + str(i),
                                  rules_list=rules_list, inputs=self.inputs,
                                  connections=self.connections))
            except Exception as e:
                self.logger.error(
                    'Exception %s occured while creating %d policy with %d rules' %
                    (e, total_policy, len(rules_list)))
                self.assertTrue(
                    False, 'Exception occured while creating %d policy with %d rules' %
                    (total_policy, len(rules_list)))
            policy_objs_list.append(policy_fixture.policy_obj)
            if verify == True:
                policy_fixture.verify_policy_in_api_server()
        # end for
        return policy_objs_list
    # end _create_n_policy_n_rules
| 43.479945
| 164
| 0.505662
|
fe278ce42fa409a4f81d97af69b21d524c59f59d
| 2,809
|
py
|
Python
|
spyder/app/cli_options.py
|
HelenaNascimento/spyder
|
2857a6f1adb93c7a069d205dc3a4fe3bbf4f85c1
|
[
"MIT"
] | 1
|
2018-10-17T22:09:50.000Z
|
2018-10-17T22:09:50.000Z
|
spyder/app/cli_options.py
|
HelenaNascimento/spyder
|
2857a6f1adb93c7a069d205dc3a4fe3bbf4f85c1
|
[
"MIT"
] | null | null | null |
spyder/app/cli_options.py
|
HelenaNascimento/spyder
|
2857a6f1adb93c7a069d205dc3a4fe3bbf4f85c1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
import argparse
def get_options(argv=None):
    """Parse Spyder's command line arguments.

    Returns a tuple ``(options, args)`` where ``options`` is the parsed
    argparse namespace and ``args`` is the list of positional file paths.
    """
    cli = argparse.ArgumentParser(usage="spyder [options] files")
    cli.add_argument(
        '--new-instance', action='store_true', default=False,
        help="Run a new instance of Spyder, even if the single "
             "instance mode has been turned on (default)")
    cli.add_argument(
        '--defaults', dest="reset_to_defaults",
        action='store_true', default=False,
        help="Reset configuration settings to defaults")
    cli.add_argument(
        '--reset', dest="reset_config_files",
        action='store_true', default=False,
        help="Remove all configuration files!")
    cli.add_argument(
        '--optimize', action='store_true', default=False,
        help="Optimize Spyder bytecode (this may require "
             "administrative privileges)")
    cli.add_argument(
        '-w', '--workdir', dest="working_directory", default=None,
        help="Default working directory")
    cli.add_argument(
        '--hide-console', action='store_true', default=False,
        help="Hide parent console window (Windows)")
    cli.add_argument(
        '--show-console', action='store_true', default=False,
        help="(Deprecated) Does nothing, now the default behavior "
             "is to show the console")
    cli.add_argument(
        '--multithread', dest="multithreaded",
        action='store_true', default=False,
        help="Internal console is executed in another thread "
             "(separate from main application thread)")
    cli.add_argument(
        '--profile', action='store_true', default=False,
        help="Profile mode (internal test, "
             "not related with Python profiling)")
    cli.add_argument(
        '--window-title', type=str, default=None,
        help="String to show in the main window title")
    cli.add_argument(
        '-p', '--project', default=None, type=str, dest="project",
        help="Path that contains an Spyder project")
    cli.add_argument(
        '--opengl', default=None, dest="opengl_implementation",
        choices=['software', 'desktop', 'gles'],
        help="OpenGL implementation to pass to Qt")
    cli.add_argument('files', nargs='*')
    parsed = cli.parse_args(argv)
    return parsed, parsed.files
| 51.072727
| 82
| 0.587754
|
98751f83e02ca52c7b9a95c339613cdf69ad5d4d
| 3,730
|
py
|
Python
|
tests/test_cards/test_actions/test_sentry.py
|
evanofslack/pyminion
|
0d0bfc6d8e84e9f33e617c7d01b6edb649166290
|
[
"MIT"
] | 5
|
2021-12-17T20:34:55.000Z
|
2022-01-24T15:18:05.000Z
|
tests/test_cards/test_actions/test_sentry.py
|
evanofslack/pyminion
|
0d0bfc6d8e84e9f33e617c7d01b6edb649166290
|
[
"MIT"
] | 31
|
2021-10-29T21:05:00.000Z
|
2022-03-22T03:27:14.000Z
|
tests/test_cards/test_actions/test_sentry.py
|
evanofslack/pyminion
|
0d0bfc6d8e84e9f33e617c7d01b6edb649166290
|
[
"MIT"
] | 1
|
2021-12-23T18:32:47.000Z
|
2021-12-23T18:32:47.000Z
|
from pyminion.expansions.base import copper, estate, gold, sentry
from pyminion.game import Game
from pyminion.players import Human
def test_sentry_no_reorder(human: Human, game: Game, monkeypatch):
    """Declining to reorder keeps the two peeked cards in deck order."""
    human.deck.cards = []
    for card in (gold, copper, copper):
        human.deck.add(card)
    human.hand.add(sentry)
    assert len(human.discard_pile) == 0
    assert len(game.trash) == 0
    assert human.deck.cards[1].name == "Copper"
    assert human.deck.cards[0].name == "Gold"
    scripted = iter(["", "", "no"])  # trash nothing, discard nothing, no reorder
    monkeypatch.setattr("builtins.input", lambda prompt: next(scripted))
    human.play(sentry, game)
    assert len(human.hand) == 1
    assert len(human.playmat) == 1
    assert len(human.discard_pile) == 0
    assert len(game.trash) == 0
    assert human.state.actions == 1
    assert len(human.deck) == 2
    assert [card.name for card in human.deck.cards] == ["Gold", "Copper"]
def test_sentry_yes_reorder(human: Human, game: Game, monkeypatch):
    """Accepting the reorder prompt swaps the two peeked cards."""
    human.deck.cards = []
    for card in (gold, copper, copper):
        human.deck.add(card)
    human.hand.add(sentry)
    assert len(human.discard_pile) == 0
    assert len(game.trash) == 0
    assert human.deck.cards[1].name == "Copper"
    assert human.deck.cards[0].name == "Gold"
    scripted = iter(["", "", "yes"])  # trash nothing, discard nothing, reorder
    monkeypatch.setattr("builtins.input", lambda prompt: next(scripted))
    human.play(sentry, game)
    assert len(human.hand) == 1
    assert len(human.playmat) == 1
    assert len(human.discard_pile) == 0
    assert len(game.trash) == 0
    assert human.state.actions == 1
    assert len(human.deck) == 2
    assert [card.name for card in human.deck.cards] == ["Copper", "Gold"]
def test_sentry_trash_two(human: Human, game: Game, monkeypatch):
    """Trashing both revealed cards sends them to the trash, not the deck."""
    human.deck.cards = []
    for card in (estate, copper, copper):
        human.deck.add(card)
    human.hand.add(sentry)
    assert len(human.discard_pile) == 0
    assert len(game.trash) == 0
    assert human.deck.cards[1].name == "Copper"
    assert human.deck.cards[0].name == "Estate"
    scripted = iter(["copper, estate"])  # trash both revealed cards
    monkeypatch.setattr("builtins.input", lambda prompt: next(scripted))
    human.play(sentry, game)
    assert len(human.hand) == 1
    assert len(human.discard_pile) == 0
    assert len(game.trash) == 2
    assert len(human.deck) == 0
def test_sentry_discard_two(human: Human, game: Game, monkeypatch):
    """Discarding both revealed cards moves them to the discard pile."""
    human.deck.cards = []
    for card in (estate, copper, copper):
        human.deck.add(card)
    human.hand.add(sentry)
    assert len(human.discard_pile) == 0
    assert len(game.trash) == 0
    assert human.deck.cards[1].name == "Copper"
    assert human.deck.cards[0].name == "Estate"
    scripted = iter(["", "copper, estate"])  # trash nothing, discard both
    monkeypatch.setattr("builtins.input", lambda prompt: next(scripted))
    human.play(sentry, game)
    assert len(human.hand) == 1
    assert len(human.discard_pile) == 2
    assert len(game.trash) == 0
    assert len(human.deck) == 0
def test_sentry_trash_one_discard_one(human: Human, game: Game, monkeypatch):
    """One card can be trashed and the other discarded in the same play."""
    human.deck.cards = []
    for card in (estate, copper, copper):
        human.deck.add(card)
    human.hand.add(sentry)
    assert len(human.discard_pile) == 0
    assert len(game.trash) == 0
    assert human.deck.cards[1].name == "Copper"
    assert human.deck.cards[0].name == "Estate"
    scripted = iter(["copper", "estate"])  # trash the copper, discard the estate
    monkeypatch.setattr("builtins.input", lambda prompt: next(scripted))
    human.play(sentry, game)
    assert len(human.hand) == 1
    assert len(human.discard_pile) == 1
    assert len(game.trash) == 1
    assert len(human.deck) == 0
| 30.57377
| 77
| 0.660054
|
29facc4ca3b28bf8ccb8cd7a651b882f12055092
| 634
|
py
|
Python
|
basic_examples/getDatabases.py
|
flemic/ETD
|
ed0434c4683684243ad769a666f581944cc9433a
|
[
"BSD-3-Clause"
] | 2
|
2015-12-21T13:10:32.000Z
|
2020-09-20T01:28:43.000Z
|
basic_examples/getDatabases.py
|
flemic/ETD
|
ed0434c4683684243ad769a666f581944cc9433a
|
[
"BSD-3-Clause"
] | null | null | null |
basic_examples/getDatabases.py
|
flemic/ETD
|
ed0434c4683684243ad769a666f581944cc9433a
|
[
"BSD-3-Clause"
] | 1
|
2020-09-20T01:28:54.000Z
|
2020-09-20T01:28:54.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""getDatabases.py: Get a list of databases in the ETD service."""
__author__ = "Filip Lemic"
__copyright__ = "Copyright 2015, Telecommunication Networks Group (TKN), TU Berlin"
__version__ = "1.0.0"
__maintainer__ = "Filip Lemic"
__email__ = "lemic@tkn.tu-berlin.de"
__status__ = "Development"
# NOTE: Python 2 script (urllib2 and the bare print statement below).
import sys
import urllib2
import json
# The URL where server listens
apiURL = 'http://localhost:5000/'
# Query the ETD REST API (v1.0) for all known databases as JSON.
req = urllib2.Request(apiURL + 'etd/v1.0/database', headers={"Content-Type": "application/json"})
resp = urllib2.urlopen(req)
databases = json.loads(resp.read())
# Only the database names (the top-level JSON keys) are of interest here.
print databases.keys()
| 26.416667
| 97
| 0.725552
|
d3fda723319472ae931f0b90517c5a1c99038eba
| 1,260
|
py
|
Python
|
2016/day9.py
|
bloy/adventofcode
|
3c98325666f18bfb1af08aac876156e055eed9f6
|
[
"MIT"
] | null | null | null |
2016/day9.py
|
bloy/adventofcode
|
3c98325666f18bfb1af08aac876156e055eed9f6
|
[
"MIT"
] | 2
|
2019-12-01T15:44:32.000Z
|
2019-12-01T15:44:32.000Z
|
2016/day9.py
|
bloy/adventofcode
|
3c98325666f18bfb1af08aac876156e055eed9f6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import pprint
import re
# Matches one compression marker "(AxB)": take the next A characters and
# repeat them B times.
COMPRESSION_REGEX = r'\((\d+)x(\d+)\)'
def solve1(data):
    """Return the version-1 decompressed length of *data*.

    Markers inside a repeated section are treated as plain characters.
    Only the length is accumulated -- the original implementation built
    the entire decompressed string in memory just to call len() on it,
    which is needlessly expensive for large inputs.
    """
    total = 0
    split = re.split(COMPRESSION_REGEX, data, maxsplit=1)
    while split[0] != data:
        total += len(split[0])
        (run, count, data) = (int(split[1]), int(split[2]), split[3])
        # len(data[:run] * count) without materialising the repetition.
        total += min(run, len(data)) * count
        data = data[run:]
        split = re.split(COMPRESSION_REGEX, data, maxsplit=1)
    total += len(split[0])
    return total
def decompressed_length(compressed):
    """Return the length of *compressed* after full (version 2) expansion.

    Markers found inside a repeated section are expanded recursively; the
    expanded text itself is never materialised, only its length.
    """
    total = 0
    parts = re.split(r'\((\d+)x(\d+)\)', compressed, maxsplit=1)
    while parts[0] != compressed:
        total += len(parts[0])
        run, count, compressed = int(parts[1]), int(parts[2]), parts[3]
        total += decompressed_length(compressed[:run]) * count
        compressed = compressed[run:]
        parts = re.split(r'\((\d+)x(\d+)\)', compressed, maxsplit=1)
    total += len(parts[0])
    return total
def solve2(data):
    # Part 2: markers inside repeated data are themselves expanded
    # (delegates to the recursive length computation above).
    return decompressed_length(data)
if __name__ == '__main__':
    # Puzzle input is expected alongside the script.
    with open('day9_input.txt') as f:
        data = f.read().strip()
    # Sample input useful for sanity-checking part 2 (expected 241920).
    # data = '(27x12)(20x12)(13x14)(7x10)(1x12)A'
    print(solve1(data))
    print(solve2(data))
| 26.25
| 75
| 0.62381
|
ce19af2a86064f288c1025e6942cd72c190120ab
| 4,240
|
py
|
Python
|
dffml/plugins.py
|
0dust/dffml
|
28cca0761a5d7963c4b0a952c0ae1bce7ce9b20f
|
[
"MIT"
] | 1
|
2019-03-11T17:24:17.000Z
|
2019-03-11T17:24:17.000Z
|
dffml/plugins.py
|
Devansh252/dffml
|
9fe9f18553215b217f7dd9bea9d2d3b3b2b19cc6
|
[
"MIT"
] | 24
|
2020-05-20T23:29:57.000Z
|
2021-04-14T04:18:21.000Z
|
dffml/plugins.py
|
Devansh252/dffml
|
9fe9f18553215b217f7dd9bea9d2d3b3b2b19cc6
|
[
"MIT"
] | 1
|
2020-05-06T19:07:02.000Z
|
2020-05-06T19:07:02.000Z
|
"""
This file is imported by the top level setup.py and therefore must remain as
independent as possible (no relative imports)
"""
import os
import sys
import pathlib
import inspect
import platform
import tempfile
import contextlib
import subprocess
import importlib.util
def inpath(binary):
    """Return True if *binary* names a file in any directory on ``PATH``.

    Uses ``os.pathsep`` instead of a hard-coded ``":"`` so the check also
    works on Windows (PATH entries there are separated by ``";"``) -- this
    module explicitly supports Windows via its ``platform.system()``
    checks, but the original split broke the lookup on that platform.
    Note: only existence is checked, not the executable bit, matching the
    original behaviour.
    """
    return any(
        os.path.isfile(os.path.join(dirname, binary))
        for dirname in os.environ.get("PATH", "").split(os.pathsep)
    )
# List of plugins
# Each entry is a (plugin_type, name) pair; plugin_type corresponds to a
# source subdirectory and becomes part of the published package name.
CORE_PLUGINS = [
    ("configloader", "yaml"),
    ("configloader", "image"),
    ("model", "scratch"),
    ("model", "scikit"),
    ("model", "tensorflow"),
    ("model", "tensorflow_hub"),
    ("model", "transformers"),
    ("model", "vowpalWabbit"),
    ("model", "xgboost"),
    ("model", "pytorch"),
    ("model", "spacy"),
    ("model", "daal4py"),
]
# Models which currently don't support Windows or MacOS
if platform.system() not in {"Windows", "Darwin"}:
    CORE_PLUGINS += [
        ("model", "autosklearn"),
    ]
CORE_PLUGINS += [
    ("examples", "shouldi"),
    ("feature", "git"),
    ("feature", "auth"),
    ("operations", "binsec"),
    ("operations", "deploy"),
    ("operations", "image"),
    ("operations", "nlp"),
    ("service", "http"),
    ("source", "mysql"),
]
def python_package_installed(module_name: str) -> bool:
    """Return True when *module_name* is importable in this interpreter.

    ``find_spec`` raises ``ModuleNotFoundError`` for dotted names whose
    parent package is missing; that case is treated as "not installed".
    """
    found = None
    try:
        found = importlib.util.find_spec(module_name)
    except ModuleNotFoundError:
        pass
    return found is not None
# Dependencies of plugins and how to check if they exist on the system or not
# Maps (plugin_type, name) -> {dep_name: zero-arg callable returning bool}.
CORE_PLUGIN_DEPS = {
    ("model", "autosklearn"): {
        "swig": lambda: inpath("swig"),
        "cython": lambda: inpath("cython"),
    }
    # autosklearn's build deps only matter on platforms where it is
    # shipped and not already importable.
    if platform.system() not in {"Windows", "Darwin"}
    and not python_package_installed("autosklearn")
    else {},
}
CORE_PLUGIN_DEPS[("model", "daal4py")] = {
    # Must be installed already via conda, do not provide a pypi package yet
    "daal4py": lambda: python_package_installed("daal4py")
}
# All packages under configloader/ are really named dffml-config-{name}
ALTERNATIVES = {"configloader": "config"}
# Build a dict of plugin_type_name (aka model, config): list(package_names)
def package_names_by_plugin(validation=None):
    """Group dffml package names by pluralised plugin type.

    validation: optional callable (plugin_type, name) -> bool used to
    filter entries; when omitted every plugin is included.

    The outer dict comprehension iterates CORE_PLUGINS and relies on
    duplicate keys overwriting each other to deduplicate plugin types.
    """
    by_plugin = {
        (plugin_type + ("s" if not plugin_type.endswith("s") else "")): [
            "dffml-%s-%s"
            % (
                ALTERNATIVES.get(plugin_type, plugin_type),
                name.replace("_", "-"),
            )
            for sub_plugin_type, name in CORE_PLUGINS
            if sub_plugin_type == plugin_type
            and (not validation or validation(sub_plugin_type, name))
        ]
        for plugin_type, plugin_name in CORE_PLUGINS
        if plugin_type != "examples"
    }
    # Operations used to be named features
    by_plugin["operations"].extend(by_plugin["features"])
    del by_plugin["features"]
    # All packages
    by_plugin["all"] = [
        "dffml-%s-%s"
        % (ALTERNATIVES.get(plugin_type, plugin_type), name.replace("_", "-"),)
        for plugin_type, name in CORE_PLUGINS
        if plugin_type != "examples"
        and (not validation or validation(plugin_type, name))
    ]
    return by_plugin
# Computed eagerly at import time so consumers can treat these as constants.
PACKAGE_NAMES_BY_PLUGIN = package_names_by_plugin()
# Same as PACKAGE_NAMES_BY_PLUGIN but only with plugins that have all their
# pre-install dependencies met
PACKAGE_NAMES_BY_PLUGIN_INSTALLABLE = package_names_by_plugin(
    lambda plugin_type, plugin_name: all(
        map(
            lambda check: check(),
            CORE_PLUGIN_DEPS.get((plugin_type, plugin_name), {}).values(),
        )
    )
)
def package_names_to_directory(validation=None):
    """Map each published package name to its (plugin_type, name) pair.

    The ``validation`` parameter is accepted for signature parity with
    ``package_names_by_plugin`` but is not consulted here.
    """
    mapping = {}
    for plugin_type, name in CORE_PLUGINS:
        if plugin_type == "examples":
            package_name = name
        else:
            package_name = "dffml-%s-%s" % (
                ALTERNATIVES.get(plugin_type, plugin_type),
                name.replace("_", "-"),
            )
        mapping[package_name] = (
            plugin_type,
            name,
        )
    return mapping
PACKAGE_NAMES_TO_DIRECTORY = package_names_to_directory()  # import-time constant
| 27.179487
| 79
| 0.610849
|
145609bf5a9750fa9ef6e695a02e78ce5a452bd6
| 1,506
|
py
|
Python
|
setup.py
|
scrapli/scrapli_asyncssh
|
15c260ac0cd8e111f10d1f5f4057c91df2937d7a
|
[
"MIT"
] | 3
|
2020-05-23T23:44:01.000Z
|
2021-03-08T15:39:10.000Z
|
setup.py
|
scrapli/scrapli_asyncssh
|
15c260ac0cd8e111f10d1f5f4057c91df2937d7a
|
[
"MIT"
] | 6
|
2020-10-31T17:12:06.000Z
|
2020-11-29T15:04:30.000Z
|
setup.py
|
scrapli/scrapli_asyncssh
|
15c260ac0cd8e111f10d1f5f4057c91df2937d7a
|
[
"MIT"
] | 1
|
2020-09-28T14:49:37.000Z
|
2020-09-28T14:49:37.000Z
|
#!/usr/bin/env python
"""scrapli_asyncssh - asyncssh transport plugin for scrapli"""
import setuptools
from scrapli_asyncssh import __version__
__author__ = "Carl Montanari"
# The PyPI long description is taken verbatim from the README.
with open("README.md", "r", encoding="utf-8") as f:
    README = f.read()
# Runtime dependencies live in requirements.txt, one per line.
# NOTE(review): no encoding= here, unlike the README open above --
# presumably the file is ASCII-only; confirm before adding non-ASCII.
with open("requirements.txt", "r") as f:
    INSTALL_REQUIRES = f.read().splitlines()
setuptools.setup(
    name="scrapli_asyncssh",
    version=__version__,
    author=__author__,
    author_email="carl.r.montanari@gmail.com",
    description="asyncssh transport plugin for the scrapli SSH|Telnet screen scraping library",
    long_description=README,
    long_description_content_type="text/markdown",
    url="https://github.com/scrapli/scrapli_asyncssh",
    project_urls={
        "Changelog": "https://github.com/scrapli/scrapli_asyncssh/blob/master/CHANGELOG.md"
    },
    license="MIT",
    packages=setuptools.find_packages(),
    install_requires=INSTALL_REQUIRES,
    extras_require={},
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Operating System :: POSIX :: Linux",
        "Operating System :: MacOS",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3 :: Only",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
    python_requires=">=3.6",
)
| 33.466667
| 95
| 0.664011
|
43e966499c216a965749db2436aea609077101a1
| 814
|
py
|
Python
|
codigo/Live145/exemplo_01.py
|
cassiasamp/live-de-python
|
00b5e51793097544ba9b75c97a0d30e63970bf45
|
[
"MIT"
] | 572
|
2018-04-03T03:17:08.000Z
|
2022-03-31T19:05:32.000Z
|
codigo/Live145/exemplo_01.py
|
cassiasamp/live-de-python
|
00b5e51793097544ba9b75c97a0d30e63970bf45
|
[
"MIT"
] | 176
|
2018-05-18T15:56:16.000Z
|
2022-03-28T20:39:07.000Z
|
codigo/Live145/exemplo_01.py
|
cassiasamp/live-de-python
|
00b5e51793097544ba9b75c97a0d30e63970bf45
|
[
"MIT"
] | 140
|
2018-04-18T13:59:11.000Z
|
2022-03-29T00:43:49.000Z
|
from kivy.app import App
from kivy.uix.button import Button
from kivy.uix.label import Label
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.textinput import TextInput
def on_press(btn):
    # Button callback: reflect the pressed state in the caption.
    btn.text = 'Apertado'
def on_release(btn):
    # Button callback: restore the caption once the button is released.
    btn.text = 'Solto!'
class MyApp(App):
    """Minimal Kivy demo: a label, a text input and a press/release button."""
    def build(self):
        """Assemble and return the root widget tree."""
        root = BoxLayout(orientation='vertical')
        button_row = BoxLayout()
        root.add_widget(button_row)
        greeting = Label(text='Olar Mundo')
        greeting.font_size = 50
        entry = TextInput()
        push_button = Button(
            text='Butaum',
            on_press=on_press,
            on_release=on_release,
        )
        push_button.font_size = 50
        root.add_widget(greeting)
        root.add_widget(entry)
        button_row.add_widget(push_button)
        return root
MyApp().run()  # Start the Kivy event loop.
| 20.35
| 48
| 0.609337
|
5ac007dab3523f09b6787e883180bebbca30a367
| 15,848
|
py
|
Python
|
snakemake/conda.py
|
smatsumt/snakemake
|
6e3afca962a2485bd7310639d0c78265d0d482f5
|
[
"MIT"
] | 1
|
2020-05-09T12:36:33.000Z
|
2020-05-09T12:36:33.000Z
|
snakemake/conda.py
|
smatsumt/snakemake
|
6e3afca962a2485bd7310639d0c78265d0d482f5
|
[
"MIT"
] | null | null | null |
snakemake/conda.py
|
smatsumt/snakemake
|
6e3afca962a2485bd7310639d0c78265d0d482f5
|
[
"MIT"
] | null | null | null |
import os
import re
import subprocess
import tempfile
from urllib.request import urlopen
from urllib.parse import urlparse
from urllib.error import URLError
import hashlib
import shutil
from distutils.version import StrictVersion
import json
from glob import glob
import tarfile
import uuid
from snakemake.exceptions import CreateCondaEnvironmentException, WorkflowError
from snakemake.logging import logger
from snakemake.common import strip_prefix
from snakemake import utils
from snakemake import singularity
from snakemake.io import git_content
def content(env_file):
    """Return the raw bytes of a conda environment specification.

    Accepts ``git+file:`` references, any other URL scheme, or a plain
    local path; raises WorkflowError when the source cannot be read.
    """
    if env_file.startswith("git+file:"):
        return git_content(env_file).encode("utf-8")
    if urlparse(env_file).scheme:
        try:
            return urlopen(env_file).read()
        except URLError as e:
            raise WorkflowError(
                "Failed to open environment file {}:".format(env_file), e
            )
    if not os.path.exists(env_file):
        raise WorkflowError("Conda env file does not " "exist: {}".format(env_file))
    with open(env_file, "rb") as handle:
        return handle.read()
class Env:
    """Conda environment from a given specification file."""
    def __init__(self, env_file, dag, singularity_img=None):
        # env_file: path or URL of the environment spec (see content()).
        # dag: provides the per-workflow persistence directories used below.
        # singularity_img: optional container image conda runs inside of;
        # it participates in the environment hash.
        self.file = env_file
        self._env_dir = dag.workflow.persistence.conda_env_path
        self._env_archive_dir = dag.workflow.persistence.conda_env_archive_path
        # Lazily computed, cached by the properties below.
        self._hash = None
        self._content_hash = None
        self._content = None
        self._path = None
        self._archive_file = None
        self._singularity_img = singularity_img
    @property
    def singularity_img_url(self):
        # URL of the container image, or None when running without one.
        return self._singularity_img.url if self._singularity_img else None
    @property
    def content(self):
        # Raw bytes of the environment spec, fetched once and cached.
        if self._content is None:
            self._content = content(self.file)
        return self._content
    @property
    def hash(self):
        # MD5 over (env dir, optional image URL, spec content); identifies
        # the on-disk environment directory.
        if self._hash is None:
            md5hash = hashlib.md5()
            # Include the absolute path of the target env dir into the hash.
            # By this, moving the working directory around automatically
            # invalidates all environments. This is necessary, because binaries
            # in conda environments can contain hardcoded absolute RPATHs.
            assert os.path.isabs(self._env_dir)
            md5hash.update(self._env_dir.encode())
            if self._singularity_img:
                md5hash.update(self._singularity_img.url.encode())
            md5hash.update(self.content)
            self._hash = md5hash.hexdigest()
        return self._hash
    @property
    def content_hash(self):
        # MD5 over the spec content only (location independent); used to
        # name the package archive directory.
        if self._content_hash is None:
            md5hash = hashlib.md5()
            md5hash.update(self.content)
            self._content_hash = md5hash.hexdigest()
        return self._content_hash
    @property
    def path(self):
        """Path to directory of the conda environment.
        First tries full hash, if it does not exist, (8-prefix) is used
        as default.
        """
        hash = self.hash
        env_dir = self._env_dir
        for h in [hash, hash[:8]]:
            path = os.path.join(env_dir, h)
            if os.path.exists(path):
                return path
        # Neither exists yet: fall through to the 8-char-prefix path, which
        # is where a fresh environment will be created.
        return path
    @property
    def archive_file(self):
        """Path to archive of the conda environment, which may or may not exist."""
        if self._archive_file is None:
            self._archive_file = os.path.join(self._env_archive_dir, self.content_hash)
        return self._archive_file
    def create_archive(self):
        """Create self-contained archive of environment.

        Downloads every package of the instantiated environment into
        archive_file and records their order in packages.txt.  Returns the
        archive directory; an already existing archive is returned as-is.
        """
        from snakemake.shell import shell
        try:
            import yaml
        except ImportError:
            raise WorkflowError(
                "Error importing PyYAML. " "Please install PyYAML to archive workflows."
            )
        # importing requests locally because it interferes with instantiating conda environments
        import requests
        env_archive = self.archive_file
        if os.path.exists(env_archive):
            return env_archive
        try:
            # Download
            logger.info(
                "Downloading packages for conda environment {}...".format(self.file)
            )
            os.makedirs(env_archive, exist_ok=True)
            try:
                out = shell.check_output(
                    "conda list --explicit --prefix '{}'".format(self.path),
                    stderr=subprocess.STDOUT,
                )
                logger.debug(out.decode())
            except subprocess.CalledProcessError as e:
                raise WorkflowError(
                    "Error exporting conda packages:\n" + e.output.decode()
                )
            with open(os.path.join(env_archive, "packages.txt"), "w") as pkg_list:
                # Non-comment lines of `conda list --explicit` are package URLs.
                for l in out.decode().split("\n"):
                    if l and not l.startswith("#") and not l.startswith("@"):
                        pkg_url = l
                        logger.info(pkg_url)
                        parsed = urlparse(pkg_url)
                        pkg_name = os.path.basename(parsed.path)
                        # write package name to list
                        print(pkg_name, file=pkg_list)
                        # download package
                        pkg_path = os.path.join(env_archive, pkg_name)
                        with open(pkg_path, "wb") as copy:
                            r = requests.get(pkg_url)
                            r.raise_for_status()
                            copy.write(r.content)
                        # NOTE(review): bare except below swallows *any*
                        # error (including KeyboardInterrupt) from the
                        # archive validity probe -- consider tarfile.ReadError.
                        try:
                            tarfile.open(pkg_path)
                        except:
                            raise WorkflowError(
                                "Package is invalid tar archive: {}".format(pkg_url)
                            )
        except (
            requests.exceptions.ChunkedEncodingError,
            requests.exceptions.HTTPError,
        ) as e:
            # Remove the half-written archive so a retry starts clean.
            shutil.rmtree(env_archive)
            raise WorkflowError("Error downloading conda package {}.".format(pkg_url))
        except (Exception, BaseException) as e:
            shutil.rmtree(env_archive)
            raise e
        return env_archive
    def create(self, dryrun=False):
        """ Create the conda enviroment.

        Instantiates the environment at self.path (from the local package
        archive when available, otherwise via `conda env create`), writing
        env_setup_start/env_setup_done flag files so interrupted setups can
        be detected and redone.  Returns the environment path; with
        dryrun=True nothing is created.
        """
        from snakemake.shell import shell
        # Read env file and create hash.
        env_file = self.file
        tmp_file = None
        url_scheme, *_ = urlparse(env_file)
        if (url_scheme and not url_scheme == "file") or (
            not url_scheme and env_file.startswith("git+file:/")
        ):
            # Remote spec: spill the content to a local temp file so conda
            # (possibly inside a container) can read it.
            with tempfile.NamedTemporaryFile(delete=False, suffix=".yaml") as tmp:
                tmp.write(self.content)
            env_file = tmp.name
            tmp_file = tmp.name
        # NOTE(review): env_hash is never used below (self.path re-derives
        # it); kept as-is here.
        env_hash = self.hash
        env_path = self.path
        # Check for broken environment
        if os.path.exists(
            os.path.join(env_path, "env_setup_start")
        ) and not os.path.exists(os.path.join(env_path, "env_setup_done")):
            if dryrun:
                logger.info(
                    "Incomplete Conda environment {} will be recreated.".format(
                        utils.simplify_path(self.file)
                    )
                )
            else:
                logger.info(
                    "Removing incomplete Conda environment {}...".format(
                        utils.simplify_path(self.file)
                    )
                )
                shutil.rmtree(env_path, ignore_errors=True)
        # Create environment if not already present.
        if not os.path.exists(env_path):
            if dryrun:
                logger.info(
                    "Conda environment {} will be created.".format(
                        utils.simplify_path(self.file)
                    )
                )
                return env_path
            # Instantiating Conda also validates that conda is available
            # (in the container when one is configured).
            conda = Conda(self._singularity_img)
            logger.info(
                "Creating conda environment {}...".format(
                    utils.simplify_path(self.file)
                )
            )
            # Check if env archive exists. Use that if present.
            env_archive = self.archive_file
            try:
                # Touch "start" flag file
                os.makedirs(env_path, exist_ok=True)
                with open(os.path.join(env_path, "env_setup_start"), "a") as f:
                    pass
                if os.path.exists(env_archive):
                    logger.info("Installing archived conda packages.")
                    pkg_list = os.path.join(env_archive, "packages.txt")
                    if os.path.exists(pkg_list):
                        # read pacakges in correct order
                        # this is for newer env archives where the package list
                        # was stored
                        # NOTE(review): open(pkg_list) is never closed here.
                        packages = [
                            os.path.join(env_archive, pkg.rstrip())
                            for pkg in open(pkg_list)
                        ]
                    else:
                        # guess order
                        packages = glob(os.path.join(env_archive, "*.tar.bz2"))
                    # install packages manually from env archive
                    cmd = " ".join(
                        ["conda", "create", "--copy", "--prefix '{}'".format(env_path)]
                        + packages
                    )
                    if self._singularity_img:
                        cmd = singularity.shellcmd(
                            self._singularity_img.path,
                            cmd,
                            envvars=self.get_singularity_envvars(),
                        )
                    out = shell.check_output(cmd, stderr=subprocess.STDOUT)
                else:
                    # Copy env file to env_path (because they can be on
                    # different volumes and singularity should only mount one).
                    # In addition, this allows to immediately see what an
                    # environment in .snakemake/conda contains.
                    target_env_file = env_path + ".yaml"
                    shutil.copy(env_file, target_env_file)
                    logger.info("Downloading and installing remote packages.")
                    cmd = " ".join(
                        [
                            "conda",
                            "env",
                            "create",
                            "--file '{}'".format(target_env_file),
                            "--prefix '{}'".format(env_path),
                        ]
                    )
                    if self._singularity_img:
                        cmd = singularity.shellcmd(
                            self._singularity_img.path,
                            cmd,
                            envvars=self.get_singularity_envvars(),
                        )
                    out = shell.check_output(cmd, stderr=subprocess.STDOUT)
                # Touch "done" flag file
                with open(os.path.join(env_path, "env_setup_done"), "a") as f:
                    pass
                logger.debug(out.decode())
                logger.info(
                    "Environment for {} created (location: {})".format(
                        os.path.relpath(env_file), os.path.relpath(env_path)
                    )
                )
            except subprocess.CalledProcessError as e:
                # remove potential partially installed environment
                shutil.rmtree(env_path, ignore_errors=True)
                raise CreateCondaEnvironmentException(
                    "Could not create conda environment from {}:\n".format(env_file)
                    + e.output.decode()
                )
        if tmp_file:
            # temporary file was created
            os.remove(tmp_file)
        return env_path
    @classmethod
    def get_singularity_envvars(self):
        # NOTE(review): decorated @classmethod, so the first parameter
        # (named ``self``) actually receives the class.
        # Fresh per-call package-cache dir keeps containers from sharing
        # (and corrupting) a conda package cache.
        return {"CONDA_PKGS_DIRS": "/tmp/conda/{}".format(uuid.uuid4())}
    def __hash__(self):
        # this hash is only for object comparison, not for env paths
        return hash(self.file)
    def __eq__(self, other):
        # Two Env objects are equal iff they were built from the same spec file.
        if isinstance(other, Env):
            return self.file == other.file
        return False
class Conda:
    """Handle to the system conda installation, optionally proxied through a
    singularity container image.  Instances are cached per image so the
    (expensive) availability/version checks are shared."""
    # Cache of singleton instances keyed by singularity image.
    instances = dict()
    def __new__(cls, singularity_img=None):
        # NOTE(review): after __new__ returns, Python invokes __init__ on
        # the returned instance again (also on cache hits), so __init__
        # and its shell checks run more than once per instance -- confirm
        # whether that is intended.
        if singularity_img not in cls.instances:
            inst = super().__new__(cls)
            inst.__init__(singularity_img=singularity_img)
            cls.instances[singularity_img] = inst
            return inst
        else:
            return cls.instances[singularity_img]
    def __init__(self, singularity_img=None):
        from snakemake.shell import shell
        from snakemake import singularity
        # Accept either an Image object or a plain path to the image.
        if isinstance(singularity_img, singularity.Image):
            singularity_img = singularity_img.path
        self.singularity_img = singularity_img
        # Fails fast (raises) when conda is missing or too old.
        self._check()
        self.info = json.loads(shell.check_output(self._get_cmd("conda info --json")))
    def _get_cmd(self, cmd):
        # Wrap the command for execution inside the container when one is set.
        if self.singularity_img:
            return singularity.shellcmd(self.singularity_img, cmd)
        return cmd
    def _check(self):
        # Verify that the `conda` command exists and is at least version 4.2.
        from snakemake.shell import shell
        try:
            # Use type here since conda now is a function.
            # type allows to check for both functions and regular commands.
            shell.check_output(self._get_cmd("type conda"), stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            if self.singularity_img:
                raise CreateCondaEnvironmentException(
                    "The 'conda' command is not "
                    "available inside "
                    "your singularity container "
                    "image. Snakemake mounts "
                    "your conda installation "
                    "into singularity. "
                    "Sometimes, this can fail "
                    "because of shell restrictions. "
                    "It has been tested to work "
                    "with docker://ubuntu, but "
                    "it e.g. fails with "
                    "docker://bash "
                )
            else:
                raise CreateCondaEnvironmentException(
                    "The 'conda' command is not "
                    "available in the "
                    "shell {} that will be "
                    "used by Snakemake. You have "
                    "to ensure that it is in your "
                    "PATH, e.g., first activating "
                    "the conda base environment "
                    "with `conda activate base`.".format(shell.get_executable())
                )
        try:
            # `conda --version` prints e.g. "conda 4.7.12"; take the number.
            version = (
                shell.check_output(
                    self._get_cmd("conda --version"), stderr=subprocess.STDOUT
                )
                .decode()
                .split()[1]
            )
            if StrictVersion(version) < StrictVersion("4.2"):
                raise CreateCondaEnvironmentException(
                    "Conda must be version 4.2 or later."
                )
        except subprocess.CalledProcessError as e:
            raise CreateCondaEnvironmentException(
                "Unable to check conda version:\n" + e.output.decode()
            )
    def prefix_path(self):
        # Root of the conda installation as reported by `conda info`.
        return self.info["conda_prefix"]
    def bin_path(self):
        # Directory containing conda's executables (including `activate`).
        return os.path.join(self.prefix_path(), "bin")
    def shellcmd(self, env_path, cmd):
        # Prefix *cmd* so it runs with the environment at *env_path* active.
        from snakemake.shell import shell
        # get path to activate script
        activate = os.path.join(self.bin_path(), "activate")
        return "source {} '{}'; {}".format(activate, env_path, cmd)
| 37.554502
| 96
| 0.52972
|
d9a72bc8926211229421f019b17162dd79e715aa
| 7,317
|
py
|
Python
|
tensorflow_addons/optimizers/lookahead.py
|
failure-to-thrive/addons
|
63c82e318e68b07eb1162d1ff247fe9f4d3194fc
|
[
"Apache-2.0"
] | 1
|
2020-01-20T17:48:35.000Z
|
2020-01-20T17:48:35.000Z
|
tensorflow_addons/optimizers/lookahead.py
|
failure-to-thrive/addons
|
63c82e318e68b07eb1162d1ff247fe9f4d3194fc
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_addons/optimizers/lookahead.py
|
failure-to-thrive/addons
|
63c82e318e68b07eb1162d1ff247fe9f4d3194fc
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
@tf.keras.utils.register_keras_serializable(package='Addons')
class Lookahead(tf.keras.optimizers.Optimizer):
    """This class allows to extend optimizers with the lookahead mechanism.

    The mechanism is proposed by Michael R. Zhang et.al in the paper
    [Lookahead Optimizer: k steps forward, 1 step back]
    (https://arxiv.org/abs/1907.08610v1). The optimizer iteratively updates two
    sets of weights: the search directions for weights are chosen by the inner
    optimizer, while the "slow weights" are updated each `k` steps based on the
    directions of the "fast weights" and the two sets of weights are
    synchronized. This method improves the learning stability and lowers the
    variance of its inner optimizer.

    Example of usage:

    ```python
    opt = tf.keras.optimizers.SGD(learning_rate)
    opt = tfa.optimizers.Lookahead(opt)
    ```
    """

    def __init__(self,
                 optimizer,
                 sync_period=6,
                 slow_step_size=0.5,
                 name="Lookahead",
                 **kwargs):
        r"""Wrap optimizer with the lookahead mechanism.

        Args:
            optimizer: The original optimizer that will be used to compute
                and apply the gradients.
            sync_period: An integer. The synchronization period of lookahead.
                Enable lookahead mechanism by setting it with a positive value.
            slow_step_size: A floating point value.
                The ratio for updating the slow weights.
            name: Optional name for the operations created when applying
                gradients. Defaults to "Lookahead".
            **kwargs: keyword arguments. Allowed to be {`clipnorm`,
                `clipvalue`, `lr`, `decay`}. `clipnorm` is clip gradients
                by norm; `clipvalue` is clip gradients by value, `decay` is
                included for backward compatibility to allow time inverse
                decay of learning rate. `lr` is included for backward
                compatibility, recommended to use `learning_rate` instead.
        """
        super().__init__(name, **kwargs)

        # Accept string identifiers (e.g. "sgd") for the inner optimizer.
        if isinstance(optimizer, str):
            optimizer = tf.keras.optimizers.get(optimizer)
        if not isinstance(optimizer, tf.keras.optimizers.Optimizer):
            raise TypeError(
                "optimizer is not an object of tf.keras.optimizers.Optimizer")
        self._optimizer = optimizer
        self._set_hyper('sync_period', sync_period)
        self._set_hyper('slow_step_size', slow_step_size)
        self._initialized = False

    def _create_slots(self, var_list):
        # Let the inner optimizer create its own slots, then add a "slow"
        # slot per variable to hold the slow weights.
        self._optimizer._create_slots(var_list=var_list)  # pylint: disable=protected-access
        for var in var_list:
            self.add_slot(var, 'slow')

    def _create_hypers(self):
        self._optimizer._create_hypers()  # pylint: disable=protected-access

    def _prepare(self, var_list):
        return self._optimizer._prepare(var_list=var_list)  # pylint: disable=protected-access

    def apply_gradients(self, grads_and_vars, name=None):
        # Share the iteration counter with the inner optimizer so both
        # advance in lockstep.
        self._optimizer._iterations = self.iterations  # pylint: disable=protected-access
        return super().apply_gradients(grads_and_vars, name)

    def _init_op(self, var):
        # On the very first step (iterations == 0), seed the slow weights
        # with the current fast weights; on later steps keep them as-is.
        slow_var = self.get_slot(var, 'slow')
        return slow_var.assign(
            tf.where(
                tf.equal(self.iterations,
                         tf.constant(0, dtype=self.iterations.dtype)),
                var,
                slow_var,
            ),
            use_locking=self._use_locking)

    def _look_ahead_op(self, var):
        # Every `sync_period` steps: move the slow weights a fraction
        # `slow_step_size` toward the fast weights, then reset the fast
        # weights to the updated slow weights. Otherwise both are no-ops.
        var_dtype = var.dtype.base_dtype
        slow_var = self.get_slot(var, 'slow')
        local_step = tf.cast(self.iterations + 1, tf.dtypes.int64)
        sync_period = self._get_hyper('sync_period', tf.dtypes.int64)
        slow_step_size = self._get_hyper('slow_step_size', var_dtype)
        step_back = slow_var + slow_step_size * (var - slow_var)
        # True exactly when local_step is a multiple of sync_period.
        sync_cond = tf.equal(
            tf.math.floordiv(local_step, sync_period) * sync_period,
            local_step)
        with tf.control_dependencies([step_back]):
            slow_update = slow_var.assign(
                tf.where(
                    sync_cond,
                    step_back,
                    slow_var,
                ),
                use_locking=self._use_locking)
            var_update = var.assign(
                tf.where(
                    sync_cond,
                    step_back,
                    var,
                ),
                use_locking=self._use_locking)
        return tf.group(slow_update, var_update)

    @property
    def weights(self):
        # Expose both the wrapper's and the inner optimizer's weights.
        return self._weights + self._optimizer.weights

    def _resource_apply_dense(self, grad, var):
        # Order matters: initialize the slow slot, apply the inner update,
        # then (conditionally) synchronize via the lookahead step.
        init_op = self._init_op(var)
        with tf.control_dependencies([init_op]):
            train_op = self._optimizer._resource_apply_dense(grad, var)  # pylint: disable=protected-access
            with tf.control_dependencies([train_op]):
                look_ahead_op = self._look_ahead_op(var)
        return tf.group(init_op, train_op, look_ahead_op)

    def _resource_apply_sparse(self, grad, var, indices):
        init_op = self._init_op(var)
        with tf.control_dependencies([init_op]):
            train_op = self._optimizer._resource_apply_sparse(  # pylint: disable=protected-access
                grad, var, indices)
            with tf.control_dependencies([train_op]):
                look_ahead_op = self._look_ahead_op(var)
        return tf.group(init_op, train_op, look_ahead_op)

    def get_config(self):
        # Serialize the wrapped optimizer plus the lookahead hypers so the
        # whole stack can be reconstructed by from_config.
        config = {
            'optimizer': tf.keras.optimizers.serialize(self._optimizer),
            'sync_period': self._serialize_hyperparameter('sync_period'),
            'slow_step_size': self._serialize_hyperparameter('slow_step_size'),
        }
        base_config = super().get_config()
        return {**base_config, **config}

    @property
    def learning_rate(self):
        # Proxy the inner optimizer's learning rate.
        return self._optimizer._get_hyper('learning_rate')

    @learning_rate.setter
    def learning_rate(self, learning_rate):
        self._optimizer._set_hyper('learning_rate', learning_rate)

    @property
    def lr(self):
        # Backward-compatible alias for learning_rate.
        return self.learning_rate

    @lr.setter
    def lr(self, lr):
        self.learning_rate = lr

    @classmethod
    def from_config(cls, config, custom_objects=None):
        # Rebuild the inner optimizer first, then the wrapper around it.
        optimizer = tf.keras.optimizers.deserialize(
            config.pop('optimizer'),
            custom_objects=custom_objects,
        )
        return cls(optimizer, **config)
| 39.983607
| 107
| 0.63086
|
f69e79550f8a36f8ee0c85eac4f7118c11f40e95
| 1,356
|
py
|
Python
|
irc/asparagus/modules/shibe.py
|
Xe/code
|
d970038329f7c4e4f0ee9dcd1b345741dd0fcc51
|
[
"Zlib"
] | 7
|
2015-03-26T07:35:06.000Z
|
2021-12-09T00:03:33.000Z
|
irc/asparagus/modules/shibe.py
|
Xe/code
|
d970038329f7c4e4f0ee9dcd1b345741dd0fcc51
|
[
"Zlib"
] | null | null | null |
irc/asparagus/modules/shibe.py
|
Xe/code
|
d970038329f7c4e4f0ee9dcd1b345741dd0fcc51
|
[
"Zlib"
] | 1
|
2020-11-03T22:59:31.000Z
|
2020-11-03T22:59:31.000Z
|
from random import choice, random, randint
import time
COLORS = ['03','04','06','07','08','09','10','11','12','13']
#Shibe generation code borrowed from aji
class pvec:
    """Probability vector over indices.

    `pick()` samples an index weighted by `self.v`, then damps the weights
    near the picked index so subsequent picks are pushed away from it.
    """
    def __init__(self, num):
        # Start uniform over `num` indices; norm() rescales to sum to 1.
        self.v = [1.0] * num
        self.norm()
    def norm(self):
        # Renormalize the weights so they sum to 1.
        s = sum(self.v)
        self.v = [x / s for x in self.v]
    def pick(self):
        # Inverse-CDF sampling: walk the cumulative sum until it passes r.
        r = random() * sum(self.v) # sum should always be 1, but meh
        s = 0
        for i, x in enumerate(self.v):
            s += x
            if r < s:
                break
        # Damp weights near the picked index i: fac is ~0.22 at j == i and
        # approaches 1 as |i - j| grows, so nearby indices become less likely.
        def calc(j, x):
            fac = (1 - 3.5 / (abs(i - j) + 4.5))
            return x * fac
        self.v = [calc(j, x) for j, x in enumerate(self.v)]
        self.norm()
        return i
# Shared picker over 40 possible indent widths for the shibe prefix.
spvec = pvec(40)
# Warm up the distribution with a few throwaway picks.
for i in range(10):
    spvec.pick()
# Last IRC color code used; gen_prefix avoids repeating it back-to-back.
last_color = '00'
def gen_prefix():
    """Return a random indent plus an IRC color escape.

    Re-rolls the color until it differs from the previously used one,
    records it in the module-level ``last_color``, and prefixes it with
    a random run of spaces drawn from ``spvec``.
    """
    global last_color
    picked = choice(COLORS)
    while picked == last_color:
        picked = choice(COLORS)
    last_color = picked
    indent = ' ' * spvec.pick()
    return indent + '\3' + picked
# Module metadata consumed by the cod bot's module loader.
NAME="Shibe"
DESC="Wow, such bot command"
def initModule(cod):
    """Register the SHIBE bot command when the module is loaded."""
    cod.addBotCommand("SHIBE", wowSuchImplementation)
def destroyModule(cod):
    """Unregister the SHIBE bot command when the module is unloaded."""
    cod.delBotCommand("SHIBE")
def wowSuchImplementation(cod, line, splitline, source, destination):
    """Echo the command arguments back, shibe-style.

    Everything after the command word is replied with a random indent and
    IRC color prefix produced by gen_prefix().
    """
    payload = " ".join(splitline[1:])
    cod.reply(source, destination, "%s%s" % (gen_prefix(), payload))
| 23.37931
| 69
| 0.553835
|
210f50646369f75a5259bdd2283c7a67e0915787
| 2,838
|
py
|
Python
|
gpt.py
|
salil-gtm/gpt-3-use-case-analyzer
|
a7525191e214985fd63c578db109a5381177a18e
|
[
"Apache-2.0"
] | 8
|
2021-07-16T22:03:37.000Z
|
2022-02-02T16:29:14.000Z
|
gpt.py
|
salil-gtm/gpt-3-use-case-analyzer
|
a7525191e214985fd63c578db109a5381177a18e
|
[
"Apache-2.0"
] | null | null | null |
gpt.py
|
salil-gtm/gpt-3-use-case-analyzer
|
a7525191e214985fd63c578db109a5381177a18e
|
[
"Apache-2.0"
] | null | null | null |
"""Creates the Example and GPT classes for a user to interface with the OpenAI API."""
import openai
def set_openai_key(key):
    """Set the module-level OpenAI API key.

    Args:
        key: Secret API key string used to authenticate all subsequent
            calls made through the ``openai`` client.
    """
    openai.api_key = key
class Example():
    """A single input/output demonstration used to prime the model."""

    def __init__(self, inp, out):
        """Store the demonstration pair."""
        self.input, self.output = inp, out

    def get_input(self):
        """Return the demonstration's input text."""
        return self.input

    def get_output(self):
        """Return the demonstration's intended output text."""
        return self.output

    def format(self):
        """Render the pair in the prompt format used for priming."""
        return "input: {}\noutput: {}\n".format(self.input, self.output)
class GPT:
    """The main class for a user to interface with the OpenAI API.

    Holds the request parameters (engine, temperature, max_tokens) and a
    list of priming examples, and builds/submits completion requests.
    """

    def __init__(self, engine='davinci',
                 temperature=0.5,
                 max_tokens=100):
        """Record request parameters; start with no primed examples."""
        self.examples = []
        self.engine = engine
        self.temperature = temperature
        self.max_tokens = max_tokens

    def add_example(self, ex):
        """Append one formatted Example to the priming set.

        The argument must be an instance of the Example class.
        """
        assert isinstance(ex, Example), "Please create an Example object."
        self.examples.append(ex.format())

    def get_prime_text(self):
        """Join every stored example into the priming preamble."""
        return '\n'.join(self.examples) + '\n'

    def get_engine(self):
        """Return the engine name used for API calls."""
        return self.engine

    def get_temperature(self):
        """Return the sampling temperature used for API calls."""
        return self.temperature

    def get_max_tokens(self):
        """Return the completion length cap used for API calls."""
        return self.max_tokens

    def craft_query(self, prompt):
        """Build the full prompt: primed examples followed by the new input."""
        return self.get_prime_text() + "input: " + prompt + "\n"

    def submit_request(self, prompt):
        """Call the OpenAI completion endpoint with the crafted prompt."""
        return openai.Completion.create(
            engine=self.get_engine(),
            prompt=self.craft_query(prompt),
            max_tokens=self.get_max_tokens(),
            temperature=self.get_temperature(),
            top_p=1,
            n=1,
            stream=False,
            stop="\ninput:",
        )

    def get_top_reply(self, prompt):
        """Return just the text of the first completion choice."""
        response = self.submit_request(prompt)
        return response['choices'][0]['text']
| 34.192771
| 86
| 0.564834
|
ec60334df561c205bf4743cd6af1ee4ba344d7dd
| 7,391
|
py
|
Python
|
federated/objectives/base.py
|
alshedivat/fedpa
|
695c400c17672f70971599513f0e2388cd302078
|
[
"Apache-2.0"
] | 37
|
2020-12-08T21:46:23.000Z
|
2022-03-16T17:05:19.000Z
|
federated/objectives/base.py
|
alshedivat/fedpa
|
695c400c17672f70971599513f0e2388cd302078
|
[
"Apache-2.0"
] | null | null | null |
federated/objectives/base.py
|
alshedivat/fedpa
|
695c400c17672f70971599513f0e2388cd302078
|
[
"Apache-2.0"
] | 5
|
2021-01-02T06:51:29.000Z
|
2021-12-22T13:51:35.000Z
|
# coding=utf-8
# Copyright 2020 Maruan Al-Shedivat.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base classes and functionality for objective functions."""
import abc
import functools
from typing import Any, Dict, Tuple
import attr
import jax.numpy as jnp
from jax import grad, jit, random, vmap
# Types.
Dataset = Tuple[jnp.ndarray, jnp.ndarray]
ObjectiveParams = Tuple[jnp.ndarray, ...]
class Objective(abc.ABC):
    """Abstract base class for (deterministic) objective functions.

    Subclasses supply `params` and a static `eval`; this base class layers
    batched, jit-compiled value and gradient evaluation on top of them.
    """

    @property
    def kwargs(self) -> Dict[str, Any]:
        # Extra keyword arguments forwarded to `eval`; subclasses may override.
        return {}

    @classmethod
    @functools.partial(jit, static_argnums=(0,))
    def _veval(
        cls, params: ObjectiveParams, x: jnp.ndarray, **kwargs
    ) -> jnp.ndarray:
        # Vectorized (vmap over the leading axis of `x`), jit-compiled eval.
        # `cls` is marked static so jit specializes per concrete subclass.
        _eval = functools.partial(cls.eval, params, **kwargs)
        return vmap(_eval)(x)

    @classmethod
    @functools.partial(jit, static_argnums=(0,))
    def _vgrad(
        cls, params: ObjectiveParams, x: jnp.ndarray, **kwargs
    ) -> jnp.ndarray:
        # Vectorized, jit-compiled gradient of eval w.r.t. `x`.
        _eval = functools.partial(cls.eval, params, **kwargs)
        return vmap(grad(_eval))(x)

    def __call__(self, x: jnp.ndarray) -> jnp.ndarray:
        """Computes the value of the objective at `x`."""
        # Add batch dimension, if necessary.
        squeeze = False
        if x.ndim == 1:
            x = jnp.expand_dims(x, axis=0)
            squeeze = True
        # Run a vectorized version of eval.
        value = self._veval(self.params, x, **self.kwargs)
        if squeeze:
            # Drop the batch dimension we added above.
            value = jnp.squeeze(value)
        return value

    def grad(self, x: jnp.ndarray) -> jnp.ndarray:
        """Returns the gradient of the objective at `x`."""
        # Add batch dimension, if necessary.
        squeeze = False
        if x.ndim == 1:
            x = jnp.expand_dims(x, axis=0)
            squeeze = True
        # Run a vectorized version of grad.
        value = self._vgrad(self.params, x, **self.kwargs)
        if squeeze:
            value = jnp.squeeze(value)
        return value

    @property
    @abc.abstractmethod
    def params(self) -> ObjectiveParams:
        """Must return a tuple of parameters of the objective"""
        pass

    @staticmethod
    @abc.abstractmethod
    def eval(params: ObjectiveParams, x: jnp.ndarray) -> jnp.ndarray:
        """Must return the value of the objective at `x`."""
        pass
@attr.s(eq=False)
class StochasticObjective(abc.ABC):
    """Abstract base class for stochastic objective functions.

    Stochastic objectives must be build around a dataset of input-output pairs.
    Whenever the objective (or its gradient) called on an input `x`, it computes
    the stochastic value (or stochastic gradient) based on a batch of data
    randomly sampled from the underlying dataset.
    """

    # Dataset inputs/outputs and the size of the random batch drawn for
    # each stochastic evaluation.
    X: jnp.ndarray = attr.ib()
    y: jnp.ndarray = attr.ib()
    batch_size: int = attr.ib()

    @property
    def data(self):
        # The full dataset as an (inputs, outputs) pair.
        return self.X, self.y

    @property
    def kwargs(self) -> Dict[str, Any]:
        # Extra keyword arguments forwarded to `eval`; subclasses may override.
        return {}

    @property
    def num_points(self) -> int:
        # Number of examples in the dataset.
        return self.X.shape[0]

    @staticmethod
    @functools.partial(jit, static_argnums=(0,))
    def _sample_batch(
        batch_size: int, data: Dataset, prng_key: jnp.ndarray
    ) -> Dataset:
        # Draw `batch_size` distinct rows from (x, y) using JAX's functional
        # PRNG; `batch_size` is static so jit can shape-specialize.
        x, y = data
        num_points = x.shape[0]
        batch_indices = random.choice(
            prng_key, num_points, (batch_size,), replace=False
        )
        x_batch = jnp.take(x, batch_indices, axis=0)
        y_batch = jnp.take(y, batch_indices, axis=0)
        return x_batch, y_batch

    @classmethod
    @functools.partial(jit, static_argnums=(0, 1))
    def _eval(
        cls,
        batch_size: int,
        data: Dataset,
        prng_key: jnp.ndarray,
        x: jnp.ndarray,
        **kwargs,
    ) -> jnp.ndarray:
        # Single stochastic evaluation: sample a batch, then call eval.
        data_batch = cls._sample_batch(batch_size, data, prng_key)
        return cls.eval(x, data_batch, **kwargs)

    @classmethod
    @functools.partial(jit, static_argnums=(0, 1))
    def _veval(
        cls,
        batch_size: int,
        data: Dataset,
        prng_keys: jnp.ndarray,
        x: jnp.ndarray,
        **kwargs,
    ) -> jnp.ndarray:
        # Vectorized stochastic evaluation: one PRNG key per batch element.
        _eval = functools.partial(cls._eval, batch_size, data, **kwargs)
        return vmap(_eval)(prng_keys, x)

    @classmethod
    @functools.partial(jit, static_argnums=(0, 1))
    def _grad(
        cls,
        batch_size: int,
        data: Dataset,
        prng_key: jnp.ndarray,
        x: jnp.ndarray,
        **kwargs,
    ) -> jnp.ndarray:
        # Stochastic gradient w.r.t. `x` on a freshly sampled batch.
        _eval = functools.partial(
            cls._eval, batch_size, data, prng_key, **kwargs
        )
        return grad(_eval)(x)

    @classmethod
    @functools.partial(jit, static_argnums=(0, 1))
    def _vgrad(
        cls,
        batch_size: int,
        data: Dataset,
        prng_keys: jnp.ndarray,
        x: jnp.ndarray,
        **kwargs,
    ) -> jnp.ndarray:
        # Vectorized stochastic gradient: one PRNG key per batch element.
        _grad = functools.partial(cls._grad, batch_size, data, **kwargs)
        return vmap(_grad)(prng_keys, x)

    def __call__(
        self, x: jnp.ndarray, prng_key: jnp.ndarray, deterministic: bool = False
    ) -> jnp.ndarray:
        """Computes the (stochastic) value of the objective at `x`."""
        # Add batch dimension, if necessary.
        squeeze = False
        if x.ndim == 1:
            x = jnp.expand_dims(x, axis=0)
            squeeze = True
        # Run a vectorized version of eval.
        # `deterministic=True` uses the whole dataset as the "batch".
        subkeys = random.split(prng_key, x.shape[0])
        batch_size = self.num_points if deterministic else self.batch_size
        args = batch_size, self.data, jnp.stack(subkeys)
        value = self._veval(*args, x, **self.kwargs)
        if squeeze:
            value = jnp.squeeze(value)
        return value

    def grad(
        self, x: jnp.ndarray, prng_key: jnp.ndarray, deterministic: bool = False
    ) -> jnp.ndarray:
        """Computes the (stochastic) gradient of the objective at `x`."""
        # Add batch dimension, if necessary.
        squeeze = False
        if x.ndim == 1:
            x = jnp.expand_dims(x, axis=0)
            squeeze = True
        # Run a vectorized version of grad.
        subkeys = random.split(prng_key, x.shape[0])
        batch_size = self.num_points if deterministic else self.batch_size
        args = batch_size, self.data, jnp.stack(subkeys)
        value = self._vgrad(*args, x, **self.kwargs)
        if squeeze:
            value = jnp.squeeze(value)
        return value

    @property
    @abc.abstractmethod
    def dim(self) -> int:
        """Must return the dimensionality of the problem."""
        pass

    @staticmethod
    @abc.abstractmethod
    def eval(x: jnp.ndarray, data_batch: Dataset, **kwargs) -> jnp.ndarray:
        """Must compute objective value at `x` given `data_batch`."""
        pass

    @abc.abstractmethod
    def solve(self) -> jnp.ndarray:
        """Must return the minimizer of the objective."""
        pass
| 31.054622
| 80
| 0.611825
|
4e59729ea423585610ba5b9221c3afbd2f64cbbb
| 2,753
|
py
|
Python
|
tests/sentry/api/endpoints/test_team_details.py
|
arsh-co/sentry
|
7a83a7e8a13047a9471756d679e2deb596cc2ca1
|
[
"BSD-3-Clause"
] | null | null | null |
tests/sentry/api/endpoints/test_team_details.py
|
arsh-co/sentry
|
7a83a7e8a13047a9471756d679e2deb596cc2ca1
|
[
"BSD-3-Clause"
] | null | null | null |
tests/sentry/api/endpoints/test_team_details.py
|
arsh-co/sentry
|
7a83a7e8a13047a9471756d679e2deb596cc2ca1
|
[
"BSD-3-Clause"
] | null | null | null |
from django.core.urlresolvers import reverse
from mock import patch
from sentry.models import (
OrganizationMemberType, Team, TeamStatus
)
from sentry.testutils import APITestCase
class TeamDetailsTest(APITestCase):
    def test_simple(self):
        """GET on the team-details endpoint returns the serialized team."""
        team = self.team  # force creation
        self.login_as(user=self.user)
        path = reverse('sentry-api-0-team-details', kwargs={'team_id': team.id})
        resp = self.client.get(path)
        assert resp.status_code == 200
        assert resp.data['id'] == str(team.id)
class TeamUpdateTest(APITestCase):
    def test_simple(self):
        """PUT with a new name and slug persists both fields."""
        team = self.team  # force creation
        self.login_as(user=self.user)
        path = reverse('sentry-api-0-team-details', kwargs={'team_id': team.id})
        resp = self.client.put(path, data={
            'name': 'hello world',
            'slug': 'foobar',
        })
        assert resp.status_code == 200, resp.content
        updated = Team.objects.get(id=team.id)
        assert updated.name == 'hello world'
        assert updated.slug == 'foobar'
class TeamDeleteTest(APITestCase):
    @patch('sentry.api.endpoints.team_details.delete_team')
    def test_as_admin(self, delete_team):
        # Admins may delete: the team is flagged PENDING_DELETION and the
        # actual removal is deferred to the delete_team task (mocked here).
        org = self.create_organization()
        team = self.create_team(organization=org)
        project = self.create_project(team=team)  # NOQA
        user = self.create_user(email='foo@example.com', is_superuser=False)
        org.member_set.create(
            user=user,
            has_global_access=True,
            type=OrganizationMemberType.ADMIN,
        )
        self.login_as(user)
        url = reverse('sentry-api-0-team-details', kwargs={'team_id': team.id})
        # NOTE(review): SENTRY_PROJECT=0 presumably avoids the internal
        # project's deletion safeguard — confirm against the endpoint.
        with self.settings(SENTRY_PROJECT=0):
            response = self.client.delete(url)
        team = Team.objects.get(id=team.id)
        assert response.status_code == 204, response.data
        assert team.status == TeamStatus.PENDING_DELETION
        # Deletion is scheduled asynchronously with a 5-minute grace period.
        delete_team.delay.assert_called_once_with(
            object_id=team.id,
            countdown=60 * 5,
        )

    def test_as_member(self):
        # Plain members must be forbidden (403) from deleting a team.
        org = self.create_organization(owner=self.user)
        team = self.create_team(organization=org)
        project = self.create_project(team=team)  # NOQA
        user = self.create_user(email='foo@example.com', is_superuser=False)
        team.organization.member_set.create_or_update(
            organization=org,
            user=user,
            defaults={
                'type': OrganizationMemberType.MEMBER,
            }
        )
        self.login_as(user=user)
        url = reverse('sentry-api-0-team-details', kwargs={'team_id': team.id})
        response = self.client.delete(url)
        assert response.status_code == 403
| 30.932584
| 79
| 0.628769
|
265b92a6777968351a722920746656478f467bde
| 179
|
py
|
Python
|
Task/Special-variables/Python/special-variables.py
|
LaudateCorpus1/RosettaCodeData
|
9ad63ea473a958506c041077f1d810c0c7c8c18d
|
[
"Info-ZIP"
] | 1
|
2018-11-09T22:08:38.000Z
|
2018-11-09T22:08:38.000Z
|
Task/Special-variables/Python/special-variables.py
|
seanwallawalla-forks/RosettaCodeData
|
9ad63ea473a958506c041077f1d810c0c7c8c18d
|
[
"Info-ZIP"
] | null | null | null |
Task/Special-variables/Python/special-variables.py
|
seanwallawalla-forks/RosettaCodeData
|
9ad63ea473a958506c041077f1d810c0c7c8c18d
|
[
"Info-ZIP"
] | 1
|
2018-11-09T22:08:40.000Z
|
2018-11-09T22:08:40.000Z
|
# Build a sorted list of all global and builtin names, excluding the helper
# names this snippet itself introduces ('_', 'names', 'i').
# NOTE(review): __builtins__ is a module only when run as __main__; in an
# imported module it is a dict and .__dict__ would fail — confirm usage.
names = sorted((set(globals().keys()) | set(__builtins__.__dict__.keys())) - set('_ names i'.split()))
# Print the names eight per line, space-separated.
print( '\n'.join(' '.join(names[i:i+8]) for i in range(0, len(names), 8)) )
| 59.666667
| 102
| 0.614525
|
c97cfdc16042fa42ee7a277b8691bd1e36027880
| 25,016
|
py
|
Python
|
salt/pillar/makostack.py
|
fake-name/salt
|
d8f04936e4407f51946e32e8166159778f6c31a5
|
[
"Apache-2.0"
] | null | null | null |
salt/pillar/makostack.py
|
fake-name/salt
|
d8f04936e4407f51946e32e8166159778f6c31a5
|
[
"Apache-2.0"
] | null | null | null |
salt/pillar/makostack.py
|
fake-name/salt
|
d8f04936e4407f51946e32e8166159778f6c31a5
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Simple and flexible YAML ext_pillar which can read pillar from within pillar.
.. versionadded:: 2016.3.0
This custom saltstack ``ext_pillar`` is a direct ripoff of the 'stack' ext_pillar, simply ported
to use mako instead of jinja2 for templating.
It supports the following features:
- multiple config files that are mako templates with support for ``pillar``, ``__grains__``,
``__salt__``, ``__opts__`` variable dereferencing.
- a config file renders as an ordered list of files. Unless absolute, the paths of these files are
relative to the current config file - if absolute, they will be treated literally.
- this list of files are read in order as mako templates with support for ``stack``, ``pillar``,
``__grains__``, ``__salt__``, ``__opts__`` variable dereferencing.
- all these rendered files are then parsed as ``yaml``.
- then all yaml dicts are merged in order, with support for the following merging strategies:
``merge-first``
``merge-last``
``remove``
``overwrite``
- MakoStack config files can be matched based on ``pillar``, ``grains``, or ``opts`` values, which
make it possible to support kind of self-contained environments.
Configuration in Salt
---------------------
Like any other external pillar, its configuration is declared via the ``ext_pillar`` key in the
master config file. However, you can configure MakoStack in 3 different ways:
Single config file
~~~~~~~~~~~~~~~~~~
This is the simplest option, you just need to set the path to your single MakoStack config file
as shown below:
.. code:: yaml
ext_pillar:
- makostack: /path/to/stack.cfg
List of config files
~~~~~~~~~~~~~~~~~~~~
You can also provide a list of config files:
.. code:: yaml
ext_pillar:
- makostack:
- /path/to/infrastructure.cfg
- /path/to/production.cfg
Select config files through grains|pillar|opts matching
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You can also use a much more flexible configuration: MakoStack allows one to select the config
files for the current minion based on matching values from either grains, or pillar, or opts.
Here is an example of such a configuration, which should hopefully speak for itself:
.. code:: yaml
ext_pillar:
- makostack:
pillar:environment:
dev: /path/to/dev/stack.cfg
prod: /path/to/prod/stack.cfg
grains:custom:grain:
value:
- /path/to/stack1.cfg
- /path/to/stack2.cfg
opts:custom:opt:
value: /path/to/stack0.cfg
Grafting data from files to arbitrary namespaces
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
An extended syntax for config files permits defining "graft points" on a per-config-file basis.
As an example, if the file foo.cfg would produce the following:
.. code:: yaml
foo:
- bar
- baz
and you specified the cfg file as ``/path/to/foo.cfg:yummy:fur``, the following would actually end
up in pillar after all merging was complete:
.. code:: yaml
yummy:
fur:
foo:
- bar
- baz
MakoStack configuration files
-----------------------------
The config files that are referenced in the above ``ext_pillar`` configuration are mako templates,
which must (eventually) render as a single, simple, flat ordered list of ``yaml`` files which will
then be themselves templated with mako, with their results merged to build pillar data.
Unless an absolute path name is specified, the path of these ``yaml`` files is assumed to be
relative to the directory containing the MakoStack config file. If a path begins with '/',
however, it will be treated literally and can be anywhere on the filesystem.
The following variables are available for interpolation in makostack configuration files:
- ``pillar``: the pillar data (as passed by Salt to our ``ext_pillar`` function)
- ``minion_id``: the minion id ;-)
- ``__opts__``: a dictionary of mostly Salt configuration options
- ``__grains__``: a dictionary of the grains of the minion making this pillar call
- ``__salt__``: a dictionary of Salt module functions, useful so you don't have to duplicate
functions that already exist (note: runs on the master)
So you can use all the power of mako to build your list of ``yaml`` files that then will be merged
in pillar data.
For example, you could have a MakoStack config file which looks like:
.. code:: mako
$ cat /path/to/makostack/config.cfg
core.yaml
osarchs/%{ __grains__['osarch'] }}.yaml
oscodenames/%{ __grains__['oscodename'] }.yaml
% for role in pillar.get('roles', []):
roles/%{ role }.yaml
% endfor
minions/%{ minion_id }.yaml
while the directory structure could look like:
.. code::
$ tree /path/to/makostack/
/path/to/makostack/
├── config.cfg
├── core.yaml
├── osarchs/
│ ├── amd64.yaml
│ └── armhf.yaml
├── oscodenames/
│ ├── wheezy.yaml
│ └── jessie.yaml
├── roles/
│ ├── web.yaml
│ └── db.yaml
└── minions/
├── test-1-dev.yaml
└── test-2-dev.yaml
Overall process
---------------
In the above configuration, given the test-1-dev minion is an amd64 platform running Debian Jessie
and that pillar ``roles`` is ``["db"]``, the following ``yaml`` files would be merged in order:
- ``core.yml``
- ``osarchs/amd64.yml``
- ``oscodenames/jessie.yml``
- ``roles/db.yml``
- ``minions/test-1-dev.yml``
Before merging, every files above will be preprocessed as mako templates. The following variables
are available in mako templating of ``yaml`` files:
- ``stack``: the MakoStack pillar data object under construction (e.g. data from any and all
previous ``yaml`` files in MakoStack configuration loaded so far).
- ``pillar``: the pillar data (as passed by Salt to our ``ext_pillar`` function)
- ``minion_id``: the minion id ;-)
- ``__opts__``: a dictionary of mostly Salt configuration options
- ``__grains__``: a dictionary of the grains of the minion making this pillar call
- ``__salt__``: a dictionary of Salt module functions, useful so you don't have to duplicate
functions that already exist (note: runs on the master)
So you can use all the power of mako to build your pillar data, and even use other MakoStack values
that have already been parsed and evaluated (from ``yaml`` files earlier in the configuration)
through the ``stack`` variable.
Once a ``yaml`` file is processed by mako, we obtain a Python dict - let's call it ``yml_data``.
This ``yml_data`` dict is then merged into in the main ``stack`` dict (which itself is the already
merged MakoStack pillar data)., based on the declared ``merge-strategy``. By default, MakoStack
will deeply merge ``yml_data`` into ``stack`` (much like the ``recurse`` option for salt's
``pillar_source_merging_strategy``), but 3 other merging strategies (see next section) are also
available, on a per-object basis, to give you full control over the rendered data.
Once all ``yaml`` files have been processed, the ``stack`` dict will contain MakoStack's complete
pillar data. At this point the MakoStack ``ext_pillar`` returns the ``stack`` dict to Salt, which
then merges it in with any other pillars, finally returning the whole pillar to the minion.
Merging strategies
------------------
The way the data from a new ``yaml_data`` dict is merged with the existing ``stack`` data can be
controlled by specifying a merging strategy. Available strategies are:
- ``merge-last`` (the default)
- ``merge-first``
- ``remove``
- ``overwrite``
Note that scalar values like strings, integers, booleans, etc. (`leaf nodes` in yaml parlance) are
always (necessarily) evaluated using ``overwrite`` (other strategies don't make sense in that case).
The merging strategy can be set by including a dict in the form of:
.. code:: yaml
__: <merging strategy>
as the first item of the dict or list. This allows fine grained control over the merging process.
``merge-last`` (default) strategy
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the ``merge-last`` strategy is selected (the default), then content of dict or list variables is
merged recursively with previous definitions of this variable (similarly to the ``recurse`` salt
``pillar_source_merging_strategy``). This allows for extending previously defined data.
``merge-first`` strategy
~~~~~~~~~~~~~~~~~~~~~~~~
If the ``merge-first`` strategy is selected, then the content of dict or list variables are swapped
between the ``yaml_data`` and ``stack`` objects before being merged recursively with the previous
``merge-last`` strategy. This allows for e.g. prepending to list items and such, and keeping
previously defined dictionary keys (to prevent overwriting "default values" for instance).
``remove`` strategy
~~~~~~~~~~~~~~~~~~~
If the ``remove`` strategy is selected, then content of dict or list variables in ``stack`` are
removed only if the corresponding item is present in the ``yaml_data`` dict. This allows for
removing items entirely from previously defined data without replacing them with something else.
``overwrite`` strategy
~~~~~~~~~~~~~~~~~~~~~~
If the ``overwrite`` strategy is selected, then the content of dict or list variables in ``stack``
is overwritten by the content of ``yaml_data`` dict. This allows one to overwrite variables from
previous definitions.
Merging examples
----------------
Let's go through small examples that should clarify what's going on when a ``yaml_data`` dict is
merged in the ``stack`` dict.
When you don't specify any strategy, the default ``merge-last`` strategy is selected:
+----------------------+-----------------------+-------------------------+
| ``stack`` | ``yaml_data`` | ``stack`` (after merge) |
+======================+=======================+=========================+
| .. code:: yaml | .. code:: yaml | .. code:: yaml |
| | | |
| users: | users: | users: |
| tom: | tom: | tom: |
| uid: 500 | uid: 1000 | uid: 1000 |
| roles: | roles: | roles: |
| - sysadmin | - developer | - sysadmin |
| root: | mat: | - developer |
| uid: 0 | uid: 1001 | mat: |
| | | uid: 1001 |
| | | root: |
| | | uid: 0 |
+----------------------+-----------------------+-------------------------+
Then you can select a custom merging strategy using the ``__`` key in a dict:
+----------------------+-----------------------+-------------------------+
| ``stack`` | ``yaml_data`` | ``stack`` (after merge) |
+======================+=======================+=========================+
| .. code:: yaml | .. code:: yaml | .. code:: yaml |
| | | |
| users: | users: | users: |
| tom: | __: merge-last | tom: |
| uid: 500 | tom: | uid: 1000 |
| roles: | uid: 1000 | roles: |
| - sysadmin | roles: | - sysadmin |
| root: | - developer | - developer |
| uid: 0 | mat: | mat: |
| | uid: 1001 | uid: 1001 |
| | | root: |
| | | uid: 0 |
+----------------------+-----------------------+-------------------------+
| .. code:: yaml | .. code:: yaml | .. code:: yaml |
| | | |
| users: | users: | users: |
| tom: | __: merge-first | tom: |
| uid: 500 | tom: | uid: 500 |
| roles: | uid: 1000 | roles: |
| - sysadmin | roles: | - developer |
| root: | - developer | - sysadmin |
| uid: 0 | mat: | mat: |
| | uid: 1001 | uid: 1001 |
| | | root: |
| | | uid: 0 |
+----------------------+-----------------------+-------------------------+
| .. code:: yaml | .. code:: yaml | .. code:: yaml |
| | | |
| users: | users: | users: |
| tom: | __: remove | root: |
| uid: 500 | tom: | uid: 0 |
| roles: | mat: | |
| - sysadmin | | |
| root: | | |
| uid: 0 | | |
+----------------------+-----------------------+-------------------------+
| .. code:: yaml | .. code:: yaml | .. code:: yaml |
| | | |
| users: | users: | users: |
| tom: | __: overwrite | tom: |
| uid: 500 | tom: | uid: 1000 |
| roles: | uid: 1000 | roles: |
| - sysadmin | roles: | - developer |
| root: | - developer | mat: |
| uid: 0 | mat: | uid: 1001 |
| | uid: 1001 | |
+----------------------+-----------------------+-------------------------+
Similarly, list allow a custom merging strategy using a ``__`` item:
+----------------+-------------------------+-------------------------+
| ``stack`` | ``yaml_data`` | ``stack`` (after merge) |
+================+=========================+=========================+
| .. code:: yaml | .. code:: yaml | .. code:: yaml |
| | | |
| users: | users: | users: |
| - tom | - __: merge-last | - tom |
| - root | - mat | - root |
| | | - mat |
+----------------+-------------------------+-------------------------+
| .. code:: yaml | .. code:: yaml | .. code:: yaml |
| | | |
| users: | users: | users: |
| - tom | - __: merge-first | - mat |
| - root | - mat | - tom |
| | | - root |
+----------------+-------------------------+-------------------------+
| .. code:: yaml | .. code:: yaml | .. code:: yaml |
| | | |
| users: | users: | users: |
| - tom | - __: remove | - root |
| - root | - mat | |
| | - tom | |
+----------------+-------------------------+-------------------------+
| .. code:: yaml | .. code:: yaml | .. code:: yaml |
| | | |
| users: | users: | users: |
| - tom | - __: overwrite | - mat |
| - root | - mat | |
+----------------+-------------------------+-------------------------+
Tweaking MakoStack
------------------
Out of the box, MakoStack (following the ``stack`` module it was cribbed from), will more or less
silently pass over template files it cannot load due to mako or yaml parsing errors. This is
convenient, but arguably WRONG, behaviour; but for backwards compatibility, it is maintained as the
default.
If desired, a configuration option may be set via a ``config`` entry under the ``ext_pillar``
definition for MakoStack, as shown in the following snippet:
It's also possible (though not really recommended) to set a ``fail_on_missing_file`` option, which
will cause a compilation error whenever a "potential" file isn't found during processing. This is
largely contrary to the intended usage of MakoStack (which is to let it search for and utilize any
files under a directory tree, quietly loading those it finds, and ignoring any missing), but it
MIGHT be useful to someone, somewhere so I added it...
.. code:: yaml
ext_pillar:
- config:
fail_on_parse_error: True
fail_on_missing_file: False
- makostack: /path/to/stack.cfg
This will cause MakoStack to still ignore non-existent files, but fail on actual parse errors
inside files that do exist. Note that False is the default for both options, so neither need be
provided unless the intention is to set it to True.
"""
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import functools
import logging
import os
# Import Salt libs
import salt.utils.yaml
from salt.exceptions import CommandExecutionError
# Import 3rd-party libs
from salt.ext import six
try:
from mako.lookup import TemplateLookup
from mako import exceptions
HAS_MAKO = True
except ImportError:
HAS_MAKO = False
log = logging.getLogger(__name__)
strategies = ("overwrite", "merge-first", "merge-last", "remove")
__virtualname__ = "makostack"
# Only load this module if the Mako templating library is available
def __virtual__():
    """
    Only expose this module when the Mako templating library is importable.

    Returns the virtual module name on success, False otherwise.
    """
    if HAS_MAKO is True:
        return __virtualname__
    return False
def ext_pillar(minion_id, pillar, *args, **kwargs):
    """Render MakoStack configuration files into an external pillar dict.

    Positional args are stack config file paths (optionally suffixed with
    ``:namespace``); a dict arg containing a ``config`` key supplies
    MakoStack options.  Keyword args use a ``pillar:``/``grains:``/``opts:``
    matcher syntax that maps matched values to further config files.
    """
    import salt.utils.data
    stack = {}
    config = {}
    stack_config_files = []
    # Split positional args into the option dict and the list of cfg files.
    for item in args:
        if isinstance(item, dict) and 'config' in item:
            config = item['config']
        else:
            stack_config_files += [item]
    # Lookup helpers backing the "<kind>:<path>" matcher syntax in kwargs.
    traverse = {
        'pillar': functools.partial(salt.utils.data.traverse_dict_and_list, pillar),
        'grains': functools.partial(salt.utils.data.traverse_dict_and_list, __grains__),
        'opts': functools.partial(salt.utils.data.traverse_dict_and_list, __opts__)
    }
    for matcher, matchs in six.iteritems(kwargs):
        t, matcher = matcher.split(":", 1)
        if t not in traverse:
            raise Exception('Unknown traverse option `{}`, should be one of '
                            '{}'.format(t, traverse.keys()))
        # Select the cfg files keyed by the matched pillar/grain/opt value.
        cfgs = matchs.get(traverse[t](matcher, None), [])
        if not isinstance(cfgs, list):
            cfgs = [cfgs]
        stack_config_files += cfgs
    for cfg in stack_config_files:
        # An optional ":namespace" suffix nests that file's data under the key.
        if ":" in cfg:
            cfg, namespace = cfg.split(":", 1)
        else:
            namespace = None
        if not os.path.isfile(cfg):
            # Missing cfg files are skipped, matching documented behaviour.
            log.warning('Ignoring MakoStack cfg `%s`: file not found', cfg)
            continue
        stack = _process_stack_cfg(cfg, stack, minion_id, pillar, namespace, config)
    return stack
def _process_stack_cfg(cfg, stack, minion_id, pillar, namespace, config):
    """Render one stack cfg (itself a Mako template listing template paths)
    and merge every listed YAML/Mako template into *stack*, in order.

    ``config`` may enable ``fail_on_missing_file`` / ``fail_on_parse_error``;
    otherwise missing or broken templates are logged and skipped.
    """
    basedir, filename = os.path.split(cfg)
    lookup = TemplateLookup(directories=[basedir])
    # The cfg file is rendered first; its output is the list of templates.
    data = lookup.get_template(filename).render(__opts__=__opts__, __salt__=__salt__,
        __grains__=__grains__, minion_id=minion_id, pillar=pillar, stack=stack)
    for line in _parse_top_cfg(data, cfg):
        dirs = [basedir]
        dirs += ['/'] if line.startswith('/') else []  # allow absolute paths too
        lookup = TemplateLookup(directories=dirs)
        try:
            # Each template sees the stack built so far, so later templates
            # can reference values merged by earlier ones.
            p = lookup.get_template(line).render(__opts__=__opts__, __salt__=__salt__,
                __grains__=__grains__, minion_id=minion_id, pillar=pillar, stack=stack)
            obj = salt.utils.yaml.safe_load(p)
            if not isinstance(obj, dict):
                msg = "Can't parse makostack template `{}` as a valid yaml dictionary".format(line)
                log.error(msg)
                raise KeyError(msg)
            if namespace:
                # Nest the parsed data under the namespace path (outermost last).
                for sub in namespace.split(":")[::-1]:
                    obj = {sub: obj}
            stack = _merge_dict(stack, obj)
            log.debug('MakoStack template `%s` parsed', line)
        except exceptions.TopLevelLookupException as err:
            # Template file not found: by default, skip quietly.
            if config.get('fail_on_missing_file'):
                msg = 'MakoStack template `{}` not found - aborting compilation.'.format(line)
                log.error(msg)
                raise CommandExecutionError(msg)
            log.info('MakoStack template `%s` not found.', line)
            continue
        except Exception as err:
            # Catches the above KeyError, and any other parsing errors...
            if config.get('fail_on_parse_error'):
                msg = 'Invalid MakoStack template `{}` - aborting compilation:\n{}'.format(line,
                    exceptions.text_error_template().render())
                log.error(msg)
                raise CommandExecutionError(msg)
            msg = 'Invalid MakoStack template `{}`:\n{}'.format(line,
                exceptions.text_error_template().render())
            log.warning(msg)
            continue
    return stack
def _cleanup(obj):
    """Strip strategy markers in place and return the object.

    Dictionaries lose their ``__`` key and are cleaned recursively through
    their values; a list only loses a *leading* marker dict (its other
    elements are left untouched).  Falsy objects pass through unchanged.
    """
    if not obj:
        return obj
    if isinstance(obj, dict):
        obj.pop("__", None)
        for key, value in six.iteritems(obj):
            obj[key] = _cleanup(value)
    elif isinstance(obj, list) and isinstance(obj[0], dict) and '__' in obj[0]:
        del obj[0]
    return obj
def _merge_dict(stack, obj):
    """Merge dict *obj* into dict *stack* (mutated in place), honouring the
    optional ``__`` strategy key (one of the module-level ``strategies``).

    Returns *stack*, except for the "overwrite" strategy which returns the
    cleaned *obj* wholesale.
    """
    strategy = obj.pop("__", "merge-last")
    if strategy not in strategies:
        raise Exception('Unknown strategy `{}`, should be one of {}'.format(strategy, strategies))
    if strategy == 'overwrite':
        return _cleanup(obj)
    else:
        for k, v in six.iteritems(obj):
            if strategy == "remove":
                stack.pop(k, None)
                continue
            if k in stack:
                if strategy == "merge-first":
                    # merge-first is same as merge-last but the other way round
                    # so let's switch stack[k] and v
                    stack_k = stack[k]
                    stack[k] = _cleanup(v)
                    v = stack_k
                if type(stack[k]) != type(v):
                    # Mismatched types cannot be merged: incoming value wins.
                    log.debug('Force overwrite, types differ: `%s` != `%s`', stack[k], v)
                    stack[k] = _cleanup(v)
                elif isinstance(v, dict):
                    stack[k] = _merge_dict(stack[k], v)
                elif isinstance(v, list):
                    stack[k] = _merge_list(stack[k], v)
                else:
                    stack[k] = v
            else:
                stack[k] = _cleanup(v)
    return stack
def _merge_list(stack, obj):
    """Merge list *obj* into list *stack* according to an optional strategy.

    A leading ``{"__": <strategy>}`` marker dict in *obj* selects the
    strategy (and is consumed); otherwise "merge-last" applies.  Returns a
    new list (or *obj* itself for "overwrite").
    """
    strategy = "merge-last"
    has_marker = bool(obj) and isinstance(obj[0], dict) and "__" in obj[0]
    if has_marker:
        strategy = obj[0]["__"]
        del obj[0]
    if strategy not in strategies:
        raise Exception('Unknown strategy `{}`, should be one of {}'.format(strategy, strategies))
    if strategy == 'overwrite':
        return obj
    if strategy == "remove":
        return [entry for entry in stack if entry not in obj]
    if strategy == "merge-first":
        return obj + stack
    return stack + obj
def _parse_top_cfg(content, filename):
    '''
    Allow top_cfg to be YAML.

    Try to parse *content* as YAML first; if it yields a list, return it.
    On any parse failure (or a non-list result) fall back to treating the
    file as a plain list of template paths, one per line.
    '''
    # BUG FIX: the original docstring opened with three single quotes but
    # "closed" with three double quotes, so the string literal never
    # terminated and the module could not be parsed at all.
    try:
        obj = salt.utils.yaml.safe_load(content)
        if isinstance(obj, list):
            log.debug('MakoStack cfg `%s` parsed as YAML', filename)
            return obj
    except Exception:  # broad on purpose: any YAML error falls back to text
        pass
    log.debug('MakoStack cfg `%s` parsed as plain text', filename)
    return content.splitlines()
| 43.131034
| 100
| 0.514271
|
fa472a3a8d48cb554d26e2b9bb092fa315de9def
| 5,452
|
py
|
Python
|
mgeconvert/caffe_converter/caffe_converter.py
|
xpmemeda/mgeconvert
|
d30e28dffaa9c42cbbefd7a8c41c688f9d2a8acd
|
[
"Apache-2.0"
] | null | null | null |
mgeconvert/caffe_converter/caffe_converter.py
|
xpmemeda/mgeconvert
|
d30e28dffaa9c42cbbefd7a8c41c688f9d2a8acd
|
[
"Apache-2.0"
] | null | null | null |
mgeconvert/caffe_converter/caffe_converter.py
|
xpmemeda/mgeconvert
|
d30e28dffaa9c42cbbefd7a8c41c688f9d2a8acd
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
from google.protobuf import text_format # type: ignore[attr-defined]
from tqdm import tqdm
from ..mge_context import TopologyNetwork, TransformerRule, optimize_for_conversion
from ..mge_context.mge_utils import get_symvar_value
from .caffe_op import MGE2CAFFE
from .caffe_pb import caffe_pb2 as cp # pylint: disable=import-error
class CaffeConverter:
    """Convert a MegEngine ``TopologyNetwork`` into Caffe layers.

    Per-operator conversion is delegated to the ``MGE2CAFFE`` dispatch
    table; converted layers accumulate in ``self.layers`` until
    :meth:`dump` serializes them.
    """

    # Default graph-rewrite rules applied before conversion.
    transformer_options = [
        TransformerRule.FUSE_FOR_LEAKY_RELU,
        TransformerRule.FUSE_FOR_CONV_BIAS,
        TransformerRule.FUSE_FOR_DECONV_BIAS,
        TransformerRule.FUSE_FOR_FULLY_CONNECTED,
    ]

    def __init__(self, toponet, transform_options=None, use_empty_blobs=False):
        # var2blob_map: VarNode -> caffe blob name; _names guards uniqueness.
        self.net = toponet
        self.var2blob_map = {}
        self.layers = []
        self._names = set()
        self._count = 0
        self.use_empty_blobs = use_empty_blobs
        if transform_options is not None:
            self.transformer_options = transform_options
        optimize_for_conversion(self.net, self.transformer_options)

    def dump(self, proto_file, caffe_file=None):
        """Write weights to *caffe_file* (optional) and the weight-free
        network definition to *proto_file*."""
        CaffeNet = cp.NetParameter(layer=self.layers)
        if caffe_file is not None:
            with open(caffe_file, "wb") as f:
                f.write(CaffeNet.SerializeToString())
        # Strip the blobs so the prototxt contains only the architecture.
        for layer in CaffeNet.layer:
            layer.ClearField("blobs")
        with open(proto_file, "w") as f:
            f.write(text_format.MessageToString(CaffeNet))

    @property
    def gen_name(self):
        # NOTE: this property has a side effect -- each access advances
        # the counter to produce a fresh unique "_caffe_<n>" name.
        self._count = self._count + 1
        while "_caffe_{0}".format(self._count) in self._names:
            self._count = self._count + 1
        return "_caffe_{0}".format(self._count)

    def get_blob_name(self, varNode):
        """Return the blob name registered for *varNode*; raise if unset."""
        if varNode not in self.var2blob_map:
            raise KeyError("can not find VarNode {}".format(varNode))
        return self.var2blob_map[varNode]

    def set_blob_name(self, varNode, name=None):
        """Register a blob name for *varNode* (generated when omitted)."""
        assert varNode not in self.var2blob_map, "{} already be set".format(varNode)
        if name is not None:
            assert isinstance(name, str)
            self.var2blob_map[varNode] = name
        else:
            self.var2blob_map[varNode] = self.gen_name
        self._names.add(self.var2blob_map[varNode])
        return self.var2blob_map[varNode]

    def reset_blob_name(self, varNode, name=None):
        """Replace the blob name of an already-registered *varNode*."""
        assert varNode in self.var2blob_map, "{} should be set".format(varNode)
        if name is not None:
            assert isinstance(name, str)
            self.var2blob_map[varNode] = name
        else:
            self.var2blob_map[varNode] = self.gen_name
        self._names.add(self.var2blob_map[varNode])
        return self.var2blob_map[varNode]

    def gen_blob_proto(self, data):
        """Build a BlobProto from a scalar or ndarray-like value; empty
        when ``use_empty_blobs`` was requested."""
        if self.use_empty_blobs:
            return cp.BlobProto()
        if isinstance(data, (int, float)):
            return cp.BlobProto(data=[data])
        else:
            return cp.BlobProto(
                data=data.reshape(-1), shape=cp.BlobShape(dim=data.shape)
            )

    def add_layer(self, layer):
        # Accepts a single layer or a list of layers.
        if isinstance(layer, list):
            for x in layer:
                self.layers.append(x)
        else:
            self.layers.append(layer)

    def convert(self):
        """Run conversion over every operator in topological order."""
        # Fail fast if any operator type has no MGE2CAFFE handler.
        unsupported_oprs = []
        for opr in self.net.all_oprs:
            if not isinstance(opr, tuple(MGE2CAFFE.keys())):
                unsupported_oprs.append(opr)
                continue
        unsupported_oprs = set(map(type, unsupported_oprs))
        assert not unsupported_oprs, "Operators {} are not supported yet".format(
            unsupported_oprs
        )

        def need_convert(opr):
            # Fully-constant operators can be folded instead of converted;
            # input-less operators are always converted.
            is_const = [data.np_data is not None for data in opr.inp_vars]
            return not all(is_const) or len(opr.inp_vars) == 0

        # Drop operators flagged as skipped (iterate backwards so deletion
        # by index stays valid).
        all_oprs = list(self.net.all_oprs)
        for index in range(len(all_oprs) - 1, -1, -1):
            if all_oprs[index].skip:
                del all_oprs[index]
        for opr in tqdm(all_oprs):
            if not need_convert(opr):
                # Constant-fold: materialize output values instead of layers.
                for tensor in opr.out_vars:
                    if tensor.np_data is None:
                        tensor.np_data = get_symvar_value(tensor._var)
                continue
            MGE2CAFFE[type(opr)](opr, self)
def convert_to_caffe(
    mge_fpath, prototxt="out.prototxt", caffemodel="out.caffemodel", outspec=None
):
    """
    Convert megengine model to Caffe,
    and save caffe model to `prototxt` and `caffemodel`.

    :param mge_fpath: the file path of megengine model.
    :type mge_fpath: str
    :param prototxt: the filename used for saved model definition.
    :type prototxt: str
    :param caffemodel: the filename used for saved model weights.
    :type caffemodel: str
    :param outspec: optional output-var specification forwarded to
        ``TopologyNetwork``; ``None`` keeps the model's own outputs.
    """
    # Validate every argument up front so a bad output filename fails
    # before the (potentially slow) conversion runs, not after it.
    assert isinstance(mge_fpath, str), "mge_fpath must be string"
    assert isinstance(prototxt, str) and isinstance(
        caffemodel, str
    ), "'prototxt' and 'caffemodel' must be string"
    net = TopologyNetwork(mge_fpath, outspec=outspec)
    converter = CaffeConverter(net)
    converter.convert()
    converter.dump(prototxt, caffemodel)
| 35.868421
| 88
| 0.642517
|
89ab8dc3153da87d3f4c542fe54d365a13eae083
| 1,128
|
py
|
Python
|
src/sentry/search/base.py
|
faulkner/sentry
|
f9dd4d0d7c683632cf02810c03bd42d7051ad010
|
[
"BSD-3-Clause"
] | null | null | null |
src/sentry/search/base.py
|
faulkner/sentry
|
f9dd4d0d7c683632cf02810c03bd42d7051ad010
|
[
"BSD-3-Clause"
] | null | null | null |
src/sentry/search/base.py
|
faulkner/sentry
|
f9dd4d0d7c683632cf02810c03bd42d7051ad010
|
[
"BSD-3-Clause"
] | null | null | null |
"""
sentry.search.base
~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
ANY = object()  # sentinel search value -- presumably "match anything"; not used in this chunk
EMPTY = object()  # sentinel search value -- presumably "match empty"; not used in this chunk
class SearchBackend(object):
    """Abstract base class that concrete search backends extend."""

    def __init__(self, **options):
        # Backend-specific options are accepted but ignored by the base class.
        pass

    def validate(self):
        """Check this backend's configuration (e.g. connection settings).

        Implementations should raise ``InvalidConfiguration`` on a bad
        configuration; the base implementation accepts anything.
        """

    def query(self, project, query=None, status=None, tags=None,
              bookmarked_by=None, assigned_to=None, first_release=None,
              sort_by='date', age_from=None, age_to=None,
              unassigned=None, date_from=None, date_to=None,
              cursor=None, limit=100):
        """Run a search and return a ``CursorResult``.

        ``limit`` is a soft input limit that the Cursor trims afterwards,
        so implementations should fetch ``limit + 2`` rows and hand those
        to the ``CursorResult``.
        """
        raise NotImplementedError
| 28.2
| 80
| 0.630319
|
52ca733654f73287b45ba2ffb3079a8db8ac4451
| 23,129
|
py
|
Python
|
model/sketch.nyu/network.py
|
keremyldrr/TorchSSC
|
2fd21ad25af92cd9f9ad28de3c4bc897c0ae8b43
|
[
"MIT"
] | 22
|
2019-12-23T05:32:51.000Z
|
2022-03-02T08:15:56.000Z
|
model/sketch.nyu/network.py
|
keremyldrr/TorchSSC
|
2fd21ad25af92cd9f9ad28de3c4bc897c0ae8b43
|
[
"MIT"
] | 13
|
2019-12-23T05:33:17.000Z
|
2022-01-29T21:38:06.000Z
|
model/sketch.nyu/network.py
|
keremyldrr/TorchSSC
|
2fd21ad25af92cd9f9ad28de3c4bc897c0ae8b43
|
[
"MIT"
] | 9
|
2020-08-01T02:58:40.000Z
|
2022-02-23T07:58:52.000Z
|
# encoding: utf-8
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from functools import partial
from collections import OrderedDict
from config import config
from resnet import get_resnet50
class SimpleRB(nn.Module):
    """Plain 3D residual block: two 3x3x3 convolutions plus a skip connection."""

    def __init__(self, in_channel, norm_layer, bn_momentum):
        super(SimpleRB, self).__init__()
        # conv -> norm -> ReLU -> conv -> norm; channel count is preserved,
        # so the input can be added straight back onto the output.
        layers = [
            nn.Conv3d(in_channel, in_channel, kernel_size=3, padding=1, bias=False),
            norm_layer(in_channel, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(in_channel, in_channel, kernel_size=3, padding=1, bias=False),
            norm_layer(in_channel, momentum=bn_momentum),
        ]
        self.path = nn.Sequential(*layers)
        self.relu = nn.ReLU()

    def forward(self, x):
        # Residual sum followed by the final activation.
        return self.relu(x + self.path(x))
'''
3D Residual Block: a 3x3x3 conv factorised into 3 smaller 3D convs, referred from DDRNet
'''
class Bottleneck3D(nn.Module):
def __init__(self, inplanes, planes, norm_layer, stride=1, dilation=[1, 1, 1], expansion=4, downsample=None,
fist_dilation=1, multi_grid=1,
bn_momentum=0.0003):
super(Bottleneck3D, self).__init__()
# often,planes = inplanes // 4
self.expansion = expansion
self.conv1 = nn.Conv3d(inplanes, planes, kernel_size=1, bias=False)
self.bn1 = norm_layer(planes, momentum=bn_momentum)
self.conv2 = nn.Conv3d(planes, planes, kernel_size=(1, 1, 3), stride=(1, 1, stride),
dilation=(1, 1, dilation[0]), padding=(0, 0, dilation[0]), bias=False)
self.bn2 = norm_layer(planes, momentum=bn_momentum)
self.conv3 = nn.Conv3d(planes, planes, kernel_size=(1, 3, 1), stride=(1, stride, 1),
dilation=(1, dilation[1], 1), padding=(0, dilation[1], 0), bias=False)
self.bn3 = norm_layer(planes, momentum=bn_momentum)
self.conv4 = nn.Conv3d(planes, planes, kernel_size=(3, 1, 1), stride=(stride, 1, 1),
dilation=(dilation[2], 1, 1), padding=(dilation[2], 0, 0), bias=False)
self.bn4 = norm_layer(planes, momentum=bn_momentum)
self.conv5 = nn.Conv3d(planes, planes * self.expansion, kernel_size=(1, 1, 1), bias=False)
self.bn5 = norm_layer(planes * self.expansion, momentum=bn_momentum)
self.relu = nn.ReLU(inplace=False)
self.relu_inplace = nn.ReLU(inplace=True)
self.downsample = downsample
self.dilation = dilation
self.stride = stride
self.downsample2 = nn.Sequential(
nn.AvgPool3d(kernel_size=(1, stride, 1), stride=(1, stride, 1)),
nn.Conv3d(planes, planes, kernel_size=1, stride=1, bias=False),
norm_layer(planes, momentum=bn_momentum),
)
self.downsample3 = nn.Sequential(
nn.AvgPool3d(kernel_size=(stride, 1, 1), stride=(stride, 1, 1)),
nn.Conv3d(planes, planes, kernel_size=1, stride=1, bias=False),
norm_layer(planes, momentum=bn_momentum),
)
self.downsample4 = nn.Sequential(
nn.AvgPool3d(kernel_size=(stride, 1, 1), stride=(stride, 1, 1)),
nn.Conv3d(planes, planes, kernel_size=1, stride=1, bias=False),
norm_layer(planes, momentum=bn_momentum),
)
def forward(self, x):
residual = x
out1 = self.relu(self.bn1(self.conv1(x)))
out2 = self.bn2(self.conv2(out1))
out2_relu = self.relu(out2)
out3 = self.bn3(self.conv3(out2_relu))
if self.stride != 1:
out2 = self.downsample2(out2)
out3 = out3 + out2
out3_relu = self.relu(out3)
out4 = self.bn4(self.conv4(out3_relu))
if self.stride != 1:
out2 = self.downsample3(out2)
out3 = self.downsample4(out3)
out4 = out4 + out2 + out3
out4_relu = self.relu(out4)
out5 = self.bn5(self.conv5(out4_relu))
if self.downsample is not None:
residual = self.downsample(x)
out = out5 + residual
out_relu = self.relu(out)
return out_relu
'''
Input: 60*36*60 sketch
Latent code: 15*9*15
'''
class CVAE(nn.Module):
    """Conditional VAE over binary sketch volumes.

    Encoder downsamples the (sketch, gt) pair by 4x to a latent grid;
    the decoder upsamples (decoder_x features + latent sample) back to
    full resolution with 2 output channels.  The number of latent samples
    averaged per forward pass comes from ``config.samples``.
    """

    def __init__(self, norm_layer, bn_momentum, latent_size=16):
        super(CVAE, self).__init__()
        self.latent_size = latent_size
        # Recognition network q(z | x, gt): input is sketch + gt (2 channels).
        self.encoder = nn.Sequential(
            nn.Conv3d(2, 3, kernel_size=3, padding=1, bias=False),
            norm_layer(3, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(3, 16, kernel_size=3, padding=1, bias=False),
            norm_layer(16, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(16, 16, kernel_size=3, padding=1, bias=False),
            norm_layer(16, momentum=bn_momentum),
            nn.ReLU(),
            nn.AvgPool3d(kernel_size=2, stride=2),
            nn.Conv3d(16, self.latent_size, kernel_size=3, padding=1, bias=False),
            norm_layer(self.latent_size, momentum=bn_momentum),
            nn.ReLU(),
            nn.AvgPool3d(kernel_size=2, stride=2),
            nn.Conv3d(self.latent_size, self.latent_size, kernel_size=3, padding=1, bias=False),
            norm_layer(self.latent_size, momentum=bn_momentum),
            nn.ReLU(),
        )
        self.mean = nn.Conv3d(self.latent_size, self.latent_size, kernel_size=1, bias=True)  # predict mean.
        self.log_var = nn.Conv3d(self.latent_size, self.latent_size, kernel_size=1, bias=True)  # predict log(var).
        # Condition network: encodes the sketch alone to the latent grid.
        self.decoder_x = nn.Sequential(
            nn.Conv3d(1, 3, kernel_size=3, padding=1, bias=False),
            norm_layer(3, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(3, 16, kernel_size=3, padding=1, bias=False),
            norm_layer(16, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(16, 16, kernel_size=3, padding=1, bias=False),
            norm_layer(16, momentum=bn_momentum),
            nn.ReLU(),
            nn.AvgPool3d(kernel_size=2, stride=2),
            nn.Conv3d(16, self.latent_size, kernel_size=3, padding=1, bias=False),
            norm_layer(self.latent_size, momentum=bn_momentum),
            nn.ReLU(),
            nn.AvgPool3d(kernel_size=2, stride=2),
            nn.Conv3d(self.latent_size, self.latent_size, kernel_size=3, padding=1, bias=False),
            norm_layer(self.latent_size, momentum=bn_momentum),
            nn.ReLU(),
        )
        # Decoder: condition features concatenated with a latent sample.
        self.decoder = nn.Sequential(
            nn.ConvTranspose3d(self.latent_size*2, self.latent_size, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1),
            norm_layer(self.latent_size, momentum=bn_momentum),
            nn.ReLU(inplace=False),
            nn.ConvTranspose3d(self.latent_size, self.latent_size, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1),
            norm_layer(self.latent_size, momentum=bn_momentum),
            nn.ReLU(inplace=False),
            nn.Dropout3d(0.1),
            nn.Conv3d(self.latent_size, 2, kernel_size=1, bias=True)
        )

    def forward(self, x, gt=None):
        # x: (b, 1, h, w, l) binary sketch volume; gt required in training.
        b, c, h, w, l = x.shape
        if self.training:
            gt = gt.view(b, 1, h, w, l).float()
            for_encoder = torch.cat([x, gt], dim=1)
            enc = self.encoder(for_encoder)
            pred_mean = self.mean(enc)
            pred_log_var = self.log_var(enc)
            decoder_x = self.decoder_x(x)
            out_samples = []
            out_samples_gsnn = []
            for i in range(config.samples):
                # z1: reparameterised posterior sample; z2: prior sample (GSNN).
                std = pred_log_var.mul(0.5).exp_()
                eps = torch.randn([b, self.latent_size, h // 4, w // 4, l // 4]).cuda()
                z1 = eps * std + pred_mean
                z2 = torch.randn([b, self.latent_size, h // 4, w // 4, l // 4]).cuda()
                sketch = self.decoder(torch.cat([decoder_x, z1], dim=1))
                out_samples.append(sketch)
                sketch_gsnn = self.decoder(torch.cat([decoder_x, z2], dim=1))
                out_samples_gsnn.append(sketch_gsnn)
            # Average the decoded samples over the sample dimension.
            sketch = torch.cat([torch.unsqueeze(out_sample, dim=0) for out_sample in out_samples])
            sketch = torch.mean(sketch, dim=0)
            sketch_gsnn = torch.cat([torch.unsqueeze(out_sample, dim=0) for out_sample in out_samples_gsnn])
            sketch_gsnn = torch.mean(sketch_gsnn, dim=0)
            return pred_mean, pred_log_var, sketch_gsnn, sketch
        else:
            # Inference: sample purely from the prior; no encoder pass.
            out_samples = []
            for i in range(config.samples):
                z = torch.randn([b, self.latent_size, h // 4, w // 4, l // 4]).cuda()
                decoder_x = self.decoder_x(x)
                out = self.decoder(torch.cat([decoder_x, z], dim=1))
                out_samples.append(out)
            sketch_gsnn = torch.cat([torch.unsqueeze(out_sample, dim=0) for out_sample in out_samples])
            sketch_gsnn = torch.mean(sketch_gsnn, dim=0)
            return None, None, sketch_gsnn, None
class STAGE1(nn.Module):
    """Sketch-prediction stage: TSDF volume -> raw sketch logits + CVAE refinement."""

    def __init__(self, class_num, norm_layer, resnet_out=2048, feature=512, ThreeDinit=True,
                 bn_momentum=0.1, pretrained_model=None, eval=False, freeze_bn=False):
        super(STAGE1, self).__init__()
        # Layers registered here are collected by the trainer via business_layer.
        self.business_layer = []
        # Lift the 1-channel TSDF input to `feature` channels.
        self.oper1 = nn.Sequential(
            nn.Conv3d(1, 3, kernel_size=3, padding=1, bias=False),
            norm_layer(3, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(3, 64, kernel_size=3, padding=1, bias=False),
            norm_layer(64, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(64, feature, kernel_size=3, padding=1, bias=False),
            norm_layer(feature, momentum=bn_momentum),
            nn.ReLU(inplace=False),
        )
        self.business_layer.append(self.oper1)
        # First strided bottleneck stack (downsamples by 2, keeps `feature`).
        self.completion_layer1 = nn.Sequential(
            Bottleneck3D(feature, feature // 4, bn_momentum=bn_momentum, expansion=4, stride=2, downsample=
            nn.Sequential(
                nn.AvgPool3d(kernel_size=2, stride=2),
                nn.Conv3d(feature, feature,
                          kernel_size=1, stride=1, bias=False),
                norm_layer(feature, momentum=bn_momentum),
                # nn.ReLU(),
            ), norm_layer=norm_layer),  # feature --> feature*2
            Bottleneck3D(feature, feature // 4, bn_momentum=bn_momentum, norm_layer=norm_layer, dilation=[1, 1, 1]),
            Bottleneck3D(feature, feature // 4, bn_momentum=bn_momentum, norm_layer=norm_layer, dilation=[2, 2, 2]),
            Bottleneck3D(feature, feature // 4, bn_momentum=bn_momentum, norm_layer=norm_layer, dilation=[3, 3, 3]),
        )
        self.business_layer.append(self.completion_layer1)
        # Second strided stack (downsamples by 2 again, doubles channels).
        self.completion_layer2 = nn.Sequential(
            Bottleneck3D(feature, feature // 4, bn_momentum=bn_momentum, expansion=8, stride=2, downsample=
            nn.Sequential(
                nn.AvgPool3d(kernel_size=2, stride=2),
                nn.Conv3d(feature, feature * 2,
                          kernel_size=1, stride=1, bias=False),
                norm_layer(feature * 2, momentum=bn_momentum),
                # nn.ReLU(),
            ), norm_layer=norm_layer),
            Bottleneck3D(feature * 2, feature // 2, bn_momentum=bn_momentum, norm_layer=norm_layer, dilation=[1, 1, 1]),
            Bottleneck3D(feature * 2, feature // 2, bn_momentum=bn_momentum, norm_layer=norm_layer, dilation=[2, 2, 2]),
            Bottleneck3D(feature * 2, feature // 2, bn_momentum=bn_momentum, norm_layer=norm_layer, dilation=[3, 3, 3]),
        )
        self.business_layer.append(self.completion_layer2)
        # NOTE(review): `config.lantent_size` (sic) is read from the project config.
        self.cvae = CVAE(norm_layer=norm_layer, bn_momentum=bn_momentum, latent_size=config.lantent_size)
        self.business_layer.append(self.cvae)
        # Decoder head: two transposed convs back to input resolution, then
        # a 2-class (sketch / no-sketch) classifier.
        self.classify_sketch = nn.ModuleList([
            nn.Sequential(
                nn.ConvTranspose3d(feature * 2, feature, kernel_size=3, stride=2, padding=1, dilation=1,
                                   output_padding=1),
                norm_layer(feature, momentum=bn_momentum),
                nn.ReLU(inplace=False),
            ),
            nn.Sequential(
                nn.ConvTranspose3d(feature, feature, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1),
                norm_layer(feature, momentum=bn_momentum),
                nn.ReLU(inplace=False),
            ),
            nn.Sequential(
                nn.Dropout3d(.1),
                nn.Conv3d(feature, 2, kernel_size=1, bias=True)
            )]
        )
        self.business_layer.append(self.classify_sketch)

    def forward(self, tsdf, depth_mapping_3d, sketch_gt=None):
        '''
        extract 3D feature
        '''
        tsdf = self.oper1(tsdf)
        completion1 = self.completion_layer1(tsdf)
        completion2 = self.completion_layer2(completion1)
        # Decode with a skip connection from completion1.
        up_sketch1 = self.classify_sketch[0](completion2)
        up_sketch1 = up_sketch1 + completion1
        up_sketch2 = self.classify_sketch[1](up_sketch1)
        pred_sketch_raw = self.classify_sketch[2](up_sketch2)
        # argmax over the 2 classes yields the binary sketch volume.
        _, pred_sketch_binary = torch.max(pred_sketch_raw, dim=1, keepdim=True)  # (b, 1, 60, 36, 60) binary-voxel sketch
        pred_mean, pred_log_var, pred_sketch_gsnn, pred_sketch = self.cvae(pred_sketch_binary.float(), sketch_gt)
        return pred_sketch_raw, pred_sketch_gsnn, pred_sketch, pred_mean, pred_log_var
class STAGE2(nn.Module):
    """Semantic stage: 2D backbone features projected into the 60x36x60
    voxel grid, fused with stage-1 sketch features, then classified."""

    def __init__(self, class_num, norm_layer, resnet_out=2048, feature=512, ThreeDinit=True,
                 bn_momentum=0.1, pretrained_model=None, eval=False, freeze_bn=False):
        super(STAGE2, self).__init__()
        self.business_layer = []
        # 1x1 conv reducing backbone channels; eval mode uses plain
        # BatchNorm2d instead of the (presumably distributed) norm_layer.
        if eval:
            self.downsample = nn.Sequential(
                nn.Conv2d(resnet_out, feature, kernel_size=1, bias=False),
                nn.BatchNorm2d(feature, momentum=bn_momentum),
                nn.ReLU()
            )
        else:
            self.downsample = nn.Sequential(
                nn.Conv2d(resnet_out, feature, kernel_size=1, bias=False),
                norm_layer(feature, momentum=bn_momentum),
                nn.ReLU()
            )
        self.business_layer.append(self.downsample)
        self.resnet_out = resnet_out
        self.feature = feature
        self.ThreeDinit = ThreeDinit
        # Neighborhood-average pooling used to fill unprojected voxels.
        self.pooling = nn.AvgPool3d(kernel_size=3, padding=1, stride=1)
        self.business_layer.append(self.pooling)
        self.semantic_layer1 = nn.Sequential(
            Bottleneck3D(feature, feature // 4, bn_momentum=bn_momentum, expansion=4, stride=2, downsample=
            nn.Sequential(
                nn.AvgPool3d(kernel_size=2, stride=2),
                nn.Conv3d(feature, feature,
                          kernel_size=1, stride=1, bias=False),
                norm_layer(feature, momentum=bn_momentum),
            ), norm_layer=norm_layer),  # feature --> feature*2
            Bottleneck3D(feature, feature // 4, bn_momentum=bn_momentum, norm_layer=norm_layer, dilation=[1, 1, 1]),
            Bottleneck3D(feature, feature // 4, bn_momentum=bn_momentum, norm_layer=norm_layer, dilation=[2, 2, 2]),
            Bottleneck3D(feature, feature // 4, bn_momentum=bn_momentum, norm_layer=norm_layer, dilation=[3, 3, 3]),
        )
        self.business_layer.append(self.semantic_layer1)
        self.semantic_layer2 = nn.Sequential(
            Bottleneck3D(feature, feature // 4, bn_momentum=bn_momentum, expansion=8, stride=2, downsample=
            nn.Sequential(
                nn.AvgPool3d(kernel_size=2, stride=2),
                nn.Conv3d(feature, feature * 2,
                          kernel_size=1, stride=1, bias=False),
                norm_layer(feature * 2, momentum=bn_momentum),
            ), norm_layer=norm_layer),
            Bottleneck3D(feature * 2, feature // 2, bn_momentum=bn_momentum, norm_layer=norm_layer, dilation=[1, 1, 1]),
            Bottleneck3D(feature * 2, feature // 2, bn_momentum=bn_momentum, norm_layer=norm_layer, dilation=[2, 2, 2]),
            Bottleneck3D(feature * 2, feature // 2, bn_momentum=bn_momentum, norm_layer=norm_layer, dilation=[3, 3, 3]),
        )
        self.business_layer.append(self.semantic_layer2)
        # Decoder head: upsample twice, then classify into `class_num` labels.
        self.classify_semantic = nn.ModuleList([
            nn.Sequential(
                nn.ConvTranspose3d(feature * 2, feature, kernel_size=3, stride=2, padding=1, dilation=1,
                                   output_padding=1),
                norm_layer(feature, momentum=bn_momentum),
                nn.ReLU(inplace=False),
            ),
            nn.Sequential(
                nn.ConvTranspose3d(feature, feature, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1),
                norm_layer(feature, momentum=bn_momentum),
                nn.ReLU(inplace=False),
            ),
            nn.Sequential(
                nn.Dropout3d(.1),
                nn.Conv3d(feature, class_num, kernel_size=1, bias=True)
            )]
        )
        self.business_layer.append(self.classify_semantic)
        # Separate embeddings for the raw sketch logits and the CVAE sketch.
        self.oper_sketch = nn.Sequential(
            nn.Conv3d(2, 3, kernel_size=3, padding=1, bias=False),
            norm_layer(3, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(3, 64, kernel_size=3, padding=1, bias=False),
            norm_layer(64, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(64, feature, kernel_size=3, padding=1, bias=False),
            norm_layer(feature, momentum=bn_momentum),
            nn.ReLU(inplace=False),
        )
        self.oper_sketch_cvae = nn.Sequential(
            nn.Conv3d(2, 3, kernel_size=3, padding=1, bias=False),
            norm_layer(3, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(3, 64, kernel_size=3, padding=1, bias=False),
            norm_layer(64, momentum=bn_momentum),
            nn.ReLU(),
            nn.Conv3d(64, feature, kernel_size=3, padding=1, bias=False),
            norm_layer(feature, momentum=bn_momentum),
            nn.ReLU(inplace=False),
        )
        self.business_layer.append(self.oper_sketch)
        self.business_layer.append(self.oper_sketch_cvae)

    def forward(self, feature2d, depth_mapping_3d, pred_sketch_raw, pred_sketch_gsnn):
        # reduce the channel of 2D feature map
        if self.resnet_out != self.feature:
            feature2d = self.downsample(feature2d)
        feature2d = F.interpolate(feature2d, scale_factor=16, mode='bilinear', align_corners=True)
        '''
        project 2D feature to 3D space
        '''
        b, c, h, w = feature2d.shape
        feature2d = feature2d.view(b, c, h * w).permute(0, 2, 1)  # b x h*w x c
        zerosVec = torch.zeros(b, 1, c).cuda()  # for voxels that could not be projected from the depth map, we assign them zero vector
        segVec = torch.cat((feature2d, zerosVec), 1)
        # depth_mapping_3d indexes each voxel to a pixel (or to the zero row).
        segres = [torch.index_select(segVec[i], 0, depth_mapping_3d[i]) for i in range(b)]
        segres = torch.stack(segres).permute(0, 2, 1).contiguous().view(b, c, 60, 36, 60)  # B, (channel), 60, 36, 60
        '''
        init the 3D feature
        '''
        if self.ThreeDinit:
            # Fill zero (unprojected) voxels with the local average of their
            # neighborhood; projected voxels are left untouched.
            pool = self.pooling(segres)
            zero = (segres == 0).float()
            pool = pool * zero
            segres = segres + pool
        '''
        extract 3D feature
        '''
        sketch_proi = self.oper_sketch(pred_sketch_raw)
        sketch_proi_gsnn = self.oper_sketch_cvae(pred_sketch_gsnn)
        seg_fea = segres + sketch_proi + sketch_proi_gsnn
        semantic1 = self.semantic_layer1(seg_fea)
        semantic2 = self.semantic_layer2(semantic1)
        # Decode with a skip connection from semantic1.
        up_sem1 = self.classify_semantic[0](semantic2)
        up_sem1 = up_sem1 + semantic1
        up_sem2 = self.classify_semantic[1](up_sem1)
        pred_semantic = self.classify_semantic[2](up_sem2)
        return pred_semantic, None
'''
main network
'''
class Network(nn.Module):
    """Full SSC network: 2D ResNet-50 backbone + STAGE1 (sketch) + STAGE2 (semantics)."""

    def __init__(self, class_num, norm_layer, resnet_out=2048, feature=512, ThreeDinit=True,
                 bn_momentum=0.1, pretrained_model=None, eval=False, freeze_bn=False):
        super(Network, self).__init__()
        self.business_layer = []
        # eval mode swaps the training norm layer for plain BatchNorm2d.
        if eval:
            self.backbone = get_resnet50(num_classes=19, dilation=[1, 1, 1, 2], bn_momentum=config.bn_momentum,
                                         is_fpn=False,
                                         BatchNorm2d=nn.BatchNorm2d)
        else:
            self.backbone = get_resnet50(num_classes=19, dilation=[1, 1, 1, 2], bn_momentum=config.bn_momentum,
                                         is_fpn=False,
                                         BatchNorm2d=norm_layer)
        # Replace layer4 strides with growing dilations to keep resolution.
        self.dilate = 2
        for m in self.backbone.layer4.children():
            m.apply(partial(self._nostride_dilate, dilate=self.dilate))
            self.dilate *= 2
        self.stage1 = STAGE1(class_num, norm_layer, resnet_out=resnet_out, feature=feature, ThreeDinit=ThreeDinit,
                             bn_momentum=bn_momentum, pretrained_model=pretrained_model, eval=eval, freeze_bn=freeze_bn)
        self.business_layer += self.stage1.business_layer
        self.stage2 = STAGE2(class_num, norm_layer, resnet_out=resnet_out, feature=feature, ThreeDinit=ThreeDinit,
                             bn_momentum=bn_momentum, pretrained_model=pretrained_model, eval=eval, freeze_bn=freeze_bn)
        self.business_layer += self.stage2.business_layer

    def forward(self, rgb, depth_mapping_3d, tsdf, sketch_gt=None):
        h, w = rgb.size(2), rgb.size(3)
        feature2d = self.backbone(rgb)
        pred_sketch_raw, pred_sketch_gsnn, pred_sketch, pred_mean, pred_log_var = self.stage1(tsdf, depth_mapping_3d, sketch_gt)
        pred_semantic, _ = self.stage2(feature2d, depth_mapping_3d, pred_sketch_raw,
                                       pred_sketch_gsnn)
        # Training returns extra CVAE outputs needed by the losses.
        if self.training:
            return pred_semantic, _, pred_sketch_raw, pred_sketch_gsnn, pred_sketch, pred_mean, pred_log_var
        return pred_semantic, _, pred_sketch_gsnn

    # @staticmethod
    def _nostride_dilate(self, m, dilate):
        # Convert a strided conv into a dilated, stride-1 conv so the
        # backbone keeps spatial resolution in its last stage.
        if isinstance(m, nn.Conv2d):
            if m.stride == (2, 2):
                m.stride = (1, 1)
                if m.kernel_size == (3, 3):
                    m.dilation = (dilate, dilate)
                    m.padding = (dilate, dilate)
            else:
                if m.kernel_size == (3, 3):
                    m.dilation = (dilate, dilate)
                    m.padding = (dilate, dilate)
if __name__ == '__main__':
    # Smoke test: build the full network and run one forward pass on random
    # inputs.  NOTE(review): the tensors are created with .cuda(), so this
    # requires a CUDA device even though `device` falls back to CPU.
    model = Network(class_num=12, norm_layer=nn.BatchNorm3d, feature=128, eval=True)
    # print(model)
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model = model.to(device)
    model.eval()
    left = torch.rand(1, 3, 480, 640).cuda()
    right = torch.rand(1, 3, 480, 640).cuda()  # NOTE(review): unused below
    # 129600 = 60*36*60 voxel-to-pixel index map; all ones here.
    depth_mapping_3d = torch.from_numpy(np.ones((1, 129600)).astype(np.int64)).long().cuda()
    tsdf = torch.rand(1, 1, 60, 36, 60).cuda()
    out = model(left, depth_mapping_3d, tsdf, None)
| 44.308429
| 135
| 0.598124
|
2e76148d852e70bf1e65e7dd50567c95687a94fd
| 16,856
|
py
|
Python
|
generation/fairseq/data/xdae_denoising_dataset.py
|
koukoulala/Unicoder
|
15fa30d74b700c440565079d03b34beb5b6d2bb9
|
[
"MIT"
] | 67
|
2020-06-03T12:17:08.000Z
|
2022-03-24T08:49:52.000Z
|
generation/fairseq/data/xdae_denoising_dataset.py
|
koukoulala/Unicoder
|
15fa30d74b700c440565079d03b34beb5b6d2bb9
|
[
"MIT"
] | 5
|
2020-06-22T13:22:47.000Z
|
2021-05-31T02:45:25.000Z
|
generation/fairseq/data/xdae_denoising_dataset.py
|
koukoulala/Unicoder
|
15fa30d74b700c440565079d03b34beb5b6d2bb9
|
[
"MIT"
] | 14
|
2020-06-03T12:09:12.000Z
|
2021-11-10T08:23:24.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import numpy as np
import torch
import math
from . import data_utils, FairseqDataset
def collate(
    samples,
    pad_idx,
    bos_idx,
    vocab,
    left_pad_source=False,
    left_pad_target=False,
    input_feeding=True,
):
    """Collate a list of sample dicts into a padded fairseq mini-batch.

    Each sample has 1-D ``source`` (and usually ``target``) token tensors.
    The first source token is dropped, sources are padded and sorted by
    descending length, and (with ``input_feeding``) a shifted copy of the
    targets is provided as ``prev_output_tokens``.
    """
    assert input_feeding
    if len(samples) == 0:
        return {}
    # Drop the leading token of every source (mutates the samples in place).
    for sp in samples:
        sp['source'] = sp['source'][1:]

    def merge(key, left_pad, move_eos_to_beginning=False):
        # Pad the per-sample tensors for `key` into one (batch, max_len) tensor.
        return data_utils.collate_tokens(
            [s[key] for s in samples],
            pad_idx, bos_idx, left_pad, move_eos_to_beginning,
        )

    id = torch.LongTensor([s['id'] for s in samples])
    src_tokens = merge('source', left_pad=left_pad_source)
    # sort by descending source length
    src_lengths = torch.LongTensor([s['source'].numel() for s in samples])
    src_lengths, sort_order = src_lengths.sort(descending=True)
    id = id.index_select(0, sort_order)
    src_tokens = src_tokens.index_select(0, sort_order)
    prev_output_tokens = None
    target = None
    if samples[0].get('target', None) is not None:
        target = merge('target', left_pad=left_pad_target)
        target = target.index_select(0, sort_order)
        ntokens = sum(len(s['target']) for s in samples)
        if input_feeding:
            # we create a shifted version of targets for feeding the
            # previous output token(s) into the next decoder step
            prev_output_tokens = merge(
                'target',
                left_pad=left_pad_target,
                move_eos_to_beginning=False,
            )
            prev_output_tokens = prev_output_tokens.index_select(0, sort_order)
    else:
        ntokens = sum(len(s['source']) for s in samples)
    batch = {
        'id': id,
        'ntokens': ntokens,
        'net_input': {
            'src_tokens': src_tokens,
            'src_lengths': src_lengths,
        },
        # NOTE(review): crashes with TypeError if samples carry no 'target'
        # (target stays None) -- presumably targets are always present here.
        'target': target[:,1:],
        # NOTE(review): this is the token count of the first sample's source
        # (a 1-D tensor), not len(samples) -- confirm against callers before
        # relying on 'nsentences'.
        'nsentences': samples[0]['source'].size(0),
    }
    if prev_output_tokens is not None:
        # Feed targets shifted right by one position to the decoder.
        batch['net_input']['prev_output_tokens'] = prev_output_tokens[:,:-1]
    return batch
class XDAEDenoisingDataset(FairseqDataset):
    """
    A wrapper around TokenBlockDataset for BART-style denoising.

    Applies the configured corruptions (sentence permutation, whole-word
    masking, word shuffle/dropout/blank, insertion noise, rotation) to each
    example's source while keeping the target clean.

    Args:
        dataset (TokenBlockDataset): dataset to wrap
        sizes (List[int]): sentence lengths
        vocab (~fairseq.data.Dictionary): vocabulary
        mask_idx (int): dictionary index used for masked token
        mask_whole_words: only mask whole words. This should be a byte mask
            over vocab indices, indicating whether it is the beginning of a
            word. We will extend any mask to encompass the whole word.
        shuffle (bool, optional): shuffle the elements before batching.
            Default: ``True``
        seed: Seed for random number generator for reproducibility.
        args: argparse arguments.
        eos (int, optional): end-of-sentence index; defaults to ``vocab.eos()``.
        bos (int, optional): index used as the collation BOS.  NOTE(review):
            it defaults to ``vocab.eos()``, not ``vocab.bos()`` — preserved
            as-is; confirm this is intentional.
        no_prepend_bos (bool): if True, examples need not start with BOS.
    """

    def __init__(
        self,
        dataset,
        sizes,
        vocab,
        mask_idx,
        mask_whole_words,
        shuffle,
        seed,
        args,
        eos=None,
        bos=None,
        no_prepend_bos=False
    ):
        self.dataset = dataset
        self.sizes = sizes
        self.vocab = vocab
        self.shuffle = shuffle
        self.seed = seed
        self.mask_idx = mask_idx
        self.mask_whole_word = mask_whole_words
        self.mask_ratio = args.mask
        self.random_ratio = args.mask_random
        self.insert_ratio = args.insert
        self.rotate_ratio = args.rotate
        self.word_shuffle = args.word_shuffle
        self.word_dropout = args.word_dropout
        self.word_blank = args.word_blank
        self.permute_sentence_ratio = args.permute_sentences
        self.eos = (eos if eos is not None else vocab.eos())
        # NOTE(review): falls back to eos, not bos — kept as in the original.
        self.bos_idx = bos if bos is not None else vocab.eos()
        self.no_prepend_bos = no_prepend_bos

        if args.bpe != 'gpt2':
            self.full_stop_index = self.vocab.eos()
        else:
            assert args.bpe == 'gpt2'
            # GPT-2 BPE token id '13' is the full stop '.'.
            self.full_stop_index = self.vocab.index('13')

        self.replace_length = args.replace_length
        # BUGFIX: the original used ``raise (f'...')`` in the three checks
        # below, which raises ``TypeError: exceptions must derive from
        # BaseException`` instead of reporting the invalid argument.
        if self.replace_length not in (-1, 0, 1):
            raise ValueError(f'invalid arg: replace_length={self.replace_length}')
        if args.mask_length not in ('subword', 'word', 'span-poisson'):
            raise ValueError(f'invalid arg: mask-length={args.mask_length}')
        if args.mask_length == 'subword' and args.replace_length not in (0, 1):
            raise ValueError('if using subwords, use replace-length=1 or 0')

        self.mask_span_distribution = None
        if args.mask_length == 'span-poisson':
            # Build a truncated Poisson(lambda) over span lengths 0..127,
            # stopping early once the tail probability is negligible.
            _lambda = args.poisson_lambda

            lambda_to_the_k = 1
            e_to_the_minus_lambda = math.exp(-_lambda)
            k_factorial = 1
            ps = []
            for k in range(0, 128):
                ps.append(e_to_the_minus_lambda * lambda_to_the_k / k_factorial)
                lambda_to_the_k *= _lambda
                k_factorial *= (k + 1)
                if ps[-1] < 0.0000001:
                    break
            ps = torch.FloatTensor(ps)
            self.mask_span_distribution = torch.distributions.Categorical(ps)

        self.epoch = 0

    def set_epoch(self, epoch, **unused):
        self.epoch = epoch

    def __getitem__(self, index):
        # Seed per (seed, epoch, index) so corruption is reproducible.
        with data_utils.numpy_seed(self.seed, self.epoch, index):
            tokens = self.dataset[index]
            assert tokens[-1] == self.eos
            source, target = tokens, tokens.clone()

            # Apply corruptions to the source only, in a fixed order.
            if self.permute_sentence_ratio > 0.0:
                source = self.permute_sentences(source, self.permute_sentence_ratio)

            if self.mask_ratio > 0:
                source = self.add_whole_word_mask(source, self.mask_ratio)

            if self.word_shuffle > 0:
                source = self.add_word_shuffle(source)

            if self.word_dropout > 0:
                source = self.add_word_dropout(source)

            if self.word_blank > 0:
                source = self.add_word_blank(source)

            if self.insert_ratio > 0:
                source = self.add_insertion_noise(source, self.insert_ratio)

            if self.rotate_ratio > 0.0 and np.random.random() < self.rotate_ratio:
                source = self.add_rolling_noise(source)

        # Sanity checks: valid token ids, BOS/EOS preserved.
        assert (source >= 0).all()
        assert (source[1:-1] >= 1).all()
        assert (source <= len(self.vocab)).all()
        assert self.no_prepend_bos or source[0] == self.vocab.bos()
        assert source[-1] == self.eos
        return {
            'id': index,
            'source': source,
            'target': target,
        }

    def __len__(self):
        return len(self.dataset)

    def permute_sentences(self, source, p=1.0):
        """Randomly permute the order of (full-stop delimited) sentences."""
        full_stops = (source == self.full_stop_index)
        # Pretend it ends with a full stop so last span is a sentence
        full_stops[-2] = 1

        # Tokens that are full stops, where the previous token is not
        sentence_ends = (full_stops[1:] * ~full_stops[:-1]).nonzero() + 2
        result = source.clone()

        num_sentences = sentence_ends.size(0)
        num_to_permute = math.ceil((num_sentences * 2 * p) / 2.0)
        substitutions = torch.randperm(num_sentences)[:num_to_permute]
        ordering = torch.arange(0, num_sentences)
        ordering[substitutions] = substitutions[torch.randperm(num_to_permute)]

        # Ignore <bos> at start
        index = 1
        for i in ordering:
            sentence = source[(sentence_ends[i - 1] if i > 0 else 1):sentence_ends[i]]
            result[index:index + sentence.size(0)] = sentence
            index += sentence.size(0)
        return result

    def word_starts(self, source):
        """Return a 0/1 tensor marking word-start positions in `source`."""
        if self.mask_whole_word is not None:
            is_word_start = self.mask_whole_word.gather(0, source)
        else:
            # Without a whole-word mask, every inner token starts a "word".
            is_word_start = torch.ones(source.size())
        is_word_start[0] = 0
        is_word_start[-1] = 0
        return is_word_start

    def add_word_shuffle(self, source):
        """
        Randomly shuffle input words (each word moves at most
        ``word_shuffle`` positions; the last two tokens are left in place).
        """
        # define noise word scores
        assert self.word_shuffle > 1
        L = source.size(0)
        noise = np.random.uniform(0, self.word_shuffle, size=(L - 2,))
        noise[0] = -1  # do not move start sentence symbol
        x2 = source.clone()
        scores = np.arange(L - 2) + noise
        permutation = scores.argsort()
        # shuffle words
        x2[:L - 2].copy_(x2[:L - 2][torch.from_numpy(permutation)])
        return x2

    def add_word_dropout(self, source):
        """
        Randomly drop input words (keeping BOS and the final two tokens,
        and guaranteeing at least one real word survives).
        """
        assert 0 < self.word_dropout < 1
        # define words to drop
        L = source.size(0)
        keep = np.random.rand(L - 2) >= self.word_dropout
        keep[0] = 1  # do not drop the start sentence symbol
        words = source[:L - 2].tolist()
        # randomly drop words from the input
        new_s = [w for j, w in enumerate(words) if keep[j]]
        # we need to have at least one word in the sentence (more than the
        # start / end sentence symbols)
        if len(new_s) == 1:
            new_s.append(words[np.random.randint(1, len(words))])
        new_s += source[-2:].tolist()
        assert len(new_s) >= 3
        # re-construct input
        x2 = torch.LongTensor(new_s)
        return x2

    def add_word_blank(self, source):
        """
        Randomly replace input words with the mask token (length preserved).
        """
        assert 0 < self.word_blank < 1
        # define words to blank
        L = source.size(0)
        keep = np.random.rand(L - 2) >= self.word_blank
        keep[0] = 1  # do not blank the start sentence symbol
        words = source[:L - 2].tolist()
        # randomly blank words from the input
        new_s = [w if keep[j] else self.mask_idx for j, w in enumerate(words)]
        new_s += source[-2:].tolist()
        assert len(new_s) == L
        # re-construct input
        x2 = torch.LongTensor(new_s)
        return x2

    def add_whole_word_mask(self, source, p):
        """Mask a fraction `p` of the words, BART-style (span or per-word)."""
        is_word_start = self.word_starts(source)
        num_to_mask = int(math.ceil(is_word_start.float().sum() * p))
        num_inserts = 0
        if num_to_mask == 0:
            return source

        if self.mask_span_distribution is not None:
            lengths = self.mask_span_distribution.sample(sample_shape=(num_to_mask,))

            # Make sure we have enough to mask
            cum_length = torch.cumsum(lengths, 0)
            while cum_length[-1] < num_to_mask:
                lengths = torch.cat([lengths, self.mask_span_distribution.sample(sample_shape=(num_to_mask,))], dim=0)
                cum_length = torch.cumsum(lengths, 0)

            # Trim to masking budget
            i = 0
            while cum_length[i] < num_to_mask:
                i += 1
            lengths[i] = num_to_mask - (0 if i == 0 else cum_length[i - 1])
            num_to_mask = i + 1
            lengths = lengths[:num_to_mask]

            # Handle 0-length mask (inserts) separately
            lengths = lengths[lengths > 0]
            num_inserts = num_to_mask - lengths.size(0)
            num_to_mask -= num_inserts
            if num_to_mask == 0:
                return self.add_insertion_noise(source, num_inserts / source.size(0))

            assert (lengths > 0).all()
        else:
            lengths = torch.ones((num_to_mask,)).long()
        assert is_word_start[-1] == 0
        word_starts = is_word_start.nonzero()
        indices = word_starts[torch.randperm(word_starts.size(0))[:num_to_mask]].squeeze(1)
        # A `random_ratio` fraction of masks becomes a random token instead.
        mask_random = torch.FloatTensor(num_to_mask).uniform_() < self.random_ratio

        source_length = source.size(0)
        assert source_length - 1 not in indices
        to_keep = torch.ones(source_length, dtype=torch.bool)
        # acts as a long length, so spans don't go over the end of doc
        is_word_start[-1] = 255
        if self.replace_length == 0:
            to_keep[indices] = 0
        else:
            # keep index, but replace it with [MASK]
            source[indices] = self.mask_idx
            source[indices[mask_random]] = torch.randint(1, len(self.vocab), size=(mask_random.sum(),))

        if self.mask_span_distribution is not None:
            assert len(lengths.size()) == 1
            assert lengths.size() == indices.size()
            lengths -= 1
            # Extend each masked span token-by-token until its length budget
            # is exhausted (word starts consume one budget unit each).
            while indices.size(0) > 0:
                assert lengths.size() == indices.size()
                lengths -= is_word_start[indices + 1].long()
                uncompleted = lengths >= 0
                indices = indices[uncompleted] + 1
                mask_random = mask_random[uncompleted]
                lengths = lengths[uncompleted]
                if self.replace_length != -1:
                    # delete token
                    to_keep[indices] = 0
                else:
                    # keep index, but replace it with [MASK]
                    source[indices] = self.mask_idx
                    source[indices[mask_random]] = torch.randint(1, len(self.vocab), size=(mask_random.sum(),))
        else:
            # A bit faster when all lengths are 1
            while indices.size(0) > 0:
                uncompleted = is_word_start[indices + 1] == 0
                indices = indices[uncompleted] + 1
                mask_random = mask_random[uncompleted]
                if self.replace_length != -1:
                    # delete token
                    to_keep[indices] = 0
                else:
                    # keep index, but replace it with [MASK]
                    source[indices] = self.mask_idx
                    source[indices[mask_random]] = torch.randint(1, len(self.vocab), size=(mask_random.sum(),))

                assert source_length - 1 not in indices

        source = source[to_keep]

        if num_inserts > 0:
            source = self.add_insertion_noise(source, num_inserts / source.size(0))

        return source

    def add_permuted_noise(self, tokens, p):
        """Randomly permute a fraction `p` of the inner tokens."""
        num_words = len(tokens)
        num_to_permute = math.ceil(((num_words * 2) * p) / 2.0)
        substitutions = torch.randperm(num_words - 2)[:num_to_permute] + 1
        tokens[substitutions] = tokens[substitutions[torch.randperm(num_to_permute)]]
        return tokens

    def add_rolling_noise(self, tokens):
        """Rotate the inner tokens by a random offset (BOS/EOS fixed)."""
        offset = np.random.randint(1, max(1, tokens.size(-1) - 1) + 1)
        tokens = torch.cat(
            (tokens[0:1], tokens[offset:-1], tokens[1:offset], tokens[-1:]),
            dim=0,
        )
        return tokens

    def add_insertion_noise(self, tokens, p):
        """Insert ``ceil(len*p)`` mask/random tokens at random positions."""
        if p == 0.0:
            return tokens

        num_tokens = len(tokens)
        n = int(math.ceil(num_tokens * p))

        noise_indices = torch.randperm(num_tokens + n - 2)[:n] + 1
        noise_mask = torch.zeros(size=(num_tokens + n,), dtype=torch.bool)
        noise_mask[noise_indices] = 1
        result = torch.LongTensor(n + len(tokens)).fill_(-1)

        num_random = int(math.ceil(n * self.random_ratio))
        result[noise_indices[num_random:]] = self.mask_idx
        result[noise_indices[:num_random]] = torch.randint(low=1, high=len(self.vocab), size=(num_random,))

        result[~noise_mask] = tokens

        assert (result >= 0).all()
        return result

    def collater(self, samples):
        """Merge a list of samples to form a mini-batch.
        Args:
            samples (List[dict]): samples to collate
        Returns:
            dict: a mini-batch of data
        """
        return collate(samples, self.vocab.pad(), self.bos_idx, self.vocab)

    def num_tokens(self, index):
        """Return the number of tokens in a sample. This value is used to
        enforce ``--max-tokens`` during batching."""
        return self.sizes[index]

    def size(self, index):
        """Return an example's size as a float or tuple. This value is used when
        filtering a dataset with ``--max-positions``."""
        return self.sizes[index]

    def ordered_indices(self):
        """Return an ordered list of indices. Batches will be constructed based
        on this order."""
        if self.shuffle:
            indices = np.random.permutation(len(self))
        else:
            indices = np.arange(len(self))
        return indices[np.argsort(self.sizes[indices], kind='mergesort')]

    def prefetch(self, indices):
        # BUGFIX: the original forwarded to ``self.src``/``self.tgt``, which
        # this class never defines (copy-paste from LanguagePairDataset) and
        # which would raise AttributeError; the wrapped dataset is the only
        # prefetchable member.
        self.dataset.prefetch(indices)

    @property
    def supports_prefetch(self):
        # BUGFIX: same as prefetch() — query the wrapped dataset, not the
        # nonexistent src/tgt attributes.
        return (
            hasattr(self.dataset, 'supports_prefetch')
            and self.dataset.supports_prefetch
        )
| 35.711864
| 118
| 0.586082
|
23b3c2d419ba78374dcdb0edaab7bcd93f1569d4
| 157,760
|
py
|
Python
|
release/stubs/Autodesk/Revit/DB/Architecture.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 182
|
2017-06-27T02:26:15.000Z
|
2022-03-30T18:53:43.000Z
|
release/stubs/Autodesk/Revit/DB/Architecture.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 28
|
2017-06-27T13:38:23.000Z
|
2022-03-15T11:19:44.000Z
|
release/stubs/Autodesk/Revit/DB/Architecture.py
|
htlcnn/ironpython-stubs
|
780d829e2104b2789d5f4d6f32b0ec9f2930ca03
|
[
"MIT"
] | 67
|
2017-06-28T09:43:59.000Z
|
2022-03-20T21:17:10.000Z
|
# encoding: utf-8
# module Autodesk.Revit.DB.Architecture calls itself Architecture
# from RevitAPI, Version=17.0.0.0, Culture=neutral, PublicKeyToken=null
# by generator 1.145
# no doc
# no imports
# no functions
# classes
class BuildingPad(CeilingAndFloor, IDisposable):
 """ Represents a BuildingPad element. """
 # Auto-generated IronPython stub: bodies are placeholders; the real
 # implementation lives in RevitAPI.dll.
 @staticmethod
 def Create(document, buildingPadTypeId, levelId, curveLoops):
  """ Create(document: Document, buildingPadTypeId: ElementId, levelId: ElementId, curveLoops: IList[CurveLoop]) -> BuildingPad """
  pass
 def Dispose(self):
  """ Dispose(self: CeilingAndFloor, A_0: bool) """
  pass
 def GetBoundary(self):
  """
  GetBoundary(self: BuildingPad) -> IList[CurveLoop]
  Gets the boundary of current BuildingPad element.
  Returns: The curve loops that represent the boundary of the BuildingPad.
  """
  pass
 def getBoundingBox(self, *args): #cannot find CLR method
  """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
  pass
 def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
  """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
  pass
 def SetBoundary(self, curveLoops):
  """ SetBoundary(self: BuildingPad, curveLoops: IList[CurveLoop]) """
  pass
 def setElementType(self, *args): #cannot find CLR method
  """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
  pass
 def __enter__(self, *args): #cannot find CLR method
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self, *args): #cannot find CLR method
  """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 AssociatedTopographySurfaceId = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The element id of a topography surface created by the introduction of this building pad.
Get: AssociatedTopographySurfaceId(self: BuildingPad) -> ElementId
 """
 HostId = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The element id of the topography surface hosting this BuidlingPad.
Get: HostId(self: BuildingPad) -> ElementId
 """
class ContinuousRail(Element, IDisposable):
 """
 Represents a continuous rail element in Autodesk Revit.
 Type Data
 Misc Data
 Path and Profile Data
 """
 # Auto-generated IronPython stub: bodies are placeholders; the real
 # implementation lives in RevitAPI.dll.
 def Dispose(self):
  """ Dispose(self: Element, A_0: bool) """
  pass
 def getBoundingBox(self, *args): #cannot find CLR method
  """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
  pass
 def GetEndExtensionPath(self):
  """
  GetEndExtensionPath(self: ContinuousRail) -> IList[Curve]
  Retrieves the end extension path.
  Returns: The end extension path of the rail.
  """
  pass
 def GetPath(self):
  """
  GetPath(self: ContinuousRail) -> IList[Curve]
  Retrieves the rail path.
  Returns: The path of the rail.
  """
  pass
 def GetStartExtensionPath(self):
  """
  GetStartExtensionPath(self: ContinuousRail) -> IList[Curve]
  Retrieves the start extension path.
  Returns: The start extension path of the rail.
  """
  pass
 def GetSupports(self):
  """
  GetSupports(self: ContinuousRail) -> IList[ElementId]
  Returns all the railing supports attached to the rail.
  """
  pass
 def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
  """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
  pass
 def setElementType(self, *args): #cannot find CLR method
  """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
  pass
 def __enter__(self, *args): #cannot find CLR method
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self, *args): #cannot find CLR method
  """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 HostRailingId = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The id of the host Railing that contains this rail.
Get: HostRailingId(self: ContinuousRail) -> ElementId
 """
 Length = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The length of the rail.
Get: Length(self: ContinuousRail) -> float
 """
class ContinuousRailType(ElementType, IDisposable):
 """ A type element containing the properties of a continuous rail. """
 # Auto-generated IronPython stub: bodies are placeholders; the real
 # implementation lives in RevitAPI.dll.
 def Dispose(self):
  """ Dispose(self: Element, A_0: bool) """
  pass
 def getBoundingBox(self, *args): #cannot find CLR method
  """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
  pass
 def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
  """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
  pass
 def setElementType(self, *args): #cannot find CLR method
  """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
  pass
 def __enter__(self, *args): #cannot find CLR method
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self, *args): #cannot find CLR method
  """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 DefaultJoinOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The default join option between two rails.
Get: DefaultJoinOption(self: ContinuousRailType) -> RailTypeDefaultJoinOption
Set: DefaultJoinOption(self: ContinuousRailType) = value
 """
 EndOrTopExtensionLength = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The extension length of the rail termination at the end or top.
Get: EndOrTopExtensionLength(self: ContinuousRailType) -> float
Set: EndOrTopExtensionLength(self: ContinuousRailType) = value
 """
 EndOrTopExtensionStyle = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The extension style of the rail termination at the end or top.
Get: EndOrTopExtensionStyle(self: ContinuousRailType) -> RailExtensionStyle
Set: EndOrTopExtensionStyle(self: ContinuousRailType) = value
 """
 EndOrTopTermination = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The termination of the rail at the end or top.
Get: EndOrTopTermination(self: ContinuousRailType) -> ElementId
Set: EndOrTopTermination(self: ContinuousRailType) = value
 """
 FilletRadius = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The fillet radius of the rail join.
Get: FilletRadius(self: ContinuousRailType) -> float
Set: FilletRadius(self: ContinuousRailType) = value
 """
 HandClearance = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The horizontal distance between the inner boundary of the rail and the path.
Get: HandClearance(self: ContinuousRailType) -> float
Set: HandClearance(self: ContinuousRailType) = value
 """
 ProfileId = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The id of the profile of the rail
Get: ProfileId(self: ContinuousRailType) -> ElementId
Set: ProfileId(self: ContinuousRailType) = value
 """
 Projection = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The horizontal distance between the outer boundary of the rail and the path.
Get: Projection(self: ContinuousRailType) -> float
 """
 StartOrBottomExtensionLength = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The extension length of the rail termination at the beginning or bottom.
Get: StartOrBottomExtensionLength(self: ContinuousRailType) -> float
Set: StartOrBottomExtensionLength(self: ContinuousRailType) = value
 """
 StartOrBottomExtensionStyle = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The extension style of the rail termination at the beginning or bottom.
Get: StartOrBottomExtensionStyle(self: ContinuousRailType) -> RailExtensionStyle
Set: StartOrBottomExtensionStyle(self: ContinuousRailType) = value
 """
 StartOrBottomTermination = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The termination of the rail at the beginning or bottom.
Get: StartOrBottomTermination(self: ContinuousRailType) -> ElementId
Set: StartOrBottomTermination(self: ContinuousRailType) = value
 """
 Transition = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The transition option of the rail.
Get: Transition(self: ContinuousRailType) -> RailTransitionOption
Set: Transition(self: ContinuousRailType) = value
 """
class CutLineType(Enum, IComparable, IFormattable, IConvertible):
 """
 The available line types for a stairs cut line.
 enum CutLineType, values: DoubleLine (1), SingleLine (0)
 """
 # Auto-generated IronPython stub for a .NET enum; members are populated by
 # the CLR at runtime.
 def __eq__(self, *args): #cannot find CLR method
  """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
  pass
 def __format__(self, *args): #cannot find CLR method
  """ __format__(formattable: IFormattable, format: str) -> str """
  pass
 def __ge__(self, *args): #cannot find CLR method
  pass
 def __gt__(self, *args): #cannot find CLR method
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __le__(self, *args): #cannot find CLR method
  pass
 def __lt__(self, *args): #cannot find CLR method
  pass
 def __ne__(self, *args): #cannot find CLR method
  pass
 def __reduce_ex__(self, *args): #cannot find CLR method
  pass
 def __str__(self, *args): #cannot find CLR method
  pass
 DoubleLine = None
 SingleLine = None
 value__ = None
class CutMarkSymbol(Enum, IComparable, IFormattable, IConvertible):
 """
 The available shapes for the cut mark symbol.
 enum CutMarkSymbol, values: Curve (2), None (0), Zigzag (1)
 """
 # Auto-generated IronPython stub for a .NET enum; members are populated by
 # the CLR at runtime.
 def __eq__(self, *args): #cannot find CLR method
  """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
  pass
 def __format__(self, *args): #cannot find CLR method
  """ __format__(formattable: IFormattable, format: str) -> str """
  pass
 def __ge__(self, *args): #cannot find CLR method
  pass
 def __gt__(self, *args): #cannot find CLR method
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __le__(self, *args): #cannot find CLR method
  pass
 def __lt__(self, *args): #cannot find CLR method
  pass
 def __ne__(self, *args): #cannot find CLR method
  pass
 def __reduce_ex__(self, *args): #cannot find CLR method
  pass
 def __str__(self, *args): #cannot find CLR method
  pass
 Curve = None
 # NOTE(review): assigning to ``None`` is only legal under IronPython /
 # Python 2 — this generated stub file will not parse under Python 3.
 None = None
 value__ = None
 Zigzag = None
class CutMarkType(ElementType, IDisposable):
 """ An object represents the cut mark type in Autodesk Revit. """
 # Auto-generated IronPython stub: bodies are placeholders; the real
 # implementation lives in RevitAPI.dll.
 def Dispose(self):
  """ Dispose(self: Element, A_0: bool) """
  pass
 def getBoundingBox(self, *args): #cannot find CLR method
  """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
  pass
 def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
  """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
  pass
 def setElementType(self, *args): #cannot find CLR method
  """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
  pass
 def __enter__(self, *args): #cannot find CLR method
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self, *args): #cannot find CLR method
  """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 CutLineAngle = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The rotation angle of the cut mark.
Get: CutLineAngle(self: CutMarkType) -> float
Set: CutLineAngle(self: CutMarkType) = value
 """
 CutLineDistance = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The distance between 2 cut lines.
Get: CutLineDistance(self: CutMarkType) -> float
Set: CutLineDistance(self: CutMarkType) = value
 """
 CutLineExtension = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The extension distance to the boundary.
Get: CutLineExtension(self: CutMarkType) -> float
Set: CutLineExtension(self: CutMarkType) = value
 """
 CutLineType = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The cut line type of the cut mark.
Get: CutLineType(self: CutMarkType) -> CutLineType
Set: CutLineType(self: CutMarkType) = value
 """
 CutMarkSymbol = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The symbol type of the cut mark.
Get: CutMarkSymbol(self: CutMarkType) -> CutMarkSymbol
Set: CutMarkSymbol(self: CutMarkType) = value
 """
 CutMarkSymbolSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The size of the cut mark symbol.
Get: CutMarkSymbolSize(self: CutMarkType) -> float
Set: CutMarkSymbolSize(self: CutMarkType) = value
 """
class Fascia(HostedSweep, IDisposable):
 """ An object that represents a fascia within the Autodesk Revit project. """
 # Auto-generated IronPython stub: bodies are placeholders; the real
 # implementation lives in RevitAPI.dll.
 def AddSegment(self, targetRef):
  """
  AddSegment(self: Fascia, targetRef: Reference)
  Add segments to the fascia.
  targetRef: Segment's reference on which want to be added.
  """
  pass
 def Dispose(self):
  """ Dispose(self: Element, A_0: bool) """
  pass
 def getBoundingBox(self, *args): #cannot find CLR method
  """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
  pass
 def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
  """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
  pass
 def setElementType(self, *args): #cannot find CLR method
  """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
  pass
 def __enter__(self, *args): #cannot find CLR method
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self, *args): #cannot find CLR method
  """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 FasciaType = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Retrieves/set an object that represents the type of the Fascia.
Get: FasciaType(self: Fascia) -> FasciaType
Set: FasciaType(self: Fascia) = value
 """
class FasciaType(HostedSweepType, IDisposable):
 """
 An object that represents the fascia type
 in Autodesk Revit.
 """
 # Auto-generated IronPython stub: bodies are placeholders; the real
 # implementation lives in RevitAPI.dll.
 def Dispose(self):
  """ Dispose(self: Element, A_0: bool) """
  pass
 def getBoundingBox(self, *args): #cannot find CLR method
  """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
  pass
 def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
  """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
  pass
 def setElementType(self, *args): #cannot find CLR method
  """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
  pass
 def __enter__(self, *args): #cannot find CLR method
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self, *args): #cannot find CLR method
  """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
class Gutter(HostedSweep, IDisposable):
 """ An object that represents a gutter within the Autodesk Revit project. """
 # Auto-generated IronPython stub: bodies are placeholders; the real
 # implementation lives in RevitAPI.dll.
 def AddSegment(self, targetRef):
  """
  AddSegment(self: Gutter, targetRef: Reference)
  Add segments to the gutter.
  targetRef: Segment's reference on which want to be added.
  """
  pass
 def Dispose(self):
  """ Dispose(self: Element, A_0: bool) """
  pass
 def getBoundingBox(self, *args): #cannot find CLR method
  """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
  pass
 def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
  """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
  pass
 def setElementType(self, *args): #cannot find CLR method
  """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
  pass
 def __enter__(self, *args): #cannot find CLR method
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self, *args): #cannot find CLR method
  """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 GutterType = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Retrieves/set an object that represents the type of the Gutter.
Get: GutterType(self: Gutter) -> GutterType
Set: GutterType(self: Gutter) = value
 """
class GutterType(HostedSweepType, IDisposable):
 """
 An object that represents the gutter type
 in Autodesk Revit.
 """
 # Auto-generated IronPython stub: bodies are placeholders; the real
 # implementation lives in RevitAPI.dll.
 def Dispose(self):
  """ Dispose(self: Element, A_0: bool) """
  pass
 def getBoundingBox(self, *args): #cannot find CLR method
  """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
  pass
 def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
  """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
  pass
 def setElementType(self, *args): #cannot find CLR method
  """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
  pass
 def __enter__(self, *args): #cannot find CLR method
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self, *args): #cannot find CLR method
  """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
# Auto-generated stub: empty bodies; docstrings mirror the CLR signatures.
class HandRail(ContinuousRail, IDisposable):
    """ Represents a hand rail element in Autodesk Revit. """
    def Dispose(self):
        """ Dispose(self: Element, A_0: bool) """
        pass
    def getBoundingBox(self, *args): #cannot find CLR method
        """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
        pass
    def setElementType(self, *args): #cannot find CLR method
        """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
# Auto-generated stub for the CLR HandRailPosition enum.
# FIX: the CLR member is literally named "None"; ``None = None`` is a Python
# SyntaxError ("cannot assign to None") that made this file unparseable. It is
# exposed as ``None_`` (PEP 8 trailing-underscore convention for keyword
# clashes); resolve the real member on the live CLR type via
# getattr(HandRailPosition, 'None') if needed.
class HandRailPosition(Enum, IComparable, IFormattable, IConvertible):
    """
    The position of the hand rail.
    enum HandRailPosition, values: Left (1), LeftAndRight (3), None (0), Right (2)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    Left = None
    LeftAndRight = None
    # CLR member "None" (value 0); renamed because "None" cannot be assigned.
    None_ = None
    Right = None
    value__ = None
# Auto-generated stub: empty bodies; docstrings mirror the CLR signatures.
class HandRailType(ContinuousRailType, IDisposable):
    """ A rail type object that is used in the generation of hand rail. """
    def Dispose(self):
        """ Dispose(self: Element, A_0: bool) """
        pass
    def getBoundingBox(self, *args): #cannot find CLR method
        """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
        pass
    def setElementType(self, *args): #cannot find CLR method
        """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    # NOTE(review): float dimensions presumably use Revit's internal units
    # (decimal feet) — confirm against the Revit API unit documentation.
    Height = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The height of the handrail.
    Get: Height(self: HandRailType) -> float
    Set: Height(self: HandRailType) = value
    """
    SupportJustification = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The support justification method of the handrail.
    Get: SupportJustification(self: HandRailType) -> RailSupportJustification
    Set: SupportJustification(self: HandRailType) = value
    """
    SupportLayout = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The support layout method of the handrail.
    Get: SupportLayout(self: HandRailType) -> RailSupportsLayout
    Set: SupportLayout(self: HandRailType) = value
    """
    SupportNumber = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The number of supports of the handrail.
    Get: SupportNumber(self: HandRailType) -> int
    Set: SupportNumber(self: HandRailType) = value
    """
    SupportSpacing = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The support spacing of the handrail.
    Get: SupportSpacing(self: HandRailType) -> float
    Set: SupportSpacing(self: HandRailType) = value
    """
    SupportTypeId = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The support type of the handrail.
    Get: SupportTypeId(self: HandRailType) -> ElementId
    Set: SupportTypeId(self: HandRailType) = value
    """
# Auto-generated stub for a CLR enum; member values live only in the class
# docstring — the attributes below are placeholder None bindings.
class RailAngledJoinOption(Enum, IComparable, IFormattable, IConvertible):
    """
    The angled joins of the rails.
    enum RailAngledJoinOption, values: AddVerticalOrHorizontalSegments (0), NoConnector (1)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    AddVerticalOrHorizontalSegments = None
    NoConnector = None
    value__ = None
# Auto-generated stub for a CLR enum; member values live only in the class
# docstring — the attributes below are placeholder None bindings.
class RailConnectionOption(Enum, IComparable, IFormattable, IConvertible):
    """
    The connections between rails.
    enum RailConnectionOption, values: Trim (0), Weld (1)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    Trim = None
    value__ = None
    Weld = None
# Auto-generated stub for the CLR RailExtensionStyle enum.
# FIX: the CLR member is literally named "None"; ``None = None`` is a Python
# SyntaxError ("cannot assign to None") that made this file unparseable. It is
# exposed as ``None_`` (PEP 8 trailing-underscore convention); resolve the real
# member on the live CLR type via getattr(RailExtensionStyle, 'None') if needed.
class RailExtensionStyle(Enum, IComparable, IFormattable, IConvertible):
    """
    The extension style of the rail.
    enum RailExtensionStyle, values: Floor (2), None (0), Post (3), Wall (1)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    Floor = None
    # CLR member "None" (value 0); renamed because "None" cannot be assigned.
    None_ = None
    Post = None
    value__ = None
    Wall = None
# Auto-generated stub for a CLR enum; member values live only in the class
# docstring — the attributes below are placeholder None bindings.
class RailIndex(Enum, IComparable, IFormattable, IConvertible):
    """
    The continuous rail position index.
    enum RailIndex, values: LeftPrimary (1), LeftSecondary (3), RightPrimary (2), RightSecondary (4), Top (0)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    LeftPrimary = None
    LeftSecondary = None
    RightPrimary = None
    RightSecondary = None
    Top = None
    value__ = None
# Auto-generated stub: empty bodies; docstrings mirror the CLR signatures.
class Railing(Element, IDisposable):
    """ Represents a railing element in Autodesk Revit. """
    # Two CLR overloads: create railings on all sides of a stairs/ramp, or
    # create a single railing along an explicit curve loop.
    @staticmethod
    def Create(document, *__args):
        """
        Create(document: Document, stairsOrRampId: ElementId, railingTypeId: ElementId, placePosition: RailingPlacementPosition) -> ICollection[ElementId]
        Automatically creates new railings with the specified railing type on all sides
        of a stairs or ramp element.
        document: The document.
        stairsOrRampId: The stairs or ramp to which the new railing will host.
        The stairs or ramp
        should have no associated railings yet.
        railingTypeId: The railing type of the new railing is to be created.
        placePosition: The placement position of the new railing.
        Returns: The new railing instances successfully created on the stairs.
        Create(document: Document, curveLoop: CurveLoop, railingTypeId: ElementId, baseLevelId: ElementId) -> Railing
        Creates a new railing by specifying the railing path in the project document.
        document: The document.
        curveLoop: The railing path which the new railing will be created along with.
        The
        curveLoop should be continuous with curves which are only bounded lines and
        arcs on the same horizontal plane.
        railingTypeId: The railing type of the new railing is to be created.
        baseLevelId: The base level on which the new railing will be created.
        Returns: The new railing instance if creation was successful, otherwise ``null``.
        """
        pass
    def Dispose(self):
        """ Dispose(self: Element, A_0: bool) """
        pass
    def Flip(self):
        """
        Flip(self: Railing)
        Flips the railing.
        """
        pass
    def getBoundingBox(self, *args): #cannot find CLR method
        """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
        pass
    def GetHandRails(self):
        """
        GetHandRails(self: Railing) -> IList[ElementId]
        Get all the handrails of the railing.
        Returns: All handrails of the railing.
        """
        pass
    def GetPath(self):
        """
        GetPath(self: Railing) -> IList[Curve]
        Gets the railing path.
        Returns: The curve array of the railing path.
        """
        pass
    @staticmethod
    def IsValidHostForNewRailing(document, elementId):
        """
        IsValidHostForNewRailing(document: Document, elementId: ElementId) -> bool
        Checks whether new railing can be created and placed on the specified host.
        document: The document.
        elementId: The element to check.
        Returns: True if new railing can be created and placed on the host, False otherwise.
        """
        pass
    def RailingCanBeHostedByElement(self, hostId):
        """
        RailingCanBeHostedByElement(self: Railing, hostId: ElementId) -> bool
        Checks whether the specified element can be used as a host for the railing.
        The host can be a stairs, ramp, floor, slab edge, wall or roof.
        hostId: Element id to check.
        Returns: True if the element can be used as host for the railing.
        """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
        pass
    def RemoveHost(self):
        """
        RemoveHost(self: Railing)
        Removes the association between the railing and its host.
        """
        pass
    def Reset(self):
        """
        Reset(self: Railing)
        Resets the railing to the default one that the system generates.
        """
        pass
    def setElementType(self, *args): #cannot find CLR method
        """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
        pass
    def SetPath(self, curveLoop):
        """
        SetPath(self: Railing, curveLoop: CurveLoop)
        Sets the railing path.
        curveLoop: The new curve array for the railing path.
        The curveLoop should be
        continuous with curves which are only bounded lines and arcs on the same
        horizontal plane.
        """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    CanReset = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """True if the railing can be reset, False otherwise.
    Get: CanReset(self: Railing) -> bool
    """
    Flipped = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Indicates if the railing is flipped.
    Get: Flipped(self: Railing) -> bool
    """
    HasHost = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Indicates if the railing has a host.
    Get: HasHost(self: Railing) -> bool
    """
    HostId = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The host of the railing.
    Get: HostId(self: Railing) -> ElementId
    Set: HostId(self: Railing) = value
    """
    IsDefault = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Indicates if the railing is the default one that system generates.
    Get: IsDefault(self: Railing) -> bool
    """
    TopRail = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The top rail of the railing.
    Get: TopRail(self: Railing) -> ElementId
    """
# Auto-generated stub for a CLR enum; member values live only in the class
# docstring — the attributes below are placeholder None bindings.
class RailingHeightCorrectionOption(Enum, IComparable, IFormattable, IConvertible):
    """
    Railing height correction option.
    enum RailingHeightCorrectionOption, values: ByType (0), Custom (1)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    ByType = None
    Custom = None
    value__ = None
# Auto-generated stub for a CLR enum; member values live only in the class
# docstring — the attributes below are placeholder None bindings.
class RailingPathCurveJoinOption(Enum, IComparable, IFormattable, IConvertible):
    """
    The join type of the railing path.
    enum RailingPathCurveJoinOption, values: AddVerticalOrHorizontalSegments (2), ByType (0), ExtendRailsToMeet (1), NoConnector (3)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    AddVerticalOrHorizontalSegments = None
    ByType = None
    ExtendRailsToMeet = None
    NoConnector = None
    value__ = None
# Auto-generated stub for a CLR enum; member values live only in the class
# docstring — the attributes below are placeholder None bindings.
class RailingPlacementPosition(Enum, IComparable, IFormattable, IConvertible):
    """
    Railing placement position.
    enum RailingPlacementPosition, values: Stringer (1), Treads (0), Undefined (-1)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    Stringer = None
    Treads = None
    Undefined = None
    value__ = None
# Auto-generated stub for a CLR enum; member values live only in the class
# docstring — the attributes below are placeholder None bindings.
class RailingSlopeOption(Enum, IComparable, IFormattable, IConvertible):
    """
    The option determines the slope of the railing.
    enum RailingSlopeOption, values: ByHost (0), Flat (1), Sloped (2)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    ByHost = None
    Flat = None
    Sloped = None
    value__ = None
# Auto-generated stub: empty bodies; docstrings mirror the CLR signatures.
# NOTE(review): property casing ("Handrail" vs "HandRail") is inconsistent but
# mirrors the CLR member names — do not normalize.
class RailingType(ElementType, IDisposable):
    """ A railing type object that is used in the generation of railing. """
    def Dispose(self):
        """ Dispose(self: Element, A_0: bool) """
        pass
    def getBoundingBox(self, *args): #cannot find CLR method
        """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
        pass
    def setElementType(self, *args): #cannot find CLR method
        """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    PrimaryHandrailHeight = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The height of the primary handrail.
    Get: PrimaryHandrailHeight(self: RailingType) -> float
    """
    PrimaryHandrailLateralOffset = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The lateral offset of the primary handrail.
    Get: PrimaryHandrailLateralOffset(self: RailingType) -> float
    """
    PrimaryHandRailPosition = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The position of the primary handrail.
    Get: PrimaryHandRailPosition(self: RailingType) -> HandRailPosition
    Set: PrimaryHandRailPosition(self: RailingType) = value
    """
    PrimaryHandrailType = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The type of the primary handrail.
    Get: PrimaryHandrailType(self: RailingType) -> ElementId
    Set: PrimaryHandrailType(self: RailingType) = value
    """
    SecondaryHandrailHeight = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The height of the secondary handrail.
    Get: SecondaryHandrailHeight(self: RailingType) -> float
    """
    SecondaryHandrailLateralOffset = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The lateral offset of the secondary handrail.
    Get: SecondaryHandrailLateralOffset(self: RailingType) -> float
    """
    SecondaryHandRailPosition = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The position of the secondary handrail.
    Get: SecondaryHandRailPosition(self: RailingType) -> HandRailPosition
    Set: SecondaryHandRailPosition(self: RailingType) = value
    """
    SecondaryHandrailType = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The type of the secondary handrail.
    Get: SecondaryHandrailType(self: RailingType) -> ElementId
    Set: SecondaryHandrailType(self: RailingType) = value
    """
    TopRailHeight = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The height of the top rail in the railing system.
    Get: TopRailHeight(self: RailingType) -> float
    Set: TopRailHeight(self: RailingType) = value
    """
    TopRailType = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """The type of the top rail in the railing system.
    Get: TopRailType(self: RailingType) -> ElementId
    Set: TopRailType(self: RailingType) = value
    """
# Auto-generated stub for a CLR enum; member values live only in the class
# docstring — the attributes below are placeholder None bindings.
class RailJoinOption(Enum, IComparable, IFormattable, IConvertible):
    """
    The join type of the system rails.
    enum RailJoinOption, values: ByType (-1), Fillet (1), Miter (0)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    ByType = None
    Fillet = None
    Miter = None
    value__ = None
# Auto-generated stub for a CLR enum; member values live only in the class
# docstring — the attributes below are placeholder None bindings.
class RailSupportJustification(Enum, IComparable, IFormattable, IConvertible):
    """
    The justification of the rail supports.
    enum RailSupportJustification, values: Begin (0), Center (1), End (2)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    Begin = None
    Center = None
    End = None
    value__ = None
# Auto-generated stub for the CLR RailSupportsLayout enum.
# FIX: the CLR member is literally named "None"; ``None = None`` is a Python
# SyntaxError ("cannot assign to None") that made this file unparseable. It is
# exposed as ``None_`` (PEP 8 trailing-underscore convention); resolve the real
# member on the live CLR type via getattr(RailSupportsLayout, 'None') if needed.
class RailSupportsLayout(Enum, IComparable, IFormattable, IConvertible):
    """
    The layout of the rail supports.
    enum RailSupportsLayout, values: AlignWithRailingPosts (2), FixedDistance (1), FixedNumber (3), MaxSpacing (4), MinSpacing (5), None (0)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    AlignWithRailingPosts = None
    FixedDistance = None
    FixedNumber = None
    MaxSpacing = None
    MinSpacing = None
    # CLR member "None" (value 0); renamed because "None" cannot be assigned.
    None_ = None
    value__ = None
# Auto-generated stub for a CLR enum; member values live only in the class
# docstring — the attributes below are placeholder None bindings.
# NOTE(review): "Tagent" looks like a misspelling of "Tangent", but it mirrors
# the name the generator emitted — confirm against the Revit API before renaming.
class RailTagentJoinOption(Enum, IComparable, IFormattable, IConvertible):
    """
    The tangent joins of the rails.
    enum RailTagentJoinOption, values: AddVerticalOrHorizontalSegments (0), ExtendRailsToMeet (2), NoConnector (1)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    AddVerticalOrHorizontalSegments = None
    ExtendRailsToMeet = None
    NoConnector = None
    value__ = None
# Auto-generated stub for the CLR RailTransitionOption enum.
# FIX: the CLR member is literally named "None"; ``None = None`` is a Python
# SyntaxError ("cannot assign to None") that made this file unparseable. It is
# exposed as ``None_`` (PEP 8 trailing-underscore convention); resolve the real
# member on the live CLR type via getattr(RailTransitionOption, 'None') if needed.
class RailTransitionOption(Enum, IComparable, IFormattable, IConvertible):
    """
    The transition type of the continuous rail.
    enum RailTransitionOption, values: Gooseneck (1), None (0), Simple (2)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    Gooseneck = None
    # CLR member "None" (value 0); renamed because "None" cannot be assigned.
    None_ = None
    Simple = None
    value__ = None
# Auto-generated stub for a CLR enum; member values live only in the class
# docstring — the attributes below are placeholder None bindings.
class RailTypeDefaultJoinOption(Enum, IComparable, IFormattable, IConvertible):
    """
    The default join type of the rail.
    enum RailTypeDefaultJoinOption, values: Fillet (1), Miter (0)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    Fillet = None
    Miter = None
    value__ = None
# Auto-generated stub for a CLR enum; member values live only in the class
# docstring — the attributes below are placeholder None bindings.
class RiserToTreadConnectionOption(Enum, IComparable, IFormattable, IConvertible):
    """
    Represents the connection style of the riser and tread in relation to each other.
    enum RiserToTreadConnectionOption, values: JoinAll (2), RiserBehindTread (0), TreadUnderRiser (1)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    JoinAll = None
    RiserBehindTread = None
    TreadUnderRiser = None
    value__ = None
class Room(SpatialElement, IDisposable):
    """ Provides access to the room topology in Autodesk Revit. """
    def Dispose(self):
        """ Dispose(self: Element, A_0: bool) """
        pass
    def getBoundingBox(self, *args): #cannot find CLR method
        """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
        pass
    def IsPointInRoom(self, point):
        """
        IsPointInRoom(self: Room, point: XYZ) -> bool
        Determines if a point lies within the volume of the room.
        point: Point to be checked.
        """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
        pass
    def setElementType(self, *args): #cannot find CLR method
        """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
        pass
    def Unplace(self):
        """
        Unplace(self: Room)
        Remove the room from its location, but the project still contains the room.
        The room can be placed in another location after unplaced.
        """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    # CLR properties; getters/setters are provided by the runtime, stubs only carry doc text.
    BaseOffset = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """Get or Set the Base Offset of the Room.
    Get: BaseOffset(self: Room) -> float
    Set: BaseOffset(self: Room) = value
    """
    ClosedShell = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """Return the closedShell of the Room.
    Get: ClosedShell(self: Room) -> GeometryElement
    """
    LimitOffset = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """Get or Set the Limit Offset of the Room.
    Get: LimitOffset(self: Room) -> float
    Set: LimitOffset(self: Room) = value
    """
    UnboundedHeight = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """Get the Unbounded Height of the Room.
    Get: UnboundedHeight(self: Room) -> float
    """
    UpperLimit = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """Get or Set the Upper Limit of the Room.
    Get: UpperLimit(self: Room) -> Level
    Set: UpperLimit(self: Room) = value
    """
    Volume = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """Get the Volume of the Room.
    Get: Volume(self: Room) -> float
    """
class RoomFilter(ElementSlowFilter, IDisposable):
    """
    A filter used to match rooms.
    RoomFilter()
    """
    def Dispose(self):
        """ Dispose(self: ElementFilter, A_0: bool) """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: ElementFilter, disposing: bool) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
class RoomTag(SpatialElementTag, IDisposable):
    """ Provides access to the room tag in Autodesk Revit. """
    def Dispose(self):
        """ Dispose(self: Element, A_0: bool) """
        pass
    def getBoundingBox(self, *args): #cannot find CLR method
        """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
        pass
    def setElementType(self, *args): #cannot find CLR method
        """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    # CLR properties; getters/setters are provided by the runtime, stubs only carry doc text.
    IsInRoom = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """Identifies if the tag is located in a room.
    Get: IsInRoom(self: RoomTag) -> bool
    """
    Room = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The room that the tag is associated with.
    Get: Room(self: RoomTag) -> Room
    """
    RoomTagType = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The tag type.
    Get: RoomTagType(self: RoomTag) -> RoomTagType
    Set: RoomTagType(self: RoomTag) = value
    """
    TaggedLocalRoomId = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The ElementId of the tagged room.
    Get: TaggedLocalRoomId(self: RoomTag) -> ElementId
    """
    TaggedRoomId = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The LinkElementId of the tagged room.
    Get: TaggedRoomId(self: RoomTag) -> LinkElementId
    """
class RoomTagFilter(ElementSlowFilter, IDisposable):
    """
    A filter used to match room tags.
    RoomTagFilter()
    """
    def Dispose(self):
        """ Dispose(self: ElementFilter, A_0: bool) """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: ElementFilter, disposing: bool) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
class RoomTagType(FamilySymbol, IDisposable):
    """ An object that represents a Room Tag type. """
    def Dispose(self):
        """ Dispose(self: Element, A_0: bool) """
        pass
    def getBoundingBox(self, *args): #cannot find CLR method
        """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
        pass
    def setElementType(self, *args): #cannot find CLR method
        """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
class SiteSubRegion(object, IDisposable):
    """ Represents a proxy class exposing the interfaces needed to access details of a subregion. """
    @staticmethod
    def Create(document, curveLoops, hostTopoSurfaceId=None):
        """
        Create(document: Document, curveLoops: IList[CurveLoop]) -> SiteSubRegion
        Create(document: Document, curveLoops: IList[CurveLoop], hostTopoSurfaceId: ElementId) -> SiteSubRegion
        """
        pass
    def Dispose(self):
        """ Dispose(self: SiteSubRegion) """
        pass
    def GetBoundary(self):
        """
        GetBoundary(self: SiteSubRegion) -> IList[CurveLoop]
        Gets the boundary of current subregion.
        Returns: The curve loops that represent the boundary.
        """
        pass
    @staticmethod
    def IsValidBoundary(curveLoops):
        """ IsValidBoundary(curveLoops: IList[CurveLoop]) -> bool """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: SiteSubRegion, disposing: bool) """
        pass
    def SetBoundary(self, curveLoops):
        """ SetBoundary(self: SiteSubRegion, curveLoops: IList[CurveLoop]) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __repr__(self, *args): #cannot find CLR method
        """ __repr__(self: object) -> str """
        pass
    # CLR properties; getters/setters are provided by the runtime, stubs only carry doc text.
    HostId = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The element id of the topography surface hosting this SiteSubRegion.
    Get: HostId(self: SiteSubRegion) -> ElementId
    """
    IsValidObject = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """Specifies whether the .NET object represents a valid Revit entity.
    Get: IsValidObject(self: SiteSubRegion) -> bool
    """
    TopographySurface = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The TopographySurface element which this SiteSubRegion represents.
    Get: TopographySurface(self: SiteSubRegion) -> TopographySurface
    """
class SketchedCurveSlopeOption(Enum, IComparable, IFormattable, IConvertible):
    """
    The option determines the slope of the sketched run/landing.
    enum SketchedCurveSlopeOption, values: Auto (0), Flat (1), Sloped (2)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x == y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    # Enum members; values are supplied by the CLR at runtime and are None in this stub.
    Auto = None
    Flat = None
    Sloped = None
    value__ = None
class Stairs(Element, IDisposable):
    """ Represents a stairs element in Autodesk Revit. """
    def Dispose(self):
        """ Dispose(self: Element, A_0: bool) """
        pass
    def GetAssociatedRailings(self):
        """
        GetAssociatedRailings(self: Stairs) -> ICollection[ElementId]
        Gets a list of the Railing elements which are associated to the boundaries of
        the stairs.
        Returns: The ids of the Railing elements.
        """
        pass
    def getBoundingBox(self, *args): #cannot find CLR method
        """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
        pass
    def GetStairsLandings(self):
        """
        GetStairsLandings(self: Stairs) -> ICollection[ElementId]
        Returns all the stairs landing components in the stairs.
        Returns: The stairs landing components in the stairs.
        """
        pass
    def GetStairsRuns(self):
        """
        GetStairsRuns(self: Stairs) -> ICollection[ElementId]
        Returns all the stairs run components in the stairs.
        Returns: The stairs run components in the stairs.
        """
        pass
    def GetStairsSupports(self):
        """
        GetStairsSupports(self: Stairs) -> ICollection[ElementId]
        Returns all the stairs support components in the stairs.
        """
        pass
    @staticmethod
    def IsByComponent(document, stairsId):
        """
        IsByComponent(document: Document, stairsId: ElementId) -> bool
        Indicates if the stairs is created by stairs components(runs, landings and
        supports).
        document: The document.
        stairsId: The stairs element to check.
        Returns: True if the stairs is created by components, False otherwise.
        """
        pass
    def IsInEditMode(self):
        """
        IsInEditMode(self: Stairs) -> bool
        Indicates whether the stairs is in edit mode or not.
        """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
        pass
    def setElementType(self, *args): #cannot find CLR method
        """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    # CLR properties; getters/setters are provided by the runtime, stubs only carry doc text.
    ActualRiserHeight = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The actual height of the stairs risers in the stairs.
    Get: ActualRiserHeight(self: Stairs) -> float
    """
    ActualRisersNumber = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The total number of actually created risers in model.
    Get: ActualRisersNumber(self: Stairs) -> int
    """
    ActualTreadDepth = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The actual depth of the stairs treads in the stairs, actual tread depth is equal to minimum tread depth by default.
    Get: ActualTreadDepth(self: Stairs) -> float
    Set: ActualTreadDepth(self: Stairs) = value
    """
    ActualTreadsNumber = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The number of treads actually created in the stairs.
    Get: ActualTreadsNumber(self: Stairs) -> int
    """
    BaseElevation = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The base elevation of the stairs.
    Get: BaseElevation(self: Stairs) -> float
    """
    DesiredRisersNumber = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The number of risers is calculated based on the height between levels.
    Get: DesiredRisersNumber(self: Stairs) -> int
    Set: DesiredRisersNumber(self: Stairs) = value
    """
    Height = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The height of the stair between the base and top levels.
    Get: Height(self: Stairs) -> float
    Set: Height(self: Stairs) = value
    """
    NumberOfStories = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The number of stories of a multi-story stair, or 1 for a single-story stair.
    Get: NumberOfStories(self: Stairs) -> int
    """
    TopElevation = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The top elevation of the stairs.
    Get: TopElevation(self: Stairs) -> float
    """
class StairsComponentConnection(object, IDisposable):
    """ Represents information about a connection among stairs components(run to landing). """
    def Dispose(self):
        """ Dispose(self: StairsComponentConnection) """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: StairsComponentConnection, disposing: bool) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __repr__(self, *args): #cannot find CLR method
        """ __repr__(self: object) -> str """
        pass
    # CLR properties; getters/setters are provided by the runtime, stubs only carry doc text.
    ConnectionType = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The connection type of the connected stairs component.
    Get: ConnectionType(self: StairsComponentConnection) -> StairsComponentConnectionEndType
    """
    ElementId = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The element Id of connected stairs component in the stairs connection.
    Get: ElementId(self: StairsComponentConnection) -> ElementId
    """
    IsValidObject = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """Specifies whether the .NET object represents a valid Revit entity.
    Get: IsValidObject(self: StairsComponentConnection) -> bool
    """
    PeerConnectionType = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The connection type of the peer connected stairs component.
    Get: PeerConnectionType(self: StairsComponentConnection) -> StairsComponentConnectionEndType
    """
    PeerElementId = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The element Id of peer connected stairs component in the stairs connection.
    Get: PeerElementId(self: StairsComponentConnection) -> ElementId
    """
class StairsComponentConnectionEndType(Enum, IComparable, IFormattable, IConvertible):
    """
    The end type identifying the connection type among stairs runs and landings.
    enum StairsComponentConnectionEndType, values: ET_Landing (0), ET_RunEnd (2), ET_RunStart (1)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x == y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    # Enum members; values are supplied by the CLR at runtime and are None in this stub.
    ET_Landing = None
    ET_RunEnd = None
    ET_RunStart = None
    value__ = None
class StairsConstructionMethod(Enum, IComparable, IFormattable, IConvertible):
    """
    Represents the construction method of the stairs.
    enum StairsConstructionMethod, values: Assembled (0), CastInPlace (1), Precast (2)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x == y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    # Enum members; values are supplied by the CLR at runtime and are None in this stub.
    Assembled = None
    CastInPlace = None
    Precast = None
    value__ = None
class StairsEndConnectionType(Enum, IComparable, IFormattable, IConvertible):
    """
    The join style between a run and landing.
    enum StairsEndConnectionType, values: Notch (1), StraightCut (0)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x == y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    # Enum members; values are supplied by the CLR at runtime and are None in this stub.
    Notch = None
    StraightCut = None
    value__ = None
class StairsEndNotchOption(Enum, IComparable, IFormattable, IConvertible):
    """
    The style of notch width for the stairs.
    enum StairsEndNotchOption, values: Custom (1), FullRunWidth (0)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x == y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    # Enum members; values are supplied by the CLR at runtime and are None in this stub.
    Custom = None
    FullRunWidth = None
    value__ = None
class StairsLanding(Element, IDisposable):
    """ An object that represents a stairs landing in Autodesk Revit. """
    @staticmethod
    def CanCreateAutomaticLanding(document, firstRunId, secondRunId):
        """
        CanCreateAutomaticLanding(document: Document, firstRunId: ElementId, secondRunId: ElementId) -> bool
        Checks whether automatic landing(s) can be created between the given two stairs
        runs and
        logically join(s) with the stairs runs.
        document: The document that owns the stairs runs.
        firstRunId: The first stairs run.
        secondRunId: The second stairs run.
        Returns: True if automatic landing(s) can be created between the two stairs runs, False
        otherwise.
        """
        pass
    @staticmethod
    def CreateAutomaticLanding(document, firstRunId, secondRunId):
        """
        CreateAutomaticLanding(document: Document, firstRunId: ElementId, secondRunId: ElementId) -> IList[ElementId]
        Creates automatic landing(s) between two stairs runs.
        document: The document that owns the stairs runs and new landing(s).
        firstRunId: The first stairs run.
        secondRunId: The second stairs run.
        Returns: The created landing(s) between the two stairs runs.
        """
        pass
    @staticmethod
    def CreateSketchedLanding(document, stairsId, curveLoop, baseElevation):
        """
        CreateSketchedLanding(document: Document, stairsId: ElementId, curveLoop: CurveLoop, baseElevation: float) -> StairsLanding
        Creates a customized landing between two runs by providing the closed boundary
        curves of the landing.
        document: The document that owns the landing.
        stairsId: The stairs that the new sketched landing belongs to.
        curveLoop: The closed boundary curves of the new landing.
        baseElevation: Base elevation of the new stairs run. The elevation has following restriction:
        The base elevation is relative to the base elevation of the stairs.The base
        elevation will be rounded automatically to a multiple of the riser height. The
        base elevation should be equal to or greater than half of the riser height.
        Returns: The new sketched landing.
        """
        pass
    @staticmethod
    def CreateSketchedLandingWithSlopeData(document, stairsId, curveLoop, baseElevation):
        """ CreateSketchedLandingWithSlopeData(document: Document, stairsId: ElementId, curveLoop: IList[SketchedStairsCurveData], baseElevation: float) -> StairsLanding """
        pass
    def Dispose(self):
        """ Dispose(self: Element, A_0: bool) """
        pass
    def GetAllSupports(self):
        """
        GetAllSupports(self: StairsLanding) -> IList[ElementId]
        Returns all the supports hosting the stairs landing.
        Returns: All the supports hosting the stairs landings.
        """
        pass
    def getBoundingBox(self, *args): #cannot find CLR method
        """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
        pass
    def GetConnections(self):
        """
        GetConnections(self: StairsLanding) -> IList[StairsComponentConnection]
        Returns information about the connections in which the stairs landing
        participates.
        Returns: The connections in which the stairs landing participates.
        """
        pass
    def GetFootprintBoundary(self):
        """
        GetFootprintBoundary(self: StairsLanding) -> CurveLoop
        Returns the landing's boundary curves which are projected on the stairs base
        level.
        Returns: The boundary curves of the landing.
        """
        pass
    def GetStairs(self):
        """
        GetStairs(self: StairsLanding) -> Stairs
        Returns the stairs to which the landing belongs.
        Returns: The stairs to which the landing belongs.
        """
        pass
    def GetStairsPath(self):
        """
        GetStairsPath(self: StairsLanding) -> CurveLoop
        Returns the stairs path curves on the landing. The curves are projected on the
        stairs base level.
        Returns: The stairs path curves of the landing.
        """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
        pass
    def setElementType(self, *args): #cannot find CLR method
        """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
        pass
    def SetSketchedLandingBoundaryAndPath(self, document, boundaryCurveLoop, pathCurveLoop):
        """
        SetSketchedLandingBoundaryAndPath(self: StairsLanding, document: Document, boundaryCurveLoop: CurveLoop, pathCurveLoop: CurveLoop)
        Sets the boundary and path curves of the sketched landing.
        document: The document that owns the landing.
        boundaryCurveLoop: The closed boundary curves of the landing.
        pathCurveLoop: The path curves of the landing, can be an empty CurveLoop.
        """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    # CLR properties; getters/setters are provided by the runtime, stubs only carry doc text.
    BaseElevation = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The base elevation of the landing.
    Get: BaseElevation(self: StairsLanding) -> float
    Set: BaseElevation(self: StairsLanding) = value
    """
    IsAutomaticLanding = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """True if the landing is an automatic landing, False otherwise.
    Get: IsAutomaticLanding(self: StairsLanding) -> bool
    """
    Thickness = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The thickness of the landing.
    Get: Thickness(self: StairsLanding) -> float
    """
class StairsLandingType(ElementType, IDisposable):
    """ Represents a stairs landing type in Autodesk Revit. """
    def Dispose(self):
        """ Dispose(self: Element, A_0: bool) """
        pass
    def getBoundingBox(self, *args): #cannot find CLR method
        """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
        pass
    def setElementType(self, *args): #cannot find CLR method
        """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    # CLR properties; getters/setters are provided by the runtime, stubs only carry doc text.
    IsMonolithic = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """True if the stairs landing is monolithic, false otherwise.
    Get: IsMonolithic(self: StairsLandingType) -> bool
    """
    Thickness = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """Thickness of the stairs landing.
    Get: Thickness(self: StairsLandingType) -> float
    Set: Thickness(self: StairsLandingType) = value
    """
class StairsNumberSystemReferenceOption(Enum, IComparable, IFormattable, IConvertible):
    """
    The reference types permitted for a number system to refer to the geometry of a stairs run.
    enum StairsNumberSystemReferenceOption, values: Center (0), Left (1), LeftQuarter (3), Right (2), RightQuarter (4)
    """
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x == y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    # Enum members; values are supplied by the CLR at runtime and are None in this stub.
    Center = None
    Left = None
    LeftQuarter = None
    Right = None
    RightQuarter = None
    value__ = None
class StairsPath(Element, IDisposable):
 """ An object that represents the stairs path. """
 @staticmethod
 def Create(document, stairsId, typeId, planViewId):
  """
  Create(document: Document, stairsId: LinkElementId, typeId: ElementId, planViewId: ElementId) -> StairsPath

  Creates a new stairs path for the specified stairs with the specified stairs
  path type only in the plan view.

  document: The document.
  stairsId: The id of the stairs element either in the host document or in a
   linked document.
  typeId: The type of stairs path.
  planViewId: The plan view in which the stairs path will be shown.
  Returns: The new stairs path.
  """
  pass
 def Dispose(self):
  """ Dispose(self: Element, A_0: bool) """
  pass
 def getBoundingBox(self, *args): #cannot find CLR method
  """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
  pass
 def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
  """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
  pass
 def setElementType(self, *args): #cannot find CLR method
  """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
  pass
 def __enter__(self, *args): #cannot find CLR method
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self, *args): #cannot find CLR method
  """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 DownText = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The stairs down text.

Get: DownText(self: StairsPath) -> str

Set: DownText(self: StairsPath) = value
"""
 DownTextOffset = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The offset of stairs down text.

Get: DownTextOffset(self: StairsPath) -> XYZ

Set: DownTextOffset(self: StairsPath) = value
"""
 ShowDownText = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Represents whether show stairs down text or not.

Get: ShowDownText(self: StairsPath) -> bool

Set: ShowDownText(self: StairsPath) = value
"""
 ShowUpText = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Represents whether show stairs up text or not.

Get: ShowUpText(self: StairsPath) -> bool

Set: ShowUpText(self: StairsPath) = value
"""
 StairsId = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The host stairs element id.

Get: StairsId(self: StairsPath) -> LinkElementId
"""
 StairsPathOffset = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The offset of stairs path to center line of stairs.

Get: StairsPathOffset(self: StairsPath) -> float

Set: StairsPathOffset(self: StairsPath) = value
"""
 TextOrientation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The orientation of stair up and down text.

Get: TextOrientation(self: StairsPath) -> StairsTextOrientation

Set: TextOrientation(self: StairsPath) = value
"""
 UpText = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The stairs up text.

Get: UpText(self: StairsPath) -> str

Set: UpText(self: StairsPath) = value
"""
 UpTextOffset = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The offset of stairs up text.

Get: UpTextOffset(self: StairsPath) -> XYZ

Set: UpTextOffset(self: StairsPath) = value
"""
class StairsPathDirection(Enum, IComparable, IFormattable, IConvertible):
 """
 The direction style of stairs path.

 enum StairsPathDirection, values: AlwaysUp (0), AutomaticUpDown (1)
 """
 def __eq__(self, *args): #cannot find CLR method
  """ x.__eq__(y) <==> x==y """
  pass
 def __format__(self, *args): #cannot find CLR method
  """ __format__(formattable: IFormattable, format: str) -> str """
  pass
 def __ge__(self, *args): #cannot find CLR method
  """ x.__ge__(y) <==> x>=y """
  pass
 def __gt__(self, *args): #cannot find CLR method
  """ x.__gt__(y) <==> x>y """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __le__(self, *args): #cannot find CLR method
  """ x.__le__(y) <==> x<=y """
  pass
 def __lt__(self, *args): #cannot find CLR method
  """ x.__lt__(y) <==> x<y """
  pass
 def __ne__(self, *args): #cannot find CLR method
  """ x.__ne__(y) <==> x!=y """
  pass
 def __reduce_ex__(self, *args): #cannot find CLR method
  """ Helper for pickle. """
  pass
 def __str__(self, *args): #cannot find CLR method
  """ x.__str__() <==> str(x) """
  pass
 # Enumerated values; populated by the CLR at runtime, None in this stub.
 AlwaysUp = None
 AutomaticUpDown = None
 value__ = None
class StairsPathLineShapeAtCorner(Enum, IComparable, IFormattable, IConvertible):
 """
 The options for the line shape of a stairs path at a corner.

 enum StairsPathLineShapeAtCorner, values: Curved (1), Straight (0)
 """
 def __eq__(self, *args): #cannot find CLR method
  """ x.__eq__(y) <==> x==y """
  pass
 def __format__(self, *args): #cannot find CLR method
  """ __format__(formattable: IFormattable, format: str) -> str """
  pass
 def __ge__(self, *args): #cannot find CLR method
  """ x.__ge__(y) <==> x>=y """
  pass
 def __gt__(self, *args): #cannot find CLR method
  """ x.__gt__(y) <==> x>y """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __le__(self, *args): #cannot find CLR method
  """ x.__le__(y) <==> x<=y """
  pass
 def __lt__(self, *args): #cannot find CLR method
  """ x.__lt__(y) <==> x<y """
  pass
 def __ne__(self, *args): #cannot find CLR method
  """ x.__ne__(y) <==> x!=y """
  pass
 def __reduce_ex__(self, *args): #cannot find CLR method
  """ Helper for pickle. """
  pass
 def __str__(self, *args): #cannot find CLR method
  """ x.__str__() <==> str(x) """
  pass
 # Enumerated values; populated by the CLR at runtime, None in this stub.
 Curved = None
 Straight = None
 value__ = None
class StairsPathType(ElementType, IDisposable):
 """ An object represents the stairs path type. """
 def Dispose(self):
  """ Dispose(self: Element, A_0: bool) """
  pass
 def getBoundingBox(self, *args): #cannot find CLR method
  """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
  pass
 def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
  """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
  pass
 def setElementType(self, *args): #cannot find CLR method
  """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
  pass
 def __enter__(self, *args): #cannot find CLR method
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self, *args): #cannot find CLR method
  """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 ArrowheadTypeId = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The arrow head type of the stairs path.

Get: ArrowheadTypeId(self: StairsPathType) -> ElementId

Set: ArrowheadTypeId(self: StairsPathType) = value
"""
 DistanceToCutMark = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The distance from the stairs path arrow to cut mark.

Get: DistanceToCutMark(self: StairsPathType) -> float

Set: DistanceToCutMark(self: StairsPathType) = value
"""
 DrawForEachRun = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """True if stairs paths should be drawn for each run, false if it should be drawn for the whole stairs.

Get: DrawForEachRun(self: StairsPathType) -> bool

Set: DrawForEachRun(self: StairsPathType) = value
"""
 EndAtRiser = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Represents whether the stairs path ends at the riser.

Get: EndAtRiser(self: StairsPathType) -> bool

Set: EndAtRiser(self: StairsPathType) = value
"""
 FullStepArrow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """True if the arrow fills the full step width, false if it fills by the specified arrow size.

Get: FullStepArrow(self: StairsPathType) -> bool

Set: FullStepArrow(self: StairsPathType) = value
"""
 LineShapeAtCorner = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The line shape of stairs path at the corner.

Get: LineShapeAtCorner(self: StairsPathType) -> StairsPathLineShapeAtCorner

Set: LineShapeAtCorner(self: StairsPathType) = value
"""
 ShowArrowheadToCutMark = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """True if the stairs path arrowhead should be shown to the cutmark, false if the arrow head is not shown.

Get: ShowArrowheadToCutMark(self: StairsPathType) -> bool

Set: ShowArrowheadToCutMark(self: StairsPathType) = value
"""
 StairsPathDirection = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The style of the stairs path.

Get: StairsPathDirection(self: StairsPathType) -> StairsPathDirection
"""
 StartExtension = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The start extension length of the stairs path.

Get: StartExtension(self: StairsPathType) -> float

Set: StartExtension(self: StairsPathType) = value
"""
 StartFromRiser = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """True if the stairs path starts from the riser, false if it starts from the tread.

Get: StartFromRiser(self: StairsPathType) -> bool

Set: StartFromRiser(self: StairsPathType) = value
"""
 StartSymbolTypeId = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The start symbol type of stairs path.

Get: StartSymbolTypeId(self: StairsPathType) -> ElementId

Set: StartSymbolTypeId(self: StairsPathType) = value
"""
class StairsRun(Element, IDisposable):
 """ Represents a stairs run element in Autodesk Revit. """
 @staticmethod
 def CreateSketchedRun(document, stairsId, baseElevation, boundaryCurves, riserCurves, stairsPath):
  """ CreateSketchedRun(document: Document, stairsId: ElementId, baseElevation: float, boundaryCurves: IList[Curve], riserCurves: IList[Curve], stairsPath: IList[Curve]) -> StairsRun """
  pass
 @staticmethod
 def CreateSketchedRunWithSlopeData(document, stairsId, baseElevation, boundaryCurves, riserCurves, stairsPath):
  """ CreateSketchedRunWithSlopeData(document: Document, stairsId: ElementId, baseElevation: float, boundaryCurves: IList[SketchedStairsCurveData], riserCurves: IList[Curve], stairsPath: IList[Curve]) -> StairsRun """
  pass
 @staticmethod
 def CreateSpiralRun(document, stairsId, center, radius, startAngle, includedAngle, clockwise, justification):
  """
  CreateSpiralRun(document: Document, stairsId: ElementId, center: XYZ, radius: float, startAngle: float, includedAngle: float, clockwise: bool, justification: StairsRunJustification) -> StairsRun

  Creates a spiral run in the project document by providing the center, start
  angle and included angle.

  document: The document.
  stairsId: The stairs that the new stairs run will belong to.
  center: The center of the location arc of the spiral run.
   The Z coordinate of the center is the base elevation for the new run (in
   model coordinates). It must be greater than or equal to the stairs base
   elevation.
  radius: The radius of the location arc of the spiral run.
  startAngle: The start angle of the location arc of the spiral run.
   The angle's coordinate system is world coordinate system which always is
   XYZ.BasisX and XYZ.BasisY.
  includedAngle: The total angle covered by the spiral run. Must be a positive
   value (direction is determined by the clockwise flag).
  clockwise: True if the spiral run will be created along clockwise direction,
   False otherwise.
  justification: The location path justification of the new stairs run.
  Returns: The new stairs run.
  """
  pass
 @staticmethod
 def CreateStraightRun(document, stairsId, locationPath, justification):
  """
  CreateStraightRun(document: Document, stairsId: ElementId, locationPath: Line, justification: StairsRunJustification) -> StairsRun

  Creates a straight run in the project document.

  document: The document.
  stairsId: The stairs that the new stairs run will belong to.
  locationPath: The line for location path of the new stairs run. The line has
   following restriction: The line should be bound line which is parallel to
   the XY plane. The Z coordinate of the line is the base elevation for the
   new run (in model coordinates). It must be greater than or equal to the
   stairs base elevation. The number of created risers will be calculated by
   rounding the length of the location path to a multiple of the tread depth.
  justification: The location path justification of the new stairs run.
  Returns: The new stairs run.
  """
  pass
 def Dispose(self):
  """ Dispose(self: Element, A_0: bool) """
  pass
 def GetAllSupports(self):
  """
  GetAllSupports(self: StairsRun) -> IList[ElementId]

  Retrieves all supports hosted by the stair's run.
  Returns: All supports hosted by the stair's run.
  """
  pass
 def getBoundingBox(self, *args): #cannot find CLR method
  """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
  pass
 def GetConnections(self):
  """
  GetConnections(self: StairsRun) -> IList[StairsComponentConnection]

  Returns information about the connections in which the stairs run
  participates. The stairs run may have no connection, or have at maximum two
  connections at the lower and upper ends.
  Returns: The connections in which the stairs run participates.
  """
  pass
 def GetFootprintBoundary(self):
  """
  GetFootprintBoundary(self: StairsRun) -> CurveLoop

  Returns the run's boundary curves which are projected on the stairs base level.
  Returns: The boundary curves of the stairs run.
  """
  pass
 def GetLeftSupports(self):
  """
  GetLeftSupports(self: StairsRun) -> IList[ElementId]

  Retrieves all supports on the left side of run boundaries.
  Returns: The supports on the left side of run boundaries.
  """
  pass
 def GetNumberSystemReference(self, referenceOption):
  """
  GetNumberSystemReference(self: StairsRun, referenceOption: StairsNumberSystemReferenceOption) -> Reference

  Gets the number system reference corresponding to the given reference options.

  referenceOption: The reference option.
  Returns: The reference.
  """
  pass
 def GetRightSupports(self):
  """
  GetRightSupports(self: StairsRun) -> IList[ElementId]

  Retrieves all supports on the right side of run boundaries.
  Returns: The supports on the right side of run boundaries.
  """
  pass
 def GetStairs(self):
  """
  GetStairs(self: StairsRun) -> Stairs

  Returns the stairs to which the stairs run belongs.
  Returns: The stairs to which the stairs run belongs.
  """
  pass
 def GetStairsPath(self):
  """
  GetStairsPath(self: StairsRun) -> CurveLoop

  Returns the stairs path curves on the run. The curves are projected on base
  level of the stairs.
  Returns: The stairs path curves.
  """
  pass
 def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
  """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
  pass
 def setElementType(self, *args): #cannot find CLR method
  """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
  pass
 @staticmethod
 def SetLocationPathForSpiralRun(stairsRun, center, radius, startAngle, includedAngle, clockwise, justification):
  """
  SetLocationPathForSpiralRun(stairsRun: StairsRun, center: XYZ, radius: float, startAngle: float, includedAngle: float, clockwise: bool, justification: StairsRunJustification) -> bool

  Set Location path for a spiral run.

  stairsRun: The run whose location path will be set.
  center: The center of the location arc of the spiral run.
   The Z coordinate of the center is the base elevation for the new run (in
   model coordinates). It must be greater than or equal to the stairs base
   elevation.
  radius: The radius of the location arc of the spiral run.
  startAngle: The start angle of the location arc of the spiral run.
   The angle's coordinate system is world coordinate system which always is
   XYZ.BasisX and XYZ.BasisY.
  includedAngle: The total angle covered by the spiral run. Must be a positive
   value (direction is determined by the clockwise flag).
  clockwise: True if the spiral run will be created along clockwise direction,
   False otherwise.
  justification: The location path justification of the new stairs run.
  Returns: Indicate if set is success or not.
  """
  pass
 @staticmethod
 def SetLocationPathForStraightRun(stairsRun, locationPath):
  """
  SetLocationPathForStraightRun(stairsRun: StairsRun, locationPath: Line) -> bool

  Set location path for a straight run by giving a line.

  stairsRun: The run whose location path will be set.
  locationPath: The location path.
  Returns: Indicate if set is success or not.
  """
  pass
 def __enter__(self, *args): #cannot find CLR method
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self, *args): #cannot find CLR method
  """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 ActualRisersNumber = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The actual number of stairs risers in the stairs run.

Get: ActualRisersNumber(self: StairsRun) -> int
"""
 ActualRunWidth = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Specifies the value of the tread width excluding the width of independent side supports.

Get: ActualRunWidth(self: StairsRun) -> float

Set: ActualRunWidth(self: StairsRun) = value
"""
 ActualTreadsNumber = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The actual number of stairs treads in the stairs run.

Get: ActualTreadsNumber(self: StairsRun) -> int
"""
 BaseElevation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The base elevation of the stairs run.

Get: BaseElevation(self: StairsRun) -> float

Set: BaseElevation(self: StairsRun) = value
"""
 BeginsWithRiser = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """True if the stairs run begins with a riser, false otherwise.

Get: BeginsWithRiser(self: StairsRun) -> bool

Set: BeginsWithRiser(self: StairsRun) = value
"""
 EndsWithRiser = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """True if the stairs run ends with a riser, false otherwise.

Get: EndsWithRiser(self: StairsRun) -> bool

Set: EndsWithRiser(self: StairsRun) = value
"""
 ExtensionBelowRiserBase = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Specifies a value to extend/trim the run's first step against base elevation of the stairs if the stairs begins with a riser.

Get: ExtensionBelowRiserBase(self: StairsRun) -> float

Set: ExtensionBelowRiserBase(self: StairsRun) = value
"""
 ExtensionBelowTreadBase = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Specifies a value to extend/trim the run's first step against base elevation of the stairs if the stairs begins with a tread.

Get: ExtensionBelowTreadBase(self: StairsRun) -> float

Set: ExtensionBelowTreadBase(self: StairsRun) = value
"""
 Height = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The calculated height of the stairs run.

Get: Height(self: StairsRun) -> float
"""
 LocationLineJustification = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The position of the run relative to the Up-direction path used to create the run.

Get: LocationLineJustification(self: StairsRun) -> StairsRunJustification

Set: LocationLineJustification(self: StairsRun) = value
"""
 StairsRunStyle = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The style of the stairs run such as straight, winder, etc.

Get: StairsRunStyle(self: StairsRun) -> StairsRunStyle
"""
 TopElevation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The top elevation of the stairs run.

Get: TopElevation(self: StairsRun) -> float

Set: TopElevation(self: StairsRun) = value
"""
class StairsRunJustification(Enum, IComparable, IFormattable, IConvertible):
 """
 The position of the run relative to the Up-direction path used to create the run.

 enum StairsRunJustification, values: Center (1), Left (0), LeftExterior (3), Right (2), RightExterior (4)
 """
 def __eq__(self, *args): #cannot find CLR method
  """ x.__eq__(y) <==> x==y """
  pass
 def __format__(self, *args): #cannot find CLR method
  """ __format__(formattable: IFormattable, format: str) -> str """
  pass
 def __ge__(self, *args): #cannot find CLR method
  """ x.__ge__(y) <==> x>=y """
  pass
 def __gt__(self, *args): #cannot find CLR method
  """ x.__gt__(y) <==> x>y """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __le__(self, *args): #cannot find CLR method
  """ x.__le__(y) <==> x<=y """
  pass
 def __lt__(self, *args): #cannot find CLR method
  """ x.__lt__(y) <==> x<y """
  pass
 def __ne__(self, *args): #cannot find CLR method
  """ x.__ne__(y) <==> x!=y """
  pass
 def __reduce_ex__(self, *args): #cannot find CLR method
  """ Helper for pickle. """
  pass
 def __str__(self, *args): #cannot find CLR method
  """ x.__str__() <==> str(x) """
  pass
 # Enumerated values; populated by the CLR at runtime, None in this stub.
 Center = None
 Left = None
 LeftExterior = None
 Right = None
 RightExterior = None
 value__ = None
class StairsRunStyle(Enum, IComparable, IFormattable, IConvertible):
 """
 The shape of a run. Different shape has different ways of manipulation.

 enum StairsRunStyle, values: Sketched (2), Spiral (4), Straight (3), Winder (1)
 """
 def __eq__(self, *args): #cannot find CLR method
  """ x.__eq__(y) <==> x==y """
  pass
 def __format__(self, *args): #cannot find CLR method
  """ __format__(formattable: IFormattable, format: str) -> str """
  pass
 def __ge__(self, *args): #cannot find CLR method
  """ x.__ge__(y) <==> x>=y """
  pass
 def __gt__(self, *args): #cannot find CLR method
  """ x.__gt__(y) <==> x>y """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __le__(self, *args): #cannot find CLR method
  """ x.__le__(y) <==> x<=y """
  pass
 def __lt__(self, *args): #cannot find CLR method
  """ x.__lt__(y) <==> x<y """
  pass
 def __ne__(self, *args): #cannot find CLR method
  """ x.__ne__(y) <==> x!=y """
  pass
 def __reduce_ex__(self, *args): #cannot find CLR method
  """ Helper for pickle. """
  pass
 def __str__(self, *args): #cannot find CLR method
  """ x.__str__() <==> str(x) """
  pass
 # Enumerated values; populated by the CLR at runtime, None in this stub.
 Sketched = None
 Spiral = None
 Straight = None
 value__ = None
 Winder = None
class StairsRunType(ElementType, IDisposable):
 """ A stairs run type object that is used in the generation of stairs run. """
 def Dispose(self):
  """ Dispose(self: Element, A_0: bool) """
  pass
 def getBoundingBox(self, *args): #cannot find CLR method
  """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
  pass
 def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
  """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
  pass
 def setElementType(self, *args): #cannot find CLR method
  """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
  pass
 def __enter__(self, *args): #cannot find CLR method
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self, *args): #cannot find CLR method
  """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 HasRisers = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """True if the stairs run will include risers on steps, false otherwise.

Get: HasRisers(self: StairsRunType) -> bool

Set: HasRisers(self: StairsRunType) = value
"""
 HasTreads = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """True if the stairs run will include treads on steps, false otherwise.

Get: HasTreads(self: StairsRunType) -> bool

Set: HasTreads(self: StairsRunType) = value
"""
 IsMonolithic = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """True if the stairs run is monolithic, false otherwise.

Get: IsMonolithic(self: StairsRunType) -> bool
"""
 IsSlanted = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """True if risers will be slanted, false if they will be straight.

Get: IsSlanted(self: StairsRunType) -> bool

Set: IsSlanted(self: StairsRunType) = value
"""
 MaterialId = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The material of the stairs run, only available for monolithic stairs run.

Get: MaterialId(self: StairsRunType) -> ElementId

Set: MaterialId(self: StairsRunType) = value
"""
 NosingLength = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The amount of the tread depth that overhangs the next tread.

Get: NosingLength(self: StairsRunType) -> float

Set: NosingLength(self: StairsRunType) = value
"""
 NosingProfile = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The id of the nosing profile of the treads.

Get: NosingProfile(self: StairsRunType) -> ElementId

Set: NosingProfile(self: StairsRunType) = value
"""
 RiserProfile = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The id of the profile of the risers.

Get: RiserProfile(self: StairsRunType) -> ElementId

Set: RiserProfile(self: StairsRunType) = value
"""
 RiserThickness = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The thickness of the risers.

Get: RiserThickness(self: StairsRunType) -> float

Set: RiserThickness(self: StairsRunType) = value
"""
 RiserToTreadConnect = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The connection of the riser to tread in relation to each other.

Get: RiserToTreadConnect(self: StairsRunType) -> RiserToTreadConnectionOption

Set: RiserToTreadConnect(self: StairsRunType) = value
"""
 StructuralDepth = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The structural depth of the stairs run, only available for monolithic stairs run.

Get: StructuralDepth(self: StairsRunType) -> float

Set: StructuralDepth(self: StairsRunType) = value
"""
 TotalDepth = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The total depth of the stairs run, only available for monolithic stairs run.

Get: TotalDepth(self: StairsRunType) -> float
"""
 TreadNosingPosition = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Represents on which edges of the nosing to apply the nosing profile.

Get: TreadNosingPosition(self: StairsRunType) -> TreadNosingPosition

Set: TreadNosingPosition(self: StairsRunType) = value
"""
 TreadProfile = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The id of the profile of the treads.

Get: TreadProfile(self: StairsRunType) -> ElementId

Set: TreadProfile(self: StairsRunType) = value
"""
 TreadThickness = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The thickness of the treads.

Get: TreadThickness(self: StairsRunType) -> float

Set: TreadThickness(self: StairsRunType) = value
"""
 UndersideSurfaceStyle = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """The underside surface style of the stairs run, only available for monolithic stairs run.

Get: UndersideSurfaceStyle(self: StairsRunType) -> StairsUndersideSurfaceStyle

Set: UndersideSurfaceStyle(self: StairsRunType) = value
"""
class StairsSupportTopsideSurfaceType(Enum, IComparable, IFormattable, IConvertible):
 """
 The style of the topside surface of the support.

 enum StairsSupportTopsideSurfaceType, values: Closed (0), Open (1)
 """
 def __eq__(self, *args): #cannot find CLR method
  """ x.__eq__(y) <==> x==y """
  pass
 def __format__(self, *args): #cannot find CLR method
  """ __format__(formattable: IFormattable, format: str) -> str """
  pass
 def __ge__(self, *args): #cannot find CLR method
  """ x.__ge__(y) <==> x>=y """
  pass
 def __gt__(self, *args): #cannot find CLR method
  """ x.__gt__(y) <==> x>y """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __le__(self, *args): #cannot find CLR method
  """ x.__le__(y) <==> x<=y """
  pass
 def __lt__(self, *args): #cannot find CLR method
  """ x.__lt__(y) <==> x<y """
  pass
 def __ne__(self, *args): #cannot find CLR method
  """ x.__ne__(y) <==> x!=y """
  pass
 def __reduce_ex__(self, *args): #cannot find CLR method
  """ Helper for pickle. """
  pass
 def __str__(self, *args): #cannot find CLR method
  """ x.__str__() <==> str(x) """
  pass
 # Enumerated values; populated by the CLR at runtime, None in this stub.
 Closed = None
 Open = None
 value__ = None
class StairsTextOrientation(Enum, IComparable, IFormattable, IConvertible):
 """
 The options to be used when orienting text annotations relative to stairs.

 enum StairsTextOrientation, values: Horizontal (0), Vertical (1)
 """
 def __eq__(self, *args): #cannot find CLR method
  """ x.__eq__(y) <==> x==y """
  pass
 def __format__(self, *args): #cannot find CLR method
  """ __format__(formattable: IFormattable, format: str) -> str """
  pass
 def __ge__(self, *args): #cannot find CLR method
  """ x.__ge__(y) <==> x>=y """
  pass
 def __gt__(self, *args): #cannot find CLR method
  """ x.__gt__(y) <==> x>y """
  pass
 def __init__(self, *args): #cannot find CLR method
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __le__(self, *args): #cannot find CLR method
  """ x.__le__(y) <==> x<=y """
  pass
 def __lt__(self, *args): #cannot find CLR method
  """ x.__lt__(y) <==> x<y """
  pass
 def __ne__(self, *args): #cannot find CLR method
  """ x.__ne__(y) <==> x!=y """
  pass
 def __reduce_ex__(self, *args): #cannot find CLR method
  """ Helper for pickle. """
  pass
 def __str__(self, *args): #cannot find CLR method
  """ x.__str__() <==> str(x) """
  pass
 # Enumerated values; populated by the CLR at runtime, None in this stub.
 Horizontal = None
 value__ = None
 Vertical = None
class StairsType(ElementType, IDisposable):
    """ A type element containing the properties for a component-based stair. """
    # NOTE(review): auto-generated IronPython stub for the Revit API StairsType class.
    # Bodies are placeholders; the CLR supplies the real implementations at runtime.
    def Dispose(self):
        """ Dispose(self: Element, A_0: bool) """
        pass
    def getBoundingBox(self, *args): #cannot find CLR method
        """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
        pass
    def setElementType(self, *args): #cannot find CLR method
        """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    # The property() placeholders below stand in for CLR properties; the bare-string
    # text after each one documents the getter/setter signatures (generator convention).
    ConstructionMethod = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The construction method of the stairs.
 Get: ConstructionMethod(self: StairsType) -> StairsConstructionMethod
"""
    EndConnectionType = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The join style between a run and landing.
 Get: EndConnectionType(self: StairsType) -> StairsEndConnectionType
 Set: EndConnectionType(self: StairsType) = value
"""
    HasMiddleSupports = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """True if the stairs type has middle supports, False otherwise.
 Get: HasMiddleSupports(self: StairsType) -> bool
 Set: HasMiddleSupports(self: StairsType) = value
"""
    LandingType = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The type for all landings in the stair element.
 Get: LandingType(self: StairsType) -> ElementId
 Set: LandingType(self: StairsType) = value
"""
    LeftLateralOffset = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The offset for the left support from the edge of the run in a horizontal direction.
 Get: LeftLateralOffset(self: StairsType) -> float
 Set: LeftLateralOffset(self: StairsType) = value
"""
    LeftSideSupportType = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The type of left support used in the stair.
 Get: LeftSideSupportType(self: StairsType) -> ElementId
 Set: LeftSideSupportType(self: StairsType) = value
"""
    MaxRiserHeight = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The maximum height of each riser on the stair element.
 Get: MaxRiserHeight(self: StairsType) -> float
 Set: MaxRiserHeight(self: StairsType) = value
"""
    MiddleSupportsNumber = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The number of middle supports used in the stair.
 Get: MiddleSupportsNumber(self: StairsType) -> int
 Set: MiddleSupportsNumber(self: StairsType) = value
"""
    MiddleSupportType = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The type of middle supports used in the stair.
 Get: MiddleSupportType(self: StairsType) -> ElementId
 Set: MiddleSupportType(self: StairsType) = value
"""
    MinRunWidth = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The initial value for the width of a common run.
 Get: MinRunWidth(self: StairsType) -> float
 Set: MinRunWidth(self: StairsType) = value
"""
    MinTreadDepth = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The minimum tread width along the center path for all common runs (winder, arc, and straight).
 Get: MinTreadDepth(self: StairsType) -> float
 Set: MinTreadDepth(self: StairsType) = value
"""
    NotchExtension = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The horizontal length of the notch profile.
 Get: NotchExtension(self: StairsType) -> float
 Set: NotchExtension(self: StairsType) = value
"""
    NotchHorizontalGap = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The width of the horizontal gap in the stairs notch.
 Get: NotchHorizontalGap(self: StairsType) -> float
 Set: NotchHorizontalGap(self: StairsType) = value
"""
    NotchThickness = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The vertical length of the notch profile from the top.
 Get: NotchThickness(self: StairsType) -> float
 Set: NotchThickness(self: StairsType) = value
"""
    NotchVerticalGap = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The width of the vertical gap in the stairs notch.
 Get: NotchVerticalGap(self: StairsType) -> float
 Set: NotchVerticalGap(self: StairsType) = value
"""
    RightLateralOffset = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The offset for the right support from the edge of the run in a horizontal direction.
 Get: RightLateralOffset(self: StairsType) -> float
 Set: RightLateralOffset(self: StairsType) = value
"""
    RightSideSupportType = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The type of right support used in the stair.
 Get: RightSideSupportType(self: StairsType) -> ElementId
 Set: RightSideSupportType(self: StairsType) = value
"""
    RunType = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The type for all runs in the stair element.
 Get: RunType(self: StairsType) -> ElementId
 Set: RunType(self: StairsType) = value
"""
class StairsUndersideSurfaceStyle(Enum, IComparable, IFormattable, IConvertible):
    """
    The style of the underside surface of the run.
    enum StairsUndersideSurfaceStyle, values: Smooth (1), Stepped (0)
    """
    # NOTE(review): auto-generated IronPython stub for a .NET enum (Revit API).
    # Method bodies are placeholders; real behavior is supplied by the CLR at runtime.
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    # Enum member placeholders; the CLR provides the actual values (see class docstring).
    Smooth = None
    Stepped = None
    value__ = None
class StairsWinderStyle(Enum, IComparable, IFormattable, IConvertible):
    """
    The calculation method for the layout of the winder run steps.
    enum StairsWinderStyle, values: Balanced (0), SinglePoint (2)
    """
    # NOTE(review): auto-generated IronPython stub for a .NET enum (Revit API).
    # Method bodies are placeholders; real behavior is supplied by the CLR at runtime.
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    # Enum member placeholders; the CLR provides the actual values.
    # NOTE(review): the docstring lists values 0 and 2 only — value 1 appears
    # unused or reserved in this API version; confirm against Revit API docs.
    Balanced = None
    SinglePoint = None
    value__ = None
class TopographyEditScope(EditScope, IDisposable):
    """
    A TopographyEditScope allows an application to create and maintain an editing session for a TopographySurface.
    TopographyEditScope(document: Document, transactionName: str)
    """
    # NOTE(review): auto-generated IronPython stub (Revit API). Bodies are placeholders;
    # the CLR supplies the real implementations at runtime.
    def Dispose(self):
        """ Dispose(self: EditScope, A_0: bool) """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: EditScope, disposing: bool) """
        pass
    def Start(self, topoSurfaceId):
        """
        Start(self: TopographyEditScope, topoSurfaceId: ElementId) -> ElementId
        Starts a topography surface edit mode for an existing TopographySurface element.
        topoSurfaceId: The TopographySurface element to be edited.
        Returns: The Id of the topography Surface being edited.
        """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    @staticmethod # known case of __new__
    def __new__(self, document, transactionName):
        """ __new__(cls: type, document: Document, transactionName: str) """
        pass
    IsPermitted = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """Tests if the TopographyEditScope is permitted to start.
 Get: IsPermitted(self: TopographyEditScope) -> bool
"""
class TopographySurface(Element, IDisposable):
    """ Represents a TopographySurface element. """
    # NOTE(review): auto-generated IronPython stub (Revit API). Bodies are placeholders;
    # the CLR supplies the real implementations at runtime.
    def AddPoints(self, points):
        """ AddPoints(self: TopographySurface, points: IList[XYZ]) """
        pass
    @staticmethod
    def ArePointsDistinct(points):
        """ ArePointsDistinct(points: IList[XYZ]) -> bool """
        pass
    def AsSiteSubRegion(self):
        """
        AsSiteSubRegion(self: TopographySurface) -> SiteSubRegion
        Obtains the subregion object represented by this element.
        Returns: The SiteSubRegion element. If this does not represent a SiteSubRegion, this
        will be null.
        """
        pass
    def ChangePointElevation(self, point, elevationValue):
        """
        ChangePointElevation(self: TopographySurface, point: XYZ, elevationValue: float)
        Changes the elevation value for a point.
        point: The point to be modified.
        elevationValue: The new elevation value.
        """
        pass
    def ChangePointsElevation(self, points, elevationValue):
        """ ChangePointsElevation(self: TopographySurface, points: IList[XYZ], elevationValue: float) """
        pass
    def ContainsPoint(self, point):
        """
        ContainsPoint(self: TopographySurface, point: XYZ) -> bool
        Identifies whether the given point exists in the topography surface.
        point: The point to be checked.
        Returns: True if the input point exists in the topography surface, otherwise false.
        """
        pass
    @staticmethod
    def Create(document, points):
        """ Create(document: Document, points: IList[XYZ]) -> TopographySurface """
        pass
    def DeletePoints(self, points):
        """ DeletePoints(self: TopographySurface, points: IList[XYZ]) """
        pass
    def Dispose(self):
        """ Dispose(self: Element, A_0: bool) """
        pass
    def FindPoints(self, boundingBox):
        """
        FindPoints(self: TopographySurface, boundingBox: Outline) -> IList[XYZ]
        Filters and returns only the points of the topography surface which lie within
        the input bounding box.
        boundingBox: The 3D bounding box.
        Returns: The result points within the 3D bounding box
        """
        pass
    def GetBoundaryPoints(self):
        """
        GetBoundaryPoints(self: TopographySurface) -> IList[XYZ]
        Gets the points which are on the boundary of the topography surface.
        Returns: The collection of boundary points.
        """
        pass
    def getBoundingBox(self, *args): #cannot find CLR method
        """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
        pass
    def GetHostedSubRegionIds(self):
        """
        GetHostedSubRegionIds(self: TopographySurface) -> IList[ElementId]
        Gets the ids of all subregion elements hosted on this topography surface.
        Returns: The hosted subregion ids.
        """
        pass
    def GetInteriorPoints(self):
        """
        GetInteriorPoints(self: TopographySurface) -> IList[XYZ]
        Gets all of the points that are not boundary points for the topography surface.
        Returns: The collection of interior points.
        """
        pass
    def GetPoints(self):
        """
        GetPoints(self: TopographySurface) -> IList[XYZ]
        Gets the points that define this topography surface.
        Returns: The collection of points.
        """
        pass
    def IsBoundaryPoint(self, point):
        """
        IsBoundaryPoint(self: TopographySurface, point: XYZ) -> bool
        Identifies whether the given point is an existing boundary point of the current
        topography surface.
        point: The point to be checked.
        Returns: Returns true if a given point is an existing boundary point.
        For
        TopographySurface and SiteSubRegion elements, it returns false if the given
        point is an existing interior point of current topography surface.
        For the
        topography surface associated with a BuildingPad element, it always returns
        true if the point is a part of the element (all points are boundary
        points
        for the topography surface associated with a BuildingPad element).
        """
        pass
    @staticmethod
    def IsValidRegion(points):
        """ IsValidRegion(points: IList[XYZ]) -> bool """
        pass
    def MovePoint(self, movedPoint, targetPoint):
        """
        MovePoint(self: TopographySurface, movedPoint: XYZ, targetPoint: XYZ)
        Moves a point in a TopographySurface to a new designated location.
        movedPoint: The point to be moved.
        targetPoint: The new designated location of this point will move to.
        """
        pass
    def MovePoints(self, movedPoints, moveVector):
        """ MovePoints(self: TopographySurface, movedPoints: IList[XYZ], moveVector: XYZ) """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
        pass
    def setElementType(self, *args): #cannot find CLR method
        """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    # CLR property placeholders; the bare-string text after each documents the
    # getter/setter signatures (stub-generator convention).
    AssociatedBuildingPadId = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The element id of the building pad which causes this topography surface to be formed.
 Get: AssociatedBuildingPadId(self: TopographySurface) -> ElementId
"""
    IsAssociatedWithBuildingPad = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """Identifies if this element represents a topography surface associated with a building pad.
 Get: IsAssociatedWithBuildingPad(self: TopographySurface) -> bool
"""
    IsSiteSubRegion = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """Identifies if this element represents a subregion.
 Get: IsSiteSubRegion(self: TopographySurface) -> bool
"""
    MaterialId = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """The id of the material applied to this element.
 Get: MaterialId(self: TopographySurface) -> ElementId
 Set: MaterialId(self: TopographySurface) = value
"""
class TopRail(ContinuousRail, IDisposable):
    """ Represents a top rail element in Autodesk Revit. """
    # NOTE(review): auto-generated IronPython stub (Revit API). Bodies are placeholders;
    # the CLR supplies the real implementations at runtime.
    def Dispose(self):
        """ Dispose(self: Element, A_0: bool) """
        pass
    def getBoundingBox(self, *args): #cannot find CLR method
        """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
        pass
    def setElementType(self, *args): #cannot find CLR method
        """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
class TopRailType(ContinuousRailType, IDisposable):
    """ A rail type object that is used in the generation of top rail. """
    # NOTE(review): auto-generated IronPython stub (Revit API). Bodies are placeholders;
    # the CLR supplies the real implementations at runtime.
    def Dispose(self):
        """ Dispose(self: Element, A_0: bool) """
        pass
    def getBoundingBox(self, *args): #cannot find CLR method
        """ getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ """
        pass
    def ReleaseUnmanagedResources(self, *args): #cannot find CLR method
        """ ReleaseUnmanagedResources(self: Element, disposing: bool) """
        pass
    def setElementType(self, *args): #cannot find CLR method
        """ setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
class TreadNosingPosition(Enum, IComparable, IFormattable, IConvertible):
    """
    Represents on which edges of the nosing to apply the nosing profile.
    enum TreadNosingPosition, values: FrontAndLeft (1), FrontAndRight (2), FrontLeftAndRight (3), FrontOnly (0)
    """
    # NOTE(review): auto-generated IronPython stub for a .NET enum (Revit API).
    # Method bodies are placeholders; real behavior is supplied by the CLR at runtime.
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    # Enum member placeholders; the CLR provides the actual values (see class docstring).
    FrontAndLeft = None
    FrontAndRight = None
    FrontLeftAndRight = None
    FrontOnly = None
    value__ = None
class WinderPathResult(Enum, IComparable, IFormattable, IConvertible):
    """
    Flag indicates whether curves are valid to use as base lines for winder path.
    enum WinderPathResult, values: ColinearOrOverlap (6), InvalidCurveType (8), Noncontinuous (3), NotOpenLoop (4), NotSupported (9), NumberOutOfRange (1), SelfIntersect (7), Success (0), TooShort (5), Unbound (2)
    """
    # NOTE(review): auto-generated IronPython stub for a .NET enum (Revit API).
    # Method bodies are placeholders; real behavior is supplied by the CLR at runtime.
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __ge__(self, *args): #cannot find CLR method
        pass
    def __gt__(self, *args): #cannot find CLR method
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __le__(self, *args): #cannot find CLR method
        pass
    def __lt__(self, *args): #cannot find CLR method
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    # Enum member placeholders; the CLR provides the actual values (see class docstring).
    ColinearOrOverlap = None
    InvalidCurveType = None
    Noncontinuous = None
    NotOpenLoop = None
    NotSupported = None
    NumberOutOfRange = None
    SelfIntersect = None
    Success = None
    TooShort = None
    Unbound = None
    value__ = None
| 35.451685
| 224
| 0.629437
|
0156623efa81f30157d0d8fe6140dfe74f495af1
| 5,567
|
py
|
Python
|
0522/tsp/M2PC.py
|
Kurogi-Lab/CAN2
|
abd029895f2ff9d1c8debdb3825b0d4b9314d136
|
[
"CECILL-B"
] | null | null | null |
0522/tsp/M2PC.py
|
Kurogi-Lab/CAN2
|
abd029895f2ff9d1c8debdb3825b0d4b9314d136
|
[
"CECILL-B"
] | null | null | null |
0522/tsp/M2PC.py
|
Kurogi-Lab/CAN2
|
abd029895f2ff9d1c8debdb3825b0d4b9314d136
|
[
"CECILL-B"
] | 1
|
2020-12-01T00:54:18.000Z
|
2020-12-01T00:54:18.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pandas as pd #for pandas see http://keisanbutsuriya.hateblo.jp/entry/201\
import argparse
import numpy as np
import math
import subprocess
import glob
import os
#from matplotlib import pylab as plt
import matplotlib.pyplot as plt
from numpy.lib.stride_tricks import as_strided
import mylib
import time
import datetime
import sys
# Default speaker IDs, utterance indices, digit names (Japanese), and lambda values.
# NOTE(review): the main block below re-parses these from argparse (Sa/Da/La/Ra);
# these module-level lists are only used for nS/nL/nD below — TODO confirm intent.
S=['fhs', 'fms', 'mkk', 'mko','mmt','mnh','mym']
L=[1,2,3,4,5,6,7,8,9,10]
D=['zero','ichi','ni','san','si','go','roku','nana','hachi','kyu']
R=[1,0.8,0.6,0.4,0.2]
if __name__ == "__main__":
    # Batch driver: for each (lambda, speaker, digit, index) combination, locate the
    # matching *-M.dat file and run a maxima script (M2PC.mac) to produce *-PC.dat.
    #oob4speakerdigit+sX_2018 sp:fhs:fms tx:zero:ichi ntxi:9 k:36 mbas:$mbas dir:$dir1 dir2:$dir2 s:-1 N:${N} sX:${sX}
    #oob4speakerdigit+ sp:fhs:fms:mkk:mko:mmt:mnh:mym tx:zero:ichi:ni:san:yon:go:roku:nana:hachi:kyu ntxi:10 k:36 mbas:2:300:1.6:1 dir:$dira s:-1 N:40
    parser = argparse.ArgumentParser(description='speech normalize')
    parser.add_argument('-S', default='fhs:fms:mkk:mko:mmt:mnh:mym', type=str, help='speaker')
    parser.add_argument('-D', default='zero:ichi:ni:san:si:go:roku:nana:hachi:kyu', type=str, help='text')
    parser.add_argument('-L', default='1:2:3:4:5:6:7:8:9:10', type=str, help='datetime index')
    parser.add_argument('-R', default='1:0.8:0.6:0.4:0.2:0.1', type=str, help='lambda')
    parser.add_argument('-dl', default='../../12voicedata_ueki_all', type=str, help='folder involving original')
    parser.add_argument('-dn', default='../../12voicedata_ueki_all_n', type=str, help='folder involving normalized')
    parser.add_argument('-dm', default='../../12voicedata_ueki_all_m', type=str, help='folder involving mixed')
#    parser.add_argument('-dq', default='../../12voicedata_ueki_all_q', type=str, help='folder involving q')
    parser.add_argument('-dp', default='../../12voicedata_ueki_all_p', type=str, help='folder involving p')
#    parser.add_argument('-ds', default='../../12voicedata_ueki_all_k8', type=str, help='folder involving source')
    parser.add_argument('-dr', default='../../12voicedata_ueki_all_k8', type=str, help='folder involving r')
    parser.add_argument('-k', default=8, type=int, help='embedding dimension')
    parser.add_argument('-N', default=24, type=int, help='Number of units')
    parser.add_argument('-sql', default='0', type=str, help='1 for search q lack')
#    parser.add_argument('-lm', default='tspredv3', type=str, choices=('tspredv3', 'tspredv3er'), help='learning machine ')
    parser.add_argument('-DISP', default='10', type=str, help='DISP[0]==1 to make files, DISP[1]==1 to display')
    parser.add_argument('-ow', default=1, type=int, help='Overwite files if ow=1, omit otherwise.')
    parser.add_argument('-dbgi', default=-1, type=int, help='debug at i-th iteration')
    parser.add_argument('-fnerr', default='mmt-san2:fhs-hachi5', type=str, help='files lacking data')
    args = parser.parse_args()
    # Colon-separated CLI options expanded into lists.
    Sa=(args.S).split(':') #speakers
    Da=(args.D).split(':') #text
    La=(args.L).split(':') #index
    Ra=(args.R).split(':') #lambda
    dl=args.dl
    dn=args.dn
    dm=args.dm
#    dq=args.dq
#    dp=args.dp
#    ds=args.ds
    dr=args.dr
    N=args.N
    DISP=args.DISP
    fnerr=(args.fnerr).split(':') #fnerr=['mmt-san2', 'fhs-hachi5']
    # NOTE(review): nS/nL/nD come from the module-level defaults, not the parsed
    # args, and appear unused below — presumably leftover; confirm before removing.
    nS=len(S)
    nL=len(L)
    nD=len(D)
    np.random.seed(seed=32)
    # Ensure the output directory exists.
    for d in [dr]:# for d in [dl, dn, dm, dq, dp, df]:
        if not os.path.exists(d): os.mkdir(d)
    start_time=time.time()
    print('#start time:{}'.format(datetime.datetime.now()))
    it=0
    for r in Ra:
        # Collect candidate base names: strip directory and the trailing "-M.dat"
        # (6 chars) from every matching file.
        fnq='{}/*R{}*-M.dat'.format(dr,r)
        Fq=[(f.split('/')[-1])[:-6] for f in glob.glob(fnq)] #file list
        for s in Sa:
            for d in Da:
                for l in La:
                    fn=s +'-'+ d + l
                    if not fn in fnerr:
                        # Find the full base name whose prefix matches this combination.
                        fnB0='{}-{}{}-R{}'.format(s,d,l,r)
                        fnB=''
                        for fnq in Fq:
                            if fnB0 in fnq:
                                fnB=fnq
#                        if fnB0=='fhs-roku7-R0.8-fms-si6-zero2-R0.8':
#                            import pdb;pdb.set_trace(); #for debug
                        if fnB == '':
                            print('#{}*-M.dat noexist. Make *-M.dat first!'.format(fnB0))
                            continue
                        fnout='{}/{}-PC.dat'.format(dr,fnB)#
                        # Skip already-produced outputs unless overwrite is requested.
                        if os.path.exists(fnout) and args.ow == 0:
                            print('#{} exists: Omit calc becaus -ow 0'.format(fnout))
                            continue
#                        cmd0='fnB:"{}/{}";load("M2xyC.mac");'.format(dr,fnB);
#                        import pdb;pdb.set_trace(); #for debug
                        # Run maxima in batch mode on M2PC.mac with fnB bound to this base name.
                        cmd='echo {}fnB:"{}/{}";batchload("M2PC.mac");{}|maxima'.format("'",dr,fnB,"'")
#                        cmd='echo {}fnB:"{}/{}";batchload("M2pc.mac");{}|maxima'.format("'",dr,fnB,"'")
#                        cmd='echo {}fnB:"{}/{}";batchload("M2xyC.mac");{}|maxima >tmp/tmp.log'.format("'",dr,fnB,"'")
#                        cmd='echo {}fnB:"{}/{}";batch("M2xyC.mac");{}|maxima -b >/dev/null'.format("'",dr,fnB,"'")
                        #cmd='maxima < M2xyC.mac' #read tmp/M.dat produce tmp/xyC.dat
                        print('#{}'.format(cmd))
#                        import pdb;pdb.set_trace(); #for debug 20191105
                        mylib.myshell(cmd);
                        print('#{}-PC.dat created'.format(fnB))
#                        import pdb;pdb.set_trace(); #for debug 20191105
                        sys.stdout.flush()
                        it += 1
                        # Optional breakpoint at the dbgi-th processed file.
                        if args.dbgi>0 and it==args.dbgi:
                            print('### Now in debug mode');
                            import pdb;pdb.set_trace(); #for debug 20191105
#    import pdb;pdb.set_trace(); #for debug
#    import pdb;pdb.set_trace(); #for debug
    elapsed_time=time.time()-start_time
    etime='ElapsedTime {:.3f}s({})'.format(elapsed_time,str(datetime.timedelta(seconds=elapsed_time))[:-3])
    print('#{}'.format(etime))
| 47.991379
| 146
| 0.608047
|
5414b667b92a535322a23f4c7c522b07fa0d2b15
| 9,424
|
py
|
Python
|
app/portal/horizon/releasenotes/source/conf.py
|
haoshen61/f5-adcaas-openstack
|
4bda29271930bf7c621f4184bda8d43b2fa96336
|
[
"Apache-2.0"
] | 4
|
2019-06-21T06:42:07.000Z
|
2020-12-04T11:59:25.000Z
|
app/portal/horizon/releasenotes/source/conf.py
|
haoshen61/f5-adcaas-openstack
|
4bda29271930bf7c621f4184bda8d43b2fa96336
|
[
"Apache-2.0"
] | 106
|
2019-01-18T03:06:55.000Z
|
2019-11-29T05:06:18.000Z
|
app/portal/horizon/releasenotes/source/conf.py
|
haoshen61/f5-adcaas-openstack
|
4bda29271930bf7c621f4184bda8d43b2fa96336
|
[
"Apache-2.0"
] | 23
|
2019-01-10T01:49:08.000Z
|
2020-05-26T01:10:38.000Z
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Horizon Release Notes documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 3 17:40:50 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extensions: reno for release-note rendering, extlinks for the bug/blueprint
# shortcuts defined below, openstackdocstheme for the OpenStack HTML theme.
extensions = [
    'reno.sphinxext',
    'sphinx.ext.extlinks',
    'openstackdocstheme',
]
# openstackdocstheme options
repository_name = 'openstack/horizon'
bug_project = 'horizon'
bug_tag = 'documentation'
html_last_updated_fmt = '%Y-%m-%d %H:%M'
# Set aliases for extlinks
# - generic launchpad bug - [:bug:`12345`]
# - horizon blueprint - [:blueprint:`drop-nova-network`]
extlinks = {
    'bug': (
        'https://bugs.launchpad.net/bugs/%s',
        'bug ',
    ),
    'blueprint': (
        'https://blueprints.launchpad.net/horizon/+spec/%s',
        'blueprint ',
    ),
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Horizon Release Notes'
copyright = u'2015, Horizon Developers'
# Release notes are version independent
# (left empty deliberately — see the comment above).
# The full version, including alpha/beta/rc tags.
release = ''
# The short X.Y version.
version = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'HorizonReleaseNotesdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'HorizonReleaseNotes.tex',
u'Horizon Release Notes Documentation',
u'Horizon Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'horizonreleasenotes', u'Horizon Release Notes Documentation',
[u'Horizon Developers'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'HorizonReleaseNotes', u'Horizon Release Notes Documentation',
u'Horizon Developers', 'HorizonReleaseNotes',
'Dashboard for OpenStack.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# -- Options for Internationalization output ------------------------------
locale_dirs = ['locale/']
| 32.054422
| 79
| 0.708404
|
d5fff0e3574dcfcd4f7f0c0fdec984acf8c89083
| 2,971
|
py
|
Python
|
pytest_cpp/plugin.py
|
maximilianriemensberger/pytest-cpp
|
404de526aa6f09838746f18702866d9e020a6fe9
|
[
"MIT"
] | null | null | null |
pytest_cpp/plugin.py
|
maximilianriemensberger/pytest-cpp
|
404de526aa6f09838746f18702866d9e020a6fe9
|
[
"MIT"
] | null | null | null |
pytest_cpp/plugin.py
|
maximilianriemensberger/pytest-cpp
|
404de526aa6f09838746f18702866d9e020a6fe9
|
[
"MIT"
] | null | null | null |
import os
import stat
import pytest
from pytest_cpp.boost import BoostTestFacade
from pytest_cpp.error import CppFailureRepr, CppFailureError
from pytest_cpp.google import GoogleTestFacade
FACADES = [GoogleTestFacade, BoostTestFacade]
DEFAULT_MASKS = ('test_*', '*_test')
_ARGUMENTS = 'cpp_arguments'
_EMULATOR = 'cpp_emulator'
def pytest_collect_file(parent, path):
    """pytest hook: collect C++ test executables.

    A file is collected when it is executable, matches one of the configured
    ``cpp_files`` masks (or was given explicitly on the command line), and is
    recognized as a test suite by one of the known facades.
    """
    try:
        executable_bit = os.stat(str(path)).st_mode & stat.S_IXUSR
    except OSError:
        # The file may have disappeared between discovery and stat().
        executable_bit = False
    if not executable_bit:
        return

    config = parent.config
    patterns = config.getini('cpp_files') or DEFAULT_MASKS
    extra_args = config.getini(_ARGUMENTS) or ()
    emulator_args = config.getini(_EMULATOR) or ()

    # Paths passed directly on the command line bypass the mask filter.
    matches_mask = any(path.fnmatch(pattern) for pattern in patterns)
    if not parent.session.isinitpath(path) and not matches_mask:
        return

    for facade_cls in FACADES:
        if facade_cls.is_test_suite(str(path)):
            return CppFile(path, parent, facade_cls(), extra_args,
                           emulator_args)
def pytest_addoption(parser):
    """pytest hook: register ini options for C++ test discovery/execution."""
    ini_options = (
        ('cpp_files', DEFAULT_MASKS,
         'glob-style file patterns for C++ test module discovery'),
        (_ARGUMENTS, '',
         'Additional arguments for test executables'),
        (_EMULATOR, '',
         'Emulator and arguments for executing the tests (e.g. qemu-user-static)'),
    )
    for option_name, default, help_text in ini_options:
        parser.addini(option_name, type='args', default=default,
                      help=help_text)
class CppFile(pytest.File):
    """pytest collector wrapping a single C++ test executable."""

    def __init__(self, path, parent, facade, arguments, emuargs):
        pytest.File.__init__(self, path, parent)
        self.facade = facade
        self._arguments = arguments
        self._emuargs = emuargs

    def collect(self):
        """Yield one CppItem per test case reported by the facade."""
        all_cases = self.facade.list_tests(str(self.fspath), self._emuargs)
        for case_id in all_cases:
            yield CppItem(case_id, self, self.facade, self._arguments,
                          self._emuargs)
class CppItem(pytest.Item):
    """pytest item delegating one C++ test case to its framework facade."""

    def __init__(self, name, collector, facade, arguments, emuargs):
        pytest.Item.__init__(self, name, collector)
        self.facade = facade
        self._arguments = arguments
        self._emuargs = emuargs

    def runtest(self):
        """Execute the test case; raise CppFailureError on any failure."""
        reported_failures = self.facade.run_test(
            str(self.fspath), self.name, self._arguments, self._emuargs)
        if reported_failures:
            raise CppFailureError(reported_failures)

    def repr_failure(self, excinfo):
        """Render C++ failures with the dedicated repr; defer otherwise."""
        if isinstance(excinfo.value, CppFailureError):
            return CppFailureRepr(excinfo.value.failures)
        return pytest.Item.repr_failure(self, excinfo)

    def reportinfo(self):
        return self.fspath, 0, self.name
| 31.946237
| 96
| 0.62033
|
2f7fa15b446d569576b9cb9ed1395c5e3d2faea2
| 1,382
|
py
|
Python
|
runoob/advanced_tutorial/employee.py
|
zeroonegit/python
|
919f8bb14ae91e37e42ff08192df24b60135596f
|
[
"MIT"
] | 1
|
2017-03-30T00:43:40.000Z
|
2017-03-30T00:43:40.000Z
|
runoob/advanced_tutorial/employee.py
|
QuinceySun/Python
|
919f8bb14ae91e37e42ff08192df24b60135596f
|
[
"MIT"
] | null | null | null |
runoob/advanced_tutorial/employee.py
|
QuinceySun/Python
|
919f8bb14ae91e37e42ff08192df24b60135596f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
############################
# File Name: employee.py
# Author: One Zero
# Mail: zeroonegit@gmail.com
# Created Time: 2015-12-28 20:18:40
############################
class Employee:
    """Base class for all employees."""

    # Running count of Employee instances created since module load.
    empCount = 0

    def __init__(self, name, salary):
        """Store the employee's name and salary and bump the shared count."""
        self.name = name
        self.salary = salary
        # Bound to Employee explicitly so subclasses share one counter.
        Employee.empCount += 1

    def displayCount(self):
        """Print how many Employee instances exist."""
        print(f"Total Employee {Employee.empCount}")

    def displayEmployee(self):
        """Print this employee's name and salary."""
        print(f"Name: {self.name}, Salary: {self.salary}")
# Create the first object of the Employee class
emp1 = Employee("Zara", 2000)
# Create the second object of the Employee class
emp2 = Employee("Manni", 5000)
emp1.displayEmployee()
emp2.displayEmployee()
print("Total Employee {}".format(Employee.empCount))
# Adding, modifying and deleting instance attributes
emp1.age = 7  # add an 'age' attribute
emp1.age = 8  # modify the 'age' attribute
del emp1.age  # delete the 'age' attribute
# Accessing attributes through the builtin reflection functions
hasattr(emp1, 'age')  # returns True if an 'age' attribute exists
setattr(emp1, 'age', 8)  # set the 'age' attribute to 8
getattr(emp1, 'age')  # return the value of the 'age' attribute
delattr(emp1, 'age')  # delete the 'age' attribute
# Built-in class attributes
print("Employee.__doc__: {}".format(Employee.__doc__))
print("Employee.__name__: {}".format(Employee.__name__))
print("Employee.__module__: {}".format(Employee.__module__))
print("Employee.__bases__: {}".format(Employee.__bases__))
print("Employee.__dict__: {}".format(Employee.__dict__))
| 26.075472
| 68
| 0.630246
|
1d2df12ea65f1ce5e1c9e6c78678cb3ea3ac2bfc
| 6,072
|
py
|
Python
|
nesasm/tests/ora_test.py
|
gutomaia/nesasm_py
|
ddd746498806dce77fba75438c7a4338ff442842
|
[
"BSD-3-Clause"
] | 14
|
2015-08-02T05:36:38.000Z
|
2021-01-26T13:16:48.000Z
|
nesasm/tests/ora_test.py
|
gutomaia/nesasm_py
|
ddd746498806dce77fba75438c7a4338ff442842
|
[
"BSD-3-Clause"
] | 9
|
2015-09-19T16:22:54.000Z
|
2017-10-18T20:15:36.000Z
|
nesasm/tests/ora_test.py
|
gutomaia/nesasm_py
|
ddd746498806dce77fba75438c7a4338ff442842
|
[
"BSD-3-Clause"
] | 3
|
2015-10-18T15:37:19.000Z
|
2021-03-08T22:51:32.000Z
|
# -*- coding: utf-8 -*-
'''
ORA, OR with Accumulator Test
This is an Logical operation of the 6502
'''
import unittest
from nesasm.compiler import lexical, syntax, semantic
class OraTest(unittest.TestCase):
    """Tests for ORA (OR with Accumulator), a 6502 logical instruction.

    Every case assembles one source line and checks, in order: the token
    stream produced by the lexer, the single AST node (addressing mode)
    produced by the parser, and the machine-code bytes emitted by the
    semantic pass.

    Note: the original tests used ``assertEquals``, a deprecated alias
    removed in Python 3.12; this version uses ``assertEqual`` and factors
    the repeated stage checks into a single helper.
    """

    def _assert_assembles(self, source, token_types, ast_type, opcodes,
                          token_values=None):
        """Assemble ``source`` and verify every compilation stage.

        Args:
            source: one line of assembly, e.g. ``'ORA #$10'``.
            token_types: expected ``type`` field of each token, in order.
            ast_type: expected addressing-mode node type, e.g. 'S_IMMEDIATE'.
            opcodes: expected emitted machine-code bytes.
            token_values: optional mapping of token index to the expected
                ``value`` field, for spot-checking operand text.
        """
        tokens = list(lexical(source))
        self.assertEqual(len(token_types), len(tokens))
        for index, expected_type in enumerate(token_types):
            self.assertEqual(expected_type, tokens[index]['type'])
        for index, expected_value in (token_values or {}).items():
            self.assertEqual(expected_value, tokens[index]['value'])
        ast = syntax(tokens)
        self.assertEqual(1, len(ast))
        self.assertEqual(ast_type, ast[0]['type'])
        self.assertEqual(semantic(ast), opcodes)

    def test_ora_imm(self):
        '''ORA with an immediate hex operand ($10 == decimal 16).'''
        self._assert_assembles('ORA #$10',
                               ['T_INSTRUCTION', 'T_HEX_NUMBER'],
                               'S_IMMEDIATE', [0x09, 0x10])

    def test_ora_imm_with_decimal(self):
        '''ORA with an immediate decimal operand (#10).'''
        self._assert_assembles('ORA #10',
                               ['T_INSTRUCTION', 'T_DECIMAL_NUMBER'],
                               'S_IMMEDIATE', [0x09, 0x0a])

    def test_ora_imm_with_binary(self):
        '''ORA with an immediate binary operand (#%00000100 == decimal 4).'''
        self._assert_assembles('ORA #%00000100',
                               ['T_INSTRUCTION', 'T_BINARY_NUMBER'],
                               'S_IMMEDIATE', [0x09, 0x04])

    def test_ora_zp(self):
        '''ORA against zero-page address $00.'''
        self._assert_assembles('ORA $00',
                               ['T_INSTRUCTION', 'T_ADDRESS'],
                               'S_ZEROPAGE', [0x05, 0x00])

    def test_ora_zpx(self):
        '''ORA against zero-page address $10 indexed by X.'''
        self._assert_assembles('ORA $10,X',
                               ['T_INSTRUCTION', 'T_ADDRESS',
                                'T_SEPARATOR', 'T_REGISTER'],
                               'S_ZEROPAGE_X', [0x15, 0x10])

    def test_ora_abs(self):
        '''ORA against absolute address $1234 (operand emitted little-endian).'''
        self._assert_assembles('ORA $1234',
                               ['T_INSTRUCTION', 'T_ADDRESS'],
                               'S_ABSOLUTE', [0x0d, 0x34, 0x12],
                               token_values={1: '$1234'})

    def test_ora_absx(self):
        '''ORA against absolute address $1234 indexed by X.'''
        self._assert_assembles('ORA $1234,X',
                               ['T_INSTRUCTION', 'T_ADDRESS',
                                'T_SEPARATOR', 'T_REGISTER'],
                               'S_ABSOLUTE_X', [0x1d, 0x34, 0x12],
                               token_values={1: '$1234'})

    def test_ora_absy(self):
        '''ORA against absolute address $1234 indexed by Y.'''
        self._assert_assembles('ORA $1234,Y',
                               ['T_INSTRUCTION', 'T_ADDRESS',
                                'T_SEPARATOR', 'T_REGISTER'],
                               'S_ABSOLUTE_Y', [0x19, 0x34, 0x12])

    def test_ora_indx(self):
        '''ORA with ($20,X) pre-indexed indirect addressing.'''
        self._assert_assembles('ORA ($20,X)',
                               ['T_INSTRUCTION', 'T_OPEN', 'T_ADDRESS',
                                'T_SEPARATOR', 'T_REGISTER', 'T_CLOSE'],
                               'S_INDIRECT_X', [0x01, 0x20],
                               token_values={2: '$20'})

    def test_ora_indy(self):
        '''ORA with ($20),Y post-indexed indirect addressing.'''
        self._assert_assembles('ORA ($20),Y',
                               ['T_INSTRUCTION', 'T_OPEN', 'T_ADDRESS',
                                'T_CLOSE', 'T_SEPARATOR', 'T_REGISTER'],
                               'S_INDIRECT_Y', [0x11, 0x20])
| 40.751678
| 69
| 0.612813
|
4d757838bc48706a0259dc43279c32f14ee49fca
| 555
|
py
|
Python
|
utils.py
|
mask-application/maskapp_bot
|
768f68565b826c3f03565138e67e560775de1be7
|
[
"MIT"
] | 3
|
2020-03-24T20:13:03.000Z
|
2020-04-06T09:24:22.000Z
|
utils.py
|
mask-application/maskapp_bot
|
768f68565b826c3f03565138e67e560775de1be7
|
[
"MIT"
] | null | null | null |
utils.py
|
mask-application/maskapp_bot
|
768f68565b826c3f03565138e67e560775de1be7
|
[
"MIT"
] | null | null | null |
import logging
import os
import sys
def config_logging(log_level, file_name):
    """Configure the root logger to log to ``logs/<file_name>`` and stdout.

    Args:
        log_level: level for the root logger (e.g. ``logging.INFO``).
        file_name: name of the log file, placed inside the ``logs``
            directory.  The directory is created if it does not exist
            (previously ``FileHandler`` raised when ``logs/`` was missing).
    """
    root_logger = logging.getLogger()
    root_logger.setLevel(log_level)

    log_formatter = logging.Formatter(fmt='%(asctime)s_%(name)s_%(levelname)s: %(message)s')

    # logging.FileHandler fails if the target directory does not exist,
    # so make sure it is there first.
    os.makedirs("logs", exist_ok=True)
    file_handler = logging.FileHandler(os.path.join("logs", file_name),
                                       encoding='utf-8')
    file_handler.setFormatter(log_formatter)
    root_logger.addHandler(file_handler)

    sysout_handler = logging.StreamHandler(stream=sys.stdout)
    sysout_handler.setFormatter(log_formatter)
    root_logger.addHandler(sysout_handler)
| 32.647059
| 92
| 0.765766
|
1925333b96f227151a0046aba44cd817f2d04e57
| 227
|
py
|
Python
|
BackEnd/api/service/scholar_question.py
|
camilleAmaury/X5GON_project
|
8d5b61eb45a357fe1881c0523389d463724c6448
|
[
"Unlicense"
] | 1
|
2021-05-02T14:24:38.000Z
|
2021-05-02T14:24:38.000Z
|
BackEnd/api/service/scholar_question.py
|
camilleAmaury/X5GON_project
|
8d5b61eb45a357fe1881c0523389d463724c6448
|
[
"Unlicense"
] | 1
|
2022-02-10T00:48:47.000Z
|
2022-02-10T00:48:47.000Z
|
BackEnd/api/service/scholar_question.py
|
camilleAmaury/X5GON_project
|
8d5b61eb45a357fe1881c0523389d463724c6448
|
[
"Unlicense"
] | null | null | null |
def build_scholar_question_schema(scholar_question):
    """Serialize a scholar-question object into a plain dict.

    Args:
        scholar_question: object exposing ``question_id``, ``question``
            and ``answer`` attributes.

    Returns:
        dict with keys 'question_id', 'question' and 'answer'.
    """
    return {
        'question_id': scholar_question.question_id,
        'question': scholar_question.question,
        'answer': scholar_question.answer,
    }
| 32.428571
| 53
| 0.735683
|
f305b83d594ed67c0faf32e2b70648cadd0f8990
| 430
|
py
|
Python
|
setup.py
|
olpa/python-lxslt
|
8d6acdd5dd97baed9ad5ec0135c18d95b0d530d9
|
[
"MIT"
] | null | null | null |
setup.py
|
olpa/python-lxslt
|
8d6acdd5dd97baed9ad5ec0135c18d95b0d530d9
|
[
"MIT"
] | null | null | null |
setup.py
|
olpa/python-lxslt
|
8d6acdd5dd97baed9ad5ec0135c18d95b0d530d9
|
[
"MIT"
] | null | null | null |
import setuptools

# Packaging metadata for the lxslt library (XSLT-like transformations over
# Python lists).
setuptools.setup(
    name='lxslt',
    version="0.1.0",
    author="Oleg Parashchenko",
    author_email="olpa@uucode.com",
    description="XSLT-like transformations over python lists",
    url="https://github.com/olpa/python-lxslt",
    classifiers=[
        "Topic :: Text Processing :: Markup :: XML",
    ],
    # The package sources live under ./src/lxslt rather than ./lxslt.
    package_dir={'lxslt': './src/lxslt'},
    packages=['lxslt'],
    python_requires=">=3.6",
)
| 25.294118
| 62
| 0.630233
|
08f3dc61dfbf90d9d28e284a65c45e9a97894992
| 4,226
|
py
|
Python
|
tests/test_neuron.py
|
dimdamop/single-neuron
|
fa649bcd2c7cc68b46c87e63e3c5869f772fecdf
|
[
"MIT"
] | null | null | null |
tests/test_neuron.py
|
dimdamop/single-neuron
|
fa649bcd2c7cc68b46c87e63e3c5869f772fecdf
|
[
"MIT"
] | 2
|
2019-02-21T19:16:02.000Z
|
2019-02-21T20:12:33.000Z
|
tests/test_neuron.py
|
dimdamop/single-neuron
|
fa649bcd2c7cc68b46c87e63e3c5869f772fecdf
|
[
"MIT"
] | null | null | null |
# Author: Dimitrios Damopoulos
# MIT license (see LICENCE.txt in the top-level folder)
import unittest
import os
from tempfile import mkstemp
from os.path import exists
import numpy as np
from numpy import random
import string
from random import choice as std_choice
from single_neuron import neuron as neuron
csv_files_to_check_n = 100
max_column_header_len = 30
max_features_n = 100
max_samples_n = 100000
class CsvParsingTestCase(unittest.TestCase):
    """Round-trip tests for neuron.parse_csv: write randomly generated CSV
    files (with comments, blank lines and optionally delimited headers) and
    check the parsed headers and values match what was written."""

    @classmethod
    def synthetic_csv(cls, fd_out, N, m, sep, name_delim='', comment_char='#'):
        """
        Creates a synthetic CSV character stream with column headers, values and
        extra lines which are either empty or they are comments.
        Args:
            fd_out (output stream): Where to write the CSV. Typically, that is a
                file descriptor.
            N (int): The number of samples
            m (int): The number of features per sample
            sep (str): The character that separates the values in the CSV file
            name_delim (str): An optional character to add before and after
                every header column name.
            comment_char (str): When the first non-whitespace character of a
                line is the character `comment_char', then that line should be
                treated as a comment.
        Returns:
            A list of the headers (without the `name_delim')
            A np.ndarray of the values in the CSV.
        """
        # Build a header charset that cannot collide with the separator,
        # the comment character or the name delimiter.
        charset = string.ascii_letters + string.punctuation + ' \t'
        charset = charset.replace(sep, '')
        charset = charset.replace(comment_char, '')
        if len(name_delim) > 0:
            charset = charset.replace(name_delim, '')
        headers = []
        while len(headers) < m:
            header_len = random.randint(1, max_column_header_len + 1)
            header = ''.join(std_choice(charset) for _ in range(header_len))
            headers.append(header.strip())
        # Values drawn uniformly from (-1000, 1000).
        values = 2000 * (random.rand(N, m) - 0.5)
        val_line_idx = 0
        is_header_written = False
        while val_line_idx < N:
            # insert some comments
            if random.rand() < 0.1:
                line = comment_char + \
                    ''.join(std_choice(charset) for _ in range(100))
            # insert some blank lines
            elif random.rand() < 0.1:
                line = ''
            elif random.rand() < 0.1:
                line = ' '
            elif not(is_header_written):
                line = sep.join([name_delim + header + name_delim
                                 for header in headers])
                is_header_written = True
            else:
                line = sep.join([str(element)
                                 for element in values[val_line_idx]])
                val_line_idx += 1
            fd_out.write(line + '\n')
        return values, headers

    def test_parse_csv(self):
        """Round-trip many random CSV files through neuron.parse_csv."""
        # '.' and '-' cannot serve as separators: they occur inside the
        # stringified float values.
        candidate_characters = string.ascii_letters + string.punctuation
        candidate_characters = candidate_characters.replace('.', '')
        candidate_characters = candidate_characters.replace('-', '')
        for _ in range(0, csv_files_to_check_n):
            N = random.randint(1, max_samples_n + 1)
            m = random.randint(1, max_features_n + 1)
            # NOTE(review): the two assignments below override the random
            # sizes just drawn above -- they look like leftover debug
            # values; confirm whether the randomized N and m were intended.
            N = 10
            m = 3
            # Seed the separators equal so the while loop below always runs
            # at least once and draws mutually distinct characters.
            n_sep = ';'
            v_sep = ';'
            c_sep = '#'
            while c_sep == n_sep or c_sep == v_sep or n_sep == v_sep:
                n_sep = std_choice(candidate_characters)
                v_sep = std_choice(candidate_characters)
                # 50% of the files get no header-name delimiter at all.
                if random.rand() < 0.5:
                    n_sep = ''
            _, csv_fn = mkstemp()
            try:
                csv_fd = open(csv_fn, 'w')
                V1, H1 = self.synthetic_csv(csv_fd, N, m, sep=v_sep,
                                            name_delim=n_sep, comment_char=c_sep)
                csv_fd.close()
                V2, H2 = neuron.parse_csv(csv_fn, sep=v_sep, name_delim=n_sep)
            finally:
                os.remove(csv_fn)
            self.assertEqual(H1, H2)
            self.assertTrue((V1 == V2).all())
| 33.015625
| 81
| 0.551585
|
4907e40b24429f96f3c576a4aca52798c718ca24
| 1,000
|
py
|
Python
|
authors/apps/profiles/serializers.py
|
andela/ah-backend-sparta
|
fcc394e486a736993702bfa1e6fd9e9b189b93ae
|
[
"BSD-3-Clause"
] | null | null | null |
authors/apps/profiles/serializers.py
|
andela/ah-backend-sparta
|
fcc394e486a736993702bfa1e6fd9e9b189b93ae
|
[
"BSD-3-Clause"
] | 11
|
2019-03-25T14:38:23.000Z
|
2019-04-18T08:02:10.000Z
|
authors/apps/profiles/serializers.py
|
andela/ah-backend-sparta
|
fcc394e486a736993702bfa1e6fd9e9b189b93ae
|
[
"BSD-3-Clause"
] | 5
|
2019-06-12T08:22:58.000Z
|
2020-02-07T08:26:37.000Z
|
from rest_framework import serializers
from .models import Profile
class ProfileSerializer(serializers.ModelSerializer):
    """Serializer for user profiles, exposing follower/following counts."""

    # Computed by get_followers_no / get_following_no below.
    followers_no = serializers.SerializerMethodField()
    following_no = serializers.SerializerMethodField()

    class Meta:
        model = Profile
        fields = ('username','firstname', 'lastname', 'bio', 'image', 'followers_no', 'following_no')
        read_only_fields = ('followers_no', 'following_no')

    def get_followers_no(self, instance):
        '''Count rows in the ``follows`` through-table where this profile is
        the target (``to_profile``).

        NOTE(review): the original docstring described this as "the number
        of users a user follows", but filtering on ``to_profile_id`` looks
        like it counts this profile's *followers* -- confirm against the
        Profile model's ``follows``/``followed_by`` field definitions.
        '''
        number_of_users_a_user_follows = instance
        return Profile.follows.through.objects.filter(to_profile_id=number_of_users_a_user_follows.pk).count()

    def get_following_no(self, instance):
        '''Count rows in the ``followed_by`` through-table where this profile
        is the source (``from_profile``).

        NOTE(review): presumably the number of users this profile follows
        (the original docstring said the opposite) -- TODO confirm.
        '''
        number_of_users_following_a_user = instance
        return Profile.followed_by.through.objects.filter(from_profile_id=number_of_users_following_a_user.pk).count()
| 34.482759
| 118
| 0.736
|
9ff65436b9c0205b51fb30d00db215030e23b4f5
| 1,524
|
py
|
Python
|
locallibrary/urls.py
|
mrmarkhurlburt/django_media_library
|
59bcff3dcf8a803dbfa22eb4d073bd5e9f73cdee
|
[
"CC0-1.0"
] | null | null | null |
locallibrary/urls.py
|
mrmarkhurlburt/django_media_library
|
59bcff3dcf8a803dbfa22eb4d073bd5e9f73cdee
|
[
"CC0-1.0"
] | 1
|
2022-02-10T11:04:51.000Z
|
2022-02-10T11:04:51.000Z
|
locallibrary/urls.py
|
mrmarkhurlburt/django_media_library
|
59bcff3dcf8a803dbfa22eb4d073bd5e9f73cdee
|
[
"CC0-1.0"
] | null | null | null |
"""locallibrary URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
# Use include() to add URLS from the catalog application
from django.conf.urls import include
urlpatterns += [
url(r'^catalog/', include('catalog.urls')),
]
#Add URL maps to redirect the base URL to our application
from django.views.generic import RedirectView
urlpatterns += [
url(r'^$', RedirectView.as_view(url='/catalog/', permanent=True)),
]
# Use static() to add url mapping to serve static files during development (only)
from django.conf import settings
from django.conf.urls.static import static
urlpatterns+= static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
#Add Django site authentication urls (for login, logout, password management)
urlpatterns += [
url('^accounts/', include('django.contrib.auth.urls')),
]
| 33.130435
| 81
| 0.727034
|
77d1e4b422d603a27d36d28595013b65618c18b8
| 14,409
|
py
|
Python
|
easygl/prefabs/line.py
|
overdev/easygl-0.1.0-alpha1
|
c3ab70c272db670cbe4e79a99371d21da466d8a0
|
[
"MIT"
] | null | null | null |
easygl/prefabs/line.py
|
overdev/easygl-0.1.0-alpha1
|
c3ab70c272db670cbe4e79a99371d21da466d8a0
|
[
"MIT"
] | null | null | null |
easygl/prefabs/line.py
|
overdev/easygl-0.1.0-alpha1
|
c3ab70c272db670cbe4e79a99371d21da466d8a0
|
[
"MIT"
] | null | null | null |
# !/usr/bin/python
# -*- coding: utf-8 -*-
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
#
# The MIT License (MIT)
#
# Copyright (c) 2017 Jorge A. Gomes (jorgegomes83 at hotmail dot com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
import pygame as pg
from OpenGL.GL import GL_LINE_STRIP, GL_LINES, GL_LINE_LOOP
from typing import Optional, Callable
from easygl.arrays import VertexArrayData, DType, attribute, vertex, vertex_copy, VertexArray
from easygl.shaders import ShaderProgramData, ShaderProgram
from easygl.textures import TexDescriptor, TextureData, MipMap, Wrap, Filter
from easygl.structures import FrozenMat4, Vec2, Vec4, FrozenVec4
from easygl.display import BlendMode, GLWindow, Projection
__all__ = [
'init',
'line',
'lines',
'lineset',
'vline',
'hline',
'bake_lines',
'line_batch',
'line_vertex_data',
'line_shader',
'line_shader_data',
]
_initialized = False
line_shader = None
line_shader_data = None
line_vertex_data = None
# region - - -- ----==<[ STUBS ]>==---- -- - -
# The functions below are import-time placeholders: init() (further down in
# this module) builds the real, GL-bound implementations and rebinds these
# module-level names.  The "# type:" comments document the intended
# signatures of the real implementations.


def line_batch(window, view, projection, vert_array, count, color_a, color_b=None, tex=None, vcoord=0, blend=BlendMode):
    # type: (GLWindow, Mat4, Mat4, VertexArray, int, Union[Vec4, FrozenVec4], Optional[Union[Vec4, FrozenVec4]], Optional[TexDescription], float, BlendMode) -> None
    # NOTE(review): the default ``blend=BlendMode`` passes the enum *class*
    # itself; every other stub defaults to ``BlendMode.alpha`` -- looks like
    # a typo, confirm before relying on this stub's signature.
    pass


def line(window, view, projection, point_a, point_b, color_a, color_b=None, tex=None, vcoord=0, blend=BlendMode.alpha, update=True):
    # type: (GLWindow, Mat4, Mat4, Vec2, Vec2, Union[Vec4, FrozenVec4], Union[Vec4, FrozenVec4], Optional[TexDescriptor], float, BlendMode, bool) -> None
    pass


def lines(window, view, projection, points, closed, color_a, color_b=None, tex=None, vcoord=0, blend=BlendMode.alpha):
    # type: (GLWindow, Mat4, Mat4, Union[list, tuple], bool, Union[Vec4, FrozenVec4], Union[Vec4, FrozenVec4], Optional[TexDescriptor], float, BlendMode, bool) -> None
    pass


def lineset(window, view, projection, points, color_a, color_b=None, tex=None, vcoord=0, blend=BlendMode.alpha, update=True, count=-1):
    # type: (GLWindow, Mat4, Mat4, Union[list, tuple], Union[Vec4, FrozenVec4], Union[Vec4, FrozenVec4], float, Optional[TexDescriptor], BlendMode, bool, int) -> None
    pass


def vline(window, view, projection, start, length, color_a, color_b, tex=None, vcoord=0, blend=BlendMode.alpha):
    # type: (GLWindow, Mat4, Mat4, Vec2, float, Union[Vec2, FrozenVec4], Union[Vec4, FrozenVec4], Optional[TexDescriptor], float, BlendMode) -> None
    pass


def hline(window, view, projection, start, length, color_a, color_b, tex=None, vcoord=0, blend=BlendMode.alpha):
    # type: (GLWindow, Mat4, Mat4, Vec2, float, Union[Vec2, FrozenVec4], Union[Vec4, FrozenVec4], Optional[TexDescriptor], float, BlendMode) -> None
    pass


def bezier(window, view, projection, points, ctrl_points, tex=None, vcoord=0, blend=BlendMode.alpha):
    # type: (GLWindow, Mat4, Mat4, Union[list, tuple], Union[list, tuple], Optional[TexDescriptor], float, BlendMode) -> None
    # NOTE(review): 'bezier' is missing from __all__ at the top of the
    # module -- confirm whether that omission is intentional.
    pass


def bake_lines(points, buffer=None):
    # type: (Union[list, tuple], bytearray) -> None
    pass

# endregion
def init():
    # type: () -> None
    """Create the GL resources for line rendering and rebind the module-level
    stubs (line, lines, lineset, vline, hline, bezier, bake_lines, line_batch)
    to real implementations that close over those resources.

    Idempotent: a module flag makes repeated calls no-ops. Must be called with
    a current GL context.
    """
    global _initialized, line, lines, vline, hline, bezier, bake_lines, lineset, line_vertex_data, line_shader_data,\
        line_shader, line_batch
    if _initialized:
        return
    # region - - -- ----==<[ VERTEX DATA ]>==---- -- - -
    # One reusable primitive of 1024 2D vertices; the render helpers below
    # overwrite a prefix of it (update_data) and draw that prefix.
    line_vertex_data = VertexArrayData()
    with line_vertex_data.definition():
        attribute('position', DType.float_v2)
        with line_vertex_data.new_primitive('line', 1024):
            v = 1. / 1024.
            for i in range(1024):
                vertex(position=(v * i, v * i))
    # endregion
    # region - - -- ----==<[ TEXTURES ]>==---- -- - -
    # 4x1 all-white fallback texture used when the caller supplies no texture.
    s = pg.Surface((4, 1))
    s.fill((255, 255, 255))
    texdata = TextureData()
    texdata.create_from_surface('line_tex', s, False, False, MipMap.linear_linear, Wrap.repeat,
                                Filter.linear)
    # endregion
    # region - - -- ----==<[ SHADERS ]>==---- -- - -
    # Vertex shader: interpolates start/end color by vertex index and emits a
    # (u, vcoord) texture coordinate where u alternates 0/1 per vertex.
    line_vshader_code = """
        #version 330 core
        in vec2 position;
        uniform mat4 view;
        uniform mat4 projection;
        uniform vec4 start_color;
        uniform vec4 end_color;
        uniform float point_count;
        uniform float vcoord;
        out vec4 color;
        out vec2 coord;
        void main() {
            gl_Position = projection * view * vec4(position, 1.f, 1.f);
            color = mix(start_color, end_color, gl_VertexID / (point_count - 1));
            coord = vec2(mod(gl_VertexID, 2.f), vcoord);
        }
        """
    # NOTE(review): the 'solidcolor' flag looks inverted. The render helpers
    # set solidcolor=0 when a real texture is supplied (so it is never
    # sampled) and solidcolor=1 for the white fallback (a no-op multiply).
    # As written, caller-supplied textures can never affect the output —
    # confirm whether the condition should be `if (!solidcolor)`.
    line_fshader_code = """
        #version 330 core
        in vec4 color;
        in vec2 coord;
        uniform sampler2D tex;
        uniform bool solidcolor;
        void main() {
            vec4 basecolor = color;
            if (solidcolor)
                basecolor *= texture(tex, coord);
            gl_FragColor = basecolor;
        }
        """
    line_shader_data = ShaderProgramData("")
    line_shader_data.compile_vertex_shader('line', shader_code=line_vshader_code)
    line_shader_data.compile_fragment_shader('line', shader_code=line_fshader_code)
    line_shader_data.link('line', vertex='line', fragment='line')
    line_shader = line_shader_data.build('line')
    # endregion
    # region - - -- ----==<[ VAOS ]>==---- -- - -
    line_vertex_array = VertexArray(line_vertex_data, 'line', line_shader)
    # endregion
    # region - - -- ----==<[ HELPER FUNCTIONS ]>==---- -- - -
    def bake_lines(points, buffer=None):
        # type: (Union[list, tuple], Optional[bytearray]) -> None
        # Packs (x, y) pairs into the shared VBO; an optional pre-allocated
        # buffer avoids reallocation. Capped at the VBO size of 1024 vertices.
        verts = len(points)
        if verts > 1024:
            raise ValueError("Line is too long (more then 1024 vertices).")
        stride = Vec2.bytesize()
        data = bytearray(stride * verts) if buffer is None else buffer
        for i, (x, y) in enumerate(points):
            offset = i * stride
            Vec2.pack_values_into(x, y, buffer=data, offset=offset)
        line_vertex_array.update_data(0, data)
    # endregion
    # region - - -- ----==<[ RENDER FUNCTIONS ]>==---- -- - -
    def line_batch(window, view, projection, vert_array, count, color_a, color_b = None, tex=None, vcoord=0, blend=BlendMode.alpha):
        # type: (GLWindow, Mat4, Mat4, VertexArray, int, Union[Vec4, FrozenVec4], Optional[Union[Vec4, FrozenVec4]], Optional[TexDescriptor], float, BlendMode) -> None
        # Draws `count` vertices of a caller-managed vertex array as GL_LINES.
        # NOTE(review): unlike line()/lines(), color_b is NOT defaulted to
        # color_a here, so the default None crashes in load4f('end_color').
        current = window.blend_mode
        window.blend_mode = blend
        with vert_array.render(GL_LINES, count) as shader:  # type: ShaderProgram
            shader.load_matrix4f('view', 1, False, tuple(view))
            shader.load_matrix4f('projection', 1, False, tuple(projection))
            shader.load4f('start_color', *color_a)
            shader.load4f('end_color', *color_b)
            shader.load1f('point_count', count)
            shader.load1f('vcoord', vcoord)
            if isinstance(tex, TexDescriptor):
                # NOTE(review): see the fragment-shader note — 0 disables the
                # texture sample for the texture that was just bound.
                shader.load_sampler2d('tex', tex.id, 0)
                shader.load1i('solidcolor', 0)
            else:
                shader.load_sampler2d('tex', texdata['line_tex'].id, 0)
                shader.load1i('solidcolor', 1)
        window.blend_mode = current
    def line(window, view, projection, point_a, point_b, color_a, color_b=None, tex=None, vcoord=0, blend=BlendMode.alpha, update=True):
        # type: (GLWindow, Mat4, Mat4, Vec2, Vec2, Union[Vec4, FrozenVec4], Union[Vec4, FrozenVec4], Optional[TexDescriptor], float, BlendMode, bool) -> None
        # Draws a single segment; pass update=False to reuse the previously
        # uploaded endpoints.
        # NOTE(review): unlike lines()/lineset(), this does not flip the y
        # coordinate for Projection.ortho_down — confirm whether intended.
        current = window.blend_mode
        if update:
            data = Vec2(point_a).pack() + Vec2(point_b).pack()  # type: bytes
            line_vertex_array.update_data(0, data)
        if not isinstance(color_b , (Vec4, FrozenVec4)):
            color_b = color_a
        window.blend_mode = blend
        with line_vertex_array.render(GL_LINES, 2) as shader:  # type: ShaderProgram
            shader.load_matrix4f('view', 1, False, tuple(view))
            shader.load_matrix4f('projection', 1, False, tuple(projection))
            shader.load4f('start_color', *color_a)
            shader.load4f('end_color', *color_b)
            shader.load1f('point_count', 2.)
            shader.load1f('vcoord', vcoord)
            if isinstance(tex, TexDescriptor):
                shader.load_sampler2d('tex', tex.id, 0)
                shader.load1i('solidcolor', 0)
            else:
                shader.load_sampler2d('tex', texdata['line_tex'].id, 0)
                shader.load1i('solidcolor', 1)
        window.blend_mode = current
    def lines(window, view, projection, points, closed, color_a, color_b=None, tex=None, vcoord=0, blend=BlendMode.alpha, update=True):
        # type: (GLWindow, Mat4, Mat4, Union[list, tuple], bool, Union[Vec4, FrozenVec4], Union[Vec4, FrozenVec4], Optional[TexDescriptor], float, BlendMode, bool) -> None
        # Draws a polyline (GL_LINE_STRIP) or closed loop (GL_LINE_LOOP).
        if len(points) < 2 and not closed:
            return
        if len(points) < 3 and closed:
            return
        current = window.blend_mode
        if update:
            data = Vec2(points[0]).pack()  # type: bytes
            # For a top-down orthographic projection, flip y into GL space.
            # NOTE(review): the flip is applied to points[1:] but not to
            # points[0] packed above — confirm whether that is intended.
            if window.projection is Projection.ortho_down:
                h = window.height
                for (x, y) in points[1:]:
                    data += Vec2.pack_values(x, h - y)
            else:
                for (x, y) in points[1:]:
                    data += Vec2.pack_values(x, y)
            line_vertex_array.update_data(0, data)
        if not isinstance(color_b , Vec4):
            color_b = color_a
        window.blend_mode = blend
        # Clamp to the VBO capacity of 1024 vertices.
        count = max(2, min(len(points), 1024))
        with line_vertex_array.render(GL_LINE_STRIP if not closed else GL_LINE_LOOP, count) as shader:  # type: ShaderProgram
            shader.load_matrix4f('view', 1, False, tuple(view))
            shader.load_matrix4f('projection', 1, False, tuple(projection))
            shader.load4f('start_color', *color_a)
            shader.load4f('end_color', *color_b)
            shader.load1f('point_count', count)
            shader.load1f('vcoord', vcoord)
            if isinstance(tex, TexDescriptor):
                shader.load_sampler2d('tex', tex.id, 0)
                shader.load1i('solidcolor', 0)
            else:
                shader.load_sampler2d('tex', texdata['line_tex'].id, 0)
                shader.load1i('solidcolor', 1)
        window.blend_mode = current
    def lineset(window, view, projection, points, color_a, color_b=None, tex=None, vcoord=0, blend=BlendMode.alpha, update=True, count=-1):
        # type: (GLWindow, Mat4, Mat4, Union[list, tuple], Union[Vec4, FrozenVec4], Union[Vec4, FrozenVec4], Optional[TexDescriptor], float, BlendMode, bool, int) -> None
        # Draws independent segments (GL_LINES): each consecutive pair of
        # points is one segment, so an even number of points is required.
        if len(points) % 2 != 0 and update is True:
            return
        current = window.blend_mode
        if update:
            data = Vec2(points[0]).pack()  # type: bytes
            if window.projection is Projection.ortho_down:
                h = window.height
                for (x, y) in points[1:]:
                    data += Vec2.pack_values(x, h - y)
            else:
                for (x, y) in points[1:]:
                    data += Vec2.pack_values(x, y)
            line_vertex_array.update_data(0, data)
        if not isinstance(color_b , Vec4):
            color_b = color_a
        window.blend_mode = blend
        # Default count covers all points (clamped to VBO size), then rounded
        # down to an even vertex count so only whole segments are drawn.
        count = max(2, min(len(points), 1024)) if count == -1 else count
        count -= (count % 2) if count > 2 else 0
        with line_vertex_array.render(GL_LINES, count) as shader:  # type: ShaderProgram
            shader.load_matrix4f('view', 1, False, tuple(view))
            shader.load_matrix4f('projection', 1, False, tuple(projection))
            shader.load4f('start_color', *color_a)
            shader.load4f('end_color', *color_b)
            shader.load1f('point_count', count)
            shader.load1f('vcoord', vcoord)
            if isinstance(tex, TexDescriptor):
                shader.load_sampler2d('tex', tex.id, 0)
                shader.load1i('solidcolor', 0)
            else:
                shader.load_sampler2d('tex', texdata['line_tex'].id, 0)
                shader.load1i('solidcolor', 1)
        window.blend_mode = current
    def vline(window, view, projection, start, length, color_a, color_b, tex=None, vcoord=0, blend=BlendMode.alpha):
        # type: (GLWindow, Mat4, Mat4, Vec2, float, Union[Vec4, FrozenVec4], Union[Vec4, FrozenVec4], Optional[TexDescriptor], float, BlendMode) -> None
        # Vertical segment of the given length starting at `start`.
        line(window, view, projection, start, Vec2(start) + (0, length), color_a, color_b, tex, vcoord, blend)
    def hline(window, view, projection, start, length, color_a, color_b, tex=None, vcoord=0, blend=BlendMode.alpha):
        # type: (GLWindow, Mat4, Mat4, Vec2, float, Union[Vec4, FrozenVec4], Union[Vec4, FrozenVec4], Optional[TexDescriptor], float, BlendMode) -> None
        # Horizontal segment of the given length starting at `start`.
        line(window, view, projection, start, Vec2(start) + (length, 0), color_a, color_b, tex, vcoord, blend)
    def bezier(window, view, projection, points, ctrl_points, tex=None, vcoord=0, blend=BlendMode.alpha):
        # type: (GLWindow, Mat4, Mat4, Union[list, tuple], Union[list, tuple], Optional[TexDescriptor], float, BlendMode) -> None
        # Not implemented yet; kept so the public API is stable.
        pass
    # endregion
    _initialized = True
| 41.051282
| 171
| 0.626692
|
68efa087a812dfed3eb83c9f1a7a14451740e65d
| 4,793
|
py
|
Python
|
tests/commons.py
|
STAMP-project/camp
|
e8652ddf3e2e84ffbf2b9dff3fb5ee678b209246
|
[
"MIT"
] | 8
|
2018-05-28T13:14:24.000Z
|
2020-08-10T17:37:08.000Z
|
tests/commons.py
|
STAMP-project/camp
|
e8652ddf3e2e84ffbf2b9dff3fb5ee678b209246
|
[
"MIT"
] | 95
|
2018-05-16T14:06:52.000Z
|
2022-02-26T04:25:18.000Z
|
tests/commons.py
|
STAMP-project/camp
|
e8652ddf3e2e84ffbf2b9dff3fb5ee678b209246
|
[
"MIT"
] | 10
|
2018-06-25T16:04:11.000Z
|
2019-12-12T15:08:00.000Z
|
#
# CAMP
#
# Copyright (C) 2017 -- 2019 SINTEF Digital
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
#
from camp.codecs.yaml import YAML
from camp.commands import Command
from camp.core import Camp
from camp.directories import InputDirectory, OutputDirectory
from camp.generate import Z3Problem
from camp.realize import Builder
from os import makedirs, listdir
from os.path import exists, isdir, join as join_paths
from shutil import copytree, copy2
from tempfile import mkdtemp
from unittest import TestCase
SAMPLE_DIRECTORY = "samples"
class Scenario(object):
    """A CAMP working directory rooted in a fresh temporary folder.

    Wraps an InputDirectory (model and templates) and an OutputDirectory
    (generated configurations, under "<dir>/out").
    """
    @staticmethod
    def from_sample(relative_path):
        """Create a scenario pre-populated with the content of a sample."""
        scenario = Scenario()
        sample_directory = join_paths(SAMPLE_DIRECTORY, relative_path)
        working_directory = scenario.directory
        # Copy all the content
        for item in listdir(sample_directory):
            source = join_paths(sample_directory, item)
            destination = join_paths(working_directory, item)
            if isdir(source):
                copytree(source, destination)
            else:
                copy2(source, destination)
        return scenario
    def __init__(self, path=""):
        temporary_directory = join_paths(mkdtemp(prefix="camp_"), path)
        makedirs(temporary_directory, exist_ok=True)
        self._input = InputDirectory(temporary_directory, YAML())
        self._output = OutputDirectory(join_paths(self._input.path, "out"), YAML())
        self._model = None
    @property
    def directory(self):
        """Path of the scenario's input directory."""
        return self._input.path
    @property
    def output_directory(self):
        """Path where generated configurations are written."""
        return self._output.path
    @property
    def generated_configurations(self):
        """All configurations found on disk for the scenario's model.

        Raises AssertionError if loading the model produced warnings.
        """
        _, model, warnings = self._input.model
        if warnings:
            # Bug fix: the two literals used to be juxtaposed (implicit string
            # concatenation), which made the header part of the join separator
            # instead of prefixing the message.
            error = ("There are warnings!\n"
                     + "\n".join(warnings))
            raise AssertionError(error)
        return [GeneratedConfiguration(path, configuration)
                for path, configuration in self._output.existing_configurations(model)]
    def fetch_test_report(self):
        return self._output.load_reports()
    @property
    def model(self):
        return self._input.model
    def create_configuration(self, index, content):
        # Relies on OutputDirectory's private naming scheme for YAML files.
        file_name = self._output._yaml_configuration_file(index)
        self._output.create_file(file_name, content)
    def create_model(self, content):
        self._input.create_model(content)
    def create_template(self, component, relative_path, content="whatever"):
        self._input.create_template_file(component, relative_path, content)
class GeneratedConfiguration(object):
    """A single configuration that CAMP wrote to disk.

    Pairs the on-disk location with the in-memory configuration model and
    offers simple queries about the generated files.
    """

    def __init__(self, path, model):
        self._path = path
        self._model = model

    @property
    def model(self):
        """The configuration model this directory was generated from."""
        return self._model

    def includes_file(self, path_to_file):
        """True iff the given relative path exists in this configuration."""
        return exists(join_paths(self._path, path_to_file))

    def content_of(self, resource):
        """Read and return the text of the given generated file."""
        with open(join_paths(self._path, resource), "r") as stream:
            return stream.read()
class CampTest(TestCase):
    """Shared driver for CAMP end-to-end tests.

    Subclasses are expected to provide ``self.scenario``; the helpers below
    run CAMP commands against it and assert on the generated artifacts.
    """

    __test__ = False

    def generate_all(self):
        """Run 'camp generate' producing every configuration."""
        self.camp("generate", "--mode", "all", "-d", self.scenario.directory)

    def generate_coverage(self):
        """Run 'camp generate' producing a covering set of configurations."""
        self.camp("generate", "--mode", "covering", "-d", self.scenario.directory)

    def realize(self):
        """Run 'camp realize' on the scenario directory."""
        self.camp("realize", "-d", self.scenario.directory)

    def execute(self, simulated=False, include=None):
        """Run 'camp execute', optionally simulated and restricted to ids."""
        arguments = ["execute", "-d", self.scenario.directory]
        if simulated:
            arguments.append("-s")
        if include:
            arguments.append("--include")
            arguments.extend(str(each) for each in include)
        self.camp(*arguments)

    @staticmethod
    def camp(*arguments):
        """Parse *arguments* as a CAMP command line and run it."""
        engine = Camp(YAML(), Z3Problem, Builder())
        command = Command.extract_from(arguments)
        command.send_to(engine)

    def _assert_generated(self, configuration, *files):
        for expected in files:
            self.assertTrue(configuration.includes_file(expected),
                            "Missing file '%s'" % expected)

    def _assert_missing(self, configuration, *files):
        for unexpected in files:
            self.assertFalse(configuration.includes_file(unexpected),
                             "Unexpected file '%s'" % unexpected)

    def assert_file_contains(self, configuration, resource, fragment):
        self.assertIn(fragment, configuration.content_of(resource))

    def create_configurations(self, *configurations):
        for position, body in enumerate(configurations, 1):
            self.scenario.create_configuration(position, body)
| 26.480663
| 87
| 0.657
|
a8c55890acf018f2e4abc69a37e09a8efe900ae3
| 4,069
|
py
|
Python
|
tests/components/zha/test_fan.py
|
alemuro/home-assistant
|
9b1315d8e55f0ca906c4c8a1b2ae8c2ea511dc90
|
[
"Apache-2.0"
] | 3
|
2020-01-21T18:09:09.000Z
|
2022-01-17T08:06:03.000Z
|
tests/components/zha/test_fan.py
|
alemuro/home-assistant
|
9b1315d8e55f0ca906c4c8a1b2ae8c2ea511dc90
|
[
"Apache-2.0"
] | 39
|
2016-12-16T12:40:34.000Z
|
2017-02-13T17:53:42.000Z
|
tests/components/zha/test_fan.py
|
alemuro/home-assistant
|
9b1315d8e55f0ca906c4c8a1b2ae8c2ea511dc90
|
[
"Apache-2.0"
] | 3
|
2020-03-03T18:14:10.000Z
|
2020-10-04T06:52:45.000Z
|
"""Test zha fan."""
from unittest.mock import call, patch
from homeassistant.components import fan
from homeassistant.const import STATE_ON, STATE_OFF, STATE_UNAVAILABLE
from homeassistant.components.fan import ATTR_SPEED, DOMAIN, SERVICE_SET_SPEED
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_ON, SERVICE_TURN_OFF
from tests.common import mock_coro
from .common import (
async_init_zigpy_device,
make_attribute,
make_entity_id,
async_test_device_join,
async_enable_traffic,
)
async def test_fan(hass, config_entry, zha_gateway):
    """Test zha fan platform.

    Exercises the full round trip: device joins -> entity created, state
    updates reported by the Zigbee cluster, and HA service calls translated
    into fan_mode attribute writes.
    """
    from zigpy.zcl.clusters.hvac import Fan
    from zigpy.zcl.clusters.general import Basic
    from zigpy.zcl.foundation import Status
    # create zigpy device
    zigpy_device = await async_init_zigpy_device(
        hass, [Fan.cluster_id, Basic.cluster_id], [], None, zha_gateway
    )
    # load up fan domain
    await hass.config_entries.async_forward_entry_setup(config_entry, DOMAIN)
    await hass.async_block_till_done()
    cluster = zigpy_device.endpoints.get(1).fan
    entity_id = make_entity_id(DOMAIN, zigpy_device, cluster)
    zha_device = zha_gateway.get_device(zigpy_device.ieee)
    # test that the fan was created and that it is unavailable
    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
    # allow traffic to flow through the gateway and device
    await async_enable_traffic(hass, zha_gateway, [zha_device])
    # test that the state has changed from unavailable to off
    assert hass.states.get(entity_id).state == STATE_OFF
    # turn on at fan
    # attribute 0 with value 1 — presumably the fan_mode attribute set to
    # "on"; confirm against the ZCL Fan cluster spec.
    attr = make_attribute(0, 1)
    cluster.handle_message(False, 1, 0x0A, [[attr]])
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_ON
    # turn off at fan
    attr.value.value = 0
    cluster.handle_message(False, 0, 0x0A, [[attr]])
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_OFF
    # turn on from HA (fan_mode 2 == medium, the default speed on turn-on)
    with patch(
        "zigpy.zcl.Cluster.write_attributes",
        return_value=mock_coro([Status.SUCCESS, Status.SUCCESS]),
    ):
        # turn on via UI
        await async_turn_on(hass, entity_id)
        assert len(cluster.write_attributes.mock_calls) == 1
        assert cluster.write_attributes.call_args == call({"fan_mode": 2})
    # turn off from HA (fan_mode 0 == off)
    with patch(
        "zigpy.zcl.Cluster.write_attributes",
        return_value=mock_coro([Status.SUCCESS, Status.SUCCESS]),
    ):
        # turn off via UI
        await async_turn_off(hass, entity_id)
        assert len(cluster.write_attributes.mock_calls) == 1
        assert cluster.write_attributes.call_args == call({"fan_mode": 0})
    # change speed from HA (fan_mode 3 == high)
    with patch(
        "zigpy.zcl.Cluster.write_attributes",
        return_value=mock_coro([Status.SUCCESS, Status.SUCCESS]),
    ):
        # set speed via UI
        await async_set_speed(hass, entity_id, speed=fan.SPEED_HIGH)
        assert len(cluster.write_attributes.mock_calls) == 1
        assert cluster.write_attributes.call_args == call({"fan_mode": 3})
    # test adding new fan to the network and HA
    await async_test_device_join(hass, zha_gateway, Fan.cluster_id, DOMAIN)
async def async_turn_on(hass, entity_id, speed=None):
    """Turn the fan on via the fan.turn_on service (speed optional)."""
    service_data = {}
    for key, value in ((ATTR_ENTITY_ID, entity_id), (ATTR_SPEED, speed)):
        if value is not None:
            service_data[key] = value
    await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, service_data, blocking=True)
async def async_turn_off(hass, entity_id):
    """Turn the fan off via the fan.turn_off service."""
    if entity_id:
        service_data = {ATTR_ENTITY_ID: entity_id}
    else:
        service_data = {}
    await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, service_data, blocking=True)
async def async_set_speed(hass, entity_id, speed=None):
    """Set the speed of the specified fan via fan.set_speed."""
    service_data = {}
    for key, value in ((ATTR_ENTITY_ID, entity_id), (ATTR_SPEED, speed)):
        if value is not None:
            service_data[key] = value
    await hass.services.async_call(DOMAIN, SERVICE_SET_SPEED, service_data, blocking=True)
| 34.483051
| 82
| 0.704596
|
279df1b1cf8dbc659a9ef2747d70e9533f9bd403
| 7,124
|
py
|
Python
|
triangular_lattice/vicsek/vicsek_self-avoiding_fix.py
|
ssh0/growing-string
|
2e43916e91157dfb4253775149b35ec9d81ef14d
|
[
"MIT"
] | null | null | null |
triangular_lattice/vicsek/vicsek_self-avoiding_fix.py
|
ssh0/growing-string
|
2e43916e91157dfb4253775149b35ec9d81ef14d
|
[
"MIT"
] | 1
|
2016-04-14T08:15:28.000Z
|
2016-04-27T02:57:13.000Z
|
triangular_lattice/vicsek/vicsek_self-avoiding_fix.py
|
ssh0/growing-string
|
2e43916e91157dfb4253775149b35ec9d81ef14d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
#
# written by Shotaro Fujimoto
# 2016-05-30
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from triangular import LatticeTriangular as LT
import matplotlib.pyplot as plt
import matplotlib.tri as tri
import matplotlib.animation as animation
import numpy as np
from numpy import linalg as la
import random
import time
rint = random.randint
randm = random.random
class Point:
    """A single walker on the triangular lattice.

    ``vel`` holds a direction index in 0..5 (unit speed); the index maps to
    one of the six lattice directions, angle = vel * pi / 3.
    """
    def __init__(self, id, ix, iy):
        self.id = id
        self.x = ix
        self.y = iy
        # vel is unified and the value of it implies the direction of the
        # velocity
        self.vel = random.randint(0, 5)
class Points:
    """Scatter N Point walkers over distinct sites of an Lx-by-Ly lattice."""
    def __init__(self, N, Lx, Ly):
        # Pick N occupied cells uniformly at random among the Lx*Ly sites.
        cells = [True] * N + [False] * (Lx * Ly - N)
        random.shuffle(cells)
        grid = np.array(cells).reshape(Lx, Ly)
        # One Point per occupied cell, ids numbered in row-major order.
        xs, ys = np.where(grid)
        self.points = [Point(number, ix, iy)
                       for number, (ix, iy) in enumerate(zip(xs, ys))]
class Main:
    """Lattice-gas Vicsek-style model of collective motion on a triangular
    lattice: self-avoiding walkers whose direction relaxes toward their
    neighbours' mean velocity via a Boltzmann rule at temperature T.

    NOTE(review): this module is Python 2 code (dict.has_key below) and uses
    the np.int alias, which was removed in NumPy 1.24.
    """
    def __init__(self, Lx=6, Ly=6, rho=0.9, lattice_scale=10, T=0.4, plot=True,
                 frames=100):
        self.lattice = LT(- np.ones((Lx, Ly), dtype=np.int),
                          scale=lattice_scale)
        # N walkers at density rho.
        self.N = int(Lx * Ly * rho)
        self.points = Points(self.N, Lx, Ly)
        self.points = self.points.points
        self.T = T
        self.plot = plot
        self.beta = 1. / self.T
        self.order_param = []
        self.num = 0
        # Occupancy map: (ix, iy) -> Point; also guards against double
        # occupation of a site.
        self.lattice_point = {}
        for point in self.points:
            if not self.lattice_point.has_key((point.x, point.y)):
                self.lattice_point[(point.x, point.y)] = point
            else:
                raise UserWarning()
        self.lattice_X = self.lattice.coordinates_x
        self.lattice_Y = self.lattice.coordinates_y
        self.lattice_X = np.array(self.lattice_X).reshape(Lx, Ly)
        self.lattice_Y = np.array(self.lattice_Y).reshape(Lx, Ly)
        X_min, X_max = np.min(self.lattice_X), np.max(self.lattice_X)
        Y_min, Y_max = np.min(self.lattice_Y), np.max(self.lattice_Y)
        # Per-point plot data (indexed by point.id): positions and unit
        # velocity components (screen y points down, hence -sin).
        self.X, self.Y = [], []
        self.vel_x, self.vel_y = [], []
        for point in self.points:
            self.X.append(self.lattice_X[point.x, point.y])
            self.Y.append(self.lattice_Y[point.x, point.y])
            angle = point.vel * np.pi / 3.
            self.vel_x.append(np.cos(angle))
            self.vel_y.append(- np.sin(angle))
        if self.plot:
            self.fig, (self.ax1, self.ax2) = plt.subplots(1, 2, figsize=(8, 10))
            self.ax1.set_xlim([X_min, X_max])
            self.ax1.set_ylim([Y_min, Y_max])
            self.ax1.set_xticklabels([])
            self.ax1.set_yticklabels([])
            self.ax1.set_aspect('equal')
            self.ax1.set_title("Lattice-Gas model for collective motion")
            self.triang = tri.Triangulation(self.lattice_X.flatten(),
                                            self.lattice_Y.flatten())
            self.ax1.triplot(self.triang, color='whitesmoke', lw=0.5)
            self.l, = self.ax2.plot([], [], 'b-')
            self.ax2.set_title(r"Order parameter $m=\frac{1}{N} |\sum \vec{u}_{i}|$ ($T = %.2f$)"
                               % self.T)
            self.ax2.set_ylim([0, 1.])
            # Keeping a local reference to the animation is enough here
            # because plt.show() blocks until the window closes.
            ani = animation.FuncAnimation(self.fig, self.update, frames=frames,
                                          interval=1, blit=True, repeat=False)
            plt.show()
        else:
            # NOTE(review): with plot=False, update() still ends with
            # `return self.quiver, self.l`, which are never assigned in this
            # branch — this path raises AttributeError. Confirm before use.
            for i in range(100):
                self.update(i)
                # print self.order_param[-1]
    def update(self, num):
        """Move one randomly chosen walker and update its velocity."""
        # NOTE(review): max_repeat is never compared against the attempt
        # counter i, so a fully blocked point retries forever.
        max_repeat = 10
        i = 0
        while True:
            # Pick one Point at random.
            p = random.choice(self.points)
            # Find the neighbouring sites; check if the one in the velocity
            # direction is free.
            nnx, nny = self.lattice.neighbor_of(p.x, p.y)
            # Keep only the unoccupied neighbour sites.
            not_occupied = {}
            for v, x, y in zip(range(len(nnx)), nnx, nny):
                if not self.lattice_point.has_key((x, y)):
                    not_occupied[v] = (x, y)
            if len(not_occupied) == 0:
                # No free neighbour: pick another point.
                i += 1
                continue
            elif self.lattice_point.has_key((nnx[p.vel], nny[p.vel])):
                # Target site occupied: choose a free direction at random.
                # NOTE(review): .keys() here is Python 2 (a list); on
                # Python 3 random.choice on a dict view raises TypeError.
                p.vel = random.choice(not_occupied.keys())
            # Move the point and update the occupancy map.
            del self.lattice_point[(p.x, p.y)]
            p.x, p.y = nnx[p.vel], nny[p.vel]
            self.lattice_point[(p.x, p.y)] = p
            self.X[p.id] = self.lattice_X[p.x, p.y]
            self.Y[p.id] = self.lattice_Y[p.x, p.y]
            # Velocity update from nearest-neighbour interaction, decided by
            # a Boltzmann rule: sum the neighbours' velocity vectors...
            velocities = np.array([0., 0.])
            nnx, nny = self.lattice.neighbor_of(p.x, p.y)
            not_occupied = {}
            for v, x, y in zip(range(len(nnx)), nnx, nny):
                if self.lattice_point.has_key((x, y)):
                    angle = self.lattice_point[(x, y)].vel * np.pi / 3.
                    velocities += np.array([np.cos(angle), np.sin(angle)])
                else:
                    # Keep only the unoccupied neighbour sites.
                    not_occupied[v] = (x, y)
            # ...then pick the new direction stochastically with probability
            # proportional to exp(beta * u_alpha . sum_of_velocities).
            # NOTE(review): the comprehension and the enumerate loop below
            # both rebind `i`, clobbering the outer attempt counter.
            u_alpha = [i * np.pi / 3. for i in range(6)]
            u_alpha = [np.array([np.cos(ang), np.sin(ang)]) for ang in u_alpha]
            A = [np.exp(self.beta * np.dot(u, velocities)) for u in u_alpha]
            rand = random.random() * sum(A)
            prob = 0
            for i, P in enumerate(A):
                prob += P
                if rand < prob:
                    p.vel = i
                    break
            angle = p.vel * np.pi / 3.
            self.vel_x[p.id] = np.cos(angle)
            self.vel_y[p.id] = - np.sin(angle)
            if self.plot:
                self.quiver = self.ax1.quiver(self.X, self.Y, self.vel_x, self.vel_y,
                                              units='xy', angles='xy', color='k')
            break
        # Plot the order parameter.
        self.plot_order_param(num)
        return self.quiver, self.l
    def plot_order_param(self, num):
        """Append the current order parameter and refresh its sliding plot."""
        nwidth = 20
        m = self.cal_order_param()
        self.order_param.append(m)
        self.num += 1
        # Show a sliding window of the last nwidth samples.
        nl = max(self.num - nwidth, 0)
        nr = 1.25 * nwidth + nl
        xdata = np.arange(nl, self.num)
        # print xdata
        # print self.order_param
        # print len(xdata)
        # print len(self.order_param)
        if self.plot:
            self.ax2.set_xlim([nl, nr])
            self.l.set_data(xdata, self.order_param[nl:])
    def cal_order_param(self):
        # Return the order parameter m = |sum of unit velocities| / N,
        # i.e. 1 for perfect alignment and ~0 for random directions.
        velocities = np.array([0., 0.])
        for point in self.points:
            angle = point.vel * np.pi / 3.
            velocities += np.array([np.cos(angle), np.sin(angle)])
        m = la.norm(velocities) / self.N
        return m
if __name__ == '__main__':
    # Alternative smaller run kept for reference:
    # main = Main(Lx=10, Ly=10, rho=0.6, T=1.5, frames=1000, plot=True)
    # Launch the animated 40x40 simulation; plt.show() in Main.__init__ blocks.
    main = Main(Lx=40, Ly=40, T=0.6, frames=1000, plot=True)
| 34.582524
| 97
| 0.520634
|
2e418b80a3d353e99412c23a6afff696283b356b
| 2,565
|
py
|
Python
|
main.py
|
LG95/Classifcation-tree-based-
|
1ec905ee58ead38543f30a31034a2cd5b79a171d
|
[
"MIT"
] | null | null | null |
main.py
|
LG95/Classifcation-tree-based-
|
1ec905ee58ead38543f30a31034a2cd5b79a171d
|
[
"MIT"
] | null | null | null |
main.py
|
LG95/Classifcation-tree-based-
|
1ec905ee58ead38543f30a31034a2cd5b79a171d
|
[
"MIT"
] | null | null | null |
#! /usr/bin/python
from build import CLASS, grow_tree, Node
def parse(line):
    """Split *line* into its space-separated words.

    Runs of spaces act as a single separator, leading/trailing spaces yield
    no empty words, and a single trailing newline is ignored.  Returns a
    (possibly empty) list of words.
    """
    # Bug fix: the original indexed line[-1] unconditionally and raised
    # IndexError on an empty string.
    if line.endswith('\n'):
        line = line[:-1]
    # Splitting on ' ' and dropping empties reproduces the original
    # hand-rolled scanner (spaces only, not general whitespace).
    return [word for word in line.split(' ') if word]
def calculate_accuracy(records, tree):
    """Percentage of *records* whose class label matches *tree*'s prediction."""
    mistakes = sum(1 for record in records
                   if record[CLASS] != tree.classify(record))
    # float() keeps the division exact under Python 2 as well.
    return (1 - float(mistakes) / len(records)) * 100
def main(files):
    """Build a classification tree and report its accuracy.

    *files* holds 2 or 3 paths: the attribute file, the training file and an
    optional test file.  Read errors are reported on stdout rather than
    propagated.
    """
    attributes = []
    training = []
    testing = []
    if len(files) == 2:
        attribute_filename, train_filename = files
        test_filename = None
    else:
        attribute_filename, train_filename, test_filename = files
    try:
        try:
            with open(attribute_filename) as stream:
                for line in stream:
                    words = parse(line)
                    attributes.append((words[0], words[1] == 'continuous', words[1:]))
        except:
            raise Exception('Cannot read records without knowing their attributes. ' + attribute_filename + ' could not be opened.')
        # The last declared attribute is the class label; re-register it
        # under the CLASS sentinel so the tree builder can find it.
        class_name, continuous, values = attributes.pop()
        attributes.append((CLASS, continuous, values))
        for filename, records in [(train_filename, training), (test_filename, testing)]:
            try:
                with open(filename) as stream:
                    for line in stream:
                        record = {}
                        for attribute, value in zip(attributes, parse(line)):
                            name, continuous, values = attribute
                            if continuous:
                                record[name] = float(value)
                            else:
                                record[name] = value
                        records.append(record)
            except:
                # A missing test file is tolerated (testing stays empty);
                # a missing training file is fatal.
                if records is training:
                    # Bug fix: this message referenced the undefined name
                    # 'training_filename', so the intended error surfaced
                    # as a NameError instead.
                    raise Exception('Cannot build a decision tree without training records. ' + train_filename + ' could not be opened.')
    except Exception as e:
        # Compat fix: 'except Exception, e' is Python-2-only syntax;
        # 'as e' works on Python 2.6+ and Python 3.
        print(e)
    else:
        attributes.pop()
        tree = grow_tree(training, attributes)
        print(tree)
        print('Accuracy on training set: ' + str(calculate_accuracy(training, tree)) + '%')
        if testing != []:
            print('Accuracy on testing set: ' + str(calculate_accuracy(testing, tree)) + '%')
if __name__ == '__main__':
	from sys import argv
	# One positional argument: treat it as a base name and derive the three
	# standard file names from it.
	if len(argv) == 2:
		main( [ argv[1] + end for end in ['-attr.txt', '-train.txt', '-test.txt'] ] )
	# Two or three positional arguments: explicit paths (test file optional).
	elif 3 <= len(argv) <= 4:
		main( argv[1:] )
	else:
		print('Usage: ' + argv[0] + ' name')
		print('       ' + argv[0] + ' attribute_file training_file [test_file]\n')
		print('       name: attribute file, training file and an optional test file are, respectively, name-attr.txt, name-train.txt and name-test.txt')
| 24.428571
| 146
| 0.647173
|
2ad9e9534ee871a386c81ca563a8f6548ea14483
| 6,637
|
py
|
Python
|
build/management/commands/build_construct_data.py
|
penglian518/protwis
|
d81b0aaeb0821dcca1979926dc18c1a70a3ca654
|
[
"Apache-2.0"
] | null | null | null |
build/management/commands/build_construct_data.py
|
penglian518/protwis
|
d81b0aaeb0821dcca1979926dc18c1a70a3ca654
|
[
"Apache-2.0"
] | null | null | null |
build/management/commands/build_construct_data.py
|
penglian518/protwis
|
d81b0aaeb0821dcca1979926dc18c1a70a3ca654
|
[
"Apache-2.0"
] | null | null | null |
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils.text import slugify
from django.db import IntegrityError
from protein.models import Protein, ProteinConformation
from residue.models import Residue
from structure.models import Structure
from construct.models import (Construct,Crystallization,CrystallizationLigandConc,ChemicalType,Chemical,ChemicalConc,ChemicalList,
CrystallizationMethods,CrystallizationTypes,ChemicalListName,ContributorInfo,ConstructMutation,ConstructInsertion,ConstructInsertionType,
ConstructDeletion,ConstructModification,CrystalInfo,ExpressionSystem,Solubilization,PurificationStep,Purification)
from construct.functions import add_construct, fetch_pdb_info
from ligand.models import Ligand, LigandType, LigandRole
from ligand.functions import get_or_make_ligand
from optparse import make_option
import logging
import csv
import os
import json
import datetime
class Command(BaseCommand):
    """Management command that imports experimental construct data.

    Without --filename it walks every non-refined Structure and fetches
    construct annotations from the PDB; with --filename it loads the named
    JSON files from the construct_data directory.
    """
    help = 'Build construct data'
    def add_arguments(self, parser):
        """Register the repeatable --filename option and the --local flag."""
        parser.add_argument('--filename', action='append', dest='filename',
            help='Filename to import. Can be used multiple times')
        parser.add_argument('--local', action='store_true', dest='local', default=False,
            help='Read local construct files')
    logger = logging.getLogger(__name__)
    # source file directory
    construct_data_dir = os.sep.join([settings.DATA_DIR, 'structure_data','construct_data'])
    # NOTE(review): relative path — resolution depends on the process CWD.
    construct_data_local_dir = "../files/construct_data"
    def handle(self, *args, **options):
        """Entry point: dispatch to bulk import or the local-file import.

        NOTE(review): create_construct_local_data is commented out below, so
        invoking this command with --local raises AttributeError — confirm
        before relying on that flag.
        """
        if options['filename']:
            filenames = options['filename']
        else:
            filenames = False
        if options['local']:
            local_fill = True
        else:
            local_fill = False
        print(filenames)  # debug leftover
        # try:
        # # self.purge_construct_data()
        if not local_fill:
            self.create_construct_data(filenames)
        else:
            self.create_construct_local_data()
        # except Exception as msg:
        #     print("ERROR: "+str(msg))
        #     self.logger.error(msg)
    def purge_construct_data(self):
        """Delete every construct-related record (currently never called)."""
        Construct.objects.all().delete()
        Crystallization.objects.all().delete()
        ChemicalConc.objects.all().delete()
        Chemical.objects.all().delete()
        ChemicalType.objects.all().delete()
        ChemicalList.objects.all().delete()
        CrystallizationLigandConc.objects.all().delete()
        CrystallizationMethods.objects.all().delete()
        CrystallizationTypes.objects.all().delete()
        ChemicalListName.objects.all().delete()
        ContributorInfo.objects.all().delete()
        ConstructMutation.objects.all().delete()
        ConstructDeletion.objects.all().delete()
        ConstructInsertion.objects.all().delete()
        ConstructInsertionType.objects.all().delete()
        ConstructModification.objects.all().delete()
        CrystalInfo.objects.all().delete()
        ExpressionSystem.objects.all().delete()
        Solubilization.objects.all().delete()
        Purification.objects.all().delete()
        PurificationStep.objects.all().delete()
    # NOTE(review): dead code — handle() still calls this method when --local
    # is passed; either restore it or remove the --local code path.
    # def create_construct_local_data(self, filenames=False):
    #     self.logger.info('ADDING EXPERIMENTAL CONSTRUCT DATA')
    #     # read source files
    #     if not filenames:
    #         #delete existing if nothing specific is defined
    #         self.purge_construct_data()
    #         filenames = os.listdir(self.construct_data_dir)
    #     for filename in sorted(filenames):
    #         print('dealing with',filename)
    #         if filename[-4:]!='json':
    #             continue
    #         filepath = os.sep.join([self.construct_data_dir, filename])
    #         with open(filepath) as json_file:
    #             d = json.load(json_file)
    #             add_construct(d)
    #     filenames = os.listdir(self.construct_data_local_dir)
    #     for filename in sorted(filenames):
    #         print('dealing with',filename)
    #         if filename[-4:]!='json':
    #             continue
    #         filepath = os.sep.join([self.construct_data_local_dir, filename])
    #         with open(filepath) as json_file:
    #             d = json.load(json_file)
    #             add_construct(d)
    #     if not filenames:
    #         structures = Structure.objects.all().exclude(refined=True)
    #         for s in structures:
    #             pdbname = str(s)
    #             print(pdbname)
    #             try:
    #                 exists = Construct.objects.filter(structure__pdb_code__index=pdbname).exists()
    #                 if not exists:
    #                     print(pdbname)
    #                     protein = Protein.objects.filter(entry_name=pdbname.lower()).get()
    #                     d = fetch_pdb_info(pdbname,protein)
    #                     add_construct(d)
    #                 else:
    #                     print("Entry for",pdbname,"already there")
    #             except:
    #                 print(pdbname,'failed')
    def create_construct_data(self, filenames=False):
        """Import construct data.

        With *filenames*: load each listed JSON file from construct_data_dir.
        Without: scan all non-refined structures and fetch missing entries
        from the PDB (network access via fetch_pdb_info).
        """
        self.logger.info('ADDING EXPERIMENTAL CONSTRUCT DATA')
        # read source files
        do_all = False
        if not filenames:
            do_all = True
            # self.purge_construct_data()
            # filenames = os.listdir(self.construct_data_dir)
        if filenames:
            for filename in filenames:
                if filename[-4:]!='json':
                    continue
                filepath = os.sep.join([self.construct_data_dir, filename])
                print('Adding '+filepath)
                with open(filepath) as json_file:
                    d = json.load(json_file)
                    add_construct(d)
        if do_all:
            structures = Structure.objects.all().exclude(refined=True)
            for s in structures:
                pdbname = str(s)
                try:
                    exists = Construct.objects.filter(structure__pdb_code__index=pdbname).exists()
                    if not exists:
                        # print(pdbname)
                        protein = Protein.objects.filter(entry_name=pdbname.lower()).get()
                        d = fetch_pdb_info(pdbname,protein)
                        add_construct(d)
                    else:
                        # pass
                        print("Entry for",pdbname,"already there")
                # NOTE(review): bare except — swallows everything including
                # KeyboardInterrupt/SystemExit; prefer `except Exception`.
                except:
                    print(pdbname,'failed')
        self.logger.info('COMPLETED CREATING EXPERIMENTAL CONSTRUCT DATA')
| 39.272189
| 137
| 0.605846
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.