content stringlengths 5 1.05M |
|---|
from turtle import pos
from matplotlib.axis import XAxis
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np

# NOTE(review): `pos`, `XAxis` and `np` are imported but never used below —
# presumably IDE auto-imports; confirm before removing.

# --- Plot 1: input characteristics ('vstupní charakteristiky'): U_BE vs I_B ---
# Series M1-M4 are labelled 0/20/50/100 mV — presumably the bias condition of
# each measurement run; confirm against the lab protocol.
df = pd.read_csv('tranzistory1data.csv')
ax = df.plot(y="M1 - U [V]", x="M1 - I [mA]", kind="line", label = '0mV')
df.plot(y="M2 - U [V]", x="M2 - I [mA]", kind="line", ax=ax, color="C2", label = '20mV')
df.plot(y="M3 - U [V]", x="M3 - I [mA]", kind="line", ax=ax, color="C3", label = '50mV')
df.plot(y="M4 - U [V]", x="M4 - I [mA]", kind="line", ax=ax, color="C4", label = '100mV')
plt.ylabel(r'$\ U_{BE} (V)$')
plt.xlabel(r'$\ I_B (mA)$')
# Flip both axes so the curves are drawn in the conventional quadrant.
plt.gca().invert_yaxis()
plt.gca().invert_xaxis()
plt.text(5,0.5,'vstupní charakteristiky')
plt.show()

# --- Plot 2: transfer characteristics ('převodní charakteristiky') ---
df = pd.read_csv('tranzistory2data.csv')
ax = df.plot(x="M1 - U [V]", y="M1 - I [mA]", kind="line", label = '100mV')
df.plot(x="M2 - U [V]", y="M2 - I [mA]", kind="line", ax=ax, color="C2", label = '500mV')
df.plot(x="M3 - U [V]", y="M3 - I [mA]", kind="line", ax=ax, color="C3", label = '1000mV')
df.plot(x="M4 - U [V]", y="M4 - I [mA]", kind="line", ax=ax, color="C4", label = '2000mV')
plt.legend(loc='upper left')
plt.ylabel(r'$\ U_{CE} (V)$')
plt.xlabel(r'$\ I_B (mA)$',)
#plt.gca().invert_yaxis()
plt.gca().invert_xaxis()
plt.text(1,6.5,'převodní charakteristiky')
# X ticks moved to the top edge because the x axis is inverted.
ax.xaxis.set_ticks_position("top")
plt.show()

# --- Plot 3: output characteristics ('výstupní charakteristiky'): I_C vs U_CE ---
df = pd.read_csv('tranzistory3data.csv')
ax = df.plot(x="M1 - U [V]", y="M1 - I [mA]", kind="line", label = '2000mV')
df.plot(x="M2 - U [V]", y="M2 - I [mA]", kind="line", ax=ax, color="C2", label = '4000mV')
df.plot(x="M3 - U [V]", y="M3 - I [mA]", kind="line", ax=ax, color="C3", label = '6000mV')
df.plot(x="M4 - U [V]", y="M4 - I [mA]", kind="line", ax=ax, color="C4", label = '8000mV')
plt.legend(loc='lower right')
plt.ylabel(r'$\ I_C (mA)$')
plt.xlabel(r'$\ U_{CE} (V)$')
ax.xaxis.set_ticks_position("top")
ax.yaxis.set_ticks_position("right")
plt.text(0.5,6.5,'výstupní charakteristiky')
#plt.gca().invert_yaxis()
#plt.gca().invert_xaxis()
plt.show()
from libary.utils import custom_emotes
class CustomEmotes:
    """Emote table resolving to either a custom emote or a plain fallback.

    Every class attribute is a two-item list: index 0 holds the custom
    (guild-specific) emote string, index 1 the generic fallback. Reading any
    attribute on an *instance* returns one of the two, depending on what
    ``custom_emotes()`` reports.
    """

    type = ["Đ", "Đ"]
    bot_dev = ["<:bot_dev:937424114933702736>", ":tools:"]
    booster = ["<:booster:937428202123591711>", ":sparkles:"]

    def __getattribute__(self, __name: str) -> str:
        # Check the feature flag first (same evaluation order as before),
        # then look the pair up on the class — not via `self` — so this
        # override does not recurse into itself.
        selected = 0 if custom_emotes() else 1
        return getattr(CustomEmotes, __name)[selected]
|
# Created by Egor Kostan.
# GitHub: https://github.com/ikostan
# LinkedIn: https://www.linkedin.com/in/egor-kostan/
# ALGORITHMS DATES/TIME MATHEMATICS NUMBERS
import unittest
import allure
from utils.log_func import print_log
from kyu_5.human_readable_time.make_readable import make_readable
@allure.epic('5 kyu')
@allure.parent_suite('Novice')
@allure.suite("Algorithms")
@allure.sub_suite("Unit Tests")
@allure.feature("Math")
@allure.story('Human Readable Time')
@allure.tag('ALGORITHMS', 'DATES/TIME', 'MATHEMATICS', 'NUMBERS')
@allure.link(url='https://www.codewars.com/kata/52685f7382004e774f0001f7/train/python',
             name='Source/Kata')
class MakeReadableTestCase(unittest.TestCase):
    """Unit tests for the make_readable function."""

    def test_make_readable(self):
        """Check that make_readable formats a second count as HH:MM:SS.

        make_readable takes a non-negative integer (seconds) and returns
        the time in a human-readable HH:MM:SS format where:
          * HH = hours, padded to 2 digits, range: 00 - 99
          * MM = minutes, padded to 2 digits, range: 00 - 59
          * SS = seconds, padded to 2 digits, range: 00 - 59
        The maximum time never exceeds 359999 (99:59:59).
        """
        allure.dynamic.title("Testing make_readable function")
        allure.dynamic.severity(allure.severity_level.NORMAL)
        allure.dynamic.description_html('<h3>Codewars badge:</h3>'
                                        '<img src="https://www.codewars.com/users/myFirstCode'
                                        '/badges/large">'
                                        '<h3>Test Description:</h3>'
                                        "<p></p>")
        with allure.step("Enter test number and verify the output"):
            # (input seconds, expected formatted string) pairs covering zero,
            # sub-minute, exact minute, day boundary and the documented max.
            cases = (
                (0, "00:00:00"),
                (5, "00:00:05"),
                (60, "00:01:00"),
                (86399, "23:59:59"),
                (359999, "99:59:59"),
            )
            for total_seconds, formatted in cases:
                print_log(seconds=total_seconds, expected=formatted)
                self.assertEqual(formatted, make_readable(total_seconds))
|
import pyro
import pyro.distributions as distrs

# Seed pyro's RNG at import time so sampled behaviour factors are reproducible.
pyro.set_rng_seed(100)
class FactorSocialDeviation:
    """Behaviour factor parameterised by an integer 'is_crazy' level.

    Only ``get_sigma`` is implemented here; every other getter is an abstract
    hook that subclasses must provide. The getter names (minGap, maxSpeed,
    jmIgnoreFoeProb, ...) suggest SUMO driver parameters — confirm against the
    consuming simulation code.
    """

    def __init__(self, is_crazy: int):
        # 0 (or anything other than 1/2) = deterministic sigma,
        # 1 and 2 = randomly sampled sigma (see get_sigma).
        self.is_crazy = is_crazy

    def get_decel(self) -> float:
        raise NotImplementedError

    def get_sigma(self) -> float:
        """Return the sigma value, sampled for levels 1 and 2, fixed otherwise."""
        if self.is_crazy == 1:
            # Baseline 0.5 plus a Normal(0.25, 0.25) draw.
            return 0.5 + distrs.Normal(0.25, 0.25)().item()
        elif self.is_crazy == 2:
            # Scaled Exponential(0.25) draw, clamped to be non-negative.
            return max(0, distrs.Exponential(0.25)().item() / 10)
        else:
            return 0.5

    def get_minGap(self):
        raise NotImplementedError

    def get_maxSpeed(self):
        raise NotImplementedError

    def get_collisionMinGapFactor(self):
        raise NotImplementedError

    def get_speedFactor(self) -> float:
        raise NotImplementedError

    def get_actionStepLength(self) -> float:
        raise NotImplementedError

    def get_jmIgnoreFoeProb(self):
        raise NotImplementedError

    def get_jmSigmaMinor(self):
        raise NotImplementedError
|
import asyncio
from http import HTTPStatus
from uuid import uuid4
from aiohttp import ClientSession
from aiohttp.web_exceptions import HTTPNotFound
from aiohttp.web_response import Response
from asynctest import TestCase, patch, CoroutineMock
from asyncworker import App
from asyncworker.conf import settings
from asyncworker.http.wrapper import RequestWrapper
class HTTPMetricsTests(TestCase):
    """Integration tests for asyncworker's aiohttp metrics instrumentation.

    Each test issues a real HTTP request against a locally started App and
    asserts how the (patched) metrics module was used: the
    ``requests_in_progress`` gauge (inc on entry / dec on exit), the
    ``request_duration`` histogram and the ``response_size`` histogram.
    """

    # Base URL of the app started in setUp; host/port come from asyncworker settings.
    app_url = f"http://{settings.HTTP_HOST}:{settings.HTTP_PORT}"

    async def setUp(self):
        self.app = App()
        self.client = ClientSession()
        # Handlers delegate to this mock so individual tests can inject
        # behaviour (e.g. raising to simulate a 500).
        self.callback = callback = CoroutineMock()
        self.route_path = "/mock_handler"
        self.route_method = "GET"
        # Patch the metrics module used by the instrumentation layer; the
        # patch is undone in tearDown via patch.stopall().
        self.metrics = metrics = patch(
            "asyncworker.metrics.aiohttp_resources.metrics"
        ).start()

        @self.app.http.get(routes=[self.route_path])
        async def handler(wrapper: RequestWrapper):
            request = wrapper.http_request
            # While the request is in flight, the in-progress gauge must have
            # been incremented exactly once and not decremented yet.
            metrics.requests_in_progress.labels.assert_called_once_with(
                method=request.method, path=request.path
            )
            metrics.requests_in_progress.labels.return_value.inc.assert_called_once()
            metrics.requests_in_progress.labels.return_value.dec.assert_not_called()
            await callback(request)
            return Response(text="ok")

        self.dynamic_route_path = "/resource/{id}"

        # NOTE: this second nested coroutine reuses the local name `handler`;
        # both routes stay registered because the decorator has already run.
        @self.app.http.get(routes=[self.dynamic_route_path])
        async def handler(wrapper: RequestWrapper):
            request = wrapper.http_request
            # Dynamic routes must be labelled with the route *template*, not
            # the concrete request path (keeps label cardinality bounded).
            metrics.requests_in_progress.labels.assert_called_once_with(
                method="GET", path=self.dynamic_route_path
            )
            metrics.requests_in_progress.labels.return_value.inc.assert_called_once()
            metrics.requests_in_progress.labels.return_value.dec.assert_not_called()
            await callback(request)
            return Response(text="ok")

        await self.app.startup()

    async def tearDown(self):
        # Shut the app and the client down concurrently, then undo the patch.
        await asyncio.gather(self.app.shutdown(), self.client.close())
        patch.stopall()

    async def test_successful_request(self):
        """A 200 response records size, duration, and gauge inc + dec."""
        url = f"{self.app_url}{self.route_path}"
        async with self.client.get(url) as response:
            content = await response.text()
            self.assertEqual(response.status, HTTPStatus.OK)
            self.metrics.response_size.labels.assert_called_once_with(
                method=self.route_method, path=self.route_path
            )
            self.metrics.request_duration.labels.assert_called_once_with(
                method=self.route_method,
                path=self.route_path,
                status=response.status,
            )
            self.metrics.requests_in_progress.labels.assert_called_with(
                method=self.route_method, path=self.route_path
            )
            # Response size observed in bytes of the returned body.
            self.metrics.response_size.labels.return_value.observe.assert_called_once_with(
                len(content)
            )
            self.metrics.request_duration.labels.return_value.observe.assert_called_once()
            self.metrics.requests_in_progress.labels.return_value.dec.assert_called_once()

    async def test_unsuccessful_request(self):
        """A handler exception (500) still records duration and the gauge dec,
        but no response size."""
        url = f"{self.app_url}{self.route_path}"
        self.callback.side_effect = KeyError
        async with self.client.get(url) as response:
            await response.text()
            self.assertEqual(response.status, HTTPStatus.INTERNAL_SERVER_ERROR)
            self.metrics.response_size.labels.assert_not_called()
            self.metrics.request_duration.labels.assert_called_once_with(
                method=self.route_method,
                path=self.route_path,
                status=response.status,
            )
            self.metrics.requests_in_progress.labels.assert_called_with(
                method=self.route_method, path=self.route_path
            )
            self.metrics.response_size.labels.return_value.observe.assert_not_called()
            self.metrics.request_duration.labels.return_value.observe.assert_called_once()
            self.metrics.requests_in_progress.labels.return_value.dec.assert_called_once()

    async def test_notfound_request(self):
        """A handler raising HTTPNotFound (404) is still fully measured."""
        url = f"{self.app_url}{self.route_path}"
        self.callback.side_effect = HTTPNotFound
        async with self.client.get(url) as response:
            content = await response.text()
            self.assertEqual(response.status, HTTPStatus.NOT_FOUND)
            self.metrics.response_size.labels.assert_called_once_with(
                method=self.route_method, path=self.route_path
            )
            self.metrics.request_duration.labels.assert_called_once_with(
                method=self.route_method,
                path=self.route_path,
                status=response.status,
            )
            self.metrics.requests_in_progress.labels.assert_called_with(
                method=self.route_method, path=self.route_path
            )
            self.metrics.response_size.labels.return_value.observe.assert_called_once_with(
                len(content)
            )
            self.metrics.request_duration.labels.return_value.observe.assert_called_once()
            self.metrics.requests_in_progress.labels.return_value.dec.assert_called_once()

    async def test_request_to_route_with_dynamic_path(self):
        """Requests to templated routes are labelled with the route template."""
        request_path = self.dynamic_route_path.format(id=uuid4().hex)
        url = f"{self.app_url}{request_path}"
        async with self.client.get(url) as response:
            content = await response.text()
            self.assertEqual(response.status, HTTPStatus.OK)
            self.metrics.response_size.labels.assert_called_once_with(
                method="GET", path=self.dynamic_route_path
            )
            self.metrics.request_duration.labels.assert_called_once_with(
                method="GET", path=self.dynamic_route_path, status=response.status
            )
            self.metrics.requests_in_progress.labels.assert_called_with(
                method="GET", path=self.dynamic_route_path
            )
            self.metrics.response_size.labels.return_value.observe.assert_called_once_with(
                len(content)
            )
            self.metrics.request_duration.labels.return_value.observe.assert_called_once()
            self.metrics.requests_in_progress.labels.return_value.dec.assert_called_once()

    async def test_request_to_route_with_404_path(self):
        """Requests to unknown paths are bucketed under 'unregistered_path'
        so random URLs cannot explode metric label cardinality."""
        request_path = f"/{uuid4().hex}"
        url = f"{self.app_url}{request_path}"
        async with self.client.get(url) as response:
            content = await response.text()
            self.assertEqual(response.status, HTTPStatus.NOT_FOUND)
            self.metrics.response_size.labels.assert_called_once_with(
                method="GET", path="unregistered_path"
            )
            self.metrics.request_duration.labels.assert_called_once_with(
                method="GET", path="unregistered_path", status=response.status
            )
            self.metrics.requests_in_progress.labels.assert_called_with(
                method="GET", path="unregistered_path"
            )
            self.metrics.response_size.labels.return_value.observe.assert_called_once_with(
                len(content)
            )
            self.metrics.request_duration.labels.return_value.observe.assert_called_once()
            self.metrics.requests_in_progress.labels.return_value.dec.assert_called_once()
|
# Generated by Django 2.2.12 on 2020-07-10 13:33
import django.db.models.deletion
import django_countries.fields
import django_inet.models
from django.db import migrations, models
import django_peeringdb.models.abstract
class Migration(migrations.Migration):
    """Add IX-F import bookkeeping fields to the InternetExchange model."""

    dependencies = [
        ("django_peeringdb", "0009_ixlanprefix_in_dfz"),
    ]

    operations = [
        # Timestamp of the last IX-F import; nullable because an exchange may
        # never have been imported.
        migrations.AddField(
            model_name="internetexchange",
            name="ixf_last_import",
            field=models.DateTimeField(
                blank=True, null=True, verbose_name="IX-F Last Import"
            ),
        ),
        # Network count reported by the IX-F import, defaulting to 0.
        migrations.AddField(
            model_name="internetexchange",
            name="ixf_net_count",
            field=models.IntegerField(default=0, verbose_name="IX-F Network Count"),
        ),
    ]
|
from collections import Counter
import dateutil
from nltk import sent_tokenize, word_tokenize
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
import pandas as pd
import numpy as np
from nltk.corpus import stopwords
from sklearn.feature_extraction.text import CountVectorizer
from numpy import dot
from numpy.linalg import norm
# --- Incident field names; the email-related ones can be overridden via
# script arguments in main(). ---
EMAIL_BODY_FIELD = 'emailbody'
EMAIL_SUBJECT_FIELD = 'emailsubject'
EMAIL_HTML_FIELD = 'emailbodyhtml'
FROM_FIELD = 'emailfrom'
FROM_DOMAIN_FIELD = 'fromdomain'
PREPROCESSED_EMAIL_BODY = 'preprocessedemailbody'
PREPROCESSED_EMAIL_SUBJECT = 'preprocessedemailsubject'
# NOTE(review): 'mereged' looks like a typo for 'merged', but this is a data
# key — confirm nothing else reads it before renaming.
MERGED_TEXT_FIELD = 'mereged_text'
EMAIL_TO_FIELD = 'emailto'
EMAIL_CC_FIELD = 'emailcc'
EMAIL_BCC_FIELD = 'emailbcc'
# Campaign thresholds, read from the script arguments at import time.
MIN_CAMPAIGN_SIZE = int(demisto.args().get("minIncidentsForCampaign", 3))
MIN_UNIQUE_RECIPIENTS = int(demisto.args().get("minUniqueRecipients", 2))
# Sentences whose cosine similarity exceeds this are treated as duplicates
# when summarising the email body.
DUPLICATE_SENTENCE_THRESHOLD = 0.95
# Spam/phishing indicator words: bolded in the email summary and up-weighted
# when ranking sentences (see summarize_email_body / create_email_summary_hr).
KEYWORDS = ['#1', '100%', 'access', 'accordance', 'account', 'act', 'action', 'activate', 'ad', 'affordable', 'amazed',
            'amazing', 'apply', 'asap', 'asked', 'attach', 'attached', 'attachment', 'attachments', 'attention',
            'authorize', 'authorizing', 'avoid', 'bank', 'bargain', 'billing', 'bonus', 'boss', 'bucks', 'bulk', 'buy',
            "can't", 'cancel', 'candidate', 'capacity', 'card', 'cards', 'cash', 'casino', 'caution', 'cents',
            'certified', 'chance', 'charges', 'claim', 'claims', 'clearance', 'click', 'collect', 'confidentiality',
            'confirm', 'confirmation', 'confirmed', 'congratulations', 'consideration', 'consolidate', 'consultation',
            'contact', 'contract', 'credentials', 'credit', 'day', 'days', 'deadline', 'deal', 'deals', 'dear', 'debt',
            'delivered', 'delivery', 'deposit', 'detected', 'dhl', 'disabled', 'discount', 'discounts', 'document',
            'documents', 'dollar', 'dollars', 'dropbox', 'drugs', 'due', 'earn', 'earnings', 'enlarge', 'enlargement',
            'equity', 'erection', 'erections', 'exclusive', 'expire', 'expires', 'fedex', 'fees', 'file', 'finance',
            'financial', 'fraud', 'free', 'friend', 'from', 'funds', 'gas', 'gift', 'gimmick', 'giveaway', 'great',
            'growth', 'guarantee', 'guaranteed', 'hack', 'hacked', 'hacker', 'hormone', 'hosting', 'hours', 'hurry',
            'immediate', 'immediately', 'important', 'income', 'increase', 'instant', 'interest', 'investment',
            'invoice', 'kindly', 'last', 'lender', 'lenders', 'lifetime', 'limited', 'loan', 'loans', 'login', 'lose',
            'loss', 'luxury', 'market', 'marketing', 'mass', 'mastrubate', 'mastrubating', 'med', 'medications',
            'medicine', 'meds', 'member', 'membership', 'million', 'millions', 'miracle', 'money', 'monthly',
            'months', 'mortgage', 'newsletter', 'notification', 'notify', 'obligation', 'offer', 'offers', 'oil',
            'only', 'open', 'opt', 'order', 'package', 'paid', 'parcel', 'partners', 'password', 'passwords',
            'payment', 'payments', 'paypal', 'payroll', 'pdf', 'penis', 'pennies', 'permanently', 'pharmacy', 'pics',
            'pictures', 'pill', 'pills', 'porn', 'porno', 'postal', 'potential', 'pre-approved', 'presently',
            'preview', 'price', 'prize', 'profit', 'promise', 'promotion', 'purchase', 'pure', 'qualifies', 'qualify',
            'quote', 'rates', 'receipt', 'record', 'recorded', 'recording', 'refund', 'request', 'requested',
            'requires', 'reserve', 'reserves', 'review', 'risk', 'sales', 'satisfactin', 'satisfaction', 'satisfied',
            'save', 'scam', 'security', 'sensitive', 'sex', 'share', 'shared', 'sharing', 'shipment', 'shipping',
            'sir', 'spam', 'special', 'spend', 'spending', 'started', 'starting', 'stock', 'success', 'supplies',
            'supply', 'suspended', 'temporarily', 'terms', 'trader', 'trading', 'traffic', 'transaction', 'transfer',
            'trial', 'unlimited', 'unsecured', 'unsolicited', 'unsubscribe', 'update', 'ups', 'urgent', 'user', 'usps',
            'valium', 'verification', 'verify', 'viagra', 'vicodin', 'videos', 'vids', 'viedo', 'virus', 'waiting',
            'wallet', 'warranty', 'web', 'weight', 'win', 'winner', 'winning', 'wire', 'xanax']
# Numeric incident status codes -> human-readable labels.
STATUS_DICT = {
    0: "Pending",
    1: "Active",
    2: "Closed",
    3: "Archive",
}
def return_outputs_custom(readable_output, outputs=None, tag=None):
    """Write a note entry to the war room.

    readable_output: markdown shown to the analyst.
    outputs: optional payload used both as entry contents and entry context.
    tag: optional suffix; when given the entry is tagged 'campaign_<tag>'.
    """
    entry = {
        "Type": entryTypes["note"],
        "HumanReadable": readable_output,
        "ContentsFormat": formats['json'],
        "Contents": outputs,
        "EntryContext": outputs,
    }
    if tag is not None:
        entry["Tags"] = ['campaign_{}'.format(tag)]
    demisto.results(entry)
def add_context_key(entry_context):
    """Return a copy of the context with every key prefixed 'EmailCampaign.'."""
    return {
        '{}.{}'.format('EmailCampaign', key): value
        for key, value in entry_context.items()
    }
def create_context_for_campaign_details(campaign_found=False, incidents_df=None):
    """Build the EmailCampaign context payload describing the detection result.

    When no campaign was found only the flag is returned; otherwise the
    involved incidents (excluding the current one) are serialised with their
    similarity, sender address and sender domain.
    """
    if not campaign_found:
        return {
            'isCampaignFound': campaign_found,
        }
    current_incident_id = demisto.incident()['id']
    involved = incidents_df[
        ['id', 'similarity', FROM_FIELD, FROM_DOMAIN_FIELD]]  # lgtm [py/hash-unhashable-value]
    involved = involved[involved['id'] != current_incident_id]
    involved.rename({FROM_DOMAIN_FIELD: 'emailfromdomain'}, axis=1, inplace=True)
    # Rows without a similarity score default to 1.0 (fillna below).
    return {
        'isCampaignFound': campaign_found,
        'involvedIncidentsCount': len(incidents_df) if incidents_df is not None else 0,
        'incidents': involved.fillna(1).to_dict(orient='records'),
    }
def create_context_for_indicators(indicators_df=None):
    """Build the indicators part of the campaign context.

    Returns {'indicators': [...]} with id/value records, or an empty list
    when no DataFrame is given.
    """
    if indicators_df is None:
        return {'indicators': []}
    # The rename is deliberately in-place: the caller's frame is mutated back
    # from the presentation column name 'Value' to the context key 'value'.
    indicators_df.rename({'Value': 'value'}, axis=1, inplace=True)
    records = indicators_df[['id', 'value']].to_dict(orient='records')
    return {'indicators': records}
def create_empty_context():
    """Namespaced context payload for the 'no campaign found' case."""
    return add_context_key(create_context_for_campaign_details(campaign_found=False))
def is_number_of_incidents_too_low(res, incidents):
    """Return True (after posting a 'no campaign' entry) when the duplicate
    search found nothing, or found fewer incidents than MIN_CAMPAIGN_SIZE."""
    duplicates_found = res["EntryContext"]['isDuplicateIncidentFound']
    if duplicates_found and len(incidents) >= MIN_CAMPAIGN_SIZE:
        return False
    return_outputs_custom('No possible campaign was detected', create_empty_context())
    return True
def is_number_of_unique_recipients_is_too_low(incidents):
    """Return True (after posting an explanatory entry) when the similar
    incidents target too few unique recipients to count as a campaign.

    Recipients are gathered from the To/CC/BCC fields; incidents without an
    'Email To' value are tallied under the sentinel 'None' and reported as
    missing.
    """
    unique_recipients = Counter(str(i.get(EMAIL_TO_FIELD, 'None')) for i in incidents)
    unique_recipients += Counter(str(i[EMAIL_CC_FIELD]) for i in incidents if EMAIL_CC_FIELD in i)
    unique_recipients += Counter(str(i[EMAIL_BCC_FIELD]) for i in incidents if EMAIL_BCC_FIELD in i)
    missing_recipients = unique_recipients['None']
    unique_recipients.pop('None', None)
    too_low_all_known = len(unique_recipients) < MIN_UNIQUE_RECIPIENTS and missing_recipients == 0
    too_low_some_missing = (0 < len(unique_recipients) < MIN_UNIQUE_RECIPIENTS
                            and missing_recipients > 0)
    if not (too_low_all_known or too_low_some_missing):
        return False
    parts = [
        'Similar emails were found, but the number of their unique recipients is too low to consider them as '
        'campaign.\n ',
        'If you wish to consider similar emails as campaign even with low number of unique recipients, '
        'please change *minUniqueRecipients* argument\'s value.\n',
        'Details:\n',
        '* Found {} similar incidents.\n'.format(len(incidents)),
        '* Those incidents have {} unique recipients'.format(len(unique_recipients)),
        ' ({}).\n'.format(', '.join(unique_recipients)),
        '* The minimum number of unique recipients for similar emails as campaign: '
        '{}\n'.format(MIN_UNIQUE_RECIPIENTS),
    ]
    if missing_recipients > 0:
        parts.append('* Could not find email recipient for {}/{} incidents '
                     '(*Email To* field is empty)'.format(missing_recipients, len(incidents)))
    return_outputs_custom(''.join(parts), create_empty_context())
    return True
def get_str_representation_top_n_values(values_list, counter_tuples_list, top_n):
    """Format the top-N (value, count) pairs as 'v1 (c1), v2 (c2), ...'.

    When more than ``top_n`` distinct values exist, the remainder is folded
    into a trailing 'Other (<count>)' bucket.
    """
    shown = list(counter_tuples_list[:top_n])
    if len(counter_tuples_list) > top_n:
        remainder = len(values_list) - sum(count for _, count in shown)
        shown.append(('Other', remainder))
    return ', '.join('{} ({})'.format(name, count) for name, count in shown)
def calculate_campaign_details_table(incidents_df, fields_to_display):
    """Build the markdown 'Possible Campaign Detected' summary table.

    Summarises incident count, similarity (single value or range), date
    (single day or range), top sender addresses/domains, recipients, and any
    extra requested fields that have values.
    """
    n_incidents = len(incidents_df)
    similarities = incidents_df['similarity'].dropna().to_list()
    max_similarity = max(similarities)
    min_similarity = min(similarities)
    headers = []
    contents = []
    headers.append('Details')
    contents.append('Found possible campaign of {} similar emails'.format(n_incidents))
    # Show a range only when the spread exceeds float noise (1e-3).
    if max_similarity > min_similarity + 10 ** -3:
        headers.append('Similarity range')
        contents.append("{:.1f}%-{:.1f}%".format(min_similarity * 100, max_similarity * 100))
    else:
        headers.append('Similarity')
        contents.append("{:.1f}%".format(max_similarity * 100))
    incidents_df['created_dt'] = incidents_df['created'].apply(lambda x: dateutil.parser.parse(x))  # type: ignore
    datetimes = incidents_df['created_dt'].dropna()  # type: ignore
    min_datetime, max_datetime = min(datetimes), max(datetimes)
    # Single day -> 'Date'; multiple days -> 'Date range'.
    if (max_datetime - min_datetime).days == 0:
        headers.append('Date')
        contents.append(max_datetime.strftime("%B %d, %Y"))
    else:
        headers.append('Date range')
        contents.append('{} - {}'.format(min_datetime.strftime("%B %d, %Y"), max_datetime.strftime("%B %d, %Y")))
    # Empty strings are normalised to NaN and dropped before counting.
    senders = incidents_df[FROM_FIELD].dropna().replace('', np.nan).tolist()
    senders_counter = Counter(senders).most_common()  # type: ignore
    senders_domain = incidents_df[FROM_DOMAIN_FIELD].replace('', np.nan).dropna().tolist()
    domains_counter = Counter(senders_domain).most_common()  # type: ignore
    if EMAIL_TO_FIELD in incidents_df.columns:
        recipients = incidents_df[EMAIL_TO_FIELD].replace('', np.nan).dropna().tolist()
        if EMAIL_CC_FIELD in incidents_df.columns:
            recipients += incidents_df[EMAIL_CC_FIELD].replace('', np.nan).dropna().tolist()
        if EMAIL_BCC_FIELD in incidents_df.columns:
            recipients += incidents_df[EMAIL_BCC_FIELD].replace('', np.nan).dropna().tolist()
        recipients_counter = Counter(recipients).most_common()  # type: ignore
    # NOTE(review): if EMAIL_TO_FIELD is not among the columns, `recipients`
    # and `recipients_counter` are never bound and their use below raises
    # NameError — presumably upstream guarantees the column exists; confirm.
    if len(senders_counter) == 1:
        domain_header = "Sender domain"
        sender_header = "Sender address"
    elif len(senders_counter) > 1 and len(domains_counter) == 1:
        domain_header = "Senders domain"
        sender_header = "Senders addresses"
    else:
        domain_header = "Senders domains"
        sender_header = "Senders addresses"
    top_n = 3
    domain_value = get_str_representation_top_n_values(senders_domain, domains_counter, top_n)
    sender_value = get_str_representation_top_n_values(senders, senders_counter, top_n)
    # All recipients are listed (no 'Other' folding).
    recipients_value = get_str_representation_top_n_values(recipients, recipients_counter, len(recipients_counter))
    headers.append(domain_header)
    contents.append(domain_value)
    headers.append(sender_header)
    contents.append(sender_value)
    headers.append('Recipients')
    contents.append(recipients_value)
    # Append any extra requested fields that actually carry values.
    for field in fields_to_display:
        if field in incidents_df.columns:
            field_values = get_non_na_empty_values(incidents_df, field)
            if len(field_values) > 0:
                field_values_counter = Counter(field_values).most_common()  # type: ignore
                field_value_str = get_str_representation_top_n_values(field_values, field_values_counter, top_n)
                headers.append(field)
                contents.append(field_value_str)
    hr = tableToMarkdown('Possible Campaign Detected', {header: value for header, value in zip(headers, contents)},
                         headers=headers)
    return hr
def get_non_na_empty_values(incidents_df, field):
    """Return the non-null, non-empty values of ``field`` as a list.

    Empty strings are converted to NaN and dropped, and whitespace-only
    values are filtered out afterwards.
    """
    # Use np.nan rather than None: Series.replace('', None) is interpreted by
    # older pandas versions as method='pad' (forward-fill), which silently
    # duplicates the preceding value instead of nulling the cell. Every other
    # call site in this script already normalises with np.nan.
    field_values = incidents_df[field].replace('', np.nan).dropna().tolist()
    return [x for x in field_values if len(str(x).strip()) > 0]
def cosine_sim(a, b):
    """Cosine similarity between vectors a and b (dot product over norms)."""
    denominator = norm(a) * norm(b)
    return dot(a, b) / denominator
def summarize_email_body(body, subject, nb_sentences=3, subject_weight=1.5, keywords_weight=1.5):
    """Extractive summary: the top ``nb_sentences`` sentences of an email body.

    Sentences are ranked by the normalised frequency of their words, with
    words from the subject weighted by ``subject_weight`` and spam KEYWORDS
    boosted by ``keywords_weight``. Near-duplicate sentences (cosine
    similarity above DUPLICATE_SENTENCE_THRESHOLD to an earlier sentence)
    are excluded. Gaps between selected sentences are marked with '...'.
    """
    corpus = sent_tokenize(body)
    cv = CountVectorizer(stop_words=list(stopwords.words('english')))
    body_arr = cv.fit_transform(corpus).toarray()
    subject_arr = cv.transform(sent_tokenize(subject)).toarray()
    # NOTE(review): get_feature_names() was removed in scikit-learn 1.2
    # (replaced by get_feature_names_out()); this code pins an older sklearn.
    word_list = cv.get_feature_names()
    count_list = body_arr.sum(axis=0) + subject_arr.sum(axis=0) * subject_weight
    # Indices of sentences that nearly repeat an earlier sentence.
    duplicate_sentences = [i for i, arr in enumerate(body_arr) if
                           any(cosine_sim(arr, arr2) > DUPLICATE_SENTENCE_THRESHOLD
                               for arr2 in body_arr[:i])]
    word_frequency = dict(zip(word_list, count_list))
    val = sorted(word_frequency.values())
    max_frequency = val[-1]
    # Normalise frequencies to [0, 1] relative to the most frequent word.
    for word in word_frequency.keys():
        word_frequency[word] = (word_frequency[word] / max_frequency)
    for word in KEYWORDS:
        if word in word_frequency:
            word_frequency[word] *= keywords_weight
    # Score each (non-duplicate) sentence by its average word weight.
    sentence_rank = [0] * len(corpus)
    for i, sent in enumerate(corpus):
        if i in duplicate_sentences:
            continue
        for word in word_tokenize(sent):
            if word.lower() in word_frequency.keys():
                sentence_rank[i] += word_frequency[word.lower()]
        sentence_rank[i] = sentence_rank[i] / len(word_tokenize(sent))  # type: ignore
    top_sentences_indices = np.argsort(sentence_rank)[::-1][:nb_sentences].tolist()
    summary = []
    # Re-emit the chosen sentences in document order, inserting '...' where
    # neighbouring sentences were skipped.
    for sent_i in sorted(top_sentences_indices):
        sent = corpus[sent_i].strip().replace('\n', ' ')
        if sent_i == 0 and sent_i + 1 not in top_sentences_indices:
            sent = sent + ' ...'
        elif sent_i + 1 == len(corpus) and sent_i - 1 not in top_sentences_indices:
            sent = '... ' + sent
        elif sent_i - 1 not in top_sentences_indices and sent_i + 1 not in top_sentences_indices:
            sent = '... ' + sent + ' ...'
        summary.append(sent)
    return '\n'.join(summary)
def create_email_summary_hr(incidents_df):
    """Build the campaign context plus a markdown snippet of the first email.

    The summary is generated from the preprocessed subject/body of the first
    incident, with every KEYWORD occurrence (lower/title/upper case) bolded.
    Returns (context, markdown).
    """
    first_incident = incidents_df.iloc[0]
    clean_email_subject = first_incident[PREPROCESSED_EMAIL_SUBJECT]
    clean_email_body = first_incident[PREPROCESSED_EMAIL_BODY]
    email_summary = 'Subject: ' + clean_email_subject.replace('\n', '')
    email_summary += '\n' + summarize_email_body(clean_email_body, clean_email_subject)
    # Bold keyword hits; the lookaround anchors avoid matching inside words.
    for word in KEYWORDS:
        for cased_word in (word.lower(), word.title(), word.upper()):
            email_summary = re.sub(r'(?<!\w)({})(?!\w)'.format(cased_word), '**{}**'.format(cased_word), email_summary)
    hr_email_summary = '\n\n' + '### Current Incident\'s Email Snippets'
    hr_email_summary += '\n ##### ' + email_summary
    context = add_context_key(create_context_for_campaign_details(campaign_found=True, incidents_df=incidents_df))
    return context, hr_email_summary
def return_campaign_details_entry(incidents_df, fields_to_display):
    """Post the campaign-details war-room entry (table + email snippets)."""
    details_table = calculate_campaign_details_table(incidents_df, fields_to_display)
    context, summary_hr = create_email_summary_hr(incidents_df)
    combined_hr = '\n'.join((details_table, summary_hr))
    return return_outputs_custom(combined_hr, context, tag='campaign_details')
def return_no_mututal_indicators_found_entry():
    """Post the 'no mutual indicators' war-room entry.

    (The 'mututal' typo in the name is kept — callers in this script use it.)
    """
    markdown = '### Mutual Indicators' + '\n' + 'No mutual indicators were found.'
    return_outputs_custom(markdown, add_context_key(create_context_for_indicators()), tag='indicators')
def return_indicator_entry(incidents_df):
    """Find indicators shared by the campaign incidents and post them.

    Queries GetIndicatorsByQuery for indicators seen in any of the incidents,
    keeps only those involved in at least two of them (ignoring very common
    indicators), posts a 'Mutual Indicators' table entry, and returns the
    resulting DataFrame (empty when nothing mutual was found).
    """
    indicators_query = 'investigationIDs:({})'.format(' '.join('"{}"'.format(id_) for id_ in incidents_df['id']))
    fields = ['id', 'indicator_type', 'investigationIDs', 'relatedIncCount', 'score', 'value']
    indicators_args = {'query': indicators_query, 'limit': '150', 'populateFields': ','.join(fields)}
    res = demisto.executeCommand('GetIndicatorsByQuery', args=indicators_args)
    if is_error(res):
        return_error(res)
    indicators = res[0]['Contents']
    indicators_df = pd.DataFrame(data=indicators)
    if len(indicators_df) == 0:
        return_no_mututal_indicators_found_entry()
        return indicators_df
    # Drop overly common indicators (present in 150+ incidents) as noise.
    indicators_df = indicators_df[indicators_df['relatedIncCount'] < 150]
    indicators_df['Involved Incidents Count'] = \
        indicators_df['investigationIDs'].apply(lambda x: sum(id_ in x for id_ in incidents_df['id']))
    # 'Mutual' = appears in at least two of the campaign incidents.
    indicators_df = indicators_df[indicators_df['Involved Incidents Count'] > 1]
    if len(indicators_df) == 0:
        return_no_mututal_indicators_found_entry()
        return indicators_df
    # Markdown link into the indicator page.
    indicators_df['Id'] = indicators_df['id'].apply(lambda x: "[%s](#/indicator/%s)" % (x, x))
    indicators_df = indicators_df.sort_values(['score', 'Involved Incidents Count'], ascending=False)
    indicators_df['Reputation'] = indicators_df['score'].apply(scoreToReputation)
    indicators_df.rename({'value': 'Value', 'indicator_type': 'Type'}, axis=1, inplace=True)
    indicators_headers = ['Id', 'Value', 'Type', 'Reputation', 'Involved Incidents Count']
    hr = tableToMarkdown('Mutual Indicators', indicators_df.to_dict(orient='records'),
                         headers=indicators_headers)
    return_outputs_custom(hr, add_context_key(create_context_for_indicators(indicators_df)), tag='indicators')
    return indicators_df
def get_comma_sep_list(value):
    """Split a comma-separated string into stripped, non-empty tokens."""
    stripped = (part.strip() for part in value.split(","))
    return [token for token in stripped if token != '']
def get_reputation(id_, indicators_df):
    """Return the reputation label for an incident: the highest indicator
    score among indicators involving it (0 when none apply)."""
    max_reputation = 0
    if len(indicators_df) > 0:
        relevant = indicators_df[indicators_df['investigationIDs'].apply(lambda ids: id_ in ids)]
        if len(relevant) > 0:
            max_reputation = max(relevant['score'])
    return scoreToReputation(max_reputation)
def return_involved_incidents_entry(incidents_df, indicators_df, fields_to_display):
    """Post the 'Involved Incidents' war-room table for the campaign.

    Adds presentation columns (markdown id link, formatted creation date,
    DBot score derived from the mutual indicators, similarity percentage with
    a '(current)' marker on the running incident) and appends any requested
    extra fields that carry values.
    """
    incidents_df['Id'] = incidents_df['id'].apply(lambda x: "[%s](#/Details/%s)" % (x, x))
    incidents_df = incidents_df.sort_values('created', ascending=False).reset_index(drop=True)
    incidents_df['created_dt'] = incidents_df['created'].apply(lambda x: dateutil.parser.parse(x))  # type: ignore
    incidents_df['Created'] = incidents_df['created_dt'].apply(lambda x: x.strftime("%B %d, %Y"))
    # Incidents without a similarity score default to 1.0 (100%).
    incidents_df['similarity'] = incidents_df['similarity'].fillna(1)
    incidents_df['similarity'] = incidents_df['similarity'].apply(lambda x: '{:.1f}%'.format(x * 100))
    current_incident_id = demisto.incident()['id']
    incidents_df['DBot Score'] = incidents_df['id'].apply(lambda id_: get_reputation(id_, indicators_df))
    # add a mark at current incident, at its similarity cell
    incidents_df['similarity'] = incidents_df.apply(
        lambda x: '{} (current)'.format(x['similarity']) if x['id'] == current_incident_id else x['similarity'], axis=1)
    # Map numeric status codes to labels; unknown codes become ''.
    incidents_df['status'] = incidents_df['status'].apply(lambda x: STATUS_DICT[x] if x in STATUS_DICT else '')
    incidents_df.rename({
        'name': 'Name',
        FROM_FIELD: 'Email From',
        'similarity': 'Similarity to Current Incident',
        'status': 'Status'},
        axis=1, inplace=True)
    incidents_headers = ['Id', 'Created', 'Name', 'Status', 'Email From', 'DBot Score',
                         'Similarity to Current Incident']
    if fields_to_display is not None:
        # Keep only requested fields that exist and have at least one value.
        fields_to_display = [f for f in fields_to_display if f in incidents_df.columns]
        incidents_df[fields_to_display] = incidents_df[fields_to_display].fillna('')
        fields_to_display = [f for f in fields_to_display if len(get_non_na_empty_values(incidents_df, f)) > 0]
        incidents_headers += fields_to_display
    hr = '\n\n' + tableToMarkdown('Involved Incidents', incidents_df[incidents_headers].to_dict(orient='records'),
                                  headers=incidents_headers)
    return_outputs_custom(hr, tag='incidents')
def draw_canvas(incidents, indicators):
    """Draw the related-incidents canvas for the campaign (best-effort).

    Only indicators shared by at least two of the campaign incidents are
    drawn; any failure is swallowed so canvas issues never fail the script.
    """
    incident_ids = {incident['id'] for incident in incidents}
    filtered_indicators = [
        indicator for indicator in indicators
        if len(incident_ids.intersection(indicator.get('investigationIDs', []))) >= 2
    ]
    try:
        res = demisto.executeCommand('DrawRelatedIncidentsCanvas', {'relatedIncidentsIDs': list(incident_ids),
                                                                    'indicators': filtered_indicators,
                                                                    'overrideUserCanvas': 'true'
                                                                    })
        if not is_error(res):
            res[-1]['Tags'] = ['canvas']
        demisto.results(res)
    except Exception:
        # Deliberately best-effort: the canvas is cosmetic.
        pass
def analyze_incidents_campaign(incidents, fields_to_display):
    """Produce all campaign war-room entries for the given similar incidents."""
    frame = pd.DataFrame(incidents)
    return_campaign_details_entry(frame, fields_to_display)
    mutual_indicators = return_indicator_entry(frame)
    return_involved_incidents_entry(frame, mutual_indicators, fields_to_display)
    draw_canvas(incidents, mutual_indicators.to_dict(orient='records'))
def main():
    """Entry point: detect an email campaign around the current incident.

    Reads field-name overrides and display fields from the script arguments,
    delegates duplicate detection to the FindDuplicateEmailIncidents
    automation, and bails out early when the result set is too small or
    targets too few unique recipients.
    """
    global EMAIL_BODY_FIELD, EMAIL_SUBJECT_FIELD, EMAIL_HTML_FIELD, FROM_FIELD
    input_args = demisto.args()
    # Allow callers to override which incident fields hold the email parts.
    EMAIL_BODY_FIELD = input_args.get('emailBody', EMAIL_BODY_FIELD)
    EMAIL_SUBJECT_FIELD = input_args.get('emailSubject', EMAIL_SUBJECT_FIELD)
    EMAIL_HTML_FIELD = input_args.get('emailBodyHTML', EMAIL_HTML_FIELD)
    FROM_FIELD = input_args.get('emailFrom', FROM_FIELD)
    fields_to_display = input_args.get('fieldsToDisplay')
    if fields_to_display is not None:
        # Forward the raw value so the duplicate search populates those fields.
        input_args['populateFields'] = fields_to_display
        fields_to_display = get_comma_sep_list(fields_to_display)
    else:
        fields_to_display = []
    res = demisto.executeCommand('FindDuplicateEmailIncidents', input_args)
    if is_error(res):
        return_error(get_error(res))
    res = res[-1]
    incidents = json.loads(res['Contents'])
    if is_number_of_incidents_too_low(res, incidents):
        return
    if is_number_of_unique_recipients_is_too_low(incidents):
        return
    analyze_incidents_campaign(incidents, fields_to_display)


# XSOAR may execute scripts under several module names.
if __name__ in ['__main__', '__builtin__', 'builtins']:
    main()
|
from collections import defaultdict
from copy import deepcopy
from pprint import pformat
from textwrap import wrap
import warnings
import numpy as np
from astropy.units import UnitConversionError, Quantity, Unit
class FieldValidationError(ValueError):
    """Raised by Field.validate when a value does not satisfy the Field spec."""
    pass
class Field:
    """
    Class for storing data in `Containers`.

    Parameters
    ----------
    default:
        default value of the item (this will be set when the `Container`
        is constructed, as well as when `Container.reset()` is called)
    description: str
        Help text associated with the item
    unit: str or astropy.units.core.UnitBase
        unit to convert to when writing output, or None for no conversion
    ucd: str
        universal content descriptor (see Virtual Observatory standards)
    dtype: str or np.dtype
        expected data type of the value, None to ignore in validation.
    ndim: int or None
        expected dimensionality of the data, for arrays, None to ignore
    allow_none:
        if the value of None is given to this Field, skip validation
    """

    def __init__(
        self,
        default=None,
        description="",
        unit=None,
        ucd=None,
        dtype=None,
        ndim=None,
        allow_none=True,
    ):
        self.default = default
        self.description = description
        # normalize unit/dtype once here so validate() can compare directly
        self.unit = Unit(unit) if unit is not None else None
        self.ucd = ucd
        self.dtype = np.dtype(dtype) if dtype is not None else None
        self.ndim = ndim
        self.allow_none = allow_none

    def __repr__(self):
        """Human-readable summary of the field's constraints."""
        desc = f"{self.description}"
        if self.unit is not None:
            desc += f" [{self.unit}]"
        if self.ndim is not None:
            desc += f" as a {self.ndim}-D array"
        if self.dtype is not None:
            desc += f" with type {self.dtype}"
        return desc

    def validate(self, value):
        """
        Check that a given value is appropriate for this Field.

        Parameters
        ----------
        value: Any
            the value to test

        Raises
        ------
        FieldValidationError:
            if the value is not valid
        """
        if self.allow_none and value is None:
            return
        errorstr = f"the value '{value}' ({type(value)}) is invalid: "
        if self.unit is not None:
            if not isinstance(value, Quantity):
                raise FieldValidationError(
                    f"{errorstr} Should have units of {self.unit}"
                ) from None
            try:
                value.to(self.unit)
            except UnitConversionError as err:
                # chain the unit error so the original cause stays visible
                raise FieldValidationError(f"{errorstr} {err}") from err
            # strip off the units now, so we can test the rest without units
            value = value.value
        if self.ndim is not None:
            # should be a numpy array of the right dimensionality
            if not isinstance(value, np.ndarray):
                raise FieldValidationError(f"{errorstr} Should be an ndarray")
            if value.ndim != self.ndim:
                raise FieldValidationError(
                    f"{errorstr} Should have dimensionality {self.ndim}"
                )
            # only check dtype if one was requested; previously a dtype of
            # None was compared anyway and always raised, contradicting the
            # documented "None to ignore in validation"
            if self.dtype is not None and value.dtype != self.dtype:
                raise FieldValidationError(
                    f"{errorstr} Has dtype "
                    f"{value.dtype}, should have dtype"
                    f" {self.dtype}"
                )
        else:
            # not a numpy array: check the scalar type if requested
            if self.dtype is not None:
                if not isinstance(value, self.dtype.type):
                    raise FieldValidationError(
                        f"{errorstr} Should have numpy dtype {self.dtype}"
                    )
class DeprecatedField(Field):
    """ used to mark which fields may be removed in next version """
    def __init__(self, default, description="", unit=None, ucd=None, reason=""):
        # NOTE: dtype/ndim/allow_none are intentionally not forwarded here;
        # deprecated fields only keep the basic metadata
        super().__init__(default=default, description=description, unit=unit, ucd=ucd)
        # warns as soon as the field is instantiated (i.e. when the owning
        # Container class body is executed), not when it is accessed
        warnings.warn(f"Field {self} is deprecated. {reason}", DeprecationWarning)
        self.reason = reason
class ContainerMeta(type):
    """
    The MetaClass for the Containers
    It reserves __slots__ for every class variable,
    that is of instance `Field` and sets all other class variables
    as read-only for the instances.
    This makes sure, that the metadata is immutable,
    and no new fields can be added to a container by accident.
    """
    def __new__(cls, name, bases, dct):
        # collect all class attributes that are Field descriptions
        field_names = [k for k, v in dct.items() if isinstance(v, Field)]
        # reserve a slot per field plus the two fixed instance attributes
        dct["__slots__"] = tuple(field_names + ["meta", "prefix"])
        dct["fields"] = {}
        # inherit fields from baseclasses
        for b in bases:
            if issubclass(b, Container):
                for k, v in b.fields.items():
                    dct["fields"][k] = v
        # move Field objects out of the class namespace into `fields`, so the
        # slot descriptors (not the Field metadata) are what instances access
        for k in field_names:
            dct["fields"][k] = dct.pop(k)
        new_cls = type.__new__(cls, name, bases, dct)
        # if prefix was not set as a class variable, build a default one
        if "container_prefix" not in dct:
            new_cls.container_prefix = name.lower().replace("container", "")
        return new_cls
class Container(metaclass=ContainerMeta):
    """Generic class that can hold and accumulate data to be passed
    between Components.
    The purpose of this class is to provide a flexible data structure
    that works a bit like a dict or blank Python class, but prevents
    the user from accessing members that have not been defined a
    priori (more like a C struct), and also keeps metadata information
    such as a description, defaults, and units for each item in the
    container.
    Containers can transform the data into a `dict` using the `
    Container.as_dict()` method. This allows them to be written to an
    output table for example, where each Field defines a column. The
    `dict` conversion can be made recursively and even flattened so
    that a nested set of `Containers` can be translated into a set of
    columns in a flat table without naming conflicts (the name of the
    parent Field is pre-pended).
    Only members of instance `Field` will be used as output.
    For hierarchical data structures, Field can use `Container`
    subclasses or a `Map` as the default value.
    >>> class MyContainer(Container):
    >>>    x = Field(100,"The X value")
    >>>    energy = Field(-1, "Energy measurement", unit=u.TeV)
    >>>
    >>> cont = MyContainer()
    >>> print(cont.x)
    >>> # metadata will become header keywords in an output file:
    >>> cont.meta['KEY'] = value
    `Field`s inside `Containers` can contain instances of other
    `Containers`, to allow for a hierarchy of containers, and can also
    contain a `Map` for the case where one wants e.g. a set of
    sub-classes indexed by a value like the `telescope_id`. Examples
    of this can be found in `ctapipe.containers`
    `Containers` work by shadowing all class variables (which must be
    instances of `Field`) with instance variables of the same name the
    hold the value expected. If `Container.reset()` is called, all
    instance variables are reset to their default values as defined in
    the class.
    Finally, `Containers` can have associated metadata via their
    `meta` attribute, which is a `dict` of keywords to values.
    """
    def __init__(self, **fields):
        self.meta = {}
        # __slots__ cannot be provided with defaults
        # via class variables, so we use a `container_prefix` class variable
        # and an instance variable `prefix` in `__slots__`
        self.prefix = self.container_prefix
        # fields not passed in get a deep copy of their default, so mutable
        # defaults (e.g. sub-Containers, Maps) are not shared between instances
        for k in set(self.fields).difference(fields):
            setattr(self, k, deepcopy(self.fields[k].default))
        for k, v in fields.items():
            setattr(self, k, v)
    def __getitem__(self, key):
        """Allow dict-style read access to the fields."""
        return getattr(self, key)
    def __setitem__(self, key, value):
        """Allow dict-style write access to the fields."""
        return setattr(self, key, value)
    def items(self, add_prefix=False):
        """Generator over (key, value) pairs for the items"""
        if not add_prefix or self.prefix == "":
            return ((k, getattr(self, k)) for k in self.fields.keys())
        return ((self.prefix + "_" + k, getattr(self, k)) for k in self.fields.keys())
    def keys(self):
        """Get the keys of the container"""
        return self.fields.keys()
    def values(self):
        """Get the values of the container"""
        return (getattr(self, k) for k in self.fields.keys())
    def as_dict(self, recursive=False, flatten=False, add_prefix=False):
        """
        convert the `Container` into a dictionary
        Parameters
        ----------
        recursive: bool
            sub-Containers should also be converted to dicts
        flatten: type
            return a flat dictionary, with any sub-field keys generated
            by appending the sub-Container name.
        add_prefix: bool
            include the container's prefix in the name of each item
        """
        if not recursive:
            return dict(self.items(add_prefix=add_prefix))
        else:
            d = dict()
            for key, val in self.items(add_prefix=add_prefix):
                if isinstance(val, Container) or isinstance(val, Map):
                    if flatten:
                        # merge the sub-container's items into this level,
                        # prefixing each with the parent key to avoid clashes
                        d.update(
                            {
                                f"{key}_{k}": v
                                for k, v in val.as_dict(
                                    recursive, add_prefix=add_prefix
                                ).items()
                            }
                        )
                    else:
                        d[key] = val.as_dict(
                            recursive=recursive, flatten=flatten, add_prefix=add_prefix
                        )
                else:
                    d[key] = val
            return d
    def reset(self, recursive=True):
        """ set all values back to their default values"""
        for name, value in self.fields.items():
            if isinstance(value, Container):
                if recursive:
                    getattr(self, name).reset()
            else:
                setattr(self, name, deepcopy(self.fields[name].default))
    def update(self, **values):
        """
        update more than one parameter at once (e.g. `update(x=3,y=4)`
        or `update(**dict_of_values)`)
        """
        for key in values:
            self[key] = values[key]
    def __str__(self):
        # pretty-printed recursive dict representation
        return pformat(self.as_dict(recursive=True))
    def __repr__(self):
        text = ["{}.{}:".format(type(self).__module__, type(self).__name__)]
        for name, item in self.fields.items():
            # mark sub-containers with ".*" and maps with "[*]"
            extra = ""
            if isinstance(getattr(self, name), Container):
                extra = ".*"
            if isinstance(getattr(self, name), Map):
                extra = "[*]"
            desc = "{:>30s}: {}".format(name + extra, repr(item))
            lines = wrap(desc, 80, subsequent_indent=" " * 32)
            text.extend(lines)
        return "\n".join(text)
    def validate(self):
        """
        Check that all fields in the Container have the expected characterisics (as
        defined by the Field metadata). This is not intended to be run every time a
        Container is filled, since it is slow, only for testing a first event.
        Raises
        ------
        ValueError:
            if the Container's values are not valid
        """
        for name, field in self.fields.items():
            try:
                field.validate(self[name])
            except FieldValidationError as err:
                # re-raise with the container/field name for context
                raise FieldValidationError(
                    f"{self.__class__.__name__} Field '{name}': {err}"
                )
class Map(defaultdict):
    """A dictionary of sub-containers that can be added to a Container. This
    may be used e.g. to store a set of identical sub-Containers (e.g. indexed
    by `tel_id` or algorithm name).
    """
    def as_dict(self, recursive=False, flatten=False, add_prefix=False):
        """Convert the Map to a dict; mirrors `Container.as_dict` semantics."""
        if not recursive:
            return dict(self.items())
        else:
            d = dict()
            for key, val in self.items():
                if isinstance(val, Container) or isinstance(val, Map):
                    if flatten:
                        # merge sub-items into this level, key-prefixed
                        d.update(
                            {
                                f"{key}_{k}": v
                                for k, v in val.as_dict(
                                    recursive, add_prefix=add_prefix
                                ).items()
                            }
                        )
                    else:
                        d[key] = val.as_dict(
                            recursive=recursive, flatten=flatten, add_prefix=add_prefix
                        )
                    continue
                # plain values are copied through unchanged
                d[key] = val
            return d
    def reset(self, recursive=True):
        """Reset every contained `Container` value to its defaults."""
        for val in self.values():
            if isinstance(val, Container):
                val.reset(recursive=recursive)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Load and archive the CAL-ACCESS Filing and FilingVersion models.
"""
from django.apps import apps
from django.conf import settings
from django.core.management import call_command
from django.db import connection
from django.utils.timezone import now
from calaccess_processed.management.commands import CalAccessCommand
from calaccess_processed.models.tracking import ProcessedDataFile
class Command(CalAccessCommand):
    """
    Load and archive the CAL-ACCESS Filing and FilingVersion models.
    """
    help = 'Load and archive the CAL-ACCESS Filing and FilingVersion models.'

    def add_arguments(self, parser):
        """
        Adds custom arguments specific to this command.
        """
        parser.add_argument(
            "--force-restart",
            "--restart",
            action="store_true",
            dest="restart",
            default=False,
            help="Force re-start (overrides auto-resume)."
        )

    def handle(self, *args, **options):
        """
        Make it happen.
        """
        super(Command, self).handle(*args, **options)
        self.force_restart = options.get("restart")
        # get or create the ProcessedDataVersion instance
        self.processed_version, created = self.get_or_create_processed_version()
        # resume a previous partial run unless the user forced a restart
        if self.processed_version.files.count() > 0 and not self.force_restart:
            self.header(
                'Resume loading of filings from {:%m-%d-%Y %H:%M:%S} snapshot'.format(
                    self.processed_version.raw_version.release_datetime,
                )
            )
        else:
            self.header(
                'Load filings from {:%m-%d-%Y %H:%M:%S} snapshot'.format(
                    self.processed_version.raw_version.release_datetime
                )
            )
        # set the time if not there already or forcing restart
        if not self.processed_version.process_start_datetime or self.force_restart:
            self.processed_version.process_start_datetime = now()
            self.processed_version.save()
        # handle version models first
        version_models = self.get_model_list('version')
        self.load_model_list(version_models)
        # then filing models
        filing_models = self.get_model_list('filing')
        self.load_model_list(filing_models)
        self.success("Done!")

    def get_model_list(self, model_type):
        """
        Return a list of models of the specified type to be loaded.

        model_type must be "version" or "filing".
        """
        # every concrete model in the filings sub-package of the app
        non_abstract_models = [
            m for m in apps.get_app_config('calaccess_processed').get_models()
            if not m._meta.abstract and
            'filings' in str(m)
        ]
        if model_type == 'version':
            models_to_load = [
                m for m in non_abstract_models if 'Version' in str(m)
            ]
        elif model_type == 'filing':
            models_to_load = [
                m for m in non_abstract_models if 'Version' not in str(m)
            ]
        else:
            raise Exception('model_type must be "version" or "filing".')
        # if not forcing a restart, filter out the models already loaded
        if not self.force_restart:
            loaded_models_q = ProcessedDataFile.objects.filter(
                version=self.processed_version,
                process_finish_datetime__isnull=False,
            )
            # QuerySet.filter() returns a *new* queryset, so the result must
            # be assigned back; previously it was discarded, leaving the
            # queryset unfiltered by model type.
            if model_type == 'version':
                loaded_models_q = loaded_models_q.filter(
                    file_name__icontains='Version'
                )
            elif model_type == 'filing':
                loaded_models_q = loaded_models_q.filter(
                    file_name__icontains='Form'
                ).exclude(file_name__icontains='Version')
            loaded_models = [i.file_name for i in loaded_models_q.all()]
            if self.verbosity >= 2:
                self.log(
                    " {0} {1} models already loaded.".format(
                        len(loaded_models),
                        model_type,
                    )
                )
            models_to_load = [
                m for m in models_to_load
                if m._meta.object_name not in loaded_models
            ]
        if self.verbosity >= 2:
            self.log(
                " Loading {0} {1} models.".format(
                    len(models_to_load),
                    model_type,
                )
            )
        return models_to_load

    def load_model_list(self, model_list):
        """
        Iterate over the given list of models, loading each one.
        """
        # iterate over all of filing models
        for m in model_list:
            # set up the ProcessedDataFile instance
            processed_file, created = ProcessedDataFile.objects.get_or_create(
                version=self.processed_version,
                file_name=m._meta.object_name,
            )
            processed_file.process_start_datetime = now()
            processed_file.save()
            # flush the processed model
            if self.verbosity > 2:
                self.log(" Truncating %s" % m._meta.db_table)
            with connection.cursor() as c:
                # table identifiers cannot be parameterized; db_table comes
                # from model Meta, not from user input
                c.execute('TRUNCATE TABLE "%s" CASCADE' % (m._meta.db_table))
            # load the processed model
            if self.verbosity > 2:
                self.log(" Loading %s" % m._meta.db_table)
            m.objects.load_raw_data()
            processed_file.records_count = m.objects.count()
            processed_file.process_finish_datetime = now()
            processed_file.save()
            # archive if django project setting enabled
            if getattr(settings, 'CALACCESS_STORE_ARCHIVE', False):
                call_command(
                    'archivecalaccessprocessedfile',
                    m._meta.object_name,
                )
|
from django.test import TestCase
from rest_framework.test import APIClient
from django.urls import reverse
from faker import Factory
from .models import Cliente
from usuarios.tests import getApiCliente, criaUsuarios
faker = Factory.create('pt_BR')
# Create your tests here.
class ClientesModelTest(TestCase):
    """Model-level tests for creating `Cliente` rows."""
    def setUp(self):
        # create a single Cliente with random pt_BR data
        self.email_usuario = faker.email()
        Cliente.objects.create(
            email=self.email_usuario,
            nome=faker.name()
        )
    def testValidaQtdInserts(self):
        # exactly one row should exist after setUp
        saved_models = Cliente.objects.count()
        self.assertEqual(saved_models, 1)
class ClientesAPITest(TestCase):
    """CRUD tests for the /api/v1/clientes/ REST endpoint.

    Uses the deprecated ``failUnlessEqual`` alias replaced by ``assertEqual``
    (the alias was removed from ``unittest`` in Python 3.12).
    """

    def setUp(self):
        # one pre-existing Cliente plus an authenticated API client
        self.email_usuario = faker.email()
        Cliente.objects.create(
            email=self.email_usuario,
            nome=faker.name()
        )
        self.endpoint = '/api/v1/clientes/'
        email_usuario, email_superusuario, senha = criaUsuarios()  # create users
        self.api_client = getApiCliente(email_usuario, senha)  # fetch auth token

    def testValidaGET(self):
        """Listing clients returns HTTP 200."""
        response = self.api_client.get(self.endpoint)
        self.assertEqual(response.status_code, 200)

    def testValidaPOST(self):
        """Creating a client returns 201 and the list grows to two."""
        response = self.api_client.post(
            self.endpoint,
            {
                "nome": faker.name(),
                "email": faker.email()
            }
        )
        self.assertEqual(response.status_code, 201)
        response = self.api_client.get(self.endpoint)
        self.assertEqual(2, len(response.data.get("results")))

    def testValidaPUTParcial(self):
        """PATCH updates only the supplied field."""
        tb = Cliente.objects.get(email=self.email_usuario)
        response = self.api_client.patch(
            f'{self.endpoint}{tb.id}/',
            {
                "nome": "Desafio",
            },
            format='json'
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data.get('nome'), 'Desafio')

    def testValidaPUT(self):
        """PUT replaces the record; unchanged fields keep their values."""
        tb = Cliente.objects.get(email=self.email_usuario)
        response = self.api_client.put(
            f'{self.endpoint}{tb.id}/',
            {
                "nome": tb.nome,
                "email": 'validaPUT@validaPUT.com',
            },
            format='json'
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual('validaPUT@validaPUT.com', response.data.get("email"))
        self.assertEqual(response.data.get('nome'), tb.nome)

    def testValidaDELETE(self):
        """DELETE removes the record and returns 204."""
        tb = Cliente.objects.get(email=self.email_usuario)
        response = self.api_client.delete(f'{self.endpoint}{tb.id}/')
        self.assertEqual(response.status_code, 204)
|
'''Useful subroutines dealing with asyn functions from another process.'''
import asyncio
import os
import queue
import multiprocessing as mp
import multiprocessing.queues as mq
from .base import yield_control
__all__ = ['qput_aio', 'qget_aio', 'BgProcess']
async def qput_aio(q: mq.Queue, obj, block : bool = True, timeout : float = None, aio_interval : float = 0.001):
    '''Puts obj into the queue q, blocking asynchronously when needed.

    With `block` True and `timeout` None (the defaults), the queue is polled
    every `aio_interval` seconds — yielding to the event loop in between —
    until a free slot appears. With `block` True and a positive `timeout`,
    the polling gives up after roughly `timeout` seconds and raises
    :class:`queue.Full`. With `block` False, the item is put only if a slot
    is immediately available, otherwise :class:`queue.Full` is raised and
    `timeout` is ignored.
    '''
    if not block:
        return q.put(obj, block=False)
    if timeout is None:
        # unbounded asynchronous wait for a free slot
        while q.full():
            await asyncio.sleep(aio_interval)
        return q.put(obj, block=True)
    # bounded wait: poll a fixed number of times before giving up
    for _ in range(int(timeout / aio_interval) + 1):
        if not q.full():
            return q.put(obj, block=True)
        await asyncio.sleep(aio_interval)
    raise queue.Full()
async def qget_aio(q: mq.Queue, block : bool = True, timeout : float = None, aio_interval : float = 0.001):
    '''Removes and returns an item from the queue q, blocking asynchronously.

    With `block` True and `timeout` None (the defaults), the queue is polled
    every `aio_interval` seconds — yielding to the event loop in between —
    until an item is available. With `block` True and a positive `timeout`,
    the polling gives up after roughly `timeout` seconds and raises
    :class:`queue.Empty`. With `block` False, an item is returned only if one
    is immediately available, otherwise :class:`queue.Empty` is raised and
    `timeout` is ignored.
    '''
    if not block:
        return q.get(block=False)
    if timeout is None:
        # unbounded asynchronous wait for an item
        while q.empty():
            await asyncio.sleep(aio_interval)
        return q.get(block=True)
    # bounded wait: poll a fixed number of times before giving up
    for _ in range(int(timeout / aio_interval) + 1):
        if not q.empty():
            return q.get(block=True)
        await asyncio.sleep(aio_interval)
    raise queue.Empty()
class BgProcess:
    '''Launches a child process that communicates with the parent process via message passing.
    You should subclass this class and implement :func:`child_handle_message`. See the docstring of the
    function below.
    Notes
    -----
    In interactive mode, remember to delete any instance of the class when you exit or else it
    will not exit.
    '''
    def __init__(self):
        # parent-to-child and child-to-parent message queues
        self.msg_p2c = mp.Queue()
        self.msg_c2p = mp.Queue()
        self.msg_cnt = 0
        # recorded so the child can detect when the parent has died
        self.parent_pid = os.getpid()
        self.child_process = mp.Process(target=self._worker_process)
        self.child_process.start()
        # simple flag guarding against interleaved send() calls from
        # multiple coroutines (cooperative, single event loop)
        self.sending = False
    def __del__(self):
        self.close()
    async def send(self, msg, recv_timeout : float = None, recv_aio_interval = 0.001):
        '''Sends a message to the child process and awaits for the returning message.
        Parameters
        ----------
        msg : object
            message to be sent to the child process
        recv_timeout : float
            If specified, the number of seconds to wait asynchronously to receive the message,
            before raising a :class:`queue.Empty` exception. If not, asynchronously blocks until
            the message from the child process is received.
        recv_aio_interval : float
            time unit to simulate asynchronous blocking while waiting for message response. Default
            is 1ms.
        Returns
        -------
        object
            message received from the child process
        Raises
        ------
        RuntimeError
            if the child process is not alive while processing the message
        '''
        # wait for any in-flight send() to finish: the queues carry one
        # request/response exchange at a time
        while self.sending:
            await yield_control()
        try:
            self.sending = True
            self.msg_p2c.put_nowait(msg)
            # drain side-channel messages (child prints, ignored exceptions)
            # until a terminal response arrives
            while True:
                retval = await qget_aio(self.msg_c2p, timeout=recv_timeout, aio_interval=recv_aio_interval)
                if retval[0] == 'ignored_exception':
                    continue
                if retval[0] == 'write': # child printing something
                    for line in retval[2].splitlines():
                        print("BgProcess ({}): {}".format(retval[1], line))
                    continue
                break
            if retval[0] == 'exit':
                if retval[1] is None:
                    raise RuntimeError("Child process died normally while parent process is expecting some message response.", msg)
                else:
                    raise RuntimeError("Child process died abruptedly with an exception.", retval[1], msg)
            if retval[0] == 'raised_exception':
                raise RuntimeError("Child raised an exception while processing the message.", retval[1], retval[2])
        finally:
            self.sending = False
        # retval is ('returned', value) at this point
        return retval[1]
    def child_handle_message(self, msg: object) -> object:
        '''Handles a message obtained from the queue.
        This function should only be called by the child process.
        It takes as input a message from the parent-to-child queue and processes the message.
        Once done, it returns an object which will be wrapped into a message and placed into the
        child-to-parent queue.
        An input message can be anything. Usually it is a tuple with the first component being
        a command string. The output message can also be anything. If the handle succeeds,
        the returning value is then wrapped into a `('returned', retval)` output message. If
        an exception is raised, it is wrapped into a `('raised_exception', exc, callstack_lines)`
        output message.
        If the child process prints anything to stdout our stderr, it will be redirected as
        `('write', 'stdout' or 'stderr', text)` in the output queue. Note that for now only
        Python-generated printouts can be redirected. Native printouts require more cumbersome
        solutions. See: https://exceptionshub.com/python-multiprocessing-how-can-i-reliably-redirect-stdout-from-a-child-process-2.html
        The user should override this function. The default behaviour is returning whatever
        sent to it.
        If KeyboardInterrupt is raised in the child process but outside this function, you will
        get a `('ignored_exception', KeyboardInterrupt)` message. If the child process dies
        you will get a `('exit', None or Exception)` message depending on whether the child process
        dies normally or abruptedly.
        '''
        return msg
    def _worker_process(self):
        # imports are local: this body runs only in the child process
        import psutil
        import queue
        import sys
        from ..traceback import extract_stack_compact
        class Writer:
            # file-like shim that forwards writes to the parent as messages
            def __init__(self, msg_c2p, prefix):
                self.msg_c2p = msg_c2p
                self.prefix = prefix
            def write(self, text):
                self.msg_c2p.put_nowait(('write', self.prefix, text))
        # redirect the child's stdout/stderr into the c2p queue
        sys.stderr = Writer(self.msg_c2p, 'stderr')
        sys.stdout = Writer(self.msg_c2p, 'stdout')
        while True:
            try:
                # exit if the parent process has disappeared
                if self.parent_pid is not None:
                    if not psutil.pid_exists(self.parent_pid):
                        self.msg_c2p.put_nowait(('exit', RuntimeError('Parent does not exist.')))
                        return
                # poll with a 1s timeout so the parent-alive check above
                # runs periodically even when no messages arrive
                try:
                    msg = self.msg_p2c.get(block=True, timeout=1)
                except queue.Empty:
                    continue
                if msg == 'exit':
                    break
                try:
                    retval = self.child_handle_message(msg) # handle the message and return
                    msg = ('returned', retval)
                except Exception as e:
                    msg = ('raised_exception', e, extract_stack_compact())
                self.msg_c2p.put_nowait(msg)
            except KeyboardInterrupt as e:
                # interrupts outside the handler are reported but ignored
                self.msg_c2p.put_nowait(('ignored_exception', e))
            except Exception as e:
                self.msg_c2p.put_nowait(('exit', e))
                return
        self.msg_c2p.put_nowait(('exit', None))
    def close(self):
        """Ask the child process to exit (processed after queued messages)."""
        self.msg_p2c.put_nowait('exit')
|
import logging
from unittest.mock import Mock, patch
import pytest
try:
import django
from django.db.models import Manager
from django.core.exceptions import ImproperlyConfigured
from django.test import override_settings
from parasolr.django import SolrClient, SolrQuerySet, \
AliasedSolrQuerySet
from parasolr.indexing import Indexable
from parasolr.django.indexing import ModelIndexable
from parasolr.django.tests.test_models import Collection, \
IndexItem, Owner, NothingToIndex
except ImportError:
django = None
from parasolr.tests.utils import skipif_django, skipif_no_django
@skipif_no_django
def test_django_solrclient():
    """SolrClient is configured from the Django SOLR_CONNECTIONS setting."""
    # check error handling
    # no config
    with override_settings(SOLR_CONNECTIONS=None):
        with pytest.raises(ImproperlyConfigured) as excinfo:
            SolrClient()
    assert 'requires SOLR_CONNECTIONS in settings' in str(excinfo.value)
    # config but no default
    with override_settings(SOLR_CONNECTIONS={'foo': 'bar'}):
        with pytest.raises(ImproperlyConfigured) as excinfo:
            SolrClient()
    assert 'No "default" section in SOLR_CONNECTIONS configuration' \
        in str(excinfo.value)
    # default config but no URL
    with override_settings(SOLR_CONNECTIONS={'default': {'foo': 'bar'}}):
        with pytest.raises(ImproperlyConfigured) as excinfo:
            SolrClient()
    assert 'No URL in default SOLR_CONNECTIONS configuration' in \
        str(excinfo.value)
    # url but no collection
    config = {'URL': 'http://my.solr.com:8943/solr'}
    with override_settings(SOLR_CONNECTIONS={'default': config}):
        solr = SolrClient()
        assert solr.solr_url == config['URL']
        assert solr.collection == ''
    # url and collection
    config['COLLECTION'] = 'mycore'
    with override_settings(SOLR_CONNECTIONS={'default': config}):
        solr = SolrClient()
        assert solr.solr_url == config['URL']
        assert solr.collection == config['COLLECTION']
    # commit within option
    config['COMMITWITHIN'] = 750
    with override_settings(SOLR_CONNECTIONS={'default': config}):
        solr = SolrClient()
        assert solr.commitWithin == 750
        # but passed in value takes precedence
        solr = SolrClient(commitWithin=7339)
        assert solr.commitWithin == 7339
@skipif_django
def test_no_django_solrclient():
    """Django-specific SolrClient must not import without django installed."""
    # should not be defined when django is not installed
    with pytest.raises(ImportError):
        from parasolr.django import SolrClient
@skipif_django
def test_no_django_solr_solrclient():
    """The django solr submodule must not import without django installed."""
    # should not be defined when django is not installed
    with pytest.raises(ImportError):
        from parasolr.solr.django.solr import SolrClient
@skipif_django
def test_no_django_queryset():
    """Django SolrQuerySet must not import without django installed."""
    # should not be defined when django is not installed
    with pytest.raises(ImportError):
        from parasolr.django.queryset import SolrQuerySet
@skipif_django
def test_no_django_modelindexable():
    """ModelIndexable must not import without django installed."""
    # should not be defined when django is not installed
    with pytest.raises(ImportError):
        from parasolr.django.indexing import ModelIndexable
@skipif_no_django
@patch('parasolr.django.queryset.SolrClient')
def test_django_solrqueryset(mocksolrclient):
    """SolrQuerySet uses a default client unless one is passed explicitly."""
    # auto-initialize solr connection if not specified
    sqs = SolrQuerySet()
    mocksolrclient.assert_called_with()
    assert sqs.solr == mocksolrclient.return_value
    mocksolrclient.reset_mock()
    # use solr client if passed in
    mymocksolr = Mock(spec=SolrClient)
    sqs = SolrQuerySet(solr=mymocksolr)
    assert sqs.solr == mymocksolr
    mocksolrclient.assert_not_called()
@skipif_no_django
@patch('parasolr.django.queryset.SolrClient')
def test_django_aliasedsolrqueryset(mocksolrclient):
    """AliasedSolrQuerySet inherits default-client behavior and builds alias maps."""
    class MyAliasedSolrQuerySet(AliasedSolrQuerySet):
        """extended version of AliasedSolrQuerySet for testing"""
        #: map app/readable field names to actual solr fields
        field_aliases = {
            'name': 'name_t',
            'year': 'year_i',
            'has_info': 'has_info_b',
        }
    # django queryset behavior: auto-initialize solr connection if not specified
    mysqs = MyAliasedSolrQuerySet()
    mocksolrclient.assert_called_with()
    assert mysqs.solr == mocksolrclient.return_value
    mocksolrclient.reset_mock()
    # alias queryset init: field list and reverse alias lookup populated
    assert mysqs.field_list
    assert mysqs.reverse_aliases
@skipif_no_django
@patch('parasolr.django.queryset.SolrClient')
def test_identify_index_dependencies(mocksolrclient):
    """Index dependency discovery collects related models and m2m throughs."""
    ModelIndexable.identify_index_dependencies()
    # collection model should be in related object config
    # convert list of tuples back into dict for testing
    related_models = {model: opts for model, opts in ModelIndexable.related}
    assert Collection in related_models
    # assert Collection in ModelIndexable.related
    # save/delete handler config options saved
    assert related_models[Collection] == \
        IndexItem.index_depends_on['collections']
    # through model added to m2m list
    assert IndexItem.collections.through in ModelIndexable.m2m
    # dependencies should be cached on the first run and not regenerated
    with patch.object(ModelIndexable, '__subclasses__') as mockgetsubs:
        ModelIndexable.identify_index_dependencies()
        assert mockgetsubs.call_count == 0
@skipif_no_django
def test_get_related_model(caplog):
    """get_related_model resolves relation paths of every supported kind."""
    # test app.Model notation with stock django model
    from django.contrib.auth.models import User
    assert ModelIndexable.get_related_model(IndexItem, 'auth.User') == User
    # many to many
    assert ModelIndexable.get_related_model(IndexItem, 'collections') == \
        Collection
    # reverse many to many
    assert ModelIndexable.get_related_model(IndexItem, 'owner_set') == \
        Owner
    # multipart path
    assert ModelIndexable.get_related_model(
        IndexItem, 'owner_set__collections') == Collection
    # foreign key is now supported!
    assert ModelIndexable.get_related_model(
        IndexItem, 'primary') == Collection
    # use mock to test taggable manager behavior
    mockitem = Mock()
    mockitem.tags = Mock(spec=Manager, through=Mock())
    mockitem.tags.through.tag_model.return_value = 'TagBase'
    assert ModelIndexable.get_related_model(mockitem, 'tags') == \
        'TagBase'
    # if relation cannot be determined, should warn
    with caplog.at_level(logging.WARNING):
        assert not ModelIndexable.get_related_model(mockitem, 'foo')
        assert 'Unhandled related model' in caplog.text
# these classes cannot be defined without django dependencies
if django:
    @skipif_no_django
    class TestModelIndexable:
        class NoMetaModelIndexable(NothingToIndex, ModelIndexable):
            """indexable subclass that should be indexed"""
        class AbstractModelIndexable(ModelIndexable):
            """abstract indexable subclass that should NOT be indexed"""
            class Meta:
                abstract = True
        class NonAbstractModelIndexable(NothingToIndex, ModelIndexable):
            """indexable subclass that should be indexed"""
            class Meta:
                abstract = False
        def test_all_indexables(self):
            """Abstract subclasses and the base class itself are excluded."""
            indexables = Indexable.all_indexables()
            assert ModelIndexable not in indexables
            assert self.NoMetaModelIndexable in indexables
            assert self.AbstractModelIndexable not in indexables
            assert self.NonAbstractModelIndexable in indexables
|
import pytest
import docutils.nodes as nodes
import re
from sphinxext.toptranslators import strip_accents
@pytest.mark.sphinx("html", testroot="limit")
def test_limit(html_contexts):
    """The limit option caps the number of rendered contributor items."""
    assert len(list(html_contexts[0].doctree.traverse(nodes.list_item))) == 4
@pytest.mark.sphinx("html", testroot="hide-contributions-true")
def test_contributions_true(html_contexts):
    """With hide-contributions enabled, counts do not appear in the output."""
    assert "contributions" not in html_contexts[0].doctree.astext()
@pytest.mark.sphinx("html", testroot="hide-contributions-false")
def test_contributions_false(html_contexts):
    """With hide-contributions disabled, counts appear in the output."""
    assert "contributions" in html_contexts[0].doctree.astext()
@pytest.mark.sphinx("html", testroot="order-ranked")
def test_order_ranked(html_contexts):
    """Contributors must be listed by non-increasing contribution count."""
    entries = html_contexts[0].doctree.traverse(nodes.list_item)
    counts = [
        int(re.search(r"(.*) - (.*?) contribution", entry.astext()).group(2))
        for entry in entries
    ]
    assert counts == sorted(counts, reverse=True)
@pytest.mark.sphinx("html", testroot="order-alphabetical")
def test_order_alphabetical(html_contexts):
    """Contributors must be listed alphabetically (accents stripped)."""
    items = html_contexts[0].doctree.traverse(nodes.list_item)
    prev_name = None
    for item in items:
        name = re.search(r"(.*) - (.*?) contribution", item.astext()).group(1)
        name = strip_accents(name)
        # the first entry has nothing to compare against;
        # use `is None` (identity), not `== None` (PEP 8 / E711)
        if prev_name is None:
            prev_name = name
            continue
        assert name >= prev_name
        prev_name = name
|
import importlib
import logging
from lira.parsers import State
log = logging.getLogger(__name__)
class ValidationError(Exception):
    """Raised by validators when the supplied data does not pass validation."""
class Validator:
    """
    Base class for validators.

    Call :py:meth:`run` to validate the stored data; afterwards inspect
    :py:attr:`is_valid`, :py:attr:`data` and :py:attr:`message`.

    Usage:

    .. code:: python

       validator = Validator(data="Validate me!")
       validator.run()
       print(validator.is_valid)
       print(validator.data)
       print(validator.message)
    """

    def __init__(self, data=None):
        # outcome of the most recent run
        self.is_valid = False
        # last exception raised (expected or unexpected)
        self.error: Exception = None
        # optional human-readable message set by the on_* hooks
        self.message = None
        # payload to validate
        self.data = data
        # number of times run() has been invoked
        self.tries = 0
        # validation errors collected from earlier attempts
        self.previous_errors = []

    def run(self):
        """
        Run the validator once.

        Increments :py:attr:`tries`; a failed validation is appended to
        :py:attr:`previous_errors`. Returns ``self`` for chaining.
        """
        try:
            self.is_valid = False
            self.tries += 1
            self.data = self.validate(data=self._get_data())
            self.is_valid = True
            self.on_success()
        except ValidationError as err:
            self.error = err
            self.on_failure(err)
            self.previous_errors.append(err)
        except Exception as err:
            log.exception("Error while running validator.")
            self.error = err
            self.on_exception(err)
        return self

    def _get_data(self) -> str:
        """Return the data to validate (hook for subclasses)."""
        return self.data

    def on_success(self):
        """Hook invoked after a successful validation."""
        self.message = "Awesome, you did it!"

    def on_failure(self, e: ValidationError):
        """Hook invoked after a failed validation."""
        self.message = str(e) or "Wrong answer :("

    def on_exception(self, e: Exception):
        """Hook invoked when an unhandled exception was raised."""
        self.message = (
            "Something unexpected happened! Check your logs for more information"
        )

    def validate(self, data: str) -> bool:
        """
        Validate the value; override in subclasses.

        :raises ValidationError: If the value is incorrect.
        :returns: The final value of :py:attr:`data`.
        """
        return data
class TestBlockValidator(Validator):
    """
    Base validator for :py:class:`lira.parsers.nodes.TestBlock` nodes.

    Validates the node's text content and records the outcome on the node's
    ``attributes.state`` (VALID / INVALID / UNKNOWN).

    :param node: TestBlock node to validate.
    :type node: lira.parsers.nodes.TestBlock
    """

    def __init__(self, node):
        self.node = node
        super().__init__()

    def _get_data(self) -> str:
        # Validate the node's current text rather than constructor data.
        return self.node.text()

    def on_success(self):
        super().on_success()
        self.node.attributes.state = State.VALID
        # Persist the validated text back onto the node, split into lines.
        self.node.content = self.data.split("\n")

    def on_failure(self, e: Exception):
        super().on_failure(e)
        self.node.attributes.state = State.INVALID

    def on_exception(self, e: Exception):
        super().on_exception(e)
        self.node.attributes.state = State.UNKNOWN
def get_validator_class(validator_path, subclass=None):
    """
    Resolve a :py:class:`Validator` subclass from a dotted path.

    :param validator_path: Dotted path, e.g. ``"pkg.module.MyValidator"``.
    :param subclass: Optional base class the resolved class must inherit from
        (defaults to :py:class:`Validator`).
    :raises ValueError: If ``subclass`` isn't a Validator subclass, or the
        path doesn't resolve to a class inheriting from ``subclass``.
    """
    module_name, class_name = validator_path.rsplit(".", 1)
    module = importlib.import_module(module_name)
    validator = getattr(module, class_name, None)
    subclass = subclass or Validator
    if not issubclass(subclass, Validator):
        # BUG FIX: the original format string used a bare "%" placeholder,
        # which the logging module can't interpolate; "%s" is required.
        log.warning(
            "Subclass isn't a subclass of validator. subclass=%s",
            subclass.__name__,
        )
        raise ValueError(f"{subclass.__name__} isn't a subclass of Validator")
    # BUG FIX: getattr() above may return None (or a non-class attribute);
    # issubclass() would then raise TypeError instead of the intended
    # ValueError, so reject non-classes explicitly.
    if not isinstance(validator, type) or not issubclass(validator, subclass):
        log.warning(
            "Validator isn't a subclass of validator. subclass=%s validator=%s",
            subclass.__name__,
            validator_path,
        )
        raise ValueError(f"{validator_path} isn't a subclass of {subclass.__name__}")
    return validator
|
"""
Modified from https://github.com/RobotLocomotion/drake/blob/4751fd6f0b61313faceccb7c6081bc077b35448b/bindings/pydrake/multibody/examples/jupyter_widgets_examples.ipynb
Runs simple visualization of the supplied SDF with sliders for moving joints.
"""
import argparse
import sys
import numpy as np
from pydrake.all import (
ConnectMeshcatVisualizer,
DiagramBuilder,
PiecewisePolynomial,
MeshcatVisualizer,
Simulator
)
from drake_dot_sim import setup_dot_diagram, setup_argparse_for_setup_dot_diagram, ServoSliders, TrajectoryLooper
def main():
    """Build and run the Dot robot jumping demo.

    Constructs a looping joint-space trajectory (crouch -> extend -> crouch),
    wires it into the servo controller of the Dot diagram, optionally attaches
    a Meshcat visualizer, and simulates indefinitely at real-time rate.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    # Durations (seconds) of the four phases of one jump cycle.
    parser.add_argument("-t1", default=0.05, help="Extend leg")
    parser.add_argument("-t2", default=0.5, help="Dwell at top")
    parser.add_argument("-t3", default=0.5, help="Contract leg")
    parser.add_argument("-t4", default=0.1, help="Wait at bottom")
    setup_argparse_for_setup_dot_diagram(parser)
    MeshcatVisualizer.add_argparse_argument(parser)
    args = parser.parse_args()
    # Defaults are floats, but CLI-supplied values arrive as strings.
    t1 = float(args.t1)
    t2 = float(args.t2)
    t3 = float(args.t3)
    t4 = float(args.t4)
    # Joint targets for the 12 servos (three values per leg). The numbers are
    # raw servo commands — presumably PWM-style counts; TODO confirm against
    # drake_dot_sim's ServoSliders conventions.
    q_crouch = np.array([1600, 2100, 2000,
                         1600, 2100, 2000,
                         1400, 2100, 2000,
                         1400, 2100, 2000])
    q_extend = np.array([1600, 1600, 2400,
                         1600, 1600, 2400,
                         1400, 1600, 2400,
                         1400, 1600, 2400])
    # Knot times and matching poses; FirstOrderHold linearly interpolates
    # between successive samples.
    breaks = np.cumsum([0., t1, t2, t3, t4])
    samples = np.stack([q_crouch, q_extend, q_extend, q_crouch, q_crouch]).T
    trajectory = PiecewisePolynomial.FirstOrderHold(breaks, samples)
    builder = DiagramBuilder()
    plant, scene_graph, servo_controller = setup_dot_diagram(
        builder, args)
    # Loop the trajectory forever and feed it to the servo controller.
    trajectory_source = builder.AddSystem(TrajectoryLooper(trajectory))
    builder.Connect(trajectory_source.get_output_port(0), servo_controller.get_input_port(0))
    if args.meshcat:
        meshcat = ConnectMeshcatVisualizer(
            builder, output_port=scene_graph.get_query_output_port(),
            zmq_url=args.meshcat, open_browser=args.open_browser)
    diagram = builder.Build()
    simulator = Simulator(diagram)
    simulator.set_target_realtime_rate(1.0)
    # Run (effectively) forever.
    simulator.AdvanceTo(1E6)
if __name__ == '__main__':
    main()
import datetime
import os
import unittest
import pytest
from testing.postgresql import Postgresql
from importers.gpx_importer import GPXImporter
from pepys_import.core.store.data_store import DataStore
from pepys_import.file.file_processor import FileProcessor
# Directory of this test module and the sample GPX fixture it imports.
FILE_PATH = os.path.dirname(__file__)
DATA_PATH = os.path.join(FILE_PATH, "sample_data/track_files/gpx/gpx_1_1.gpx")
def test_gpx_timezone_sqlite():
    """Importing the sample GPX into an in-memory SQLite store should store
    the first State's time as the expected naive datetime."""
    store = DataStore("", "", "", 0, ":memory:", db_type="sqlite")
    store.initialise()
    processor = FileProcessor(archive=False)
    processor.register_importer(GPXImporter())
    # parse the folder
    processor.process(DATA_PATH, store, False)
    # check data got created
    with store.session_scope():
        # there must be states after the import
        states = store.session.query(store.db_classes.State).all()
        assert states[0].time == datetime.datetime(2012, 4, 27, 15, 29, 38)
@pytest.mark.postgres
class TestGPXTimezonePostgres(unittest.TestCase):
    """Same GPX-import time check as the SQLite test, but against a
    throwaway local Postgres instance (started by testing.postgresql)."""

    def setUp(self):
        # Spin up a disposable Postgres server on port 55527.
        self.postgres = None
        try:
            self.postgres = Postgresql(
                database="test",
                host="localhost",
                user="postgres",
                password="postgres",
                port=55527,
            )
        except RuntimeError:
            raise Exception("Testing Postgres server could not be started/accessed")
        self.postgres_store = DataStore(
            db_name="test",
            db_host="localhost",
            db_username="postgres",
            db_password="postgres",
            db_port=55527,
            db_type="postgres",
        )
        self.postgres_store.initialise()

    def tearDown(self):
        # self.postgres may be unset if setUp failed before assignment.
        try:
            self.postgres.stop()
        except AttributeError:
            return

    def test_gpx_timezone_postgres(self):
        """Import the sample GPX and check the earliest State time."""
        processor = FileProcessor(archive=False)
        processor.register_importer(GPXImporter())
        processor.process(DATA_PATH, self.postgres_store, False)
        with self.postgres_store.session_scope():
            postgres_results = (
                self.postgres_store.session.query(self.postgres_store.db_classes.State)
                .order_by(self.postgres_store.db_classes.State.time)
                .all()
            )
            assert postgres_results[0].time == datetime.datetime(2012, 4, 27, 15, 29, 38)
@pytest.mark.postgres
class TestTimesEqualDifferentDBs(unittest.TestCase):
    """Importing the same GPX into Postgres and SQLite must yield identical
    State time values in both stores."""

    def setUp(self):
        # Disposable Postgres server plus an on-disk SQLite store.
        self.postgres = None
        try:
            self.postgres = Postgresql(
                database="test",
                host="localhost",
                user="postgres",
                password="postgres",
                port=55527,
            )
        except RuntimeError:
            raise Exception("Testing Postgres server could not be started/accessed")
        self.postgres_store = DataStore(
            db_name="test",
            db_host="localhost",
            db_username="postgres",
            db_password="postgres",
            db_port=55527,
            db_type="postgres",
        )
        self.sqlite_store = DataStore("", "", "", 0, db_name="slave.sqlite", db_type="sqlite")
        self.postgres_store.initialise()
        self.sqlite_store.initialise()

    def tearDown(self):
        # self.postgres may be unset if setUp failed before assignment.
        try:
            self.postgres.stop()
        except AttributeError:
            return

    def test_gpx_timezones_values_equal_postgres_sqlite(self):
        """Import into both stores, then compare the ordered time columns."""
        processor = FileProcessor(archive=False)
        processor.register_importer(GPXImporter())
        processor.process(DATA_PATH, self.sqlite_store, False)
        processor.process(DATA_PATH, self.postgres_store, False)
        with self.postgres_store.session_scope():
            with self.sqlite_store.session_scope():
                sqlite_results = (
                    self.sqlite_store.session.query(self.sqlite_store.db_classes.State)
                    .order_by(self.sqlite_store.db_classes.State.time)
                    .all()
                )
                postgres_results = (
                    self.postgres_store.session.query(self.postgres_store.db_classes.State)
                    .order_by(self.postgres_store.db_classes.State.time)
                    .all()
                )
                sqlite_times = [result.time for result in sqlite_results]
                postgres_times = [result.time for result in postgres_results]
                assert sqlite_times == postgres_times
|
import bpy
from bpy.types import Operator
import os
import sys
class ConvertModelsOperator(Operator):
    """Blender operator that batch-converts every ``.obj`` file found under a
    fixed directory tree into ``.stl``.

    For each OBJ: the scene is cleared, the OBJ imported, and the whole scene
    exported as an STL next to the source file.
    """

    bl_idname = "object.convert_models_operator"
    bl_label = "Convert Processed OBJ Files to STL"
    bl_options = {'REGISTER', 'UNDO'}

    @classmethod
    def poll(cls, context):
        # The operator is always available.
        return True

    def execute(self, context):
        # NOTE(review): hard-coded search root — presumably the folder holding
        # the processed OBJ files; consider exposing it as an operator property.
        # Raw string so the backslashes are unambiguous (not escape sequences).
        path = r'C:\Program Files\Blender Foundation\Blender'
        for root, dirs, files in os.walk(path):
            for f in files:
                if f.endswith('.obj'):
                    # BUG FIX: join with the directory currently being walked
                    # (`root`), not the walk root (`path`) — otherwise files in
                    # subdirectories resolve to nonexistent paths.
                    obj_file = os.path.join(root, f)
                    stl_file = os.path.splitext(obj_file)[0] + ".stl"
                    # Start from an empty scene so only this OBJ gets exported.
                    bpy.ops.object.select_all(action='SELECT')
                    bpy.ops.object.delete()
                    bpy.ops.import_scene.obj(filepath=obj_file)
                    bpy.ops.object.select_all(action='SELECT')
                    bpy.ops.export_mesh.stl(filepath=stl_file)
        return {'FINISHED'}
def register():
    """Register the operator class with Blender."""
    bpy.utils.register_class(ConvertModelsOperator)
def unregister():
    """Remove the operator class from Blender."""
    bpy.utils.unregister_class(ConvertModelsOperator)
# Allow running the script directly from Blender's text editor.
if __name__ == "__main__":
    register()
|
# Function to evaluate factorial by recursion
def fact(x):
    """Return ``x!`` computed recursively.

    BUG FIX: the original returned ``x`` itself for ``x <= 2``, so
    ``fact(0)`` evaluated to 0 instead of the correct 1. Inputs below 1
    (including negatives, for which the factorial is undefined) now also
    return 1 instead of echoing the argument.
    """
    if x < 2:
        # 0! == 1! == 1; this base case also terminates the recursion.
        return 1
    return x * fact(x - 1)
if __name__ == "__main__":
print(fact(10))
print(fact(50))
|
def n_type(x, y, nx, ny):
    """Classify where (nx, ny) lies relative to (x, y).

    Returns 0 if strictly to the right (same row), 1 if strictly to the
    left (same row), 2 if strictly above (same column), 3 if strictly
    below (same column), and -1 when the points coincide or are not
    axis-aligned.
    """
    if ny == y:
        if nx > x:
            return 0
        if nx < x:
            return 1
    elif nx == x:
        if ny > y:
            return 2
        if ny < y:
            return 3
    return -1
# Read the number of points, then the points themselves, from stdin.
n = int(input())
# a[i][d] == 1 when point i has at least one other point in direction d
# (0: right, 1: left, 2: above, 3: below — see n_type).
a = [[0,0,0,0] for i in range(n)]
b = []
for i in range(n):
    x,y = map(int, input().split())
    b.append((x,y))
# For every ordered pair, mark which axis-aligned direction is occupied.
for i in range(n):
    for j in range(n):
        d = n_type(*b[i], *b[j])
        if d!=-1:
            a[i][d] = 1
# Count the points that have neighbours in all four directions.
c = 0
for i in range(n):
    if sum(a[i])==4:
        c += 1
print(c)
|
from enum import Enum
import re
import json
from typing import Any, List
import graphql
from graphql.language import visitor
from graphql.language.ast import ArgumentNode, OperationDefinitionNode
class Action(Enum):
    """The action enum contains all possible actions generated for customers."""

    ADD = "add"
    CREATE = "create"
    DELETE = "delete"
    GET = "get"
    LIST = "list"
    ON_ADD = "onAdd"
    ON_CREATE = "onCreate"
    ON_DELETE = "onDelete"
    ON_REMOVE = "onRemove"
    ON_UPDATE = "onUpdate"
    PUT = "put"
    REMOVE = "remove"
    UPDATE = "update"

    def __str__(self) -> str:
        # wrap value with `str()` to appease pylint
        return str(self.value)

    def is_update(self) -> bool:
        """Returns true if the action is updating an object in the ledger"""
        return self in (Action.UPDATE, Action.PUT)

    def is_object_mutation(self) -> bool:
        """Returns true if the action mutates an existing object in the ledger.

        Covers every action that can be applied to an object that already
        exists in the world state.
        """
        return self.is_update() or self.is_delete()

    def is_graphql_mutation(self) -> bool:
        """Returns true if the action applies to a mutation graphql query."""
        return self.is_object_mutation() or self in (Action.ADD, Action.CREATE)

    def is_subscription(self) -> bool:
        """Returns true if the action is a subscription operation."""
        # Exactly the ON_* members carry the "on" prefix in their values.
        return str(self).startswith("on")

    def is_delete(self) -> bool:
        """Returns true if the action deletes objects from world state"""
        return self in (Action.DELETE, Action.REMOVE)
class MutationVisitor(visitor.Visitor):
    """GraphQL syntax visitor to parse mutations into useful JSON/dicts"""

    # Splits an operation name such as "addAnimal" into (action, type name),
    # e.g. ("add", "Animal"); the alternation is built from every Action value.
    _action_parser = re.compile("^(" + "|".join(str(a) for a in Action) + ")(.*)$")

    def __init__(self):
        # Maps operation name -> {selection name -> decoded argument dict}.
        self.decoded = {}

    def enter_operation_definition(self, node: OperationDefinitionNode, *_args):
        """For all operations, accumulate the query + arguments as python dictionaries"""
        for selection in node.selection_set.selections:
            if self.decoded.get(node.name.value):
                self.decoded[node.name.value][selection.name.value] = self._args_to_dict(selection.arguments)
            else:
                self.decoded[node.name.value] = {selection.name.value: self._args_to_dict(selection.arguments)}
        # Selections are fully recorded; no need to descend into children.
        return visitor.SKIP

    def _args_to_dict(self, args: List[ArgumentNode]):
        # Decode each GraphQL AST argument into a plain Python value.
        return {arg.name.value: graphql.value_from_ast_untyped(arg.value) for arg in args}

    @classmethod
    def parse_mutations(cls, mutations: List[str]):
        """Convert a list of mutation strings into dictionary types and provide them via an iterator

        This is a generator: each yielded dict has "__operation", "__typename"
        and "arguments" keys. A `mutation m` wrapper is added automatically if
        one is not already present.
        """
        for mut in mutations:
            mutation = mut if mut.strip().startswith("mutation m") else "mutation m {" + mut + "}"
            ast = graphql.parse(graphql.Source(mutation, "GraphQL request"))
            argument_decoder = cls()
            visitor.visit(ast, argument_decoder)
            for operation, arguments in argument_decoder.decoded["m"].items():
                op, user_type = cls._action_parser.match(operation).groups()
                yield {"__operation": op, "__typename": user_type, "arguments": arguments}
if __name__ == "__main__":
sample = [
"""
addAnimal(
id:"9cc720d4-623c-11eb-9c41-5391fa973328",
input: {
organization_id: "6fe94056-5bd4-11eb-a9fc-0bb70a7f9c77" ,
name: "bangu" ,
type: "dog" ,
sex: "female" ,
animal_description: "" ,
primary_color: "Black" ,
primary_color_group: "black" ,
additional_colors: [{name: "Blue" , group: "blue" }] ,
additional_color_groups_string: ["blue"]
}
) {
error
}""",
"""
addEvent(
id: "b4de7525-623b-11eb-a0cb-0db0d645b658"
input: {
animal_id: "b434d448-623b-11eb-afea-59074c0526d3",
organization_id: "6fe94056-5bd4-11eb-a9fc-0bb70a7f9c77",
timestamp: 1611929411261,
node_created: "Node-2",
type: "intake",
nested: {thing: ["intake"]},
sub_type: "Stray/OTC",
location_description: "",
three_legged: false,
tentacles: null,
address1: "",
address2: "",
city: "", state: "", zipcode: "", geo_location: [0.0, 1.0]}
) {
error
}
""",
]
out = MutationVisitor.parse_mutations(sample)
print(json.dumps(out, indent=2, sort_keys=True))
|
###
# Copyright (c) 2019-present, IBM Research
# Licensed under The MIT License [see LICENSE for details]
###
from typing import List, Any, Optional, TypeVar, Generic, Union
__all__ = ['ResultRow', 'ResultSet']
from typing import List, Any, Optional, TypeVar, Generic, Union
class ResultSetIterator(object):
    """Iterator over the rows stored in a :py:class:`ResultSet`."""

    def __init__(self, result_set):
        self._result_set = result_set
        self._index = 0

    def __next__(self):
        rows = self._result_set._result
        # Exhausted: signal the end of iteration.
        if self._index >= len(rows):
            raise StopIteration
        row = rows[self._index]
        self._index += 1
        return row
T = TypeVar('T')


class ResultRow(Generic[T]):
    """One row of a :py:class:`ResultSet`, addressable by column name or
    positional index."""

    def __init__(self, result_set: 'ResultSet', row: List[T]):
        self._result_set = result_set
        self._row = row

    def __getitem__(self, key: Union[str, int]) -> T:
        # String keys are resolved to an index via the parent result set's
        # key list; integers are used as positions directly.
        index = self._result_set._get_key_index(key) if isinstance(key, str) else key
        return self._row[index]

    def get_keys(self) -> List[str]:
        """Return the column names of the parent result set."""
        return self._result_set._keys

    def __len__(self):
        return len(self._row)
T2 = TypeVar('T2')


class ResultSet(Generic[T2]):
    """An ordered collection of result rows with optional column names."""

    def __init__(self, keys: Optional[List[str]] = None, result: Optional[List['ResultRow[T2]']] = None):
        self._keys = keys
        if result is None:
            # Fresh list per instance (never a shared default).
            result = list()
        self._result = result

    @classmethod
    def build(cls, row_matrix: List[List[T2]], keys: Optional[List[str]] = None) -> 'ResultSet[T2]':
        """Create a result set from a plain matrix of values."""
        instance = cls(keys)
        instance._result = [ResultRow[T2](instance, row) for row in row_matrix]
        return instance

    def _get_key_index(self, key) -> int:
        # Position of a column name within the key list.
        return self._keys.index(key)

    def __iter__(self):
        return ResultSetIterator(self)

    def __len__(self) -> int:
        return len(self._result)

    def __add__(self, other: 'ResultSet[T2]') -> 'ResultSet[T2]':
        """Return a NEW result set holding the rows of both operands.

        :raises ValueError: If the two sets' keys differ.
        """
        if self._keys != other._keys:
            raise ValueError(f'Result set keys should match. ({self._keys}) and ({other._keys})')
        # BUG FIX: the original passed `self._result.extend(other._result)`,
        # which mutates `self` in place and evaluates to None, so the returned
        # set was always empty. Concatenate into a new list instead.
        return ResultSet[T2](self._keys, self._result + other._result)
|
from os import chdir, system, remove, mkdir, rmdir
import os
import sys
directory = input("Directory: ")
chdir(directory)
def gettree(directoryfile):
    """Read a ``dir /B`` listing from *directoryfile* and return its entries.

    The temporary listing file itself and this script are filtered out of
    the result. BUG FIX: the original opened the file without closing it,
    leaking the handle; a ``with`` block closes it deterministically.
    """
    with open(directoryfile, 'r') as listing:
        entries = [line.replace('\n', '') for line in listing]
    self_name = os.path.basename(__file__)
    return [entry for entry in entries
            if entry != "023943022039.3402394023" and entry != self_name]
system("dir /B > 023943022039.3402394023")
tree = gettree('023943022039.3402394023')
remove("023943022039.3402394023")
toremove = []
print("Answer with [y]/[n] or [type] to each of these questions")
input("[ENTER]")
system('cls')
for FILE in tree:
removefile = input("Remove '" + FILE + "' ? ")
if removefile == "y" or removefile == "Y":
toremove.append(FILE)
elif removefile == "type" or removefile == "TYPE":
print()
try:
contents = open(FILE).read()
print(contents)
except:
print("[!] Error: Cannot read file")
print()
removefile = input("Remove '" + FILE + "' ? ")
if removefile == "y" or removefile == "Y":
toremove.append(FILE)
print("THE FOLLOWING FILES WILL BE DELETED FOREVER: ")
print(toremove)
proceed = input("Proceed? (y/n) ")
if proceed == "y" or "Y":
for f in toremove:
try:
remove(f)
except:
rmdir(f)
print("[*] Files removed successfully")
input("[EXIT] ")
sys.exit()
elif proceed == "n" or "N":
print("[!] Canceling.. No files have been changed")
input('[EXIT] ')
sys.exit()
|
from __future__ import print_function
import sys
import click
import abc
from functools import partial, wraps
# True when running under legacy Python 2.
PY2 = sys.version_info[0] == 2
@click.pass_context
def args_getter(ctx, *args, **kwargs):
    """Return the parsed parameters of the current click context."""
    return ctx.params
def with_cli_args(func):
    '''
    A decorator helping with using click with standalone_mode turned off.

    Builds a throwaway click command that reuses the click parameters declared
    on ``func`` (``func.__click_params__``), parses the command line with extra
    and unknown options ignored, and injects the parsed values as keyword
    arguments into every call of ``func``.
    '''
    # Command used only for parsing; args_getter returns ctx.params.
    getter_cmd = click.command(context_settings={
        'allow_extra_args': True,
        'ignore_unknown_options': True,
    })(args_getter)
    # Reuse the click option/argument declarations of the wrapped function.
    getter_cmd.params.extend(func.__click_params__)
    @wraps(func)
    def wrapper(*args, **kwargs):
        kwargs.update(
            getter_cmd(standalone_mode=False)
        )
        return func(*args, **kwargs)
    return wrapper
class class_property(object):
    """
    Read-only property evaluated against the class rather than the instance.
    """

    def __init__(self, func):
        self.func = func

    def __get__(self, obj, cls):
        # The instance (obj) is deliberately ignored: the wrapped function
        # always receives the owning class.
        return self.func(cls)
def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""
    def wrapper(cls):
        body = dict(cls.__dict__)
        # Slot descriptors are created by the new class itself, so the
        # originals must not be carried over.
        slots = body.get('__slots__')
        if slots is not None:
            slot_names = [slots] if isinstance(slots, str) else slots
            for slot_name in slot_names:
                body.pop(slot_name)
        body.pop('__dict__', None)
        body.pop('__weakref__', None)
        # Re-create the class under the requested metaclass.
        return metaclass(cls.__name__, cls.__bases__, body)
    return wrapper
@add_metaclass(abc.ABCMeta)
class ContextManager(object):
    """ABC for classes implementing the context-manager protocol.

    isinstance/issubclass checks accept any class whose MRO provides both
    ``__enter__`` and ``__exit__``.
    """
    # Taken from Python 3.6 (contextlib).
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # Returning None means exceptions are never suppressed.
        return None
    @classmethod
    def __subclasshook__(cls, C):
        # Structural check: only applied for ContextManager itself, so real
        # subclasses keep normal subclass semantics.
        if cls is ContextManager:
            if (any("__enter__" in B.__dict__ for B in C.__mro__) and
                any("__exit__" in B.__dict__ for B in C.__mro__)):
                return True
        return NotImplemented
class ValidationError(Exception):
    """Raised when validation fails."""
    pass
def no_op(*args, **kwargs):
    """Do nothing; a placeholder wherever a callback is required.

    Generalized (backward-compatibly) to also accept keyword arguments.
    """
class merge_attrs(object):
    """Read-only attribute view over several objects.

    Attribute lookups try each object in order and return the first match.
    """

    def __init__(self, *objects):
        self.objects = objects

    def __getattr__(self, key):
        for obj in self.objects:
            try:
                return getattr(obj, key)
            except AttributeError:
                pass
        # Include the missing name (the original raised a bare
        # AttributeError, which makes failures hard to diagnose).
        raise AttributeError(key)
class merge_dicts(object):
    """Read-only mapping view over several dict-like objects.

    Item lookups try each mapping in order and return the first match.
    """

    def __init__(self, *objects):
        self.objects = objects

    def __getitem__(self, key):
        for obj in self.objects:
            try:
                return obj[key]
            except KeyError:
                pass
        # Include the missing key (the original raised a bare KeyError,
        # which makes failures hard to diagnose).
        raise KeyError(key)
class drop_into_debugger(object):
    """Context manager that opens a post-mortem debugger on any exception.

    Prefers ipdb when installed, falling back to the stdlib pdb. The
    exception is NOT suppressed (``__exit__`` returns None).
    """
    def __enter__(self):
        pass
    def __exit__(self, e, m, tb):
        # No exception: nothing to do.
        if not e:
            return
        try:
            import ipdb as pdb
        except ImportError:
            import pdb
        import sys
        # Show the exception before dropping into the debugger.
        print(m.__repr__(), file=sys.stderr)
        pdb.post_mortem(tb)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 26 10:34:07 2020
@author: val
"""
################################################################ GLOBAL PARAMETERS
workingDirectory = "/home/val/Documents/NFWF_Files/2020_Analysis/" # Note Bene: This MUST be set IN .py file BEFORE imports
modelDeltaSecs = 10 # interval between successive time steps
tortDamping = 6.0  # damping factor — units/semantics not shown here; TODO confirm
minNumWhaleObs = 4 # need at least this many whale observations to constitute a passby
maxObsGapMins = 15 # any larger gap between whale sitings will start a new whale passby
maxPriorOrPostMinsForBoats = 5 # accept any boat obs from 5 min before focal animal's first fix to 5 min after last one
whaleCVSfileName = "csvFiles/2003_2005_whalePassby.csv"
boatCVSfileName = "csvFiles/2003_2005_boatPassby.csv"
# NOTE(review): duplicate assignment — this silently overrides the value 4 set
# a few lines above, so only 5 ever takes effect. Remove one of the two.
minNumWhaleObs = 5 # don't save passby with less than 5 whale observations
ff_fileName = "csvFiles/Whale Activity 2003-2005_EA quality check_FINAL_all_sorted_tabbed.csv"
boatFileName = "csvFiles/All 2003-2005 boat data_EA Quality_Final_sorted_tabbed.csv" # Note Bene MAKE SURE csv FILE SAVED as utf-8 8 bit format
# !!!!!!!!!!!!!! And, delete first two characters if needed
theoTracks_2019_FileName = "csvFiles/FinalTheoTracks_SRKW2019_15April2020_NoErrors.csv" # 2019 is whale tracks AND (a small number of) boat observations
spreadingLaw = -18  # presumably transmission-loss slope in dB per distance decade; TODO confirm
backgroundNoiseLevel = 50
condenseIfDtLessThan = 10 # any obs of same target closer together than this parameter are averaged or one is deleted
# 15 secs produces 27 boats
# 10 secs produces 12 boats
backgroundDb = 100
clickSourceLevel = 170  # presumably dB source level of an echolocation click; confirm reference units
clickOutwardSpreading = -10
targetCrossSection = 20
callSourceLevel = 150
# parameters for plotting noise from individual boats
boat_dBlevels = [130, 120, 110, backgroundDb] # change colors for display at ranges where these spreading source levels fall
|
def obj_to_dict(obj, d=None):
    """Copy *obj*'s instance attributes into a dict.

    :param d: Optional existing dict to update; any falsy value (None or {})
        results in a fresh dict, matching the original behaviour.
    :returns: The dict holding ``vars(obj)`` (plus any prior contents of *d*).
    """
    data = d or {}
    # Single bulk update instead of the original per-key update loop.
    data.update(vars(obj))
    return data


def to_dict(obj):
    """Convert an object, a list of objects, or a dict of objects into
    plain dict(s) of their instance attributes."""
    if isinstance(obj, list):
        # assumes a list of objects
        return [obj_to_dict(item) for item in obj]
    if isinstance(obj, dict):
        # assumes dict containing values that are objects.
        return {k: obj_to_dict(v) for k, v in obj.items()}
    # Anything else is treated as a plain object (the original's final
    # `isinstance(obj, object)` check was always true, i.e. dead).
    return obj_to_dict(obj)
|
# This file is part of PeachPy package and is licensed under the Simplified BSD license.
# See license.rst for the full text of the license.
from peachpy.abi import ABI
from peachpy.abi import Endianness
from peachpy.formats.elf.file import ElfClass, MachineType, DataEncoding
from peachpy.arm.registers import r0, r1, r2, r3, r4, r5, r6, r7, r8, r9, r10, r11, r12, \
d0, d1, d2, d3, d4, d5, d6, d7, d8, d9, d10, d11, d12, d13, d14, d15, \
d16, d17, d18, d19, d20, d21, d22, d23, d24, d25, d26, d27, d28, d29, d30, d31
# GNU soft-float ARM EABI (gnueabi).
# NOTE(review): the register lists below are identical to the hard-float
# variant; presumably intentional at this level of description (only
# preservation/volatility is modelled for the d-registers here), but worth
# confirming against the AAPCS.
arm_gnueabi = ABI("GNU Soft-Float ARM EABI", endianness=Endianness.Little,
                  bool_size=1, wchar_size=2, short_size=2, int_size=4, long_size=4, longlong_size=8,
                  pointer_size=4, index_size=4,
                  stack_alignment=8, red_zone=0,
                  # r4-r11 and d8-d15 must be preserved across calls.
                  callee_save_registers=[r4, r5, r6, r7, r8, r9, r10, r11,
                                         d8, d9, d10, d11, d12, d13, d14, d15],
                  argument_registers=[r0, r1, r2, r3],
                  volatile_registers=[r12,
                                      d0, d1, d2, d3, d4, d5, d6, d7,
                                      d16, d17, d18, d19, d20, d21, d22, d23,
                                      d24, d25, d26, d27, d28, d29, d30, d31],
                  elf_class=ElfClass.class32,
                  elf_data_encoding=DataEncoding.little_endian,
                  elf_machine_type=MachineType.arm)
# GNU hard-float ARM EABI (gnueabihf); differs from the soft-float entry
# above only in its display name (see NOTE above).
arm_gnueabihf = ABI("GNU Hard-Float ARM EABI", endianness=Endianness.Little,
                    bool_size=1, wchar_size=2, short_size=2, int_size=4, long_size=4, longlong_size=8,
                    pointer_size=4, index_size=4,
                    stack_alignment=8, red_zone=0,
                    callee_save_registers=[r4, r5, r6, r7, r8, r9, r10, r11,
                                           d8, d9, d10, d11, d12, d13, d14, d15],
                    argument_registers=[r0, r1, r2, r3],
                    volatile_registers=[r12,
                                        d0, d1, d2, d3, d4, d5, d6, d7,
                                        d16, d17, d18, d19, d20, d21, d22, d23,
                                        d24, d25, d26, d27, d28, d29, d30, d31],
                    elf_class=ElfClass.class32,
                    elf_data_encoding=DataEncoding.little_endian,
                    elf_machine_type=MachineType.arm)
|
import json
import os
import sys
from indy import pool as indy_pool
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(os.path.abspath(ROOT_DIR+'/identityLayer'))
sys.path.append(os.path.abspath(ROOT_DIR))
from identityLayer import identitylayer_pb2
from identityLayer import identitylayer_pb2_grpc
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
import logging
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)
def get_value(value, value_type=None):
    """
    Return *value*, or ``None`` when it equals its protobuf default.

    For `strings`, the default value is the empty string.
    For `bytes`, the default value is empty bytes.
    For `bools`, the default value is false.
    For `numeric` types, the default value is zero.
    For `enums`, the default value is the first defined enum value, which must be 0.
    For message fields, the field is not set. Its exact value is language-dependent. See the generated code guide for details.
    The default value for `repeated` fields is empty (generally an empty list in the appropriate language).

    ``value_type`` is accepted for backward compatibility but unused.
    """
    # string check
    if isinstance(value, str):
        return value if value else None
    # numeric check (bool is a subclass of int, so False == 0 -> None,
    # matching the original behaviour)
    if isinstance(value, int):
        return value if value != 0 else None
    # BUG FIX: the original fell through and returned None for EVERY other
    # type, so non-empty repeated/bytes/float fields were silently dropped.
    # Per the docstring, only proto defaults (empty/zero/None) map to None.
    return value if value else None
class PoolServiceServicer(identitylayer_pb2_grpc.PoolServiceServicer):
    """gRPC servicer exposing libindy `pool` operations.

    Every RPC follows the same shape: unpack the request via ``get_value``,
    await the corresponding ``indy_pool`` coroutine, and wrap the result in
    the matching response message. On any exception the error is logged and
    a response built from ``resp = None`` is returned instead of a gRPC
    error status.

    NOTE(review): ``logger.error(e)`` drops the traceback; using
    ``logger.exception(...)`` inside the ``except`` blocks would preserve it.
    """
    async def CreatePoolLedgerConfig(self, request, context):
        """Create Pool Ledger Config
        """
        resp = None
        try:
            config_name = get_value(request.ConfigName)
            config = json.dumps({"genesis_txn": get_value(request.Config.GensisTxn)})
            resp = await indy_pool.create_pool_ledger_config(config_name, config)
            return identitylayer_pb2.CreatePoolLedgerConfigResponse(resp)
        except Exception as e:
            logger.error("Exception Occurred @ CreatePoolLedgerConfig ------")
            logger.error(e)
            return identitylayer_pb2.CreatePoolLedgerConfigResponse(resp)
    async def OpenPoolLedger(self, request, context):
        """Open Pool Ledger
        """
        resp = None
        try:
            config_name = get_value(request.ConfigName)
            config = json.dumps({"timeout": get_value(request.Config.Timeout),
                                 "extended_timeout":get_value(request.Config.ExtendedTimeour),
                                 "preordered_nodes":get_value(request.Config.PreorderedNodes)})
            resp = await indy_pool.open_pool_ledger(config_name, config)
            return identitylayer_pb2.OpenPoolLedgerResponse(resp)
        except Exception as e:
            logger.error("Exception Occurred @ OpenPoolLedger------")
            logger.error(e)
            return identitylayer_pb2.OpenPoolLedgerResponse(resp)
    async def RefreshPoolLedger(self, request, context):
        """Refresh Pool Ledger
        """
        resp = None
        try:
            handle = get_value(request.Handle)
            resp = await indy_pool.refresh_pool_ledger(handle)
            return identitylayer_pb2.RefreshPoolLedgerResponse(resp)
        except Exception as e:
            logger.error("Exception Occurred @ RefreshPoolLedger------")
            logger.error(e)
            return identitylayer_pb2.RefreshPoolLedgerResponse(resp)
    async def ListPools(self, request, context):
        """List Pools
        """
        resp = None
        try:
            resp = await indy_pool.list_pools()
            return identitylayer_pb2.ListPoolsResponse(resp)
        except Exception as e:
            logger.error("Exception Occurred @ ListPools------")
            logger.error(e)
            return identitylayer_pb2.ListPoolsResponse(resp)
    async def ClosePoolLedger(self, request, context):
        """Close Pool Ledger
        """
        resp = None
        try:
            handle = get_value(request.Handle)
            resp = await indy_pool.close_pool_ledger(handle)
            return identitylayer_pb2.ClosePoolLedgerResponse(resp)
        except Exception as e:
            logger.error("Exception Occurred @ ClosePoolLedger------")
            logger.error(e)
            return identitylayer_pb2.ClosePoolLedgerResponse(resp)
    async def DeletePoolLedgerConfig(self, request, context):
        """Delete Pool Ledger Config
        """
        resp = None
        try:
            config_name = get_value(request.ConfigName)
            resp = await indy_pool.delete_pool_ledger_config(config_name)
            return identitylayer_pb2.DeletePoolLedgerConfigResponse(resp)
        except Exception as e:
            logger.error("Exception Occurred @ DeletePoolLedgerConfig------")
            logger.error(e)
            return identitylayer_pb2.DeletePoolLedgerConfigResponse(resp)
    async def SetProtocolVersion(self, request, context):
        """Set Protocol Version
        """
        resp = None
        try:
            protocol_version = get_value(request.ProtocolVersion)
            resp = await indy_pool.set_protocol_version(protocol_version)
            return identitylayer_pb2.SetProtocolVersionResponse(resp)
        except Exception as e:
            logger.error("Exception Occurred @ SetProtocolVersion------")
            logger.error(e)
            return identitylayer_pb2.SetProtocolVersionResponse(resp)
|
from discord.ext import commands
class Listen(commands.Cog):
    """
    Bot Voice Channel Commands
    """

    def __init__(self, bot):
        self.bot = bot

    @commands.command(help="Bot joins voice channel")
    async def join(self, ctx) -> None:
        """
        bot joins the voice channel

        NOTE(review): presumably fails with AttributeError when the invoking
        user is not in a voice channel (ctx.author.voice is None) — consider
        guarding; verify against discord.py behaviour.
        """
        channel = ctx.author.voice.channel
        await channel.connect()

    @commands.command(help="Bot leaves voice channel")
    async def leave(self, ctx) -> None:
        """
        bot leaves voice channel

        NOTE(review): ctx.voice_client is presumably None when the bot is not
        connected; confirm callers only invoke this after `join`.
        """
        await ctx.voice_client.disconnect()
def setup(bot):
    """Register the Listen cog on *bot* (extension entry point)."""
    bot.add_cog(Listen(bot))
|
import os
import streamlit.components.v1 as components
# Set to False while developing against the local frontend dev server.
_RELEASE = True
if not _RELEASE:
    # Development: component is served by the webpack dev server.
    _component_func = components.declare_component(
        "click_detector", url="http://localhost:3001",
    )
else:
    # Release: serve the pre-built frontend bundled next to this file.
    parent_dir = os.path.dirname(os.path.abspath(__file__))
    build_dir = os.path.join(parent_dir, "frontend/build")
    _component_func = components.declare_component("st_click_detector", path=build_dir)
def click_detector(html_content, key=None):
    """Display HTML content and detect when links are clicked on.

    Parameters
    ----------
    html_content: str
        Content to display and from which clicks should be detected
    key: str or None
        Optional Streamlit widget key identifying this component instance

    Returns
    -------
    str
        The id of the last link clicked on (or "" before any click)
    """
    component_value = _component_func(html_content=html_content, key=key, default="",)
    return component_value
# Manual test app, only run in development mode against the dev server.
if not _RELEASE:
    import streamlit as st

    content = """<p><a href='#' id='Link 1'>First link</a></p>
    <p><a href='#' id='Link 2'>Second link</a></p>
    <a href='#' id='Image 1'><img width='20%' src='https://images.unsplash.com/photo-1565130838609-c3a86655db61?w=200'></a>
    <a href='#' id='Image 2'><img width='20%' src='https://images.unsplash.com/photo-1565372195458-9de0b320ef04?w=200'></a>
    """
    clicked = click_detector(content)
    # Echo which element was clicked last (empty string means no click yet).
    st.markdown(
        f"<p><b>{clicked} clicked</b></p>"
        if clicked != ""
        else "<p><b>No click</b></p>",
        unsafe_allow_html=True,
    )
|
import pickle
import os
import pandas as pd
import pathlib
from BFAIR.mfa.INCA import INCA_input_parser
from BFAIR.atom_mapping import (MolfileDownloader,
write_rxn_files,
obtain_atom_mappings,
parse_reaction_mappings,
parse_metabolite_mappings,
generate_INCA_mapping_input,
clean_output,
)
# Run relative to this file's directory so bundled data files
# (e_coli_core.json, etc.) resolve; the original cwd is restored at the end.
original_wd = os.getcwd()
current_dir = str(pathlib.Path(__file__).parent.absolute())
os.chdir(current_dir)
# Load e_coli_core model
model, reaction_data, metabolite_data = INCA_input_parser.parse_cobra_model(
    'e_coli_core.json', 'e_coli_core', '2021-07-15')
# Subset handpicked reactions.
# NOTE: DataFrame.append was deprecated in pandas 1.4 and removed in 2.0;
# build the subsets with pd.concat over boolean-indexed slices instead.
# Row order matches the original append sequence.
_picked_rxn_ids = ['PFK', 'BIOMASS_Ecoli_core_w_GAM', 'EX_pyr_e', 'ICL']
model_reaction_df = pd.concat(
    [reaction_data[reaction_data['rxn_id'] == rxn_id]
     for rxn_id in _picked_rxn_ids])
# And metabolites from these reactions
_picked_met_ids = ['atp_c', 'f6p_c', 'adp_c', 'fdp_c', 'h_c',
                   'pyr_e', 'icit_c', 'succ_c', 'glx_c']
model_metabolite_df = pd.concat(
    [metabolite_data[metabolite_data['met_id'] == met_id]
     for met_id in _picked_met_ids])
# Obtain all required files for the atom-mapping test fixture.
# Metabolite Molfiles
downloader = MolfileDownloader(model_metabolite_df)
downloader.generate_molfile_database()
# Rxn files
write_rxn_files(model_reaction_df)
# Mapped Rxn files
obtain_atom_mappings()
# Parsed dataframes of mappings
reaction_df = parse_reaction_mappings()
metabolite_df = parse_metabolite_mappings()
# CSV outputs of these dataframes
generate_INCA_mapping_input(reaction_df, metabolite_df)
# Load all the generated files in Python
# Molfiles in a single list.
# All numerics converted to floats.
metabolites = os.listdir('metabolites')
for i, molfile in enumerate(metabolites):
    with open(f'metabolites/{molfile}', 'r') as f:
        metabolites[i] = f.readlines()
    for j, met in enumerate(metabolites[i]):
        metabolites[i][j] = metabolites[i][j].split()
        for k, val in enumerate(metabolites[i][j]):
            try:
                metabolites[i][j][k] = float(val)
            except ValueError:
                # Non-numeric token (e.g. an element symbol): keep it as str.
                # (float() raises ValueError here; the original
                # `except BaseException` also hid real bugs.)
                pass
# Rxn files in a single list.
# All numerics converted to floats.
unmapped_rxns = os.listdir('unmappedRxns')
for i, rxn_file in enumerate(unmapped_rxns):
    with open(f'unmappedRxns/{rxn_file}', 'r') as f:
        unmapped_rxns[i] = f.readlines()
    for j, line in enumerate(unmapped_rxns[i]):
        unmapped_rxns[i][j] = unmapped_rxns[i][j].split()
        for k, val in enumerate(unmapped_rxns[i][j]):
            try:
                unmapped_rxns[i][j][k] = float(val)
            except ValueError:
                # float() only raises ValueError for non-numeric strings;
                # narrowed from the over-broad `except BaseException`.
                pass
# Mapped Rxn files in a single list
mapped_rxns = os.listdir('mappedRxns/rxnFiles')
for idx, rxn_file in enumerate(mapped_rxns):
    with open(f'mappedRxns/rxnFiles/{rxn_file}', 'r') as handle:
        file_lines = handle.readlines()
    # Keep only the rows carrying atom mappings (15 or 16 columns).
    mapped_rxns[idx] = [ln.split() for ln in file_lines
                        if len(ln.split()) in (15, 16)]
# CSV outputs of parsed mapping data
reaction_df_csv = pd.read_csv('MappingReactions.csv')
metabolite_df_csv = pd.read_csv('MappingMetabolites.csv')
# Pickle all the variables. The context manager guarantees the file is
# closed even if pickling raises (the manual close() could be skipped).
with open("test_data.obj", "wb") as filehandler:
    pickle.dump(
        [
            metabolites,
            unmapped_rxns,
            mapped_rxns,
            reaction_df,
            metabolite_df,
            reaction_df_csv,
            metabolite_df_csv,
            model_reaction_df,
            model_metabolite_df,
        ],
        filehandler
    )
# Remove intermediate files and restore the original working directory.
clean_output()
os.chdir(original_wd)
|
"""Schema for the main application."""
import graphene
import blog.schema
# Tuple of all query mixin classes; Query below inherits from every entry.
QUERIES = (
    # Place all future query classes here.
    blog.schema.Query,
)


class Query(*QUERIES):
    """Top level query class that inherits from all others."""
    pass


# The executable GraphQL schema exposed by the application.
schema = graphene.Schema(query=Query)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import (
setup,
find_packages,
)
# Optional dependency groups, installable as e.g. `pip install cthaeh[test]`.
extras_require = {
    'test': [
        "factory-boy==2.12.0",
        "hypothesis==5.10.4",
        "pytest==5.4.1",
        "pytest-xdist==1.31.0",
        "pytest-trio==0.5.2",
        "tox==3.14.6",
        # This doesn't actually result in a correct install of the testing
        # requirements so instead we supply the exact eth-tester requirement
        # manually.
        # "web3[tester]",
        "eth-tester[py-evm]==v0.2.0-beta.2",
    ],
    'lint': [
        'black>=18.6b4,<19',
        "flake8==3.7.9",
        "flake8-bugbear==20.1.4",
        "isort>=4.3.18,<5",
        "mypy==0.770",
        "pydocstyle>=3.0.0,<4",
    ],
    'doc': [
        "Sphinx>=1.6.5,<2",
        "sphinx_rtd_theme>=0.1.9",
        "towncrier>=19.2.0, <20",
    ],
    'dev': [
        "bumpversion>=0.5.3,<1",
        "pytest-watch>=4.1.0,<5",
        "wheel",
        "twine",
        "ipython",
    ],
    'postgres': [
        "psycopg2==2.8.5",
    ],
}
# 'dev' is a superset: dev tools plus the test, lint, and doc groups.
extras_require['dev'] = (
    extras_require['dev'] +  # noqa: W504
    extras_require['test'] +  # noqa: W504
    extras_require['lint'] +  # noqa: W504
    extras_require['doc']
)
# Long description for PyPI is taken verbatim from the README.
with open('./README.md') as readme:
    long_description = readme.read()

setup(
    name='cthaeh',
    # *IMPORTANT*: Don't manually change the version here. Use `make bump`, as described in readme
    version='0.1.0-alpha.0',
    description="""Stand alone application for serving the Ethereum JSON-RPC logging APIs""",
    long_description=long_description,
    long_description_content_type='text/markdown',
    author='The Ethereum Foundation',
    author_email='snakecharmers@ethereum.org',
    url='https://github.com/ethereum/cthaeh',
    include_package_data=True,
    install_requires=[
        "async-service==0.1.0a7",
        "eth-typing==2.2.1",
        "eth-utils>=1,<2",
        "SQLAlchemy==1.3.16",
        "sqlalchemy-stubs==0.3",
        "trio==0.13.0",
        "trio-typing==0.3.0",
        "web3==5.7.0",
    ],
    python_requires='>=3.6, <4',
    extras_require=extras_require,
    py_modules=['cthaeh'],
    license="MIT",
    zip_safe=False,
    keywords='ethereum',
    packages=find_packages(exclude=["tests", "tests.*"]),
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.7',
    ],
    entry_points={
        # `cthaeh` console command boots the application.
        'console_scripts': [
            'cthaeh=cthaeh._boot:_boot',
        ],
    },
)
|
import re
from pathlib import Path
from urllib.request import urlopen
from yaml import load, add_constructor, Loader
from foliant.config.base import BaseParser
class Parser(BaseParser):
    """Foliant config extension that resolves the custom ``!include`` YAML tag."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Registers the handler globally on PyYAML's default Loader.
        add_constructor('!include', self._resolve_include_tag)
        self.logger = self.logger.getChild('yaml_include')
        self.logger.debug(f'Extension inited: {self.__dict__}')

    def _resolve_include_tag(self, _, node) -> str:
        '''Replace value after ``!include`` with the content of the referenced file.

        Supported forms: ``path`` (whole document) or ``path#section``
        (one top-level key of the included document).
        '''
        self.logger.debug('Start resolving !include tag')
        parts = node.value.split('#')
        if len(parts) == 1:
            path_ = parts[0]
            include_content = self._get_file_or_url_content(path_)
            return load(include_content, Loader)
        elif len(parts) == 2:
            path_, section = parts[0], parts[1]
            include_content = self._get_file_or_url_content(path_)
            return load(include_content, Loader)[section]
        else:
            raise ValueError('Invalid include syntax')

    def _get_file_or_url_content(self, path_: str) -> 'str | bytes':
        """
        Determine whether path_ is a path to local file or url. And return its content.

        URLs are fetched with urllib and returned as bytes; local paths are
        resolved relative to the project path and read as UTF-8 text (str).
        """
        link_pattern = re.compile(r'https?://\S+')
        if link_pattern.search(path_):  # path_ is a URL
            self.logger.debug(f'Getting included content from the link {path_}')
            result = urlopen(path_).read()
        else:
            included_file_path = self.project_path / Path(path_).expanduser()
            self.logger.debug(f'Getting included content from the file {included_file_path}')
            with open(included_file_path, encoding='utf8') as f:
                result = f.read()
        return result
|
"""Configurations of Transformer model
"""
import copy
import texar.torch as tx
random_seed = 1234
beam_width = 5          # beam-search width used at decode time
length_penalty = 0.6
hidden_dim = 512        # model width shared by embedding, encoder, decoder

# Token embedding: lookup table with scaled normal init (std = hidden_dim**-0.5).
emb = {
    "name": "lookup_table",
    "dim": hidden_dim,
    "initializer": {
        "type": "normal_",
        "kwargs": {"mean": 0.0, "std": hidden_dim ** -0.5},
    },
}
position_embedder_hparams = {"dim": hidden_dim}

# 6-block Transformer encoder with 8-head attention.
encoder = {
    "dim": hidden_dim,
    "num_blocks": 6,
    "multihead_attention": {
        "num_heads": 8,
        "output_dim": hidden_dim
        # See documentation for more optional hyperparameters
    },
    "initializer": {
        "type": "variance_scaling_initializer",
        "kwargs": {"factor": 1.0, "mode": "FAN_AVG", "uniform": True},
    },
    "poswise_feedforward": tx.modules.default_transformer_poswise_net_hparams(
        input_dim=hidden_dim,
        output_dim=hidden_dim
    ),
}

# Decoder mirrors the encoder configuration exactly.
decoder = copy.deepcopy(encoder)

# presumably the label-smoothing target confidence -- confirm in training code
loss_label_confidence = 0.9

opt = {
    "optimizer": {
        "type": "Adam",
        "kwargs": {"beta1": 0.9, "beta2": 0.997, "epsilon": 1e-9},
    }
}
lr_config = {
    "learning_rate_schedule": "constant.linear_warmup.rsqrt_decay.rsqrt_depth",
    "lr_constant": 2 * (hidden_dim ** -0.5),
    "static_lr": 1e-3,
    "warmup_steps": 16000,
}
|
#!/usr/bin/env python
"""
Usage: python get_syscalls.py <all | specific architectures...>
Generate the "source/mir/linux/arch/<arch>/uapi/_asm/unistd.di' files by
this command in the containing folder. You will need to be connected
to the internet. Architecture names are written exactly as corresponding
version specifiers in the D programming language (see
https://dlang.org/spec/version.html).
If you get an error message like:
ModuleNotFoundError: No module named 'urllib2'
then "pip install urllib2".
Report bugs at https://github.com/libmir/mir-linux-kernel/issues
"""
from __future__ import print_function
import os, sys
import urllib2, re
import datetime
from io import BytesIO
from gzip import GzipFile
# Base URL of the kernel's arch/ tree on GitHub's raw endpoint.
LINUX_ARCH_URL = "https://raw.githubusercontent.com/torvalds/linux/master/arch/"

syscall_table_urls = {
    # The format of the remote tables is:
    # <number> <abi> <name> <entry point> <compat entry point>
    # The format of this dict is:
    # "Architecture": ["path/from/arch/to/the.tbl", ["accepted ABI", "more accepted ABIs"], <NR offset>]
    # <NR offset> can be omitted in which case it is treated as 0.
    "X86": ["x86/entry/syscalls/syscall_32.tbl", ["i386"]],
    "X86_64": ["x86/entry/syscalls/syscall_64.tbl", ["common","64","x32"]], # x32 ABI is handled specially.
    "ARM": ["arm/tools/syscall.tbl", ["common", "oabi"]],
    "Alpha": ["alpha/kernel/syscalls/syscall.tbl", ["common"]],
    "HPPA": ["parisc/kernel/syscalls/syscall.tbl", ["common", "32"]],
    "HPPA64": ["parisc/kernel/syscalls/syscall.tbl", ["common", "64"]],
    "IA64": ["ia64/kernel/syscalls/syscall.tbl", ["common"], 1024],
    "MIPS_N32": ["mips/kernel/syscalls/syscall_n32.tbl", ["n32"], 6000],
    "MIPS_O32": ["mips/kernel/syscalls/syscall_o32.tbl", ["o32"], 4000],
    "MIPS64": ["mips/kernel/syscalls/syscall_n64.tbl", ["n64"], 5000],
    "PPC": ["powerpc/kernel/syscalls/syscall.tbl", ["common", "32", "nospu"]],
    "PPC64": ["powerpc/kernel/syscalls/syscall.tbl", ["common", "64", "nospu"]],
    "S390": ["s390/kernel/syscalls/syscall.tbl", ["common", "32"]],
    "SH": ["sh/kernel/syscalls/syscall.tbl", ["common"]],
    "SPARC": ["sparc/kernel/syscalls/syscall.tbl", ["common", "32"]],
    "SPARC64": ["sparc/kernel/syscalls/syscall.tbl", ["common", "64"]],
    "SystemZ": ["s390/kernel/syscalls/syscall.tbl", ["common", "64"]],
}

# Architectures with no .tbl file: their numbers come from asm-generic
# unistd.h plus the per-arch header listed here.
uses_asm_generic_unistd = {
    # Value is header that may include additional defines.
    "AArch64": "arm64/include/uapi/asm/unistd.h", # Currently no extra defines.
    "RISCV32": "riscv/include/uapi/asm/unistd.h", # NR_riscv_flush_icache
    "RISCV64": "riscv/include/uapi/asm/unistd.h", # ditto
}
def read_url(url, headers=None):
    """Fetch *url* and return the body, transparently gunzipping it.

    gzip encoding is requested unless the caller already set
    Accept-Encoding. On any error the URL is printed and the exception
    re-raised; the response is always closed.
    """
    if headers is None:
        headers = {}
    # Idiomatic membership test (was `not "Accept-Encoding" in headers`).
    if "Accept-Encoding" not in headers:
        headers["Accept-Encoding"] = "gzip"
    response = None
    try:
        response = urllib2.build_opener().open(
            urllib2.Request(url, headers=headers))
        if response.info().get("Content-Encoding") == "gzip":
            return GzipFile(fileobj=BytesIO(response.read())).read()
        else:
            return response.read()
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C is not intercepted here.
        print ("Error trying to read "+url)
        raise
    finally:
        if response is not None:
            response.close()
def iter_syscall_table(arch):
    """KeyError if syscall table is unavailable for arch"""
    accepted_abis = syscall_table_urls[arch][1]
    table_url = LINUX_ARCH_URL + syscall_table_urls[arch][0]
    for raw_line in read_url(table_url).splitlines():
        # Strip trailing comments and surrounding whitespace.
        entry = raw_line.partition('#')[0].strip()
        if not entry:
            continue
        fields = entry.split(None, 4)
        if fields[1] in accepted_abis:
            yield fields
def iter_unistd_h(path_from_arch):
    """Yield [NAME, VALUE] pairs for each #define in the given unistd.h."""
    text = read_url(LINUX_ARCH_URL+path_from_arch)
    # BUG FIX: re.sub's 4th positional argument is *count*, not *flags*.
    # Passing re.DOTALL (== 16) positionally capped the substitution at 16
    # matches and never actually enabled DOTALL, so multi-line C comments
    # were not removed. Pass it as flags=.
    text = re.sub(r"/[*].*?[*]/", "", text, flags=re.DOTALL)  # Remove C-style comments
    text = text.splitlines()
    for line in text:
        # Drop //-comments, then keep only what follows "#define".
        line = line.partition("//")[0].partition("#define")[2].strip()
        if len(line) > 0:
            yield line.split(None, 1)
def _yield_nr_defs_unistd_helper(lines):  # Each line is tuple
    """Translate (NAME, VALUE) #define pairs into D enum declarations.

    Rows that cannot be translated (e.g. a #define with no value) are
    emitted as '// omitted ...' comment lines instead.
    """
    for line in lines:
        try:
            if line[0].startswith("__NR"):
                yield "enum "+line[0].lstrip("_")+" = "+line[1].replace("__NR","NR")+";"
        except Exception:
            # Was a bare `except:`, which inside a generator also swallows
            # GeneratorExit and breaks generator close().
            yield "// omitted " + " ".join(line)
def yield_nr_defs(arch):
    """Yield D 'enum NR_*' declaration lines for *arch*.

    Sources, in order of preference: the arch's syscall .tbl file,
    asm-generic unistd.h (plus per-arch extras), or the arch's own
    unistd.h. Unparseable rows become '// omitted ...' comments.
    """
    if arch == "X86_64":
        # X86_64: has special handling for x32.
        for line in iter_syscall_table("X86_64"):
            try:
                if line[1] == "64":
                    yield "static if (size_t.sizeof == 8) enum NR_"+line[2]+" = "+str(int(line[0]))+";"
                elif line[1] == "x32":  # i.e. for x32 ABI
                    yield "version (D_X32) enum NR_"+line[2]+" = "+str(int(line[0]))+";"
                else:
                    yield "enum NR_"+line[2]+" = "+str(int(line[0]))+";"
            except Exception:
                # Narrowed from bare `except:` (would swallow GeneratorExit).
                yield "// omitted " + " ".join(line)
    elif arch in syscall_table_urls:
        nr_offset = 0 if len(syscall_table_urls[arch]) < 3 else syscall_table_urls[arch][2]
        for line in iter_syscall_table(arch):
            try:
                yield "enum NR_"+line[2]+" = "+str(nr_offset + int(line[0]))+";"
            except Exception:
                yield "// omitted " + " ".join(line)
    elif arch in uses_asm_generic_unistd:
        yield "public import mir.linux.arch.asm_generic.unistd;"
        for line in _yield_nr_defs_unistd_helper(iter_unistd_h(uses_asm_generic_unistd[arch])):
            yield line
    else:
        for line in _yield_nr_defs_unistd_helper(iter_unistd_h(arch.lower()+"/include/uapi/asm/unistd.h")):
            yield line
def write_nr_defs_file(arch):
    """Generate source/mir/linux/arch/<arch>/uapi/_asm/unistd.di; return the D module name."""
    # We might immediately get an web error.
    # Don't create an empty file / erase the existing file if so.
    lines = yield_nr_defs(arch)
    mname = "mir.linux.arch."+arch.lower()+".uapi._asm.unistd"
    fdir = "source/mir/linux/arch/"+arch.lower()+"/uapi/_asm"
    fpath = fdir + "/unistd.di"
    print ("Writing "+fpath)
    try:
        os.makedirs(fdir)
    except OSError:
        # Narrowed from bare `except:`. The directory probably already
        # exists; a genuine permission failure will surface on open() below.
        pass
    with open(fpath, "w") as f:
        f.write("/++\nAuto-generated Linux syscall constants for "+arch+"\n+/\nmodule ")
        f.write(mname+";\nversion(LDC) pragma(LDC_no_moduleinfo);\n\n")
        for line in lines:
            if line.startswith("// omitted "):
                # Surface skipped syscalls on the console as well.
                print(line)
            f.write(line)
            f.write('\n')
    return mname
if __name__ == "__main__":
    argv = sys.argv[1:]
    # "all" expands to every supported architecture.
    if len(argv) == 1 and argv[0].lower() == "all":
        argv = ["X86","X86_64","ARM","AArch64","SPARC","SPARC64","Alpha",
                "IA64","PPC","PPC64","SH","S390","SystemZ","HPPA","HPPA64",
                "MIPS_O32","MIPS_N32","MIPS64","RISCV32","RISCV64"]
    # No arguments, or anything flag-like: print usage and exit non-zero.
    if len(argv) == 0 or len([x for x in argv if x.startswith('-')]) > 0:
        print (__doc__)
        sys.exit(2)
    arch_modules = []
    for arg in argv:
        arch_modules.append((arg, write_nr_defs_file(arg)))
    # Emit the umbrella module that version-switches over the generated ones.
    mname = "mir.linux._asm.unistd"
    fdir = "source/mir/linux/_asm"
    fpath = fdir+"/unistd.di"
    try:
        os.makedirs(fdir)
    except:
        # Error can either mean failure or the directory
        # already existed.
        pass
    print ("Writing "+fpath)
    with open(fpath, "w") as f:
        f.write("/++\nAuto-generated Linux syscall constants\n+/\nmodule "+mname+";\n")
        f.write("version(LDC) pragma(LDC_no_moduleinfo);\n\n")
        first = True
        for (arch, mname) in arch_modules:
            if first:
                first = False
                f.write("version ("+arch+") public import "+mname+";\n")
            else:
                f.write("else version ("+arch+") public import "+mname+";\n")
        f.write("else pragma(msg, \"Linux syscall constants not known for target architecture!\");\n")
|
"""
This Codes is to tokenize the given strings into feed-able shapes of input.
"""
import spacy
from utils.tokenize import tokenize
from utils.misc.json import write_json_one_line
def do_tokenize(factual, id=0, label=None):
    """
    Perform Tokenize to make input data dict.
    In case you perform test, just set id and label arbitrarily.
    It does not affect the result.

    :param factual: text to tokenize
    :param id: identifier stored under "testid"
    :param label: list of label indices; defaults to [0]
    :return: dict with keys testid / features_content / labels_index / labels_num
    """
    # Mutable-default fix: `label=[0]` shared one list across all calls;
    # create a fresh default per call instead.
    if label is None:
        label = [0]
    # Load SpaCy model
    # NOTE(review): loading the model on every call is expensive -- consider
    # hoisting to module level if called repeatedly.
    nlp = spacy.load('en_core_web_sm')
    one_dict = dict()
    keys = ["testid",
            "features_content",
            "labels_index",
            "labels_num"]
    one_dict[keys[0]] = id  # id
    one_dict[keys[1]] = tokenize(nlp, factual)
    one_dict[keys[2]] = label
    one_dict[keys[3]] = len(label)
    return one_dict
if __name__ == "__main__":
    # Smoke test: tokenize one sentence and persist it as one-line JSON.
    factual_text = "Korea has banned importation of beef from United States!"
    data_input = do_tokenize(factual_text)
    print(data_input)
    # Write Out the input
    write_path = "test_data.json"
    write_json_one_line(write_path, data_input)
|
#!/usr/bin/env python
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
# @author Vegard Sjonfjell
import sys
import argparse
import copy
import os
import subprocess
import time
import shlex
def parse_arguments():
    """Parse CLI args: positional test-runner path, optional --chunks count (>= 1)."""
    argument_parser = argparse.ArgumentParser(description="Run Vespa cppunit tests in parallell")
    argument_parser.add_argument("testrunner", type=str, help="Test runner executable")
    argument_parser.add_argument("--chunks", type=int, help="Number of chunks", default=5)
    parsed = argument_parser.parse_args()
    if parsed.chunks < 1:
        raise RuntimeError("Error: Chunk size must be greater than 0")
    return parsed
def take(lst, n):
    """Pop and return *n* items from the END of *lst* (mutates lst).

    `range` replaces the Python-2-only `xrange`; in Python 2 `range`
    behaves identically here, and the function now also works on Python 3.
    """
    return [ lst.pop() for i in range(n) ]
def chunkify(lst, chunks):
    """Split *lst* into *chunks* equal groups (taken from the end), plus one
    surplus group holding any remainder. The input list is not modified.

    Fixes Python-3 compatibility: `/` would yield a float chunk size and
    `xrange` does not exist; `//` and `range` behave identically on Python 2.
    """
    lst = copy.copy(lst)
    chunk_size = len(lst) // chunks
    chunk_surplus = len(lst) % chunks
    # Pop from the end, exactly as the original take() helper did.
    result = [[lst.pop() for _ in range(chunk_size)] for _ in range(chunks)]
    if chunk_surplus:
        result.append(lst)
    return result
def error_if_file_not_found(function):
    # Decorator: exit with a friendly message when the wrapped call fails
    # because an executable is missing (ENOENT).
    def wrapper(*args, **kwargs):
        try:
            return function(*args, **kwargs)
        except OSError as e:
            if e.errno == os.errno.ENOENT: # "No such file or directory"
                # NOTE(review): `print >>` is Python-2-only syntax and
                # `os.errno` is a py2 import accident (the portable spelling
                # is the errno module) -- this script targets Python 2.
                print >>sys.stderr, "Error: could not find testrunner or valgrind executable"
                sys.exit(1)
            # NOTE(review): OSErrors with any other errno are swallowed and
            # wrapper returns None -- confirm this is intended.
    return wrapper
@error_if_file_not_found
def get_test_suites(testrunner):
    # Ask the test runner binary for its suite names, one per line.
    # NOTE(review): on Python 3 check_output returns bytes, so the "\n"
    # split would fail -- consistent with this script being Python 2.
    return subprocess.check_output((testrunner, "--list")).strip().split("\n")
class Process:
    # Thin wrapper around subprocess.Popen for one chunk of test suites.
    def __init__(self, cmd, group):
        self.group = group        # list of suite names this process runs
        self.finished = False     # set True by the main poll loop on exit code 0
        self.output = ""          # accumulated combined stdout+stderr text
        self.handle = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            # Runs in its own process group -- presumably to isolate signals
            # from the parent; confirm before changing.
            preexec_fn=os.setpgrp)
@error_if_file_not_found
def build_processes(test_groups):
    """Spawn one Process per suite group; wrap the runner in $VALGRIND when set."""
    valgrind = os.getenv("VALGRIND")
    base_cmd = [args.testrunner]
    if valgrind:
        base_cmd = shlex.split(valgrind) + base_cmd
    return [Process(base_cmd + group, group) for group in test_groups]
def cleanup_processes(processes):
    # Best-effort kill of every child. A child that already exited raises
    # ESRCH, which is expected and silently ignored.
    for proc in processes:
        try:
            proc.handle.kill()
        except OSError as e:
            if e.errno != os.errno.ESRCH: # "No such process"
                print >>sys.stderr, e.message
# Main driver: split the suites into chunks, run them in parallel, then
# poll until all succeed or one fails; children are always killed on exit.
args = parse_arguments()
test_suites = get_test_suites(args.testrunner)
test_suite_groups = chunkify(test_suites, args.chunks)
processes = build_processes(test_suite_groups)
print "Running %d test suites in %d parallel chunks with ~%d tests each" % (len(test_suites), len(test_suite_groups), len(test_suite_groups[0]))
processes_left = len(processes)
while True:
    try:
        for proc in processes:
            return_code = proc.handle.poll()
            # NOTE(review): stdout.read() blocks until the child closes its
            # pipe, so the children are effectively awaited in order.
            proc.output += proc.handle.stdout.read()
            if return_code == 0:
                # NOTE(review): already-finished processes are not skipped on
                # later passes of the while-loop, so processes_left can be
                # decremented twice for the same child -- verify.
                proc.finished = True
                processes_left -= 1
                if processes_left > 0:
                    print "%d test suite(s) left" % processes_left
                else:
                    print "All test suites ran successfully"
                    sys.exit(0)
            elif return_code is not None:
                # Non-zero exit: dump the failing chunk's output and bail.
                print "Error: one of '%s' test suites failed:" % ", ".join(proc.group)
                print >>sys.stderr, proc.output
                sys.exit(return_code)
        time.sleep(0.01)
    finally:
        cleanup_processes(processes)
|
# Run with
# rm match/no/*
# rm match/yes/*
# docker run -it -v $PWD:/app -w=/app valian/docker-python-opencv-ffmpeg python test3.py
# https://docs.opencv.org/3.0-beta/doc/py_tutorials/py_feature2d/py_sift_intro/py_sift_intro.html
import os.path
import glob
import sys
import numpy as np
import cv2
# Ratio-test threshold: a knn match is kept only when the best distance is
# below 0.7x the second-best (see match() below).
DISTANCE_FACTOR = 0.7
def match(img2_path, img1_path):
    """Count the SIFT keypoint matches between the two images that pass
    the DISTANCE_FACTOR ratio test."""
    query_rgb = cv2.imread(img1_path)   # queryImage
    train_rgb = cv2.imread(img2_path)   # trainImage
    query_gray = cv2.cvtColor(query_rgb, cv2.COLOR_BGR2GRAY)
    train_gray = cv2.cvtColor(train_rgb, cv2.COLOR_BGR2GRAY)
    # Initiate SIFT detector; compute keypoints and descriptors.
    sift = cv2.xfeatures2d.SIFT_create()
    _query_kp, query_desc = sift.detectAndCompute(query_gray, None)
    _train_kp, train_desc = sift.detectAndCompute(train_gray, None)
    # FLANN kd-tree matcher, two nearest neighbours per descriptor.
    FLANN_INDEX_KDTREE = 1
    matcher = cv2.FlannBasedMatcher(
        dict(algorithm=FLANN_INDEX_KDTREE, trees=5),
        dict(checks=50))
    candidate_pairs = matcher.knnMatch(query_desc, train_desc, k=2)
    return sum(1 for best, runner_up in candidate_pairs
               if best.distance < DISTANCE_FACTOR * runner_up.distance)
# ----------------------------------------------------
# CLI: argv[1] = page image (.png) file or directory,
#      argv[2] = logo image (.jpg) file or directory.
pages_file_or_dir=sys.argv[1]
logos_file_or_dir=sys.argv[2]
if os.path.isdir(pages_file_or_dir):
    pages=glob.glob(pages_file_or_dir + "/*.png")
else:
    pages=[pages_file_or_dir]
if os.path.isdir(logos_file_or_dir):
    logos=glob.glob(logos_file_or_dir + "/*.jpg")
else:
    logos=[logos_file_or_dir]
# Tabular output only when more than one comparison is made.
verbose=(len(pages)>1 or len(logos)>1)
for page in pages:
    pname=os.path.splitext(os.path.basename(page))[0]
    m_max=0
    m_logo=""
    # Find the best-scoring logo for this page.
    for logo in logos:
        lname=os.path.splitext(os.path.basename(logo))[0]
        # print(page, logo, MIN_MATCH_COUNT, ypath)
        m=match(page, logo)
        if (m>m_max):
            m_max = m
            m_logo = lname
    if (verbose):
        print("%-20s %-20s %2d" % (pname, m_logo, m_max))
    else:
        print(m_max)
|
from dataclasses import dataclass, field
from typing import List
@dataclass
class DataClassApplication:
    """Plain data container for one application submission."""
    # Required: the kind of operation this application represents.
    operation_type: str
    workflow_id: str = ''
    # Mutable defaults use default_factory so instances never share a list.
    attachments: list = field(default_factory=list)
    answers: list = field(default_factory=list)
    raw_data: str = ''
    raw_data_type: str = 'PDF'  # presumably a format tag -- confirm allowed values
|
#!/usr/bin/python
from ambassador.utils import ParsedService as Service
from typing import Dict, List, Optional, Tuple, TYPE_CHECKING
import sys
import json
import logging
import os
from ambassador import Config, IR
from ambassador.fetch import ResourceFetcher
from ambassador.utils import SecretInfo, SavedSecret, SecretHandler, dump_json
if TYPE_CHECKING:
from ambassador.ir.irresource import IRResource # pragma: no cover
# default AES's Secret name
# (by default, we assume it will be in the same namespace as Ambassador)
DEFAULT_AES_SECRET_NAME = "ambassador-edge-stack"
# the name of some env vars that can be used for overriding
# the AES's Secret name/namespace
ENV_AES_SECRET_NAME = "AMBASSADOR_AES_SECRET_NAME"
ENV_AES_SECRET_NAMESPACE = "AMBASSADOR_AES_SECRET_NAMESPACE"
# Fake SecretHandler for our fake IR, below.
class SecretRecorder(SecretHandler):
    """SecretHandler stand-in: records every secret requested in self.needed
    and always reports success, without touching any real secret store."""

    def __init__(self, logger: logging.Logger) -> None:
        super().__init__(logger, "-source_root-", "-cache_dir-", "0")
        self.needed: Dict[Tuple[str, str], SecretInfo] = {}

    def load_secret(self, resource: 'IRResource',
                    secret_name: str, namespace: str) -> Optional[SecretInfo]:
        """Record the request and always return a successful placeholder."""
        self.logger.debug("SecretRecorder (%s %s): load secret %s in namespace %s" %
                          (resource.kind, resource.name, secret_name, namespace))
        return self.record_secret(secret_name, namespace)

    def record_secret(self, secret_name: str, namespace: str) -> Optional[SecretInfo]:
        """Return the placeholder SecretInfo for (name, namespace), creating
        it lazily on first sight."""
        key = (secret_name, namespace)
        recorded = self.needed.get(key)
        if recorded is None:
            recorded = SecretInfo(secret_name, namespace, 'needed-secret', '-crt-', '-key-',
                                  decode_b64=False)
            self.needed[key] = recorded
        return recorded

    def still_needed(self, resource: 'IRResource', secret_name: str, namespace: str) -> None:
        """Secrets that are still needed also get recorded."""
        self.logger.debug("SecretRecorder (%s %s): secret %s in namespace %s is still needed" %
                          (resource.kind, resource.name, secret_name, namespace))
        self.record_secret(secret_name, namespace)

    def cache_secret(self, resource: 'IRResource', secret_info: SecretInfo):
        """Never cache anything; hand back a fully-faked SavedSecret."""
        self.logger.debug("SecretRecorder (%s %s): skipping cache step for secret %s in namespace %s" %
                          (resource.kind, resource.name, secret_info.name, secret_info.namespace))
        return SavedSecret(secret_info.name, secret_info.namespace, '-crt-path-', '-key-path-', '-user-path-',
                           '-root-crt-path', {'tls.crt': '-crt-', 'tls.key': '-key-', 'user.key': '-user-'})
# XXX Sooooo there's some ugly stuff here.
#
# We need to do a little bit of the same work that the IR does for things like
# managing Resolvers and parsing service names. However, we really don't want to
# do all the work of instantiating an IR.
#
# The solution here is to subclass the IR and take advantage of the watch_only
# initialization keyword, which skips the hard parts of building an IR.
class FakeIR(IR):
    """IR subclass that leans on watch_only=True to skip the expensive parts
    of IR construction while still resolving resolvers and TLS contexts."""

    def __init__(self, aconf: Config, logger=None) -> None:
        # Record interest in every secret we get asked about.
        self.secret_recorder = SecretRecorder(logger)
        super().__init__(aconf, logger=logger, watch_only=True,
                         secret_handler=self.secret_recorder,
                         # Any file we're asked about is reported as present.
                         file_checker=lambda path: True)

    def save_resource(self, resource: 'IRResource') -> 'IRResource':
        """Working with faked modules: nothing needs to be saved."""
        return resource
class WatchHook:
    """Computes the Kubernetes and Consul watch set for a watt snapshot.

    The snapshot is parsed into a Config + FakeIR; the resulting watches
    land in self.watchset = {"kubernetes-watches": ..., "consul-watches": ...}.
    """

    def __init__(self, logger, yaml_stream) -> None:
        # Watch management
        self.logger = logger
        self.consul_watches: List[Dict[str, str]] = []
        self.kube_watches: List[Dict[str, str]] = []
        self.load_yaml(yaml_stream)

    def add_kube_watch(self, what: str, kind: str, namespace: Optional[str],
                       field_selector: Optional[str]=None, label_selector: Optional[str]=None) -> None:
        """Append one Kubernetes watch dict; empty selectors/namespace are omitted."""
        watch = { "kind": kind }
        if namespace:
            watch["namespace"] = namespace
        if field_selector:
            watch["field-selector"] = field_selector
        if label_selector:
            watch["label-selector"] = label_selector
        self.logger.debug(f"{what}: add watch {watch}")
        self.kube_watches.append(watch)

    def load_yaml(self, yaml_stream):
        """Parse the watt snapshot and populate kube/consul watches.

        Order matters: aconf load -> FakeIR (records needed secrets) ->
        hosts -> mappings/resolvers -> recorded secrets -> extras.
        """
        self.aconf = Config()
        fetcher = ResourceFetcher(self.logger, self.aconf, watch_only=True)
        fetcher.parse_watt(yaml_stream.read())
        self.aconf.load_all(fetcher.sorted())
        # We can lift mappings straight from the aconf...
        mappings = self.aconf.get_config('mappings') or {}
        # ...but we need the fake IR to deal with resolvers and TLS contexts.
        self.fake = FakeIR(self.aconf, logger=self.logger)
        self.logger.debug("IR: %s" % self.fake.as_json())
        resolvers = self.fake.resolvers
        contexts = self.fake.tls_contexts
        self.logger.debug(f'mappings: {len(mappings)}')
        self.logger.debug(f'resolvers: {len(resolvers)}')
        self.logger.debug(f'contexts: {len(contexts)}')
        global_resolver = self.fake.ambassador_module.get('resolver', None)
        global_label_selector = os.environ.get('AMBASSADOR_LABEL_SELECTOR', '')
        self.logger.debug('label-selector: %s' % global_label_selector)
        # watch the AES Secret if the edge stack is running
        if self.fake.edge_stack_allowed:
            aes_secret_name = os.getenv(ENV_AES_SECRET_NAME, DEFAULT_AES_SECRET_NAME)
            aes_secret_namespace = os.getenv(ENV_AES_SECRET_NAMESPACE, Config.ambassador_namespace)
            self.logger.debug(f'edge stack detected: need secret {aes_secret_name}.{aes_secret_namespace}')
            self.add_kube_watch(f'Secret {aes_secret_name}', 'secret', namespace=aes_secret_namespace,
                                field_selector=f"metadata.name={aes_secret_name}")
        # Walk hosts: watch services and secrets matching each Host's selector.
        for host in self.fake.get_hosts():
            sel = host.get('selector') or {}
            match_labels = sel.get('matchLabels') or {}
            label_selector = None
            if match_labels:
                label_selector = ','.join([f"{l}={v}" for l, v in match_labels.items()])
            for wanted_kind in ['service', 'secret']:
                self.add_kube_watch(f"Host {host.name}", wanted_kind, host.namespace,
                                    label_selector=label_selector)
        # Walk mappings: each mapping's resolver decides whether we need a
        # Consul watch or a Kubernetes endpoints watch.
        for mname, mapping in mappings.items():
            res_name = mapping.get('resolver', None)
            res_source = 'mapping'
            if not res_name:
                res_name = global_resolver
                res_source = 'defaults'
            ctx_name = mapping.get('tls', None)
            self.logger.debug(
                f'Mapping {mname}: resolver {res_name} from {res_source}, service {mapping.service}, tls {ctx_name}')
            if res_name:
                resolver = resolvers.get(res_name, None)
                self.logger.debug(f'-> resolver {resolver}')
                if resolver:
                    # NOTE(review): uses the module-global `logger` (set in
                    # __main__), not self.logger -- confirm intended.
                    svc = Service(logger, mapping.service, ctx_name)
                    if resolver.kind == 'ConsulResolver':
                        self.logger.debug(f'Mapping {mname} uses Consul resolver {res_name}')
                        # At the moment, we stuff the resolver's datacenter into the association
                        # ID for this watch. The ResourceFetcher relies on that.
                        self.consul_watches.append(
                            {
                                "id": resolver.datacenter,
                                "consul-address": resolver.address,
                                "datacenter": resolver.datacenter,
                                "service-name": svc.hostname
                            }
                        )
                    elif resolver.kind == 'KubernetesEndpointResolver':
                        host = svc.hostname
                        namespace = Config.ambassador_namespace
                        if not host:
                            # This is really kind of impossible.
                            self.logger.error(f"KubernetesEndpointResolver {res_name} has no 'hostname'")
                            continue
                        # "svc.namespace" hostnames carry their own namespace.
                        if "." in host:
                            (host, namespace) = host.split(".", 2)[0:2]
                        self.logger.debug(f'...kube endpoints: svc {svc.hostname} -> host {host} namespace {namespace}')
                        self.add_kube_watch(f"endpoint", "endpoints", namespace,
                                            label_selector=global_label_selector,
                                            field_selector=f"metadata.name={host}")
        # Every secret the FakeIR asked about gets its own watch.
        for secret_key, secret_info in self.fake.secret_recorder.needed.items():
            self.logger.debug(f'need secret {secret_info.name}.{secret_info.namespace}')
            self.add_kube_watch(f"needed secret", "secret", secret_info.namespace,
                                field_selector=f"metadata.name={secret_info.name}")
        if self.fake.edge_stack_allowed:
            # If the edge stack is allowed, make sure we watch for our fallback context.
            self.add_kube_watch("Fallback TLSContext", "TLSContext", namespace=Config.ambassador_namespace)
        # Optional feature watches, gated on marker files in the base dir.
        ambassador_basedir = os.environ.get('AMBASSADOR_CONFIG_BASE_DIR', '/ambassador')
        if os.path.exists(os.path.join(ambassador_basedir, '.ambassadorinstallations_ok')):
            self.add_kube_watch("AmbassadorInstallations", "ambassadorinstallations.getambassador.io", Config.ambassador_namespace)
        ambassador_knative_requested = (os.environ.get("AMBASSADOR_KNATIVE_SUPPORT", "-unset-").lower() == 'true')
        if ambassador_knative_requested:
            self.logger.debug('Looking for Knative support...')
            if os.path.exists(os.path.join(ambassador_basedir, '.knative_clusteringress_ok')):
                # Watch for clusteringresses.networking.internal.knative.dev in any namespace and with any labels.
                self.logger.debug('watching for clusteringresses.networking.internal.knative.dev')
                self.add_kube_watch("Knative clusteringresses", "clusteringresses.networking.internal.knative.dev",
                                    None)
            if os.path.exists(os.path.join(ambassador_basedir, '.knative_ingress_ok')):
                # Watch for ingresses.networking.internal.knative.dev in any namespace and
                # with any labels.
                self.add_kube_watch("Knative ingresses", "ingresses.networking.internal.knative.dev", None)
        self.watchset = {
            "kubernetes-watches": self.kube_watches,
            "consul-watches": self.consul_watches
        }
        # Optionally persist the watch set for debugging.
        save_dir = os.environ.get('AMBASSADOR_WATCH_DIR', '/tmp')
        if save_dir:
            watchset = dump_json(self.watchset)
            with open(os.path.join(save_dir, 'watch.json'), "w") as output:
                output.write(watchset)
#### Mainline.
if __name__ == "__main__":
    # Optional --debug flag, then an optional snapshot path (default: stdin).
    loglevel = logging.INFO
    args = sys.argv[1:]
    if args:
        if args[0] == '--debug':
            loglevel = logging.DEBUG
            args.pop(0)
        elif args[0].startswith('--'):
            raise Exception(f'Usage: {os.path.basename(sys.argv[0])} [--debug] [path]')
    logging.basicConfig(
        level=loglevel,
        format="%(asctime)s watch-hook %(levelname)s: %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S"
    )
    # Keep the ambassador library's logger at INFO regardless of --debug.
    alogger = logging.getLogger('ambassador')
    alogger.setLevel(logging.INFO)
    logger = logging.getLogger('watch_hook')
    logger.setLevel(loglevel)
    yaml_stream = sys.stdin
    if args:
        yaml_stream = open(args[0], "r")
    wh = WatchHook(logger, yaml_stream)
    # Emit the computed watch set on stdout for the calling process.
    watchset = dump_json(wh.watchset)
    sys.stdout.write(watchset)
|
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.parsers import MultiPartParser
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from socialplantplatform.Base.BaseSerializers import SerializerNone
from socialplantplatform.social.Serializers.FollowSerializers import FollowToSerializer, FollowCreateSerializer,FollowMetListSerializer
from socialplantplatform.social.Serializers.PublicationSerializers import PublicationListSerializer, \
PublicationCreateSerializer, PublicationStoriesListSerializer, FriendsPublicationListSerializer
from socialplantplatform.social.filters import PublicationFilter
from socialplantplatform.social.models import Publication, UserFollow
from socialplantplatform.social.permissions import UserOwnPublication
class PublicationViewSet(viewsets.ModelViewSet):
    """
    CRUD endpoints for the authenticated user's own publications, plus
    read-only feed actions: friends' publications, friends' stories, and a
    global listing of all publications.
    """
    parser_classes = (MultiPartParser,)
    # Maps DRF action name -> serializer; actions without an entry fall back
    # to SerializerNone in get_serializer_class().
    serializers_class = {
        "list": PublicationListSerializer,
        "create": PublicationCreateSerializer,
        "retrieve": PublicationListSerializer,
        "update": PublicationCreateSerializer,
        "friends_stories": PublicationStoriesListSerializer,
        "friends_publications": FriendsPublicationListSerializer,
        "all_publications": PublicationListSerializer,
    }
    filterset_class = PublicationFilter
    permission_classes = (IsAuthenticated, UserOwnPublication,)

    def get_serializer_class(self):
        """Select the serializer for the current action."""
        return self.serializers_class.get(self.action, SerializerNone)

    def get_queryset(self):
        """Restrict the default queryset to the requesting user's publications."""
        # Defensive check (IsAuthenticated normally guarantees this). The
        # stray debug print() that wrote to stdout here has been removed.
        if not self.request.user.is_authenticated:
            return Publication.objects.none()
        return Publication.objects.filter(user=self.request.user)

    def _paginated_response(self, queryset):
        """Shared pagination/serialization boilerplate for the feed actions."""
        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            return self.get_paginated_response(serializer.data)
        serializer = self.get_serializer(queryset, many=True)
        return Response(serializer.data)

    @action(detail=False, methods=['get'], filterset_class=None)
    def friends_publications(self, request, *args, **kwargs):
        """List publications of the users the requester follows."""
        return self._paginated_response(
            Publication.objects.get_user_follows_publications(request.user))

    @action(detail=False, methods=['get'], filterset_class=None)
    def all_publications(self, request, *args, **kwargs):
        """List every publication on the platform."""
        return self._paginated_response(Publication.objects.all())

    @action(detail=False, methods=['get'], filterset_class=None)
    def friends_stories(self, request, *args, **kwargs):
        """List stories of the users the requester follows."""
        return self._paginated_response(
            Publication.objects.get_user_follows_stories(request.user))
class FollowViewSet(viewsets.ModelViewSet):
    """
    Manage follow relationships created by the authenticated user, plus a
    'follow_me' action listing the users who follow the requester.
    """
    # Maps DRF action name -> serializer; actions without an entry fall back
    # to SerializerNone in get_serializer_class().
    serializers_class = {
        "list": FollowToSerializer,
        "create": FollowCreateSerializer,
        # DRF names the DELETE action "destroy"; the previous "delete" key
        # never matched, so destroy silently fell through to SerializerNone.
        "destroy": FollowCreateSerializer,
        "update": FollowCreateSerializer,
        "follow_me": FollowMetListSerializer,
    }

    def get_queryset(self):
        """Restrict to follow links created by the requesting user."""
        if not self.request.user.is_authenticated:
            return UserFollow.objects.none()
        return UserFollow.objects.filter(follow_by=self.request.user)

    def get_serializer_class(self):
        """Select the serializer for the current action."""
        return self.serializers_class.get(self.action, SerializerNone)

    @action(detail=False, methods=["get"])
    def follow_me(self, request, *args, **kwargs):
        """List the users following the requester."""
        queryset = UserFollow.objects.filter(follow_to=self.request.user)
        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            return self.get_paginated_response(serializer.data)
        serializer = self.get_serializer(queryset, many=True)
        return Response(serializer.data)
|
import unittest
import Boltz
from sympy.functions import exp, log
# import SPCVUnittests
from Boltz import t, x, x_diff, x_t0, x_t1
# Module-level aliases for the integration-constant symbols used in the
# expected general/particular solutions below.
C1 = Boltz.Boltz.C1
C2 = Boltz.Boltz.C2
# ToDo Think about inheritance
class TestBoltzBase(unittest.TestCase):
    """
    Shared harness for Boltz variational-problem regression tests.

    Subclasses override setUp() to supply a problem definition (L, l, t0, t1)
    and the expected solution artefacts; the individual test_* methods then
    compare each attribute of the solved Boltz instance against expectations.
    """

    # NOTE(review): IDEs flag this setUp because it overrides
    # unittest.TestCase.setUp with an incompatible (parameterised) signature;
    # it only works because subclasses call it with explicit arguments and the
    # suite below adds instances directly (via runTest) rather than by
    # discovery. Discovery of this base class itself would fail.
    def setUp(self, L, l, t0, t1,
              general_solution, coefficients, particular_solution, extreme_value):
        # Problem definition.
        self.L = L
        self.l = l
        self.t0 = t0
        self.t1 = t1
        # Expected results.
        self.general_solution = general_solution
        self.coefficients = coefficients
        self.particular_solution = particular_solution
        self.extreme_value = extreme_value
        # Solve once per test; the test_* methods are pure comparisons.
        self.Boltz = Boltz.Boltz(L=self.L, l=self.l,
                                 t0=self.t0, t1=self.t1)
        self.Boltz.solve()

    def test_L(self):
        self.assertEqual(self.Boltz.L,
                         self.L)

    def test_l(self):
        self.assertEqual(self.Boltz.l,
                         self.l)

    def test_t0(self):
        self.assertEqual(self.Boltz.t0,
                         self.t0)

    def test_t1(self):
        self.assertEqual(self.Boltz.t1,
                         self.t1)

    def test_general_solution(self):
        self.assertEqual(self.Boltz.general_solution,
                         self.general_solution)

    def test_coefficients(self):
        self.assertEqual(self.Boltz.coefficients,
                         self.coefficients)

    def test_particular_solution(self):
        self.assertEqual(self.Boltz.particular_solution,
                         self.particular_solution)

    def test_extreme_value(self):
        self.assertEqual(self.Boltz.extreme_value,
                         self.extreme_value)

    # Aggregate entry point so bare instances (constructed without a method
    # name) are runnable when added directly to a TestSuite.
    def runTest(self):
        self.test_L()
        self.test_l()
        self.test_t0()
        self.test_t1()
        self.test_general_solution()
        self.test_coefficients()
        self.test_particular_solution()
        self.test_extreme_value()
class TestBoltz1(TestBoltzBase):
    """Problem 1: L = x'^2 + 2x with boundary term x(t0)^2 on [0, 1]."""

    def setUp(self):
        problem = dict(
            L=x_diff ** 2 + 2 * x,
            l=x_t0 ** 2,
            t0=0,
            t1=1,
            general_solution=C1 + C2 * t + t ** 2 / 2,
            coefficients={C1: -1, C2: -1},
            particular_solution=t ** 2 / 2 - t - 1,
            extreme_value=-4 / 3,
        )
        super().setUp(**problem)
class TestBoltz2(TestBoltzBase):
    """Problem 2: L = x'^2 - x with boundary term -x(t1)^2 / 2 on [0, 1]."""

    def setUp(self):
        problem = dict(
            L=x_diff ** 2 - x,
            l=-x_t1 ** 2 / 2,
            t0=0,
            t1=1,
            general_solution=C1 + C2 * t - t ** 2 / 4,
            coefficients={C1: -3 / 4, C2: 0},
            particular_solution=-t ** 2 / 4 - 3 / 4,
            extreme_value=5 / 12,
        )
        super().setUp(**problem)
class TestBoltz3(TestBoltzBase):
    """Problem 3: L = t^2 x'^2 with boundary term -2x(t0) + x(t1)^2 on [1, 2]."""

    def setUp(self):
        problem = dict(
            L=t**2 * x_diff**2,
            l=-2*x_t0 + x_t1**2,
            t0=1,
            t1=2,
            general_solution=C1 + C2/t,
            coefficients={C1: 1/2, C2: 1},
            particular_solution=1/2 + 1/t,
            extreme_value=-3/2,
        )
        super().setUp(**problem)
class TestBoltz4(TestBoltzBase):
    """Problem 4: L = 2(t x'^2 + x'x), boundary 3x(t0)^2 - x(t1)^2 - 4x(t1) on [1, e]."""

    def setUp(self):
        problem = dict(
            L=2*(t*x_diff**2 + x_diff*x),
            l=3*x_t0**2 - x_t1**2 - 4*x_t1,
            t0=1,
            t1=exp(1),
            general_solution=C1 + C2*log(t),
            coefficients={C1: 1, C2: 1},
            particular_solution=log(t) + 1,
            extreme_value=-4,
        )
        super().setUp(**problem)
if __name__ == '__main__':
    # Each case defines runTest, so bare instances are valid suite members.
    cases = (TestBoltz1, TestBoltz2, TestBoltz3, TestBoltz4)
    suite = unittest.TestSuite()
    suite.addTests(case() for case in cases)
    unittest.TextTestRunner().run(suite)
|
"""
Collection management and migration helpers
See also: documents/notes/schema-evolution-notes:
"""
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function
__author__ = "Graham Klyne (GK@ACM.ORG)"
__copyright__ = "Copyright 2016, G. Klyne"
__license__ = "MIT (http://opensource.org/licenses/MIT)"
import os
import sys
import logging
import subprocess
import importlib
import shutil
import datetime
log = logging.getLogger(__name__)
from annalist import layout
from annalist.identifiers import ANNAL, RDFS
from annalist.util import valid_id, extract_entity_id, make_type_entity_id
from annalist.collections_data import installable_collections
from annalist.models.site import Site
from annalist.models.collection import Collection
from annalist.models.recordtype import RecordType
from annalist.models.recordview import RecordView
from annalist.models.recordlist import RecordList
from annalist.models.recordfield import RecordField
from annalist.models.recordgroup import RecordGroup
from annalist.models.collectiondata import initialize_coll_data, copy_coll_data, migrate_coll_data
from . import am_errors
from .am_settings import (
am_get_settings, am_get_site_settings, am_get_site
)
from .am_getargvalue import getarg, getargvalue
# Collection access helpers
def get_settings_site(annroot, userhome, options):
    """
    Get settings and site data based on command line options provided

    returns:
        (status, settings, site)

    where 'settings' and/or 'site' are None if not found.
    """
    settings = am_get_settings(annroot, userhome, options)
    if not settings:
        print("Settings not found (%s)"%(options.configuration), file=sys.stderr)
        return (am_errors.AM_NOSETTINGS, settings, None)
    sitesettings = am_get_site_settings(annroot, userhome, options)
    if not sitesettings:
        print("Site settings not found (%s)"%(options.configuration), file=sys.stderr)
        return (am_errors.AM_NOSETTINGS, settings, None)
    return (am_errors.AM_SUCCESS, settings, am_get_site(sitesettings))
def coll_type(coll, type_id):
    """
    Return identified type in collection, or None

    altscope="all" also searches inherited/site-wide definitions.
    """
    # Was `RecordField.load(coll, field_id, ...)`: wrong record class, and
    # `field_id` is undefined here, so every call raised NameError.
    return RecordType.load(coll, type_id, altscope="all")
def coll_types(coll):
    """
    Return iterator over types in collection
    """
    # Delegates directly to the collection; unlike coll_views/coll_lists/etc.
    # below, no "_initial_values" filtering is applied here.
    return coll.types()
def coll_view(coll, view_id):
    """
    Return identified view in collection, or None

    altscope="all" also searches inherited/site-wide definitions.
    """
    return RecordView.load(coll, view_id, altscope="all")
def coll_views(coll):
    """
    Return iterator over views in collection

    Skips entries that fail to load and the internal "_initial_values" entry.
    """
    for view_id in coll._children(RecordView, altscope="all"):
        view = coll_view(coll, view_id)
        if view and view.get_id() != "_initial_values":
            yield view
def coll_list(coll, list_id):
    """
    Return identified list in collection, or None

    altscope="all" also searches inherited/site-wide definitions.
    """
    return RecordList.load(coll, list_id, altscope="all")
def coll_lists(coll):
    """
    Return iterator over lists in collection

    Skips entries that fail to load and the internal "_initial_values" entry.
    """
    for list_id in coll._children(RecordList, altscope="all"):
        list_def = coll_list(coll, list_id)
        if list_def and list_def.get_id() != "_initial_values":
            yield list_def
def coll_field(coll, field_id):
    """
    Return identified field in collection, or None

    altscope="all" also searches inherited/site-wide definitions.
    """
    return RecordField.load(coll, field_id, altscope="all")
def coll_fields(coll):
    """
    Return iterator over fields in collection

    Skips entries that fail to load and the internal "_initial_values" entry.
    """
    for field_id in coll._children(RecordField, altscope="all"):
        field = coll_field(coll, field_id)
        if field and field.get_id() != "_initial_values":
            yield field
def coll_group(coll, group_id):
    """
    Return identified group in collection, or None

    altscope="all" also searches inherited/site-wide definitions.
    """
    return RecordGroup.load(coll, group_id, altscope="all")
def coll_groups(coll):
    """
    Return iterator over groups in collection

    Skips entries that fail to load and the internal "_initial_values" entry.
    """
    for group_id in coll._children(RecordGroup, altscope="all"):
        group = coll_group(coll, group_id)
        if group and group.get_id() != "_initial_values":
            yield group
# Common logic for View, List and Group field lists
def add_to_set(value, values):
"""
Add non-empty value to set of values
"""
if value:
values.add(value)
return values
def field_in_field_list(field_list, field_id, property_uri):
    """
    Tests to see if field is referenced in field list

    A match is either the same field id or the same property URI override.
    """
    return any(
        extract_entity_id(fref.get(ANNAL.CURIE.field_id, "")) == field_id or
        fref.get(ANNAL.CURIE.property_uri, "") == property_uri
        for fref in field_list
        )
def group_in_field_list(field_list, coll, group_ids):
    """
    Tests to see if any of group ids are referenced in field list
    """
    for fref in field_list:
        fid  = extract_entity_id(fref.get(ANNAL.CURIE.field_id, ""))
        fdef = coll_field(coll, fid)
        # Guard against dangling field references: coll_field returns None
        # when the id cannot be resolved (previously raised AttributeError).
        if fdef and fdef.get(ANNAL.CURIE.group_ref, "") in group_ids:
            return True
    return False
def types_using_field(coll, field_id, property_uri):
    """
    Returns a set of type ids that may use a specified field or property URI.

    The search is transitive: groups referencing the field are collected
    first, then views/lists referencing the field or those groups, and
    finally types whose URI (or any supertype URI) was accumulated.
    """
    type_ids  = set()
    type_uris = set()
    group_ids = set()
    # Look at field definition
    f = coll_field(coll, field_id)
    add_to_set(f.get(ANNAL.CURIE.field_entity_type, ""), type_uris)
    # Look at groups that reference field
    for g in coll_groups(coll):
        if field_in_field_list(g[ANNAL.CURIE.group_fields], field_id, property_uri):
            add_to_set(g.get_id(), group_ids)
            add_to_set(extract_entity_id(g.get(ANNAL.CURIE.record_type, "")), type_uris)
    # Look at views that reference field or groups
    for v in coll_views(coll):
        if ( field_in_field_list(v[ANNAL.CURIE.view_fields], field_id, property_uri) or
             group_in_field_list(v[ANNAL.CURIE.view_fields], coll, group_ids) ):
            add_to_set(extract_entity_id(v.get(ANNAL.CURIE.record_type, "")), type_uris)
    # Look at lists that reference field or groups
    for l in coll_lists(coll):
        if ( field_in_field_list(l[ANNAL.CURIE.list_fields], field_id, property_uri) or
             group_in_field_list(l[ANNAL.CURIE.list_fields], coll, group_ids) ):
            add_to_set(extract_entity_id(l.get(ANNAL.CURIE.record_type, "")), type_uris)
            add_to_set(extract_entity_id(l.get(ANNAL.CURIE.default_type, "")), type_uris)
    # Collect type ids: a type matches if its own URI or any supertype URI
    # intersects the accumulated set.
    for t in coll_types(coll):
        type_uri = t.get(ANNAL.CURIE.uri, "")
        supertype_uris = set( s[ANNAL.CURIE.supertype_uri] for s in t.get(ANNAL.CURIE.supertype_uris,[]) )
        if (type_uri in type_uris) or (supertype_uris & type_uris):
            add_to_set(t.get_id(), type_ids)
    return type_ids
def compare_field_list(old_coll, new_coll, old_field_list, new_field_list, reporting_prefix):
    """
    Report URI changes between fields lists as seen in group, view and list definitions

    old_coll/new_coll are used to resolve the field definitions' own property
    URIs when the list entry does not override them.
    """
    old_len = len(old_field_list)
    new_len = len(new_field_list)
    if new_len != old_len:
        print("* %s, field count changed from %d to %d"%(reporting_prefix, old_len, new_len))
    for new_f in new_field_list:
        field_id = extract_entity_id(new_f[ANNAL.CURIE.field_id])
        # Look for field in old list.
        # If not found, ignore it - we're looking for URI changes
        # @@TODO: ... or are we?
        for old_f in old_field_list:
            if field_id == extract_entity_id(old_f[ANNAL.CURIE.field_id]):
                # Field found - check for incompatible URI override.
                # Fall back to the field definition's own property URI where
                # the list entry does not supply one (field definitions are
                # already checked separately).
                old_uri = old_f.get(ANNAL.CURIE.property_uri, "")
                new_uri = new_f.get(ANNAL.CURIE.property_uri, "")
                if (not old_uri) and new_uri:
                    old_field = coll_field(old_coll, field_id)
                    old_uri   = old_field[ANNAL.CURIE.property_uri]
                if old_uri and (not new_uri):
                    new_field = coll_field(new_coll, field_id)
                    new_uri   = new_field[ANNAL.CURIE.property_uri]
                if old_uri != new_uri:
                    print(
                        "* %s, field %s, property URI changed from '%s' to '%s'"%
                        (reporting_prefix, field_id, old_uri, new_uri)
                        )
                    # The original advisory here referenced undefined names
                    # (`type_id`, `new_coll_id`) and raised NameError; suggest
                    # property aliases for affected types instead, matching
                    # the field-definition report in am_migrationreport.
                    for tid in types_using_field(new_coll, field_id, old_uri):
                        print(
                            "  Consider adding property alias for '%s' to type %s in collection '%s'"%
                            (old_uri, tid, new_coll.get_id())
                            )
                    report_property_references(new_coll, old_uri, "URI '%s'"%(old_uri))
                break
    return
def report_property_references(coll, property_uri, reporting_prefix):
    """
    Report all references to a specified property URI.

    Scans types (aliases), views, lists, fields and groups, printing one
    line per occurrence prefixed by reporting_prefix.
    """
    # Reference from types
    for t in coll_types(coll):
        type_id = t.get_id()
        alias_value_uris = [ a[ANNAL.CURIE.alias_source] for a in t.get(ANNAL.CURIE.field_aliases,[]) ]
        if property_uri in alias_value_uris:
            print("%s appears as an alias value of type '%s'"%(reporting_prefix, type_id))
    # References from views
    for v in coll_views(coll):
        view_id = v.get_id()
        report_property_references_in_field_list(
            coll, property_uri, v[ANNAL.CURIE.view_fields],
            reporting_prefix, "fields for view %s"%(view_id)
            )
    # References from lists
    for l in coll_lists(coll):
        list_id = l.get_id()
        if property_uri in l.get(ANNAL.CURIE.list_entity_selector, ""):
            print("%s appears in selector for list '%s'"%(reporting_prefix, list_id))
        report_property_references_in_field_list(
            # Fix: was `v[ANNAL.CURIE.list_fields]` - a stale loop variable
            # left over from the views scan, so list fields were checked
            # against the wrong (last view's) field list.
            coll, property_uri, l[ANNAL.CURIE.list_fields],
            reporting_prefix, "fields for list %s"%(list_id)
            )
    # References from fields
    for f in coll_fields(coll):
        field_id = f.get_id()
        if property_uri == f.get(ANNAL.CURIE.property_uri, ""):
            print("%s appears as property URI for field '%s'"%(reporting_prefix, field_id))
        if property_uri in f.get(ANNAL.CURIE.field_ref_restriction, ""):
            print("%s appears in value restriction for field '%s'"%(reporting_prefix, field_id))
    # References from groups
    for g in coll_groups(coll):
        group_id = g.get_id()
        report_property_references_in_field_list(
            coll, property_uri, g[ANNAL.CURIE.group_fields],
            reporting_prefix, "fields for group %s"%(group_id)
            )
    return
def report_property_references_in_field_list(
        coll, property_uri, field_list,
        reporting_prefix, reporting_suffix):
    """
    Report occurrences of a property URI appearing in a field list.

    Prints one line per matching field reference.
    """
    matches = sum(
        1 for fref in field_list
        if fref.get(ANNAL.CURIE.property_uri, "") == property_uri
        )
    for _ in range(matches):
        print("%s appears in %s"%(reporting_prefix, reporting_suffix))
    return
def report_type_references(coll, type_uri, reporting_prefix):
    """
    Report all references to a specified type URI.

    Scans types (supertypes), views, lists, fields and groups, printing one
    line per occurrence prefixed by reporting_prefix.
    """
    # Reference from types
    for t in coll_types(coll):
        type_id = t.get_id()
        supertype_uris = [ u[ANNAL.CURIE.supertype_uri] for u in t.get(ANNAL.CURIE.supertype_uris,[]) ]
        if type_uri in supertype_uris:
            print("%s appears as a supertype of type '%s'"%(reporting_prefix, type_id))
    # References from views
    for v in coll_views(coll):
        view_id = v.get_id()
        if type_uri == v.get(ANNAL.CURIE.record_type, ""):
            print("%s appears as entity type for view '%s'"%(reporting_prefix, view_id))
    # References from lists
    for l in coll_lists(coll):
        list_id = l.get_id()
        if type_uri == l.get(ANNAL.CURIE.record_type, ""):
            print("%s appears as entity type for list '%s'"%(reporting_prefix, list_id))
        # Substring match: selectors are expression strings that may embed the URI.
        if type_uri in l.get(ANNAL.CURIE.list_entity_selector, ""):
            print("%s appears in selector for list '%s'"%(reporting_prefix, list_id))
    # References from fields
    for f in coll_fields(coll):
        field_id = f.get_id()
        if type_uri == f.get(ANNAL.CURIE.field_value_type, ""):
            print("%s appears as value type for field '%s'"%(reporting_prefix, field_id))
        if type_uri == f.get(ANNAL.CURIE.field_entity_type, ""):
            print("%s appears as entity type for field '%s'"%(reporting_prefix, field_id))
        # Substring match, as for list selectors above.
        if type_uri in f.get(ANNAL.CURIE.field_ref_restriction, ""):
            print("%s appears in value restriction for field '%s'"%(reporting_prefix, field_id))
    # References from groups
    for g in coll_groups(coll):
        group_id = g.get_id()
        if type_uri == g.get(ANNAL.CURIE.record_type, ""):
            print("%s appears as entity type for group %s"%(reporting_prefix, group_id))
    return
# Migration helper functions
def am_migrationreport(annroot, userhome, options):
    """
    Collection migration report helper

        annalist_manager migrationreport old_coll new_coll

    Generates a report of changes to data needed to match type and property
    URI changes moving from old_coll to new_coll.

    annroot     is the root directory for the Annalist software installation.
    userhome    is the home directory for the host system user issuing the command.
    options     contains options parsed from the command line.

    returns     0 if all is well, or a non-zero status code.
                This value is intended to be used as an exit status code
                for the calling program.
    """
    status, settings, site = get_settings_site(annroot, userhome, options)
    if status != am_errors.AM_SUCCESS:
        return status
    if len(options.args) > 2:
        print("Unexpected arguments for %s: (%s)"%(options.command, " ".join(options.args)), file=sys.stderr)
        return am_errors.AM_UNEXPECTEDARGS
    # Load both collections, prompting for the ids if not given on the command line.
    old_coll_id = getargvalue(getarg(options.args, 0), "Old collection Id: ")
    old_coll = Collection.load(site, old_coll_id)
    if not (old_coll and old_coll.get_values()):
        print("Old collection not found: %s"%(old_coll_id), file=sys.stderr)
        return am_errors.AM_NOCOLLECTION
    new_coll_id = getargvalue(getarg(options.args, 1), "New collection Id: ")
    new_coll = Collection.load(site, new_coll_id)
    if not (new_coll and new_coll.get_values()):
        print("New collection not found: %s"%(new_coll_id), file=sys.stderr)
        return am_errors.AM_NOCOLLECTION
    status = am_errors.AM_SUCCESS
    print("# Migration report from collection '%s' to '%s' #"%(old_coll_id, new_coll_id))
    print("")
    # Scan and report on type URI changes
    for new_type in coll_types(new_coll):
        type_id = new_type.get_id()
        old_type = old_coll.get_type(type_id)
        if old_type:
            old_uri = old_type[ANNAL.CURIE.uri]
            new_uri = new_type[ANNAL.CURIE.uri]
            if old_uri != new_uri:
                print("* Type %s, URI changed from '%s' to '%s'"%(type_id, old_uri, new_uri))
                supertype_uris = [ u[ANNAL.CURIE.supertype_uri] for u in new_type.get(ANNAL.CURIE.supertype_uris,[]) ]
                if old_uri not in supertype_uris:
                    print(
                        "  Consider adding supertype '%s' to type '%s' in collection '%s'"%
                        (old_uri, type_id, new_coll_id)
                        )
                report_type_references(new_coll, old_uri, "  URI '%s'"%(old_uri))
    # Scan and report on property URI changes in field definitions
    for new_field in coll_fields(new_coll):
        field_id = new_field.get_id()
        old_field = coll_field(old_coll, field_id)
        if old_field:
            old_uri = old_field[ANNAL.CURIE.property_uri]
            new_uri = new_field[ANNAL.CURIE.property_uri]
            if old_uri != new_uri:
                print("* Field %s, property URI changed from '%s' to '%s'"%(field_id, old_uri, new_uri))
                # Suggest property aliases so old data remains accessible.
                type_ids = types_using_field(new_coll, field_id, old_uri)
                for tid in type_ids:
                    print(
                        "  Consider adding property alias for '%s' to type %s in collection '%s'"%
                        (old_uri, tid, new_coll_id)
                        )
    # Scan and report on property URI changes in group definitions
    for new_group in coll_groups(new_coll):
        group_id = new_group.get_id()
        old_group = coll_group(old_coll, group_id)
        if old_group:
            compare_field_list(
                old_coll, new_coll,
                old_group[ANNAL.CURIE.group_fields],
                new_group[ANNAL.CURIE.group_fields],
                "Group %s"%group_id)
    # Scan and report on property URI changes in view definitions
    for new_view in coll_views(new_coll):
        view_id = new_view.get_id()
        old_view = coll_view(old_coll, view_id)
        if old_view:
            compare_field_list(
                old_coll, new_coll,
                old_view[ANNAL.CURIE.view_fields],
                new_view[ANNAL.CURIE.view_fields],
                "View %s"%view_id)
    # Scan and report on property URI changes in list definitions
    for new_list in coll_lists(new_coll):
        list_id = new_list.get_id()
        old_list = coll_list(old_coll, list_id)
        if old_list:
            compare_field_list(
                old_coll, new_coll,
                old_list[ANNAL.CURIE.list_fields],
                new_list[ANNAL.CURIE.list_fields],
                "List %s"%list_id)
    print("")
    return status
# Collection management functions
def am_installcollection(annroot, userhome, options):
    """
    Install software-defined collection data

        annalist_manager installcollection coll_id

    Copies data from an existing collection to a new collection.

    annroot     is the root directory for the Annalist software installation.
    userhome    is the home directory for the host system user issuing the command.
    options     contains options parsed from the command line.

    returns     0 if all is well, or a non-zero status code.
                This value is intended to be used as an exit status code
                for the calling program.
    """
    status, settings, site = get_settings_site(annroot, userhome, options)
    if status != am_errors.AM_SUCCESS:
        return status
    if len(options.args) > 1:
        print(
            "Unexpected arguments for %s: (%s)"%
            (options.command, " ".join(options.args)),
            file=sys.stderr
            )
        return am_errors.AM_UNEXPECTEDARGS
    # Check collection Id against the software-defined installable set
    coll_id = getargvalue(getarg(options.args, 0), "Collection Id to install: ")
    if coll_id in installable_collections:
        src_dir_name = installable_collections[coll_id]['data_dir']
    else:
        print("Collection name to install not known: %s"%(coll_id), file=sys.stderr)
        print("Available collection Ids are: %s"%(",".join(installable_collections.keys())))
        return am_errors.AM_NOCOLLECTION
    # Check if collection already exists
    coll = Collection.load(site, coll_id)
    if (coll and coll.get_values()):
        if options.force:
            # --force: replace any existing collection with the same id
            print("Existing collection %s will be removed ('--force' specified)"%(coll_id), file=sys.stderr)
            Collection.remove(site, coll_id)
        else:
            print("Collection already exists: %s"%(coll_id), file=sys.stderr)
            return am_errors.AM_COLLECTIONEXISTS
    # Install collection now
    src_dir = os.path.join(annroot, "annalist/data", src_dir_name)
    print("Installing collection '%s' from data directory '%s'"%(coll_id, src_dir))
    coll_metadata = installable_collections[coll_id]['coll_meta']
    # Timestamp the installation in the collection comment (seconds precision)
    date_time_now = datetime.datetime.now().replace(microsecond=0)
    coll_metadata[ANNAL.CURIE.comment] = (
        "Initialized at %s by `annalist-manager installcollection`"%
        date_time_now.isoformat()
        )
    coll = site.add_collection(coll_id, coll_metadata)
    msgs = initialize_coll_data(src_dir, coll)
    if msgs:
        for msg in msgs:
            print(msg)
        status = am_errors.AM_INSTALLCOLLFAIL
    return status
def am_copycollection(annroot, userhome, options):
    """
    Copy collection data

        annalist_manager copycollection old_coll_id new_coll_id

    Copies data from an existing collection to a new collection.

    annroot     is the root directory for the Annalist software installation.
    userhome    is the home directory for the host system user issuing the command.
    options     contains options parsed from the command line.

    returns     0 if all is well, or a non-zero status code.
                This value is intended to be used as an exit status code
                for the calling program.
    """
    status, settings, site = get_settings_site(annroot, userhome, options)
    if status != am_errors.AM_SUCCESS:
        return status
    if len(options.args) > 2:
        print(
            "Unexpected arguments for %s: (%s)"%
            (options.command, " ".join(options.args)),
            file=sys.stderr
            )
        return am_errors.AM_UNEXPECTEDARGS
    old_coll_id = getargvalue(getarg(options.args, 0), "Old collection Id: ")
    old_coll = Collection.load(site, old_coll_id)
    if not (old_coll and old_coll.get_values()):
        print("Old collection not found: %s"%(old_coll_id), file=sys.stderr)
        return am_errors.AM_NOCOLLECTION
    new_coll_id = getargvalue(getarg(options.args, 1), "New collection Id: ")
    new_coll = Collection.load(site, new_coll_id)
    if (new_coll and new_coll.get_values()):
        # Unlike installcollection, there is no --force option here
        print("New collection already exists: %s"%(new_coll_id), file=sys.stderr)
        return am_errors.AM_COLLECTIONEXISTS
    # Copy collection now; the new collection inherits the old one's metadata
    print("Copying collection '%s' to '%s'"%(old_coll_id, new_coll_id))
    new_coll = site.add_collection(new_coll_id, old_coll.get_values())
    msgs = copy_coll_data(old_coll, new_coll)
    if msgs:
        for msg in msgs:
            print(msg)
        status = am_errors.AM_COPYCOLLFAIL
    print("")
    return status
def am_check_site_updated(coll):
    """
    Check that site data has been updated before performing data migration.

    Data migration is performed incompletely if the "_field" type is not
    visible, so that is the test used here.
    """
    visible_types = coll._children(RecordType, altscope="all")
    if layout.FIELD_TYPEID not in visible_types:
        print("Perform 'annalist-manager updatesitedata' before collection data migration.")
        print("Collection data not migrated.")
        return am_errors.AM_MIGRATECOLLFAIL
    return am_errors.AM_SUCCESS
def am_migratecollection(annroot, userhome, options):
    """
    Apply migrations for a specified collection

        annalist_manager migratecollection coll

    Reads and writes every entity in a collection, thereby applying data
    migrations and saving them in the stored data.

    annroot     is the root directory for the Annalist software installation.
    userhome    is the home directory for the host system user issuing the command.
    options     contains options parsed from the command line.

    returns     0 if all is well, or a non-zero status code.
                This value is intended to be used as an exit status code
                for the calling program.
    """
    status, settings, site = get_settings_site(annroot, userhome, options)
    if status != am_errors.AM_SUCCESS:
        return status
    coll_id = getargvalue(getarg(options.args, 0), "Collection Id: ")
    coll = Collection.load(site, coll_id)
    if not (coll and coll.get_values()):
        print("Collection not found: %s"%(coll_id), file=sys.stderr)
        return am_errors.AM_NOCOLLECTION
    # Refuse to migrate against stale site data (migration would be incomplete)
    status = am_check_site_updated(coll)
    if status != am_errors.AM_SUCCESS:
        return status
    print("Apply data migrations in collection '%s'"%(coll_id,))
    msgs = migrate_coll_data(coll)
    if msgs:
        for msg in msgs:
            print(msg)
        status = am_errors.AM_MIGRATECOLLFAIL
    else:
        # Record that the collection data now matches this software version
        coll.update_software_compatibility_version()
    return status
def am_migrateallcollections(annroot, userhome, options):
    """
    Apply migrations to all collections

        annalist_manager migrateallcollections

    Reads and writes every entity in all collections, thereby
    applying data migrations and saving them in the stored data.

    annroot     is the root directory for the Annalist software installation.
    userhome    is the home directory for the host system user issuing the command.
    options     contains options parsed from the command line.

    returns     0 if all is well, or a non-zero status code.
                This value is intended to be used as an exit status code
                for the calling program.
    """
    status, settings, site = get_settings_site(annroot, userhome, options)
    if status != am_errors.AM_SUCCESS:
        return status
    print("Apply data migrations in all collections:")
    for coll in site.collections():
        # Refuse to migrate against stale site data (migration would be incomplete)
        status = am_check_site_updated(coll)
        if status != am_errors.AM_SUCCESS:
            return status
        coll_id = coll.get_id()
        # Site data itself is not migrated here
        if coll_id != layout.SITEDATA_ID:
            log.debug("========== Processing '%s' =========="%(coll_id,))
            print("---- Processing '%s'"%(coll_id,))
            msgs = migrate_coll_data(coll)
            if msgs:
                # Record the failure but keep migrating remaining collections
                for msg in msgs:
                    print(msg)
                status = am_errors.AM_MIGRATECOLLFAIL
    print("Data migrations complete.")
    return status
# End.
|
"""
This package contains the elements used to bootstrap the Android SDK's components
"""
import glob
import logging
import os
import shutil
import stat
import subprocess
import sys
import zipfile
from pathlib import Path
from typing import Optional
import mobiletestorchestrator
try:
    from importlib.resources import files
except ImportError:
    # Fall back to the importlib-resources backport on Python < 3.9.
    # Narrowed from a bare `except:`, which would also have swallowed
    # KeyboardInterrupt/SystemExit.
    # noinspection PyUnresolvedReferences
    from importlib_resources import files  # type: ignore
log = logging.getLogger(str(Path(__file__).stem))
log.setLevel(logging.ERROR)
class SdkManager:
"""
SDK Manager interface for installing components of the Android SDK
:param sdk_dir: Path to where the sdk either exists or is to be bootstrapped (if starting fresh)
:param bootstrap: If True, bootstrap the sdk manager and avd manager from internal resources
"""
PROTOCOL_PREFIX = "sdkmanager"
def __init__(self, sdk_dir: Path, bootstrap: bool = False):
self._sdk_dir = sdk_dir
self._env = dict(os.environ)
self._env.update({'ANDROID_SDK_ROOT': str(self._sdk_dir)})
self._sdk_manager_path = sdk_dir.joinpath("tools", "bin", "sdkmanager")
self._avd_manager_path = sdk_dir.joinpath("tools", "bin", "avdmanager")
if sys.platform.lower() == 'win32':
self._sdk_manager_path = self._sdk_manager_path.with_suffix(".bat")
self._avd_manager_path = self._avd_manager_path.with_suffix(".bat")
self._shell = True
else:
self._shell = False
if bootstrap is True and not self._sdk_manager_path.exists():
bootstrap_zip = files(mobiletestorchestrator).joinpath(os.path.join("resources", "sdkmanager",
"bootstrap.zip"))
with zipfile.ZipFile(bootstrap_zip) as zfile:
zfile.extractall(path=self._sdk_dir)
if self._sdk_dir.joinpath("android_sdk_bootstrap").exists():
for file in glob.glob(str(self._sdk_dir.joinpath("android_sdk_bootstrap", "*"))):
basename = os.path.basename(file)
shutil.move(file, str(self._sdk_dir.joinpath(basename)))
if not self._sdk_manager_path.exists():
raise FileNotFoundError(f"Did not locate sdkmanager tool at expected location {self._sdk_manager_path}")
if not self._avd_manager_path.exists():
raise FileNotFoundError(f"Did not locate sdkmanager tool at expected location {self._avd_manager_path}")
os.chmod(str(self._sdk_manager_path), stat.S_IRWXU)
os.chmod(str(self._avd_manager_path), stat.S_IRWXU)
if sys.platform == 'win32':
self._env['USERNAME'] = os.getlogin()
self._env["USERPROFILE"] = f"\\Users\\{os.getlogin()}"
@property
def emulator_path(self) -> Path:
return self._sdk_dir.joinpath("emulator", "emulator.exe") if sys.platform.lower() == 'win32' else \
self._sdk_dir.joinpath("emulator", "emulator")
@property
def adb_path(self) -> Path:
return self._sdk_dir.joinpath("platform-tools", "adb.exe") if sys.platform.lower() == 'win32' else \
self._sdk_dir.joinpath("platform-tools", "adb")
def bootstrap(self, application: str, version: Optional[str] = None) -> None:
application = f"{application};{version}" if version else f"{application}"
if not os.path.exists(self._sdk_manager_path):
raise SystemError("Failed to properly install sdk manager for bootstrapping")
log.debug(f"Downloading to {self._sdk_dir}\n {self._sdk_manager_path} {application}")
completed = subprocess.Popen([self._sdk_manager_path, application], stdout=subprocess.PIPE, bufsize=0,
stderr=subprocess.PIPE, stdin=subprocess.PIPE,
shell=self._shell, env=self._env)
assert completed.stdin is not None # make mypy happy
for _ in range(10):
try:
if sys.platform.lower() == 'win32':
completed.stdin.write(b'y\r\n')
else:
completed.stdin.write(b'y\n')
except Exception:
break
stdout, stderr = completed.communicate()
if completed.returncode != 0:
raise Exception(
f"Failed to download/update {application}: {stderr.decode('utf-8')}")
def bootstrap_platform_tools(self) -> None:
"""
download/update platform tools within the sdk
"""
self.bootstrap("platform-tools")
def bootstrap_emulator(self) -> None:
"""
download/update emulator within the sdk
"""
self.bootstrap("emulator")
def download_system_img(self, version: str) -> None:
"""
download/update system image with version
:param version: version to download
"""
self.bootstrap("system-images", version)
def create_avd(self, avd_dir: Path, avd_name: str, image: str, device_type: str, *args: str) -> None:
    """
    Create an android emulator definition

    :param avd_dir: Where to create the files
    :param avd_name: name to give to emulator definition
    :param image: which system image to use
    :param device_type: device type (as per 'avd_manager list')
    :param args: additional args to pass on create
    :raises Exception: if avdmanager exits with a non-zero status
    """
    log.debug(f">>>> Downloading system image ...{image}")
    self.download_system_img(image)
    create_avd_cmd = [str(self._avd_manager_path), "create", "avd", "-n", avd_name, "-k", f"system-images;{image}",
                      "-d", device_type]
    create_avd_cmd += args
    self._env.update({"ANDROID_AVD_HOME": str(avd_dir)})
    p = subprocess.Popen(create_avd_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                         stdin=subprocess.PIPE, shell=self._shell, env=self._env)
    # Use communicate() rather than wait(): it drains the stdout pipe, so a
    # chatty avdmanager cannot deadlock on a full pipe buffer.
    stdout, _ = p.communicate()
    if p.returncode != 0:
        # stderr was redirected into stdout above, so stdout carries both
        # streams.  (The previous code decoded the stderr handle, which is
        # None under stderr=subprocess.STDOUT and crashed with AttributeError
        # on the failure path.)
        raise Exception(f"Failed to create avd: {stdout.decode('utf-8')}")
|
from typing import Optional, Union, Any, List, Tuple
import enum
from pydantic import BaseModel as PydanticModel
# --- Account* models ------------------------------------------------------
# Plain data-transfer models (pydantic) for the "account" section of what
# appears to be the VK API schema -- TODO confirm against the generator.
# Every field is Optional and defaults to None so partial responses validate.
# "BaseBoolInt" fields carry 0/1 boolean flags (see BaseBoolInt below).
class AccountAccountCounters(PydanticModel):
    # Per-section unread/badge counters for the current account.
    app_requests: Optional[int] = None
    events: Optional[int] = None
    faves: Optional[int] = None
    friends: Optional[int] = None
    friends_suggestions: Optional[int] = None
    friends_recommendations: Optional[int] = None
    gifts: Optional[int] = None
    groups: Optional[int] = None
    menu_discover_badge: Optional[int] = None
    menu_clips_badge: Optional[int] = None
    messages: Optional[int] = None
    memories: Optional[int] = None
    notes: Optional[int] = None
    notifications: Optional[int] = None
    photos: Optional[int] = None
    sdk: Optional[int] = None
class AccountInfo(PydanticModel):
    wishlists_ae_promo_banner_show: Optional["BaseBoolInt"] = None
    # NOTE(review): pydantic treats leading-underscore names as private
    # attributes, so _2fa_required will not be populated from input -- verify.
    _2fa_required: Optional["BaseBoolInt"] = None
    country: Optional[str] = None
    https_required: Optional["BaseBoolInt"] = None
    intro: Optional["BaseBoolInt"] = None
    show_vk_apps_intro: Optional[bool] = None
    mini_apps_ads_slot_id: Optional[int] = None
    qr_promotion: Optional[int] = None
    link_redirects: Optional[dict] = None
    lang: Optional[int] = None
    no_wall_replies: Optional["BaseBoolInt"] = None
    own_posts_default: Optional["BaseBoolInt"] = None
    subscriptions: Optional[List[int]] = None
class AccountNameRequest(PydanticModel):
    # A pending request to change the account's first/last name.
    first_name: Optional[str] = None
    id: Optional[int] = None
    last_name: Optional[str] = None
    status: Optional["AccountNameRequestStatus"] = None
    lang: Optional[str] = None
    link_href: Optional[str] = None
    link_label: Optional[str] = None
class AccountNameRequestStatus(enum.Enum):
    # Lifecycle states of a name-change request.
    SUCCESS = "success"
    PROCESSING = "processing"
    DECLINED = "declined"
    WAS_ACCEPTED = "was_accepted"
    WAS_DECLINED = "was_declined"
    DECLINED_WITH_LINK = "declined_with_link"
    RESPONSE = "response"
    RESPONSE_WITH_LINK = "response_with_link"
class AccountOffer(PydanticModel):
    description: Optional[str] = None
    id: Optional[int] = None
    img: Optional[str] = None
    instruction: Optional[str] = None
    instruction_html: Optional[str] = None
    price: Optional[int] = None
    short_description: Optional[str] = None
    tag: Optional[str] = None
    title: Optional[str] = None
    currency_amount: Optional[int] = None
    link_id: Optional[int] = None
    link_type: Optional[str] = None
class AccountPushConversations(PydanticModel):
    # Count + items envelope for per-conversation push settings.
    count: Optional[int] = None
    items: Optional[List["AccountPushConversationsItem"]] = None
class AccountPushConversationsItem(PydanticModel):
    disabled_until: Optional[int] = None
    peer_id: Optional[int] = None
    sound: Optional["BaseBoolInt"] = None
    disabled_mentions: Optional["BaseBoolInt"] = None
    disabled_mass_mentions: Optional["BaseBoolInt"] = None
class AccountPushParams(PydanticModel):
    # Per-event push-notification modes; list element types distinguish
    # which setting vocabulary each event accepts.
    msg: Optional[List["AccountPushParamsMode"]] = None
    chat: Optional[List["AccountPushParamsMode"]] = None
    like: Optional[List["AccountPushParamsSettings"]] = None
    repost: Optional[List["AccountPushParamsSettings"]] = None
    comment: Optional[List["AccountPushParamsSettings"]] = None
    mention: Optional[List["AccountPushParamsSettings"]] = None
    reply: Optional[List["AccountPushParamsOnoff"]] = None
    new_post: Optional[List["AccountPushParamsOnoff"]] = None
    wall_post: Optional[List["AccountPushParamsOnoff"]] = None
    wall_publish: Optional[List["AccountPushParamsOnoff"]] = None
    friend: Optional[List["AccountPushParamsOnoff"]] = None
    friend_found: Optional[List["AccountPushParamsOnoff"]] = None
    friend_accepted: Optional[List["AccountPushParamsOnoff"]] = None
    group_invite: Optional[List["AccountPushParamsOnoff"]] = None
    group_accepted: Optional[List["AccountPushParamsOnoff"]] = None
    birthday: Optional[List["AccountPushParamsOnoff"]] = None
    event_soon: Optional[List["AccountPushParamsOnoff"]] = None
    app_request: Optional[List["AccountPushParamsOnoff"]] = None
    sdk_open: Optional[List["AccountPushParamsOnoff"]] = None
class AccountPushParamsMode(enum.Enum):
    ON = "on"
    OFF = "off"
    NO_SOUND = "no_sound"
    NO_TEXT = "no_text"
class AccountPushParamsOnoff(enum.Enum):
    ON = "on"
    OFF = "off"
class AccountPushParamsSettings(enum.Enum):
    ON = "on"
    OFF = "off"
    FR_OF_FR = "fr_of_fr"
class AccountPushSettings(PydanticModel):
    disabled: Optional["BaseBoolInt"] = None
    disabled_until: Optional[int] = None
    settings: Optional["AccountPushParams"] = None
    conversations: Optional["AccountPushConversations"] = None
# --- User / account-settings models ---------------------------------------
class UsersUserMin(PydanticModel):
    # Minimal user profile (id + name + visibility flags).
    deactivated: Optional[str] = None
    first_name: Optional[str] = None
    hidden: Optional[int] = None
    id: Optional[int] = None
    last_name: Optional[str] = None
    can_access_closed: Optional[bool] = None
    is_closed: Optional[bool] = None
class UsersUserSettingsXtr(PydanticModel):
    # Extended profile-settings mixin, combined with UsersUserMin below.
    connections: Optional["UsersUserConnections"] = None
    bdate: Optional[str] = None
    bdate_visibility: Optional[int] = None
    city: Optional["BaseCity"] = None
    country: Optional["BaseCountry"] = None
    first_name: Optional[str] = None
    home_town: Optional[str] = None
    last_name: Optional[str] = None
    maiden_name: Optional[str] = None
    name_request: Optional["AccountNameRequest"] = None
    personal: Optional["UsersPersonal"] = None
    phone: Optional[str] = None
    relation: Optional["UsersUserRelation"] = None
    relation_partner: Optional["UsersUserMin"] = None
    relation_pending: Optional["BaseBoolInt"] = None
    relation_requests: Optional[List["UsersUserMin"]] = None
    screen_name: Optional[str] = None
    sex: Optional["BaseSex"] = None
    status: Optional[str] = None
    status_audio: Optional["AudioAudio"] = None
    interests: Optional["AccountUserSettingsInterests"] = None
    languages: Optional[List[str]] = None
class AccountUserSettings(UsersUserMin, UsersUserSettingsXtr):
    # Full account settings = minimal profile + extended settings + extras.
    nick_name: Optional[str] = None
    photo_200: Optional[str] = None
    is_service_account: Optional[bool] = None
    primary_profile: Optional["AccountUserSettings"] = None
    edu_signup_required: Optional[bool] = None
class AccountUserSettingsInterest(PydanticModel):
    # One titled free-text interest entry.
    title: Optional[str] = None
    value: Optional[str] = None
class AccountUserSettingsInterests(PydanticModel):
    activities: Optional["AccountUserSettingsInterest"] = None
    interests: Optional["AccountUserSettingsInterest"] = None
    music: Optional["AccountUserSettingsInterest"] = None
    tv: Optional["AccountUserSettingsInterest"] = None
    movies: Optional["AccountUserSettingsInterest"] = None
    books: Optional["AccountUserSettingsInterest"] = None
    games: Optional["AccountUserSettingsInterest"] = None
    quotes: Optional["AccountUserSettingsInterest"] = None
    about: Optional["AccountUserSettingsInterest"] = None
class AddressesFields(enum.Enum):
    # Selectable field names for address queries.
    ID = "id"
    TITLE = "title"
    ADDRESS = "address"
    ADDITIONAL_ADDRESS = "additional_address"
    COUNTRY_ID = "country_id"
    CITY_ID = "city_id"
    METRO_STATION_ID = "metro_station_id"
    LATITUDE = "latitude"
    LONGITUDE = "longitude"
    DISTANCE = "distance"
    WORK_INFO_STATUS = "work_info_status"
    TIMETABLE = "timetable"
    PHONE = "phone"
    TIME_OFFSET = "time_offset"
# --- Ads* models: access roles, accounts, and individual ads --------------
class AdsAccessRole(enum.Enum):
    ADMIN = "admin"
    MANAGER = "manager"
    REPORTS = "reports"
class AdsAccessRolePublic(enum.Enum):
    # Same vocabulary as AdsAccessRole but without "admin".
    MANAGER = "manager"
    REPORTS = "reports"
class AdsAccesses(PydanticModel):
    client_id: Optional[str] = None
    role: Optional["AdsAccessRole"] = None
class AdsAccount(PydanticModel):
    access_role: Optional["AdsAccessRole"] = None
    account_id: Optional[int] = None
    account_status: Optional["BaseBoolInt"] = None
    account_type: Optional["AdsAccountType"] = None
    account_name: Optional[str] = None
    can_view_budget: Optional[bool] = None
class AdsAccountType(enum.Enum):
    GENERAL = "general"
    AGENCY = "agency"
class AdsAd(PydanticModel):
    ad_format: Optional[int] = None
    ad_platform: Optional[Union[int, str]] = None
    all_limit: Optional[int] = None
    approved: Optional["AdsAdApproved"] = None
    campaign_id: Optional[int] = None
    category1_id: Optional[int] = None
    category2_id: Optional[int] = None
    cost_type: Optional["AdsAdCostType"] = None
    cpc: Optional[int] = None
    cpm: Optional[int] = None
    cpa: Optional[int] = None
    ocpm: Optional[int] = None
    autobidding_max_cost: Optional[int] = None
    disclaimer_medical: Optional["BaseBoolInt"] = None
    disclaimer_specialist: Optional["BaseBoolInt"] = None
    disclaimer_supplements: Optional["BaseBoolInt"] = None
    id: Optional[int] = None
    impressions_limit: Optional[int] = None
    impressions_limited: Optional["BaseBoolInt"] = None
    name: Optional[str] = None
    status: Optional["AdsAdStatus"] = None
    video: Optional["BaseBoolInt"] = None
class AdsAdApproved(enum.IntEnum):
    # Moderation state of an ad.
    NOT_MODERATED = 0
    PENDING_MODERATION = 1
    APPROVED = 2
    REJECTED = 3
class AdsAdCostType(enum.IntEnum):
    PER_CLICKS = 0
    PER_IMPRESSIONS = 1
    PER_ACTIONS = 2
    PER_IMPRESSIONS_OPTIMIZED = 3
class AdsAdLayout(PydanticModel):
    ad_format: Optional[int] = None
    campaign_id: Optional[int] = None
    cost_type: Optional["AdsAdCostType"] = None
    description: Optional[str] = None
    id: Optional[str] = None
    image_src: Optional[str] = None
    image_src_2x: Optional[str] = None
    link_domain: Optional[str] = None
    link_url: Optional[str] = None
    preview_link: Optional[Union[int, str]] = None
    title: Optional[str] = None
    video: Optional["BaseBoolInt"] = None
class AdsAdStatus(enum.IntEnum):
    STOPPED = 0
    STARTED = 1
    DELETED = 2
# --- Ads* models: campaigns, clients, demo stats --------------------------
class AdsCampaign(PydanticModel):
    # Note: the limits are strings here (as delivered by the API), not ints.
    all_limit: Optional[str] = None
    day_limit: Optional[str] = None
    id: Optional[int] = None
    name: Optional[str] = None
    start_time: Optional[int] = None
    status: Optional["AdsCampaignStatus"] = None
    stop_time: Optional[int] = None
    type: Optional["AdsCampaignType"] = None
class AdsCampaignStatus(enum.IntEnum):
    STOPPED = 0
    STARTED = 1
    DELETED = 2
class AdsCampaignType(enum.Enum):
    NORMAL = "normal"
    VK_APPS_MANAGED = "vk_apps_managed"
    MOBILE_APPS = "mobile_apps"
    PROMOTED_POSTS = "promoted_posts"
class AdsCategory(PydanticModel):
    id: Optional[int] = None
    name: Optional[str] = None
    subcategories: Optional[List["BaseObjectWithName"]] = None
class AdsClient(PydanticModel):
    all_limit: Optional[str] = None
    day_limit: Optional[str] = None
    id: Optional[int] = None
    name: Optional[str] = None
class AdsCriteriaSex(enum.IntEnum):
    ANY = 0
    MALE = 1
    FEMALE = 2
class AdsDemoStats(PydanticModel):
    id: Optional[int] = None
    stats: Optional["AdsDemostatsFormat"] = None
    type: Optional["AdsObjectType"] = None
class AdsDemostatsFormat(PydanticModel):
    age: Optional[List["AdsStatsAge"]] = None
    cities: Optional[List["AdsStatsCities"]] = None
    day: Optional[str] = None
    month: Optional[str] = None
    overall: Optional[int] = None
    sex: Optional[List["AdsStatsSex"]] = None
    sex_age: Optional[List["AdsStatsSexAge"]] = None
class AdsFloodStats(PydanticModel):
    # Rate-limit info: requests left and seconds until refresh -- TODO confirm units.
    left: Optional[int] = None
    refresh: Optional[int] = None
class AdsLinkStatus(PydanticModel):
    description: Optional[str] = None
    redirect_url: Optional[str] = None
    status: Optional[str] = None
class AdsLookalikeRequest(PydanticModel):
    id: Optional[int] = None
    create_time: Optional[int] = None
    update_time: Optional[int] = None
    scheduled_delete_time: Optional[int] = None
    status: Optional[str] = None
    source_type: Optional[str] = None
    source_retargeting_group_id: Optional[int] = None
    source_name: Optional[str] = None
    audience_count: Optional[int] = None
    save_audience_levels: Optional[List["AdsLookalikeRequestSaveAudienceLevel"]] = None
class AdsLookalikeRequestSaveAudienceLevel(PydanticModel):
    level: Optional[int] = None
    audience_count: Optional[int] = None
class AdsMusician(PydanticModel):
    id: Optional[int] = None
    name: Optional[str] = None
    avatar: Optional[str] = None
class AdsObjectType(enum.Enum):
    AD = "ad"
    CAMPAIGN = "campaign"
    CLIENT = "client"
    OFFICE = "office"
class AdsParagraphs(PydanticModel):
    paragraph: Optional[str] = None
class AdsPromotedPostReach(PydanticModel):
    # Reach/engagement counters for a promoted post.
    hide: Optional[int] = None
    id: Optional[int] = None
    join_group: Optional[int] = None
    links: Optional[int] = None
    reach_subscribers: Optional[int] = None
    reach_total: Optional[int] = None
    report: Optional[int] = None
    to_group: Optional[int] = None
    unsubscribe: Optional[int] = None
    video_views_100p: Optional[int] = None
    video_views_25p: Optional[int] = None
    video_views_3s: Optional[int] = None
    video_views_50p: Optional[int] = None
    video_views_75p: Optional[int] = None
    video_views_start: Optional[int] = None
# --- Ads* models: rejection reasons and statistics ------------------------
class AdsRejectReason(PydanticModel):
    comment: Optional[str] = None
    rules: Optional[List["AdsRules"]] = None
class AdsRules(PydanticModel):
    paragraphs: Optional[List["AdsParagraphs"]] = None
    title: Optional[str] = None
class AdsStats(PydanticModel):
    id: Optional[int] = None
    stats: Optional["AdsStatsFormat"] = None
    type: Optional["AdsObjectType"] = None
    views_times: Optional["AdsStatsViewsTimes"] = None
class AdsStatsAge(PydanticModel):
    clicks_rate: Optional[int] = None
    impressions_rate: Optional[int] = None
    value: Optional[str] = None
class AdsStatsCities(PydanticModel):
    clicks_rate: Optional[int] = None
    impressions_rate: Optional[int] = None
    name: Optional[str] = None
    value: Optional[int] = None
class AdsStatsFormat(PydanticModel):
    clicks: Optional[int] = None
    day: Optional[str] = None
    impressions: Optional[int] = None
    join_rate: Optional[int] = None
    month: Optional[str] = None
    overall: Optional[int] = None
    reach: Optional[int] = None
    spent: Optional[int] = None
    video_clicks_site: Optional[int] = None
    video_views: Optional[int] = None
    video_views_full: Optional[int] = None
    video_views_half: Optional[int] = None
class AdsStatsSex(PydanticModel):
    clicks_rate: Optional[int] = None
    impressions_rate: Optional[int] = None
    value: Optional["AdsStatsSexValue"] = None
class AdsStatsSexAge(PydanticModel):
    clicks_rate: Optional[int] = None
    impressions_rate: Optional[int] = None
    value: Optional[str] = None
class AdsStatsSexValue(enum.Enum):
    FEMALE = "f"
    MALE = "m"
class AdsStatsViewsTimes(PydanticModel):
    # NOTE(review): views_ads_times_5 is typed str while its siblings are
    # int -- looks schema-driven, but verify against the generator source.
    views_ads_times_1: Optional[int] = None
    views_ads_times_2: Optional[int] = None
    views_ads_times_3: Optional[int] = None
    views_ads_times_4: Optional[int] = None
    views_ads_times_5: Optional[str] = None
    views_ads_times_6: Optional[int] = None
    views_ads_times_7: Optional[int] = None
    views_ads_times_8: Optional[int] = None
    views_ads_times_9: Optional[int] = None
    views_ads_times_10: Optional[int] = None
    views_ads_times_11_plus: Optional[int] = None
# --- Ads* models: targeting criteria and office users ---------------------
class AdsCriteria(PydanticModel):
    # Ad targeting criteria; the str fields hold comma-separated id lists
    # or free text as delivered by the API -- TODO confirm exact formats.
    age_from: Optional[int] = None
    age_to: Optional[int] = None
    apps: Optional[str] = None
    apps_not: Optional[str] = None
    birthday: Optional[int] = None
    cities: Optional[str] = None
    cities_not: Optional[str] = None
    country: Optional[int] = None
    districts: Optional[str] = None
    groups: Optional[str] = None
    interest_categories: Optional[str] = None
    interests: Optional[str] = None
    paying: Optional["BaseBoolInt"] = None
    positions: Optional[str] = None
    religions: Optional[str] = None
    retargeting_groups: Optional[str] = None
    retargeting_groups_not: Optional[str] = None
    school_from: Optional[int] = None
    school_to: Optional[int] = None
    schools: Optional[str] = None
    sex: Optional["AdsCriteriaSex"] = None
    stations: Optional[str] = None
    statuses: Optional[str] = None
    streets: Optional[str] = None
    travellers: Optional["BasePropertyExists"] = None
    uni_from: Optional[int] = None
    uni_to: Optional[int] = None
    user_browsers: Optional[str] = None
    user_devices: Optional[str] = None
    user_os: Optional[str] = None
class AdsTargSettings(AdsCriteria):
    # Criteria plus the ids tying them to a concrete ad/campaign.
    id: Optional[int] = None
    campaign_id: Optional[int] = None
class AdsTargStats(PydanticModel):
    audience_count: Optional[int] = None
    recommended_cpc: Optional[int] = None
    recommended_cpm: Optional[int] = None
    recommended_cpc_50: Optional[int] = None
    recommended_cpm_50: Optional[int] = None
    recommended_cpc_70: Optional[int] = None
    recommended_cpm_70: Optional[int] = None
    recommended_cpc_90: Optional[int] = None
    recommended_cpm_90: Optional[int] = None
class AdsTargSuggestions(PydanticModel):
    id: Optional[int] = None
    name: Optional[str] = None
class AdsTargSuggestionsCities(PydanticModel):
    id: Optional[int] = None
    name: Optional[str] = None
    parent: Optional[str] = None
class AdsTargSuggestionsRegions(PydanticModel):
    id: Optional[int] = None
    name: Optional[str] = None
    type: Optional[str] = None
class AdsTargSuggestionsSchools(PydanticModel):
    desc: Optional[str] = None
    id: Optional[int] = None
    name: Optional[str] = None
    parent: Optional[str] = None
    type: Optional["AdsTargSuggestionsSchoolsType"] = None
class AdsTargSuggestionsSchoolsType(enum.Enum):
    SCHOOL = "school"
    UNIVERSITY = "university"
    FACULTY = "faculty"
    CHAIR = "chair"
class AdsTargetGroup(PydanticModel):
    audience_count: Optional[int] = None
    domain: Optional[str] = None
    id: Optional[int] = None
    lifetime: Optional[int] = None
    name: Optional[str] = None
    pixel: Optional[str] = None
class AdsUpdateofficeusersResult(PydanticModel):
    user_id: Optional[int] = None
    is_success: Optional[bool] = None
    error: Optional["BaseError"] = None
class AdsUserSpecification(PydanticModel):
    user_id: Optional[int] = None
    role: Optional["AdsAccessRolePublic"] = None
    grant_access_to_all_clients: Optional[bool] = None
    client_ids: Optional[List[int]] = None
    view_budget: Optional[bool] = None
class AdsUserSpecificationCutted(PydanticModel):
    user_id: Optional[int] = None
    role: Optional["AdsAccessRolePublic"] = None
    client_id: Optional[int] = None
    view_budget: Optional[bool] = None
class AdsUsers(PydanticModel):
    accesses: Optional[List["AdsAccesses"]] = None
    user_id: Optional[int] = None
# --- Adsweb* response fragments -------------------------------------------
# Inner items of the adsweb.* API responses; class names encode the method
# and the JSON path of the nested item.
class AdswebGetadcategoriesResponseCategoriesCategory(PydanticModel):
    id: Optional[int] = None
    name: Optional[str] = None
class AdswebGetadunitsResponseAdUnitsAdUnit(PydanticModel):
    id: Optional[int] = None
    site_id: Optional[int] = None
    name: Optional[str] = None
class AdswebGetfraudhistoryResponseEntriesEntry(PydanticModel):
    site_id: Optional[int] = None
    day: Optional[str] = None
class AdswebGetsitesResponseSitesSite(PydanticModel):
    id: Optional[int] = None
    status_user: Optional[str] = None
    status_moder: Optional[str] = None
    domains: Optional[str] = None
class AdswebGetstatisticsResponseItemsItem(PydanticModel):
    site_id: Optional[int] = None
    ad_unit_id: Optional[int] = None
    overall_count: Optional[int] = None
    months_count: Optional[int] = None
    month_min: Optional[str] = None
    month_max: Optional[str] = None
    days_count: Optional[int] = None
    day_min: Optional[str] = None
    day_max: Optional[str] = None
    hours_count: Optional[int] = None
    hour_min: Optional[str] = None
    hour_max: Optional[str] = None
# --- Appwidgets* / Apps* models -------------------------------------------
class AppwidgetsPhoto(PydanticModel):
    id: Optional[str] = None
    images: Optional[List["BaseImage"]] = None
class AppwidgetsPhotos(PydanticModel):
    count: Optional[int] = None
    items: Optional[List["AppwidgetsPhoto"]] = None
class AppsAppMin(PydanticModel):
    # Minimal application card (icons keyed by pixel size).
    type: Optional["AppsAppType"] = None
    id: Optional[int] = None
    title: Optional[str] = None
    author_owner_id: Optional[int] = None
    is_installed: Optional[bool] = None
    icon_139: Optional[str] = None
    icon_150: Optional[str] = None
    icon_278: Optional[str] = None
    icon_576: Optional[str] = None
    background_loader_color: Optional[str] = None
    loader_icon: Optional[str] = None
    icon_75: Optional[str] = None
    need_policy_confirmation: Optional[bool] = None
class AppsApp(AppsAppMin):
    # Full application card: the minimal fields plus catalog metadata.
    author_url: Optional[str] = None
    banner_1120: Optional[str] = None
    banner_560: Optional[str] = None
    icon_16: Optional[str] = None
    is_new: Optional["BaseBoolInt"] = None
    push_enabled: Optional["BaseBoolInt"] = None
    screen_orientation: Optional[int] = None
    friends: Optional[List[int]] = None
    catalog_position: Optional[int] = None
    description: Optional[str] = None
    genre: Optional[str] = None
    genre_id: Optional[int] = None
    international: Optional[bool] = None
    is_in_catalog: Optional[int] = None
    leaderboard_type: Optional["AppsAppLeaderboardType"] = None
    members_count: Optional[int] = None
    platform_id: Optional[str] = None
    published_date: Optional[int] = None
    screen_name: Optional[str] = None
    section: Optional[str] = None
class AppsAppLeaderboardType(enum.IntEnum):
    NOT_SUPPORTED = 0
    LEVELS = 1
    POINTS = 2
class AppsAppType(enum.Enum):
    APP = "app"
    GAME = "game"
    SITE = "site"
    STANDALONE = "standalone"
    VK_APP = "vk_app"
    COMMUNITY_APP = "community_app"
    HTML5_GAME = "html5_game"
    MINI_APP = "mini_app"
class AppsLeaderboard(PydanticModel):
    level: Optional[int] = None
    points: Optional[int] = None
    score: Optional[int] = None
    user_id: Optional[int] = None
class AppsScope(PydanticModel):
    name: Optional[str] = None
    title: Optional[str] = None
class AudioAudio(PydanticModel):
    # An audio track attachment (owner_id + id identify it; access_key
    # authorizes access to non-public tracks).
    access_key: Optional[str] = None
    artist: Optional[str] = None
    id: Optional[int] = None
    owner_id: Optional[int] = None
    title: Optional[str] = None
    url: Optional[str] = None
    duration: Optional[int] = None
    date: Optional[int] = None
    album_id: Optional[int] = None
    genre_id: Optional[int] = None
    performer: Optional[str] = None
# --- Base* models: shared primitives used across all sections -------------
class BaseBoolInt(enum.IntEnum):
    # API-style boolean encoded as 0/1.
    NO = 0
    YES = 1
class BaseCity(PydanticModel):
    id: Optional[int] = None
    title: Optional[str] = None
class BaseCommentsInfo(PydanticModel):
    can_post: Optional["BaseBoolInt"] = None
    count: Optional[int] = None
    groups_can_post: Optional[bool] = None
    donut: Optional["WallWallpostCommentsDonut"] = None
class BaseCountry(PydanticModel):
    id: Optional[int] = None
    title: Optional[str] = None
class BaseCropPhoto(PydanticModel):
    photo: Optional["PhotosPhoto"] = None
    crop: Optional["BaseCropPhotoCrop"] = None
    rect: Optional["BaseCropPhotoRect"] = None
class BaseCropPhotoCrop(PydanticModel):
    # Crop rectangle corners (x, y) .. (x2, y2).
    x: Optional[int] = None
    y: Optional[int] = None
    x2: Optional[int] = None
    y2: Optional[int] = None
class BaseCropPhotoRect(PydanticModel):
    x: Optional[int] = None
    y: Optional[int] = None
    x2: Optional[int] = None
    y2: Optional[int] = None
class BaseError(PydanticModel):
    # Standard API error envelope.
    error_code: Optional[int] = None
    error_subcode: Optional[int] = None
    error_msg: Optional[str] = None
    error_text: Optional[str] = None
    request_params: Optional[List["BaseRequestParam"]] = None
class BaseGeo(PydanticModel):
    coordinates: Optional["BaseGeoCoordinates"] = None
    place: Optional["BasePlace"] = None
    showmap: Optional[int] = None
    type: Optional[str] = None
class BaseGeoCoordinates(PydanticModel):
    # NOTE(review): latitude/longitude typed int here (schema-driven);
    # fractional coordinates would be truncated/rejected -- verify.
    latitude: Optional[int] = None
    longitude: Optional[int] = None
class BaseGradientPoint(PydanticModel):
    color: Optional[str] = None
    position: Optional[int] = None
class BaseLikes(PydanticModel):
    count: Optional[int] = None
    user_likes: Optional["BaseBoolInt"] = None
class BaseLikesInfo(PydanticModel):
    can_like: Optional["BaseBoolInt"] = None
    can_publish: Optional["BaseBoolInt"] = None
    count: Optional[int] = None
    user_likes: Optional[int] = None
class BaseLink(PydanticModel):
    # A rich link attachment (optionally carrying a product, rating, etc.).
    application: Optional["BaseLinkApplication"] = None
    button: Optional["BaseLinkButton"] = None
    caption: Optional[str] = None
    description: Optional[str] = None
    id: Optional[str] = None
    is_favorite: Optional[bool] = None
    photo: Optional["PhotosPhoto"] = None
    preview_page: Optional[str] = None
    preview_url: Optional[str] = None
    product: Optional["BaseLinkProduct"] = None
    rating: Optional["BaseLinkRating"] = None
    title: Optional[str] = None
    url: Optional[str] = None
    target_object: Optional["LinkTargetObject"] = None
    is_external: Optional[bool] = None
    video: Optional["VideoVideo"] = None
class BaseLinkApplication(PydanticModel):
    app_id: Optional[int] = None
    store: Optional["BaseLinkApplicationStore"] = None
class BaseLinkApplicationStore(PydanticModel):
    id: Optional[int] = None
    name: Optional[str] = None
class BaseLinkButton(PydanticModel):
    action: Optional["BaseLinkButtonAction"] = None
    title: Optional[str] = None
    block_id: Optional[str] = None
    section_id: Optional[str] = None
    curator_id: Optional[int] = None
    owner_id: Optional[int] = None
    icon: Optional[str] = None
    style: Optional["BaseLinkButtonStyle"] = None
class BaseLinkButtonAction(PydanticModel):
    type: Optional["BaseLinkButtonActionType"] = None
    url: Optional[str] = None
    consume_reason: Optional[str] = None
class BaseLinkButtonActionType(enum.Enum):
    OPEN_URL = "open_url"
class BaseLinkButtonStyle(enum.Enum):
    PRIMARY = "primary"
    SECONDARY = "secondary"
class BaseLinkProduct(PydanticModel):
    price: Optional["MarketPrice"] = None
    merchant: Optional[str] = None
    orders_count: Optional[int] = None
class BaseLinkProductStatus(enum.Enum):
    ACTIVE = "active"
    BLOCKED = "blocked"
    SOLD = "sold"
    DELETED = "deleted"
    ARCHIVED = "archived"
class BaseLinkRating(PydanticModel):
    reviews_count: Optional[int] = None
    stars: Optional[int] = None
class BaseMessageError(PydanticModel):
    code: Optional[int] = None
    description: Optional[str] = None
class BaseObjectCount(PydanticModel):
    count: Optional[int] = None
class BaseObjectWithName(PydanticModel):
    id: Optional[int] = None
    name: Optional[str] = None
class BasePlace(PydanticModel):
    address: Optional[str] = None
    checkins: Optional[int] = None
    city: Optional[str] = None
    country: Optional[str] = None
    created: Optional[int] = None
    icon: Optional[str] = None
    id: Optional[int] = None
    latitude: Optional[int] = None
    longitude: Optional[int] = None
    title: Optional[str] = None
    type: Optional[str] = None
class BasePropertyExists(enum.IntEnum):
    # Marker enum: the property is present iff the field equals 1.
    PROPERTY_EXISTS = 1
class BaseRepostsInfo(PydanticModel):
    count: Optional[int] = None
    wall_count: Optional[int] = None
    mail_count: Optional[int] = None
    user_reposted: Optional[int] = None
class BaseRequestParam(PydanticModel):
    key: Optional[str] = None
    value: Optional[str] = None
class BaseSex(enum.IntEnum):
    UNKNOWN = 0
    FEMALE = 1
    MALE = 2
class BaseSticker(PydanticModel):
    sticker_id: Optional[int] = None
    product_id: Optional[int] = None
    images: Optional[List["BaseImage"]] = None
    images_with_background: Optional[List["BaseImage"]] = None
    animation_url: Optional[str] = None
    animations: Optional[List["BaseStickerAnimation"]] = None
    is_allowed: Optional[bool] = None
class BaseStickerAnimation(PydanticModel):
    type: Optional[str] = None
    url: Optional[str] = None
class BaseUploadServer(PydanticModel):
    upload_url: Optional[str] = None
class BaseUserGroupFields(enum.Enum):
    # Union of selectable field names for user and group profile queries.
    ABOUT = "about"
    ACTION_BUTTON = "action_button"
    ACTIVITIES = "activities"
    ACTIVITY = "activity"
    ADDRESSES = "addresses"
    ADMIN_LEVEL = "admin_level"
    AGE_LIMITS = "age_limits"
    AUTHOR_ID = "author_id"
    BAN_INFO = "ban_info"
    BDATE = "bdate"
    BLACKLISTED = "blacklisted"
    BLACKLISTED_BY_ME = "blacklisted_by_me"
    BOOKS = "books"
    CAN_CREATE_TOPIC = "can_create_topic"
    CAN_MESSAGE = "can_message"
    CAN_POST = "can_post"
    CAN_SEE_ALL_POSTS = "can_see_all_posts"
    CAN_SEE_AUDIO = "can_see_audio"
    CAN_SEND_FRIEND_REQUEST = "can_send_friend_request"
    CAN_UPLOAD_VIDEO = "can_upload_video"
    CAN_WRITE_PRIVATE_MESSAGE = "can_write_private_message"
    CAREER = "career"
    CITY = "city"
    COMMON_COUNT = "common_count"
    CONNECTIONS = "connections"
    CONTACTS = "contacts"
    COUNTERS = "counters"
    COUNTRY = "country"
    COVER = "cover"
    CROP_PHOTO = "crop_photo"
    DEACTIVATED = "deactivated"
    DESCRIPTION = "description"
    DOMAIN = "domain"
    EDUCATION = "education"
    EXPORTS = "exports"
    FINISH_DATE = "finish_date"
    FIXED_POST = "fixed_post"
    FOLLOWERS_COUNT = "followers_count"
    FRIEND_STATUS = "friend_status"
    GAMES = "games"
    HAS_MARKET_APP = "has_market_app"
    HAS_MOBILE = "has_mobile"
    HAS_PHOTO = "has_photo"
    HOME_TOWN = "home_town"
    ID = "id"
    INTERESTS = "interests"
    IS_ADMIN = "is_admin"
    IS_CLOSED = "is_closed"
    IS_FAVORITE = "is_favorite"
    IS_FRIEND = "is_friend"
    IS_HIDDEN_FROM_FEED = "is_hidden_from_feed"
    IS_MEMBER = "is_member"
    IS_MESSAGES_BLOCKED = "is_messages_blocked"
    CAN_SEND_NOTIFY = "can_send_notify"
    IS_SUBSCRIBED = "is_subscribed"
    LAST_SEEN = "last_seen"
    LINKS = "links"
    LISTS = "lists"
    MAIDEN_NAME = "maiden_name"
    MAIN_ALBUM_ID = "main_album_id"
    MAIN_SECTION = "main_section"
    MARKET = "market"
    MEMBER_STATUS = "member_status"
    MEMBERS_COUNT = "members_count"
    MILITARY = "military"
    MOVIES = "movies"
    MUSIC = "music"
    NAME = "name"
    NICKNAME = "nickname"
    OCCUPATION = "occupation"
    ONLINE = "online"
    ONLINE_STATUS = "online_status"
    PERSONAL = "personal"
    PHONE = "phone"
    PHOTO_100 = "photo_100"
    PHOTO_200 = "photo_200"
    PHOTO_200_ORIG = "photo_200_orig"
    PHOTO_400_ORIG = "photo_400_orig"
    PHOTO_50 = "photo_50"
    PHOTO_ID = "photo_id"
    PHOTO_MAX = "photo_max"
    PHOTO_MAX_ORIG = "photo_max_orig"
    QUOTES = "quotes"
    RELATION = "relation"
    RELATIVES = "relatives"
    SCHOOLS = "schools"
    SCREEN_NAME = "screen_name"
    SEX = "sex"
    SITE = "site"
    START_DATE = "start_date"
    STATUS = "status"
    TIMEZONE = "timezone"
    TRENDING = "trending"
    TV = "tv"
    TYPE = "type"
    UNIVERSITIES = "universities"
    VERIFIED = "verified"
    WALL_COMMENTS = "wall_comments"
    WIKI_PAGE = "wiki_page"
    VK_ADMIN_STATUS = "vk_admin_status"
class BaseUserId(PydanticModel):
    user_id: Optional[int] = None
class BoardDefaultOrder(enum.IntEnum):
    # Topic sort order; negative values flip the direction to ascending.
    DESC_UPDATED = 1
    DESC_CREATED = 2
    ASC_UPDATED = -1
    ASC_CREATED = -2
class BoardTopicComment(PydanticModel):
    attachments: Optional[List["WallCommentAttachment"]] = None
    date: Optional[int] = None
    from_id: Optional[int] = None
    id: Optional[int] = None
    real_offset: Optional[int] = None
    text: Optional[str] = None
    can_edit: Optional["BaseBoolInt"] = None
    likes: Optional["BaseLikesInfo"] = None
class BoardTopicPoll(PydanticModel):
    owner_id: Optional[int] = None
    poll_id: Optional[int] = None
    created: Optional[int] = None
    is_closed: Optional["BaseBoolInt"] = None
    question: Optional[str] = None
    votes: Optional[int] = None
    answer_id: Optional[int] = None
    answers: Optional[List["PollsAnswer"]] = None
class CallbackBoardPostDelete(PydanticModel):
topic_owner_id: Optional[int] = None
topic_id: Optional[int] = None
id: Optional[int] = None
class CallbackConfirmationMessage(PydanticModel):
type: Optional["CallbackMessageType"] = None
group_id: Optional[int] = None
secret: Optional[str] = None
class CallbackDonutMoneyWithdraw(PydanticModel):
amount: Optional[int] = None
amount_without_fee: Optional[int] = None
class CallbackDonutMoneyWithdrawError(PydanticModel):
reason: Optional[str] = None
class CallbackDonutSubscriptionCancelled(PydanticModel):
user_id: Optional[int] = None
class CallbackDonutSubscriptionCreate(PydanticModel):
user_id: Optional[int] = None
amount: Optional[int] = None
amount_without_fee: Optional[int] = None
class CallbackDonutSubscriptionExpired(PydanticModel):
user_id: Optional[int] = None
class CallbackDonutSubscriptionPriceChanged(PydanticModel):
user_id: Optional[int] = None
amount_old: Optional[int] = None
amount_new: Optional[int] = None
amount_diff: Optional[int] = None
amount_diff_without_fee: Optional[int] = None
class CallbackDonutSubscriptionProlonged(PydanticModel):
user_id: Optional[int] = None
amount: Optional[int] = None
amount_without_fee: Optional[int] = None
class CallbackGroupChangePhoto(PydanticModel):
user_id: Optional[int] = None
photo: Optional["PhotosPhoto"] = None
class CallbackGroupChangeSettings(PydanticModel):
user_id: Optional[int] = None
self: Optional["BaseBoolInt"] = None
class CallbackGroupJoin(PydanticModel):
user_id: Optional[int] = None
join_type: Optional["CallbackGroupJoinType"] = None
class CallbackGroupJoinType(enum.Enum):
JOIN = "join"
UNSURE = "unsure"
ACCEPTED = "accepted"
APPROVED = "approved"
REQUEST = "request"
class CallbackGroupLeave(PydanticModel):
user_id: Optional[int] = None
self: Optional["BaseBoolInt"] = None
class CallbackGroupMarket(enum.IntEnum):
DISABLED = 0
OPEN = 1
class CallbackGroupOfficerRole(enum.IntEnum):
NONE = 0
MODERATOR = 1
EDITOR = 2
ADMINISTRATOR = 3
class CallbackGroupOfficersEdit(PydanticModel):
admin_id: Optional[int] = None
user_id: Optional[int] = None
level_old: Optional["CallbackGroupOfficerRole"] = None
level_new: Optional["CallbackGroupOfficerRole"] = None
# --- Settings-diff, like, market-comment and message callback payload models ---
class CallbackGroupSettingsChanges(PydanticModel):
    """Changed-settings snapshot carried by `group_change_settings` events."""
    title: Optional[str] = None
    description: Optional[str] = None
    access: Optional["GroupsGroupIsClosed"] = None
    screen_name: Optional[str] = None
    public_category: Optional[int] = None
    public_subcategory: Optional[int] = None
    age_limits: Optional["GroupsGroupFullAgeLimits"] = None
    website: Optional[str] = None
    enable_status_default: Optional["GroupsGroupWall"] = None
    enable_audio: Optional["GroupsGroupAudio"] = None
    enable_video: Optional["GroupsGroupVideo"] = None
    enable_photo: Optional["GroupsGroupPhotos"] = None
    enable_market: Optional["CallbackGroupMarket"] = None
class CallbackLikeAddRemove(PydanticModel):
    """Payload for like add/remove callback events."""
    liker_id: Optional[int] = None
    object_type: Optional[str] = None
    object_owner_id: Optional[int] = None
    object_id: Optional[int] = None
    post_id: Optional[int] = None
    thread_reply_id: Optional[int] = None
class CallbackMarketComment(PydanticModel):
    """Payload for `market_comment_*` callback events."""
    id: Optional[int] = None
    from_id: Optional[int] = None
    date: Optional[int] = None
    text: Optional[str] = None
    market_owner_id: Optional[int] = None
    photo_id: Optional[int] = None
class CallbackMarketCommentDelete(PydanticModel):
    """Payload for the `market_comment_delete` callback event."""
    owner_id: Optional[int] = None
    id: Optional[int] = None
    user_id: Optional[int] = None
    item_id: Optional[int] = None
class CallbackMessageAllow(PydanticModel):
    """Payload for the `message_allow` callback event."""
    user_id: Optional[int] = None
    key: Optional[str] = None
class CallbackMessageBase(PydanticModel):
    """Generic callback envelope: event type, raw object payload, group id."""
    type: Optional["CallbackMessageType"] = None
    object: Optional[dict] = None
    group_id: Optional[int] = None
class CallbackMessageDeny(PydanticModel):
    """Payload for the `message_deny` callback event."""
    user_id: Optional[int] = None
class CallbackMessageType(enum.Enum):
    """Event-type discriminator for callback envelopes (`CallbackMessageBase.type`)."""
    AUDIO_NEW = "audio_new"
    BOARD_POST_NEW = "board_post_new"
    BOARD_POST_EDIT = "board_post_edit"
    BOARD_POST_RESTORE = "board_post_restore"
    BOARD_POST_DELETE = "board_post_delete"
    CONFIRMATION = "confirmation"
    GROUP_LEAVE = "group_leave"
    GROUP_JOIN = "group_join"
    GROUP_CHANGE_PHOTO = "group_change_photo"
    GROUP_CHANGE_SETTINGS = "group_change_settings"
    GROUP_OFFICERS_EDIT = "group_officers_edit"
    LEAD_FORMS_NEW = "lead_forms_new"
    MARKET_COMMENT_NEW = "market_comment_new"
    MARKET_COMMENT_DELETE = "market_comment_delete"
    MARKET_COMMENT_EDIT = "market_comment_edit"
    MARKET_COMMENT_RESTORE = "market_comment_restore"
    MESSAGE_ALLOW = "message_allow"
    MESSAGE_NEW = "message_new"
    MESSAGE_DENY = "message_deny"
    MESSAGE_READ = "message_read"
    MESSAGE_REPLY = "message_reply"
    MESSAGE_EDIT = "message_edit"
    MESSAGE_TYPING_STATE = "message_typing_state"
    # NOTE(review): both MESSAGE_EDIT ("message_edit") and MESSAGES_EDIT
    # ("messages_edit") exist in the upstream schema; kept verbatim.
    MESSAGES_EDIT = "messages_edit"
    PHOTO_NEW = "photo_new"
    PHOTO_COMMENT_NEW = "photo_comment_new"
    PHOTO_COMMENT_DELETE = "photo_comment_delete"
    PHOTO_COMMENT_EDIT = "photo_comment_edit"
    PHOTO_COMMENT_RESTORE = "photo_comment_restore"
    POLL_VOTE_NEW = "poll_vote_new"
    USER_BLOCK = "user_block"
    USER_UNBLOCK = "user_unblock"
    VIDEO_NEW = "video_new"
    VIDEO_COMMENT_NEW = "video_comment_new"
    VIDEO_COMMENT_DELETE = "video_comment_delete"
    VIDEO_COMMENT_EDIT = "video_comment_edit"
    VIDEO_COMMENT_RESTORE = "video_comment_restore"
    WALL_POST_NEW = "wall_post_new"
    WALL_REPLY_NEW = "wall_reply_new"
    WALL_REPLY_EDIT = "wall_reply_edit"
    WALL_REPLY_DELETE = "wall_reply_delete"
    WALL_REPLY_RESTORE = "wall_reply_restore"
    WALL_REPOST = "wall_repost"
# --- Photo / poll / QR / user-block / video / wall callback payload models ---
class CallbackPhotoComment(PydanticModel):
    """Payload for `photo_comment_*` callback events."""
    id: Optional[int] = None
    from_id: Optional[int] = None
    date: Optional[int] = None
    text: Optional[str] = None
    photo_owner_id: Optional[int] = None
class CallbackPhotoCommentDelete(PydanticModel):
    """Payload for the `photo_comment_delete` callback event."""
    id: Optional[int] = None
    owner_id: Optional[int] = None
    user_id: Optional[int] = None
    photo_id: Optional[int] = None
class CallbackPollVoteNew(PydanticModel):
    """Payload for the `poll_vote_new` callback event."""
    owner_id: Optional[int] = None
    poll_id: Optional[int] = None
    option_id: Optional[int] = None
    user_id: Optional[int] = None
class CallbackQrScan(PydanticModel):
    """Payload for a QR-code scan callback event."""
    user_id: Optional[int] = None
    data: Optional[str] = None
    type: Optional[str] = None
    subtype: Optional[str] = None
    reread: Optional[bool] = None
class CallbackUserBlock(PydanticModel):
    """Payload for the `user_block` callback event."""
    admin_id: Optional[int] = None
    user_id: Optional[int] = None
    unblock_date: Optional[int] = None
    reason: Optional[int] = None
    comment: Optional[str] = None
class CallbackUserUnblock(PydanticModel):
    """Payload for the `user_unblock` callback event."""
    admin_id: Optional[int] = None
    user_id: Optional[int] = None
    by_end_date: Optional[int] = None
class CallbackVideoComment(PydanticModel):
    """Payload for `video_comment_*` callback events."""
    id: Optional[int] = None
    from_id: Optional[int] = None
    date: Optional[int] = None
    text: Optional[str] = None
    video_owner_id: Optional[int] = None
class CallbackVideoCommentDelete(PydanticModel):
    """Payload for the `video_comment_delete` callback event."""
    id: Optional[int] = None
    owner_id: Optional[int] = None
    user_id: Optional[int] = None
    video_id: Optional[int] = None
class CallbackWallCommentDelete(PydanticModel):
    """Payload for the `wall_reply_delete` callback event."""
    owner_id: Optional[int] = None
    id: Optional[int] = None
    user_id: Optional[int] = None
    post_id: Optional[int] = None
# --- Calls objects and comment-thread model ---
class CallsCall(PydanticModel):
    """A voice/video call record."""
    duration: Optional[int] = None
    initiator_id: Optional[int] = None
    receiver_id: Optional[int] = None
    state: Optional["CallsEndState"] = None
    time: Optional[int] = None
    video: Optional[bool] = None
    participants: Optional["CallsParticipants"] = None
class CallsEndState(enum.Enum):
    """Terminal state of a call (`CallsCall.state`)."""
    CANCELED_BY_INITIATOR = "canceled_by_initiator"
    CANCELED_BY_RECEIVER = "canceled_by_receiver"
    REACHED = "reached"
class CallsParticipants(PydanticModel):
    """Participant id list plus total count for a call."""
    list: Optional[List[int]] = None
    count: Optional[int] = None
class CommentThread(PydanticModel):
    """A reply thread attached to a comment."""
    can_post: Optional[bool] = None
    count: Optional[int] = None
    groups_can_post: Optional[bool] = None
    items: Optional[List["WallWallComment"]] = None
    show_reply_button: Optional[bool] = None
# --- Generic id/title object and Database catalog models ---
class BaseObject(PydanticModel):
    """Minimal id + title pair reused by several catalog objects."""
    id: Optional[int] = None
    title: Optional[str] = None
class DatabaseCity(BaseObject):
    """City entry; extends BaseObject with region info and an importance flag."""
    area: Optional[str] = None
    region: Optional[str] = None
    important: Optional["BaseBoolInt"] = None
class DatabaseFaculty(PydanticModel):
    """University faculty entry (id + title)."""
    id: Optional[int] = None
    title: Optional[str] = None
class DatabaseRegion(PydanticModel):
    """Region entry (id + title)."""
    id: Optional[int] = None
    title: Optional[str] = None
class DatabaseSchool(PydanticModel):
    """School entry (id + title)."""
    id: Optional[int] = None
    title: Optional[str] = None
class DatabaseStation(PydanticModel):
    """Metro station entry; `color` is presumably the line color — confirm upstream."""
    city_id: Optional[int] = None
    color: Optional[str] = None
    id: Optional[int] = None
    name: Optional[str] = None
class DatabaseUniversity(PydanticModel):
    """University entry (id + title)."""
    id: Optional[int] = None
    title: Optional[str] = None
# --- Document objects and donut subscription info ---
class DocsDoc(PydanticModel):
    """An uploaded document (file) object."""
    id: Optional[int] = None
    owner_id: Optional[int] = None
    title: Optional[str] = None
    size: Optional[int] = None
    ext: Optional[str] = None
    url: Optional[str] = None
    date: Optional[int] = None
    type: Optional[int] = None
    preview: Optional["DocsDocPreview"] = None
    is_licensed: Optional["BaseBoolInt"] = None
    access_key: Optional[str] = None
    tags: Optional[List[str]] = None
class DocsDocAttachmentType(enum.Enum):
    """Document attachment subtype."""
    DOC = "doc"
    GRAFFITI = "graffiti"
    AUDIO_MESSAGE = "audio_message"
class DocsDocPreview(PydanticModel):
    """Preview container; at most one media-specific field is expected to be set."""
    audio_msg: Optional["DocsDocPreviewAudioMsg"] = None
    graffiti: Optional["DocsDocPreviewGraffiti"] = None
    photo: Optional["DocsDocPreviewPhoto"] = None
    video: Optional["DocsDocPreviewVideo"] = None
class DocsDocPreviewAudioMsg(PydanticModel):
    """Audio-message preview: duration, mp3/ogg links, waveform samples."""
    duration: Optional[int] = None
    link_mp3: Optional[str] = None
    link_ogg: Optional[str] = None
    waveform: Optional[List[int]] = None
class DocsDocPreviewGraffiti(PydanticModel):
    """Graffiti preview: source URL plus pixel dimensions."""
    src: Optional[str] = None
    width: Optional[int] = None
    height: Optional[int] = None
class DocsDocPreviewPhoto(PydanticModel):
    """Photo preview as a list of available sizes."""
    sizes: Optional[List["DocsDocPreviewPhotoSizes"]] = None
class DocsDocPreviewPhotoSizes(PydanticModel):
    """One photo-preview size variant."""
    src: Optional[str] = None
    width: Optional[int] = None
    height: Optional[int] = None
    type: Optional["PhotosPhotoSizesType"] = None
class DocsDocPreviewVideo(PydanticModel):
    """Video preview: source URL, dimensions, file size."""
    src: Optional[str] = None
    width: Optional[int] = None
    height: Optional[int] = None
    file_size: Optional[int] = None
class DocsDocTypes(PydanticModel):
    """Document type bucket with a per-type document count."""
    id: Optional[int] = None
    name: Optional[str] = None
    count: Optional[int] = None
class DonutDonatorSubscriptionInfo(PydanticModel):
    """A donor's active donut subscription summary."""
    owner_id: Optional[int] = None
    next_payment_date: Optional[int] = None
    amount: Optional[int] = None
    status: Optional[str] = None
# --- Event attachments and Fave (bookmarks) models ---
class EventsEventAttach(PydanticModel):
    """An event attached to a post/message."""
    address: Optional[str] = None
    button_text: Optional[str] = None
    friends: Optional[List[int]] = None
    id: Optional[int] = None
    is_favorite: Optional[bool] = None
    member_status: Optional["GroupsGroupFullMemberStatus"] = None
    text: Optional[str] = None
    time: Optional[int] = None
class FaveBookmark(PydanticModel):
    """A saved bookmark; `type` selects which payload field is populated."""
    added_date: Optional[int] = None
    link: Optional["BaseLink"] = None
    post: Optional["WallWallpostFull"] = None
    product: Optional["MarketMarketItem"] = None
    seen: Optional[bool] = None
    tags: Optional[List["FaveTag"]] = None
    type: Optional["FaveBookmarkType"] = None
    video: Optional["VideoVideo"] = None
class FaveBookmarkType(enum.Enum):
    """Kind of bookmarked object (`FaveBookmark.type`)."""
    POST = "post"
    VIDEO = "video"
    PRODUCT = "product"
    ARTICLE = "article"
    LINK = "link"
class FavePage(PydanticModel):
    """A bookmarked user or community page."""
    description: Optional[str] = None
    group: Optional["GroupsGroupFull"] = None
    tags: Optional[List["FaveTag"]] = None
    type: Optional["FavePageType"] = None
    updated_date: Optional[int] = None
    user: Optional["UsersUserFull"] = None
class FavePageType(enum.Enum):
    """Kind of bookmarked page (`FavePage.type`)."""
    USER = "user"
    GROUP = "group"
    HINTS = "hints"
class FaveTag(PydanticModel):
    """User-defined bookmark tag."""
    id: Optional[int] = None
    name: Optional[str] = None
# --- Friends status / request models ---
class FriendsFriendStatus(PydanticModel):
    """Friendship status between the current user and `user_id`."""
    friend_status: Optional["FriendsFriendStatusStatus"] = None
    sign: Optional[str] = None
    user_id: Optional[int] = None
class FriendsFriendExtendedStatus(FriendsFriendStatus):
    """FriendsFriendStatus plus an unread-request flag."""
    is_request_unread: Optional[bool] = None
class FriendsFriendStatusStatus(enum.IntEnum):
    """Numeric friendship state."""
    NOT_A_FRIEND = 0
    OUTCOMING_REQUEST = 1
    INCOMING_REQUEST = 2
    IS_FRIEND = 3
class FriendsFriendsList(PydanticModel):
    """A user-defined friend list (id + name)."""
    id: Optional[int] = None
    name: Optional[str] = None
class FriendsMutualFriend(PydanticModel):
    """Mutual-friends summary for one user."""
    common_count: Optional[int] = None
    common_friends: Optional[List[int]] = None
    id: Optional[int] = None
class FriendsRequests(PydanticModel):
    """An incoming friend request."""
    # NOTE(review): underscore-prefixed fields are treated as private by pydantic
    # by default; confirm PydanticModel's config maps `_from` to the API key "from".
    _from: Optional[str] = None
    mutual: Optional["FriendsRequestsMutual"] = None
    user_id: Optional[int] = None
class FriendsRequestsMutual(PydanticModel):
    """Mutual friends attached to a request."""
    count: Optional[int] = None
    users: Optional[List[int]] = None
class FriendsRequestsXtrMessage(PydanticModel):
    """Friend request extended with an attached message."""
    # NOTE(review): same `_from` caveat as FriendsRequests above.
    _from: Optional[str] = None
    message: Optional[str] = None
    mutual: Optional["FriendsRequestsMutual"] = None
    user_id: Optional[int] = None
class UsersUser(UsersUserMin):
    """User object: UsersUserMin plus presence, photos and relationship fields."""
    sex: Optional["BaseSex"] = None
    screen_name: Optional[str] = None
    photo_50: Optional[str] = None
    photo_100: Optional[str] = None
    online_info: Optional["UsersOnlineInfo"] = None
    online: Optional["BaseBoolInt"] = None
    online_mobile: Optional["BaseBoolInt"] = None
    online_app: Optional[int] = None
    verified: Optional["BaseBoolInt"] = None
    trending: Optional["BaseBoolInt"] = None
    friend_status: Optional["FriendsFriendStatusStatus"] = None
    mutual: Optional["FriendsRequestsMutual"] = None
class UsersUserFull(UsersUser):
    """Full user profile: UsersUser plus every optional profile field.

    All fields are optional; which ones are populated depends on the `fields`
    parameter of the originating API request.
    """
    # Declined first/last name forms (nominative..ablative cases).
    first_name_nom: Optional[str] = None
    first_name_gen: Optional[str] = None
    first_name_dat: Optional[str] = None
    first_name_acc: Optional[str] = None
    first_name_ins: Optional[str] = None
    first_name_abl: Optional[str] = None
    last_name_nom: Optional[str] = None
    last_name_gen: Optional[str] = None
    last_name_dat: Optional[str] = None
    last_name_acc: Optional[str] = None
    last_name_ins: Optional[str] = None
    last_name_abl: Optional[str] = None
    nickname: Optional[str] = None
    maiden_name: Optional[str] = None
    contact_name: Optional[str] = None
    domain: Optional[str] = None
    bdate: Optional[str] = None
    city: Optional["BaseCity"] = None
    country: Optional["BaseCountry"] = None
    timezone: Optional[int] = None
    owner_state: Optional["OwnerState"] = None
    # Profile photo URLs at various sizes.
    photo_200: Optional[str] = None
    photo_max: Optional[str] = None
    photo_200_orig: Optional[str] = None
    photo_400_orig: Optional[str] = None
    photo_max_orig: Optional[str] = None
    photo_id: Optional[str] = None
    has_photo: Optional["BaseBoolInt"] = None
    has_mobile: Optional["BaseBoolInt"] = None
    is_friend: Optional["BaseBoolInt"] = None
    wall_comments: Optional["BaseBoolInt"] = None
    # Permission flags for the requesting user.
    can_post: Optional["BaseBoolInt"] = None
    can_see_all_posts: Optional["BaseBoolInt"] = None
    can_see_audio: Optional["BaseBoolInt"] = None
    type: Optional["UsersUserType"] = None
    email: Optional[str] = None
    skype: Optional[str] = None
    facebook: Optional[str] = None
    facebook_name: Optional[str] = None
    twitter: Optional[str] = None
    livejournal: Optional[str] = None
    instagram: Optional[str] = None
    test: Optional["BaseBoolInt"] = None
    video_live: Optional["VideoLiveInfo"] = None
    is_video_live_notifications_blocked: Optional["BaseBoolInt"] = None
    is_service: Optional[bool] = None
    service_description: Optional[str] = None
    photo_rec: Optional["PhotosPhotoFalseable"] = None
    photo_medium: Optional["PhotosPhotoFalseable"] = None
    photo_medium_rec: Optional["PhotosPhotoFalseable"] = None
    photo: Optional[str] = None
    photo_big: Optional[str] = None
    photo_400: Optional[str] = None
    photo_max_size: Optional["PhotosPhoto"] = None
    language: Optional[str] = None
    stories_archive_count: Optional[int] = None
    wall_default: Optional[str] = None
    can_call: Optional[bool] = None
    can_see_wishes: Optional[bool] = None
    can_see_gifts: Optional["BaseBoolInt"] = None
    # Free-text "about me" sections.
    interests: Optional[str] = None
    books: Optional[str] = None
    tv: Optional[str] = None
    quotes: Optional[str] = None
    about: Optional[str] = None
    games: Optional[str] = None
    movies: Optional[str] = None
    activities: Optional[str] = None
    music: Optional[str] = None
    can_write_private_message: Optional["BaseBoolInt"] = None
    can_send_friend_request: Optional["BaseBoolInt"] = None
    can_be_invited_group: Optional[bool] = None
    mobile_phone: Optional[str] = None
    home_phone: Optional[str] = None
    site: Optional[str] = None
    status_audio: Optional["AudioAudio"] = None
    status: Optional[str] = None
    activity: Optional[str] = None
    last_seen: Optional["UsersLastSeen"] = None
    exports: Optional["UsersExports"] = None
    crop_photo: Optional["BaseCropPhoto"] = None
    followers_count: Optional[int] = None
    video_live_level: Optional[int] = None
    video_live_count: Optional[int] = None
    clips_count: Optional[int] = None
    blacklisted: Optional["BaseBoolInt"] = None
    blacklisted_by_me: Optional["BaseBoolInt"] = None
    is_favorite: Optional["BaseBoolInt"] = None
    is_hidden_from_feed: Optional["BaseBoolInt"] = None
    common_count: Optional[int] = None
    # Education / career sections.
    occupation: Optional["UsersOccupation"] = None
    career: Optional[List["UsersCareer"]] = None
    military: Optional[List["UsersMilitary"]] = None
    university: Optional[int] = None
    university_name: Optional[str] = None
    university_group_id: Optional[int] = None
    faculty: Optional[int] = None
    faculty_name: Optional[str] = None
    graduation: Optional[int] = None
    education_form: Optional[str] = None
    education_status: Optional[str] = None
    home_town: Optional[str] = None
    relation: Optional["UsersUserRelation"] = None
    relation_partner: Optional["UsersUserMin"] = None
    personal: Optional["UsersPersonal"] = None
    universities: Optional[List["UsersUniversity"]] = None
    schools: Optional[List["UsersSchool"]] = None
    relatives: Optional[List["UsersRelative"]] = None
    is_subscribed_podcasts: Optional[bool] = None
    can_subscribe_podcasts: Optional[bool] = None
    can_subscribe_posts: Optional[bool] = None
    counters: Optional["UsersUserCounters"] = None
    access_key: Optional[str] = None
    can_upload_doc: Optional["BaseBoolInt"] = None
    hash: Optional[str] = None
    has_email: Optional[bool] = None
# --- Extended friend-user models and Gifts models ---
class FriendsUserXtrLists(UsersUserFull):
    """Full user profile plus the friend-list ids the user belongs to."""
    lists: Optional[List[int]] = None
class FriendsUserXtrPhone(UsersUserFull):
    """Full user profile plus a phone number."""
    phone: Optional[str] = None
class GiftsGift(PydanticModel):
    """A received gift instance."""
    date: Optional[int] = None
    from_id: Optional[int] = None
    gift: Optional["GiftsLayout"] = None
    gift_hash: Optional[str] = None
    id: Optional[int] = None
    message: Optional[str] = None
    privacy: Optional["GiftsGiftPrivacy"] = None
class GiftsGiftPrivacy(enum.IntEnum):
    """Visibility of the sender name and message for a gift."""
    NAME_AND_MESSAGE_FOR_ALL = 0
    NAME_FOR_ALL = 1
    NAME_AND_MESSAGE_FOR_RECIPIENT_ONLY = 2
class GiftsLayout(PydanticModel):
    """Gift artwork/layout metadata (thumbnails at several sizes)."""
    id: Optional[int] = None
    thumb_512: Optional[str] = None
    thumb_256: Optional[str] = None
    thumb_48: Optional[str] = None
    thumb_96: Optional[str] = None
    stickers_product_id: Optional[int] = None
    is_stickers_style: Optional[bool] = None
    build_id: Optional[str] = None
    keywords: Optional[str] = None
# --- Group address / timetable / ban-info models ---
class GroupsAddress(PydanticModel):
    """A community's physical address."""
    additional_address: Optional[str] = None
    address: Optional[str] = None
    city_id: Optional[int] = None
    country_id: Optional[int] = None
    distance: Optional[int] = None
    id: Optional[int] = None
    # NOTE(review): coordinates declared as int in this schema although geographic
    # lat/lon are normally fractional — confirm against the upstream API schema
    # before relying on these values.
    latitude: Optional[int] = None
    longitude: Optional[int] = None
    metro_station_id: Optional[int] = None
    phone: Optional[str] = None
    time_offset: Optional[int] = None
    timetable: Optional["GroupsAddressTimetable"] = None
    title: Optional[str] = None
    work_info_status: Optional["GroupsAddressWorkInfoStatus"] = None
class GroupsAddressTimetable(PydanticModel):
    """Weekly timetable: one optional entry per weekday."""
    fri: Optional["GroupsAddressTimetableDay"] = None
    mon: Optional["GroupsAddressTimetableDay"] = None
    sat: Optional["GroupsAddressTimetableDay"] = None
    sun: Optional["GroupsAddressTimetableDay"] = None
    thu: Optional["GroupsAddressTimetableDay"] = None
    tue: Optional["GroupsAddressTimetableDay"] = None
    wed: Optional["GroupsAddressTimetableDay"] = None
class GroupsAddressTimetableDay(PydanticModel):
    """Opening hours for a single day, including an optional break window."""
    break_close_time: Optional[int] = None
    break_open_time: Optional[int] = None
    close_time: Optional[int] = None
    open_time: Optional[int] = None
class GroupsAddressWorkInfoStatus(enum.Enum):
    """Work-hours availability status for an address."""
    NO_INFORMATION = "no_information"
    TEMPORARILY_CLOSED = "temporarily_closed"
    ALWAYS_OPENED = "always_opened"
    TIMETABLE = "timetable"
    FOREVER_CLOSED = "forever_closed"
class GroupsAddressesInfo(PydanticModel):
    """Whether addresses are enabled for a group, and the main address id."""
    is_enabled: Optional[bool] = None
    main_address_id: Optional[int] = None
class GroupsBanInfo(PydanticModel):
    """Details of a ban issued in a community."""
    admin_id: Optional[int] = None
    comment: Optional[str] = None
    comment_visible: Optional[bool] = None
    is_closed: Optional[bool] = None
    date: Optional[int] = None
    end_date: Optional[int] = None
    reason: Optional["GroupsBanInfoReason"] = None
class GroupsBanInfoReason(enum.IntEnum):
    """Numeric ban reason code."""
    OTHER = 0
    SPAM = 1
    VERBAL_ABUSE = 2
    STRONG_LANGUAGE = 3
    FLOOD = 4
class GroupsOwnerXtrBanInfo(PydanticModel):
    """A banned owner (group or profile) together with its ban info."""
    ban_info: Optional["GroupsBanInfo"] = None
    group: Optional["GroupsGroup"] = None
    profile: Optional["UsersUser"] = None
    type: Optional["GroupsOwnerXtrBanInfoType"] = None
# Backwards-compatible alias kept from the upstream schema.
GroupsBannedItem = GroupsOwnerXtrBanInfo
# --- Group callback-server, contacts, counters and cover models ---
class GroupsCallbackServer(PydanticModel):
    """A registered Callback API server for a group."""
    id: Optional[int] = None
    title: Optional[str] = None
    creator_id: Optional[int] = None
    url: Optional[str] = None
    secret_key: Optional[str] = None
    status: Optional[str] = None
class GroupsCallbackSettings(PydanticModel):
    """Callback API settings: API version and per-event toggles."""
    api_version: Optional[str] = None
    events: Optional["GroupsLongPollEvents"] = None
class GroupsContactsItem(PydanticModel):
    """A contact person listed on a group page."""
    user_id: Optional[int] = None
    desc: Optional[str] = None
    phone: Optional[str] = None
    email: Optional[str] = None
class GroupsCountersGroup(PydanticModel):
    """Per-section item counters for a group."""
    addresses: Optional[int] = None
    albums: Optional[int] = None
    audios: Optional[int] = None
    audio_playlists: Optional[int] = None
    docs: Optional[int] = None
    market: Optional[int] = None
    photos: Optional[int] = None
    topics: Optional[int] = None
    videos: Optional[int] = None
class GroupsCover(PydanticModel):
    """Group cover: enabled flag plus cover images."""
    enabled: Optional["BaseBoolInt"] = None
    images: Optional[List["BaseImage"]] = None
class GroupsFields(enum.Enum):
    """Optional field names that may be requested for group objects."""
    MARKET = "market"
    MEMBER_STATUS = "member_status"
    IS_FAVORITE = "is_favorite"
    IS_SUBSCRIBED = "is_subscribed"
    IS_SUBSCRIBED_PODCASTS = "is_subscribed_podcasts"
    CAN_SUBSCRIBE_PODCASTS = "can_subscribe_podcasts"
    CITY = "city"
    COUNTRY = "country"
    VERIFIED = "verified"
    DESCRIPTION = "description"
    WIKI_PAGE = "wiki_page"
    MEMBERS_COUNT = "members_count"
    REQUESTS_COUNT = "requests_count"
    COUNTERS = "counters"
    COVER = "cover"
    CAN_POST = "can_post"
    CAN_SUGGEST = "can_suggest"
    CAN_UPLOAD_STORY = "can_upload_story"
    CAN_UPLOAD_DOC = "can_upload_doc"
    CAN_UPLOAD_VIDEO = "can_upload_video"
    CAN_UPLOAD_CLIP = "can_upload_clip"
    CAN_SEE_ALL_POSTS = "can_see_all_posts"
    CAN_CREATE_TOPIC = "can_create_topic"
    ACTIVITY = "activity"
    FIXED_POST = "fixed_post"
    HAS_PHOTO = "has_photo"
    STATUS = "status"
    MAIN_ALBUM_ID = "main_album_id"
    LINKS = "links"
    CONTACTS = "contacts"
    SITE = "site"
    MAIN_SECTION = "main_section"
    SECONDARY_SECTION = "secondary_section"
    WALL = "wall"
    TRENDING = "trending"
    CAN_MESSAGE = "can_message"
    IS_MARKET_CART_ENABLED = "is_market_cart_enabled"
    IS_MESSAGES_BLOCKED = "is_messages_blocked"
    CAN_SEND_NOTIFY = "can_send_notify"
    HAS_GROUP_CHANNEL = "has_group_channel"
    GROUP_CHANNEL = "group_channel"
    ONLINE_STATUS = "online_status"
    START_DATE = "start_date"
    FINISH_DATE = "finish_date"
    AGE_LIMITS = "age_limits"
    BAN_INFO = "ban_info"
    ACTION_BUTTON = "action_button"
    AUTHOR_ID = "author_id"
    PHONE = "phone"
    HAS_MARKET_APP = "has_market_app"
    ADDRESSES = "addresses"
    LIVE_COVERS = "live_covers"
    IS_ADULT = "is_adult"
    CAN_SUBSCRIBE_POSTS = "can_subscribe_posts"
    WARNING_NOTIFICATION = "warning_notification"
    MSG_PUSH_ALLOWED = "msg_push_allowed"
    STORIES_ARCHIVE_COUNT = "stories_archive_count"
    VIDEO_LIVE_LEVEL = "video_live_level"
    VIDEO_LIVE_COUNT = "video_live_count"
    CLIPS_COUNT = "clips_count"
    IS_BUSINESS = "is_business"
    TEXTLIVES_COUNT = "textlives_count"
# --- Group list filters and basic group enums/models ---
class GroupsFilter(enum.Enum):
    """Filter values for group-listing requests."""
    ADMIN = "admin"
    EDITOR = "editor"
    MODER = "moder"
    ADVERTISER = "advertiser"
    GROUPS = "groups"
    PUBLICS = "publics"
    EVENTS = "events"
    HAS_ADDRESSES = "has_addresses"
class GroupsGroupAccess(enum.IntEnum):
    """Group access level (same numeric scheme as GroupsGroupIsClosed)."""
    OPEN = 0
    CLOSED = 1
    PRIVATE = 2
class GroupsGroupAdminLevel(enum.IntEnum):
    """Administrative level of the current user in a group."""
    MODERATOR = 1
    EDITOR = 2
    ADMINISTRATOR = 3
class GroupsGroupAgeLimits(enum.IntEnum):
    """Age restriction setting (1 = unlimited, 2 = 16+, 3 = 18+)."""
    UNLIMITED = 1
    _16_PLUS = 2
    _18_PLUS = 3
class GroupsGroupAttach(PydanticModel):
    """A group attached to a post/message."""
    id: Optional[int] = None
    text: Optional[str] = None
    status: Optional[str] = None
    size: Optional[int] = None
    is_favorite: Optional[bool] = None
class GroupsGroupAudio(enum.IntEnum):
    """Audio section availability."""
    DISABLED = 0
    OPEN = 1
    LIMITED = 2
class GroupsGroupBanInfo(PydanticModel):
    """Ban info as embedded in a group object."""
    comment: Optional[str] = None
    end_date: Optional[int] = None
    reason: Optional["GroupsBanInfoReason"] = None
class GroupsGroupCategory(PydanticModel):
    """A group category with plain-named subcategories."""
    id: Optional[int] = None
    name: Optional[str] = None
    subcategories: Optional[List["BaseObjectWithName"]] = None
class GroupsGroupCategoryFull(PydanticModel):
    """A category with page counts, example pages and typed subcategories."""
    id: Optional[int] = None
    name: Optional[str] = None
    page_count: Optional[int] = None
    page_previews: Optional[List["GroupsGroup"]] = None
    subcategories: Optional[List["GroupsGroupCategory"]] = None
class GroupsGroupCategoryType(PydanticModel):
    """Minimal category descriptor (id + name)."""
    id: Optional[int] = None
    name: Optional[str] = None
class GroupsGroupDocs(enum.IntEnum):
    """Documents section availability."""
    DISABLED = 0
    OPEN = 1
    LIMITED = 2
class GroupsGroup(PydanticModel):
    """Base community object: identity, access type and small photo URLs."""
    id: Optional[int] = None
    name: Optional[str] = None
    screen_name: Optional[str] = None
    is_closed: Optional["GroupsGroupIsClosed"] = None
    type: Optional["GroupsGroupType"] = None
    is_admin: Optional["BaseBoolInt"] = None
    admin_level: Optional["GroupsGroupAdminLevel"] = None
    is_member: Optional["BaseBoolInt"] = None
    is_advertiser: Optional["BaseBoolInt"] = None
    start_date: Optional[int] = None
    finish_date: Optional[int] = None
    deactivated: Optional[str] = None
    photo_50: Optional[str] = None
    photo_100: Optional[str] = None
    photo_200: Optional[str] = None
class GroupsGroupFull(GroupsGroup):
    """Extended community object: GroupsGroup plus all optional detail fields.

    Field availability depends on the `fields` parameter of the originating
    API request; everything is optional.
    """
    market: Optional["GroupsMarketInfo"] = None
    member_status: Optional["GroupsGroupFullMemberStatus"] = None
    is_adult: Optional["BaseBoolInt"] = None
    is_hidden_from_feed: Optional["BaseBoolInt"] = None
    is_favorite: Optional["BaseBoolInt"] = None
    is_subscribed: Optional["BaseBoolInt"] = None
    city: Optional["BaseObject"] = None
    country: Optional["BaseCountry"] = None
    verified: Optional["BaseBoolInt"] = None
    description: Optional[str] = None
    wiki_page: Optional[str] = None
    members_count: Optional[int] = None
    requests_count: Optional[int] = None
    video_live_level: Optional[int] = None
    video_live_count: Optional[int] = None
    clips_count: Optional[int] = None
    counters: Optional["GroupsCountersGroup"] = None
    cover: Optional["GroupsCover"] = None
    # Capability flags for the requesting user.
    can_post: Optional["BaseBoolInt"] = None
    can_suggest: Optional["BaseBoolInt"] = None
    can_upload_story: Optional["BaseBoolInt"] = None
    can_upload_doc: Optional["BaseBoolInt"] = None
    can_upload_video: Optional["BaseBoolInt"] = None
    can_see_all_posts: Optional["BaseBoolInt"] = None
    can_create_topic: Optional["BaseBoolInt"] = None
    activity: Optional[str] = None
    fixed_post: Optional[int] = None
    has_photo: Optional["BaseBoolInt"] = None
    crop_photo: Optional["BaseCropPhoto"] = None
    status: Optional[str] = None
    status_audio: Optional["AudioAudio"] = None
    main_album_id: Optional[int] = None
    links: Optional[List["GroupsLinksItem"]] = None
    contacts: Optional[List["GroupsContactsItem"]] = None
    wall: Optional[int] = None
    site: Optional[str] = None
    main_section: Optional["GroupsGroupFullMainSection"] = None
    secondary_section: Optional[int] = None
    trending: Optional["BaseBoolInt"] = None
    can_message: Optional["BaseBoolInt"] = None
    is_messages_blocked: Optional["BaseBoolInt"] = None
    can_send_notify: Optional["BaseBoolInt"] = None
    online_status: Optional["GroupsOnlineStatus"] = None
    invited_by: Optional[int] = None
    age_limits: Optional["GroupsGroupFullAgeLimits"] = None
    ban_info: Optional["GroupsGroupBanInfo"] = None
    has_market_app: Optional[bool] = None
    using_vkpay_market_app: Optional[bool] = None
    has_group_channel: Optional[bool] = None
    addresses: Optional["GroupsAddressesInfo"] = None
    is_subscribed_podcasts: Optional[bool] = None
    can_subscribe_podcasts: Optional[bool] = None
    can_subscribe_posts: Optional[bool] = None
    live_covers: Optional["GroupsLiveCovers"] = None
    stories_archive_count: Optional[int] = None
# --- Enums and models describing group sections, membership and roles ---
class GroupsGroupFullAgeLimits(enum.IntEnum):
    """Age restriction as reported on full group objects."""
    NO = 1
    OVER_16 = 2
    OVER_18 = 3
class GroupsGroupFullMainSection(enum.IntEnum):
    """Which section is shown as the group's main section."""
    ABSENT = 0
    PHOTOS = 1
    TOPICS = 2
    AUDIO = 3
    VIDEO = 4
    MARKET = 5
class GroupsGroupFullMemberStatus(enum.IntEnum):
    """Current user's membership state in a group."""
    NOT_A_MEMBER = 0
    MEMBER = 1
    NOT_SURE = 2
    DECLINED = 3
    HAS_SENT_A_REQUEST = 4
    INVITED = 5
class GroupsGroupIsClosed(enum.IntEnum):
    """Group privacy type: open, closed or private."""
    OPEN = 0
    CLOSED = 1
    PRIVATE = 2
class GroupsGroupLink(PydanticModel):
    """A link item attached to a group (editable form)."""
    name: Optional[str] = None
    desc: Optional[str] = None
    edit_title: Optional["BaseBoolInt"] = None
    id: Optional[int] = None
    image_processing: Optional["BaseBoolInt"] = None
    url: Optional[str] = None
class GroupsGroupMarketCurrency(enum.IntEnum):
    """Market currency as an ISO 4217 numeric code."""
    RUSSIAN_RUBLES = 643
    UKRAINIAN_HRYVNIA = 980
    KAZAKH_TENGE = 398
    EURO = 978
    US_DOLLARS = 840
class GroupsGroupPhotos(enum.IntEnum):
    """Photos section availability."""
    DISABLED = 0
    OPEN = 1
    LIMITED = 2
class GroupsGroupPublicCategoryList(PydanticModel):
    """Public-page category with its subcategory list."""
    id: Optional[int] = None
    name: Optional[str] = None
    subcategories: Optional[List["GroupsGroupCategoryType"]] = None
class GroupsGroupRole(enum.Enum):
    """Role names assignable to group managers."""
    MODERATOR = "moderator"
    EDITOR = "editor"
    ADMINISTRATOR = "administrator"
    ADVERTISER = "advertiser"
class GroupsGroupSubject(enum.Enum):
    """Group subject categories.

    NOTE: values are numeric ids encoded as strings, exactly as the upstream
    schema defines them — do not convert to int.
    """
    AUTO = "1"
    ACTIVITY_HOLIDAYS = "2"
    BUSINESS = "3"
    PETS = "4"
    HEALTH = "5"
    DATING_AND_COMMUNICATION = "6"
    GAMES = "7"
    IT = "8"
    CINEMA = "9"
    BEAUTY_AND_FASHION = "10"
    COOKING = "11"
    ART_AND_CULTURE = "12"
    LITERATURE = "13"
    MOBILE_SERVICES_AND_INTERNET = "14"
    MUSIC = "15"
    SCIENCE_AND_TECHNOLOGY = "16"
    REAL_ESTATE = "17"
    NEWS_AND_MEDIA = "18"
    SECURITY = "19"
    EDUCATION = "20"
    HOME_AND_RENOVATIONS = "21"
    POLITICS = "22"
    FOOD = "23"
    INDUSTRY = "24"
    TRAVEL = "25"
    WORK = "26"
    ENTERTAINMENT = "27"
    RELIGION = "28"
    FAMILY = "29"
    SPORTS = "30"
    INSURANCE = "31"
    TELEVISION = "32"
    GOODS_AND_SERVICES = "33"
    HOBBIES = "34"
    FINANCE = "35"
    PHOTO = "36"
    ESOTERICS = "37"
    ELECTRONICS_AND_APPLIANCES = "38"
    EROTIC = "39"
    HUMOR = "40"
    SOCIETY_HUMANITIES = "41"
    DESIGN_AND_GRAPHICS = "42"
# --- Misc group enums, tags, links and live covers ---
class GroupsGroupSuggestedPrivacy(enum.IntEnum):
    """Who may suggest posts to a group."""
    NONE = 0
    ALL = 1
    SUBSCRIBERS = 2
class GroupsGroupTag(PydanticModel):
    """An admin-defined group tag."""
    id: Optional[int] = None
    name: Optional[str] = None
    color: Optional[str] = None
    uses: Optional[int] = None
class GroupsGroupTopics(enum.IntEnum):
    """Topics (board) section availability."""
    DISABLED = 0
    OPEN = 1
    LIMITED = 2
class GroupsGroupType(enum.Enum):
    """Community kind: group, public page or event."""
    GROUP = "group"
    PAGE = "page"
    EVENT = "event"
class GroupsGroupVideo(enum.IntEnum):
    """Video section availability."""
    DISABLED = 0
    OPEN = 1
    LIMITED = 2
class GroupsGroupWall(enum.IntEnum):
    """Wall availability (includes an extra CLOSED state)."""
    DISABLED = 0
    OPEN = 1
    LIMITED = 2
    CLOSED = 3
class GroupsGroupWiki(enum.IntEnum):
    """Wiki section availability."""
    DISABLED = 0
    OPEN = 1
    LIMITED = 2
class GroupsGroupsArray(PydanticModel):
    """Plain array of group ids with a total count."""
    count: Optional[int] = None
    items: Optional[List[int]] = None
class GroupsLinksItem(PydanticModel):
    """A link item as shown on a group page (with photo thumbnails)."""
    desc: Optional[str] = None
    edit_title: Optional["BaseBoolInt"] = None
    id: Optional[int] = None
    name: Optional[str] = None
    photo_100: Optional[str] = None
    photo_50: Optional[str] = None
    url: Optional[str] = None
class GroupsLiveCovers(PydanticModel):
    """Live (video) cover configuration for a group."""
    is_enabled: Optional[bool] = None
    is_scalable: Optional[bool] = None
    story_ids: Optional[List[str]] = None
class GroupsLongPollEvents(PydanticModel):
    """Per-event on/off toggles for a group's Long Poll / Callback API.

    Field names mirror the event identifiers in `CallbackMessageType`.
    """
    audio_new: Optional["BaseBoolInt"] = None
    board_post_delete: Optional["BaseBoolInt"] = None
    board_post_edit: Optional["BaseBoolInt"] = None
    board_post_new: Optional["BaseBoolInt"] = None
    board_post_restore: Optional["BaseBoolInt"] = None
    group_change_photo: Optional["BaseBoolInt"] = None
    group_change_settings: Optional["BaseBoolInt"] = None
    group_join: Optional["BaseBoolInt"] = None
    group_leave: Optional["BaseBoolInt"] = None
    group_officers_edit: Optional["BaseBoolInt"] = None
    lead_forms_new: Optional["BaseBoolInt"] = None
    market_comment_delete: Optional["BaseBoolInt"] = None
    market_comment_edit: Optional["BaseBoolInt"] = None
    market_comment_new: Optional["BaseBoolInt"] = None
    market_comment_restore: Optional["BaseBoolInt"] = None
    market_order_new: Optional["BaseBoolInt"] = None
    market_order_edit: Optional["BaseBoolInt"] = None
    message_allow: Optional["BaseBoolInt"] = None
    message_deny: Optional["BaseBoolInt"] = None
    message_new: Optional["BaseBoolInt"] = None
    message_read: Optional["BaseBoolInt"] = None
    message_reply: Optional["BaseBoolInt"] = None
    message_typing_state: Optional["BaseBoolInt"] = None
    message_edit: Optional["BaseBoolInt"] = None
    photo_comment_delete: Optional["BaseBoolInt"] = None
    photo_comment_edit: Optional["BaseBoolInt"] = None
    photo_comment_new: Optional["BaseBoolInt"] = None
    photo_comment_restore: Optional["BaseBoolInt"] = None
    photo_new: Optional["BaseBoolInt"] = None
    poll_vote_new: Optional["BaseBoolInt"] = None
    user_block: Optional["BaseBoolInt"] = None
    user_unblock: Optional["BaseBoolInt"] = None
    video_comment_delete: Optional["BaseBoolInt"] = None
    video_comment_edit: Optional["BaseBoolInt"] = None
    video_comment_new: Optional["BaseBoolInt"] = None
    video_comment_restore: Optional["BaseBoolInt"] = None
    video_new: Optional["BaseBoolInt"] = None
    wall_post_new: Optional["BaseBoolInt"] = None
    wall_reply_delete: Optional["BaseBoolInt"] = None
    wall_reply_edit: Optional["BaseBoolInt"] = None
    wall_reply_new: Optional["BaseBoolInt"] = None
    wall_reply_restore: Optional["BaseBoolInt"] = None
    wall_repost: Optional["BaseBoolInt"] = None
    donut_subscription_create: Optional["BaseBoolInt"] = None
    donut_subscription_prolonged: Optional["BaseBoolInt"] = None
    donut_subscription_cancelled: Optional["BaseBoolInt"] = None
    donut_subscription_expired: Optional["BaseBoolInt"] = None
    donut_subscription_price_changed: Optional["BaseBoolInt"] = None
    donut_money_withdraw: Optional["BaseBoolInt"] = None
    donut_money_withdraw_error: Optional["BaseBoolInt"] = None
class GroupsLongPollServer(PydanticModel):
key: Optional[str] = None
server: Optional[str] = None
ts: Optional[str] = None
class GroupsLongPollSettings(PydanticModel):
api_version: Optional[str] = None
events: Optional["GroupsLongPollEvents"] = None
is_enabled: Optional[bool] = None
class GroupsMarketInfo(PydanticModel):
contact_id: Optional[int] = None
currency: Optional["MarketCurrency"] = None
currency_text: Optional[str] = None
enabled: Optional["BaseBoolInt"] = None
main_album_id: Optional[int] = None
price_max: Optional[str] = None
price_min: Optional[str] = None
class GroupsMarketState(enum.Enum):
NONE = "none"
BASIC = "basic"
ADVANCED = "advanced"
class GroupsMemberRole(PydanticModel):
id: Optional[int] = None
permissions: Optional[List["GroupsMemberRolePermission"]] = None
role: Optional["GroupsMemberRoleStatus"] = None
class GroupsMemberRolePermission(enum.Enum):
ADS = "ads"
class GroupsMemberRoleStatus(enum.Enum):
MODERATOR = "moderator"
EDITOR = "editor"
ADMINISTRATOR = "administrator"
CREATOR = "creator"
class GroupsMemberStatus(PydanticModel):
    # Minimal membership answer for one user (member yes/no).
    member: Optional["BaseBoolInt"] = None
    user_id: Optional[int] = None


class GroupsMemberStatusFull(PydanticModel):
    # Extended membership answer: invite/recall rights, pending invitation
    # and join-request flags in addition to the member bit.
    can_invite: Optional["BaseBoolInt"] = None
    can_recall: Optional["BaseBoolInt"] = None
    invitation: Optional["BaseBoolInt"] = None
    member: Optional["BaseBoolInt"] = None
    request: Optional["BaseBoolInt"] = None
    user_id: Optional[int] = None


class GroupsOnlineStatus(PydanticModel):
    # Community "online" badge state; minutes presumably an ETA for
    # ANSWER_MARK — TODO confirm against API docs.
    minutes: Optional[int] = None
    status: Optional["GroupsOnlineStatusType"] = None


class GroupsOnlineStatusType(enum.Enum):
    NONE = "none"
    ONLINE = "online"
    ANSWER_MARK = "answer_mark"


class GroupsOwnerXtrBanInfoType(enum.Enum):
    # Kind of banned owner: a community or a user profile.
    GROUP = "group"
    PROFILE = "profile"


class GroupsProfileItem(PydanticModel):
    # Compact user card (id, avatars, first name).
    id: Optional[int] = None
    photo_50: Optional[str] = None
    photo_100: Optional[str] = None
    first_name: Optional[str] = None


class GroupsRoleOptions(enum.Enum):
    # Same ladder as GroupsMemberRoleStatus; kept separate to mirror the
    # upstream schema, which declares both.
    MODERATOR = "moderator"
    EDITOR = "editor"
    ADMINISTRATOR = "administrator"
    CREATOR = "creator"


# Heterogeneous [id, name, ...] tuple encoded as a list in the schema.
GroupsSectionsListItem = List[Union[int, str]]


class GroupsSettingsTwitter(PydanticModel):
    # Linked Twitter account status block.
    status: Optional[str] = None
    name: Optional[str] = None


class GroupsSubjectItem(PydanticModel):
    # Community subject (category) dictionary entry.
    id: Optional[int] = None
    name: Optional[str] = None


class GroupsTokenPermissionSetting(PydanticModel):
    # One named permission bit of an access token.
    name: Optional[str] = None
    setting: Optional[int] = None


class GroupsUserXtrRole(UsersUserFull):
    # Full user profile extended with the role they hold in a community.
    role: Optional["GroupsRoleOptions"] = None
class LikesType(enum.Enum):
    # Object kinds that can receive likes.
    POST = "post"
    COMMENT = "comment"
    PHOTO = "photo"
    AUDIO = "audio"
    VIDEO = "video"
    NOTE = "note"
    MARKET = "market"
    PHOTO_COMMENT = "photo_comment"
    VIDEO_COMMENT = "video_comment"
    TOPIC_COMMENT = "topic_comment"
    MARKET_COMMENT = "market_comment"
    SITEPAGE = "sitepage"


class LinkTargetObject(PydanticModel):
    # Target of a shared link: object type plus (owner_id, item_id) pair.
    type: Optional[str] = None
    owner_id: Optional[int] = None
    item_id: Optional[int] = None


class MarketCurrency(PydanticModel):
    # Currency dictionary entry.
    id: Optional[int] = None
    name: Optional[str] = None


class MarketMarketAlbum(PydanticModel):
    # A collection (album) of market items with a cover photo.
    count: Optional[int] = None
    id: Optional[int] = None
    owner_id: Optional[int] = None
    photo: Optional["PhotosPhoto"] = None
    title: Optional[str] = None
    updated_time: Optional[int] = None


class MarketMarketCategoryOld(PydanticModel):
    # Legacy flat market category (id, name, section).
    id: Optional[int] = None
    name: Optional[str] = None
    section: Optional["MarketSection"] = None


# Current alias: the API still serves the legacy flat category shape.
MarketMarketCategory = MarketMarketCategoryOld


class MarketMarketCategoryNested(PydanticModel):
    # Category with a parent link (self-referential forward ref).
    id: Optional[int] = None
    name: Optional[str] = None
    parent: Optional["MarketMarketCategoryNested"] = None


class MarketMarketCategoryTree(PydanticModel):
    # Category with children (inverse of the nested/parent shape above).
    id: Optional[int] = None
    name: Optional[str] = None
    children: Optional[List["MarketMarketCategoryTree"]] = None


class MarketMarketItemAvailability(enum.IntEnum):
    # 0 = purchasable, 1 = deleted by owner, 2 = temporarily unavailable.
    AVAILABLE = 0
    REMOVED = 1
    UNAVAILABLE = 2
class MarketMarketItem(PydanticModel):
    # A single shop item (base shape; see MarketMarketItemFull for extras).
    access_key: Optional[str] = None
    availability: Optional["MarketMarketItemAvailability"] = None
    button_title: Optional[str] = None
    category: Optional["MarketMarketCategory"] = None
    date: Optional[int] = None
    description: Optional[str] = None
    external_id: Optional[str] = None
    id: Optional[int] = None
    is_favorite: Optional[bool] = None
    owner_id: Optional[int] = None
    price: Optional["MarketPrice"] = None
    thumb_photo: Optional[str] = None
    title: Optional[str] = None
    url: Optional[str] = None
    # Variant grouping: items sharing variants_grouping_id are one product.
    variants_grouping_id: Optional[int] = None
    is_main_variant: Optional[bool] = None


class MarketMarketItemFull(MarketMarketItem):
    # Item with social counters, photos and album membership attached.
    albums_ids: Optional[List[int]] = None
    photos: Optional[List["PhotosPhoto"]] = None
    can_comment: Optional["BaseBoolInt"] = None
    can_repost: Optional["BaseBoolInt"] = None
    likes: Optional["BaseLikes"] = None
    reposts: Optional["BaseRepostsInfo"] = None
    views_count: Optional[int] = None
    wishlist_item_id: Optional[int] = None
    cancel_info: Optional["BaseLink"] = None
    user_agreement_info: Optional[str] = None


class MarketOrder(PydanticModel):
    # A customer order in a community shop.
    id: Optional[int] = None
    group_id: Optional[int] = None
    user_id: Optional[int] = None
    display_order_id: Optional[str] = None
    date: Optional[int] = None
    status: Optional[int] = None
    items_count: Optional[int] = None
    track_number: Optional[str] = None
    track_link: Optional[str] = None
    comment: Optional[str] = None
    address: Optional[str] = None
    merchant_comment: Optional[str] = None
    weight: Optional[int] = None
    total_price: Optional["MarketPrice"] = None
    preview_order_items: Optional[List["MarketOrderItem"]] = None
    cancel_info: Optional["BaseLink"] = None


class MarketOrderItem(PydanticModel):
    # One line of an order: item reference, unit price, quantity, variants.
    owner_id: Optional[int] = None
    item_id: Optional[int] = None
    price: Optional["MarketPrice"] = None
    quantity: Optional[int] = None
    item: Optional["MarketMarketItem"] = None
    title: Optional[str] = None
    photo: Optional["PhotosPhoto"] = None
    variants: Optional[List[str]] = None


class MarketPrice(PydanticModel):
    # Money amounts are strings (minor units per upstream schema — TODO
    # confirm); *_text fields are pre-rendered display strings.
    amount: Optional[str] = None
    currency: Optional["MarketCurrency"] = None
    discount_rate: Optional[int] = None
    old_amount: Optional[str] = None
    text: Optional[str] = None
    old_amount_text: Optional[str] = None


class MarketSection(PydanticModel):
    # Market section dictionary entry.
    id: Optional[int] = None
    name: Optional[str] = None


class MediaRestriction(PydanticModel):
    # Restriction overlay shown instead of (or over) a media item.
    text: Optional[str] = None
    title: Optional[str] = None
    button: Optional["VideoRestrictionButton"] = None
    always_shown: Optional["BaseBoolInt"] = None
    blur: Optional["BaseBoolInt"] = None
    can_play: Optional["BaseBoolInt"] = None
    can_preview: Optional["BaseBoolInt"] = None
    card_icon: Optional[List["BaseImage"]] = None
    list_icon: Optional[List["BaseImage"]] = None
class MessagesAudioMessage(PydanticModel):
    # Voice message attachment: audio links, waveform and transcript state.
    access_key: Optional[str] = None
    transcript_error: Optional[int] = None
    transcript_rate_enabled: Optional[bool] = None
    transcript_update_time: Optional[int] = None
    duration: Optional[int] = None
    id: Optional[int] = None
    link_mp3: Optional[str] = None
    link_ogg: Optional[str] = None
    owner_id: Optional[int] = None
    waveform: Optional[List[int]] = None


class MessagesChat(PydanticModel):
    # Legacy chat object; `users` is a list of member ids here (contrast
    # MessagesChatFull below, where it is a list of user objects).
    admin_id: Optional[int] = None
    id: Optional[int] = None
    kicked: Optional["BaseBoolInt"] = None
    left: Optional["BaseBoolInt"] = None
    photo_100: Optional[str] = None
    photo_200: Optional[str] = None
    photo_50: Optional[str] = None
    push_settings: Optional["MessagesChatPushSettings"] = None
    title: Optional[str] = None
    type: Optional[str] = None
    users: Optional[List[int]] = None
    is_default_photo: Optional[bool] = None


class MessagesChatFull(PydanticModel):
    # Same as MessagesChat but `users` carries expanded user objects with
    # invited_by metadata.
    admin_id: Optional[int] = None
    id: Optional[int] = None
    kicked: Optional["BaseBoolInt"] = None
    left: Optional["BaseBoolInt"] = None
    photo_100: Optional[str] = None
    photo_200: Optional[str] = None
    photo_50: Optional[str] = None
    push_settings: Optional["MessagesChatPushSettings"] = None
    title: Optional[str] = None
    type: Optional[str] = None
    users: Optional[List["MessagesUserXtrInvitedBy"]] = None


class MessagesChatPreview(PydanticModel):
    # Preview returned for a chat invite link (before joining).
    admin_id: Optional[int] = None
    joined: Optional[bool] = None
    local_id: Optional[int] = None
    members: Optional[List[int]] = None
    members_count: Optional[int] = None
    title: Optional[str] = None
    is_member: Optional[bool] = None


class MessagesChatPushSettings(PydanticModel):
    # Per-chat notification settings.
    disabled_until: Optional[int] = None
    sound: Optional["BaseBoolInt"] = None


class MessagesChatRestrictions(PydanticModel):
    # Which actions are restricted to chat admins.
    admins_promote_users: Optional[bool] = None
    only_admins_edit_info: Optional[bool] = None
    only_admins_edit_pin: Optional[bool] = None
    only_admins_invite: Optional[bool] = None
    only_admins_kick: Optional[bool] = None


class MessagesChatSettings(PydanticModel):
    # Chat block embedded in a conversation: membership, pin, photo, ACL.
    members_count: Optional[int] = None
    friends_count: Optional[int] = None
    owner_id: Optional[int] = None
    title: Optional[str] = None
    pinned_message: Optional["MessagesPinnedMessage"] = None
    state: Optional["MessagesChatSettingsState"] = None
    photo: Optional["MessagesChatSettingsPhoto"] = None
    admin_ids: Optional[List[int]] = None
    active_ids: Optional[List[int]] = None
    is_group_channel: Optional[bool] = None
    acl: Optional["MessagesChatSettingsAcl"] = None
    permissions: Optional["MessagesChatSettingsPermissions"] = None
    is_disappearing: Optional[bool] = None
    theme: Optional[str] = None
    disappearing_chat_link: Optional[str] = None
    is_service: Optional[bool] = None


class MessagesChatSettingsAcl(PydanticModel):
    # What the current user is allowed to do in this chat.
    can_change_info: Optional[bool] = None
    can_change_invite_link: Optional[bool] = None
    can_change_pin: Optional[bool] = None
    can_invite: Optional[bool] = None
    can_promote_users: Optional[bool] = None
    can_see_invite_link: Optional[bool] = None
    can_moderate: Optional[bool] = None
    can_copy_chat: Optional[bool] = None
    can_call: Optional[bool] = None
    can_use_mass_mentions: Optional[bool] = None
    can_change_service_type: Optional[bool] = None


class MessagesChatSettingsPermissions(PydanticModel):
    # Who may perform each action; values are role names as strings.
    invite: Optional[str] = None
    change_info: Optional[str] = None
    change_pin: Optional[str] = None
    use_mass_mentions: Optional[str] = None
    see_invite_link: Optional[str] = None
    call: Optional[str] = None
    change_admins: Optional[str] = None


class MessagesChatSettingsPhoto(PydanticModel):
    # Chat avatar in three sizes.
    photo_50: Optional[str] = None
    photo_100: Optional[str] = None
    photo_200: Optional[str] = None
    is_default_photo: Optional[bool] = None


class MessagesChatSettingsState(enum.Enum):
    # Current user's standing in the chat.
    IN = "in"
    KICKED = "kicked"
    LEFT = "left"
class MessagesConversation(PydanticModel):
    # A dialog: peer, read cursors, unread counters, keyboard, chat settings.
    peer: Optional["MessagesConversationPeer"] = None
    sort_id: Optional["MessagesConversationSortId"] = None
    last_message_id: Optional[int] = None
    # in_read/out_read are message-id read markers for incoming/outgoing.
    in_read: Optional[int] = None
    out_read: Optional[int] = None
    unread_count: Optional[int] = None
    is_marked_unread: Optional[bool] = None
    out_read_by: Optional["MessagesOutReadBy"] = None
    important: Optional[bool] = None
    unanswered: Optional[bool] = None
    special_service_type: Optional[str] = None
    message_request_data: Optional["MessagesMessageRequestData"] = None
    mentions: Optional[List[int]] = None
    current_keyboard: Optional["MessagesKeyboard"] = None
    push_settings: Optional["MessagesPushSettings"] = None
    can_write: Optional["MessagesConversationCanWrite"] = None
    chat_settings: Optional["MessagesChatSettings"] = None


class MessagesConversationCanWrite(PydanticModel):
    # Write permission plus a numeric reason code when not allowed.
    allowed: Optional[bool] = None
    reason: Optional[int] = None


class MessagesConversationMember(PydanticModel):
    # One member of a conversation with invite/admin metadata.
    can_kick: Optional[bool] = None
    invited_by: Optional[int] = None
    is_admin: Optional[bool] = None
    is_owner: Optional[bool] = None
    is_message_request: Optional[bool] = None
    join_date: Optional[int] = None
    request_date: Optional[int] = None
    member_id: Optional[int] = None


class MessagesConversationPeer(PydanticModel):
    # Addressee of a conversation; id is the global peer id, local_id the
    # per-type id.
    id: Optional[int] = None
    local_id: Optional[int] = None
    type: Optional["MessagesConversationPeerType"] = None


class MessagesConversationPeerType(enum.Enum):
    CHAT = "chat"
    EMAIL = "email"
    USER = "user"
    GROUP = "group"


class MessagesConversationSortId(PydanticModel):
    # Two-level sort key (major for pinned ordering, minor within).
    major_id: Optional[int] = None
    minor_id: Optional[int] = None


class MessagesConversationWithMessage(PydanticModel):
    # Conversation paired with its last message.
    conversation: Optional["MessagesConversation"] = None
    last_message: Optional["MessagesMessage"] = None


class MessagesForeignMessage(PydanticModel):
    # A forwarded/replied-to message; recursive via fwd_messages and
    # reply_message.
    attachments: Optional[List["MessagesMessageAttachment"]] = None
    conversation_message_id: Optional[int] = None
    date: Optional[int] = None
    from_id: Optional[int] = None
    fwd_messages: Optional[List["MessagesForeignMessage"]] = None
    geo: Optional["BaseGeo"] = None
    id: Optional[int] = None
    peer_id: Optional[int] = None
    reply_message: Optional["MessagesForeignMessage"] = None
    text: Optional[str] = None
    update_time: Optional[int] = None
    was_listened: Optional[bool] = None
    payload: Optional[str] = None


class MessagesForward(PydanticModel):
    # Forward descriptor sent with a new message (what to forward and how).
    owner_id: Optional[int] = None
    peer_id: Optional[int] = None
    conversation_message_ids: Optional[List[int]] = None
    message_ids: Optional[List[int]] = None
    is_reply: Optional[bool] = None


class MessagesGraffiti(PydanticModel):
    # Graffiti (hand-drawn image) attachment.
    access_key: Optional[str] = None
    height: Optional[int] = None
    id: Optional[int] = None
    owner_id: Optional[int] = None
    url: Optional[str] = None
    width: Optional[int] = None


class MessagesHistoryAttachment(PydanticModel):
    # One entry of getHistoryAttachments: the attachment plus its message.
    attachment: Optional["MessagesHistoryMessageAttachment"] = None
    message_id: Optional[int] = None
    from_id: Optional[int] = None
    forward_level: Optional[int] = None


class MessagesHistoryMessageAttachment(PydanticModel):
    # Union-style container: exactly one media field is populated, selected
    # by `type`. market/share/wall arrive as plain links in this endpoint.
    audio: Optional["AudioAudio"] = None
    audio_message: Optional["MessagesAudioMessage"] = None
    doc: Optional["DocsDoc"] = None
    graffiti: Optional["MessagesGraffiti"] = None
    link: Optional["BaseLink"] = None
    market: Optional["BaseLink"] = None
    photo: Optional["PhotosPhoto"] = None
    share: Optional["BaseLink"] = None
    type: Optional["MessagesHistoryMessageAttachmentType"] = None
    video: Optional["VideoVideo"] = None
    wall: Optional["BaseLink"] = None


class MessagesHistoryMessageAttachmentType(enum.Enum):
    # Discriminator values for MessagesHistoryMessageAttachment.
    PHOTO = "photo"
    VIDEO = "video"
    AUDIO = "audio"
    DOC = "doc"
    LINK = "link"
    MARKET = "market"
    WALL = "wall"
    SHARE = "share"
    GRAFFITI = "graffiti"
    AUDIO_MESSAGE = "audio_message"
class MessagesKeyboard(PydanticModel):
    # Bot keyboard: a grid (rows x columns) of buttons.
    author_id: Optional[int] = None
    buttons: Optional[List[List["MessagesKeyboardButton"]]] = None
    one_time: Optional[bool] = None
    inline: Optional[bool] = None


class MessagesKeyboardButton(PydanticModel):
    # One keyboard button: its action payload and display color.
    action: Optional["MessagesKeyboardButtonAction"] = None
    color: Optional[str] = None


class MessagesKeyboardButtonAction(PydanticModel):
    # Button action; which fields apply depends on `type`
    # (e.g. link for open_link, app_id/hash for open_app).
    app_id: Optional[int] = None
    hash: Optional[str] = None
    label: Optional[str] = None
    link: Optional[str] = None
    owner_id: Optional[int] = None
    payload: Optional[str] = None
    type: Optional["MessagesTemplateActionTypeNames"] = None


class MessagesLastActivity(PydanticModel):
    # User's last-seen info: online flag and timestamp.
    online: Optional["BaseBoolInt"] = None
    time: Optional[int] = None


class MessagesLongpollMessages(PydanticModel):
    # Paged list of messages returned by the Long Poll history call.
    count: Optional[int] = None
    items: Optional[List["MessagesMessage"]] = None


class MessagesLongpollParams(PydanticModel):
    # User Long Poll credentials; ts/pts are integer cursors here (cf. the
    # string ts in GroupsLongPollServer).
    server: Optional[str] = None
    key: Optional[str] = None
    ts: Optional[int] = None
    pts: Optional[int] = None


class MessagesMessage(PydanticModel):
    # A full message object as delivered to the message owner.
    action: Optional["MessagesMessageAction"] = None
    admin_author_id: Optional[int] = None
    attachments: Optional[List["MessagesMessageAttachment"]] = None
    conversation_message_id: Optional[int] = None
    date: Optional[int] = None
    deleted: Optional["BaseBoolInt"] = None
    from_id: Optional[int] = None
    fwd_messages: Optional[List["MessagesForeignMessage"]] = None
    geo: Optional["BaseGeo"] = None
    id: Optional[int] = None
    important: Optional[bool] = None
    is_hidden: Optional[bool] = None
    is_cropped: Optional[bool] = None
    keyboard: Optional["MessagesKeyboard"] = None
    members_count: Optional[int] = None
    # out: 1 if the message is outgoing.
    out: Optional["BaseBoolInt"] = None
    payload: Optional[str] = None
    peer_id: Optional[int] = None
    random_id: Optional[int] = None
    ref: Optional[str] = None
    ref_source: Optional[str] = None
    reply_message: Optional["MessagesForeignMessage"] = None
    text: Optional[str] = None
    update_time: Optional[int] = None
    was_listened: Optional[bool] = None
    pinned_at: Optional[int] = None


class MessagesMessageAction(PydanticModel):
    # Service action carried by a system message (see
    # MessagesMessageActionStatus for the kinds).
    conversation_message_id: Optional[int] = None
    email: Optional[str] = None
    member_id: Optional[int] = None
    message: Optional[str] = None
    photo: Optional["MessagesMessageActionPhoto"] = None
    text: Optional[str] = None
    type: Optional["MessagesMessageActionStatus"] = None


class MessagesMessageActionPhoto(PydanticModel):
    # New chat photo referenced by a chat_photo_update action.
    photo_100: Optional[str] = None
    photo_200: Optional[str] = None
    photo_50: Optional[str] = None


class MessagesMessageActionStatus(enum.Enum):
    # Kinds of chat service actions.
    CHAT_PHOTO_UPDATE = "chat_photo_update"
    CHAT_PHOTO_REMOVE = "chat_photo_remove"
    CHAT_CREATE = "chat_create"
    CHAT_TITLE_UPDATE = "chat_title_update"
    CHAT_INVITE_USER = "chat_invite_user"
    CHAT_KICK_USER = "chat_kick_user"
    CHAT_PIN_MESSAGE = "chat_pin_message"
    CHAT_UNPIN_MESSAGE = "chat_unpin_message"
    CHAT_INVITE_USER_BY_LINK = "chat_invite_user_by_link"
class MessagesMessageAttachment(PydanticModel):
    # Union-style attachment container for regular messages: exactly one
    # media field is populated, selected by `type`.
    audio: Optional["AudioAudio"] = None
    audio_message: Optional["MessagesAudioMessage"] = None
    call: Optional["CallsCall"] = None
    doc: Optional["DocsDoc"] = None
    gift: Optional["GiftsLayout"] = None
    graffiti: Optional["MessagesGraffiti"] = None
    link: Optional["BaseLink"] = None
    market: Optional["MarketMarketItem"] = None
    market_market_album: Optional["MarketMarketAlbum"] = None
    photo: Optional["PhotosPhoto"] = None
    sticker: Optional["BaseSticker"] = None
    story: Optional["StoriesStory"] = None
    type: Optional["MessagesMessageAttachmentType"] = None
    video: Optional["VideoVideo"] = None
    wall: Optional["WallWallpostFull"] = None
    wall_reply: Optional["WallWallComment"] = None
    poll: Optional["PollsPoll"] = None


class MessagesMessageAttachmentType(enum.Enum):
    # Discriminator values for MessagesMessageAttachment. Note ARTICLE and
    # STORY asymmetry: ARTICLE has no field above, story has no enum?
    # — actually story is present as a field; ARTICLE arrives typed but
    # without a dedicated field in this schema.
    PHOTO = "photo"
    AUDIO = "audio"
    VIDEO = "video"
    DOC = "doc"
    LINK = "link"
    MARKET = "market"
    MARKET_ALBUM = "market_album"
    GIFT = "gift"
    STICKER = "sticker"
    WALL = "wall"
    WALL_REPLY = "wall_reply"
    ARTICLE = "article"
    POLL = "poll"
    CALL = "call"
    GRAFFITI = "graffiti"
    AUDIO_MESSAGE = "audio_message"


class MessagesMessageRequestData(PydanticModel):
    # State of a pending message request (chat join by invitation).
    status: Optional[str] = None
    inviter_id: Optional[int] = None
    request_date: Optional[int] = None


class MessagesMessagesArray(PydanticModel):
    # Generic count+items envelope of messages.
    count: Optional[int] = None
    items: Optional[List["MessagesMessage"]] = None


class MessagesOutReadBy(PydanticModel):
    # Who has read the outgoing messages (count + member ids).
    count: Optional[int] = None
    member_ids: Optional[List[int]] = None


class MessagesPinnedMessage(PydanticModel):
    # Pinned message snapshot (subset of MessagesMessage fields).
    attachments: Optional[List["MessagesMessageAttachment"]] = None
    conversation_message_id: Optional[int] = None
    date: Optional[int] = None
    from_id: Optional[int] = None
    fwd_messages: Optional[List["MessagesForeignMessage"]] = None
    geo: Optional["BaseGeo"] = None
    id: Optional[int] = None
    peer_id: Optional[int] = None
    reply_message: Optional["MessagesForeignMessage"] = None
    text: Optional[str] = None
    keyboard: Optional["MessagesKeyboard"] = None


class MessagesPushSettings(PydanticModel):
    # Per-conversation push notification overrides.
    disabled_forever: Optional[bool] = None
    disabled_until: Optional[int] = None
    no_sound: Optional[bool] = None
    disabled_mentions: Optional[bool] = None
    disabled_mass_mentions: Optional[bool] = None


class MessagesTemplateActionTypeNames(enum.Enum):
    # Keyboard/template button action kinds.
    TEXT = "text"
    START = "start"
    LOCATION = "location"
    VKPAY = "vkpay"
    OPEN_APP = "open_app"
    OPEN_PHOTO = "open_photo"
    OPEN_LINK = "open_link"
    CALLBACK = "callback"
class UsersUserXtrType(UsersUser):
    # User profile extended with an explicit object-type discriminator.
    type: Optional["UsersUserType"] = None


class MessagesUserXtrInvitedBy(UsersUserXtrType):
    # Chat member profile carrying who invited them.
    invited_by: Optional[int] = None


class NewsfeedCommentsFilters(enum.Enum):
    # Object kinds accepted by newsfeed.getComments filters.
    POST = "post"
    PHOTO = "photo"
    VIDEO = "video"
    TOPIC = "topic"
    NOTE = "note"


class NewsfeedEventActivity(PydanticModel):
    # Event block attached to an event-community post.
    address: Optional[str] = None
    button_text: Optional[str] = None
    friends: Optional[List[int]] = None
    member_status: Optional["GroupsGroupFullMemberStatus"] = None
    text: Optional[str] = None
    time: Optional[int] = None


class NewsfeedFilters(enum.Enum):
    # Feed content filters for newsfeed.get.
    POST = "post"
    PHOTO = "photo"
    PHOTO_TAG = "photo_tag"
    WALL_PHOTO = "wall_photo"
    FRIEND = "friend"
    NOTE = "note"
    AUDIO = "audio"
    VIDEO = "video"
    AUDIO_PLAYLIST = "audio_playlist"
    CLIP = "clip"


class NewsfeedIgnoreItemType(enum.Enum):
    # Item kinds for newsfeed.ignoreItem; member names describe the source,
    # values are the wire tokens.
    POST_ON_THE_WALL = "wall"
    TAG_ON_A_PHOTO = "tag"
    PROFILE_PHOTO = "profilephoto"
    VIDEO = "video"
    PHOTO = "photo"
    AUDIO = "audio"


class NewsfeedItemBase(PydanticModel):
    # Common envelope of every newsfeed item: type, source and timestamp.
    type: Optional["NewsfeedNewsfeedItemType"] = None
    source_id: Optional[int] = None
    date: Optional[int] = None


class NewsfeedItemAudio(NewsfeedItemBase):
    # "New audio" feed item.
    audio: Optional["NewsfeedItemAudioAudio"] = None
    post_id: Optional[int] = None


class NewsfeedItemAudioAudio(PydanticModel):
    # count+items envelope of audios inside a feed item.
    count: Optional[int] = None
    items: Optional[List["AudioAudio"]] = None


class NewsfeedItemDigest(NewsfeedItemBase):
    # Recommended-posts digest block in the feed.
    feed_id: Optional[str] = None
    items: Optional[List["NewsfeedItemDigestItem"]] = None
    main_post_ids: Optional[List[str]] = None
    template: Optional[str] = None
    header: Optional["NewsfeedItemDigestHeader"] = None
    footer: Optional["NewsfeedItemDigestFooter"] = None
    track_code: Optional[str] = None


class NewsfeedItemDigestButton(PydanticModel):
    # Call-to-action button in a digest header/footer.
    title: Optional[str] = None
    style: Optional[str] = None


class NewsfeedItemDigestFooter(PydanticModel):
    style: Optional[str] = None
    text: Optional[str] = None
    button: Optional["NewsfeedItemDigestButton"] = None


class NewsfeedItemDigestFullItem(PydanticModel):
    # Expanded digest entry: a post plus the attachment chosen as preview.
    text: Optional[str] = None
    source_name: Optional[str] = None
    attachment_index: Optional[int] = None
    attachment: Optional["WallWallpostAttachment"] = None
    style: Optional[str] = None
    post: Optional["WallWallpost"] = None


class NewsfeedItemDigestHeader(PydanticModel):
    title: Optional[str] = None
    subtitle: Optional[str] = None
    style: Optional[str] = None
    button: Optional["NewsfeedItemDigestButton"] = None
class WallWallpost(PydanticModel):
    # A wall post (base shape, no comments block here).
    access_key: Optional[str] = None
    attachments: Optional[List["WallWallpostAttachment"]] = None
    copyright: Optional["WallPostCopyright"] = None
    date: Optional[int] = None
    edited: Optional[int] = None
    from_id: Optional[int] = None
    geo: Optional["WallGeo"] = None
    id: Optional[int] = None
    is_archived: Optional[bool] = None
    is_favorite: Optional[bool] = None
    likes: Optional["BaseLikesInfo"] = None
    owner_id: Optional[int] = None
    # Untyped in the upstream schema, hence a raw dict.
    poster: Optional[dict] = None
    post_id: Optional[int] = None
    parents_stack: Optional[List[int]] = None
    post_source: Optional["WallPostSource"] = None
    post_type: Optional["WallPostType"] = None
    reposts: Optional["BaseRepostsInfo"] = None
    signer_id: Optional[int] = None
    text: Optional[str] = None
    views: Optional["WallViews"] = None


# Digest entries are plain wall posts.
NewsfeedItemDigestItem = WallWallpost


class NewsfeedItemFriend(NewsfeedItemBase):
    # "New friends" feed item.
    friends: Optional["NewsfeedItemFriendFriends"] = None


class NewsfeedItemFriendFriends(PydanticModel):
    count: Optional[int] = None
    items: Optional[List["BaseUserId"]] = None


class NewsfeedItemHolidayRecommendationsBlockHeader(PydanticModel):
    # Header of the holiday-recommendations feed block.
    title: Optional[str] = None
    subtitle: Optional[str] = None
    image: Optional[List["BaseImage"]] = None
    action: Optional["BaseLinkButtonAction"] = None


class WallCarouselBase(PydanticModel):
    # Mixin adding the carousel scroll offset to feed items.
    carousel_offset: Optional[int] = None


class NewsfeedItemPhoto(WallCarouselBase, NewsfeedItemBase):
    # "New photos" feed item.
    photos: Optional["NewsfeedItemPhotoPhotos"] = None
    post_id: Optional[int] = None


class NewsfeedItemPhotoPhotos(PydanticModel):
    count: Optional[int] = None
    items: Optional[List["NewsfeedNewsfeedPhoto"]] = None


class NewsfeedItemPhotoTag(WallCarouselBase, NewsfeedItemBase):
    # "New photo tags" feed item.
    photo_tags: Optional["NewsfeedItemPhotoTagPhotoTags"] = None
    post_id: Optional[int] = None


class NewsfeedItemPhotoTagPhotoTags(PydanticModel):
    count: Optional[int] = None
    items: Optional[List["NewsfeedNewsfeedPhoto"]] = None


class NewsfeedItemPromoButton(NewsfeedItemBase):
    # Promotional button block in the feed.
    text: Optional[str] = None
    title: Optional[str] = None
    action: Optional["NewsfeedItemPromoButtonAction"] = None
    images: Optional[List["NewsfeedItemPromoButtonImage"]] = None
    track_code: Optional[str] = None


class NewsfeedItemPromoButtonAction(PydanticModel):
    url: Optional[str] = None
    type: Optional[str] = None
    target: Optional[str] = None


class NewsfeedItemPromoButtonImage(PydanticModel):
    width: Optional[int] = None
    height: Optional[int] = None
    url: Optional[str] = None


class NewsfeedItemTopic(NewsfeedItemBase):
    # Board topic activity feed item.
    comments: Optional["BaseCommentsInfo"] = None
    likes: Optional["BaseLikesInfo"] = None
    post_id: Optional[int] = None
    text: Optional[str] = None


class NewsfeedItemVideo(WallCarouselBase, NewsfeedItemBase):
    # "New videos" feed item.
    video: Optional["NewsfeedItemVideoVideo"] = None


class NewsfeedItemVideoVideo(PydanticModel):
    count: Optional[int] = None
    items: Optional[List["VideoVideo"]] = None
class NewsfeedItemWallpost(WallCarouselBase, NewsfeedItemBase):
    # A wall post as it appears inside the newsfeed (adds feedback block,
    # repost history and view counters to the base item envelope).
    activity: Optional["NewsfeedEventActivity"] = None
    attachments: Optional[List["WallWallpostAttachment"]] = None
    comments: Optional["BaseCommentsInfo"] = None
    copy_history: Optional[List["WallWallpost"]] = None
    feedback: Optional["NewsfeedItemWallpostFeedback"] = None
    geo: Optional["BaseGeo"] = None
    is_favorite: Optional[bool] = None
    likes: Optional["BaseLikesInfo"] = None
    marked_as_ads: Optional["BaseBoolInt"] = None
    post_id: Optional[int] = None
    post_source: Optional["WallPostSource"] = None
    post_type: Optional["NewsfeedItemWallpostType"] = None
    reposts: Optional["BaseRepostsInfo"] = None
    signer_id: Optional[int] = None
    text: Optional[str] = None
    views: Optional["WallViews"] = None
    short_text_rate: Optional[int] = None


class NewsfeedItemWallpostFeedback(PydanticModel):
    # In-feed feedback survey attached to a post (buttons or star rating).
    type: Optional["NewsfeedItemWallpostFeedbackType"] = None
    question: Optional[str] = None
    answers: Optional[List["NewsfeedItemWallpostFeedbackAnswer"]] = None
    stars_count: Optional[int] = None
    gratitude: Optional[str] = None


class NewsfeedItemWallpostFeedbackAnswer(PydanticModel):
    title: Optional[str] = None
    id: Optional[str] = None


class NewsfeedItemWallpostFeedbackType(enum.Enum):
    BUTTONS = "buttons"
    STARS = "stars"


class NewsfeedItemWallpostType(enum.Enum):
    # Post flavor: original, repost ("copy") or reply.
    POST = "post"
    COPY = "copy"
    REPLY = "reply"


class NewsfeedList(PydanticModel):
    # A custom newsfeed list (id + title).
    id: Optional[int] = None
    title: Optional[str] = None


class NewsfeedListFull(NewsfeedList):
    # List with its settings: repost suppression and followed source ids.
    no_reposts: Optional["BaseBoolInt"] = None
    source_ids: Optional[List[int]] = None


class NewsfeedNewsfeedItem(PydanticModel):
    # Placeholder base for the polymorphic feed item union; carries no
    # fields of its own in this schema.
    pass


class NewsfeedNewsfeedItemType(enum.Enum):
    # Discriminator values for feed items (NewsfeedItemBase.type).
    POST = "post"
    PHOTO = "photo"
    PHOTO_TAG = "photo_tag"
    WALL_PHOTO = "wall_photo"
    FRIEND = "friend"
    AUDIO = "audio"
    VIDEO = "video"
    TOPIC = "topic"
    DIGEST = "digest"
    STORIES = "stories"
class PhotosPhoto(PydanticModel):
    # A photo object with its size variants.
    access_key: Optional[str] = None
    album_id: Optional[int] = None
    date: Optional[int] = None
    height: Optional[int] = None
    id: Optional[int] = None
    images: Optional[List["PhotosImage"]] = None
    # NOTE(review): geo coordinates typed as int here; upstream schema
    # presumably uses number (float) — TODO confirm, int would truncate.
    lat: Optional[int] = None
    long: Optional[int] = None
    owner_id: Optional[int] = None
    photo_256: Optional[str] = None
    can_comment: Optional["BaseBoolInt"] = None
    place: Optional[str] = None
    post_id: Optional[int] = None
    sizes: Optional[List["PhotosPhotoSizes"]] = None
    text: Optional[str] = None
    user_id: Optional[int] = None
    width: Optional[int] = None
    has_tags: Optional[bool] = None
    restrictions: Optional["MediaRestriction"] = None


class NewsfeedNewsfeedPhoto(PhotosPhoto):
    # Photo as shown in the feed: adds like/comment/repost counters.
    likes: Optional["BaseLikes"] = None
    comments: Optional["BaseObjectCount"] = None
    can_repost: Optional["BaseBoolInt"] = None


class NotesNote(PydanticModel):
    # A user note with comment counters and both raw and wiki-rendered text.
    read_comments: Optional[int] = None
    can_comment: Optional["BaseBoolInt"] = None
    comments: Optional[int] = None
    date: Optional[int] = None
    id: Optional[int] = None
    owner_id: Optional[int] = None
    text: Optional[str] = None
    text_wiki: Optional[str] = None
    title: Optional[str] = None
    view_url: Optional[str] = None


class NotesNoteComment(PydanticModel):
    # Comment on a note; nid/oid/uid are legacy short names for note id,
    # owner id and author user id.
    date: Optional[int] = None
    id: Optional[int] = None
    message: Optional[str] = None
    nid: Optional[int] = None
    oid: Optional[int] = None
    reply_to: Optional[int] = None
    uid: Optional[int] = None


class NotificationsFeedback(PydanticModel):
    # The object that triggered a notification (post-like shape).
    attachments: Optional[List["WallWallpostAttachment"]] = None
    from_id: Optional[int] = None
    geo: Optional["BaseGeo"] = None
    id: Optional[int] = None
    likes: Optional["BaseLikesInfo"] = None
    text: Optional[str] = None
    to_id: Optional[int] = None


class NotificationsNotification(PydanticModel):
    # One notification: when, what triggered it, its parent and a reply.
    date: Optional[int] = None
    feedback: Optional["NotificationsFeedback"] = None
    parent: Optional["NotificationsNotificationParent"] = None
    reply: Optional["NotificationsReply"] = None
    type: Optional[str] = None


# List-item alias used by notifications.get responses.
NotificationsNotificationItem = NotificationsNotification
class WallWallpostToId(PydanticModel):
    # Wall post variant addressed to a wall (`to_id`), with copy metadata.
    attachments: Optional[List["WallWallpostAttachment"]] = None
    comments: Optional["BaseCommentsInfo"] = None
    copy_owner_id: Optional[int] = None
    copy_post_id: Optional[int] = None
    date: Optional[int] = None
    from_id: Optional[int] = None
    geo: Optional["WallGeo"] = None
    id: Optional[int] = None
    is_favorite: Optional[bool] = None
    likes: Optional["BaseLikesInfo"] = None
    post_id: Optional[int] = None
    post_source: Optional["WallPostSource"] = None
    post_type: Optional["WallPostType"] = None
    reposts: Optional["BaseRepostsInfo"] = None
    signer_id: Optional[int] = None
    text: Optional[str] = None
    to_id: Optional[int] = None


class BoardTopic(PydanticModel):
    # A discussion-board topic with creation/update audit fields.
    comments: Optional[int] = None
    created: Optional[int] = None
    created_by: Optional[int] = None
    id: Optional[int] = None
    is_closed: Optional["BaseBoolInt"] = None
    is_fixed: Optional["BaseBoolInt"] = None
    title: Optional[str] = None
    updated: Optional[int] = None
    updated_by: Optional[int] = None


class VideoVideo(PydanticModel):
    # A video object: permissions, media variants, live-stream state and
    # social counters.
    access_key: Optional[str] = None
    adding_date: Optional[int] = None
    can_comment: Optional["BaseBoolInt"] = None
    can_edit: Optional["BaseBoolInt"] = None
    can_like: Optional["BaseBoolInt"] = None
    can_repost: Optional["BaseBoolInt"] = None
    can_subscribe: Optional["BaseBoolInt"] = None
    can_add_to_faves: Optional["BaseBoolInt"] = None
    can_add: Optional["BaseBoolInt"] = None
    can_attach_link: Optional["BaseBoolInt"] = None
    is_private: Optional["BaseBoolInt"] = None
    comments: Optional[int] = None
    date: Optional[int] = None
    description: Optional[str] = None
    duration: Optional[int] = None
    image: Optional[List["VideoVideoImage"]] = None
    first_frame: Optional[List["VideoVideoImage"]] = None
    width: Optional[int] = None
    height: Optional[int] = None
    id: Optional[int] = None
    owner_id: Optional[int] = None
    user_id: Optional[int] = None
    title: Optional[str] = None
    is_favorite: Optional[bool] = None
    player: Optional[str] = None
    # "Property exists" markers: presence of the key is the signal.
    processing: Optional["BasePropertyExists"] = None
    converting: Optional["BaseBoolInt"] = None
    restriction: Optional["MediaRestriction"] = None
    added: Optional["BaseBoolInt"] = None
    is_subscribed: Optional["BaseBoolInt"] = None
    track_code: Optional[str] = None
    repeat: Optional["BasePropertyExists"] = None
    type: Optional[str] = None
    views: Optional[int] = None
    local_views: Optional[int] = None
    content_restricted: Optional[int] = None
    content_restricted_message: Optional[str] = None
    balance: Optional[int] = None
    # Live-stream fields.
    live_status: Optional[str] = None
    live: Optional["BasePropertyExists"] = None
    upcoming: Optional["BasePropertyExists"] = None
    live_start_time: Optional[int] = None
    live_notify: Optional["BaseBoolInt"] = None
    spectators: Optional[int] = None
    platform: Optional[str] = None
    likes: Optional["BaseLikes"] = None
    reposts: Optional["BaseRepostsInfo"] = None
class NotificationsNotificationsComment(PydanticModel):
    # A comment referenced by a notification, with its possible hosts.
    date: Optional[int] = None
    id: Optional[int] = None
    owner_id: Optional[int] = None
    photo: Optional["PhotosPhoto"] = None
    post: Optional["WallWallpost"] = None
    text: Optional[str] = None
    topic: Optional["BoardTopic"] = None
    video: Optional["VideoVideo"] = None


class NotificationsNotificationParent(WallWallpostToId, PhotosPhoto, BoardTopic, VideoVideo, NotificationsNotificationsComment):
    # Schema "allOf" union: merges the fields of every possible parent kind
    # (post, photo, topic, video, comment) into one permissive model.
    pass


class NotificationsReply(PydanticModel):
    # Reply snippet inside a notification.
    date: Optional[int] = None
    id: Optional[int] = None
    # NOTE(review): `text` typed as int, unlike every other text field in
    # this file; this mirrors a known quirk of the upstream schema —
    # TODO confirm before "fixing".
    text: Optional[int] = None


class NotificationsSendMessageError(PydanticModel):
    # Per-recipient error for notifications.sendMessage.
    code: Optional[int] = None
    description: Optional[str] = None


class NotificationsSendMessageItem(PydanticModel):
    # Per-recipient result: success flag or the error above.
    user_id: Optional[int] = None
    status: Optional[bool] = None
    error: Optional["NotificationsSendMessageError"] = None


class OauthError(PydanticModel):
    # OAuth error envelope returned by the authorization endpoint.
    error: Optional[str] = None
    error_description: Optional[str] = None
    redirect_uri: Optional[str] = None


class OrdersAmount(PydanticModel):
    # Price tiers for in-app purchases, with their currency.
    amounts: Optional[List["OrdersAmountItem"]] = None
    currency: Optional[str] = None


class OrdersAmountItem(PydanticModel):
    amount: Optional[int] = None
    description: Optional[str] = None
    votes: Optional[str] = None


class OrdersOrder(PydanticModel):
    # A payment order in an application.
    amount: Optional[int] = None
    app_order_id: Optional[int] = None
    cancel_transaction_id: Optional[int] = None
    date: Optional[int] = None
    id: Optional[int] = None
    item: Optional[str] = None
    receiver_id: Optional[int] = None
    status: Optional[str] = None
    transaction_id: Optional[int] = None
    user_id: Optional[int] = None


class OrdersSubscription(PydanticModel):
    # A recurring in-app subscription with its billing timeline.
    cancel_reason: Optional[str] = None
    create_time: Optional[int] = None
    id: Optional[int] = None
    item_id: Optional[str] = None
    next_bill_time: Optional[int] = None
    pending_cancel: Optional[bool] = None
    period: Optional[int] = None
    period_start_time: Optional[int] = None
    price: Optional[int] = None
    status: Optional[str] = None
    test_mode: Optional[bool] = None
    trial_expire_time: Optional[int] = None
    update_time: Optional[int] = None


class OwnerState(PydanticModel):
    # Owner (page) state code plus a human-readable description.
    state: Optional[int] = None
    description: Optional[str] = None
class PagesPrivacySettings(enum.IntEnum):
COMMUNITY_MANAGERS_ONLY = 0
COMMUNITY_MEMBERS_ONLY = 1
EVERYONE = 2
class PagesWikipage(PydanticModel):
    """Basic information about a community wiki page."""

    creator_id: Optional[int] = None
    # Fixed: generated code had Optional[int] here (upstream schema bug); the
    # API returns the creator's display name as a string, matching
    # ``editor_name`` below.
    creator_name: Optional[str] = None
    editor_id: Optional[int] = None
    editor_name: Optional[str] = None
    group_id: Optional[int] = None
    id: Optional[int] = None
    title: Optional[str] = None
    views: Optional[int] = None
    who_can_edit: Optional["PagesPrivacySettings"] = None
    who_can_view: Optional["PagesPrivacySettings"] = None
class PagesWikipageFull(PydanticModel):
    # Full wiki-page object, returned by pages.get.
    created: Optional[int] = None
    creator_id: Optional[int] = None
    current_user_can_edit: Optional["BaseBoolInt"] = None
    current_user_can_edit_access: Optional["BaseBoolInt"] = None
    edited: Optional[int] = None
    editor_id: Optional[int] = None
    group_id: Optional[int] = None
    html: Optional[str] = None
    id: Optional[int] = None
    source: Optional[str] = None  # wiki markup source of the page
    title: Optional[str] = None
    view_url: Optional[str] = None
    views: Optional[int] = None
    who_can_edit: Optional["PagesPrivacySettings"] = None
    who_can_view: Optional["PagesPrivacySettings"] = None
class PagesWikipageHistory(PydanticModel):
    # One entry of a wiki page's version history.
    id: Optional[int] = None
    length: Optional[int] = None
    date: Optional[int] = None
    editor_id: Optional[int] = None
    editor_name: Optional[str] = None
# --- photos.* objects -------------------------------------------------------
class PhotosCommentXtrPid(PydanticModel):
    # Photo comment extended with the photo id (pid).
    attachments: Optional[List["WallCommentAttachment"]] = None
    date: Optional[int] = None
    from_id: Optional[int] = None
    id: Optional[int] = None
    likes: Optional["BaseLikesInfo"] = None
    pid: Optional[int] = None
    reply_to_comment: Optional[int] = None
    reply_to_user: Optional[int] = None
    text: Optional[str] = None
    parents_stack: Optional[List[int]] = None
    thread: Optional["CommentThread"] = None
class PhotosImage(PydanticModel):
    # One size variant of a photo.
    height: Optional[int] = None
    type: Optional["PhotosImageType"] = None
    url: Optional[str] = None
    width: Optional[int] = None
class PhotosImageType(enum.Enum):
    # Single-letter size codes used by the VK photo CDN.
    S = "s"
    M = "m"
    X = "x"
    L = "l"
    O = "o"
    P = "p"
    Q = "q"
    R = "r"
    Y = "y"
    Z = "z"
    W = "w"
class PhotosPhotoAlbum(PydanticModel):
    created: Optional[int] = None
    description: Optional[str] = None
    id: Optional[int] = None
    owner_id: Optional[int] = None
    size: Optional[int] = None  # number of photos in the album
    thumb: Optional["PhotosPhoto"] = None
    title: Optional[str] = None
    updated: Optional[int] = None
class PhotosPhotoAlbumFull(PydanticModel):
    can_upload: Optional["BaseBoolInt"] = None
    comments_disabled: Optional["BaseBoolInt"] = None
    created: Optional[int] = None
    description: Optional[str] = None
    id: Optional[int] = None
    owner_id: Optional[int] = None
    size: Optional[int] = None
    sizes: Optional[List["PhotosPhotoSizes"]] = None
    thumb_id: Optional[int] = None
    thumb_is_last: Optional["BaseBoolInt"] = None
    thumb_src: Optional[str] = None
    title: Optional[str] = None
    updated: Optional[int] = None
    upload_by_admins_only: Optional["BaseBoolInt"] = None
class PhotosPhotoFalseable(PydanticModel):
    # Marker type: the schema allows either a photo object or literal False.
    pass
class PhotosPhotoFull(PydanticModel):
    access_key: Optional[str] = None
    album_id: Optional[int] = None
    can_comment: Optional["BaseBoolInt"] = None
    date: Optional[int] = None
    height: Optional[int] = None
    id: Optional[int] = None
    images: Optional[List["PhotosImage"]] = None
    # NOTE(review): lat/long are geographic coordinates; the generator emitted
    # int but the API returns fractional degrees — confirm against schema.
    lat: Optional[int] = None
    likes: Optional["BaseLikes"] = None
    reposts: Optional["BaseRepostsInfo"] = None
    comments: Optional["BaseObjectCount"] = None
    long: Optional[int] = None
    owner_id: Optional[int] = None
    post_id: Optional[int] = None
    tags: Optional["BaseObjectCount"] = None
    text: Optional[str] = None
    user_id: Optional[int] = None
    width: Optional[int] = None
class PhotosPhotoFullXtrRealOffset(PydanticModel):
    # Full photo extended with its real offset inside the album.
    access_key: Optional[str] = None
    album_id: Optional[int] = None
    can_comment: Optional["BaseBoolInt"] = None
    comments: Optional["BaseObjectCount"] = None
    date: Optional[int] = None
    height: Optional[int] = None
    hidden: Optional["BasePropertyExists"] = None
    id: Optional[int] = None
    lat: Optional[int] = None
    likes: Optional["BaseLikes"] = None
    long: Optional[int] = None
    owner_id: Optional[int] = None
    photo_1280: Optional[str] = None
    photo_130: Optional[str] = None
    photo_2560: Optional[str] = None
    photo_604: Optional[str] = None
    photo_75: Optional[str] = None
    photo_807: Optional[str] = None
    post_id: Optional[int] = None
    real_offset: Optional[int] = None
    # NOTE(review): typed BaseObjectCount here but BaseRepostsInfo in
    # PhotosPhotoFull — mirrors the upstream schema; verify before unifying.
    reposts: Optional["BaseObjectCount"] = None
    sizes: Optional[List["PhotosPhotoSizes"]] = None
    tags: Optional["BaseObjectCount"] = None
    text: Optional[str] = None
    user_id: Optional[int] = None
    width: Optional[int] = None
class PhotosPhotoSizes(PydanticModel):
    # One resized copy of a photo (url is the current field, src is legacy).
    height: Optional[int] = None
    url: Optional[str] = None
    src: Optional[str] = None
    type: Optional["PhotosPhotoSizesType"] = None
    width: Optional[int] = None
class PhotosPhotoSizesType(enum.Enum):
    # Size-code letters for PhotosPhotoSizes.type.
    S = "s"
    M = "m"
    X = "x"
    O = "o"
    P = "p"
    Q = "q"
    R = "r"
    K = "k"
    L = "l"
    Y = "y"
    Z = "z"
    C = "c"
    W = "w"
    A = "a"
    B = "b"
    E = "e"
    I = "i"
    D = "d"
    J = "j"
    TEMP = "temp"
    H = "h"
    G = "g"
    N = "n"
    F = "f"
    MAX = "max"
class PhotosPhotoTag(PydanticModel):
    # A person tag placed on a photo.
    date: Optional[int] = None
    id: Optional[int] = None
    placer_id: Optional[int] = None  # user who placed the tag
    tagged_name: Optional[str] = None
    description: Optional[str] = None
    user_id: Optional[int] = None  # user being tagged
    viewed: Optional["BaseBoolInt"] = None
    # Tag rectangle corners as percentages of the photo dimensions.
    # NOTE(review): generator emitted int; values may be fractional — confirm.
    x: Optional[int] = None
    x2: Optional[int] = None
    y: Optional[int] = None
    y2: Optional[int] = None
class PhotosPhotoUpload(PydanticModel):
    # Upload-server descriptor returned by photos.getUploadServer.
    album_id: Optional[int] = None
    upload_url: Optional[str] = None
    fallback_upload_url: Optional[str] = None
    user_id: Optional[int] = None
    group_id: Optional[int] = None
class PhotosPhotoXtrRealOffset(PydanticModel):
    access_key: Optional[str] = None
    album_id: Optional[int] = None
    date: Optional[int] = None
    height: Optional[int] = None
    hidden: Optional["BasePropertyExists"] = None
    id: Optional[int] = None
    lat: Optional[int] = None
    long: Optional[int] = None
    owner_id: Optional[int] = None
    photo_1280: Optional[str] = None
    photo_130: Optional[str] = None
    photo_2560: Optional[str] = None
    photo_604: Optional[str] = None
    photo_75: Optional[str] = None
    photo_807: Optional[str] = None
    post_id: Optional[int] = None
    real_offset: Optional[int] = None
    sizes: Optional[List["PhotosPhotoSizes"]] = None
    text: Optional[str] = None
    user_id: Optional[int] = None
    width: Optional[int] = None
class PhotosPhotoXtrTagInfo(PydanticModel):
    # Photo extended with info about the tag that references it.
    access_key: Optional[str] = None
    album_id: Optional[int] = None
    date: Optional[int] = None
    height: Optional[int] = None
    id: Optional[int] = None
    lat: Optional[int] = None
    long: Optional[int] = None
    owner_id: Optional[int] = None
    photo_1280: Optional[str] = None
    photo_130: Optional[str] = None
    photo_2560: Optional[str] = None
    photo_604: Optional[str] = None
    photo_75: Optional[str] = None
    photo_807: Optional[str] = None
    placer_id: Optional[int] = None
    post_id: Optional[int] = None
    sizes: Optional[List["PhotosPhotoSizes"]] = None
    tag_created: Optional[int] = None
    tag_id: Optional[int] = None
    text: Optional[str] = None
    user_id: Optional[int] = None
    width: Optional[int] = None
class PhotosTagsSuggestionItem(PydanticModel):
    # One suggestion from photos.getTagsSuggestions.
    title: Optional[str] = None
    caption: Optional[str] = None
    type: Optional[str] = None
    buttons: Optional[List["PhotosTagsSuggestionItemButton"]] = None
    photo: Optional["PhotosPhoto"] = None
    tags: Optional[List["PhotosPhotoTag"]] = None
    track_code: Optional[str] = None
class PhotosTagsSuggestionItemButton(PydanticModel):
    title: Optional[str] = None
    action: Optional[str] = None
    style: Optional[str] = None
# --- podcast objects --------------------------------------------------------
class PodcastCover(PydanticModel):
    sizes: Optional[List["PhotosPhotoSizes"]] = None
class PodcastExternalData(PydanticModel):
    url: Optional[str] = None
    owner_url: Optional[str] = None
    title: Optional[str] = None
    owner_name: Optional[str] = None
    cover: Optional["PodcastCover"] = None
# --- polls.* objects --------------------------------------------------------
class PollsAnswer(PydanticModel):
    id: Optional[int] = None
    rate: Optional[int] = None  # percentage of votes for this answer
    text: Optional[str] = None
    votes: Optional[int] = None
class PollsBackground(PydanticModel):
    # Gradient/tile background shown behind a poll.
    angle: Optional[int] = None
    color: Optional[str] = None  # hex color without '#'
    height: Optional[int] = None
    id: Optional[int] = None
    name: Optional[str] = None
    images: Optional[List["BaseImage"]] = None
    points: Optional[List["BaseGradientPoint"]] = None
    type: Optional[str] = None
    width: Optional[int] = None
class PollsFriend(PydanticModel):
    id: Optional[int] = None
class PollsPoll(PydanticModel):
    anonymous: Optional["PollsPollAnonymous"] = None
    friends: Optional[List["PollsFriend"]] = None
    multiple: Optional[bool] = None  # whether several answers may be chosen
    answer_id: Optional[int] = None
    end_date: Optional[int] = None
    answer_ids: Optional[List[int]] = None
    closed: Optional[bool] = None
    is_board: Optional[bool] = None
    can_edit: Optional[bool] = None
    can_vote: Optional[bool] = None
    can_report: Optional[bool] = None
    can_share: Optional[bool] = None
    photo: Optional["PollsBackground"] = None
    answers: Optional[List["PollsAnswer"]] = None
    created: Optional[int] = None
    id: Optional[int] = None
    owner_id: Optional[int] = None
    author_id: Optional[int] = None
    question: Optional[str] = None
    background: Optional["PollsBackground"] = None
    votes: Optional[int] = None
    disable_unvote: Optional[bool] = None
# Alias: the schema models poll anonymity as a plain boolean.
PollsPollAnonymous = bool
class PollsVoters(PydanticModel):
    answer_id: Optional[int] = None
    users: Optional["PollsVotersUsers"] = None
class PollsVotersUsers(PydanticModel):
    count: Optional[int] = None
    items: Optional[List[int]] = None
# --- prettyCards.* ----------------------------------------------------------
class PrettycardsPrettycard(PydanticModel):
    button: Optional[str] = None
    button_text: Optional[str] = None
    card_id: Optional[str] = None
    images: Optional[List["BaseImage"]] = None
    link_url: Optional[str] = None
    photo: Optional[str] = None
    price: Optional[str] = None
    price_old: Optional[str] = None
    title: Optional[str] = None
# --- search.* objects -------------------------------------------------------
class SearchHint(PydanticModel):
    app: Optional["AppsApp"] = None
    description: Optional[str] = None
    # Leading underscore: 'global' is a Python keyword, so the generator
    # renamed the schema field.
    _global: Optional["BaseBoolInt"] = None
    group: Optional["GroupsGroup"] = None
    profile: Optional["UsersUserMin"] = None
    section: Optional["SearchHintSection"] = None
    type: Optional["SearchHintType"] = None
class SearchHintSection(enum.Enum):
    GROUPS = "groups"
    EVENTS = "events"
    PUBLICS = "publics"
    CORRESPONDENTS = "correspondents"
    PEOPLE = "people"
    FRIENDS = "friends"
    MUTUAL_FRIENDS = "mutual_friends"
class SearchHintType(enum.Enum):
    GROUP = "group"
    PROFILE = "profile"
    VK_APP = "vk_app"
    APP = "app"
    HTML5_GAME = "html5_game"
# --- secure.* objects -------------------------------------------------------
class SecureLevel(PydanticModel):
    level: Optional[int] = None
    uid: Optional[int] = None
class SecureSmsNotification(PydanticModel):
    # NOTE(review): all fields string-typed — mirrors the upstream schema.
    app_id: Optional[str] = None
    date: Optional[str] = None
    id: Optional[str] = None
    message: Optional[str] = None
    user_id: Optional[str] = None
class SecureTokenChecked(PydanticModel):
    date: Optional[int] = None
    expire: Optional[int] = None
    success: Optional[int] = None
    user_id: Optional[int] = None
class SecureTransaction(PydanticModel):
    date: Optional[int] = None
    id: Optional[int] = None
    uid_from: Optional[int] = None
    uid_to: Optional[int] = None
    votes: Optional[int] = None
# --- stats.* objects --------------------------------------------------------
class StatsActivity(PydanticModel):
    comments: Optional[int] = None
    copies: Optional[int] = None
    hidden: Optional[int] = None
    likes: Optional[int] = None
    subscribed: Optional[int] = None
    unsubscribed: Optional[int] = None
class StatsCity(PydanticModel):
    count: Optional[int] = None
    name: Optional[str] = None
    value: Optional[int] = None
class StatsCountry(PydanticModel):
    code: Optional[str] = None
    count: Optional[int] = None
    name: Optional[str] = None
    value: Optional[int] = None
class StatsPeriod(PydanticModel):
    activity: Optional["StatsActivity"] = None
    period_from: Optional[int] = None
    period_to: Optional[int] = None
    reach: Optional["StatsReach"] = None
    visitors: Optional["StatsViews"] = None
class StatsReach(PydanticModel):
    age: Optional[List["StatsSexAge"]] = None
    cities: Optional[List["StatsCity"]] = None
    countries: Optional[List["StatsCountry"]] = None
    mobile_reach: Optional[int] = None
    reach: Optional[int] = None
    reach_subscribers: Optional[int] = None
    sex: Optional[List["StatsSexAge"]] = None
    sex_age: Optional[List["StatsSexAge"]] = None
class StatsSexAge(PydanticModel):
    count: Optional[int] = None
    value: Optional[str] = None  # bucket label, e.g. "f;18-21"
    reach: Optional[int] = None
    reach_subscribers: Optional[int] = None
    count_subscribers: Optional[int] = None
class StatsViews(PydanticModel):
    age: Optional[List["StatsSexAge"]] = None
    cities: Optional[List["StatsCity"]] = None
    countries: Optional[List["StatsCountry"]] = None
    mobile_views: Optional[int] = None
    sex: Optional[List["StatsSexAge"]] = None
    sex_age: Optional[List["StatsSexAge"]] = None
    views: Optional[int] = None
    visitors: Optional[int] = None
class StatsWallpostStat(PydanticModel):
    post_id: Optional[int] = None
    hide: Optional[int] = None
    join_group: Optional[int] = None
    links: Optional[int] = None
    reach_subscribers: Optional[int] = None
    reach_subscribers_count: Optional[int] = None
    reach_total: Optional[int] = None
    reach_total_count: Optional[int] = None
    reach_viral: Optional[int] = None
    reach_ads: Optional[int] = None
    report: Optional[int] = None
    to_group: Optional[int] = None
    unsubscribe: Optional[int] = None
    sex_age: Optional[List["StatsSexAge"]] = None
# --- status / stickers / storage / store ------------------------------------
class StatusStatus(PydanticModel):
    text: Optional[str] = None
    audio: Optional["AudioAudio"] = None
class StickersImageSet(PydanticModel):
    base_url: Optional[str] = None
    version: Optional[int] = None
class StorageValue(PydanticModel):
    key: Optional[str] = None
    value: Optional[str] = None
class StoreProduct(PydanticModel):
    id: Optional[int] = None
    type: Optional[str] = None
    is_new: Optional[bool] = None
    purchased: Optional["BaseBoolInt"] = None
    active: Optional["BaseBoolInt"] = None
    promoted: Optional["BaseBoolInt"] = None
    purchase_date: Optional[int] = None
    title: Optional[str] = None
    stickers: Optional["BaseStickersList"] = None
    style_sticker_ids: Optional[List[int]] = None
    icon: Optional["StoreProductIcon"] = None
    previews: Optional[List["BaseImage"]] = None
    has_animation: Optional[bool] = None
    subtitle: Optional[str] = None
# Alias: a product icon is just a list of images.
StoreProductIcon = List["BaseImage"]
class StoreStickersKeyword(PydanticModel):
    words: Optional[List[str]] = None
    user_stickers: Optional["StoreStickersKeywordStickers"] = None
    promoted_stickers: Optional["StoreStickersKeywordStickers"] = None
    stickers: Optional[List["StoreStickersKeywordSticker"]] = None
class StoreStickersKeywordSticker(PydanticModel):
    pack_id: Optional[int] = None
    sticker_id: Optional[int] = None
# Aliases for sticker collections.
BaseStickersList = List["BaseSticker"]
StoreStickersKeywordStickers = BaseStickersList
# --- stories.* objects ------------------------------------------------------
class StoriesClickableArea(PydanticModel):
    # One vertex of a clickable-sticker polygon (pixel coordinates).
    x: Optional[int] = None
    y: Optional[int] = None
class StoriesClickableSticker(PydanticModel):
    # Interactive overlay on a story; which fields are set depends on `type`.
    clickable_area: Optional[List["StoriesClickableArea"]] = None
    id: Optional[int] = None
    hashtag: Optional[str] = None
    link_object: Optional["BaseLink"] = None
    mention: Optional[str] = None
    tooltip_text: Optional[str] = None
    owner_id: Optional[int] = None
    story_id: Optional[int] = None
    question: Optional[str] = None
    question_button: Optional[str] = None
    place_id: Optional[int] = None
    market_item: Optional["MarketMarketItem"] = None
    audio: Optional["AudioAudio"] = None
    audio_start_time: Optional[int] = None
    style: Optional[str] = None
    type: Optional[str] = None
    subtype: Optional[str] = None
    post_owner_id: Optional[int] = None
    post_id: Optional[int] = None
    poll: Optional["PollsPoll"] = None
    color: Optional[str] = None
    sticker_id: Optional[int] = None
    sticker_pack_id: Optional[int] = None
    app: Optional["AppsAppMin"] = None
    app_context: Optional[str] = None
    has_new_interactions: Optional[bool] = None
    is_broadcast_notify_allowed: Optional[bool] = None
    situational_theme_id: Optional[int] = None
    situational_app_url: Optional[str] = None
class StoriesClickableStickers(PydanticModel):
    clickable_stickers: Optional[List["StoriesClickableSticker"]] = None
    original_height: Optional[int] = None
    original_width: Optional[int] = None
class StoriesFeedItem(PydanticModel):
    # One entry of the stories feed; may itself group several items.
    type: Optional[str] = None
    id: Optional[str] = None
    stories: Optional[List["StoriesStory"]] = None
    grouped: Optional[List["StoriesFeedItem"]] = None
    app: Optional["AppsAppMin"] = None
    promo_data: Optional["StoriesPromoBlock"] = None
    birthday_user_id: Optional[int] = None
class StoriesPromoBlock(PydanticModel):
    name: Optional[str] = None
    photo_50: Optional[str] = None
    photo_100: Optional[str] = None
    not_animated: Optional[bool] = None
class StoriesReplies(PydanticModel):
    count: Optional[int] = None
    new: Optional[int] = None
class StoriesStatLine(PydanticModel):
    name: Optional[str] = None
    counter: Optional[int] = None
    is_unavailable: Optional[bool] = None
class StoriesStory(PydanticModel):
    access_key: Optional[str] = None
    can_comment: Optional["BaseBoolInt"] = None
    can_reply: Optional["BaseBoolInt"] = None
    can_see: Optional["BaseBoolInt"] = None
    can_like: Optional[bool] = None
    can_share: Optional["BaseBoolInt"] = None
    can_hide: Optional["BaseBoolInt"] = None
    date: Optional[int] = None
    expires_at: Optional[int] = None
    id: Optional[int] = None
    is_deleted: Optional[bool] = None
    is_expired: Optional[bool] = None
    link: Optional["StoriesStoryLink"] = None
    owner_id: Optional[int] = None
    # Set when this story is a reply to another story.
    parent_story: Optional["StoriesStory"] = None
    parent_story_access_key: Optional[str] = None
    parent_story_id: Optional[int] = None
    parent_story_owner_id: Optional[int] = None
    photo: Optional["PhotosPhoto"] = None
    replies: Optional["StoriesReplies"] = None
    seen: Optional["BaseBoolInt"] = None
    type: Optional["StoriesStoryType"] = None
    clickable_stickers: Optional["StoriesClickableStickers"] = None
    video: Optional["VideoVideo"] = None
    views: Optional[int] = None
    can_ask: Optional["BaseBoolInt"] = None
    can_ask_anonymous: Optional["BaseBoolInt"] = None
    narratives_count: Optional[int] = None
    first_narrative_title: Optional[str] = None
    birthday_wish_user_id: Optional[int] = None
    can_use_in_narrative: Optional[bool] = None
class StoriesStoryLink(PydanticModel):
    text: Optional[str] = None
    url: Optional[str] = None
class StoriesStoryStats(PydanticModel):
    answer: Optional["StoriesStoryStatsStat"] = None
    bans: Optional["StoriesStoryStatsStat"] = None
    open_link: Optional["StoriesStoryStatsStat"] = None
    replies: Optional["StoriesStoryStatsStat"] = None
    shares: Optional["StoriesStoryStatsStat"] = None
    subscribers: Optional["StoriesStoryStatsStat"] = None
    views: Optional["StoriesStoryStatsStat"] = None
    likes: Optional["StoriesStoryStatsStat"] = None
class StoriesStoryStatsStat(PydanticModel):
    count: Optional[int] = None
    state: Optional["StoriesStoryStatsState"] = None
class StoriesStoryStatsState(enum.Enum):
    ON = "on"
    OFF = "off"
    HIDDEN = "hidden"
class StoriesStoryType(enum.Enum):
    PHOTO = "photo"
    VIDEO = "video"
    LIVE_ACTIVE = "live_active"
    LIVE_FINISHED = "live_finished"
    BIRTHDAY_INVITE = "birthday_invite"
class StoriesUploadLinkText(enum.Enum):
    # Call-to-action captions allowed on a story link.
    TO_STORE = "to_store"
    VOTE = "vote"
    MORE = "more"
    BOOK = "book"
    ORDER = "order"
    ENROLL = "enroll"
    FILL = "fill"
    SIGNUP = "signup"
    BUY = "buy"
    TICKET = "ticket"
    WRITE = "write"
    OPEN = "open"
    LEARN_MORE = "learn_more"
    VIEW = "view"
    GO_TO = "go_to"
    CONTACT = "contact"
    WATCH = "watch"
    PLAY = "play"
    INSTALL = "install"
    READ = "read"
    CALENDAR = "calendar"
class StoriesViewersItem(PydanticModel):
    is_liked: Optional[bool] = None
    user_id: Optional[int] = None
    user: Optional["UsersUserFull"] = None
# --- users.* objects --------------------------------------------------------
class UsersCareer(PydanticModel):
    city_id: Optional[int] = None
    city_name: Optional[str] = None
    company: Optional[str] = None
    country_id: Optional[int] = None
    # Leading underscore: 'from' is a Python keyword, so the generator
    # renamed the schema field (start year).
    _from: Optional[int] = None
    group_id: Optional[int] = None
    id: Optional[int] = None
    position: Optional[str] = None
    until: Optional[int] = None  # end year
class UsersExports(PydanticModel):
    facebook: Optional[int] = None
    livejournal: Optional[int] = None
    twitter: Optional[int] = None
class UsersFields(enum.Enum):
    # Field names accepted by the `fields` parameter of users.get and friends.
    FIRST_NAME_NOM = "first_name_nom"
    FIRST_NAME_GEN = "first_name_gen"
    FIRST_NAME_DAT = "first_name_dat"
    FIRST_NAME_ACC = "first_name_acc"
    FIRST_NAME_INS = "first_name_ins"
    FIRST_NAME_ABL = "first_name_abl"
    LAST_NAME_NOM = "last_name_nom"
    LAST_NAME_GEN = "last_name_gen"
    LAST_NAME_DAT = "last_name_dat"
    LAST_NAME_ACC = "last_name_acc"
    LAST_NAME_INS = "last_name_ins"
    LAST_NAME_ABL = "last_name_abl"
    PHOTO_ID = "photo_id"
    VERIFIED = "verified"
    SEX = "sex"
    BDATE = "bdate"
    CITY = "city"
    COUNTRY = "country"
    HOME_TOWN = "home_town"
    HAS_PHOTO = "has_photo"
    PHOTO_50 = "photo_50"
    PHOTO_100 = "photo_100"
    PHOTO_200_ORIG = "photo_200_orig"
    PHOTO_200 = "photo_200"
    PHOTO_400 = "photo_400"
    PHOTO_400_ORIG = "photo_400_orig"
    PHOTO_MAX = "photo_max"
    PHOTO_MAX_ORIG = "photo_max_orig"
    PHOTO_MAX_SIZE = "photo_max_size"
    ONLINE = "online"
    LISTS = "lists"
    DOMAIN = "domain"
    HAS_MOBILE = "has_mobile"
    CONTACTS = "contacts"
    SITE = "site"
    EDUCATION = "education"
    UNIVERSITIES = "universities"
    SCHOOLS = "schools"
    STATUS = "status"
    LAST_SEEN = "last_seen"
    FOLLOWERS_COUNT = "followers_count"
    COUNTERS = "counters"
    COMMON_COUNT = "common_count"
    OCCUPATION = "occupation"
    NICKNAME = "nickname"
    RELATIVES = "relatives"
    RELATION = "relation"
    PERSONAL = "personal"
    CONNECTIONS = "connections"
    EXPORTS = "exports"
    WALL_COMMENTS = "wall_comments"
    ACTIVITIES = "activities"
    INTERESTS = "interests"
    MUSIC = "music"
    MOVIES = "movies"
    TV = "tv"
    BOOKS = "books"
    GAMES = "games"
    ABOUT = "about"
    QUOTES = "quotes"
    CAN_POST = "can_post"
    CAN_SEE_ALL_POSTS = "can_see_all_posts"
    CAN_SEE_AUDIO = "can_see_audio"
    CAN_WRITE_PRIVATE_MESSAGE = "can_write_private_message"
    CAN_SEND_FRIEND_REQUEST = "can_send_friend_request"
    IS_FAVORITE = "is_favorite"
    IS_HIDDEN_FROM_FEED = "is_hidden_from_feed"
    TIMEZONE = "timezone"
    SCREEN_NAME = "screen_name"
    MAIDEN_NAME = "maiden_name"
    CROP_PHOTO = "crop_photo"
    IS_FRIEND = "is_friend"
    FRIEND_STATUS = "friend_status"
    CAREER = "career"
    MILITARY = "military"
    BLACKLISTED = "blacklisted"
    BLACKLISTED_BY_ME = "blacklisted_by_me"
    CAN_SUBSCRIBE_POSTS = "can_subscribe_posts"
    DESCRIPTIONS = "descriptions"
    TRENDING = "trending"
    MUTUAL = "mutual"
    FRIENDSHIP_WEEKS = "friendship_weeks"
    CAN_INVITE_TO_CHATS = "can_invite_to_chats"
    STORIES_ARCHIVE_COUNT = "stories_archive_count"
    VIDEO_LIVE_LEVEL = "video_live_level"
    VIDEO_LIVE_COUNT = "video_live_count"
    CLIPS_COUNT = "clips_count"
    SERVICE_DESCRIPTION = "service_description"
    IS_DEAD = "is_dead"
class UsersLastSeen(PydanticModel):
    platform: Optional[int] = None  # numeric platform code
    time: Optional[int] = None  # unix timestamp
class UsersMilitary(PydanticModel):
    country_id: Optional[int] = None
    # Leading underscore: 'from' is a Python keyword (service start year).
    _from: Optional[int] = None
    id: Optional[int] = None
    unit: Optional[str] = None
    unit_id: Optional[int] = None
    until: Optional[int] = None
class UsersOccupation(PydanticModel):
    id: Optional[int] = None
    name: Optional[str] = None
    type: Optional[str] = None
class UsersOnlineInfo(PydanticModel):
    visible: Optional[bool] = None
    last_seen: Optional[int] = None
    is_online: Optional[bool] = None
    app_id: Optional[int] = None
    is_mobile: Optional[bool] = None
    status: Optional[str] = None
class UsersPersonal(PydanticModel):
    # "Personal views" section of a profile; numeric fields are VK enum codes.
    alcohol: Optional[int] = None
    inspired_by: Optional[str] = None
    langs: Optional[List[str]] = None
    life_main: Optional[int] = None
    people_main: Optional[int] = None
    political: Optional[int] = None
    religion: Optional[str] = None
    religion_id: Optional[int] = None
    smoking: Optional[int] = None
class UsersRelative(PydanticModel):
    birth_date: Optional[str] = None
    id: Optional[int] = None
    name: Optional[str] = None
    type: Optional[str] = None
class UsersSchool(PydanticModel):
    city: Optional[int] = None
    # Leading underscore: 'class' is a Python keyword (class letter).
    _class: Optional[str] = None
    country: Optional[int] = None
    id: Optional[str] = None
    name: Optional[str] = None
    type: Optional[int] = None
    type_str: Optional[str] = None
    year_from: Optional[int] = None
    year_graduated: Optional[int] = None
    year_to: Optional[int] = None
    speciality: Optional[str] = None
class UsersSubscriptionsItem(PydanticModel):
    # Placeholder: the schema defines this as an unconstrained object.
    pass
class UsersUniversity(PydanticModel):
    chair: Optional[int] = None
    chair_name: Optional[str] = None
    city: Optional[int] = None
    country: Optional[int] = None
    education_form: Optional[str] = None
    education_status: Optional[str] = None
    faculty: Optional[int] = None
    faculty_name: Optional[str] = None
    graduation: Optional[int] = None
    id: Optional[int] = None
    name: Optional[str] = None
    university_group_id: Optional[int] = None
class UsersUserConnections(PydanticModel):
    skype: Optional[str] = None
    facebook: Optional[str] = None
    facebook_name: Optional[str] = None
    twitter: Optional[str] = None
    livejournal: Optional[str] = None
    instagram: Optional[str] = None
class UsersUserCounters(PydanticModel):
    albums: Optional[int] = None
    audios: Optional[int] = None
    followers: Optional[int] = None
    friends: Optional[int] = None
    gifts: Optional[int] = None
    groups: Optional[int] = None
    notes: Optional[int] = None
    online_friends: Optional[int] = None
    pages: Optional[int] = None
    photos: Optional[int] = None
    subscriptions: Optional[int] = None
    user_photos: Optional[int] = None
    user_videos: Optional[int] = None
    videos: Optional[int] = None
    new_photo_tags: Optional[int] = None
    new_recognition_tags: Optional[int] = None
    mutual_friends: Optional[int] = None
    posts: Optional[int] = None
    articles: Optional[int] = None
    wishes: Optional[int] = None
    podcasts: Optional[int] = None
    clips: Optional[int] = None
    clips_followers: Optional[int] = None
class UsersUserRelation(enum.IntEnum):
    # Relationship status codes used by the `relation` profile field.
    NOT_SPECIFIED = 0
    SINGLE = 1
    IN_A_RELATIONSHIP = 2
    ENGAGED = 3
    MARRIED = 4
    COMPLICATED = 5
    ACTIVELY_SEARCHING = 6
    IN_LOVE = 7
    IN_A_CIVIL_UNION = 8
class UsersUserType(enum.Enum):
    PROFILE = "profile"
class UsersUserXtrCounters(UsersUserFull):
    # Full user object extended with counters; no extra fields of its own.
    pass
class UsersUsersArray(PydanticModel):
    count: Optional[int] = None
    items: Optional[List[int]] = None
# --- utils.* objects --------------------------------------------------------
class UtilsDomainResolved(PydanticModel):
    # Result of utils.resolveScreenName.
    object_id: Optional[int] = None
    group_id: Optional[int] = None
    type: Optional["UtilsDomainResolvedType"] = None
class UtilsDomainResolvedType(enum.Enum):
    USER = "user"
    GROUP = "group"
    APPLICATION = "application"
    PAGE = "page"
    VK_APP = "vk_app"
    COMMUNITY_APPLICATION = "community_application"
class UtilsLastShortenedLink(PydanticModel):
    access_key: Optional[str] = None
    key: Optional[str] = None
    short_url: Optional[str] = None
    timestamp: Optional[int] = None
    url: Optional[str] = None
    views: Optional[int] = None
class UtilsLinkChecked(PydanticModel):
    link: Optional[str] = None
    status: Optional["UtilsLinkCheckedStatus"] = None
class UtilsLinkCheckedStatus(enum.Enum):
    NOT_BANNED = "not_banned"
    BANNED = "banned"
    PROCESSING = "processing"
class UtilsLinkStats(PydanticModel):
    key: Optional[str] = None
    stats: Optional[List["UtilsStats"]] = None
class UtilsLinkStatsExtended(PydanticModel):
    key: Optional[str] = None
    stats: Optional[List["UtilsStatsExtended"]] = None
class UtilsShortLink(PydanticModel):
    access_key: Optional[str] = None
    key: Optional[str] = None
    short_url: Optional[str] = None
    url: Optional[str] = None
class UtilsStats(PydanticModel):
    timestamp: Optional[int] = None
    views: Optional[int] = None
class UtilsStatsCity(PydanticModel):
    city_id: Optional[int] = None
    views: Optional[int] = None
class UtilsStatsCountry(PydanticModel):
    country_id: Optional[int] = None
    views: Optional[int] = None
class UtilsStatsExtended(PydanticModel):
    cities: Optional[List["UtilsStatsCity"]] = None
    countries: Optional[List["UtilsStatsCountry"]] = None
    sex_age: Optional[List["UtilsStatsSexAge"]] = None
    timestamp: Optional[int] = None
    views: Optional[int] = None
class UtilsStatsSexAge(PydanticModel):
    age_range: Optional[str] = None
    female: Optional[int] = None
    male: Optional[int] = None
# --- video.* objects --------------------------------------------------------
class VideoLiveInfo(PydanticModel):
    enabled: Optional["BaseBoolInt"] = None
    is_notifications_blocked: Optional["BaseBoolInt"] = None
class VideoLiveSettings(PydanticModel):
    can_rewind: Optional["BaseBoolInt"] = None
    is_endless: Optional["BaseBoolInt"] = None
    max_duration: Optional[int] = None
class VideoRestrictionButton(PydanticModel):
    action: Optional[str] = None
    title: Optional[str] = None
class VideoSaveResult(PydanticModel):
    # Result of video.save: upload_url is where the file should be POSTed.
    access_key: Optional[str] = None
    description: Optional[str] = None
    owner_id: Optional[int] = None
    title: Optional[str] = None
    upload_url: Optional[str] = None
    video_id: Optional[int] = None
class VideoVideoAlbumFull(PydanticModel):
    count: Optional[int] = None
    id: Optional[int] = None
    image: Optional[List["VideoVideoImage"]] = None
    image_blur: Optional["BasePropertyExists"] = None
    is_system: Optional["BasePropertyExists"] = None
    owner_id: Optional[int] = None
    title: Optional[str] = None
    updated_time: Optional[int] = None
class VideoVideoFiles(PydanticModel):
    # Direct file URLs per quality; only the available ones are set.
    external: Optional[str] = None
    mp4_240: Optional[str] = None
    mp4_360: Optional[str] = None
    mp4_480: Optional[str] = None
    mp4_720: Optional[str] = None
    mp4_1080: Optional[str] = None
    flv_320: Optional[str] = None
class VideoVideoFull(VideoVideo):
    files: Optional["VideoVideoFiles"] = None
    live_settings: Optional["VideoLiveSettings"] = None
class BaseImage(PydanticModel):
    id: Optional[str] = None
    height: Optional[int] = None
    url: Optional[str] = None
    width: Optional[int] = None
class VideoVideoImage(BaseImage):
    with_padding: Optional["BasePropertyExists"] = None
class WallAppPost(PydanticModel):
id: Optional[int] = None
name: Optional[str] = None
photo_130: Optional[str] = None
photo_604: Optional[str] = None
class WallAttachedNote(PydanticModel):
comments: Optional[int] = None
date: Optional[int] = None
id: Optional[int] = None
owner_id: Optional[int] = None
read_comments: Optional[int] = None
title: Optional[str] = None
view_url: Optional[str] = None
class WallCommentAttachment(PydanticModel):
audio: Optional["AudioAudio"] = None
doc: Optional["DocsDoc"] = None
link: Optional["BaseLink"] = None
market: Optional["MarketMarketItem"] = None
market_market_album: Optional["MarketMarketAlbum"] = None
note: Optional["WallAttachedNote"] = None
page: Optional["PagesWikipageFull"] = None
photo: Optional["PhotosPhoto"] = None
sticker: Optional["BaseSticker"] = None
type: Optional["WallCommentAttachmentType"] = None
video: Optional["VideoVideo"] = None
class WallCommentAttachmentType(enum.Enum):
PHOTO = "photo"
AUDIO = "audio"
VIDEO = "video"
DOC = "doc"
LINK = "link"
NOTE = "note"
PAGE = "page"
MARKET_MARKET_ALBUM = "market_market_album"
MARKET = "market"
STICKER = "sticker"
class WallGeo(PydanticModel):
coordinates: Optional[str] = None
place: Optional["BasePlace"] = None
showmap: Optional[int] = None
type: Optional[str] = None
class WallGraffiti(PydanticModel):
id: Optional[int] = None
owner_id: Optional[int] = None
photo_200: Optional[str] = None
photo_586: Optional[str] = None
class WallPostCopyright(PydanticModel):
id: Optional[int] = None
link: Optional[str] = None
name: Optional[str] = None
type: Optional[str] = None
class WallPostSource(PydanticModel):
data: Optional[str] = None
platform: Optional[str] = None
type: Optional["WallPostSourceType"] = None
url: Optional[str] = None
class WallPostSourceType(enum.Enum):
VK = "vk"
WIDGET = "widget"
API = "api"
RSS = "rss"
SMS = "sms"
MVK = "mvk"
class WallPostType(enum.Enum):
POST = "post"
COPY = "copy"
REPLY = "reply"
POSTPONE = "postpone"
SUGGEST = "suggest"
class WallPostedPhoto(PydanticModel):
id: Optional[int] = None
owner_id: Optional[int] = None
photo_130: Optional[str] = None
photo_604: Optional[str] = None
class WallViews(PydanticModel):
count: Optional[int] = None
class WallWallComment(PydanticModel):
attachments: Optional[List["WallCommentAttachment"]] = None
date: Optional[int] = None
donut: Optional["WallWallCommentDonut"] = None
from_id: Optional[int] = None
id: Optional[int] = None
likes: Optional["BaseLikesInfo"] = None
real_offset: Optional[int] = None
reply_to_comment: Optional[int] = None
reply_to_user: Optional[int] = None
text: Optional[str] = None
thread: Optional["CommentThread"] = None
post_id: Optional[int] = None
owner_id: Optional[int] = None
parents_stack: Optional[List[int]] = None
deleted: Optional[bool] = None
class WallWallCommentDonut(PydanticModel):
is_don: Optional[bool] = None
placeholder: Optional["WallWallCommentDonutPlaceholder"] = None
class WallWallCommentDonutPlaceholder(PydanticModel):
text: Optional[str] = None
class WallWallpostAttachment(PydanticModel):
    """Generated API model: one wall-post attachment; exactly one media field is
    expected to be populated, discriminated by ``type``."""
    access_key: Optional[str] = None
    album: Optional["PhotosPhotoAlbum"] = None
    app: Optional["WallAppPost"] = None
    audio: Optional["AudioAudio"] = None
    doc: Optional["DocsDoc"] = None
    event: Optional["EventsEventAttach"] = None
    group: Optional["GroupsGroupAttach"] = None
    graffiti: Optional["WallGraffiti"] = None
    link: Optional["BaseLink"] = None
    market: Optional["MarketMarketItem"] = None
    market_album: Optional["MarketMarketAlbum"] = None
    note: Optional["WallAttachedNote"] = None
    page: Optional["PagesWikipageFull"] = None
    photo: Optional["PhotosPhoto"] = None
    photos_list: Optional[List[str]] = None
    poll: Optional["PollsPoll"] = None
    posted_photo: Optional["WallPostedPhoto"] = None
    type: Optional["WallWallpostAttachmentType"] = None
    video: Optional["VideoVideo"] = None
class WallWallpostAttachmentType(enum.Enum):
    """Generated enum: discriminator for WallWallpostAttachment media kinds."""
    PHOTO = "photo"
    POSTED_PHOTO = "posted_photo"
    AUDIO = "audio"
    VIDEO = "video"
    DOC = "doc"
    LINK = "link"
    GRAFFITI = "graffiti"
    NOTE = "note"
    APP = "app"
    POLL = "poll"
    PAGE = "page"
    ALBUM = "album"
    PHOTOS_LIST = "photos_list"
    MARKET_MARKET_ALBUM = "market_market_album"
    MARKET = "market"
    EVENT = "event"
    DONUT_LINK = "donut_link"
class WallWallpostCommentsDonut(PydanticModel):
    """Generated API model: donut info for a wall post's comments section."""
    placeholder: Optional["WallWallpostCommentsDonutPlaceholder"] = None
class WallWallpostCommentsDonutPlaceholder(PydanticModel):
    """Generated API model: placeholder text shown instead of donut-only comments."""
    text: Optional[str] = None
class WallWallpostDonut(PydanticModel):
    """Generated API model: donut (paid) status of a wall post."""
    is_donut: Optional[bool] = None
    paid_duration: Optional[int] = None
    placeholder: Optional["WallWallpostDonutPlaceholder"] = None
    can_publish_free_copy: Optional[bool] = None
    edit_mode: Optional[str] = None
class WallWallpostDonutPlaceholder(PydanticModel):
    """Generated API model: placeholder text shown instead of a donut-only post."""
    text: Optional[str] = None
class WallWallpostFull(WallCarouselBase, WallWallpost):
    """Generated API model: full wall post — base post plus permissions,
    repost history, pin state and comment/ad metadata."""
    copy_history: Optional[List["WallWallpost"]] = None
    can_edit: Optional["BaseBoolInt"] = None
    created_by: Optional[int] = None
    can_delete: Optional["BaseBoolInt"] = None
    can_pin: Optional["BaseBoolInt"] = None
    donut: Optional["WallWallpostDonut"] = None
    is_pinned: Optional[int] = None
    comments: Optional["BaseCommentsInfo"] = None
    marked_as_ads: Optional["BaseBoolInt"] = None
    short_text_rate: Optional[int] = None
class WidgetsCommentMedia(PydanticModel):
    """Generated API model: media item attached to a widget comment."""
    item_id: Optional[int] = None
    owner_id: Optional[int] = None
    thumb_src: Optional[str] = None
    type: Optional["WidgetsCommentMediaType"] = None
class WidgetsCommentMediaType(enum.Enum):
    """Generated enum: media kinds allowed in widget comments."""
    AUDIO = "audio"
    PHOTO = "photo"
    VIDEO = "video"
class WidgetsCommentReplies(PydanticModel):
    """Generated API model: replies block under a widget comment."""
    can_post: Optional["BaseBoolInt"] = None
    count: Optional[int] = None
    replies: Optional[List["WidgetsCommentRepliesItem"]] = None
class WidgetsCommentRepliesItem(PydanticModel):
    """Generated API model: one reply inside a widget comment thread."""
    cid: Optional[int] = None
    date: Optional[int] = None
    likes: Optional["WidgetsWidgetLikes"] = None
    text: Optional[str] = None
    uid: Optional[int] = None
    user: Optional["UsersUserFull"] = None
class WidgetsWidgetComment(PydanticModel):
    """Generated API model: a comment left via the comments widget, with
    attachments, likes/reposts info and optional nested replies."""
    attachments: Optional[List["WallCommentAttachment"]] = None
    can_delete: Optional["BaseBoolInt"] = None
    comments: Optional["WidgetsCommentReplies"] = None
    date: Optional[int] = None
    from_id: Optional[int] = None
    id: Optional[int] = None
    likes: Optional["BaseLikesInfo"] = None
    media: Optional["WidgetsCommentMedia"] = None
    post_source: Optional["WallPostSource"] = None
    post_type: Optional[int] = None
    reposts: Optional["BaseRepostsInfo"] = None
    text: Optional[str] = None
    to_id: Optional[int] = None
    user: Optional["UsersUserFull"] = None
class WidgetsWidgetLikes(PydanticModel):
    """Generated API model: like counter for a widget comment/reply."""
    count: Optional[int] = None
class WidgetsWidgetPage(PydanticModel):
    """Generated API model: an external page tracked by the comments/like widgets."""
    comments: Optional["BaseObjectCount"] = None
    date: Optional[int] = None
    description: Optional[str] = None
    id: Optional[int] = None
    likes: Optional["BaseObjectCount"] = None
    page_id: Optional[str] = None
    photo: Optional[str] = None
    title: Optional[str] = None
    url: Optional[str] = None
# Resolve forward ("stringified") references on every model in this module.
# NOTE(review): this replaces the generated list of ~400 explicit
# ``<Model>.update_forward_refs()`` calls with a single loop over the module
# namespace, so newly added models are picked up automatically. The classes
# touched are a superset of the original explicit list (every PydanticModel
# subclass visible here); ``update_forward_refs()`` is idempotent and
# order-independent once all classes are defined, so extra calls are harmless.
for _model in list(globals().values()):
    if isinstance(_model, type) and issubclass(_model, PydanticModel):
        _model.update_forward_refs()
del _model
#!/usr/bin/python
import os
import threading
import glob
def parajob():
    """Run every ``paraCal*`` calculation directory concurrently.

    Starts one worker thread per directory, then blocks until all of them
    have finished.
    """
    workers = [
        threading.Thread(target=execvasp, args=(directory,))
        for directory in glob.glob('paraCal*')
    ]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
def execvasp(caldir):
    """Run the VASP launch script ``runvasp.sh`` inside ``caldir``.

    Uses ``subprocess.run`` with ``cwd`` instead of the original shell string
    ``os.system("cd " + caldir + ";sh runvasp.sh")``, so directory names
    containing spaces or shell metacharacters can neither break the command
    nor inject into the shell.
    """
    import subprocess  # local import keeps this file's top-level imports unchanged
    subprocess.run(["sh", "runvasp.sh"], cwd=caldir)
if __name__ == '__main__':
    parajob()
    # Fixed: ``print 'FINISHED'`` is Python 2 print-statement syntax and a
    # SyntaxError under Python 3; use the print() function.
    print('FINISHED')
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Forward-mode tests.
Notes
-----
Arguments func, a, b, c, x, and n are automatically filled in.
Pass --short for a quick run.
"""
import sys
import pytest
import tfe_utils
import utils
def test_grad_unary(func, preserve_result, a):
    """Test gradients of single-argument scalar functions.

    ``(0,)`` differentiates with respect to the first (only) argument.
    """
    utils.test_forward_array(func, (0,), preserve_result, a)
def test_grad_binary(func, preserve_result, a, b):
    """Test gradients of two-argument scalar functions (w.r.t. the first arg)."""
    utils.test_forward_array(func, (0,), preserve_result, a, b)
def test_grad_ternary(func, preserve_result, a, b, c):
    """Test gradients of three-argument scalar functions (w.r.t. the first arg)."""
    utils.test_forward_array(func, (0,), preserve_result, a, b, c)
def test_grad_binary_int(func, preserve_result, a, n):
    """Test gradients of functions with scalar and integer input.

    Only the scalar (first) argument is differentiated; ``n`` stays fixed.
    """
    utils.test_forward_array(func, (0,), preserve_result, a, n)
def test_grad_unary_tensor(func, t):
    """Test gradients of functions with single tensor input."""
    name = func.__name__
    # TODO: remove trace test exemption when tests are consolidated.
    if 'trace' in name:
        return
    not_implemented = ('tfe_rsqrt',)
    if any(fragment in name for fragment in not_implemented):
        utils.assert_forward_not_implemented(func, (0,))
    else:
        tfe_utils.test_forward_tensor(func, (0,), t)
def test_grad_binary_tensor(func, t1, t2):
    """Test gradients of functions with binary tensor inputs."""
    not_implemented = ('tfe_squared_difference',)
    if any(fragment in func.__name__ for fragment in not_implemented):
        utils.assert_forward_not_implemented(func, (0,))
        return
    # Differentiate with respect to each argument in turn.
    for wrt in ((0,), (1,)):
        tfe_utils.test_forward_tensor(func, wrt, t1, t2)
def test_grad_image(func, timage, tkernel, conv2dstrides):
    """Test gradients of image functions.

    Forward mode is not implemented for these ops; assert that explicitly.
    """
    utils.assert_forward_not_implemented(func, (0,))
if __name__ == '__main__':
    # ``--short`` selects the quick run (see module docstring); fail the
    # process if pytest reports any failures.
    assert not pytest.main([__file__, '--short'] + sys.argv[1:])
|
def digits(n):
    """Return the number of characters in the decimal representation of ``n``.

    Note: for negative numbers the leading '-' is counted too, matching the
    plain ``str`` conversion.
    """
    return len(f"{n}")
#!/usr/bin/python
# Demo driver for Sentence: print the sentence, reverse its words
# (via Sentence.reverse_words), then print it again.
from reverseWords_01a import Sentence
s = Sentence('abc def xyz')
s.show()
s.reverse_words()
s.show()
|
from linked_lists_util import print_linked_list, insert_list, Node
class LinkedList:
    """Singly linked list supporting right-rotation of its elements.

    Nodes are any objects exposing a ``next_node`` attribute.
    """

    def __init__(self):
        self.head = None  # first node, or None for an empty list

    def shift_elements(self, shifts):
        """Right-rotate the list by ``shifts`` positions, one rotation at a time.

        Each rotation detaches the current tail and reattaches it as the new
        head. Fixed: now a no-op on an empty list instead of crashing, and
        ``shifts`` is reduced modulo the length (a full cycle is the
        identity, so the result is unchanged).
        """
        length = self.get_length()
        if length == 0:
            return self
        for _ in range(shifts % length):
            old_tail = self.get_tail()
            self.remove_tail()
            self.set_head(new_head=old_tail)
        return self

    def get_tail(self):
        """Return the last node of the list, or None if the list is empty.

        Fixed: previously raised AttributeError on an empty list.
        """
        pointer = self.head
        if pointer is None:
            return None
        while pointer.next_node:
            pointer = pointer.next_node
        return pointer

    def set_head(self, new_head: "Node"):
        """Prepend ``new_head`` to the list and make it the new head.

        (The ``Node`` annotation is a string so this module does not require
        the name at definition time.)
        """
        old_head = self.head
        self.head = new_head
        self.head.next_node = old_head
        return self

    def remove_tail(self):
        """Remove the last node from the list.

        Fixed: handles empty and single-element lists (previously raised
        AttributeError on both).
        """
        if self.head is None or self.head.next_node is None:
            self.head = None
            return self
        pointer = self.head
        while pointer.next_node.next_node:
            pointer = pointer.next_node
        pointer.next_node = None
        return self

    def shift_elements2(self, num_shifts: int):
        """Right-rotate the list by ``num_shifts`` positions in a single pass.

        Splits the list at ``length - (num_shifts % length)`` and splices the
        detached suffix in front of the old head.

        Fixed: ``num_shifts`` is now reduced modulo the list length, so
        shifts equal to or larger than the length work (previously the split
        index went to zero/negative and corrupted the list); empty lists are
        a no-op.

        :param num_shifts: number of positions to rotate by (non-negative int)
        """
        length = self.get_length()
        if length == 0:
            return self
        num_shifts %= length  # full rotations are the identity
        if num_shifts == 0:
            return self
        # Walk to the node that will become the new tail.
        pointer = self.head
        tail_position = length - num_shifts
        for _ in range(tail_position - 1):
            pointer = pointer.next_node
        new_tail = pointer
        new_head = new_tail.next_node
        new_tail.next_node = None
        # Walk the detached suffix to its end and splice the old prefix after it.
        suffix_end = new_head
        for _ in range(num_shifts - 1):
            suffix_end = suffix_end.next_node
        suffix_end.next_node = self.head
        self.head = new_head
        return self

    def get_length(self):
        """Return the number of nodes in the list."""
        pointer = self.head
        counter = 0
        while pointer:
            counter += 1
            pointer = pointer.next_node
        return counter
if __name__ == '__main__':
    # Demo: build a list, print its length and contents, rotate by 3, print again.
    linked_list2 = LinkedList()
    insert_list(linked_list2, [-10, 3, 2, 10, 1, 3, -5, 4, 3, 8, 0])
    print(linked_list2.get_length())
    print_linked_list(linked_list2)
    # linked_list2 = linked_list2.shift_elements(3)
    # print_linked_list(linked_list2)
    linked_list2 = linked_list2.shift_elements2(3)
    print_linked_list(linked_list2)
|
"""Test the square_sum function."""
import pytest
NUMS = [
[[1, 2], 5],
[[0, 3, 4, 5], 50],
]
@pytest.mark.parametrize("n, result", NUMS)
def test_sum_square(n, result):
"""Test the sum of squares of a list is returned."""
from src.square_sum import square_sum
assert square_sum(n) == result
|
import enum
import sys
import importlib
import json
def lreplace(s, prefix, sub):
    """Replace the first occurrence of ``prefix`` in ``s`` with ``sub``.

    Returns ``s`` unchanged when ``prefix`` does not occur.
    """
    position = s.find(prefix)
    if position < 0:
        return s
    return s[:position] + sub + s[position + len(prefix):]
def rreplace(s, suffix, sub):
    """Replace the last occurrence of ``suffix`` in ``s`` with ``sub``.

    Returns ``s`` unchanged when ``suffix`` does not occur.
    """
    left, separator, right = s.rpartition(suffix)
    if not separator:
        return s
    return left + sub + right
def AutoNameEnum(enum_type_name, enum_fields):
    """Create an Enum type whose member values are the lowercased member names."""
    members = {name: name.lower() for name in enum_fields}
    return enum.Enum(enum_type_name, members)
class Tee:
    """Duplicate writes to stdout and a log file, like the Unix ``tee``."""

    def __init__(self, filename):
        self.terminal = sys.stdout
        self.file = open(filename, 'w')

    def write(self, s):
        """Write ``s`` to both underlying streams."""
        for stream in (self.terminal, self.file):
            stream.write(s)

    def flush(self):
        """Flush both underlying streams."""
        for stream in (self.terminal, self.file):
            stream.flush()
def number_lines(s):
    """Yield each line of ``s`` prefixed with a right-aligned 1-based line number.

    The number column is padded to the width of the largest line number.
    """
    lines = s.split('\n')
    width = len(str(len(lines)))
    for lineno, line in enumerate(lines, start=1):
        yield f'line {lineno:>{width}}: {line}'
def dynamic_import(full_class_name):
    """Import and return the attribute named by a dotted path.

    E.g. ``dynamic_import('package.module.ClassName')`` imports
    ``package.module`` and returns its ``ClassName`` attribute.
    """
    module_path, _, attr_name = full_class_name.rpartition('.')
    module = importlib.import_module(module_path)
    return getattr(module, attr_name)
class CustomJSONSerializable:
    """Mixin marking a class as serializable by CustomJSONEncoder."""
    def to_json(self):
        """Serialize this object to a pretty-printed, key-sorted JSON string."""
        return json.dumps(self, cls=CustomJSONEncoder, indent=2, sort_keys=True)
def list_to_json(collection):
    """Serialize a collection with CustomJSONEncoder (pretty-printed, key-sorted)."""
    return json.dumps(collection, cls=CustomJSONEncoder, indent=2, sort_keys=True)
def from_json(object_json):
    """Deserialize an object from a JSON string or a file-like object.

    Uses CustomJSCONDecoder so dicts carrying ``__type__`` are re-instantiated.
    """
    loader = json.loads if isinstance(object_json, str) else json.load
    return loader(object_json, cls=CustomJSCONDecoder)
def list_from_json(list_json):
    """Deserialize a list from a JSON string or a file-like object.

    Uses CustomJSCONDecoder so dicts carrying ``__type__`` are re-instantiated.
    """
    loader = json.loads if isinstance(list_json, str) else json.load
    return loader(list_json, cls=CustomJSCONDecoder)
class CustomJSONEncoder(json.JSONEncoder):
    """JSON encoder for CustomJSONSerializable objects.

    Serializable instances become dicts of their ``__dict__`` plus a
    ``__type__`` key holding the fully qualified class name, so the matching
    decoder can re-instantiate them.
    """
    def default(self, obj):
        # json only calls default() for objects it cannot encode natively, so
        # dicts/lists nested inside a serializable object's attributes are
        # walked here explicitly.
        if isinstance(obj, dict):
            return {k : self.default(v) for k, v in obj.items()}
        elif isinstance(obj, list) and not isinstance(obj, str):
            return [self.default(v) for v in obj]
        elif isinstance(obj, CustomJSONSerializable):
            outdict = {a : self.default(v) for a, v in obj.__dict__.items()}
            outdict['__type__'] = obj.__class__.__module__ + '.' + obj.__class__.__qualname__
            return outdict
        return obj
class CustomJSCONDecoder(json.JSONDecoder):
    """JSON decoder that re-instantiates objects written by CustomJSONEncoder.

    Dicts carrying a ``__type__`` key (module-qualified class name) are
    turned back into instances of that class; everything else is returned
    unchanged. Reconstruction is best-effort: any failure falls back to the
    raw dict.
    """
    def __init__(self, *args, **kwargs):
        json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)

    def object_hook(self, o):
        try:
            if '__type__' in o:
                cls = dynamic_import(o['__type__'])
                # '__type__' is metadata, not a constructor argument.
                return cls(**{key: o[key] for key in o if key != '__type__'})
        except Exception:
            # Fixed: was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit. Decoding stays best-effort and
            # falls through to returning the raw dict.
            pass
        return o
|
import numpy as np
from qubo_nn.problems.problem import Problem
class QuadraticAssignment(Problem):
    """Quadratic Assignment Problem instance: QUBO encoder and random generator."""

    def __init__(self, cfg, flow_matrix, distance_matrix, P=200):
        # P is the constraint-penalty weight used when building the QUBO.
        self.flow_matrix = flow_matrix
        self.distance_matrix = distance_matrix
        self.P = P
        # Debug mode disables the penalty entirely.
        if cfg["problems"]["QA"].get("debug", False):
            self.P = 0.

    def gen_qubo_matrix(self):
        """Return the (n^2 x n^2) QUBO matrix for this instance.

        Entry [i*n+k][j*n+m] couples assignment variables with weight
        flow[i][j] * dist[k][m]; zero products are replaced by the penalty P
        and the diagonal is overwritten with -2*P.
        NOTE(review): the exact penalty construction is specific to this
        encoding — confirm against the accompanying reference before changing.
        """
        n = len(self.flow_matrix)
        Q = np.zeros((n ** 2, n ** 2))
        for i in range(n):
            for j in range(n):
                for k in range(n):
                    for m in range(n):
                        val = self.flow_matrix[i][j] * self.distance_matrix[k][m]
                        if val == 0:
                            Q[i * n + k][j * n + m] = self.P
                        else:
                            Q[i * n + k][j * n + m] = val
        # Diagonal set last, overwriting any coupling written above.
        for i in range(n ** 2):
            Q[i][i] = -self.P * 2
        return Q

    @classmethod
    def gen_problems(self, cfg, n_problems, size=3, **kwargs):
        """Generate ``n_problems`` random symmetric (flow, distance) pairs.

        Returns a list of kwargs dicts suitable for ``__init__``.
        """
        high = cfg["problems"]["QA"].get("high", 50)  # Outdated.
        debug = cfg["problems"]["QA"].get("debug", False)
        problems = []
        for _ in range(n_problems):
            if debug:
                x = np.arange(1, 50)
            else:
                x = np.arange(1, 1000)
            # Draw 2*size*size distinct values; split into flow and distance.
            choice = np.random.choice(x, size=(2, size, size), replace=False)
            flow = choice[0]
            dist = choice[1]
            if debug:
                dist[1][0] = 1.
                flow[1][0] = 1.
            # flow = np.random.randint(low=1, high=high, size=(size, size))
            # dist = np.random.randint(low=1, high=high, size=(size, size))
            np.fill_diagonal(flow, 0)
            np.fill_diagonal(dist, 0)
            # Symmetrize by mirroring the lower triangle.
            problems.append((
                np.tril(flow) + np.tril(flow, -1).T,
                np.tril(dist) + np.tril(dist, -1).T,
            ))
        return [
            {"flow_matrix": flow_matrix, "distance_matrix": distance_matrix}
            for (flow_matrix, distance_matrix) in problems
        ]
|
#!/usr/bin/python2.7
# -*- coding: utf-8
from utils import Utils
import json
import logging
import random
logger = logging.getLogger(__name__)
class Miner:
ut = Utils()
    def __init__(self, player):
        """Bind account credentials from ``player`` and probe career status."""
        self.username = player.username
        self.password = player.password
        self.uhash = player.uhash
        self.p = player  # keep a reference to the full player object
        self.running = False  # True while the mining loop is active
        self.useable = False  # set True by _initminer when the energy check passes
        self.energy = 0
        self._initminer()
def _initminer(self):
career_data = self._carrerStatus()
career = json.loads(career_data)
if int(career['count']) > 0:
self.energy = int(career['energy'])
if self.strength >= 600:
self.useable = True
    def _careerStatus(self):
        """Fetch the raw career-status payload (a JSON string) from the server."""
        temp = self.ut.requestString(self.username, self.password, self.uhash, "vh_getCareerStatus.php")
        return temp
def start(self):
self.running = True
while self.running:
|
import webbrowser
# NOTE(review): this URL is a remote-file-inclusion (RFI) test payload — the
# LANG parameter points at an external script. Presumably part of a security
# course assignment; confirm before running outside a lab environment.
url = 'http://www.wsb.com/Assignment2/case13/case13.php?LANG=http://www.wsb.com/Assignment2/rfi.txt'
new = 2
# new=2 asks webbrowser to open the URL in a new browser tab when possible.
webbrowser.open(url, new=new)
|
import os, sys
# Give the local engine implementations priority over anything else on
# sys.path by inserting at position 0.
sys.path.insert(0, './engines')
# NOTE(review): the original also called ``sys.path.append('./engines')``,
# which merely duplicated the entry added above; the redundant call was
# removed.
import test
from test import engineData
engineData()
|
import re
from django.contrib.auth.models import User
from corehq.apps.locations.models import SQLLocation
from corehq.apps.sms.mixin import PhoneNumberInUseException, VerifiedNumber
from corehq.apps.users.models import CommCareUser
from custom.ilsgateway.tanzania.handlers import get_location
from custom.ilsgateway.tanzania.handlers.keyword import KeywordHandler
from custom.ilsgateway.models import ILSGatewayConfig
from custom.ilsgateway.tanzania.reminders import REGISTER_HELP, Languages, \
REGISTRATION_CONFIRM_DISTRICT, REGISTRATION_CONFIRM, Roles
from custom.logistics.commtrack import add_location
# Prefixes recognised as MSD facility codes in registration messages.
# Fixed: 'tb' appeared twice in the original list — harmless in the regex
# alternation built from it, but redundant.
DISTRICT_PREFIXES = ['d', 'm', 'tb', 'tg', 'dm', 'mz', 'mt', 'mb', 'ir', 'ms']
class RegisterHandler(KeywordHandler):
DISTRICT_REG_DELIMITER = ":"
    def help(self):
        """Respond with the registration usage message."""
        self.respond(REGISTER_HELP)
def _get_facility_location(self, domain, msd_code):
sp = get_location(domain, None, msd_code)
return sp['location']
def _get_district_location(self, domain, sp):
return SQLLocation.objects.filter(
domain=domain,
location_type__name="DISTRICT",
name=sp,
)[0].couch_location
def handle(self):
text = ' '.join(self.msg.text.split()[1:])
is_district = False
sp = ""
msd_code = ""
if text.find(self.DISTRICT_REG_DELIMITER) != -1:
phrases = [x.strip() for x in text.split(":")]
if len(phrases) != 2:
self.respond(REGISTER_HELP)
return
name = phrases[0]
sp = phrases[1]
role = Roles.DISTRICT_PHARMACIST
message = REGISTRATION_CONFIRM_DISTRICT
params = {}
is_district = True
else:
names = []
msd_codes = []
location_regex = '^({prefs})\d+'.format(prefs='|'.join(p.lower() for p in DISTRICT_PREFIXES))
for the_string in self.args:
if re.match(location_regex, the_string.strip().lower()):
msd_codes.append(the_string.strip().lower())
else:
names.append(the_string)
name = " ".join(names)
if len(msd_codes) != 1:
self.respond(REGISTER_HELP)
return
else:
[msd_code] = msd_codes
role = Roles.IN_CHARGE
message = REGISTRATION_CONFIRM
params = {
"msd_code": msd_code
}
if not self.user:
domains = [config.domain for config in ILSGatewayConfig.get_all_configs()]
for domain in domains:
if is_district:
loc = self._get_district_location(domain, sp)
else:
loc = self._get_facility_location(domain, msd_code)
if not loc:
continue
splited_name = name.split(' ', 1)
first_name = splited_name[0]
last_name = splited_name[1] if len(splited_name) > 1 else ""
clean_name = name.replace(' ', '.')
username = "%s@%s.commcarehq.org" % (clean_name, domain)
password = User.objects.make_random_password()
user = CommCareUser.create(domain=domain, username=username, password=password,
commit=False)
user.first_name = first_name
user.last_name = last_name
try:
user.set_default_phone_number(self.msg.phone_number.replace('+', ''))
user.save_verified_number(domain, self.msg.phone_number.replace('+', ''), True, self.msg.backend_api)
except PhoneNumberInUseException as e:
v = VerifiedNumber.by_phone(self.msg.phone_number, include_pending=True)
v.delete()
user.save_verified_number(domain, self.msg.phone_number.replace('+', ''), True, self.msg.backend_api)
except CommCareUser.Inconsistent:
continue
user.language = Languages.DEFAULT
params.update({
'sdp_name': loc.name,
'contact_name': name
})
user.user_data = {
'role': role
}
dm = user.get_domain_membership(domain)
dm.location_id = loc._id
user.save()
add_location(user, loc._id)
if params:
self.respond(message, **params)
|
class Astronaut:
    """A named astronaut with an associated country and date."""

    def __init__(self, name, country, date):
        self.name, self.country, self.date = name, country, date


class SpaceAgency:
    """A named space agency with an associated country and date."""

    def __init__(self, name, country, date):
        self.name, self.country, self.date = name, country, date


# Example instances.
watney = Astronaut('Watney', 'USA', '1969-07-21')
nasa = SpaceAgency(name='NASA', country='USA', date='1958-07-29')
|
"""
Script doing the "check IPTC Photo Metadata" jobs and providing some shared functions
"""
import os
import yaml
import json
from pmdtools.exiftool import Exiftool
# Resolve all configuration and data paths relative to this script's folder.
currentdir = os.path.dirname(os.path.realpath(__file__))
CONFIGFP = currentdir + '/config/scriptsconfig.yml'
with open(CONFIGFP) as yaml_file1:
    scriptconfigs = yaml.safe_load(yaml_file1)
# load the dictionary with all field/property names as defined by exiftool as key
IPMDTECHGUIDEFP = currentdir + '/config/iptc-pmd-techguide.yml'
with open(IPMDTECHGUIDEFP) as yaml_file1:
    pmdguide = yaml.safe_load(yaml_file1)
# Working directories: test files, the reference metadata JSON, and results.
FILESDIR = currentdir + '/' + scriptconfigs['general']['filespathrel']
REFDIR = FILESDIR + 'reference/'
IPTCPMDREFFP = REFDIR + 'IPTC-PhotometadataRef-Std2019.1.json'
TESTRESULTSDIR = FILESDIR + 'testresults/'
# Shared log file that accumulates results across all test runs.
LOGFP = TESTRESULTSDIR + 'testresults_all.txt'
def append_line2file(line: str, filepath: str) -> None:
    """Appends a line to a text file
    :param line: line of text
    :param filepath: path of the text file
    :return: nothing
    """
    # Fix: open explicitly as UTF-8 -- the readers elsewhere in this script
    # use encoding='utf-8'; relying on the platform default here could
    # produce mixed-encoding log files on non-UTF-8 systems.
    with open(filepath, 'a', encoding='utf-8') as textfile:
        textfile.write(line + '\n')
def readpmd_exiftool(imgfp: str, imgpmd_jsonfp: str) -> None:
    """Read photo metadata from an image via ExifTool and dump it as JSON.

    :param imgfp: path of the image file
    :param imgpmd_jsonfp: path of the JSON output file
    :return: nothing
    """
    exiftool = Exiftool('')
    exiftool.currentdir = currentdir
    # Retrieve the metadata, then export it to the JSON file.
    exiftool.retrievedata(imgfp)
    exiftool.export_as_jsonfile(imgpmd_jsonfp)
def find_testfiles(testdirpath: str) -> list:
    """Collects a list of image file names (*.jpg, *.png) in a folder

    :param testdirpath: path of the to-be-investigated directory
    :return: list of matching file names
    """
    # str.endswith accepts a tuple of suffixes, so one test covers both
    # extensions; a name cannot end with both, so behavior is unchanged.
    return [fn for fn in os.listdir(testdirpath)
            if fn.endswith(('.jpg', '.png'))]
def get_iptcpropname(etpropname: str, instructure: bool = False) -> str:
    """Gets the IPTC Photo Metadata property name corresponding to
    the name of an ExifTool tag

    :param etpropname: ExifTool tag
    :param instructure: True if the property is one of an IPTC PMD structure
    :return: IPTC property name
    """
    # Reference keys use '_' where ExifTool tags use ':'.
    lookup_key = etpropname.replace(":", "_")
    group = 'et_instructure' if instructure else 'et_topwithprefix'
    if lookup_key in pmdguide[group]:
        rawlabel = pmdguide[group][lookup_key]['label']
        # Labels may carry alternatives after '|'; the first one is canonical.
        return rawlabel.split('|')[0]
    # No mapping known: fall back to the ExifTool name itself.
    return etpropname
def is_iptcpmdpropname(etpropname: str, instructure: bool = False) -> bool:
    """Checks if the ExifTool tag name corresponds to a specified
    IPTC Photo Metadata property

    :param etpropname: ExifTool tag name
    :param instructure: True if the property is one of an IPTC PMD structure
    :return: True if the tag maps to a specified IPTC property
    """
    # Reference keys use '_' where ExifTool tags use ':'.
    testetpropname = etpropname.replace(":", "_")
    groupname = 'et_instructure' if instructure else 'et_topwithprefix'
    # Membership in the reference group *is* the definition of "specified";
    # the original's flag variable and reassignment added nothing.
    return testetpropname in pmdguide[groupname]
def check_pmdstructure(parent_propnames: str, refstru: dict, teststru: dict,
                       testresultsfp: str, comparevalues: bool = False) -> None:
    """Checks an IPTC Photo Metadata structure at any level below the top level.

    Recurses into nested dicts and into dict items of lists; results are
    printed and appended to both the global log (LOGFP) and testresultsfp.

    :param parent_propnames: a sequence of names of parent properties
    :param refstru: reference structure of the IPTC Photo Metadata
    :param teststru: test structure of the test image
    :param testresultsfp: path of the file for logging test results
    :param comparevalues: False: only missing properties are reported, True: changed property values too
    :return: nothing
    """
    for refpropname in refstru:
        # Only properties that are specified IPTC PMD structure members count.
        if not is_iptcpmdpropname(refpropname, True):
            continue
        iptcname = get_iptcpropname(refpropname, True)
        # Human-readable property path, e.g. "Parent->Child".
        iptcnames = parent_propnames + '->' + iptcname
        if refpropname in teststru:
            refpropval = refstru[refpropname]
            # do the checking differently for dict, list and plain value types
            if isinstance(refpropval, dict):
                # Nested structure: recurse one level deeper.
                check_pmdstructure(iptcnames, refstru[refpropname], teststru[refpropname], testresultsfp, comparevalues)
            if isinstance(refpropval, list):
                if comparevalues:
                    idx = 0
                    while idx < len(refpropval):  # iterate across the items of the list
                        refpropval2 = refstru[refpropname][idx]
                        testpropval2 = teststru[refpropname][idx]
                        idx += 1
                        if isinstance(refpropval2, str) or isinstance(refpropval2, int) or \
                                isinstance(refpropval2, float):
                            # compare only plain values, not a list or dict
                            if testpropval2 != refpropval2:
                                msg = f'CHANGED value of property <{iptcname}> is: {testpropval2}'
                                print(msg)
                                append_line2file(msg, LOGFP)
                                append_line2file(msg, testresultsfp)
                # Second pass: recurse into dict items of the list.
                idx = 0
                while idx < len(refpropval):
                    refobj = refpropval[idx]
                    if isinstance(refobj, dict):  # check only if a dict, all other types are not relevant
                        testobj = teststru[refpropname][idx]
                        check_pmdstructure(iptcnames + '[' + str(idx) + ']', refobj, testobj,
                                           testresultsfp, comparevalues)
                    idx += 1
            if comparevalues:
                if isinstance(refpropval, str) or isinstance(refpropval, int) or isinstance(refpropval, float):
                    # the value is a plain one = compare the values
                    testpropval = teststru[refpropname]
                    if testpropval != refpropval:
                        msg = f'CHANGED value of property <{iptcnames}> is: {testpropval}'
                        print(msg)
                        append_line2file(msg, LOGFP)
                        append_line2file(msg, testresultsfp)
        else:
            # Property exists in the reference but not in the test image.
            msg = f'MISSING property: {iptcnames}'
            print(msg)
            append_line2file(msg, LOGFP)
            append_line2file(msg, testresultsfp)
def check_mainpmd(test_json_fp: str, testresultsfp: str, comparevalues: bool = False) -> None:
    """Checks IPTC Photo Metadata at the top level (=properties not inside a structure)

    Compares the ExifTool JSON of the test image against the reference JSON
    (IPTCPMDREFFP); structures are delegated to check_pmdstructure.

    :param test_json_fp: path of the JSON file with metadata retrieved from the image file by ExifTool
    :param testresultsfp: path of the file for logging test results
    :param comparevalues: False: only missing properties are reported, True: changed property values too
    :return: nothing
    """
    # ExifTool's JSON output is a one-element list; take the first object.
    with open(IPTCPMDREFFP, encoding='utf-8') as refjson_file:
        ipmdref = json.load(refjson_file)[0]
    with open(test_json_fp, encoding='utf-8') as testjson_file:
        ipmdtest = json.load(testjson_file)[0]
    # A file comment (often injected by processing software) is worth logging.
    if 'File:Comment' in ipmdtest:
        msg = f"COMMENT in the file: {ipmdtest['File:Comment']}"
        print(msg)
        append_line2file(msg, LOGFP)
        append_line2file(msg, testresultsfp)
    for refpropname in ipmdref:
        # Only specified top-level IPTC PMD properties are checked.
        if not is_iptcpmdpropname(refpropname):
            continue
        iptcname = get_iptcpropname(refpropname)
        if refpropname in ipmdtest:
            refpropval = ipmdref[refpropname]
            # do the checking differently for dict, list and plain value types
            if isinstance(refpropval, dict):
                check_pmdstructure(iptcname, ipmdref[refpropname], ipmdtest[refpropname], testresultsfp, comparevalues)
            if isinstance(refpropval, list):
                if comparevalues:
                    idx = 0
                    while idx < len(refpropval):  # iterate across the items of the list
                        refpropval2 = ipmdref[refpropname][idx]
                        testpropval2 = ipmdtest[refpropname][idx]
                        idx += 1
                        if isinstance(refpropval2, str) or isinstance(refpropval2, int) or \
                                isinstance(refpropval2, float):
                            # compare only plain values, not a list or dict
                            if testpropval2 != refpropval2:
                                msg = f'CHANGED value of property <{iptcname}> is: {testpropval2}'
                                print(msg)
                                append_line2file(msg, LOGFP)
                                append_line2file(msg, testresultsfp)
                # Second pass: recurse into dict items of the list.
                idx = 0
                while idx < len(refpropval):
                    refobj = refpropval[idx]
                    if isinstance(refobj, dict):  # check only if a dict, all other types are not relevant
                        testobj = ipmdtest[refpropname][idx]
                        check_pmdstructure(iptcname + '[' + str(idx) + ']', refobj, testobj,
                                           testresultsfp, comparevalues)
                    idx += 1
            if comparevalues:
                if isinstance(refpropval, str) or isinstance(refpropval, int) or isinstance(refpropval, float):
                    # the value is a plain one = compare the values
                    testpropval = ipmdtest[refpropname]
                    if testpropval != refpropval:
                        msg = f'CHANGED value of property <{iptcname}> is: {testpropval}'
                        print(msg)
                        append_line2file(msg, LOGFP)
                        append_line2file(msg, testresultsfp)
        else:
            # Property exists in the reference but not in the test image.
            msg = f'MISSING property: {iptcname}'
            print(msg)
            append_line2file(msg, LOGFP)
            append_line2file(msg, testresultsfp)
def investigate_ipmdstructure(parent_propnames: str, ugtopic: str, parent_so: str, level: int,
                              structid: str, teststruct: dict,
                              testresults_text_fp: str, testresults_csv_fp: str,
                              csvsep: str = ',') -> None:
    """Investigates which IPTC Photo Metadata properties exist inside a structure.

    This function may be called recursively. Only investigations at level 2 and 3 are supported (currently).

    :param parent_propnames: name(s)/label(s) of parent property(ies), csvsep-separated
    :param ugtopic: IPTC User Guide topic of the top level property
    :param parent_so: sort order of the parent property
    :param level: level of the investigation. Top level = level 1
    :param structid: IPTC PMD identifier of the investigated structure
    :param teststruct: structure (dict) from the tested image file to be investigated
    :param testresults_text_fp: path of the file for logging test results
    :param testresults_csv_fp: path of the CSV file for logging test results
    :param csvsep: separator character for the CSV output
    :return: nothing
    """
    if level < 2 or level > 3:  # range of the supported level: 2 to 3
        return
    refgroupname: str = 'ipmd_struct'  # a structure is investigated, reference data only in this group
    if structid in pmdguide[refgroupname]:
        refstru: dict = pmdguide[refgroupname][structid]
    else:
        # Unknown structure id: nothing to investigate.
        return
    for ipmdpropid in refstru:
        if ipmdpropid == '$anypmdproperty':  # that's a placeholder for "any other pmd property", skip it
            continue
        refipmdprop: dict = refstru[ipmdpropid]
        if 'label' in refipmdprop:
            label: str = refipmdprop['label']
        else:
            label: str = 'UNKNOWN-ERROR'  # a property without a label should not be in the reference data
        msg: str = f'*** Investigating IPTC PMD structure <{label} used by {parent_propnames}>'
        print(msg)
        # Build the CSV row column by column: topic, sort order, names, ...
        csvrow: str = ugtopic + csvsep
        if 'sortorder' in refipmdprop:
            sortorder: str = refipmdprop['sortorder']
        else:
            sortorder: str = 'xxx'
        csvrow += parent_so + '-' + sortorder + csvsep
        datatype: str = 'xxx'
        if 'datatype' in refipmdprop:
            datatype: str = refipmdprop['datatype']
        dataformat: str = ''
        if 'dataformat' in refipmdprop:
            dataformat: str = refipmdprop['dataformat']
        invstructid: str = ''  # id/name of a structure to be investigated
        if datatype == 'struct':
            if dataformat == 'AltLang':  # workaround to cover what ExifTool returns for AltLang values: a string
                datatype = 'string'
            else:
                invstructid = dataformat
        # create the appropriate sequence of PMD property names for a specific metadata level
        if level == 2:
            csvrow += parent_propnames + csvsep + label + csvsep + 'x' + csvsep  # NameL1 inherited, L2 applied, L3 x-ed
        if level == 3:
            csvrow += parent_propnames + csvsep + label + csvsep  # NameL1 and L2 inherited, L3 applied
        csvrow += 'not spec' + csvsep  # = the IIM column, no IIM spec exists for a property in a structure
        xmpvalue = ''
        if 'etTag' in refipmdprop:
            ettag = refipmdprop['etTag']
            if isinstance(teststruct, list):
                # A list of structures: the property counts as found if any
                # item carries the tag.  NOTE(review): xmpvalue is always
                # taken from item [0], even when the tag was found in a
                # later item -- confirm this is intended.
                propfound: bool = False
                if len(teststruct) > 0:
                    for singleteststru in teststruct:
                        if ettag in singleteststru:
                            propfound = True
                            xmpvalue = teststruct[0][ettag]
                if propfound:
                    keymsg = 'found'
                else:
                    keymsg = 'MISSING'
            else:
                if ettag in teststruct:
                    keymsg = 'found'
                    xmpvalue = teststruct[ettag]
                else:
                    keymsg = 'MISSING'
            msg = f'{keymsg} its corresponding XMP property'
            print(msg)
            append_line2file(msg, testresults_text_fp)
            csvrow += keymsg + csvsep  # = the XMP column
        else:
            csvrow += 'not spec' + csvsep
        # Sync-values column does not apply inside structures.
        csvrow += '---' + csvsep
        append_line2file(csvrow, testresults_csv_fp)
        if invstructid != '':  # if the id of a to-be-investigated structure is set: investigate at the next level
            investigate_ipmdstructure(parent_propnames + csvsep + label, ugtopic, parent_so + '-' + sortorder,
                                      level + 1, invstructid, xmpvalue,
                                      testresults_text_fp, testresults_csv_fp, csvsep)
def investigate_mainpmd(test_json_fp: str, testresults_text_fp: str, testresults_csv_fp: str,
                        csvsep: str = ',') -> None:
    """Investigates which IPTC Photo Metadata top level (=properties not inside a structure) properties exist

    For each specified top-level property: checks presence of its IIM and
    XMP variants, compares their values where both are plain, and recurses
    into structures via investigate_ipmdstructure.

    :param test_json_fp: path of the JSON file with metadata retrieved from the image file by ExifTool
    :param testresults_text_fp: path of the file for logging test results
    :param testresults_csv_fp: path of the CSV file for logging test results
    :param csvsep: separator character for the CSV output
    :return: nothing
    """
    # ExifTool's JSON output is a one-element list; take the first object.
    with open(test_json_fp, encoding='utf-8') as testjson_file:
        ipmdtest = json.load(testjson_file)[0]
    # write the header of the CSV-output file
    csvheader: str = f'topic{csvsep}sortorder{csvsep}IPMD Name L1{csvsep}IPMD Name L2{csvsep}IPMD Name L3'
    csvheader += f'{csvsep}IIM prop{csvsep}XMP prop{csvsep}Sync Values{csvsep}Comments'
    append_line2file(csvheader, testresults_csv_fp)
    refgroupname: str = 'ipmd_top'  # the top level of IPTC PMD is investigated, this is the corresponding group
    for ipmdpropid in pmdguide[refgroupname]:
        refipmdprop: dict = pmdguide[refgroupname][ipmdpropid]
        if 'label' in refipmdprop:
            label: str = refipmdprop['label']
        else:
            label: str = 'UNKNOWN-ERROR'  # a property without a label should not be in the reference data
        msg: str = f'*** Investigating IPTC PMD top level property <{label}>'
        print(msg)
        append_line2file(msg, testresults_text_fp)
        # Build the CSV row column by column: topic, sort order, names, ...
        if 'ugtopic' in refipmdprop:
            ugtopic: str = refipmdprop['ugtopic']
        else:
            ugtopic: str = 'xxx'
        csvrow: str = ugtopic + csvsep
        if 'sortorder' in refipmdprop:
            sortorder: str = refipmdprop['sortorder']
        else:
            sortorder: str = 'xxx'
        csvrow += sortorder + csvsep
        csvrow += label + csvsep + 'x' + csvsep + 'x' + csvsep  # Name L1 is set, L2 and L3 x-ed out
        datatype: str = 'xxx'
        if 'datatype' in refipmdprop:
            datatype: str = refipmdprop['datatype']
        dataformat: str = ''
        if 'dataformat' in refipmdprop:
            dataformat = refipmdprop['dataformat']
        invstructid: str = ''  # id/name of a structure to be investigated
        if datatype == 'struct':
            if dataformat == 'AltLang':  # workaround to cover what ExifTool returns for AltLang values: a string
                datatype = 'string'
            else:
                invstructid = dataformat
        # Only plain (string/number) values take part in value comparison.
        plainvalue: bool = False
        if datatype == 'string' or datatype == 'number':
            plainvalue = True
        iimfound: bool = False
        iimvalue: str = ''
        xmpvalue = ''
        special_comparing: str = ''  # indicates special procedure(s) for comparing values, pipe separated
        if ipmdpropid == 'creatorNames':
            special_comparing += 'iim1xmplist|'
        if ipmdpropid == 'dateCreated':
            special_comparing += 'iimdatetime|'
        if 'etIIM' in refipmdprop:
            ettag = refipmdprop['etIIM']
            if ettag in ipmdtest:
                keymsg = 'found'
                iimfound = True
                if plainvalue:
                    iimvalue = ipmdtest[ettag]
            else:
                keymsg = 'MISSING'
            # Special case: Date Created is split across two IIM tags.
            if ettag == 'IPTC:DateCreated+IPTC:TimeCreated':
                keymsg = 'MISSING'
                if 'IPTC:DateCreated' in ipmdtest and 'IPTC:TimeCreated' in ipmdtest:
                    keymsg = 'found'
            msg = f'{keymsg} its corresponding IIM property'
            print(msg)
            append_line2file(msg, testresults_text_fp)
            csvrow += keymsg + csvsep
        else:
            csvrow += 'not spec' + csvsep
        if 'etXMP' in refipmdprop:
            ettag = refipmdprop['etXMP']
            if ettag in ipmdtest:
                keymsg = 'found'
                xmpvalue = ipmdtest[ettag]
            else:
                keymsg = 'MISSING'
            msg = f'{keymsg} its corresponding XMP property'
            print(msg)
            append_line2file(msg, testresults_text_fp)
            csvrow += keymsg + csvsep
        else:
            csvrow += 'not spec' + csvsep
        keymsg = '---'
        # compare only plain values
        if plainvalue:
            if iimfound:
                if iimvalue == xmpvalue:
                    keymsg = 'in sync'
                else:
                    keymsg = 'NOT SYNC'
                if 'iim1xmplist' in special_comparing:  # it may override the keymsg above!
                    # IIM holds a single value while XMP holds a list:
                    # compare against the first XMP item.
                    if isinstance(xmpvalue, list):
                        if len(xmpvalue) > 0:
                            if iimvalue == xmpvalue[0]:
                                keymsg = 'in sync'
                            else:
                                keymsg = 'NOT SYNC'
                if 'iimdatetime' in special_comparing:  # it may override the keymsg above!
                    # IIM date + time concatenated must equal the XMP value.
                    iimdatevalue: str = ''
                    dateettag: str = 'IPTC:DateCreated'
                    if dateettag in ipmdtest:
                        iimdatevalue = ipmdtest[dateettag]
                    iimtimevalue: str = ''
                    timeettag: str = 'IPTC:TimeCreated'
                    if timeettag in ipmdtest:
                        iimtimevalue = ipmdtest[timeettag]
                    iimdatetimevalue = iimdatevalue + ' ' + iimtimevalue
                    if iimdatetimevalue == xmpvalue:
                        keymsg = 'in sync'
                    else:
                        keymsg = 'NOT SYNC'
        csvrow += keymsg + csvsep
        append_line2file(csvrow, testresults_csv_fp)
        if invstructid != '':  # if the id of a to-be-investigated structure is set: investigate at the next level
            investigate_ipmdstructure(label, ugtopic, sortorder, 2, invstructid, xmpvalue,
                                      testresults_text_fp, testresults_csv_fp, csvsep)
|
# Copyright 2020, Salesforce.com, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import argparse
from tqdm import tqdm
import random
import os
import json
from collections import defaultdict
from models.dnnc import DNNC
from models.dnnc import ENTAILMENT, NON_ENTAILMENT
from models.utils import InputExample
from models.utils import load_intent_datasets, load_intent_examples, sample, print_results
from models.utils import calc_oos_precision, calc_in_acc, calc_oos_recall, calc_oos_f1
from models.utils import THRESHOLDS
from intent_predictor import DnncIntentPredictor
def main():
    """Train/evaluate the DNNC few-shot intent model over several sampled trials.

    Builds NLI-style entailment / non-entailment example pairs from sampled
    few-shot tasks, trains (or loads, with --do_predict) a DNNC model per
    trial, evaluates in-domain and out-of-scope predictions across the
    THRESHOLDS grid, and writes results/predictions under --output_dir.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--seed",
                        default=42,
                        type=int,
                        help="Random seed")
    parser.add_argument("--bert_model",
                        default='roberta-base',
                        type=str,
                        help="BERT model")
    parser.add_argument("--train_batch_size",
                        default=370,
                        type=int,
                        help="Total batch size for training.")
    parser.add_argument("--eval_batch_size",
                        default=8,
                        type=int,
                        help="Total batch size for eval.")
    parser.add_argument("--learning_rate",
                        default=1e-5,
                        type=float,
                        help="The initial learning rate for Adam.")
    parser.add_argument("--num_train_epochs",
                        default=7,
                        type=float,
                        help="Total number of training epochs to perform.")
    parser.add_argument("--warmup_proportion",
                        default=0.1,
                        type=float,
                        help="Proportion of training to perform linear learning rate warmup for. "
                             "E.g., 0.1 = 10%% of training.")
    parser.add_argument("--adam_epsilon", default=1e-8, type=float, help="Epsilon for Adam optimizer.")
    parser.add_argument("--no_cuda",
                        action='store_true',
                        help="Whether not to use CUDA when available")
    parser.add_argument('--gradient_accumulation_steps',
                        type=int,
                        default=2,
                        help="Number of updates steps to accumulate before performing a backward/update pass.")
    parser.add_argument('--max_grad_norm', help='gradient clipping for Max gradient norm.', required=False, default=1.0,
                        type=float)
    parser.add_argument('--label_smoothing',
                        type = float,
                        default = 0.1,
                        help = 'Coefficient for label smoothing (default: 0.1, if 0.0, no label smoothing)')
    parser.add_argument('--max_seq_length',
                        type = int,
                        default = 128,
                        help = 'Maximum number of paraphrases for each sentence')
    parser.add_argument("--do_lower_case",
                        action='store_true',
                        help="Whether to lowercase input string")
    # Special params
    parser.add_argument('--train_file_path',
                        type = str,
                        default = None,
                        help = 'Training data path')
    parser.add_argument('--dev_file_path',
                        type = str,
                        default = None,
                        help = 'Validation data path')
    parser.add_argument('--oos_dev_file_path',
                        type = str,
                        default = None,
                        help = 'Out-of-Scope validation data path')
    parser.add_argument('--output_dir',
                        type = str,
                        default = None,
                        help = 'Output file path')
    parser.add_argument('--save_model_path',
                        type = str,
                        default = '',
                        help = 'path to save the model checkpoints')
    parser.add_argument('--bert_nli_path',
                        type = str,
                        default = '',
                        help = 'The bert checkpoints which are fine-tuned with NLI datasets')
    parser.add_argument("--scratch",
                        action='store_true',
                        help="Whether to start from the original BERT")
    parser.add_argument('--over_sampling',
                        type = int,
                        default = 0,
                        help = 'Over-sampling positive examples as there are more negative examples')
    parser.add_argument('--few_shot_num',
                        type = int,
                        default = 5,
                        help = 'Number of training examples for each class')
    parser.add_argument('--num_trials',
                        type = int,
                        default = 10,
                        help = 'Number of trials to see robustness')
    parser.add_argument("--do_predict",
                        action='store_true',
                        help="do_predict the model")
    parser.add_argument("--do_final_test",
                        action='store_true',
                        help="do_predict the model")
    args = parser.parse_args()
    random.seed(args.seed)
    # N examples per class, T independent trials.
    N = args.few_shot_num
    T = args.num_trials
    train_file_path = args.train_file_path
    dev_file_path = args.dev_file_path
    train_examples, dev_examples = load_intent_datasets(train_file_path, dev_file_path, args.do_lower_case)
    # One few-shot sample of the training data per trial.
    sampled_tasks = [sample(N, train_examples) for i in range(T)]
    if args.oos_dev_file_path is not None:
        oos_dev_examples = load_intent_examples(args.oos_dev_file_path, args.do_lower_case)
    else:
        oos_dev_examples = []
    # Build the NLI pair data (entailment within a class, non-entailment
    # across classes) for each trial; empty when only predicting.
    nli_train_examples = []
    nli_dev_examples = []
    for i in range(T):
        if args.do_predict:
            nli_train_examples.append([])
            nli_dev_examples.append([])
            continue
        tasks = sampled_tasks[i]
        all_entailment_examples = []
        all_non_entailment_examples = []
        # entailement
        for task in tasks:
            examples = task['examples']
            for j in range(len(examples)):
                for k in range(len(examples)):
                    if k <= j:
                        continue
                    # Both orderings of each within-class pair.
                    all_entailment_examples.append(InputExample(examples[j], examples[k], ENTAILMENT))
                    all_entailment_examples.append(InputExample(examples[k], examples[j], ENTAILMENT))
        # non entailment
        for task_1 in range(len(tasks)):
            for task_2 in range(len(tasks)):
                if task_2 <= task_1:
                    continue
                examples_1 = tasks[task_1]['examples']
                examples_2 = tasks[task_2]['examples']
                for j in range(len(examples_1)):
                    for k in range(len(examples_2)):
                        all_non_entailment_examples.append(InputExample(examples_1[j], examples_2[k], NON_ENTAILMENT))
                        all_non_entailment_examples.append(InputExample(examples_2[k], examples_1[j], NON_ENTAILMENT))
        nli_train_examples.append(all_entailment_examples + all_non_entailment_examples)
        nli_dev_examples.append(all_entailment_examples[:100] + all_non_entailment_examples[:100])  # sanity check for over-fitting
        # Duplicate the (rarer) entailment examples to rebalance.
        for j in range(args.over_sampling):
            nli_train_examples[-1] += all_entailment_examples
    # Derive the results folder/file names from the hyper-parameters.
    # NOTE(review): folder_name is only assigned inside this branch but is
    # referenced below (save_model_path, save_file) -- running with
    # --save_model_path but without --output_dir raises NameError; confirm
    # --output_dir is effectively mandatory in that mode.
    if args.output_dir is not None:
        if args.scratch:
            folder_name = '{}/{}-shot-{}_nli__Scratch/'.format(args.output_dir, N, args.bert_model)
        else:
            folder_name = '{}/{}-shot-{}_nli__Based_on_nli_fine_tuned_model/'.format(args.output_dir, N, args.bert_model)
        if not os.path.exists(folder_name):
            os.makedirs(folder_name)
        file_name = 'batch_{}---epoch_{}---lr_{}---trials_{}'.format(args.train_batch_size,
                                                                     args.num_train_epochs,
                                                                     args.learning_rate, args.num_trials)
        file_name = '{}__oos-threshold'.format(file_name)
        if args.scratch:
            file_name = '{}__scratch'.format(file_name)
        else:
            file_name = '{}__based_on_nli_fine_tuned_model'.format(file_name)
        if args.over_sampling:
            file_name = file_name + '--over_sampling'
        if args.do_final_test:
            file_name = file_name + '_TEST.txt'
        else:
            file_name = file_name + '.txt'
        f = open(folder_name+file_name, 'w')
    else:
        f = None
    if args.scratch:
        BERT_NLI_PATH = None
    else:
        BERT_NLI_PATH = args.bert_nli_path
        assert os.path.exists(BERT_NLI_PATH)
    if args.save_model_path and args.do_predict:
        stats_lists_preds = defaultdict(list)
    for j in range(T):
        # Per-trial checkpoint directory: <folder>/<save_model_path>_<trial#>.
        save_model_path = '{}_{}'.format(folder_name+args.save_model_path, j+1)
        # Checkpoints must exist iff we are in predict mode.
        if os.path.exists(save_model_path):
            assert args.do_predict
        else:
            assert not args.do_predict
        if args.save_model_path and os.path.exists(save_model_path):
            if args.do_predict:
                trial_stats_preds = defaultdict(list)
            # Load a previously trained trial checkpoint.
            model = DNNC(path = save_model_path,
                         args = args)
        else:
            # Train a fresh model, starting from the NLI-fine-tuned BERT
            # (or from scratch), then optionally save the checkpoint.
            model = DNNC(path = BERT_NLI_PATH,
                         args = args)
            model.train(nli_train_examples[j], nli_dev_examples[j])
            if args.save_model_path:
                if not os.path.exists(save_model_path):
                    os.mkdir(save_model_path)
                model.save(save_model_path)
        intent_predictor = DnncIntentPredictor(model, sampled_tasks[j])
        in_domain_preds = []
        oos_preds = []
        for e in tqdm(dev_examples, desc = 'Intent examples'):
            pred, conf, matched_example = intent_predictor.predict_intent(e.text)
            in_domain_preds.append((conf, pred))
            if args.save_model_path and args.do_predict:
                if not trial_stats_preds[e.label]:
                    trial_stats_preds[e.label] = []
                # Record the full prediction detail for later inspection.
                single_pred = {}
                single_pred['gold_example'] = e.text
                single_pred['match_example'] = matched_example
                single_pred['gold_label'] = e.label
                single_pred['pred_label'] = pred
                single_pred['conf'] = conf
                trial_stats_preds[e.label].append(single_pred)
        for e in tqdm(oos_dev_examples, desc = 'OOS examples'):
            pred, conf, matched_example = intent_predictor.predict_intent(e.text)
            oos_preds.append((conf, pred))
            if args.save_model_path and args.do_predict:
                if not trial_stats_preds[e.label]:
                    trial_stats_preds[e.label] = []
                single_pred = {}
                single_pred['gold_example'] = e.text
                single_pred['match_example'] = matched_example
                single_pred['gold_label'] = e.label
                single_pred['pred_label'] = pred
                single_pred['conf'] = conf
                trial_stats_preds[e.label].append(single_pred)
        if args.save_model_path and args.do_predict:
            stats_lists_preds[j] = trial_stats_preds
        # Score this trial across the whole confidence-threshold grid.
        in_acc = calc_in_acc(dev_examples, in_domain_preds, THRESHOLDS)
        oos_recall = calc_oos_recall(oos_preds, THRESHOLDS)
        oos_prec = calc_oos_precision(in_domain_preds, oos_preds, THRESHOLDS)
        oos_f1 = calc_oos_f1(oos_recall, oos_prec)
        print_results(THRESHOLDS, in_acc, oos_recall, oos_prec, oos_f1)
        if f is not None:
            for i in range(len(in_acc)):
                f.write('{},{},{},{} '.format(in_acc[i], oos_recall[i], oos_prec[i], oos_f1[i]))
            f.write('\n')
    if f is not None:
        f.close()
    # Dump the collected per-example predictions of all trials as JSON.
    if args.save_model_path and args.do_predict:
        if args.do_final_test:
            save_file = folder_name + "dev_examples_predictions_TEST.json"
        else:
            save_file = folder_name+"dev_examples_predictions.json"
        with open(save_file, "w") as outfile:
            json.dump(stats_lists_preds, outfile, indent=4)
# Script entry point.
if __name__ == '__main__':
    main()
|
import sys
# Do not emit .pyc files for this module.
sys.dont_write_bytecode = True
def least_recently_used(page_table):
    '''
    Least-recently-used page replacement: evict the in-memory page with the
    oldest last_accessed value, moving it from page_table.memory to
    page_table.disk, and return the evicted page.
    Solves CS149 Homework#4
    @author Tyler Jones
    '''
    # Map each in-memory page's name to its last-accessed timestamp.
    access_times = {
        page.name: page.last_accessed for page in page_table.memory.values()
    }
    # The smallest timestamp marks the least recently used page.
    victim_name = min(access_times, key=access_times.get)
    victim = page_table.memory[victim_name]
    # Eviction resets the page's frequency counter.
    victim.frequency = 0
    # Move the page out of memory onto disk.
    page_table.disk[victim_name] = victim
    del page_table.memory[victim_name]
    return victim
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import logging
from logging.handlers import RotatingFileHandler
from flask import Flask
# ------------------------------------------------------------------------------
# SETUP GENERAL APPLICATION
# ------------------------------------------------------------------------------
__version__ = '1.0'
app = Flask('ITIL-ServiceCatalog')
# Configuration comes from the top-level config module.
app.config.from_object('config')
# NOTE(review): debug mode is forced on here; disable before deploying.
app.debug = True
# ------------------------------------------------------------------------------
# SETUP JWT AUTHENTICATION
# ------------------------------------------------------------------------------
# Import all courseware controller files (registers their routes on `app`).
from courseware.controllers import *
|
"""
Hi, this is your friend. Here I am going to make a classic game of Hangman.
Hangman --> how it works
It's still a work in progress.
"""
import time
def split(word):
    """Return the characters of *word* as a list."""
    # list() on a string is the idiomatic equivalent of the original
    # per-character comprehension.
    return list(word)
def ask():
    """Run one interactive round of the (work-in-progress) hangman game."""
    word = "test"
    word_chars = split(word)
    # NOTE(review): this overwrites the real character list with a shorter
    # hard-coded one (missing the final 't'), so the win comparison below
    # can never match the full word -- looks like leftover debug code.
    word_chars = ['t','e','s']
    guess = []
    turns = 7
    guesses = []
    print("Do you want to continue(y/n):", end="")
    ans = input()
    if ans.lower() == "y" or ans.lower() == "yes":
        print("- represents letter in word")
        # Show one dash per letter of the hidden word.
        for char in word:
            print("-", end="")
        while turns > 0:
            # NOTE(review): comparing the ordered list of entered guesses
            # against word_chars only succeeds if the player types the
            # letters in exact order -- confirm whether set comparison
            # was intended.
            if guesses != word_chars:
                if turns == len(word)+3:
                    chars = input("\nEnter a char of word:")
                    guess.append(chars)
                    guesses.append(chars)
                else:
                    # NOTE(review): identical to the branch above.
                    chars = input("\nEnter a char of word:")
                    guess.append(chars)
                    guesses.append(chars)
                if len(chars) == 1:
                    # Re-render the word with all correctly guessed letters.
                    for char in word:
                        if char in guess:
                            print(char, end="")
                        else:
                            print("-", end="")
                    turns -= 1
                else:
                    print("You need to enter a single character")
            else:
                print("\nYou Won!!!!")
                print("Congratulations")
                turns = 0
        if turns == 0 and guesses != word_chars:
            print("\nYou Lose!!")
            print("Restart to play")
    elif ans.lower() == 'n' or ans.lower() == 'no':
        # Animated "Exiting Hangman..." message.
        print("Exiting Hangman", end="")
        for i in range(0,3):
            time.sleep(.2)
            print(".", end="")
    else:
        print("Enter a valid character")
def game():
    """Start one hangman session."""
    ask()
# Launch the game immediately when this module runs.
game()
|
from machinable import Component
class FlattenedNotationComponent(Component):
pass
|
# Generated by Django 2.0.2 on 2018-06-06 16:04
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the ``Course`` model for the ``classes`` app.

    Auto-generated by Django 2.0.2; do not edit the field list by hand —
    schema changes belong in a new migration.  Note the ``desription`` field
    name is a typo preserved as-is because it is the actual column name.
    """

    dependencies = [
        ('classes', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Course',
            # Almost every field is a nullable CharField because the data is
            # scraped/imported text of uncertain shape.
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('year', models.CharField(max_length=15)),
                ('semester', models.CharField(max_length=15)),
                ('subjectCode', models.CharField(max_length=15)),
                ('subjectName', models.CharField(blank=True, max_length=50, null=True)),
                ('grade', models.CharField(blank=True, max_length=50, null=True)),
                ('eisu', models.CharField(blank=True, max_length=50, null=True)),
                ('score', models.CharField(blank=True, max_length=50, null=True)),
                ('professor', models.CharField(blank=True, max_length=50, null=True)),
                ('remarks', models.CharField(blank=True, max_length=50, null=True)),
                ('time', models.CharField(blank=True, max_length=50, null=True)),
                ('lectureRoom', models.CharField(blank=True, max_length=100, null=True)),
                ('huksu_code', models.CharField(blank=True, max_length=50, null=True)),
                ('course_time', models.CharField(blank=True, max_length=15, null=True)),
                ('design_score', models.CharField(blank=True, default='0', max_length=15, null=True)),
                ('desription', models.CharField(blank=True, max_length=1000, null=True)),
                ('Knowledge_application', models.CharField(blank=True, max_length=5, null=True)),
                ('verification_ability', models.CharField(blank=True, max_length=5, null=True)),
                ('problem_solving', models.CharField(blank=True, max_length=5, null=True)),
                ('tool_utilization', models.CharField(blank=True, max_length=5, null=True)),
                ('design_ability', models.CharField(blank=True, max_length=5, null=True)),
                ('teamwork_skill', models.CharField(blank=True, max_length=5, null=True)),
                ('communication', models.CharField(blank=True, max_length=5, null=True)),
                ('understanding_of_influence', models.CharField(blank=True, max_length=5, null=True)),
                ('responsibility', models.CharField(blank=True, max_length=5, null=True)),
                ('self_led', models.CharField(blank=True, max_length=5, null=True)),
                ('Lecture_type', models.CharField(blank=True, max_length=100, null=True)),
                ('teaching_method', models.CharField(blank=True, max_length=100, null=True)),
                ('educational_equipment', models.CharField(blank=True, max_length=100, null=True)),
                ('Assignment', models.CharField(blank=True, max_length=500, null=True)),
                ('Midterm_exam', models.CharField(blank=True, max_length=10, null=True)),
                ('final_exam', models.CharField(blank=True, max_length=10, null=True)),
                ('attendance', models.CharField(blank=True, max_length=10, null=True)),
                ('assignments_and_others', models.CharField(blank=True, max_length=10, null=True)),
                ('grading_division', models.CharField(blank=True, max_length=10, null=True)),
                ('title', models.CharField(blank=True, max_length=200, null=True)),
                ('author', models.CharField(blank=True, max_length=300, null=True)),
                ('publisher', models.CharField(blank=True, max_length=100, null=True)),
                ('year_of_publication', models.CharField(blank=True, max_length=20, null=True)),
            ],
        ),
    ]
|
#!/usr/bin/env python
import sys
import rospy
import copy
import moveit_commander
import moveit_msgs.msg
import geometry_msgs.msg
from math import pi
from std_msgs.msg import String
from moveit_commander.conversions import pose_to_list
# Initialise the MoveIt commander and the rospy node.
moveit_commander.roscpp_initialize(sys.argv)
rospy.init_node('move_group_python_interface_tutorial',anonymous=True)
# Instantiate a RobotCommander object: the outer interface to the robot.
robot = moveit_commander.RobotCommander()
# Instantiate a PlanningSceneInterface object: an interface to the world
# surrounding the robot.
scene=moveit_commander.PlanningSceneInterface()
# Instantiate a MoveGroupCommander for one of the robot's planning groups.
group_name = "manipulator" #arm
group = moveit_commander.MoveGroupCommander(group_name)
# Create a display-trajectory publisher, used to publish planned trajectories
# for RViz to visualize.
display_trajectory_publisher=rospy.Publisher('/move_group/display_planned_path', moveit_msgs.msg.DisplayTrajectory, queue_size=20)
# # We can get the name of the reference frame for this robot:
# planning_frame = group.get_planning_frame()
# print "============ Reference frame: %s" % planning_frame
# # We can also print the name of the end-effector link for this group:
# eef_link = group.get_end_effector_link()
# print "============ End effector: %s" % eef_link
# # We can get a list of all the groups in the robot:
# group_names = robot.get_group_names()
# print "============ Robot Groups:", robot.get_group_names()
# # Sometimes for debugging it is useful to print the entire state of the
# # robot:
# print "============ Printing robot state"
print(robot.get_current_state())
# print ""
# Move the robot to a non-singular position by adjusting the joint values.
joint_goal = group.get_current_joint_values()
print(joint_goal)
joint_goal[0] = 0
joint_goal[1] = 0
joint_goal[2] = -1
joint_goal[3] = 1
# #use go command to execute the joint angles
group.go(joint_goal,wait=True)
# # call stop to avoid residual movements
group.stop()
# NOTE(review): import buried at the bottom of the script; consider moving it
# to the top with the other imports.
import numpy as np
# Determinant of the Jacobian at the commanded joint configuration; a value
# of zero would indicate a singular configuration.
jac = group.get_jacobian_matrix(joint_goal)
value = np.linalg.det(jac)
print(value)
|
# coding: utf-8
import unittest
import tempfile
from pynames.utils import is_file
# Django is an optional dependency: when it is absent the Django-specific
# assertions in TestName are skipped.
try:
    from django.core.files import File
    from django.core.files.base import ContentFile
    from django.core.files.uploadedfile import UploadedFile
# Bug fix: the bare `except:` swallowed every exception (including
# SystemExit/KeyboardInterrupt and genuine Django configuration errors);
# only a missing Django should be tolerated here.
except ImportError:
    UploadedFile = None
    File = None
    ContentFile = None
class TestName(unittest.TestCase):
    """Checks for pynames.utils.is_file with plain and Django file objects."""

    def test_is_file(self):
        with tempfile.NamedTemporaryFile() as handle:
            self.assertTrue(is_file(handle))

    def test_is_file_on_django_files(self):
        if not (File and UploadedFile and ContentFile):
            return  # Django is not installed; nothing to verify here.
        self.assertTrue(is_file(UploadedFile('mock')))
        self.assertTrue(is_file(File('mock')))
        self.assertTrue(is_file(ContentFile('mock')))
|
from posixpath import join
from sys import path
import cv2
from skimage.io.collection import ImageCollection
import imlib as im
import numpy as np
import pylib as py
import tensorflow as tf
import tf2lib as tl
import glob
import data
import module
#import sys
#sys.path.append('..')
import resize_images_pascalvoc
from resize_images_pascalvoc.resize_main import resize_label
# ==============================================================================
# =                                   param                                    =
# ==============================================================================
py.arg('--experiment_dir')
py.arg('--batch_size', type=int, default=32)
test_args = py.args()
# Load the training-time settings stored with the experiment, then overlay
# the command-line arguments on top of them.
args = py.args_from_yaml(py.join(test_args.experiment_dir, 'settings.yml'))
args.__dict__.update(test_args.__dict__)
# ==============================================================================
# =                                 inference                                  =
# ==============================================================================
# data
# Collect every image in the inference folder, covering both lower- and
# upper-case jpg/png extensions.
A_img_paths_test = (py.glob(py.join(args.datasets_dir, args.dataset, 'inference'), '*.jpg')+
                    py.glob(py.join(args.datasets_dir, args.dataset, 'inference'), '*.png')+
                    py.glob(py.join(args.datasets_dir, args.dataset, 'inference'), '*.PNG')+
                    py.glob(py.join(args.datasets_dir, args.dataset, 'inference'), '*.JPG'))
#
A_dataset_test = data.make_dataset(A_img_paths_test, args.batch_size, args.load_size, args.crop_size,
                                   training=False, drop_remainder=False, shuffle=False, repeat=1)
# model
G_A2B = module.ResnetGenerator(input_shape=(args.crop_size, args.crop_size, 3))
# restore
# Load the trained generator weights from the experiment's checkpoint folder.
tl.Checkpoint(dict(G_A2B=G_A2B), py.join(args.experiment_dir, 'checkpoints')).restore()
@tf.function
def sample_A2B(A):
    """Translate a batch of domain-A images to domain B (inference mode)."""
    A2B = G_A2B(A, training=False)
    return A2B
# run
import imageio
import os
import shutil
# Output folder name encodes the adversarial loss mode, crop size and epochs.
save_inference = py.join(args.experiment_dir, 'Inference', str(args.adversarial_loss_mode)+"_Size"+str(args.crop_size)+"_Epo"+str(args.epochs))
py.mkdir(save_inference)
# Index into A_img_paths_test so each output keeps its input's file name.
i = 0
# Target resolution for the saved translations (width, height).
dim = (1920, 1080)
# save Image as single image B for FID calculation
for A in A_dataset_test:
    A2B = sample_A2B(A)
    for A_i, A2B_i in zip(A, A2B):
        imgB = A2B_i.numpy()
        # Upscale the generator output to the target resolution before saving.
        img_resized = cv2.resize(imgB, dim, interpolation = cv2.INTER_LANCZOS4)
        imageio.imwrite(os.path.join(save_inference ,(py.name_ext(A_img_paths_test[i]))), img_resized)
        i += 1
def resize_upsampling(old_folder, new_folder, size):
    """Resize every image in *old_folder* and write the result to *new_folder*.

    Parameters
    ----------
    old_folder : str
        Directory containing the source images.
    new_folder : str
        Directory the resized images are written to (same file names).
    size : tuple or None
        Target (width, height).  Falls back to (1920, 1080) when falsy.
    """
    # Bug fix: the previous version shadowed ``size`` with a hard-coded
    # (1920, 1080), silently ignoring the caller's requested dimensions.
    dim = size if size else (1920, 1080)
    for image in os.listdir(old_folder):
        img = cv2.imread(os.path.join(old_folder, image))
        # INTER_CUBIC or INTER_LANCZOS4
        img_resized = cv2.resize(img, dim, interpolation=cv2.INTER_LANCZOS4)
        print('Shape: ' + str(img.shape) + ' is now resized to: ' + str(img_resized.shape))
        cv2.imwrite(os.path.join(new_folder, image), img_resized)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###################################################################
# Author: Mu yanru
# Date : 2019.2
# Email : muyanru345@163.com
###################################################################
import functools
import examples._mock_data as mock
from dayu_widgets3 import dayu_theme
from dayu_widgets3.divider import MDivider
from dayu_widgets3.field_mixin import MFieldMixin
from dayu_widgets3.item_model import MTableModel, MSortFilterModel
from dayu_widgets3.item_view import MTableView
from dayu_widgets3.line_edit import MLineEdit
from dayu_widgets3.loading import MLoadingWrapper
from dayu_widgets3.push_button import MPushButton
from dayu_widgets3.qt import *
def h(*args):
    """Build a widget declaratively: ``h(Cls, {'props': ..., 'on': ...}, [children])``.

    The first argument is the widget class; dict arguments set Qt properties
    and connect signals, list arguments become children in a horizontal layout.
    """
    widget_cls = args[0]
    widget = widget_cls()
    for arg in args:
        if isinstance(arg, dict):
            for attr, value in arg.get('props', {}).items():
                widget.setProperty(attr, value)
            for signal, slot in arg.get('on', {}).items():
                widget.connect(widget, SIGNAL(signal), slot)
        elif isinstance(arg, list):
            layout = QHBoxLayout()
            for child in arg:
                layout.addWidget(child)
            widget.setLayout(layout)
    return widget
class MFetchDataThread(QThread):
    """Demo worker thread that simulates a slow data fetch (4 seconds)."""
    def __init__(self, parent=None):
        super(MFetchDataThread, self).__init__(parent)
    def run(self, *args, **kwargs):
        # Simulate a long-running fetch; the UI shows a loading overlay
        # between this thread's started and finished signals.
        import time
        time.sleep(4)
class TableViewExample(QWidget, MFieldMixin):
    """Showcase MTableView in several sizes with shared sorting/filtering,
    plus a loading overlay driven by a mock fetch thread."""
    def __init__(self, parent=None):
        super(TableViewExample, self).__init__(parent)
        self._init_ui()
    def _init_ui(self):
        # One source model plus a sort/filter proxy shared by all the views.
        model_1 = MTableModel()
        model_1.set_header_list(mock.header_list)
        model_sort = MSortFilterModel()
        model_sort.setSourceModel(model_1)
        table_small = MTableView(size=dayu_theme.small, show_row_count=True)
        table_grid = MTableView(size=dayu_theme.small, show_row_count=True)
        table_grid.setShowGrid(True)
        table_default = MTableView(size=dayu_theme.medium, show_row_count=True)
        # The default-size table receives its model only after the fake fetch
        # thread finishes; a loading overlay is shown while it runs.
        thread = MFetchDataThread(self)
        self.loading_wrapper = MLoadingWrapper(widget=table_default, loading=False)
        thread.started.connect(functools.partial(self.loading_wrapper.set_dayu_loading, True))
        thread.finished.connect(functools.partial(self.loading_wrapper.set_dayu_loading, False))
        thread.finished.connect(functools.partial(table_default.setModel, model_sort))
        button = MPushButton(text='Get Data: 4s')
        button.clicked.connect(thread.start)
        switch_lay = QHBoxLayout()
        switch_lay.addWidget(button)
        switch_lay.addStretch()
        table_large = MTableView(size=dayu_theme.large, show_row_count=False)
        table_small.setModel(model_sort)
        table_grid.setModel(model_sort)
        table_large.setModel(model_sort)
        model_sort.set_header_list(mock.header_list)
        table_small.set_header_list(mock.header_list)
        table_grid.set_header_list(mock.header_list)
        table_default.set_header_list(mock.header_list)
        table_large.set_header_list(mock.header_list)
        model_1.set_data_list(mock.data_list)
        # The search box filters every view through the shared proxy model.
        line_edit = MLineEdit().search().small()
        line_edit.textChanged.connect(model_sort.set_search_pattern)
        main_lay = QVBoxLayout()
        main_lay.addWidget(line_edit)
        main_lay.addWidget(MDivider('Small Size'))
        main_lay.addWidget(table_small)
        main_lay.addWidget(MDivider('Default Size'))
        main_lay.addLayout(switch_lay)
        main_lay.addWidget(self.loading_wrapper)
        main_lay.addWidget(MDivider('Large Size (Hide Row Count)'))
        main_lay.addWidget(table_large)
        main_lay.addWidget(MDivider('With Grid'))
        main_lay.addWidget(table_grid)
        main_lay.addStretch()
        self.setLayout(main_lay)
if __name__ == '__main__':
    import sys
    # Standard Qt bootstrapping: create the application, apply the dayu theme
    # to the example widget, show it and enter the event loop.
    app = QApplication(sys.argv)
    test = TableViewExample()
    dayu_theme.apply(test)
    test.show()
    sys.exit(app.exec_())
|
from __future__ import absolute_import, print_function, division
import numpy as np
# SciPy is an optional dependency; SparseType.__init__ refuses to construct
# an instance when it is unavailable (see the imported_scipy check below).
try:
    import scipy.sparse
    imported_scipy = True
except ImportError:
    imported_scipy = False
import theano
from theano import gof
from six import string_types
def _is_sparse(x):
"""
Returns
-------
boolean
True iff x is a L{scipy.sparse.spmatrix} (and not a L{numpy.ndarray}).
"""
if not isinstance(x, (scipy.sparse.spmatrix, np.ndarray, tuple, list)):
raise NotImplementedError("this function should only be called on "
"sparse.scipy.sparse.spmatrix or "
"numpy.ndarray, not,", x)
return isinstance(x, scipy.sparse.spmatrix)
class SparseType(gof.Type):
    """
    Fundamental way to create a sparse node.
    Parameters
    ----------
    dtype : numpy dtype string such as 'int64' or 'float64' (among others)
        Type of numbers in the matrix.
    format: str
        The sparse storage strategy.
    Returns
    -------
    An empty SparseVariable instance.
    Notes
    -----
    As far as I can tell, L{scipy.sparse} objects must be matrices, i.e.
    have dimension 2.
    """
    # Map of supported storage formats to their scipy constructors; only
    # defined when scipy could be imported (see module-level guard).
    if imported_scipy:
        format_cls = {'csr': scipy.sparse.csr_matrix,
                      'csc': scipy.sparse.csc_matrix,
                      'bsr': scipy.sparse.bsr_matrix}
    # Dtypes a SparseType instance may carry.
    dtype_set = set(['int8', 'int16', 'int32', 'int64', 'float32',
                     'uint8', 'uint16', 'uint32', 'uint64',
                     'float64', 'complex64', 'complex128'])
    # scipy.sparse matrices are always 2-dimensional.
    ndim = 2
    # Will be set to SparseVariable SparseConstant later.
    Variable = None
    Constant = None
    def __init__(self, format, dtype):
        """Validate *format* and *dtype* against the supported sets."""
        if not imported_scipy:
            raise Exception("You can't make SparseType object as SciPy"
                            " is not available.")
        dtype = str(dtype)
        if dtype in self.dtype_set:
            self.dtype = dtype
        else:
            raise NotImplementedError('unsupported dtype "%s" not in list' %
                                      dtype, list(self.dtype_set))
        assert isinstance(format, string_types)
        if format in self.format_cls:
            self.format = format
        else:
            raise NotImplementedError('unsupported format "%s" not in list' %
                                      format, list(self.format_cls.keys()))
    def filter(self, value, strict=False, allow_downcast=None):
        """Coerce *value* to a sparse matrix of this type's format and dtype.

        With ``strict`` the value must already match exactly; otherwise it is
        converted, downcasting only when ``allow_downcast`` is true.
        """
        if isinstance(value, self.format_cls[self.format])\
                and value.dtype == self.dtype:
            return value
        if strict:
            raise TypeError("%s is not sparse, or not the right dtype (is %s, "
                            "expected %s)" % (value, value.dtype, self.dtype))
        # The input format could be converted here
        if allow_downcast:
            sp = self.format_cls[self.format](value, dtype=self.dtype)
        else:
            sp = self.format_cls[self.format](value)
            if str(sp.dtype) != self.dtype:
                raise NotImplementedError("Expected %s dtype but got %s" %
                                          (self.dtype, str(sp.dtype)))
        if sp.format != self.format:
            raise NotImplementedError()
        return sp
    @staticmethod
    def may_share_memory(a, b):
        # This is Fred suggestion for a quick and dirty way of checking
        # aliasing .. this can potentially be further refined (ticket #374)
        if _is_sparse(a) and _is_sparse(b):
            return (SparseType.may_share_memory(a, b.data) or
                    SparseType.may_share_memory(a, b.indices) or
                    SparseType.may_share_memory(a, b.indptr))
        if _is_sparse(b) and isinstance(a, np.ndarray):
            a, b = b, a
        if _is_sparse(a) and isinstance(b, np.ndarray):
            if (np.may_share_memory(a.data, b) or
                    np.may_share_memory(a.indices, b) or
                    np.may_share_memory(a.indptr, b)):
                # currently we can't share memory with a.shape as it is a tuple
                return True
        return False
    def make_variable(self, name=None):
        """Create a Variable of this type (self.Variable is patched in later)."""
        return self.Variable(self, name=name)
    def __eq__(self, other):
        # Two SparseTypes are equal when both dtype and format agree.
        return (type(self) == type(other) and other.dtype == self.dtype and
                other.format == self.format)
    def __hash__(self):
        # Consistent with __eq__: hash over dtype and format.
        return hash(self.dtype) ^ hash(self.format)
    def __str__(self):
        return "Sparse[%s, %s]" % (str(self.dtype), str(self.format))
    def __repr__(self):
        return "Sparse[%s, %s]" % (str(self.dtype), str(self.format))
    def values_eq_approx(self, a, b, eps=1e-6):
        """Approximate equality: all entries of ``a - b`` are within *eps*."""
        # WARNING: equality comparison of sparse matrices is not fast or easy
        # we definitely do not want to be doing this un-necessarily during
        # a FAST_RUN computation..
        if not scipy.sparse.issparse(a) or not scipy.sparse.issparse(b):
            return False
        diff = abs(a - b)
        if diff.nnz == 0:
            return True
        # Built-in max from python is not implemented for sparse matrix as a
        # reduction. It returns a sparse matrix which cannot be compared to a
        # scalar. When comparing sparse to scalar, no exceptions is raised and
        # the returning value is not consistent. That is why it is apply to a
        # numpy.ndarray.
        return max(diff.data) < eps
    def values_eq(self, a, b):
        """Exact equality: both sparse and every entry of ``a - b`` is zero."""
        # WARNING: equality comparison of sparse matrices is not fast or easy
        # we definitely do not want to be doing this un-necessarily during
        # a FAST_RUN computation..
        return scipy.sparse.issparse(a) \
            and scipy.sparse.issparse(b) \
            and abs(a - b).sum() == 0.0
    def is_valid_value(self, a):
        """Return True when *a* is a sparse matrix of this type's format."""
        return scipy.sparse.issparse(a) and (a.format == self.format)
    def get_shape_info(self, obj):
        """Return (shape, data size, indices size, indptr size, nnz) for *obj*."""
        obj = self.filter(obj)
        assert obj.indices.dtype == 'int32'
        assert obj.indptr.dtype == 'int32'
        return (obj.shape, obj.data.size,
                obj.indices.size, obj.indptr.size, obj.nnz)
    def get_size(self, shape_info):
        """Approximate memory footprint in bytes from get_shape_info output."""
        return (shape_info[1] * np.dtype(self.dtype).itemsize +
                (shape_info[2] + shape_info[3]) * np.dtype('int32').itemsize)
# Register SparseType's C code for ViewOp.
theano.compile.register_view_op_c_code(
SparseType,
"""
Py_XDECREF(%(oname)s);
%(oname)s = %(iname)s;
Py_XINCREF(%(oname)s);
""",
1)
|
"""Initial Migration
Revision ID: c31efd831ee7
Revises:
Create Date: 2021-09-15 19:37:00.295751
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c31efd831ee7'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``address`` table and the ``person`` table referencing it."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('address',
                    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
                    sa.Column('street', sa.String(), nullable=False),
                    sa.Column('city', sa.String(), nullable=False),
                    sa.Column('postcode', sa.String(), nullable=False),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_table('person',
                    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
                    sa.Column('address_id', sa.Integer(), nullable=True),
                    sa.Column('name', sa.String(), nullable=False),
                    sa.Column('age', sa.Float(), nullable=False),
                    sa.ForeignKeyConstraint(['address_id'], ['address.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the tables created by :func:`upgrade` (child table first)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('person')
    op.drop_table('address')
    # ### end Alembic commands ###
|
def sort_words(input):
    """Print each word of *input* on its own line, lower-cased and sorted.

    Note: the parameter name shadows the ``input`` builtin; it is kept for
    backward compatibility with existing callers.
    """
    for word in sorted(part.lower() for part in input.split()):
        print(word)


sort_words('PINEAPPLE Grapes oraNge APPLE meloN')
|
#!/usr/bin/env python3
import os
import re
import sys
import glob
import time
import argparse
import datetime
import subprocess
import collections
ROOT = os.path.dirname(os.path.abspath(__file__))
S3TESTS_ROOT = os.path.join(ROOT, "s3-tests")
RAN_PATTERN = re.compile(r"^Ran (\d+) tests in [\d\.]+s")
FAILED_PATTERN = re.compile(r"^FAILED \((SKIP=(\d+))?(, )?(errors=(\d+))?(, )?(failures=(\d+))?\)")
ERROR_PATTERN = re.compile(r"^(FAIL|ERROR): (.+)")
TRACEBACK_PREFIXES = [
"Traceback (most recent call last):",
"----------------------------------------------------------------------",
" ",
]
# Ignore tests with these attributes. They're ignored because s2 itself
# doesn't support this functionality.
BLACKLISTED_ATTRIBUTES = [
"list-objects-v2",
"cors",
"lifecycle",
"encryption",
"bucket-policy",
"tagging",
"object-lock",
"appendobject",
]
def compute_stats(filename):
    """Parse a nosetests log and return ``(passed, total_not_skipped)``.

    Returns (0, 0) when the log contains no "Ran N tests" summary line.
    """
    ran = skipped = errored = failed = 0
    with open(filename, "r") as log:
        for line in log:
            ran_match = RAN_PATTERN.match(line)
            if ran_match:
                ran = int(ran_match.group(1))
                continue
            failed_match = FAILED_PATTERN.match(line)
            if failed_match:
                # Each count is absent from the summary when it is zero.
                skipped = int(failed_match.group(2) or 0)
                errored = int(failed_match.group(5) or 0)
                failed = int(failed_match.group(8) or 0)
    if ran == 0:
        return (0, 0)
    return (ran - skipped - errored - failed, ran - skipped)
def run_nosetests(config, test=None, env=None, stderr=None):
    """Run the s3-tests nosetests suite against the given config file.

    Parameters
    ----------
    config : str
        Path to the s3-tests config file; exported to the run as S3TEST_CONF.
        Exits the process when the file does not exist.
    test : str, optional
        A single test (nose ``module:name`` syntax); all tests when None.
    env : dict, optional
        Extra environment variables layered over the current environment.
    stderr : file-like, optional
        Destination for the nosetests stderr stream (its test output).
    """
    config = os.path.abspath(config)
    if not os.path.exists(config):
        print("config file does not exist: {}".format(config), file=sys.stderr)
        sys.exit(1)
    all_env = dict(os.environ)
    all_env["S3TEST_CONF"] = config
    if env is not None:
        all_env.update(env)
    pwd = os.path.join(ROOT, "s3-tests")
    # Exclude tests carrying any blacklisted attribute ("!attr" negates).
    args = [os.path.join("virtualenv", "bin", "nosetests"), "-a", ",".join("!{}".format(a) for a in BLACKLISTED_ATTRIBUTES)]
    if test is not None:
        args.append(test)
    proc = subprocess.run(args, env=all_env, cwd=pwd, stderr=stderr)
    print("Test run exited with {}".format(proc.returncode))
def print_failures(runs_dir):
    """Summarise the most recent test-run log in *runs_dir*.

    Prints overall pass counts (compared against the previous run when one
    exists), then groups failing tests by their one-line failure cause,
    most common cause first.  Exits the process when no logs are found.
    """
    log_files = sorted(glob.glob(os.path.join(runs_dir, "*.txt")))
    if len(log_files) == 0:
        print("No log files found", file=sys.stderr)
        sys.exit(1)
    # Sorted timestamps: [-1] is the latest run, [-2] the one before it.
    old_stats = None
    if len(log_files) > 1:
        old_stats = compute_stats(log_files[-2])
    filepath = log_files[-1]
    stats = compute_stats(filepath)
    if old_stats:
        print("Overall results: {}/{} (vs last run: {}/{})".format(*stats, *old_stats))
    else:
        print("Overall results: {}/{}".format(*stats))
    # Two-state scan: after a FAIL/ERROR header, the first line that is not
    # part of the traceback is taken as the failure's cause.
    failing_test = None
    causes = collections.defaultdict(lambda: [])
    with open(filepath, "r") as f:
        for line in f:
            line = line.rstrip()
            if failing_test is None:
                match = ERROR_PATTERN.match(line)
                if match is not None:
                    failing_test = match.groups()[1]
            else:
                if not any(line.startswith(p) for p in TRACEBACK_PREFIXES):
                    causes[line].append(failing_test)
                    failing_test = None
    # Most frequent causes first; long cause lines are truncated for display.
    causes = sorted(causes.items(), key=lambda i: len(i[1]), reverse=True)
    for (cause_name, failing_tests) in causes:
        if len(cause_name) > 160:
            print("{} [...]:".format(cause_name[:160]))
        else:
            print("{}:".format(cause_name))
        for failing_test in failing_tests:
            print("- {}".format(failing_test))
def main():
    """CLI entry point: run the conformance suite and/or report failures.

    ``--no-run`` only reports on the latest log; ``--test`` runs a single
    test without logging; otherwise a full run is logged to a timestamped
    file under ``--runs-dir`` and then summarised.
    """
    parser = argparse.ArgumentParser(description="Runs the s2 conformance test suite.")
    parser.add_argument("--no-run", default=False, action="store_true", help="Disables a test run, and just prints failure data from the last test run")
    parser.add_argument("--test", default="", help="Run a specific test")
    parser.add_argument("--s3tests-config", required=True, help="Path to the s3-tests config file")
    parser.add_argument("--ignore-config", default=None, help="Path to the ignore config file")
    parser.add_argument("--runs-dir", default=None, help="Path to the directory holding test runs")
    args = parser.parse_args()
    # runs-dir is only needed when we log a full run or report on past runs.
    if (not args.runs_dir) and (args.no_run or not args.test):
        print("Must specify `--runs-dir`", file=sys.stderr)
        sys.exit(1)
    if args.no_run:
        print_failures(args.runs_dir)
        return
    if args.test:
        print("Running test {}".format(args.test))
        # In some places, nose and its plugins expect tests to be
        # specified as testmodule.testname, but here, it's expected to be
        # testmodule:testname. This replaces the last . with a : so that
        # the testmodule.testname format can be used everywhere, including
        # here.
        if "." in args.test and not ":" in args.test:
            test = ":".join(args.test.rsplit(".", 1))
        else:
            test = args.test
        run_nosetests(args.s3tests_config, test=test)
    else:
        print("Running tests")
        if args.ignore_config:
            # This uses the `nose-exclude` plugin to exclude tests for
            # unsupported features. Note that `nosetest` does have a built-in
            # way of excluding tests, but it only seems to match on top-level
            # modules, rather than on specific tests.
            ignores = []
            with open(args.ignore_config, "r") as f:
                for line in f:
                    line = line.strip()
                    if line and not line.startswith("#"):
                        ignores.append(line)
            env = {
                "NOSE_EXCLUDE_TESTS": ";".join(ignores)
            }
        else:
            env = None
        # One timestamped log file per run; stderr carries the nose output.
        filepath = os.path.join(args.runs_dir, datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S.txt"))
        with open(filepath, "w") as f:
            run_nosetests(args.s3tests_config, env=env, stderr=f)
        print_failures(args.runs_dir)
if __name__ == "__main__":
    main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import pprint
import shutil
import os.path
import datetime
from brainvisa.installer.package import Package
from brainvisa.installer.project import Project
from brainvisa.installer.repository import Repository
from brainvisa.installer.bvi_xml.configuration import Configuration
from brainvisa.installer.bvi_xml.ifw_config import IFWConfig
from brainvisa.installer.bvi_utils.xml_file import XmlFile
from brainvisa.installer.bvi_utils.paths import Paths
# Directory of this test module; fixtures live under in/ and outputs under out/.
FULLPATH = os.path.dirname(os.path.abspath(__file__))
# Today's date in the format repositories use for their release date.
CURRENTDATE = datetime.datetime.now().strftime("%Y-%m-%d")
def test_Repository_init():
    """Repository stores its constructor arguments and stamps today's date."""
    x = Configuration("%s/in/configuration.xml" % FULLPATH)
    folder = "%s/out/repository" % FULLPATH
    y = Repository(folder, x, None)
    assert y.folder == folder
    assert y.configuration == x
    assert y.date == CURRENTDATE
    # Idiom fix: identity comparison with None uses `is`, not `==` (E711).
    assert y.components is None
def test_Repository_mkdir():
    """__mkdir returns False for an existing directory, True after creating one."""
    folder_exists = "%s/out/exists" % FULLPATH
    os.mkdir(folder_exists)
    assert os.path.isdir(folder_exists)
    # Idiom fix: compare to the boolean singletons with `is`, not `==` (E712).
    assert Repository._Repository__mkdir(folder_exists) is False
    folder_not_exists = "%s/out/notexists" % FULLPATH
    assert Repository._Repository__mkdir(folder_not_exists) is True
    assert os.path.isdir(folder_not_exists)
def test_Repository__create_config():
    """__create_config writes config/config.xml with the expected metadata
    and the four remote repository URLs."""
    x = Configuration("%s/in/configuration.xml" % FULLPATH)
    folder = "%s/out/repository" % FULLPATH
    os.mkdir(folder)
    y = Repository(folder, x, None)
    # Exercise the name-mangled private method directly.
    y._Repository__create_config()
    assert os.path.isdir("%s/config" % folder)
    assert os.path.isfile("%s/config/config.xml" % folder)
    z = XmlFile()
    z.read("%s/config/config.xml" % folder)
    assert z.root.find('Name').text == 'BrainVISA Installer'
    assert z.root.find('Version').text == '1.0.0'
    assert z.root.find('Title').text == 'BrainVISA Installer'
    assert z.root.find('Publisher').text == 'CEA IFR49 / I2BM'
    assert z.root.find('ProductUrl').text == 'http://brainvisa.info/'
    # First and last of the per-platform repository URL entries.
    rr = z.root.find('RemoteRepositories')
    assert rr[0].find('Url').text == 'http://localhost/repositories/win32/'
    assert rr[3].find('Url').text == 'http://localhost/repositories/linux64/'
def test_Repository__create_packages_app():
    """__create_packages_app writes the brainvisa.app meta package.xml."""
    x = Configuration("%s/in/configuration.xml" % FULLPATH)
    folder = "%s/out/repository_pack_app" % FULLPATH
    os.mkdir(folder)
    os.mkdir("%s/packages" % folder)
    y = Repository(folder, x, None)
    y._Repository__create_packages_app()
    filename = '%s/packages/brainvisa.app/meta/package.xml' % folder
    assert os.path.isdir('%s/packages/brainvisa.app' % folder)
    assert os.path.isdir('%s/packages/brainvisa.app/meta' % folder)
    assert os.path.isfile(filename)
    # Fix: read the file once with a context manager; the original re-opened
    # it for every assertion and leaked the file handles.
    with open(filename, 'r') as f:
        content = f.read()
    assert '<DisplayName>%s</DisplayName>' % x.category_by_id('APP').Name in content
    assert '<ReleaseDate>%s</ReleaseDate>' % CURRENTDATE in content
    assert '<Name>brainvisa.app</Name>' in content
def test_Repository__create_packages_dev():
    """__create_packages_dev writes the brainvisa.dev meta package.xml."""
    x = Configuration("%s/in/configuration.xml" % FULLPATH)
    folder = "%s/out/repository_pack_dev" % FULLPATH
    os.mkdir(folder)
    os.mkdir("%s/packages" % folder)
    y = Repository(folder, x, None)
    y._Repository__create_packages_dev()
    filename = '%s/packages/brainvisa.dev/meta/package.xml' % folder
    assert os.path.isdir('%s/packages/brainvisa.dev' % folder)
    assert os.path.isdir('%s/packages/brainvisa.dev/meta' % folder)
    assert os.path.isfile(filename)
    # Fix: read the file once with a context manager; the original re-opened
    # it for every assertion and leaked the file handles.
    with open(filename, 'r') as f:
        content = f.read()
    assert '<DisplayName>%s</DisplayName>' % x.category_by_id('DEV').Name in content
    assert '<ReleaseDate>%s</ReleaseDate>' % CURRENTDATE in content
    assert '<Name>brainvisa.dev</Name>' in content
def test_Repository__create_packages_thirdparty():
    """__create_packages_thirdparty writes the virtual thirdparty package."""
    x = Configuration("%s/in/configuration.xml" % FULLPATH)
    folder = "%s/out/repository_pack_tp" % FULLPATH
    os.mkdir(folder)
    os.mkdir("%s/packages" % folder)
    y = Repository(folder, x, None)
    y._Repository__create_packages_thirdparty()
    filename = '%s/packages/brainvisa.app.thirdparty/meta/package.xml' % folder
    assert os.path.isdir('%s/packages/brainvisa.app.thirdparty' % folder)
    assert os.path.isdir('%s/packages/brainvisa.app.thirdparty/meta' % folder)
    assert os.path.isfile(filename)
    # Fix: read the file once with a context manager; the original re-opened
    # it for every assertion and leaked the file handles.
    with open(filename, 'r') as f:
        content = f.read()
    assert '<DisplayName>Thirdparty</DisplayName>' in content
    assert '<ReleaseDate>%s</ReleaseDate>' % CURRENTDATE in content
    assert '<Name>brainvisa.app.thirdparty</Name>' in content
    assert '<Virtual>true</Virtual>' in content
def test_Repository__create_packages_licenses():
    """__create_packages_licenses writes the licenses meta package and the
    CeCILL-B sub-package with its license text file."""
    x = Configuration("%s/in/configuration.xml" % FULLPATH)
    folder = "%s/out/repository_pack_lic" % FULLPATH
    os.mkdir(folder)
    os.mkdir("%s/packages" % folder)
    y = Repository(folder, x, None)
    y._Repository__create_packages_licenses()
    filename = '%s/packages/brainvisa.app.licenses/meta/package.xml' % folder
    assert os.path.isdir('%s/packages/brainvisa.app.licenses' % folder)
    assert os.path.isdir('%s/packages/brainvisa.app.licenses/meta' % folder)
    assert os.path.isfile(filename)
    # Fix: read each file once with a context manager; the original re-opened
    # the files for every assertion and leaked the file handles.
    with open(filename, 'r') as f:
        content = f.read()
    assert '<DisplayName>Licenses</DisplayName>' in content
    assert '<ReleaseDate>%s</ReleaseDate>' % CURRENTDATE in content
    assert '<Name>brainvisa.app.licenses</Name>' in content
    assert '<Virtual>true</Virtual>' in content
    assert os.path.isdir(
        '%s/packages/brainvisa.app.licenses.cecill_b' % folder)
    assert os.path.isdir(
        '%s/packages/brainvisa.app.licenses.cecill_b/meta' % folder)
    filename_lic = '%s/packages/brainvisa.app.licenses.cecill_b/meta/package.xml' % folder
    assert os.path.isfile(filename_lic)
    assert os.path.isfile(
        '%s/packages/brainvisa.app.licenses.cecill_b/meta/License_CeCILL-B_V1_en_EN.txt' % folder)
    with open(filename_lic, 'r') as f:
        lic_content = f.read()
    assert '<License' in lic_content
    assert 'name="CeCILL-B"' in lic_content
    assert 'file="License_CeCILL-B_V1_en_EN.txt"' in lic_content
def test_Repository__create_package_bv_env():
    """__create_package_bv_env writes the virtual bv_env package with its
    data scripts and installer script."""
    x = Configuration("%s/in/configuration_script.xml" % FULLPATH)
    folder = "%s/out/repository_pack_bv_env" % FULLPATH
    os.mkdir(folder)
    os.mkdir("%s/packages" % folder)
    y = Repository(folder, x, None)
    y._Repository__create_package_bv_env()
    filename = '%s/packages/brainvisa.app.thirdparty.bv_env/meta/package.xml' % folder
    assert os.path.isdir(
        '%s/packages/brainvisa.app.thirdparty.bv_env/meta' % folder)
    assert os.path.isdir(
        '%s/packages/brainvisa.app.thirdparty.bv_env/data' % folder)
    assert os.path.isfile(filename)
    # Fix: read the file once with a context manager; the original re-opened
    # it for every assertion and leaked the file handles.
    with open(filename, 'r') as f:
        content = f.read()
    assert '<Version>1.0</Version>' in content
    assert '<Name>brainvisa.app.thirdparty.bv_env</Name>' in content
    assert '<Virtual>true</Virtual>' in content
    assert os.path.isfile(
        '%s/packages/brainvisa.app.thirdparty.bv_env/data/bin/bv_env.sh' % folder)
    assert os.path.isfile(
        '%s/packages/brainvisa.app.thirdparty.bv_env/data/bin/bv_unenv' % folder)
    assert os.path.isfile(
        '%s/packages/brainvisa.app.thirdparty.bv_env/data/bin/bv_unenv.sh' % folder)
    assert os.path.isfile(
        '%s/packages/brainvisa.app.thirdparty.bv_env/meta/script.qs' % folder)
def test_Repository_create():
    """End-to-end: build a repository from the four default projects."""
    config = Configuration()
    projects = [Project(name, config)
                for name in ('soma', 'aims', 'anatomist', 'axon')]
    repo = Repository(folder="%s/out/Repository_Final" % FULLPATH,
                      configuration=config,
                      components=projects)
    repo.create()
|
import numpy as np
import pandas as pd
import os
from sklearn.cluster import KMeans
from sklearn.metrics.cluster import adjusted_rand_score
import argparse
# Command-line interface: one spatial-transcriptomics sample per run.
parser = argparse.ArgumentParser(description='Embedding_Ground_Truth_Quality_Rank_')
parser.add_argument('--sampleName', type=str, default='151507')
args = parser.parse_args()

if __name__ == '__main__':
    sample = args.sampleName

    # Input/output locations, resolved relative to the working directory.
    meta_folder_path = os.path.abspath('./meta_data_folder/metaData_brain_16_coords')
    embedding_folder_path = os.path.abspath('./RESEPT_embedding_folder')
    output_folder_path = os.path.abspath('./Embedding_Ground_Truth_Quality_Rank_'+sample+'/')
    if not os.path.exists(output_folder_path):
        os.makedirs(output_folder_path)

    # Hyper-parameter grid: 8 PEalpha values x 7 zdim values = 56 embeddings
    # per sample (hence the count stride of 56 below).
    knn_distanceList = ['euclidean']
    PEalphaList = ['0.1', '0.2', '0.3', '0.5', '1.0', '1.2', '1.5', '2.0']
    zdimList = ['3', '10', '16', '32', '64', '128', '256']

    # The 16 supported samples; `letter` prefixes the embedding directory
    # names and `count` numbers embeddings globally (56 per sample).
    sample_list = ['151507', '151508', '151509', '151510', '151669', '151670',
                   '151671', '151672', '151673', '151674', '151675', '151676',
                   '18-64', '2-5', '2-8', 'T4857']
    letter_list = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',
                   'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p']
    count_init = sample_list.index(sample)
    count = 56 * count_init
    letter = letter_list[count_init]

    # Expected number of clusters for k-means (cortical layers + extras).
    n_clusters_num = 7
    if sample in ('151669', '151670', '151671', '151672'):
        n_clusters_num = 5
    elif sample == '2-8':
        n_clusters_num = 6

    # Ground-truth labels are identical for every embedding of the sample,
    # so read and encode them once (the original re-read the CSV 56 times).
    meta_df = pd.read_csv(meta_folder_path+'/'+sample+'_humanBrain_metaData.csv')
    if sample in ('2-5', '2-8', '18-64', 'T4857'):
        # This cohort spells its labels 'Layer 1' ... 'Layer 6'.
        layer_label_map = {'Layer 1': 1, 'Layer 2': 2, 'Layer 3': 3,
                           'Layer 4': 4, 'Layer 5': 5, 'Layer 6': 6}
    else:
        # The 151507-151676 DLPFC samples use 'Layer1' ... 'Layer6'.
        layer_label_map = {'Layer1': 1, 'Layer2': 2, 'Layer3': 3,
                           'Layer4': 4, 'Layer5': 5, 'Layer6': 6}
    # Anything unmapped (white matter, noise, NaN) becomes 0.  The original
    # tested `value is np.NAN`, an identity check that never matches pandas'
    # NaN floats (and np.NAN is removed in NumPy 2.0); the default-0 path of
    # dict.get covers all of those cases.
    ground_truth_label_np = np.array(
        [layer_label_map.get(value, 0) for value in meta_df['benmarklabel']],
        dtype=float)
    print(ground_truth_label_np)

    embedding_name_list = []
    ari_result_list = []
    for i in range(len(PEalphaList)):
        for j in range(len(zdimList)):
            count = count + 1
            embedding_root_path = '/'+sample+'_embedding_raw/'+letter+'_'+str(count)+'_outputdir-3S-'+sample+'_raw_EM1_resolution0.3_euclidean_dummy_add_PEalpha'+str(PEalphaList[i])+'_k6_NA_zdim'+str(zdimList[j])+'/'+sample+'_raw_6_euclidean_NA_dummy_add_'+str(PEalphaList[i])+'_intersect_160_GridEx19_embedding.csv'
            embedding_df = pd.read_csv(embedding_folder_path+embedding_root_path)
            embedding_name = sample+'_'+letter+'_'+str(count)+'_raw_res0.3_euclidean_NA_dummy_add_PEalpha'+str(PEalphaList[i])+'_k6_NA_zdim'+str(zdimList[j])
            embedding_name_list.append(embedding_name)

            # Cluster the 3-D embedding and score it against the ground truth.
            X = embedding_df[['embedding0', 'embedding1', 'embedding2']].values
            print(X.shape)
            kmeans = KMeans(n_clusters=n_clusters_num, random_state=0).fit(X)
            kmeans_label = kmeans.labels_
            print(kmeans_label)
            ari = adjusted_rand_score(kmeans_label, ground_truth_label_np)
            ari_result_list.append(ari)

    # Rank embeddings by ARI (best first), renumber 1..N, write to disk.
    order_num_list = list(range(1, len(ari_result_list) + 1))
    ARI_k_means_result = pd.DataFrame({'Name': embedding_name_list,
                                       'ARI_k_means': ari_result_list})
    ARI_k_means_result_sort = ARI_k_means_result.sort_values(
        by=['ARI_k_means'], ascending=False)
    ARI_k_means_result_sort.index = order_num_list
    ARI_k_means_result_sort.to_csv(
        output_folder_path+'/'+sample+'_raw_embedding_ground_truth_rank.csv')
|
"""
Entrypoint script for the FastAPI application
"""
import os
from fastapi import FastAPI
from routers import datasets, samples
from database import engine, Base
# Application working directory, kept under the user's home.
LABLR_DIR = f"{os.environ.get('HOME')}/.lablr"
if not os.path.exists(LABLR_DIR):
    os.makedirs(LABLR_DIR)

# Create any missing database tables before the app starts serving.
Base.metadata.create_all(bind=engine)

# All routes (including docs) live under the versioned API prefix.
PREFIX = "/api/v1"

app = FastAPI(
    title="Lablr Backend",
    description="Backend for the Lablr annotation tool",
    version="0.0.0",
    docs_url=f"{PREFIX}/docs",
    openapi_url=f"{PREFIX}/openapi.json",
)

# Mount the resource routers under the versioned API prefix.
for router_module in (datasets, samples):
    app.include_router(router_module.router, prefix=PREFIX)
|
"""
Copyright (c) 2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from pathlib import Path
import pytest
import tensorflow as tf
import tensorflow_addons as tfa
from nncf import NNCFConfig
from nncf.common.compression import BaseCompressionAlgorithmController
from nncf.common.composite_compression import CompositeCompressionAlgorithmController
from nncf.tensorflow.helpers.callback_creation import create_compression_callbacks
from nncf.tensorflow.helpers.model_creation import create_compressed_model
from examples.tensorflow.common.callbacks import get_callbacks
from examples.tensorflow.common.callbacks import get_progress_bar
MODEL_PATH = Path(__file__).parent.parent.parent / 'data' / 'mock_models' / 'LeNet.h5'
def get_basic_sparsity_config(model_size=4, input_sample_size=None,
                              sparsity_init=0.02, sparsity_target=0.5, sparsity_target_epoch=2,
                              sparsity_freeze_epoch=3, scheduler='polinomial'):
    """Build an NNCFConfig describing a basic RB-sparsity compression setup.

    All scheduler-related knobs are forwarded into the compression "params"
    section; `input_sample_size` defaults to a single 1x4x4 sample.
    """
    if input_sample_size is None:
        input_sample_size = [1, 1, 4, 4]
    scheduler_params = {
        "schedule": scheduler,
        "sparsity_target": sparsity_target,
        "sparsity_target_epoch": sparsity_target_epoch,
        "sparsity_freeze_epoch": sparsity_freeze_epoch,
    }
    config = NNCFConfig()
    config.update({
        "model": "basic_sparse_conv",
        "model_size": model_size,
        "input_info": {
            "sample_size": input_sample_size,
        },
        "compression": {
            "algorithm": "rb_sparsity",
            "sparsity_init": sparsity_init,
            "params": scheduler_params,
        },
    })
    return config
def get_lenet_model():
    """Assemble an (uncompiled) LeNet-style Keras model for 28x28x1 inputs."""
    layers = tf.keras.layers
    inputs = tf.keras.Input((28, 28, 1))
    hidden = layers.Conv2D(32, 5)(inputs)
    hidden = layers.MaxPool2D()(hidden)
    hidden = layers.Conv2D(48, 5)(hidden)
    hidden = layers.MaxPool2D()(hidden)
    hidden = layers.Flatten()(hidden)
    hidden = layers.Dense(256)(hidden)
    hidden = layers.Dense(84)(hidden)
    outputs = layers.Dense(10, activation='softmax')(hidden)
    return tf.keras.Model(inputs=inputs, outputs=outputs)
def train_lenet():
    """Train LeNet on MNIST and save the weights to MODEL_PATH."""
    (x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()

    def to_nhwc(images):
        # Reshape the flat images to NCHW, transpose to NHWC, scale to [0, 1].
        return tf.transpose(tf.reshape(images, (-1, 1, 28, 28)), (0, 2, 3, 1)) / 255

    x_train = to_nhwc(x_train)
    x_test = to_nhwc(x_test)

    model = get_lenet_model()
    model.compile(
        optimizer=tf.keras.optimizers.Adam(5e-4),
        loss=tf.keras.losses.SparseCategoricalCrossentropy(),
        metrics=["accuracy"],
    )
    model.fit(x_train, y_train, batch_size=64, epochs=16, validation_split=0.2,
              callbacks=tf.keras.callbacks.ReduceLROnPlateau())
    loss, accuracy = model.evaluate(x_test, y_test, verbose=2)
    print("Test loss:", loss)
    print("Test accuracy:", accuracy)
    model.save(MODEL_PATH)
@pytest.mark.parametrize('distributed', [False, True], ids=['not_distributed', 'distributed'])
@pytest.mark.parametrize('quantized', [False, True], ids=['without_quantization', 'with_quantization'])
def test_rb_sparse_target_lenet(distributed, quantized):
    """Train a compressed LeNet for 5 epochs and verify the RB-sparsity
    schedule: the measured sparsity must track the scheduler's target until
    ``freeze_epoch``, after which the sparsity loss must be disabled and
    evaluate to 0.  Parametrized over distributed strategy and an optional
    composite (sparsity + quantization) compression setup.
    """
    # Train and cache the FP32 baseline the first time the test suite runs.
    if not os.path.exists(MODEL_PATH):
        train_lenet()
    (x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
    # Two test samples are enough; validation only has to execute.
    x_test, y_test = x_test[:2], y_test[:2]
    # Reshape to NCHW then transpose to NHWC; scale pixels to [0, 1].
    x_train = tf.transpose(tf.reshape(x_train, (-1, 1, 28, 28)), (0, 2, 3, 1))
    x_test = tf.transpose(tf.reshape(x_test, (-1, 1, 28, 28)), (0, 2, 3, 1))
    x_train = x_train / 255
    x_test = x_test / 255
    batch_size = 128
    if distributed:
        num_of_replicas = 3
        strategy = tf.distribute.MirroredStrategy([f'GPU:{i}' for i in range(num_of_replicas)])
    else:
        strategy = tf.distribute.OneDeviceStrategy('device:CPU:0')
    tf.keras.backend.clear_session()
    with strategy.scope():
        dataset_train = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(batch_size)
        dataset_test = tf.data.Dataset.from_tensor_slices((x_test, y_test)).batch(batch_size)
        # Disable auto-sharding: these datasets come from in-memory tensors
        # (from_tensor_slices), not from shardable files.
        options = tf.data.Options()
        options.experimental_distribute.auto_shard_policy = tf.data.experimental.AutoShardPolicy.OFF
        dataset_train = dataset_train.with_options(options)
        dataset_test = dataset_test.with_options(options)

        model = get_lenet_model()
        model.load_weights(MODEL_PATH)

        freeze_epoch = 4
        config = get_basic_sparsity_config(sparsity_init=0.04, sparsity_target=0.3,
                                           sparsity_target_epoch=3, sparsity_freeze_epoch=freeze_epoch,
                                           scheduler='exponential')
        if quantized:
            # Wrap the sparsity section into a composite config with quantization.
            config.update({'compression': [config['compression'], {'algorithm': 'quantization'}]})

        compression_state_to_skip_init = {BaseCompressionAlgorithmController.BUILDER_STATE: dict()}
        compress_algo, compress_model = create_compressed_model(model, config, compression_state_to_skip_init)
        compression_callbacks = create_compression_callbacks(compress_algo, log_tensorboard=True, log_dir='logdir/')
        # With quantization the controller is composite; the sparsity
        # controller is its first child, otherwise it is the controller itself.
        sparse_algo = compress_algo.child_ctrls[0] \
            if isinstance(compress_algo, CompositeCompressionAlgorithmController) else compress_algo

        class SparsityRateTestCallback(tf.keras.callbacks.Callback):
            # Per-epoch assertion of the sparsity schedule.
            def on_epoch_end(self, epoch, logs=None):
                target = sparse_algo.loss.target_sparsity_rate
                nncf_stats = sparse_algo.statistics()
                actual = nncf_stats.rb_sparsity.model_statistics.sparsity_level_for_layers
                print(f'target {target}, actual {actual}')
                if epoch + 1 <= freeze_epoch:
                    # Before the freeze epoch the model must track the target.
                    assert abs(actual - target) < 0.05
                else:
                    # After the freeze epoch the sparsity loss must be off.
                    assert tf.cast(sparse_algo.loss.disabled, tf.bool)
                    assert tf.equal(sparse_algo.loss.calculate(), tf.constant(0.))

        loss_obj = tf.keras.losses.SparseCategoricalCrossentropy()
        metrics = [
            tf.keras.metrics.CategoricalAccuracy(name='acc@1'),
            tf.keras.metrics.TopKCategoricalAccuracy(k=5, name='acc@5'),
            tfa.metrics.MeanMetricWrapper(loss_obj, name='ce_loss'),
            tfa.metrics.MeanMetricWrapper(compress_algo.loss, name='cr_loss')
        ]
        compress_model.add_loss(compress_algo.loss)
        compress_model.compile(
            loss=loss_obj,
            optimizer=tf.keras.optimizers.Adam(5e-3),
            metrics=metrics,
        )
        compress_model.fit(dataset_train, validation_data=dataset_test, epochs=5,
                           callbacks=[tf.keras.callbacks.ReduceLROnPlateau(),
                                      get_progress_bar(
                                          stateful_metrics=['loss'] + [metric.name for metric in metrics]),
                                      *get_callbacks(
                                          include_tensorboard=True,
                                          track_lr=False,
                                          profile_batch=0,
                                          initial_step=0,
                                          log_dir='logdir/',
                                          ckpt_dir='logdir/cpt/'),
                                      compression_callbacks,
                                      SparsityRateTestCallback()])
|
# -*- coding: utf-8 -*-
import json
import logging
from pathlib import Path
from typing import Dict
from cova.pipeline.pipeline import COVAAutoTune
# Module-level logger; basicConfig makes INFO-level records visible.
logger = logging.getLogger(__name__)
logging.basicConfig(level="INFO")
def parse_config(config_file: str) -> "tuple[dict, list]":
    """Parses config file with pipeline definition.

    Values defined under the top-level "globals" key may be referenced
    elsewhere in the file as "$globals#<name>"; every such reference is
    substituted with the global's value and the config is re-parsed.

    Args:
        config_file (str): path to the config file (json format) with the
            pipeline configuration.

    Returns:
        tuple: (config dict with "globals" removed,
                [single_stage, stage_params]).  When the file defines no
                globals, single_stage is "" and stage_params is [].
    """
    with open(config_file, "r") as config_fn:
        config = json.load(config_fn)
    config_str = Path(config_file).read_text()

    global_definitions = config.pop("globals", None)
    if global_definitions is None:
        # BUGFIX: the original returned a bare dict here while returning a
        # (config, [stage, params]) pair otherwise, crashing callers that
        # unpack the pair.  Return an empty stage selection instead.
        return config, ["", []]

    single_stage = global_definitions.get("single_stage", "")
    stage_config = global_definitions.get("stage_params", [])
    for key, value in global_definitions.items():
        subst_str = "$globals#{}".format(key)
        if subst_str in config_str:
            if value is None:
                value = ""
            # str() so non-string globals (numbers, booleans) substitute
            # cleanly instead of raising TypeError.
            config_str = config_str.replace(subst_str, str(value))
    config = json.loads(config_str)
    _ = config.pop("globals", None)
    return config, [single_stage, stage_config]
def _run(config_file: str) -> None:
    """Runs the pipeline defined in the config file.

    Args:
        config_file (str): path to the config file (json format) with the
            pipeline configuration.
    """
    config, (single_stage, stage_config) = parse_config(config_file)
    auto_tuner = COVAAutoTune()
    auto_tuner.load_pipeline(config, single_stage)
    # An empty stage name means "run the whole pipeline".
    if single_stage != '':
        logger.info(
            "Executing single stage %s with parameters %s",
            single_stage,
            ", ".join(stage_config),
        )
        auto_tuner.run_stage(single_stage, stage_config)
    else:
        auto_tuner.run()
|
"""
Tests application settings are connected to Django
settings and have sensible default values.
"""
from django.test import TestCase, override_settings
from drf_signed_auth import settings
from drf_signed_auth.compat import reload
from rest_framework import permissions
class SettingsTest(TestCase):
    """Check default values and Django-settings overrides for every
    drf_signed_auth setting."""

    def setUp(self):
        self.sut = settings
        # Tests reload the settings module under overridden Django settings;
        # restore the pristine module state when each test finishes.
        self.addCleanup(lambda: reload(settings))

    def _reloaded(self, name, **overrides):
        """Reload drf_signed_auth settings under the given Django overrides
        and return the value of the named setting."""
        with override_settings(**overrides):
            reload(settings)
            return getattr(settings, name)

    def test_default_ttl(self):
        self.assertEqual(30, settings.SIGNED_URL_TTL)

    def test_ttl_set_from_django_settings(self):
        self.assertEqual(
            9999, self._reloaded('SIGNED_URL_TTL', SIGNED_URL_TTL=9999))

    def test_default_signature_param(self):
        self.assertEqual('sig', settings.SIGNED_URL_QUERY_PARAM)

    def test_signature_param_from_django_settings(self):
        self.assertEqual(
            'serenity',
            self._reloaded('SIGNED_URL_QUERY_PARAM',
                           SIGNED_URL_QUERY_PARAM='serenity'))

    def test_default_permission_classes(self):
        self.assertEqual([permissions.IsAuthenticated],
                         settings.SIGNED_URL_PERMISSION_CLASSES)

    def test_permission_classes_from_django_settings(self):
        wanted = ['some', 'other', 'classes']
        self.assertEqual(
            wanted,
            self._reloaded('SIGNED_URL_PERMISSION_CLASSES',
                           SIGNED_URL_PERMISSION_CLASSES=wanted))
|
from conans.test.utils.cpp_test_files import cpp_hello_source_files, cpp_hello_conan_files
from conans.test.utils.go_test_files import go_hello_source_files, go_hello_conan_files
import os
from conans.paths import PACKAGE_TGZ_NAME
import tempfile
from conans.test import CONAN_TEST_FOLDER
from conans.tools import untargz
from conans.errors import ConanException
def temp_folder():
    """Return a fresh temporary directory whose path contains a space,
    to exercise space-in-path handling in tests."""
    base = tempfile.mkdtemp(suffix='conans', dir=CONAN_TEST_FOLDER)
    spaced = os.path.join(base, "path with spaces")
    os.makedirs(spaced)
    return spaced
def uncompress_packaged_files(paths, package_reference):
    """Extract a package's .tgz into a new temp folder and return it.

    Raises ConanException when the package tarball does not exist.
    """
    package_path = paths.package(package_reference)
    tgz_path = os.path.join(package_path, PACKAGE_TGZ_NAME)
    if not os.path.exists(tgz_path):
        raise ConanException("%s not found in %s" % (PACKAGE_TGZ_NAME, package_path))
    destination = temp_folder()
    untargz(tgz_path, destination)
    return destination
def scan_folder(folder):
    """Return the sorted, slash-normalized relative paths of every file
    found (recursively) under `folder`."""
    found = []
    for root, _, files in os.walk(folder):
        rel_root = os.path.relpath(root, folder)
        found.extend(
            os.path.normpath(os.path.join(rel_root, name)).replace("\\", "/")
            for name in files)
    return sorted(found)
def hello_source_files(number=0, deps=None, lang='cpp'):
    """Generate the source files for a HelloX test package.

    param number: integer defining the package name (Hello0, Hello1, HelloX)
    param deps: list of integers naming the HelloN packages this one depends on
    param lang: 'cpp' or 'go' source generator (anything else yields None)

    e.g. (3, [4, 7]) creates a Hello3 package, with message "Hello 3",
    depending on both Hello4 and Hello7; its exe output could be:
    Hello 3, Hello 4, Hello 7.
    """
    generators = {'cpp': cpp_hello_source_files, 'go': go_hello_source_files}
    if lang in generators:
        return generators[lang](number, deps)
def hello_conan_files(conan_reference, number=0, deps=None, language=0, lang='cpp'):
    """Generate hello source files plus the CONANFILE that manages them.

    param number: integer defining the package name (Hello0, Hello1, HelloX)
    param deps: list of integers naming the HelloN packages this one depends on
    param language: 0 = English, 1 = Spanish (cpp generator only)
    param lang: 'cpp' or 'go' generator (anything else yields None)

    e.g. (3, [4, 7]) creates a Hello3 package, with message "Hello 3",
    depending on both Hello4 and Hello7.
    """
    if lang == 'go':
        return go_hello_conan_files(conan_reference, number, deps)
    if lang == 'cpp':
        return cpp_hello_conan_files(conan_reference, number, deps, language)
|
# -*- coding: utf-8 -*-
import math
import collections
import bisect
import heapq
import time
import random
import itertools
"""
created by shhuan at 2017/10/20 08:46
"""
# Appears to decide (via stdin/stdout) whether A can be split into two
# arithmetic progressions sharing a common difference K.  All slope
# arithmetic uses float division — assumes the judge's inputs keep
# (A[j]-A[i])/(j-i) exactly representable; TODO confirm.
N = int(input())
A = [int(x) for x in input().split()]

# Count candidate slopes from pairs anchored at i = 0 and i = 1 only;
# stop as soon as some slope has been seen twice.
ks = collections.defaultdict(int)
found = False
for i in range(N):
    if found or i > 1:
        break
    for j in range(i+1, N):
        k = (A[j]-A[i])/(j-i)
        ks[k] += 1
        if ks[k] > 1:
            found = True
            break

# K = the most frequently observed slope.
K = [(k, v) for k,v in ks.items() if v == max(ks.values())][0][0]

# Mark as sequence 1 every element lying on the slope-K line through A[0].
vis = [0] * N
vis[0] = 1
for i in range(1, N):
    if (A[i]-A[0])/i == K:
        vis[i] = 1

# b = first index not absorbed by sequence 1 (stays 0 if there is none).
b = 0
for i in range(N):
    if vis[i] == 0:
        b = i
        break

if b == 0:
    # No leftover element was found; the script reports 'No'.
    print('No')
else:
    # Mark as sequence 2 every remaining element on the slope-K line
    # through A[b].
    vis[b] = 2
    for i in range(b+1, N):
        if (A[i]-A[b])/(i-b) == K:
            vis[i] = 2
    c1 = vis.count(1)
    c2 = vis.count(2)
    # Success only if both sequences are non-empty and together cover A.
    if c1 > 0 and c2 > 0 and c1+c2==N:
        print('Yes')
    else:
        print('No')
|
# Demonstrate enumerate(): it yields (index, element) pairs for a sequence.
numblist = [10, 29, 30, 41]
for position, value in enumerate(numblist):
    print(position, value)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###########################################################
# WARNING: Generated code! #
# ************************** #
# Manual changes may get lost if file is generated again. #
# Only code inside the [MANUAL] tags will be kept. #
###########################################################
from flexbe_core import Behavior, Autonomy, OperatableStateMachine, ConcurrencyContainer, PriorityContainer, Logger
from sara_flexbe_states.sara_set_head_angle import SaraSetHeadAngle
from sara_flexbe_states.Get_Entity_By_ID import GetEntityByID
from flexbe_states.wait_state import WaitState
from sara_flexbe_states.SetKey import SetKey
from sara_flexbe_behaviors.action_turn_sm import action_turnSM
from flexbe_states.check_condition_state import CheckConditionState
# Additional imports can be added inside the following tags
# [MANUAL_IMPORT]
# [/MANUAL_IMPORT]
'''
Created on Sun May 05 2019
@author: Quentin Gaillot
'''
class Action_findPersonByIDSM(Behavior):
    '''
    Find a person with its ID.

    Generated FlexBE behavior (see the file header warning): looks up the
    entity matching userdata `personID` while sweeping the head and turning
    the base; outputs `personEntity`, which is set to "unknown" when the
    rotation loop completes without the entity being found.
    '''

    def __init__(self):
        super(Action_findPersonByIDSM, self).__init__()
        self.name = 'Action_findPersonByID'

        # parameters of this behavior

        # references to used behaviors
        self.add_behavior(action_turnSM, 'Container/Rotation/action_turn')

        # Additional initialization code can be added inside the following tags
        # [MANUAL_INIT]

        # [/MANUAL_INIT]

        # Behavior comments:

    def create(self):
        # Coordinates in the "# x:... y:..." comments are FlexBE editor
        # layout hints only; they have no runtime effect.
        # x:93 y:313, x:514 y:143
        _state_machine = OperatableStateMachine(outcomes=['found', 'not_found'], input_keys=['className', 'personID'], output_keys=['personEntity'])
        _state_machine.userdata.className = "person"
        _state_machine.userdata.personID = 0
        _state_machine.userdata.personEntity = ""

        # Additional creation code can be added inside the following tags
        # [MANUAL_CREATE]

        # [/MANUAL_CREATE]

        # Rotation sub-machine: sweep the head center/right/center/left with
        # 4 s pauses, then turn 180 degrees; after the second full sweep
        # (cpt == 1) it gives up and sets personEntity to "unknown".
        # x:707 y:760
        _sm_rotation_0 = OperatableStateMachine(outcomes=['end'], output_keys=['personEntity'])

        with _sm_rotation_0:
            # x:51 y:38
            OperatableStateMachine.add('Set 180 degres',
                                       SetKey(Value=3.1416),
                                       transitions={'done': 'set cpt to 0'},
                                       autonomy={'done': Autonomy.Off},
                                       remapping={'Key': 'rotation'})

            # x:613 y:470
            OperatableStateMachine.add('action_turn',
                                       self.use_behavior(action_turnSM, 'Container/Rotation/action_turn'),
                                       transitions={'finished': 'check is cpt is 1', 'failed': 'check is cpt is 1'},
                                       autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit},
                                       remapping={'rotation': 'rotation'})

            # x:421 y:54
            OperatableStateMachine.add('Look Right',
                                       SaraSetHeadAngle(pitch=0.1, yaw=-1.5),
                                       transitions={'done': 'w2'},
                                       autonomy={'done': Autonomy.Off})

            # x:265 y:56
            OperatableStateMachine.add('w1',
                                       WaitState(wait_time=4),
                                       transitions={'done': 'Look Right'},
                                       autonomy={'done': Autonomy.Off})

            # x:630 y:56
            OperatableStateMachine.add('w2',
                                       WaitState(wait_time=4),
                                       transitions={'done': 'center'},
                                       autonomy={'done': Autonomy.Off})

            # x:250 y:177
            OperatableStateMachine.add('Look Center',
                                       SaraSetHeadAngle(pitch=0.1, yaw=0),
                                       transitions={'done': 'w1'},
                                       autonomy={'done': Autonomy.Off})

            # x:618 y:304
            OperatableStateMachine.add('Look Left 2',
                                       SaraSetHeadAngle(pitch=0.1, yaw=1.5),
                                       transitions={'done': 'w4'},
                                       autonomy={'done': Autonomy.Off})

            # x:612 y:138
            OperatableStateMachine.add('center',
                                       SaraSetHeadAngle(pitch=0.1, yaw=0),
                                       transitions={'done': 'w3'},
                                       autonomy={'done': Autonomy.Off})

            # x:635 y:214
            OperatableStateMachine.add('w3',
                                       WaitState(wait_time=4),
                                       transitions={'done': 'Look Left 2'},
                                       autonomy={'done': Autonomy.Off})

            # x:636 y:394
            OperatableStateMachine.add('w4',
                                       WaitState(wait_time=4),
                                       transitions={'done': 'action_turn'},
                                       autonomy={'done': Autonomy.Off})

            # x:59 y:128
            OperatableStateMachine.add('set cpt to 0',
                                       SetKey(Value=0),
                                       transitions={'done': 'Look Center'},
                                       autonomy={'done': Autonomy.Off},
                                       remapping={'Key': 'cpt'})

            # x:400 y:499
            OperatableStateMachine.add('check is cpt is 1',
                                       CheckConditionState(predicate=lambda x: x==1),
                                       transitions={'true': 'set entity to unknown', 'false': 'set cpt to 1'},
                                       autonomy={'true': Autonomy.Off, 'false': Autonomy.Off},
                                       remapping={'input_value': 'cpt'})

            # x:414 y:210
            OperatableStateMachine.add('set cpt to 1',
                                       SetKey(Value=1),
                                       transitions={'done': 'Look Right'},
                                       autonomy={'done': Autonomy.Off},
                                       remapping={'Key': 'cpt'})

            # x:605 y:659
            OperatableStateMachine.add('set entity to unknown',
                                       SetKey(Value="unknown"),
                                       transitions={'done': 'end'},
                                       autonomy={'done': Autonomy.Off},
                                       remapping={'Key': 'personEntity'})

        # Find-entity sub-machine: poll GetEntityByID once per second until
        # the entity with the requested ID is perceived.
        # x:683 y:188
        _sm_find_entity_1 = OperatableStateMachine(outcomes=['found'], input_keys=['personID'], output_keys=['personEntity'])

        with _sm_find_entity_1:
            # x:226 y:188
            OperatableStateMachine.add('get person',
                                       GetEntityByID(),
                                       transitions={'found': 'found', 'not_found': 'WaitState'},
                                       autonomy={'found': Autonomy.Off, 'not_found': Autonomy.Off},
                                       remapping={'ID': 'personID', 'Entity': 'personEntity'})

            # x:194 y:40
            OperatableStateMachine.add('WaitState',
                                       WaitState(wait_time=1),
                                       transitions={'done': 'get person'},
                                       autonomy={'done': Autonomy.Off})

        # Concurrency container: the search and the rotation sweep run in
        # parallel; whichever finishes first decides the container outcome.
        # x:372 y:27, x:392 y:217, x:400 y:139, x:330 y:458
        _sm_container_2 = ConcurrencyContainer(outcomes=['found', 'not_found'], input_keys=['className', 'personID'], output_keys=['personEntity'], conditions=[
                                        ('not_found', [('Rotation', 'end')]),
                                        ('found', [('Find Entity', 'found')])
                                        ])

        with _sm_container_2:
            # x:131 y:44
            OperatableStateMachine.add('Find Entity',
                                       _sm_find_entity_1,
                                       transitions={'found': 'found'},
                                       autonomy={'found': Autonomy.Inherit},
                                       remapping={'personID': 'personID', 'personEntity': 'personEntity'})

            # x:135 y:199
            OperatableStateMachine.add('Rotation',
                                       _sm_rotation_0,
                                       transitions={'end': 'not_found'},
                                       autonomy={'end': Autonomy.Inherit},
                                       remapping={'personEntity': 'personEntity'})

        with _state_machine:
            # x:67 y:42
            OperatableStateMachine.add('Look Center',
                                       SaraSetHeadAngle(pitch=0.1, yaw=0),
                                       transitions={'done': 'Container'},
                                       autonomy={'done': Autonomy.Off})

            # x:278 y:138
            OperatableStateMachine.add('Look Center Not Found',
                                       SaraSetHeadAngle(pitch=0.1, yaw=0),
                                       transitions={'done': 'not_found'},
                                       autonomy={'done': Autonomy.Off})

            # x:63 y:126
            OperatableStateMachine.add('Container',
                                       _sm_container_2,
                                       transitions={'found': 'WaitState', 'not_found': 'Look Center Not Found'},
                                       autonomy={'found': Autonomy.Inherit, 'not_found': Autonomy.Inherit},
                                       remapping={'className': 'className', 'personID': 'personID', 'personEntity': 'personEntity'})

            # x:67 y:222
            OperatableStateMachine.add('WaitState',
                                       WaitState(wait_time=1),
                                       transitions={'done': 'found'},
                                       autonomy={'done': Autonomy.Off})

        return _state_machine

    # Private functions can be added inside the following tags
    # [MANUAL_FUNC]

    # [/MANUAL_FUNC]
|
import os
# Split a raw corpus into one context file per input line, numbered from 1.
src = 'C:/RavenFinetune/raw/medical_nlp.txt'
with open(src, 'r', encoding='utf-8') as infile:
    data = infile.readlines()
print(len(data))
for cnt, line in enumerate(data, start=1):
    with open('C:/RavenFinetune/contexts/medical_%s.txt' % cnt, 'w', encoding='utf-8') as outfile:
        outfile.write(line.strip())
"""
~/utils/update_ontario_stocking.py
Created: 23 Jan 2019 15:29:22
DESCRIPTION:
This script updates the ontario data in the lake wide cwt database.
Updates include tag type, and sequence number for sequential cwts, cwt
manufacturer (where it should have been Micro Mark (MM))
Updates are preformed on both stocking (below) and recovery (NOT YET)
tables.
This script should be run after the lakewide database has been
built and populated with both US and ontario data.
A. Cottrill
=============================================================
"""
import csv
import re
from collections import namedtuple

from fsdviz.common.models import CWT, CWTsequence, Agency
from fsdviz.stocking.models import StockingEvent
# ======================================================
# FSIS_ID to ID
# to update the OMNR stocking data, we need a dictionary that maps
# the ontario id values (fs_event) to the StockingEvent.Id in the
# current database
# get the id numbers and notes for each lake huron ontario stocking event
# All Lake Huron stocking events reported by the OMNR agency.
ont_events = StockingEvent.objects.filter(
    agency__abbrev="OMNR", jurisdiction__lake__abbrev="HU"
)
# ontario fs_event numbers are in the notes field as 'fs_event:
# <fsis_id>' this code extracts the fsis_id from the notes and pairs
# it with its corresponding id in the current lakewide database.
# returns a list of tuples of the form: (<fsis_id>, <id>)
# id_pairs = [(int(re.match('fs_event: (\d+)',x['notes']).groups()[0]), x['id'])
#             for x in ont_events]
# create a dictionary with the fsis_id as key - makes it easy to get
# associated id for the lakewide db:
fsis2lwdb = {x.agency_stock_id: x.id for x in ont_events}
# ======================================================
# STOCKED SEQUENTIAL CWTS
print("Updating Ontario's Sequential tags...")
# the csv file "MNRF_stocking_events_sequential_cwts.csv" contains a
# list of stocking events associated with sequential cwts and the start
# and end of the range associated with each event.
# create a named tuple that will hold our stocking event info:
seqCWT = namedtuple("seqCWT", "fsis_event, cwt_number, seq_start, seq_end")
fname = "utils/patches/MNRF_stocking_events_sequential_cwts.csv"
with open(fname) as csvfile:
    reader = csv.reader(csvfile)
    next(reader, None)  # skip header
    seqcwt_events = [seqCWT(*x) for x in reader]
# echo a small sample so the operator can sanity-check the parse:
for x in seqcwt_events[:3]:
    print(x)
# make sure that all of the cwts are in the database - and that Lake
# Huron is the only lake and agency to stock those tags.
# NOTE(review): cwt_numbers is never used after this line.
cwt_numbers = list({x.cwt_number for x in seqcwt_events})
# print any event whose cwt number is not yet in the database (this
# same check is repeated in the update loop below, which also records
# the misses):
for event in seqcwt_events:
    cwt = CWT.objects.filter(cwt_number=event.cwt_number).first()
    if cwt is None:
        print(event)
# now loop over the sequential cwt events and find the associated cwt
# and cwt_sequences in our database. Update the cwt start, end and tag
# type for each one. Keep a list of errors and print them out if
# anything goes wrong.
oops = []
for event in seqcwt_events:
    cwt = CWT.objects.filter(cwt_number=event.cwt_number).first()
    if cwt is None:
        # cwt missing from the database - remember it and move on.
        print(event)
        oops.append(event)
        continue
    lwdb_id = fsis2lwdb[event.fsis_event]
    stocking_event = StockingEvent.objects.get(id=lwdb_id)
    # attach the stocking event to the (possibly new) sequence range:
    cwt_seq, created = CWTsequence.objects.get_or_create(
        cwt=cwt, sequence=(int(event.seq_start), int(event.seq_end))
    )
    cwt_seq.events.add(stocking_event)
    cwt.tag_type = "sequential"
    cwt.save()
# report any records that could not be matched to a cwt:
if oops:
    print("There were problems with the following sequential tag records:")
    for x in oops:
        print(x)
# make sure that there aren't any stocking events associated with
# sequential cwt series that end with 1 - they should have all been
# fixed in the last step.
# BUGFIX: the tag_type value was misspelled "sequental", so this filter
# matched nothing and the assert below was vacuously true.
oops = StockingEvent.objects.filter(
    cwt_series__seq_end=1, cwt_series__cwt__tag_type="sequential"
)
assert len(oops) == 0

# delete all cwt series associated with sequential tags that have no
# sequence range - these were created when the cwt was added but no
# longer point to any stocking events
childless_cwts = CWTsequence.objects.filter(
    cwt__tag_type="sequential", sequence__isnull=True
)
childless_cwts.delete()
#
# ======================================================
# CWT MANUFACTURER
print("Updating MicroMark tags...")
# this file yields a list of cwt numbers (without dashes) that we
# know were manufactured by Micro Mark. Only cwt numbers that are
# unique to Micro Mark are included (63-59-01, 63-41-04,
# 63-43-04, 63-56-03 were manufactured by both MM and NMT and must be
# handled separately (below))
fname = "utils/patches/MNRF_MicroMark_cwts.csv"
with open(fname) as csvfile:
    reader = csv.reader(csvfile)
    next(reader, None)  # skip header
    mm_cwts = [x[0] for x in reader]
# NOTE(review): `Agency` is not among the names imported at the top of
# this script - it must be added to the fsdviz.common.models import or
# this line raises NameError at runtime.
omnr = Agency.objects.get(abbrev="OMNR")
for cwt_num in mm_cwts:
    # each number must resolve to exactly one OMNR-stocked cwt:
    qs = CWT.objects.filter(
        cwt_number=cwt_num, cwt_series__events__agency=omnr
    ).distinct()
    assert len(qs) == 1
    cwt = qs[0]
    cwt.manufacturer = "mm"
    cwt.save()
# these are the cwt numbers that have been purchased from two
# vendors. The event numbers are the stocking event IDs that used the
# Micro Mark tags.
micromark_events = {
    # chinook stocked by ssa in 2001 - Not in FSIS Yet!
    # "634104": [],
    # chinook stocked by ssa in 2001 - Not in FSIS Yet!
    # "634304": [],
    "635603": [2650],
    "635901": [2379, 2928],
}

# now loop over cwt numbers that have been purchased from two
# manufacturers and get the events associated with each one. Create a
# new CWT object and new cwt_sequence. Finally, get the original
# stocking event and assign it to the sequence object created above.
for cwt_num, event_nums in micromark_events.items():
    print("Applying updates for both {} tags...".format(cwt_num))
    cwt_obj, created = CWT.objects.get_or_create(
        cwt_number=cwt_num, tag_type="cwt", tag_count=0, manufacturer="mm"
    )
    cwt_seq, created = CWTsequence.objects.get_or_create(cwt=cwt_obj, sequence=(0, 1))
    if event_nums:
        for fsis_id in event_nums:
            lwdb_id = fsis2lwdb.get(str(fsis_id))
            if lwdb_id:
                event = StockingEvent.objects.get(id=lwdb_id)
                # detach the event from its old series before re-pointing it:
                event.cwt_series.clear()
                cwt_seq.events.add(event)
            else:
                # BUGFIX: the message used the literal "/t" instead of a tab
                # escape, and read "unable for find".
                print("\t unable to find FSIS event: {}".format(fsis_id))

print("Done updating Ontario-Huron tags.")
|
import aspose.slides as slides
def charts_number_format():
    """Create a clustered column chart, apply a preset percentage number
    format (0.00%) to every data point, and save the presentation."""
    #ExStart:number_format
    # The path to the documents directory.
    outDir = "./examples/out/"
    # Instantiate the presentation
    with slides.Presentation() as pres:
        # Access the first presentation slide
        slide = pres.slides[0]
        # Adding a default clustered column chart
        chart = slide.shapes.add_chart(slides.charts.ChartType.CLUSTERED_COLUMN, 50, 50, 500, 400)
        # Accessing the chart series collection
        series = chart.chart_data.series
        # Setting the preset number format
        # Traverse through every chart series
        for ser in series:
            # Traverse through every data cell in series
            for cell in ser.data_points:
                # Setting the number format
                cell.value.as_cell.preset_number_format = 10 #0.00%
        # Saving presentation
        pres.save(outDir + "charts_number_format_out.pptx", slides.export.SaveFormat.PPTX)
    #ExEnd:number_format
# Message constants: `msg` is a greeting prefix (note the trailing space),
# `subject` is a complete subject line.
msg = "Hello "
subject = "Hello world"
class phasebitsifc:
    """Callback registry for phase-bit output.

    Listeners implement the ``out_callbacks`` interface and are registered
    via ``register_callbacks``; each registered object is expected to
    receive phase data through its ``send_phases`` method.
    """

    class out_callbacks:
        """Interface implemented by listeners interested in phase output."""

        def send_phases(self, phases_out):
            """Deliver computed phases to the listener (override in subclass).

            BUGFIX: the original declaration was missing the trailing colon
            and a body, which made the whole module a SyntaxError.
            """
            raise NotImplementedError

    def __init__(self):
        # Flat list of registered callback objects.
        self.m_callbacks = []

    def register_callbacks(self, callback):
        """Register a callback object.

        BUGFIX: the original wrapped the existing list inside a new one
        (producing ever-deeper nesting) and referenced the undefined name
        ``callbacks``; appending keeps the list flat and usable.
        """
        self.m_callbacks.append(callback)
|
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
from overview_tab.map_graph import map_graph
from overview_tab.cumulative_nb_report import cumulative_graph
from overview_tab.survivors_report import first_time_assault_count, first_time_assault_percentage, agressor
def create_overview_tab(survey_df, all_reports_df, safety_df, safety_change_df, mental_health_df, working_situation_df,
                        big_bubble_size, small_bubble_size):
    """Assemble the overview tab: a UK map graph on the left; a cumulative
    reports graph plus four headline survey statistics on the right.

    Args:
        survey_df: survey responses; feeds the cumulative graph and the
            headline statistic helpers.
        all_reports_df, safety_df, safety_change_df, mental_health_df,
            working_situation_df: per-topic dataframes consumed by map_graph.
        big_bubble_size, small_bubble_size: bubble sizes passed to map_graph.

    Returns:
        An html.Div containing the complete two-column tab layout.
    """
    tab_content = html.Div([
        dbc.Row([
            # Column 1: Left Side — bubble map of reports across the UK.
            dbc.Col([
                html.H4(children=html.Span("Women all over the UK are experiencing difficulties during COVID-19",
                                           className="graph-heading-span"),
                        className="graph-heading"
                        ),
                html.Div(children=[
                    html.Div(children=[
                        dcc.Graph(figure=map_graph(
                            all_reports_df, safety_df, safety_change_df, mental_health_df, working_situation_df,
                            big_bubble_size, small_bubble_size))])
                ])
            ], md=6),
            # Column 2: Right Side — cumulative graph + four headline numbers.
            dbc.Col([
                html.Div(
                    [html.H4(children=html.Span("Many of you have been in touch since we started our campaign",
                                                className="graph-heading-span"),
                             className="graph-heading"
                             ),
                     html.Div(children=
                              html.Div(children=[dcc.Graph(figure=cumulative_graph(survey_df))]))
                     ]),
                html.Div([
                    html.H4(children=html.Span("This is what you told us about your experience during COVID-19",
                                               className="graph-heading-span"),
                            className="graph-heading"
                            ),
                    # NOTE(review): the helper names below do not obviously match
                    # the displayed captions (e.g. first_time_assault_percentage
                    # backs the "anxious and/or depressed" caption, and
                    # agressor(..., "partner"/"father") backs the two "future"
                    # captions) — confirm each stat uses the intended helper.
                    dbc.Row([
                        dbc.Col(html.Div([html.Span(first_time_assault_count(survey_df), className="overviewNumber"),
                                          html.P("of you told us about feeling unsafe at home during the lockdown",
                                                 className="overviewText")],
                                         className="overviewSurvivorContainer")),
                        dbc.Col(
                            html.Div([html.Span(first_time_assault_percentage(survey_df), className="overviewNumber"),
                                      html.P("of you tell us they are anxious and/or depressed",
                                             className="overviewText")],
                                     className="overviewSurvivorContainer"))
                    ]),
                    dbc.Row([
                        dbc.Col(html.Div([html.Span(agressor(survey_df, "partner"), className="overviewNumber"),
                                          html.P("of you feel insecure about the future",
                                                 className="overviewText")],
                                         className="overviewSurvivorContainer")),
                        dbc.Col(html.Div([html.Span(agressor(survey_df, "father"), className="overviewNumber"),
                                          html.P("of you are afraid of the future economic situation",
                                                 className="overviewText")],
                                         className="overviewSurvivorContainer"))
                    ])
                ])
            ], md=6)
        ])
    ])
    return tab_content
|
# This file is automatically generated by the rmf-codegen project.
#
# The Python code generator is maintained by Lab Digital. If you want to
# contribute to this project then please do not edit this file directly
# but send a pull request to the Lab Digital fork of rmf-codegen at
# https://github.com/labd/rmf-codegen
import datetime
import enum
import typing
from ._abstract import _BaseType
from .common import BaseResource
if typing.TYPE_CHECKING:
from .common import CreatedBy, LastModifiedBy, Reference
from .message import UserProvidedIdentifiers
__all__ = [
"AzureEventGridDestination",
"AzureServiceBusDestination",
"ChangeSubscription",
"DeliveryCloudEventsFormat",
"DeliveryFormat",
"DeliveryPlatformFormat",
"Destination",
"GoogleCloudPubSubDestination",
"IronMqDestination",
"MessageDelivery",
"MessageSubscription",
"PayloadNotIncluded",
"ResourceCreatedDelivery",
"ResourceDeletedDelivery",
"ResourceUpdatedDelivery",
"SnsDestination",
"SqsDestination",
"Subscription",
"SubscriptionChangeDestinationAction",
"SubscriptionDelivery",
"SubscriptionDraft",
"SubscriptionHealthStatus",
"SubscriptionPagedQueryResponse",
"SubscriptionSetChangesAction",
"SubscriptionSetKeyAction",
"SubscriptionSetMessagesAction",
"SubscriptionUpdate",
"SubscriptionUpdateAction",
]
class ChangeSubscription(_BaseType):
    """Subscription to change notifications for one resource type (generated model)."""

    resource_type_id: str

    def __init__(self, *, resource_type_id: str):
        self.resource_type_id = resource_type_id
        super().__init__()

    @classmethod
    def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "ChangeSubscription":
        from ._schemas.subscription import ChangeSubscriptionSchema

        return ChangeSubscriptionSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import ChangeSubscriptionSchema

        return ChangeSubscriptionSchema().dump(self)
class DeliveryFormat(_BaseType):
    """Base type for delivery payload formats; `type` is the discriminator."""

    type: str

    def __init__(self, *, type: str):
        self.type = type
        super().__init__()

    @classmethod
    def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "DeliveryFormat":
        # Dispatch on the "type" discriminator to the concrete subclass schema.
        # NOTE(review): falls through and returns None for unknown discriminator
        # values — confirm this is the generator's intended behavior.
        if data["type"] == "CloudEvents":
            from ._schemas.subscription import DeliveryCloudEventsFormatSchema

            return DeliveryCloudEventsFormatSchema().load(data)
        if data["type"] == "Platform":
            from ._schemas.subscription import DeliveryPlatformFormatSchema

            return DeliveryPlatformFormatSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import DeliveryFormatSchema

        return DeliveryFormatSchema().dump(self)
class DeliveryCloudEventsFormat(DeliveryFormat):
    """CloudEvents delivery format (discriminator type="CloudEvents")."""

    cloud_events_version: str

    def __init__(self, *, cloud_events_version: str):
        self.cloud_events_version = cloud_events_version
        super().__init__(type="CloudEvents")

    @classmethod
    def deserialize(
        cls, data: typing.Dict[str, typing.Any]
    ) -> "DeliveryCloudEventsFormat":
        from ._schemas.subscription import DeliveryCloudEventsFormatSchema

        return DeliveryCloudEventsFormatSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import DeliveryCloudEventsFormatSchema

        return DeliveryCloudEventsFormatSchema().dump(self)
class DeliveryPlatformFormat(DeliveryFormat):
    """Default platform delivery format (discriminator type="Platform"); no extra fields."""

    def __init__(self):
        super().__init__(type="Platform")

    @classmethod
    def deserialize(
        cls, data: typing.Dict[str, typing.Any]
    ) -> "DeliveryPlatformFormat":
        from ._schemas.subscription import DeliveryPlatformFormatSchema

        return DeliveryPlatformFormatSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import DeliveryPlatformFormatSchema

        return DeliveryPlatformFormatSchema().dump(self)
class Destination(_BaseType):
    """Base type for message-queue destinations; `type` is the discriminator."""

    type: str

    def __init__(self, *, type: str):
        self.type = type
        super().__init__()

    @classmethod
    def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "Destination":
        # Dispatch on the "type" discriminator to the concrete subclass schema.
        # NOTE(review): falls through and returns None for unknown discriminator
        # values — confirm this is the generator's intended behavior.
        if data["type"] == "EventGrid":
            from ._schemas.subscription import AzureEventGridDestinationSchema

            return AzureEventGridDestinationSchema().load(data)
        if data["type"] == "AzureServiceBus":
            from ._schemas.subscription import AzureServiceBusDestinationSchema

            return AzureServiceBusDestinationSchema().load(data)
        if data["type"] == "GoogleCloudPubSub":
            from ._schemas.subscription import GoogleCloudPubSubDestinationSchema

            return GoogleCloudPubSubDestinationSchema().load(data)
        if data["type"] == "IronMQ":
            from ._schemas.subscription import IronMqDestinationSchema

            return IronMqDestinationSchema().load(data)
        if data["type"] == "SNS":
            from ._schemas.subscription import SnsDestinationSchema

            return SnsDestinationSchema().load(data)
        if data["type"] == "SQS":
            from ._schemas.subscription import SqsDestinationSchema

            return SqsDestinationSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import DestinationSchema

        return DestinationSchema().dump(self)
class AzureEventGridDestination(Destination):
    """Azure Event Grid destination (discriminator type="EventGrid")."""

    uri: str
    access_key: str

    def __init__(self, *, uri: str, access_key: str):
        self.uri = uri
        self.access_key = access_key
        super().__init__(type="EventGrid")

    @classmethod
    def deserialize(
        cls, data: typing.Dict[str, typing.Any]
    ) -> "AzureEventGridDestination":
        from ._schemas.subscription import AzureEventGridDestinationSchema

        return AzureEventGridDestinationSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import AzureEventGridDestinationSchema

        return AzureEventGridDestinationSchema().dump(self)
class AzureServiceBusDestination(Destination):
    """Azure Service Bus destination (discriminator type="AzureServiceBus")."""

    connection_string: str

    def __init__(self, *, connection_string: str):
        self.connection_string = connection_string
        super().__init__(type="AzureServiceBus")

    @classmethod
    def deserialize(
        cls, data: typing.Dict[str, typing.Any]
    ) -> "AzureServiceBusDestination":
        from ._schemas.subscription import AzureServiceBusDestinationSchema

        return AzureServiceBusDestinationSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import AzureServiceBusDestinationSchema

        return AzureServiceBusDestinationSchema().dump(self)
class GoogleCloudPubSubDestination(Destination):
    """Google Cloud Pub/Sub destination (discriminator type="GoogleCloudPubSub")."""

    project_id: str
    topic: str

    def __init__(self, *, project_id: str, topic: str):
        self.project_id = project_id
        self.topic = topic
        super().__init__(type="GoogleCloudPubSub")

    @classmethod
    def deserialize(
        cls, data: typing.Dict[str, typing.Any]
    ) -> "GoogleCloudPubSubDestination":
        from ._schemas.subscription import GoogleCloudPubSubDestinationSchema

        return GoogleCloudPubSubDestinationSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import GoogleCloudPubSubDestinationSchema

        return GoogleCloudPubSubDestinationSchema().dump(self)
class IronMqDestination(Destination):
    """IronMQ destination (discriminator type="IronMQ")."""

    uri: str

    def __init__(self, *, uri: str):
        self.uri = uri
        super().__init__(type="IronMQ")

    @classmethod
    def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "IronMqDestination":
        from ._schemas.subscription import IronMqDestinationSchema

        return IronMqDestinationSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import IronMqDestinationSchema

        return IronMqDestinationSchema().dump(self)
class MessageSubscription(_BaseType):
    """Subscription to messages of one resource type, optionally filtered by message types."""

    resource_type_id: str
    types: typing.Optional[typing.List["str"]]

    def __init__(
        self,
        *,
        resource_type_id: str,
        types: typing.Optional[typing.List["str"]] = None
    ):
        self.resource_type_id = resource_type_id
        self.types = types
        super().__init__()

    @classmethod
    def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "MessageSubscription":
        from ._schemas.subscription import MessageSubscriptionSchema

        return MessageSubscriptionSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import MessageSubscriptionSchema

        return MessageSubscriptionSchema().dump(self)
class PayloadNotIncluded(_BaseType):
    """Explains why a delivery's payload was omitted and what type it had."""

    reason: str
    payload_type: str

    def __init__(self, *, reason: str, payload_type: str):
        self.reason = reason
        self.payload_type = payload_type
        super().__init__()

    @classmethod
    def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "PayloadNotIncluded":
        from ._schemas.subscription import PayloadNotIncludedSchema

        return PayloadNotIncludedSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import PayloadNotIncludedSchema

        return PayloadNotIncludedSchema().dump(self)
class SnsDestination(Destination):
    """AWS SNS destination (discriminator type="SNS")."""

    access_key: str
    access_secret: str
    topic_arn: str

    def __init__(self, *, access_key: str, access_secret: str, topic_arn: str):
        self.access_key = access_key
        self.access_secret = access_secret
        self.topic_arn = topic_arn
        super().__init__(type="SNS")

    @classmethod
    def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "SnsDestination":
        from ._schemas.subscription import SnsDestinationSchema

        return SnsDestinationSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import SnsDestinationSchema

        return SnsDestinationSchema().dump(self)
class SqsDestination(Destination):
    """AWS SQS destination (discriminator type="SQS")."""

    access_key: str
    access_secret: str
    queue_url: str
    region: str

    def __init__(
        self, *, access_key: str, access_secret: str, queue_url: str, region: str
    ):
        self.access_key = access_key
        self.access_secret = access_secret
        self.queue_url = queue_url
        self.region = region
        super().__init__(type="SQS")

    @classmethod
    def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "SqsDestination":
        from ._schemas.subscription import SqsDestinationSchema

        return SqsDestinationSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import SqsDestinationSchema

        return SqsDestinationSchema().dump(self)
class Subscription(BaseResource):
    """A Subscription resource: destination, subscribed changes/messages,
    delivery format and health status (generated model)."""

    #: Present on resources updated after 1/02/2019 except for events not tracked.
    last_modified_by: typing.Optional["LastModifiedBy"]
    #: Present on resources created after 1/02/2019 except for events not tracked.
    created_by: typing.Optional["CreatedBy"]
    changes: typing.List["ChangeSubscription"]
    destination: "Destination"
    key: typing.Optional[str]
    messages: typing.List["MessageSubscription"]
    format: "DeliveryFormat"
    status: "SubscriptionHealthStatus"

    def __init__(
        self,
        *,
        id: str,
        version: int,
        created_at: datetime.datetime,
        last_modified_at: datetime.datetime,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        changes: typing.List["ChangeSubscription"],
        destination: "Destination",
        key: typing.Optional[str] = None,
        messages: typing.List["MessageSubscription"],
        format: "DeliveryFormat",
        status: "SubscriptionHealthStatus"
    ):
        self.last_modified_by = last_modified_by
        self.created_by = created_by
        self.changes = changes
        self.destination = destination
        self.key = key
        self.messages = messages
        self.format = format
        self.status = status

        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
        )

    @classmethod
    def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "Subscription":
        from ._schemas.subscription import SubscriptionSchema

        return SubscriptionSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import SubscriptionSchema

        return SubscriptionSchema().dump(self)
class SubscriptionDelivery(_BaseType):
    """Base type for delivered notifications; `notification_type` is the discriminator."""

    project_key: str
    notification_type: str
    resource: "Reference"
    resource_user_provided_identifiers: typing.Optional["UserProvidedIdentifiers"]

    def __init__(
        self,
        *,
        project_key: str,
        notification_type: str,
        resource: "Reference",
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None
    ):
        self.project_key = project_key
        self.notification_type = notification_type
        self.resource = resource
        self.resource_user_provided_identifiers = resource_user_provided_identifiers
        super().__init__()

    @classmethod
    def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "SubscriptionDelivery":
        # Dispatch on the "notificationType" discriminator to the subclass schema.
        # NOTE(review): falls through and returns None for unknown discriminator
        # values — confirm this is the generator's intended behavior.
        if data["notificationType"] == "Message":
            from ._schemas.subscription import MessageDeliverySchema

            return MessageDeliverySchema().load(data)
        if data["notificationType"] == "ResourceCreated":
            from ._schemas.subscription import ResourceCreatedDeliverySchema

            return ResourceCreatedDeliverySchema().load(data)
        if data["notificationType"] == "ResourceDeleted":
            from ._schemas.subscription import ResourceDeletedDeliverySchema

            return ResourceDeletedDeliverySchema().load(data)
        if data["notificationType"] == "ResourceUpdated":
            from ._schemas.subscription import ResourceUpdatedDeliverySchema

            return ResourceUpdatedDeliverySchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import SubscriptionDeliverySchema

        return SubscriptionDeliverySchema().dump(self)
class MessageDelivery(SubscriptionDelivery):
    """Delivery of a message notification (notificationType="Message")."""

    id: str
    version: int
    created_at: datetime.datetime
    last_modified_at: datetime.datetime
    sequence_number: int
    resource_version: int
    payload_not_included: "PayloadNotIncluded"

    def __init__(
        self,
        *,
        project_key: str,
        resource: "Reference",
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        id: str,
        version: int,
        created_at: datetime.datetime,
        last_modified_at: datetime.datetime,
        sequence_number: int,
        resource_version: int,
        payload_not_included: "PayloadNotIncluded"
    ):
        self.id = id
        self.version = version
        self.created_at = created_at
        self.last_modified_at = last_modified_at
        self.sequence_number = sequence_number
        self.resource_version = resource_version
        self.payload_not_included = payload_not_included

        super().__init__(
            project_key=project_key,
            resource=resource,
            resource_user_provided_identifiers=resource_user_provided_identifiers,
            notification_type="Message",
        )

    @classmethod
    def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "MessageDelivery":
        from ._schemas.subscription import MessageDeliverySchema

        return MessageDeliverySchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import MessageDeliverySchema

        return MessageDeliverySchema().dump(self)
class ResourceCreatedDelivery(SubscriptionDelivery):
    """Delivery for a resource creation (notificationType="ResourceCreated")."""

    version: int
    modified_at: datetime.datetime

    def __init__(
        self,
        *,
        project_key: str,
        resource: "Reference",
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        version: int,
        modified_at: datetime.datetime
    ):
        self.version = version
        self.modified_at = modified_at

        super().__init__(
            project_key=project_key,
            resource=resource,
            resource_user_provided_identifiers=resource_user_provided_identifiers,
            notification_type="ResourceCreated",
        )

    @classmethod
    def deserialize(
        cls, data: typing.Dict[str, typing.Any]
    ) -> "ResourceCreatedDelivery":
        from ._schemas.subscription import ResourceCreatedDeliverySchema

        return ResourceCreatedDeliverySchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import ResourceCreatedDeliverySchema

        return ResourceCreatedDeliverySchema().dump(self)
class ResourceDeletedDelivery(SubscriptionDelivery):
    """Delivery for a resource deletion (notificationType="ResourceDeleted")."""

    version: int
    modified_at: datetime.datetime
    data_erasure: typing.Optional[bool]

    def __init__(
        self,
        *,
        project_key: str,
        resource: "Reference",
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        version: int,
        modified_at: datetime.datetime,
        data_erasure: typing.Optional[bool] = None
    ):
        self.version = version
        self.modified_at = modified_at
        self.data_erasure = data_erasure

        super().__init__(
            project_key=project_key,
            resource=resource,
            resource_user_provided_identifiers=resource_user_provided_identifiers,
            notification_type="ResourceDeleted",
        )

    @classmethod
    def deserialize(
        cls, data: typing.Dict[str, typing.Any]
    ) -> "ResourceDeletedDelivery":
        from ._schemas.subscription import ResourceDeletedDeliverySchema

        return ResourceDeletedDeliverySchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import ResourceDeletedDeliverySchema

        return ResourceDeletedDeliverySchema().dump(self)
class ResourceUpdatedDelivery(SubscriptionDelivery):
    """Delivery for a resource update (notificationType="ResourceUpdated")."""

    version: int
    old_version: int
    modified_at: datetime.datetime

    def __init__(
        self,
        *,
        project_key: str,
        resource: "Reference",
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        version: int,
        old_version: int,
        modified_at: datetime.datetime
    ):
        self.version = version
        self.old_version = old_version
        self.modified_at = modified_at

        super().__init__(
            project_key=project_key,
            resource=resource,
            resource_user_provided_identifiers=resource_user_provided_identifiers,
            notification_type="ResourceUpdated",
        )

    @classmethod
    def deserialize(
        cls, data: typing.Dict[str, typing.Any]
    ) -> "ResourceUpdatedDelivery":
        from ._schemas.subscription import ResourceUpdatedDeliverySchema

        return ResourceUpdatedDeliverySchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import ResourceUpdatedDeliverySchema

        return ResourceUpdatedDeliverySchema().dump(self)
class SubscriptionDraft(_BaseType):
    """Draft payload for creating a Subscription; only `destination` is required."""

    changes: typing.Optional[typing.List["ChangeSubscription"]]
    destination: "Destination"
    key: typing.Optional[str]
    messages: typing.Optional[typing.List["MessageSubscription"]]
    format: typing.Optional["DeliveryFormat"]

    def __init__(
        self,
        *,
        changes: typing.Optional[typing.List["ChangeSubscription"]] = None,
        destination: "Destination",
        key: typing.Optional[str] = None,
        messages: typing.Optional[typing.List["MessageSubscription"]] = None,
        format: typing.Optional["DeliveryFormat"] = None
    ):
        self.changes = changes
        self.destination = destination
        self.key = key
        self.messages = messages
        self.format = format
        super().__init__()

    @classmethod
    def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "SubscriptionDraft":
        from ._schemas.subscription import SubscriptionDraftSchema

        return SubscriptionDraftSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import SubscriptionDraftSchema

        return SubscriptionDraftSchema().dump(self)
class SubscriptionHealthStatus(enum.Enum):
    """Delivery health of a subscription, as reported by the API."""

    HEALTHY = "Healthy"
    CONFIGURATION_ERROR = "ConfigurationError"
    CONFIGURATION_ERROR_DELIVERY_STOPPED = "ConfigurationErrorDeliveryStopped"
    TEMPORARY_ERROR = "TemporaryError"
class SubscriptionPagedQueryResponse(_BaseType):
    """Paged query result: pagination metadata plus a page of Subscriptions."""

    limit: int
    count: int
    total: typing.Optional[int]
    offset: int
    results: typing.List["Subscription"]

    def __init__(
        self,
        *,
        limit: int,
        count: int,
        total: typing.Optional[int] = None,
        offset: int,
        results: typing.List["Subscription"]
    ):
        self.limit = limit
        self.count = count
        self.total = total
        self.offset = offset
        self.results = results
        super().__init__()

    @classmethod
    def deserialize(
        cls, data: typing.Dict[str, typing.Any]
    ) -> "SubscriptionPagedQueryResponse":
        from ._schemas.subscription import SubscriptionPagedQueryResponseSchema

        return SubscriptionPagedQueryResponseSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import SubscriptionPagedQueryResponseSchema

        return SubscriptionPagedQueryResponseSchema().dump(self)
class SubscriptionUpdate(_BaseType):
    """Update payload: the expected resource version plus a list of update actions."""

    version: int
    actions: typing.List["SubscriptionUpdateAction"]

    def __init__(
        self, *, version: int, actions: typing.List["SubscriptionUpdateAction"]
    ):
        self.version = version
        self.actions = actions
        super().__init__()

    @classmethod
    def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "SubscriptionUpdate":
        from ._schemas.subscription import SubscriptionUpdateSchema

        return SubscriptionUpdateSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import SubscriptionUpdateSchema

        return SubscriptionUpdateSchema().dump(self)
class SubscriptionUpdateAction(_BaseType):
    """Base type for subscription update actions; `action` is the discriminator."""

    action: str

    def __init__(self, *, action: str):
        self.action = action
        super().__init__()

    @classmethod
    def deserialize(
        cls, data: typing.Dict[str, typing.Any]
    ) -> "SubscriptionUpdateAction":
        # Dispatch on the "action" discriminator to the concrete subclass schema.
        # NOTE(review): falls through and returns None for unknown discriminator
        # values — confirm this is the generator's intended behavior.
        if data["action"] == "changeDestination":
            from ._schemas.subscription import SubscriptionChangeDestinationActionSchema

            return SubscriptionChangeDestinationActionSchema().load(data)
        if data["action"] == "setChanges":
            from ._schemas.subscription import SubscriptionSetChangesActionSchema

            return SubscriptionSetChangesActionSchema().load(data)
        if data["action"] == "setKey":
            from ._schemas.subscription import SubscriptionSetKeyActionSchema

            return SubscriptionSetKeyActionSchema().load(data)
        if data["action"] == "setMessages":
            from ._schemas.subscription import SubscriptionSetMessagesActionSchema

            return SubscriptionSetMessagesActionSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import SubscriptionUpdateActionSchema

        return SubscriptionUpdateActionSchema().dump(self)
class SubscriptionChangeDestinationAction(SubscriptionUpdateAction):
    """Update action replacing the subscription's destination (action="changeDestination")."""

    destination: "Destination"

    def __init__(self, *, destination: "Destination"):
        self.destination = destination
        super().__init__(action="changeDestination")

    @classmethod
    def deserialize(
        cls, data: typing.Dict[str, typing.Any]
    ) -> "SubscriptionChangeDestinationAction":
        from ._schemas.subscription import SubscriptionChangeDestinationActionSchema

        return SubscriptionChangeDestinationActionSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import SubscriptionChangeDestinationActionSchema

        return SubscriptionChangeDestinationActionSchema().dump(self)
class SubscriptionSetChangesAction(SubscriptionUpdateAction):
    """Update action setting/clearing the change subscriptions (action="setChanges")."""

    changes: typing.Optional[typing.List["ChangeSubscription"]]

    def __init__(
        self, *, changes: typing.Optional[typing.List["ChangeSubscription"]] = None
    ):
        self.changes = changes
        super().__init__(action="setChanges")

    @classmethod
    def deserialize(
        cls, data: typing.Dict[str, typing.Any]
    ) -> "SubscriptionSetChangesAction":
        from ._schemas.subscription import SubscriptionSetChangesActionSchema

        return SubscriptionSetChangesActionSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import SubscriptionSetChangesActionSchema

        return SubscriptionSetChangesActionSchema().dump(self)
class SubscriptionSetKeyAction(SubscriptionUpdateAction):
    """Update action setting or removing the subscription key (action="setKey")."""

    #: If `key` is absent or `null`, this field will be removed if it exists.
    key: typing.Optional[str]

    def __init__(self, *, key: typing.Optional[str] = None):
        self.key = key
        super().__init__(action="setKey")

    @classmethod
    def deserialize(
        cls, data: typing.Dict[str, typing.Any]
    ) -> "SubscriptionSetKeyAction":
        from ._schemas.subscription import SubscriptionSetKeyActionSchema

        return SubscriptionSetKeyActionSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import SubscriptionSetKeyActionSchema

        return SubscriptionSetKeyActionSchema().dump(self)
class SubscriptionSetMessagesAction(SubscriptionUpdateAction):
    """Update action setting/clearing the message subscriptions (action="setMessages")."""

    messages: typing.Optional[typing.List["MessageSubscription"]]

    def __init__(
        self, *, messages: typing.Optional[typing.List["MessageSubscription"]] = None
    ):
        self.messages = messages
        super().__init__(action="setMessages")

    @classmethod
    def deserialize(
        cls, data: typing.Dict[str, typing.Any]
    ) -> "SubscriptionSetMessagesAction":
        from ._schemas.subscription import SubscriptionSetMessagesActionSchema

        return SubscriptionSetMessagesActionSchema().load(data)

    def serialize(self) -> typing.Dict[str, typing.Any]:
        from ._schemas.subscription import SubscriptionSetMessagesActionSchema

        return SubscriptionSetMessagesActionSchema().dump(self)
|
from numpy import linalg as _LA
import numpy as _np
def matrix_system(q, N, type='even', period='one'):
    '''Creates the tridiagonal matrix system of order NxN associated with
    each of the four classes of simply-periodic Mathieu functions.

    Input:
        q: parameter, real or purely imaginary.
        N: Size of the matrix, and thus the order of the highest harmonic in
            the trigonometric series that defines each Mathieu function.
        type: str, `even` or `odd`.
        period: str, `one` or `two`. If `one`, function is assumed to be
            pi-periodic. If `two`, function is taken to be 2pi-periodic.
    Output:
        A: ndarray, the square matrix associated with each of the four types
            of simply-periodic Mathieu functions.
    Raises:
        NotImplementedError: for the (even, two) and (odd, one) cases, which
            are not implemented yet (previously these fell through and hit
            an UnboundLocalError on `return A`).
        ValueError: if `type` or `period` is not an accepted value.
    '''
    # Fixed: string comparison must use ==, not `is` — identity of equal
    # string literals is a CPython interning detail (SyntaxWarning on 3.8+).
    if type == 'even':
        if period == 'one':  # ce_{2n}
            d = [(2. * r) ** 2 for r in range(N)]
            e = q * _np.ones(N - 1)
            A = _np.diag(d) + _np.diag(e, k=-1) + _np.diag(e, k=1)
            # The first off-diagonal entry carries a sqrt(2) normalization.
            A[0, 1] = _np.sqrt(2) * A[0, 1]
            A[1, 0] = A[0, 1]
        elif period == 'two':  # se_{2n+2}
            raise NotImplementedError("type='even', period='two' not implemented")
        else:
            raise ValueError("period must be 'one' or 'two'")
    elif type == 'odd':
        if period == 'one':  # se_{2n+1}
            raise NotImplementedError("type='odd', period='one' not implemented")
        elif period == 'two':  # ce_{2n+1}
            d = [((2. * r) + 1) ** 2 for r in range(N)]
            e = q * _np.ones(N - 1)
            A = _np.diag(d) + _np.diag(e, k=-1) + _np.diag(e, k=1)
        else:
            raise ValueError("period must be 'one' or 'two'")
    else:
        raise ValueError("type must be 'even' or 'odd'")
    return A
def eig_pairs(A, type='even', period='one'):
    '''Calculates the characteristic values (eigenvalues) and the Fourier
    coefficients associated with the Mathieu function. Both the eigenvalues
    and Fourier coefficients are returned in ascending order.
    '''
    vals, vecs = _LA.eig(A)  # raw, unordered eigen-decomposition
    if (type, period) == ('even', 'one'):
        # Undo the sqrt(2) normalization applied to the first matrix row.
        vecs[0, :] = vecs[0, :] / _np.sqrt(2)
    # Sort the eigenvalues and permute eigenvector columns accordingly.
    return order_check(vals, vecs)
def order_check(a, v):
    """Return the eigenvalue array `a` (with matching eigenvector columns of
    `v`) ordered from smaller to larger.

    A purely real spectrum is returned unchanged. A complex spectrum is
    sorted primarily by real part; complex-conjugate pairs are ordered by the
    sign of the imaginary part (negative first), which is what lexicographic
    sorting of the rounded values yields.
    """
    if not a.imag.any():
        # All-real spectrum: keep the original ordering unchanged.
        return a, v
    # Round to 5 decimals so floating-point noise cannot affect the ordering.
    perm = _np.argsort(_np.round(a, 5))
    return a[perm], v[:, perm]
|
from django.db import models
# Create your models here.
class Poetry(models.Model):
    """A single poem: title (诗名), author and body text."""

    title = models.CharField(
        max_length=100,
        verbose_name="诗名"
    )
    author = models.CharField(
        max_length=100
    )
    content = models.CharField(
        max_length=255
    )

    def __str__(self):
        # Human-readable representation used by the Django admin and shell;
        # without it instances render as "Poetry object (N)".
        return self.title
import datetime
import os
import pprint
import unittest

from oeqa.core.decorator.data import skipIfNotFeature
from oeqa.core.decorator.depends import OETestDepends
from oeqa.core.decorator.oeid import OETestID
from oeqa.runtime.case import OERuntimeTestCase
from oeqa.runtime.decorator.package import OEHasPackage
from oeqa.utils.logparser import PtestParser
class PtestRunnerTest(OERuntimeTestCase):
    """Runs ptest-runner on the target image and publishes parsed results."""

    @OETestID(1600)
    @skipIfNotFeature('ptest', 'Test requires ptest to be in DISTRO_FEATURES')
    @OETestDepends(['ssh.SSHTest.test_ssh'])
    @OEHasPackage(['ptest-runner'])
    @unittest.expectedFailure
    def test_ptestrunner(self):
        """Run ptest-runner on the target, archive its log under a timestamped
        directory (with a stable `ptest_log` symlink), publish parsed results
        into tc.extraresults, and fail if any ptest case failed or the OOM
        killer fired.

        NOTE(review): this method uses `os` (path.join/makedirs/remove/symlink)
        — confirm `import os` exists at the top of this file.
        """
        status, output = self.target.run('which ptest-runner', 0)
        if status != 0:
            self.skipTest("No -ptest packages are installed in the image")
        test_log_dir = self.td.get('TEST_LOG_DIR', '')
        # The TEST_LOG_DIR may be NULL when testimage is added after
        # testdata.json is generated.
        if not test_log_dir:
            test_log_dir = os.path.join(self.td.get('WORKDIR', ''), 'testimage')
        # Don't use self.td.get('DATETIME'), it's from testdata.json, not
        # up-to-date, and may cause "File exists" when re-run.
        timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        ptest_log_dir_link = os.path.join(test_log_dir, 'ptest_log')
        ptest_log_dir = '%s.%s' % (ptest_log_dir_link, timestamp)
        ptest_runner_log = os.path.join(ptest_log_dir, 'ptest-runner.log')
        status, output = self.target.run('ptest-runner', 0)
        os.makedirs(ptest_log_dir)
        with open(ptest_runner_log, 'w') as f:
            f.write(output)
        # status != 0 is OK since some ptest tests may fail
        self.assertTrue(status != 127, msg="Cannot execute ptest-runner!")
        if not hasattr(self.tc, "extraresults"):
            self.tc.extraresults = {}
        extras = self.tc.extraresults
        extras['ptestresult.rawlogs'] = {'log': output}

        # Parse and save results
        parser = PtestParser()
        results, sections = parser.parse(ptest_runner_log)
        parser.results_as_files(ptest_log_dir)
        if os.path.exists(ptest_log_dir_link):
            # Remove the old link to create a new one
            os.remove(ptest_log_dir_link)
        os.symlink(os.path.basename(ptest_log_dir), ptest_log_dir_link)

        extras['ptestresult.sections'] = sections

        # Sanitize test names: parentheses become underscores so the names
        # are safe as result keys.
        trans = str.maketrans("()", "__")
        for section in results:
            for test in results[section]:
                result = results[section][test]
                testname = "ptestresult." + (section or "No-section") + "." + "_".join(test.translate(trans).split())
                extras[testname] = {'status': result}

        failed_tests = {}
        for section in results:
            failed_testcases = [ "_".join(test.translate(trans).split()) for test in results[section] if results[section][test] == 'fail' ]
            if failed_testcases:
                failed_tests[section] = failed_testcases

        failmsg = ""
        # OOM kills on the target usually explain spurious ptest failures.
        status, output = self.target.run('dmesg | grep "Killed process"', 0)
        if output:
            failmsg = "ERROR: Processes were killed by the OOM Killer:\n%s\n" % output

        if failed_tests:
            failmsg = failmsg + "Failed ptests:\n%s" % pprint.pformat(failed_tests)
        if failmsg:
            self.fail(failmsg)
|
from django.apps import AppConfig
class ApiConfig(AppConfig):
    """Django app configuration for the serveradmin.api application."""

    name = 'serveradmin.api'
    verbose_name = "Api"

    def ready(self):
        # Imported for its registration side effects once the app registry
        # is fully loaded; the name itself is unused.
        import serveradmin.api.api  # noqa
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file runs value iteration on an aggregated state space.
It aggregates states using the supplied metric.
This module will run a number of trials on a set of possible metrics and compile
the results in a plot.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import logging
import gin
from matplotlib import cm
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
from six.moves import range
import tensorflow.compat.v1 as tf
def greedy(metric, num_states, num_states_target, max_iterations,
           verbose=False):
    """Greedily aggregate states until a desired number of aggregate states.

    Repeatedly merges the pair of clusters containing the two closest states
    (under `metric`) until only `num_states_target` clusters remain, or
    `max_iterations` merge attempts have been made.

    Args:
        metric: matrix of distances.
        num_states: int, number of total states.
        num_states_target: int, desired number of states.
        max_iterations: int, maximum number of iterations to run algorithm.
        verbose: bool, whether to print verbose messages.

    Returns:
        list of aggregated states and list mapping state to its cluster.
    """
    curr_metric = np.copy(metric)
    # First we ensure that we won't aggregate states with themselves.
    np.fill_diagonal(curr_metric, np.inf)
    # Start with every state in its own singleton cluster.
    aggregate_states = [[x] for x in range(num_states)]
    state_to_aggregate_states = list(range(num_states))
    num_iterations = 1
    while len(aggregate_states) > num_states_target:
        # Pick a pair of the closest states randomly.
        min_distance = np.min(curr_metric)
        # We add a little epsilon here to avoid floating point precision issues.
        x, y = np.where(curr_metric <= min_distance + 1e-8)
        i = np.random.randint(len(x))
        s, t = x[i], y[i]
        # So we no longer try to aggregate these states.
        curr_metric[s, t] = np.inf
        curr_metric[t, s] = np.inf
        # For simplicity we'll put the new aggregation at the front.
        c1 = state_to_aggregate_states[s]
        c2 = state_to_aggregate_states[t]
        new_aggregate_states = [[]]
        # Merge clusters c1 and c2 into the new front cluster.
        # NOTE: the loop variable `s` below shadows the `s` picked above;
        # the outer value is not needed again within this iteration.
        for c in [c1, c2]:
            for s in aggregate_states[c]:
                if s in new_aggregate_states[0]:
                    # If c1 == c2, this would cause duplicates which causes never-ending
                    # loops.
                    continue
                new_aggregate_states[0].append(s)
                state_to_aggregate_states[s] = 0
        # Re-index all the other aggregations.
        for i, c in enumerate(aggregate_states):
            if i == c1 or i == c2:
                continue
            for s in c:
                state_to_aggregate_states[s] = len(new_aggregate_states)
            new_aggregate_states.append(c)
        aggregate_states = new_aggregate_states
        if num_iterations % 1000 == 0 and verbose:
            logging.info('Iteration %d', num_iterations)
        num_iterations += 1
        if num_iterations > max_iterations:
            break
    return aggregate_states, state_to_aggregate_states
def k_medians(metric, num_states, num_states_target, max_iterations,
              verbose=False):
    """Aggregate states using the k-medians algorithm.

    Alternates assignment (attach each state to its nearest centroid) and
    update (pick the cluster member minimizing average intra-cluster
    distance as the new centroid) until assignments stabilize or
    `max_iterations` is reached.

    Args:
        metric: matrix of distances.
        num_states: int, number of total states.
        num_states_target: int, desired number of states.
        max_iterations: int, maximum number of iterations to run algorithm.
        verbose: bool, whether to print verbose messages.

    Returns:
        list of aggregated states and dict mapping state to its cluster.
    """
    # Pick an initial set of centroids.
    centroids = np.random.choice(num_states, size=num_states_target,
                                 replace=False)
    state_to_centroid = [0 for _ in range(num_states)]
    for k, s in enumerate(centroids):
        state_to_centroid[s] = k
    # We first put each state in a random cluster.
    for s in range(num_states):
        if s in centroids:
            continue
        k = s % num_states_target
        state_to_centroid[s] = k
    clusters_changing = True
    num_iterations = 1
    while clusters_changing:
        clusters_changing = False
        # Each cluster starts with its centroid as the sole member.
        clusters = [[x] for x in centroids]
        # Assignment step: attach each state to its nearest centroid.
        for s in range(num_states):
            if s in centroids:
                continue
            nearest_centroid = 0
            smallest_distance = np.inf
            for k, t in enumerate(centroids):
                if metric[s, t] < smallest_distance:
                    smallest_distance = metric[s, t]
                    nearest_centroid = k
            if nearest_centroid != state_to_centroid[s]:
                clusters_changing = True
            state_to_centroid[s] = nearest_centroid
            clusters[nearest_centroid].append(s)
        # Update step: re-calculate centroids as the member with the
        # smallest average distance to the rest of its cluster (1-median).
        for k, c in enumerate(clusters):
            min_avg_distance = np.inf
            new_centroid = 0
            for s in c:
                avg_distance = 0.
                for t in c:
                    avg_distance += metric[s, t]
                avg_distance /= len(c)
                if avg_distance < min_avg_distance:
                    min_avg_distance = avg_distance
                    new_centroid = s
            centroids[k] = new_centroid
        if num_iterations % 1000 == 0 and verbose:
            logging.info('Iteration %d', num_iterations)
        num_iterations += 1
        if num_iterations > max_iterations:
            break
    return clusters, state_to_centroid
@gin.configurable
def value_iteration(env, aggregate_states, tolerance=0.001, verbose=False):
    r"""Run value iteration on the aggregate MDP.

    This constructs a new MDP using the aggregate states as follows:
    ```
    R(c, a) = 1/|c| * \sum_{s \in c} R(s, a)
    P(c, a)(c') = 1/|c| * \sum_{s \in c}\sum_{s' \in c'} P(s, a)(s')
    ```

    Args:
        env: the original environment.
        aggregate_states: list of aggregate states.
        tolerance: float, maximum difference in value between successive
            iterations. Once this threshold is past, computation stops.
        verbose: bool, whether to print verbose messages.

    Returns:
        numpy array of aggregate Q-values, shape (num_clusters, num_actions).
    """
    num_clusters = len(aggregate_states)
    transition_probs = np.zeros((num_clusters, env.num_actions, num_clusters))
    rewards = np.zeros((num_clusters, env.num_actions))
    # Build the aggregate MDP by averaging rewards and transitions over the
    # ground states contained in each cluster.
    for c1 in range(num_clusters):
        for a in range(env.num_actions):
            for s1 in aggregate_states[c1]:
                rewards[c1, a] += env.rewards[s1, a]
                for c2 in range(num_clusters):
                    for s2 in aggregate_states[c2]:
                        transition_probs[c1, a, c2] += env.transition_probs[s1, a, s2]
            rewards[c1, a] /= len(aggregate_states[c1])
            transition_probs[c1, a, :] /= len(aggregate_states[c1])
    q_values = np.zeros((num_clusters, env.num_actions))
    # Seed above tolerance so the loop runs at least one full sweep.
    error = tolerance * 2.
    num_iterations = 1
    while error > tolerance:
        # BUG FIX: accumulate the maximum update over the whole sweep. The
        # original overwrote `error` for every (c, a) pair, so the loop's
        # exit test only saw the LAST pair's delta and could terminate
        # before all clusters had converged.
        error = 0.
        for c in range(num_clusters):
            for a in range(env.num_actions):
                old_q_value = q_values[c, a]
                q_values[c, a] = rewards[c, a] + env.gamma * np.matmul(
                    transition_probs[c, a, :], np.max(q_values, axis=1))
                error = max(error, abs(q_values[c, a] - old_q_value))
        if num_iterations % 1000 == 0 and verbose:
            logging.info('Iteration %d: %f', num_iterations, error)
        num_iterations += 1
    return q_values
@gin.configurable
def experiment(base_dir,
               env,
               metrics,
               max_iterations=100,
               run=0,
               random_mdp=False,
               verbose=False,
               aggregation_method='greedy'):
    """Module to run the experiment.

    For a sweep of target aggregate-state counts, aggregates the state space
    under each metric (greedy or k-medians), runs value iteration on the
    aggregate MDP, projects the values back to the ground states, and records
    the error against the environment's exact Q-values.

    Args:
        base_dir: str, base directory where to save the files.
        env: an environment specifying the true underlying MDP.
        metrics: list of metrics which will be used for the nearest-neighbour
            approximants.
        max_iterations: int, maximum number of iterations for each of the
            aggregation methods.
        run: int, run id.
        random_mdp: bool, whether the environment is a random MDP or not.
        verbose: bool, whether to print verbose messages.
        aggregation_method: string, greedy or k_median method.

    Returns:
        Dict containing statistics, or None when env.values is unset.
    """
    if env.values is None:
        logging.info('Values must have already been computed.')
        return
    cmap = cm.get_cmap('plasma', 256)
    data = {
        'Metric': [],
        'num_states_target': [],
        'run': [],
        'qg': [],
        'exact_qvalues': [],
        'error': []
    }
    # Sweep 10 evenly spaced target sizes from 1 up to the full state count.
    num_states_targets = np.linspace(1, env.num_states, 10).astype(int)
    for num_states_target in num_states_targets:
        # -(-x//1) is the same as ceil(x).
        # num_states_target = max(int(-(-state_fraction * env.num_states // 1)), 1)
        for metric in metrics:
            # Skip metrics that were never computed.
            if metric.metric is None:
                continue
            if verbose:
                logging.info('***Run %d, %s, %d',
                             num_states_target, metric.name, run)
            if aggregation_method == 'k_median':
                aggregate_states, state_to_aggregate_states = (
                    k_medians(
                        metric.metric,
                        env.num_states,
                        num_states_target,
                        max_iterations,
                        verbose=verbose))
            if aggregation_method == 'greedy':
                aggregate_states, state_to_aggregate_states = (
                    greedy(
                        metric.metric,
                        env.num_states,
                        num_states_target,
                        max_iterations,
                        verbose=verbose))
            if not random_mdp:
                # Generate plot of neighborhoods.
                neighbourhood_path = os.path.join(
                    base_dir, metric.name,
                    'neighborhood_{}_{}.pdf'.format(num_states_target, run))
                obs_image = env.render_custom_observation(
                    env.reset(), state_to_aggregate_states, cmap,
                    boundary_values=[-1, num_states_target])
                plt.imshow(obs_image)
                with tf.gfile.GFile(neighbourhood_path, 'w') as f:
                    plt.savefig(f, format='pdf', dpi=300, bbox_inches='tight')
                plt.clf()
            # Perform value iteration on aggregate states.
            q_aggregate = value_iteration(env, aggregate_states)
            # Now project the values of the aggregate states to the ground states.
            q_projected = [
                q_aggregate[state_to_aggregate_states[s]]
                for s in range(env.num_states)]
            data['Metric'].append(metric.label)
            data['num_states_target'].append(num_states_target)
            data['run'].append(run)
            data['qg'].append(q_projected)
            data['exact_qvalues'].append(env.q_val_it_q_values)
            # Mean over states of the max-over-actions absolute Q error.
            data['error'].append(
                np.mean(
                    np.max((np.abs(q_projected - env.q_val_it_q_values)), axis=1)))
    return data
def plot_data(base_dir, data):
    """Plot the error curves collected from all experiment runs.

    Args:
        base_dir: str, directory where the output PDF is written.
        data: dict of parallel lists as produced by `experiment`. The
            non-scalar columns ('qg', 'exact_qvalues') are ignored.
    """
    # BUG FIX: build a filtered copy instead of `del`-ing keys so the
    # caller's dict is not mutated as a side effect.
    scalar_columns = {
        key: value for key, value in data.items()
        if key not in ('qg', 'exact_qvalues')
    }
    df = pd.DataFrame(data=scalar_columns)
    plt.subplots(1, 1, figsize=(8, 6))
    sns.lineplot(x='num_states_target', y='error', hue='Metric', data=df,
                 ci=99, lw=3)
    plt.xlabel('Number of aggregate states', fontsize=24)
    plt.ylabel('Avg. Error', fontsize=24)
    plt.legend(fontsize=18)
    pdf_file = os.path.join(base_dir, 'aggregate_value_iteration.pdf')
    with tf.io.gfile.GFile(pdf_file, 'w') as f:
        plt.savefig(f, format='pdf', dpi=300, bbox_inches='tight')
    plt.clf()
    plt.close('all')
|
# coding: utf-8
"""
Python InsightVM API Client
OpenAPI spec version: 3
Contact: support@rapid7.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.summary import Summary # noqa: E501
from swagger_client.rest import ApiException
class TestSummary(unittest.TestCase):
    """Unit test stubs for the generated Summary model."""

    def setUp(self):
        # No fixtures needed for the stubbed test.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testSummary(self):
        """Test Summary."""
        # FIXME: construct object with mandatory attributes with example values
        # model = swagger_client.models.summary.Summary()  # noqa: E501
        pass
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
|
#!/usr/bin/env python
#The MIT License (MIT)
#Copyright (c) 2016 Massimiliano Patacchiola
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
#MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
#CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
#SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import numpy as np
import cv2
import sys
class DiffMotionDetector:
    """Frame-differencing motion detector.

    Motion is detected through the difference between the background
    (static) frame and the foreground (dynamic) frame. The absolute
    difference of the two greyscale frames is thresholded to produce a
    binary motion mask.
    """

    def __init__(self):
        """Init the diff motion detector object."""
        # Greyscale background template; None until setBackground() is called.
        self.background_gray = None

    def setBackground(self, frame):
        """Set the BGR image used as background template.

        The template can be a specific region of interest of the main
        frame. It is internally stored as a greyscale image.

        @param frame the background frame; ignored when None
        """
        if frame is None:
            return None
        self.background_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    def getBackground(self):
        """Return the background template as a BGR image, or None if unset."""
        if self.background_gray is None:
            return None
        else:
            return cv2.cvtColor(self.background_gray, cv2.COLOR_GRAY2BGR)

    def returnMask(self, foreground_image, threshold=25):
        """Return the binary image after the detection process.

        @param foreground_image the frame to check
        @param threshold the value used for filtering the pixels after the absdiff
        @return binary mask, or None when the frame or background is missing
        """
        if foreground_image is None:
            return None
        # BUG FIX: the original passed a None background to cv2.absdiff when
        # setBackground() had never been called, raising an OpenCV error.
        if self.background_gray is None:
            return None
        foreground_gray = cv2.cvtColor(foreground_image, cv2.COLOR_BGR2GRAY)
        delta_image = cv2.absdiff(self.background_gray, foreground_gray)
        threshold_image = cv2.threshold(delta_image, threshold, 255, cv2.THRESH_BINARY)[1]
        return threshold_image
class MogMotionDetector:
    """Motion detector based on a Mixture of Gaussians (MOG) background model.

    Wrapper around the algorithm of "An Improved Adaptive Background
    Mixture Model for Realtime Tracking with Shadow Detection" by
    KaewTraKulPong and Bowden: each pixel is modelled by a mixture of K
    Gaussian distributions, with different update equations in different
    learning phases for faster, more accurate adaptation and a shadow
    detection scheme based on a computational colour space.
    """

    def __init__(self, history=10, numberMixtures=3, backgroundRatio=0.6, noise=20):
        """Init the MOG motion detector object.

        @param history lenght of the history
        @param numberMixtures The maximum number of Gaussian Mixture components allowed.
            Each pixel in the scene is modelled by a mixture of K Gaussian distributions.
            This value should be a small number from 3 to 5.
        @param backgroundRatio define a threshold which specifies if a component has to
            be included into the foreground or not. It is the minimum fraction of the
            background model; the minimum prior probability that the background is in
            the scene.
        @param noise specifies the noise strenght
        """
        # NOTE(review): cv2.BackgroundSubtractorMOG is the OpenCV 2.x API;
        # OpenCV 3+ moved it to cv2.bgsegm.createBackgroundSubtractorMOG —
        # confirm the target OpenCV version before upgrading.
        self.BackgroundSubtractorMOG = cv2.BackgroundSubtractorMOG(history, numberMixtures, backgroundRatio, noise)

    def returnMask(self, foreground_image):
        """Return the binary image after the detection process.

        @param foreground_image the frame to check
        """
        return self.BackgroundSubtractorMOG.apply(foreground_image)
class Mog2MotionDetector:
    """Motion detector based on the improved MOG2 background model.

    Wrapper around "Improved Adaptive Gaussian Mixture Model for Background
    Subtraction" by Zoran Zivkovic: recursive equations constantly update
    the mixture parameters and simultaneously select the appropriate number
    of Gaussian components for each pixel.
    """

    def __init__(self):
        """Init the MOG2 motion detector object."""
        # NOTE(review): cv2.BackgroundSubtractorMOG2() is the OpenCV 2.x
        # constructor; OpenCV 3+ uses cv2.createBackgroundSubtractorMOG2().
        self.BackgroundSubtractorMOG2 = cv2.BackgroundSubtractorMOG2()

    def returnMask(self, foreground_image):
        """Return the binary image after the detection process.

        @param foreground_image the frame to check
        """
        # Since the MOG2 returns shadows with value 127 we have to
        # filter these values in order to have a binary mask
        img = self.BackgroundSubtractorMOG2.apply(foreground_image)
        ret, thresh = cv2.threshold(img, 126, 255, cv2.THRESH_BINARY)
        return thresh

    def returnGreyscaleMask(self, foreground_image):
        """Return the greyscale image after the detection process.

        The MOG2 can return shadows. The pixels associated with shadows
        have value 127, so this mask is not a classic binary mask since
        it incorporates the shadow pixels.

        @param foreground_image the frame to check
        """
        return self.BackgroundSubtractorMOG2.apply(foreground_image)
|
#!/usr/bin/env python
import logging
import sys
import time
from copy import copy
import serial
from mitsi_lookup import (
CONTROL_PACKET_POSITIONS,
CONTROL_PACKET_VALUES,
DIR,
FAN,
MODE,
POWER,
ROOM_TEMP,
TEMP,
VANE,
)
HEADER_LEN = 5
log = logging.getLogger(__name__)
class HeatPump(object):
    """State model and serial driver for a heat pump.

    Tracks the attributes the unit reports, polls it for status over a
    serial link, and sends a control packet whenever the wanted state
    (set via set()) differs from the last reported state.
    """

    # Attributes mirrored from the heat pump's status packets.
    reported_attributes = ("power", "mode", "temp", "fan", "vane", "dir", "room_temp")

    def __init__(self, port=None, **kwargs):
        """Create a heat pump bound to serial *port* (opened by connect()).

        Reported attributes may be pre-seeded through keyword arguments.
        """
        self.port = port
        for item in self.reported_attributes:
            setattr(self, item, kwargs.get(item, None))
        self.dirty = True
        self.room_temp = None
        self.info_packet_index = 0
        self.last_send = 0
        self.current_packet = None
        # Last packet seen per data-type byte; used to skip duplicate logging.
        self.packet_history = {}
        # Attribute values the caller wants applied; consumed by loop().
        self.wanted_state = {}
        self.start_packet = Packet.build(0x5A, [0xCA, 0x01])
        # Periodic status queries: data type 0x02 = settings, 0x03 = room temp.
        self.info_packets = [
            Packet.build(0x42, [0x02] + [0x00] * 0x0F),
            Packet.build(0x42, [0x03] + [0x00] * 0x0F),
        ]

    def __setattr__(self, item, value):
        """ Set self.dirty when setting a reported attribute. Used downstream to
        determine if there's a change in state since we last looked. """
        if item in self.reported_attributes:
            if getattr(self, item, None) != value:
                self.dirty = True
        super(HeatPump, self).__setattr__(item, value)

    def to_dict(self):
        """ Return all the heatpump's reported attributes as
        a dict. """
        d = {}
        for item in self.reported_attributes:
            d[item] = getattr(self, item)
        return d

    def from_dict(self, d):
        """ Set all the heatpump's reported attributes from
        the provided dict. Keys absent (or falsy) in *d* are left alone. """
        for item in self.reported_attributes:
            if d.get(item, None):
                setattr(self, item, d.get(item))

    @property
    def valid(self):
        """ Validates every reported attribute has been set as an
        actual HeatPump() attribute. """
        for item in self.reported_attributes:
            if getattr(self, item, None) is None:
                return False
        return True

    def connect(self):
        """ Establish a serial connection to self.port and send the
        handshake packet. """
        if self.port:
            self.ser = serial.Serial(
                self.port, 2400, parity=serial.PARITY_EVEN, timeout=0
            )
            self.ser.write(bytearray(self.start_packet.bytes))

    def map_set_packet_to_attributes(self):
        """ Match data in a Packet() to the relevant HeatPump() attribute. """
        result = []
        for attribute_name in self.reported_attributes:
            # Get the lookup dictonary name from the attribute name
            # e.g. 'power' -> 'POWER'
            ATTRIBUTE_NAME = attribute_name.upper()
            # See what position this attribute should be in the Packet()
            # e.g. CONTROL_PACKET_POSITIONS['POWER']
            position = CONTROL_PACKET_POSITIONS.get(ATTRIBUTE_NAME, None)
            if position:
                # Retrieve the value from the Packet()
                raw_value = self.current_packet.data[position]
                # Dynamically get the lookup dictonary for an attribute,
                # and lookup the human form of the value.
                # e.g. "POWER"
                try:
                    converted_value = globals()[ATTRIBUTE_NAME].lookup(raw_value)
                except KeyError:
                    log.error("Failed to lookup %s[%s]" % (ATTRIBUTE_NAME, raw_value))
                    # BUG FIX: skip this attribute when the lookup fails.
                    # The original fell through to setattr with an unbound
                    # (or stale) converted_value, raising NameError or
                    # storing the previous attribute's value.
                    continue
                # Set the attribute on the HeatPump() object.
                # e.g. "self.power = 'ON'"
                setattr(self, attribute_name, converted_value)
                result.append((attribute_name, converted_value))
        log.debug("Set Packet: %s" % result)

    def loop(self):
        """ Pump the serial link once: parse any incoming status packets
        into attributes, then (rate-limited to one send per second) emit
        either a control packet derived from wanted_state or the next
        periodic info query. Call repeatedly. """
        res = self.ser.read(22)
        # NOTE(review): ord(c) implies iterating over str (Python 2 serial
        # reads); under Python 3, bytes iterate as ints — confirm runtime.
        for c in res:
            val = ord(c)
            if val == 0xFC:
                # Start byte: begin accumulating a new packet.
                self.current_packet = Packet()
            if not self.current_packet:
                log.debug("No packet!")
                return
            self.current_packet.bytes.append(val)
            if len(self.current_packet.bytes) == HEADER_LEN:
                # The final header byte announces the payload length.
                self.current_packet.data_len = val
            if self.current_packet.complete:
                if self.current_packet.valid:
                    if self.current_packet.data[0] == 0x02:  # Set Packet
                        self.map_set_packet_to_attributes()
                    if self.current_packet.data[0] == 0x03:  # Temp Packet
                        self.room_temp = ROOM_TEMP.lookup(self.current_packet.data[3])
                        log.debug("Temp Packet: %s" % self.room_temp)
                    if (
                        self.current_packet.data[0] in self.packet_history
                        and self.current_packet
                        == self.packet_history[self.current_packet.data[0]]
                    ):
                        # Identical to the last packet of this type; skip logging.
                        pass
                    else:
                        log.debug(
                            "HP Packet: 0x%x : %s : 0x%x"
                            % (
                                self.current_packet.type,
                                ",".join(
                                    ["%02x" % x for x in self.current_packet.data]
                                ),
                                self.current_packet.checksum,
                            )
                        )
                        self.packet_history[
                            self.current_packet.data[0]
                        ] = self.current_packet
                    self.current_packet = None
                else:
                    log.info("HP Packet Invalid")
                    self.current_packet = None
        # Rate-limit outgoing traffic to one send per second.
        if time.time() - self.last_send > 1:
            # Check if our current state matches the wanted state
            if self.wanted_state:
                wanted = copy(self)
                wanted.from_dict(self.wanted_state)
                packet = self.diff(wanted)
                if packet:
                    log.debug(
                        "Sending packet: 0x%x : %s : 0x%x"
                        % (
                            packet.type,
                            ",".join(["%02x" % x for x in packet.data]),
                            packet.checksum,
                        )
                    )
                    self.ser.write(bytearray(packet.bytes))
                    self.last_send = time.time()
                    self.info_packet_index = 0
                    time.sleep(1)
                else:
                    # Wanted state already achieved; clear it.
                    self.wanted_state = {}
            self.ser.write(bytearray(self.info_packets[self.info_packet_index].bytes))
            self.last_send = time.time()
            self.info_packet_index += 1
            if self.info_packet_index >= len(self.info_packets):
                self.info_packet_index = 0

    def set(self, state):
        """ Merge *state* (a dict of reported attributes) into the wanted
        state; loop() applies it on its next pass. """
        self.wanted_state.update(state)

    def diff(self, other):
        """ Build a control Packet() covering every attribute where *other*
        differs from this heat pump's current state. Returns None when
        nothing needs to change. """
        if not other:
            return
        data = [0x00] * 0x10
        data[0] = 0x01
        if self.power != other.power:
            data[1] += CONTROL_PACKET_VALUES["POWER"]
            data[CONTROL_PACKET_POSITIONS["POWER"]] = POWER[other.power]
        if self.mode != other.mode:
            data[1] += CONTROL_PACKET_VALUES["MODE"]
            data[CONTROL_PACKET_POSITIONS["MODE"]] = MODE[other.mode]
        if other.temp and self.temp != float(other.temp):
            data[1] += CONTROL_PACKET_VALUES["TEMP"]
            data[CONTROL_PACKET_POSITIONS["TEMP"]] = TEMP[float(other.temp)]
        if self.fan != other.fan:
            data[1] += CONTROL_PACKET_VALUES["FAN"]
            data[CONTROL_PACKET_POSITIONS["FAN"]] = FAN[other.fan]
        if self.vane != other.vane:
            data[1] += CONTROL_PACKET_VALUES["VANE"]
            data[CONTROL_PACKET_POSITIONS["VANE"]] = VANE[other.vane]
        if self.dir != other.dir:
            data[1] += CONTROL_PACKET_VALUES["DIR"]
            data[CONTROL_PACKET_POSITIONS["DIR"]] = DIR[other.dir]
        # data[1] carries the change-flags bitmask; send only when non-empty.
        if data[1] > 0x00:
            return Packet.build(0x41, data)
        return None
class Packet(object):
    """A single heat-pump serial frame.

    Layout: [0xFC, type, 0x01, 0x30, data_len, *data, checksum], where the
    checksum is 0xFC minus the low byte of the sum of all preceding bytes.
    """

    START_BYTE = 0xFC
    EXTRA_HEADER = [0x01, 0x30]

    def __init__(self):
        # Raw frame bytes accumulated so far.
        self.bytes = []
        # Payload length, known once the header has been fully read.
        self.data_len = None

    def __eq__(self, other):
        return self.bytes == other.bytes

    def __str__(self):
        return ",".join("0x%02x" % b for b in self.bytes)

    @classmethod
    def build(cls, type, data):
        """Assemble a complete outgoing packet of the given type."""
        packet = cls()
        packet.bytes = [cls.START_BYTE, type] + cls.EXTRA_HEADER + [len(data)] + list(data)
        packet.bytes.append(0xFC - (sum(packet.bytes) & 0xFF))
        return packet

    @property
    def checksum(self):
        # Expected checksum computed over everything but the final byte.
        return 0xFC - (sum(self.bytes[:-1]) & 0xFF)

    @property
    def type(self):
        return self.bytes[1]

    @property
    def complete(self):
        """True once header + payload + checksum have all been received."""
        if self.data_len is None:
            return False
        return len(self.bytes) == HEADER_LEN + self.data_len + 1

    @property
    def valid(self):
        """True when the packet is complete and its checksum matches."""
        return bool(self.complete and self.checksum == self.bytes[-1])

    @property
    def data(self):
        # Payload only: strip the header and the trailing checksum.
        return self.bytes[HEADER_LEN:-1]
if __name__ == "__main__":
    # Wire debug logging to the console for interactive runs.
    log.setLevel(logging.DEBUG)
    console = logging.StreamHandler()
    log.addHandler(console)
    if len(sys.argv) >= 2:
        # First CLI argument is the serial port (e.g. /dev/ttyUSB0).
        hp = HeatPump(sys.argv[1])
        hp.connect()
        # Poll forever; Ctrl-C exits cleanly.
        while True:
            try:
                hp.loop()
                time.sleep(1)
            except KeyboardInterrupt:
                print("Exiting.")
                sys.exit(0)
    # Only reached when no port argument was supplied.
    print("Expected the first argument to be a serial port.")
    sys.exit(1)
|
#!/usr/bin/python3
import os
import json
import ipaddress
from elasticsearch import Elasticsearch,RequestsHttpConnection
## ENV VARS
def _env_bool(name, default=True):
    """Read a boolean environment variable.

    BUG FIX: os.getenv returns a *string* whenever the variable is set,
    and any non-empty string — including "false" — is truthy, so the raw
    value could not be used directly as a flag.
    """
    value = os.getenv(name)
    if value is None:
        return default
    return value.strip().lower() in ("1", "true", "yes", "on")

elasticsearch_user = os.getenv("ELASTIC_USER", "elastic")
elasticsearch_pass = os.getenv("ELASTIC_PASS")
elasticsearch_host = os.getenv("ELASTIC_HOST")
elasticsearch_proto = os.getenv("ELASTIC_PROTO", "https")
elasticsearch_index = os.getenv("ELASTIC_INDEX", "private_geoips")
elasticsearch_ssl_verify = _env_bool("ELASTIC_SSL_VERIFY", True)
elasticsearch_ssl = _env_bool("ELASTIC_SSL", True)
config_file_name = os.getenv("CONFIG_FILE", "locations.json")
create_enrichment_policy = _env_bool("CREATE_ENRICHMENT_POLICY", True)
create_index = _env_bool("CREATE_INDEX", True)
## INIT
es = Elasticsearch([elasticsearch_proto + "://" + elasticsearch_user + ":" + elasticsearch_pass + "@" + elasticsearch_host], connection_class=RequestsHttpConnection, use_ssl=elasticsearch_ssl, verify_certs=elasticsearch_ssl_verify)
## FUNCTIONS
def init_policy():
    """Create and execute the enrich policy for the geo-IP index if absent."""
    policy_name = elasticsearch_index + "_policy"
    policy_body = {
        "match": {
            "indices": "private_geoips",
            "match_field": "source.ip",
            "enrich_fields": ["city_name", "continent_name", "country_iso_code", "country_name", "location"]
        }
    }
    existing_policies = es.enrich.get_policy(name=policy_name)["policies"]
    if existing_policies != []:
        print("Policy Exists - " + policy_name)
        return
    print("Creating Policy...")
    es.enrich.put_policy(name=policy_name, body=policy_body)
    print("Policy Created - " + policy_name)
    print("Execute Policy Update...")
    es.enrich.execute_policy(name=policy_name)
    print("Executed Policy - " + policy_name)
    return
def init_index(index):
    """Create the geo-IP index with its mapping unless it already exists."""
    index_mapping = {
        "settings": {
            "number_of_shards": 1,
            "number_of_replicas": 0
        },
        "mappings": {
            "properties": {
                "city_name": {
                    "type": "text"
                },
                "continent_name": {
                    "type": "text"
                },
                "country_iso_code": {
                    "type": "text"
                },
                "country_name": {
                    "type": "text"
                },
                "location": {
                    "type": "geo_point"
                },
                "source.ip": {
                    "type": "ip"
                }
            }
        }
    }
    if es.indices.exists(index=index):
        print("Index Exists - " + index)
        return
    print("Creating Index...")
    es.indices.create(index=index, body=index_mapping)
    print("Created Index - " + index)
    return
def send_to_elastic(data, location, index):
    """Index a single geo-IP document, using *location* as the document id."""
    response = es.index(index=index, id=location, body=data)
    print(location + " | " + str(response))
    return response
def get_location(ip, networks, locations):
    """Resolve an IP address to its configured location document.

    Scans every network; when *ip* falls inside one, the location whose
    city_name matches the network name is selected (the last match wins,
    matching the original scan order).

    Args:
        ip: str, IP address to resolve.
        networks: list of dicts with "name" and "network" (an ipaddress
            network object).
        locations: list of location dicts keyed by "city_name".

    Returns:
        dict: a copy of the matching location with "source_ip" added, or
        {} when no network contains the address.
    """
    json_obj = {}
    address = ipaddress.ip_address(ip)
    for network in networks:
        if address not in network["network"]:
            continue
        for location in locations:
            if network["name"] == location["city_name"]:
                # BUG FIX: copy the location. The original aliased the
                # shared entry in *locations* and wrote "source_ip" into
                # it, mutating the config for every later lookup.
                json_obj = dict(location)
                json_obj["source_ip"] = ip
    return json_obj
def generate_entries(networks, locations):
    """Index one geo-IP document for every address in every network."""
    for network in networks:
        for address in network["network"]:
            address_text = str(address)
            # Document id combines the network id and the address.
            doc_id = network["_id"] + "-" + address_text
            document = get_location(address_text, networks, locations)
            send_to_elastic(document, doc_id, elasticsearch_index)
        print(network["name"])
def main():
    """Load config, ensure index/policy exist, and generate geo-IP docs."""
    print("----Reading Config-----")
    # Read Config JSON; `with` guarantees the file handle is closed.
    with open(config_file_name) as config_file:
        location_data = json.load(config_file)
    print("----Generating Networks-----")
    # Create IP Ranges
    for network in location_data["networks"]:
        network["network"] = ipaddress.ip_network(network['network'])
    locations = location_data["locations"]
    networks = location_data["networks"]
    print("----Using Locations-----")
    # Print Location JSON
    # BUG FIX: this loop previously iterated `networks`, printing the
    # network list twice and the locations never.
    for item in locations:
        print(item)
    print("----Using Networks-----")
    # Print Network JSON
    for item in networks:
        print(item)
    if create_index:
        print("----Index Configuration-----")
        init_index(elasticsearch_index)
    if create_enrichment_policy:
        print("----Enrichment Policy------")
        init_policy()
    print("----Generating Entries------")
    # Generates GEOIP docs for ranges
    generate_entries(networks, locations)
    print("----Completed----")
    print("Final steps will be to create the Enrichment section in the ingest pipeline of your choice")
# Entry point — note this runs unconditionally, even when imported.
main()
|
import boto3
import os
import json
client = boto3.client('stepfunctions')
def lambda_handler(event, context):
    """AWS Lambda entry point: start the transcription state machine.

    Triggered by an S3 object-created event. Extracts the uploaded object's
    bucket and key, derives a Transcribe job name, and starts the Step
    Functions state machine named by TRANSCRIBE_STATE_MACHINE_ARN.

    Args:
        event: S3 event payload (assumes the standard Records[0].s3 shape
            — TODO confirm against the configured trigger).
        context: Lambda context object (unused).

    Returns:
        str: the literal "hello" on success.

    Raises:
        Exception: re-raises anything that fails so the invocation is
            marked as failed.
    """
    try:
        s3_object = event["Records"][0]["s3"]
        key = s3_object["object"]["key"]
        bucket_name = s3_object["bucket"]["name"]
        print(key)
        print(os.environ['TRANSCRIBE_STATE_MACHINE_ARN'])
        region = os.environ['AWS_REGION']
        # NOTE(review): form_key_uri ignores `region` and always builds the
        # legacy global-endpoint URL — confirm this is intended.
        file_uri = form_key_uri(bucket_name, key, region)
        job_name = get_job_name(key)
        print(job_name)
        # Input document consumed by the Step Functions state machine.
        execution_input = {
            "jobName": job_name,
            "mediaFormat": os.environ["MEDIA_FORMAT"],
            "fileUri": file_uri,
            "languageCode": os.environ["LANGUAGE_CODE"],
            "transcriptDestination": os.environ["TRANSCRIPTS_DESTINATION"],
            "wait_time": os.environ["WAIT_TIME"]
        }
        response = client.start_execution(
            stateMachineArn=os.environ['TRANSCRIBE_STATE_MACHINE_ARN'],
            input=json.dumps(execution_input)
        )
        print(response)
        return "hello"
    except Exception as e:
        raise e
def get_job_name(key):
    """Derive a Transcribe job name from an S3 object key."""
    # Drop the file extension (everything after the first dot).
    stem = key.split('.')[0]
    # Keep only the final path component (strip folder prefixes).
    basename = stem.split('/')[-1]
    # Transcribe job names cannot contain URL-escaped characters, so keep
    # only the portion before the first '%'.
    return basename.split('%')[0]
def form_key_uri(bucket_name, key, region):
    """Build the S3 URL for an object via the global endpoint.

    NOTE(review): *region* is accepted but unused — the legacy global
    endpoint is always returned; confirm whether a regional endpoint
    (s3.<region>.amazonaws.com) is wanted.
    """
    return f"https://s3.amazonaws.com/{bucket_name}/{key}"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.