import numpy as np
#import ZebraLib as Lzb
import matplotlib.pyplot as plt
class plane:
def __init__(self, Sw=1, AR=2, teta=10, cw=0.5, bw=2):
        # Wing
self.Sw = Sw
self.teta = teta
self.cw = cw
self.bw = bw
        # Elevator and rudder (horizontal and vertical tail)
self.AR = AR
self.S = np.linspace(0.05, 0.6)
self.c = (self.S / self.AR) **(1 / 2)
self.b = self.S/self.c
        # Calculations
self.__calc_L()
def __calc_L(self, xlim=2, ylim=0.3):
if self.teta != 0:
self.y_pos = np.linspace(0.3*ylim, ylim)
self.x_pos = self.y_pos/np.tan(np.radians(self.teta))
            self.L = np.sqrt((self.x_pos**2) + (self.y_pos**2))
else:
self.L = np.linspace(0.1*xlim, xlim)
def __filtragem(self, mh, mv):
Vxh = np.ravel(mh)
Vxv = np.ravel(mv)
Vh = list()
Vv = list()
indiceh = list()
indicev = list()
p = len(mh[0])
for n in range(len(Vxh)):
if 0.35 <= Vxh[n] <= 0.5:
Vh.append(Vxh[n])
indiceh.append(n)
if 0.04 <= Vxv[n] <= 0.06:
Vv.append(Vxv[n])
indicev.append(n)
#Horizontal
valor = np.array((indiceh)) / p
v_posh_x = valor.astype(int)
c = (valor - v_posh_x) * p
v_posh_y = (np.around(c)).astype(int)
#Vertical
valor = np.array((indicev)) / p
v_posv_x = valor.astype(int)
c = (valor - v_posv_x) * p
v_posv_y = (np.around(c)).astype(int)
return Vh, Vv, v_posh_x, v_posh_y, v_posv_x, v_posv_y
def volumes(self):
vh = []; vv = []
for i in range(len(self.L)):
vh.append(self.L[i]*self.S/(self.cw*self.Sw))
vv.append(self.L[i]*self.S/(self.bw*self.Sw))
return self.__filtragem(vh, vv)
def optmize(self, l, Sv, Sh, D=0.025, plot=False):
Sfus = 2 * np.pi * D * l
        Swet = Sfus + Sh + Sv  # total wetted area: fuselage plus both tail surfaces
idx = np.where(Swet.min() == Swet)
if plot:
plt.plot(l, Swet, label = 'Swet')
plt.plot(l, Sv, label = 'Sv')
plt.plot(l, Sh, label = 'Sh')
plt.plot(l, Sfus, label = 'Sfus')
            plt.ylabel('Wetted area (m^2)')
            plt.xlabel('Tail length (m)')
plt.legend()
return l[idx]
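# Usage sketch (not part of the original module): exercises the class above with
# made-up wing and tail numbers purely for illustration.
if __name__ == "__main__":
    p = plane(Sw=0.8, AR=4, teta=15, cw=0.35, bw=2.2)
    # Candidate tail volume coefficients that fall inside the filter bounds
    Vh, Vv, *_ = p.volumes()
    print("Horizontal/vertical candidates:", len(Vh), len(Vv))
    # Sweep a hypothetical range of tail arms with matching-length area arrays
    l = np.linspace(0.3, 1.5, 50)
    Sh = np.linspace(0.05, 0.30, 50)
    Sv = np.linspace(0.02, 0.12, 50)
    print("Tail arm minimizing wetted area:", p.optmize(l, Sv, Sh, plot=False))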
|
from subprocess import Popen
def load_jupyter_server_extension(nbapp):
"""serve the app.py directory with bokeh server"""
Popen(["panel", "serve", "app.py", "--allow-websocket-origin=*"])
|
from extensions.vue_backend.messages.DLA_table_data_message import DLA_table_data_request, DLA_table_data_response, \
DLA_set_table_data_request, ColumnHeader
from pyecore.ecore import EReference, EClass
from esdl import esdl
from esdl.processing.ESDLQuantityAndUnits import qau_to_string
from typing import List
from src.log import get_logger
log = get_logger(__name__)
def get_table_data(datalayer, parent: esdl.Item, table_data: DLA_table_data_request) \
-> DLA_table_data_response:
"""
param: datalayer - to check for global QaU
param: parent - the parent object (e.g. asset) that contains the Table reference for which we want the data
param: table_data - DLA_table_data_request that contains the reference name and type
returns: DLA_table_data_response dataclass with header and row
"""
eclass: EClass = parent.eClass
reference: EReference = eclass.findEStructuralFeature(table_data.ref_name)
value: esdl.Table = parent.eGet(table_data.ref_name)
response = DLA_table_data_response()
    if reference.eType.eClass.name != table_data.ref_type:
print("Not a Table reference!")
return response
if value is not None:
response.name = value.name
response.description = value.description
headerList: List[esdl.AbstractQuantityAndUnit] = value.header
for column in headerList:
if isinstance(column, esdl.QuantityAndUnitType):
qau_string = qau_to_string(column)
h = ColumnHeader(title=qau_string, id=column.id)
elif isinstance(column, esdl.QuantityAndUnitReference):
if not column.reference:
break # issue with headers, break processing of headerList
qau_string = qau_to_string(column.reference)
h = ColumnHeader(title=qau_string, id=column.reference.id)
response.header.append(h)
for row in value.row:
response.rows.append(convert_to_float_list(row.value))
if isinstance(parent, esdl.Pump) and table_data.ref_name == esdl.Pump.pumpCurveTable.name:
        # create empty table, with correct header: Flow, Head, efficiency from the global QaU
print('New Pump curve table requested')
if len(response.header) == 0:
flow_qau = datalayer.get_or_create_qau('flow')
head_qau = datalayer.get_or_create_qau('head')
efficiency_qau = datalayer.get_or_create_qau('efficiency')
response.header.append(ColumnHeader(title=qau_to_string(flow_qau), id=flow_qau.id))
response.header.append(ColumnHeader(title=qau_to_string(head_qau), id=head_qau.id))
response.header.append(ColumnHeader(title=qau_to_string(efficiency_qau), id=efficiency_qau.id))
if len(response.rows) == 0:
response.rows.append([0.0] * len(response.header)) # first empty row
response.name = "Pump curve data"
if isinstance(parent, esdl.Valve) and table_data.ref_name == esdl.Valve.flowCoefficient.name:
print('New flow coefficient table requested')
if len(response.header) == 0:
position_qau = datalayer.get_or_create_qau('position')
kv_qau = datalayer.get_or_create_qau('kv_coefficient')
response.header.append(ColumnHeader(title=qau_to_string(position_qau), id=position_qau.id))
response.header.append(ColumnHeader(title=qau_to_string(kv_qau), id=kv_qau.id))
if len(response.rows) == 0:
response.rows.append([0.0] * len(response.header)) # first empty row
response.name = "Flow coefficient data"
else:
# create empty table with no rows?
pass
print('returning table: ', response)
return response
def set_table_data(datalayer, parent: esdl.Item, new_data: DLA_set_table_data_request):
#eclass: EClass = parent.eClass
#reference: EReference = eclass.findEStructuralFeature(new_data.ref_name)
table: esdl.Table = parent.eGet(new_data.ref_name)
# todo table.datasource
if table is None:
# todo data source
table = esdl.Table(name=new_data.name, description=new_data.description)
parent.eSet(new_data.ref_name, table)
# always recreate header for known tables
headerList = table.header
if isinstance(parent, esdl.Pump) and new_data.ref_name == esdl.Pump.pumpCurveTable.name:
headerList.clear()
flow_qau = datalayer.get_or_create_qau('flow')
head_qau = datalayer.get_or_create_qau('head')
efficiency_qau = datalayer.get_or_create_qau('efficiency')
headerList.append(esdl.QuantityAndUnitReference(reference=flow_qau))
headerList.append(esdl.QuantityAndUnitReference(reference=head_qau))
headerList.append(esdl.QuantityAndUnitReference(reference=efficiency_qau))
if isinstance(parent, esdl.Valve) and new_data.ref_name == esdl.Valve.flowCoefficient.name:
headerList.clear()
position_qau = datalayer.get_or_create_qau('position')
kv_qau = datalayer.get_or_create_qau('kv_coefficient')
headerList.append(esdl.QuantityAndUnitReference(reference=position_qau))
headerList.append(esdl.QuantityAndUnitReference(reference=kv_qau))
table.name = new_data.name
table.description = new_data.description
# assume headers haven't changed
row_list = table.row
row_list.clear() # delete old data and create new
for row in new_data.rows:
r = esdl.TableRow()
r.value.extend(convert_to_float_list(row))
row_list.append(r)
#print('table row:', r.value)
#else:
# log.error('Can\'t update table: Not a pump or incorrect reference for pump curve table')
def convert_to_float_list(edouble_list: list):
return [float(i) for i in edouble_list]
|
from django.apps import AppConfig
class CertificatesConfig(AppConfig):
name = 'sgce.certificates'
verbose_name = 'Certificados'
|
for i in range(1, 1001):
if i % 10 == 7:
print(i)
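# Equivalent sketch: the same numbers (those ending in 7 between 1 and 1000)
# collected with a list comprehension instead of an explicit loop.
print([i for i in range(1, 1001) if i % 10 == 7])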
|
import anvil.tables as tables
import anvil.tables.query as q
from anvil.tables import app_tables
import anvil.server
import csv
import anvil.media
from io import BytesIO
import pandas as pd
import Global
@anvil.server.callable
def store_data(file):
filedata = file.get_bytes()
df = pd.read_csv(BytesIO(filedata))
anvil.server.launch_background_task('server_store', df.to_dict(orient="records"))
@anvil.server.callable
def get_entries(entries):
    print(entries[0], entries[1], entries[2])
@anvil.server.background_task
def server_store(dict_df):
print(dict_df[0])
for d in dict_df:
print(d)
# d is now a dict of {columnname -> value} for this row
# We use Python's **kwargs syntax to pass the whole dict as
# keyword arguments
app_tables.new_y.add_row(**d)
|
from dynamic_validation.models import Violation, ViolationStatus
from dynamic_rules.dynamic_actions import BaseDynamicAction
__all__ = ('BadViolationType', 'BaseDynamicValidation')
class BadViolationType(TypeError):
pass
class BaseDynamicValidation(BaseDynamicAction):
accepted_status = ViolationStatus.unreviewed
def run(self, *args, **kwargs):
current_violations = self.get_cleaned_violations(*args, **kwargs)
matching_violations = self.get_matching_violations(current_violations)
self.save_violations(matching_violations, current_violations)
def get_cleaned_violations(self, *args, **kwargs):
violations = self.get_current_violations(*args, **kwargs) or []
if not isinstance(violations, (tuple, list)):
violations = [violations]
if not all(isinstance(x, Violation) for x in violations):
raise BadViolationType
return violations
def get_current_violations(self, *args, **kwargs):
raise NotImplementedError
def get_existing_violations(self):
return Violation.objects.get_by_rule(self.rule_model, self.trigger_model)
def get_matching_violations(self, current_violations):
"""
        If a violation used to exist but is not among the new violations,
        we assume the issue has been fixed and delete the old record.
"""
existing_violations = self.get_existing_violations()
matched_violations = []
for existing_violation in existing_violations:
if existing_violation in current_violations:
matched_violations.append(existing_violation)
else:
existing_violation.delete()
return matched_violations
def save_violations(self, matching_violations, current_violations):
for violation in current_violations:
if violation in matching_violations:
position = matching_violations.index(violation)
existing_violation = matching_violations[position]
existing_violation.message = violation.message
existing_violation.save()
else:
violation.save()
def create_violation(self, key, message, violated_fields):
return Violation(
rule=self.rule_model,
trigger_model=self.trigger_model,
key=key,
message=message,
violated_fields=violated_fields,
acceptable=self.accepted_status,
)
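# Hypothetical sketch (not from the source): a minimal subclass showing how
# get_current_violations is expected to be implemented. The field and key
# names below are illustrative only.
class RequiredNameValidation(BaseDynamicValidation):

    def get_current_violations(self, instance, *args, **kwargs):
        # Returning a single Violation is fine; get_cleaned_violations wraps it.
        if not getattr(instance, 'name', None):
            return self.create_violation(
                key='missing_name',
                message='Name must not be blank',
                violated_fields={'name': None},
            )
        return []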
|
from django.conf import settings
from django.conf.urls import patterns, url
from django.contrib.auth import views as auth_views
from django.views.generic.base import TemplateView
from django.utils.translation import ugettext_lazy as _
from connect.accounts.forms import CustomPasswordResetForm
from connect.accounts import views
reset_email_template = 'accounts/emails/password_reset_email.html'
urlpatterns = patterns(
'',
# Auth
url(_(r'^login/$'), auth_views.login,
{'template_name': 'accounts/login.html'},
name='login'),
url(_(r'^logout/$'), auth_views.logout,
{
'next_page': '/',
'template_name': 'accounts/login.html'
},
name='logout'),
# page where user can request to reset their password
url(_(r'^password/reset/$'), auth_views.password_reset,
{'template_name': 'accounts/password_reset.html',
'post_reset_redirect': '/accounts/password/reset/done/',
'from_email': settings.EMAIL_HOST_USER,
'current_app': 'accounts',
'email_template_name': 'accounts/emails/password_reset_email.html',
'html_email_template_name': reset_email_template,
'subject_template_name': 'accounts/emails/password_reset_subject.txt',
'password_reset_form': CustomPasswordResetForm},
name="password-reset"),
# page to confirm that email has been sent
url(_(r'^password/reset/done/$'), auth_views.password_reset_done,
{
'template_name': 'accounts/password_reset_done.html',
'current_app': 'accounts',
},
name="password-reset-done"),
# page for user to change password (uses token sent in email)
url(_(r'^password/reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>.+)/$'),
auth_views.password_reset_confirm,
{
'template_name': 'accounts/password_reset_confirm.html',
'post_reset_redirect': '/accounts/password/done/',
'current_app': 'accounts',
},
name="password-reset-confirm"),
# page confirming password has been reset
url(_(r'^password/reset/complete/$'),
auth_views.password_reset_complete,
{'template_name': 'accounts/password_reset_complete.html'},
name="password-reset-complete"),
# Request and activate account
url(_(r'^request-invitation/$'), views.request_invitation,
name='request-invitation'),
url(_(r'^request-invitation/done/$'), TemplateView.as_view(
template_name='accounts/request_invitation_done.html'),
name='request-invitation-done'),
url(_(r'^activate/(?P<token>\w+)$'), views.activate_account,
name='activate-account'),
# Profile settings
url(_(r'^profile/$'), views.profile_settings, name='profile-settings'),
# Account settings
url(_(r'^update/email/$'), views.update_email, name='update-email'),
url(_(r'^update/password/$'), views.update_password,
name='update-password'),
url(_(r'^close/$'), views.close_account, name='close-account'),
url(_(r'^close/done/$'),
TemplateView.as_view(template_name='accounts/close_account_done.html'),
name='close-account-done'),
)
|
from setuptools import setup, find_packages
setup(
name='prometheus-example-exporter',
version='0.1.0.dev1',
description='Example Prometheus exporter',
url='https://github.com/anttiniskanen1/prometheus-example-exporter',
author='Antti Niskanen',
author_email='antti.niskanen@cybercom.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: System :: Monitoring',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
],
keywords='monitoring prometheus exporter',
packages=find_packages(exclude=['tests']),
install_requires=[
'jog',
'prometheus-client'
],
entry_points={
'console_scripts': [
'prometheus-example-exporter=prometheus_example_exporter:main',
],
},
)
|
from flaskr.pricing import Pricing, Context
from flaskr import typing
from dateutil.relativedelta import relativedelta
from dataclasses import dataclass, field
from typing import Optional
from decimal import Decimal
class Period(object):
@dataclass
class Result:
@dataclass
class Profits:
total: Decimal = field(default_factory=Decimal)
netValue: Decimal = field(default_factory=Decimal)
provisions: Decimal = field(default_factory=Decimal)
error: bool = False
initialNetValue: Optional[Decimal] = None
initialQuantity: Optional[Decimal] = None
finalNetValue: Optional[Decimal] = None
finalQuantity: Optional[Decimal] = None
profits: Profits = field(default_factory=Profits)
def __init__(self, startDate, finalDate):
super(Period, self).__init__()
self.startDate = startDate
self.finalDate = finalDate
self._pricingStart = Pricing(ctx=Context(finalDate=startDate))
self._pricingEnd = Pricing(ctx=Context(finalDate=finalDate))
def __call__(self, asset, profitInfo, debug=None):
self._result = self.Result()
self._debug = debug if isinstance(debug, dict) else None
self._initialConditions(asset)
self._finalConditions(asset)
if self._result.initialNetValue is None or self._result.finalNetValue is None:
self._result.error = True
return self._result
self._result.profits.total = self._result.finalNetValue - self._result.initialNetValue
        operationScope = [(op, info) for op, info in zip(asset.operations, profitInfo)
                          if self.startDate <= op.date <= self.finalDate]
for operation, operationProfit in operationScope:
self._operationToProfit(operation, operationProfit)
return self._result
def _initialConditions(self, asset):
if self._debug is not None:
self._debug['initialConditions'] = {}
debug = self._debug['initialConditions'] if self._debug else None
self._result.initialNetValue, self._result.initialQuantity = self._pricingStart(asset, debug=debug)
def _finalConditions(self, asset):
if self._debug is not None:
self._debug['finalConditions'] = {}
debug = self._debug['finalConditions'] if self._debug else None
self._result.finalNetValue, self._result.finalQuantity = self._pricingEnd(asset, debug=debug)
def _operationToProfit(self, operation, operationProfit):
def _operationNetValue(operation):
if operation.currencyConversion:
return operation.price * operation.currencyConversion
return operation.price
if operation.type == typing.Operation.Type.buy:
self._result.profits.total -= _operationNetValue(operation)
elif operation.type == typing.Operation.Type.sell:
self._result.profits.total += _operationNetValue(operation)
elif operation.type == typing.Operation.Type.receive:
self._result.profits.total += _operationNetValue(operation)
elif operation.type == typing.Operation.Type.earning:
self._result.profits.total += _operationNetValue(operation)
else:
raise NotImplementedError("Did not implement period for operation type {}" % (operation.type))
# TODO: clean this up after Profits analyzer becomes strong typed
if operationProfit and 'profits' in operationProfit['_stats']:
self._result.profits.netValue += Decimal(operationProfit['_stats']['profits']['netValue'])
self._result.profits.provisions += Decimal(operationProfit['_stats']['profits']['provisions'])
|
from common import vehicle_status_codes
# from .services.demand_prediction_service import DemandPredictionService
from config.settings import TIMESTEP, MIN_DISPATCH_CYCLE, MAX_DISPATCH_CYCLE
import numpy as np
class DispatchPolicy(object):
def __init__(self):
# self.demand_predictor = DemandPredictionService()
self.updated_at = {}
def dispatch(self, current_time, vehicles):
self.update_state(current_time, vehicles)
tbd_vehicles = self.get_tbd_vehicles(vehicles, current_time)
if len(tbd_vehicles) == 0:
return []
commands = self.get_commands(tbd_vehicles)
self.record_dispatch(tbd_vehicles.index, current_time)
return commands
def update_state(self, current_time, vehicles):
pass
def get_commands(self, tbd_vehicles):
return []
def get_tbd_vehicles(self, vehicles, current_time):
idle_vehicles = vehicles[vehicles.status == vehicle_status_codes.IDLE]
cruising_vehicles = vehicles[vehicles.status == vehicle_status_codes.CRUISING]
tbd_idle_vehicles = idle_vehicles.loc[[
vehicle_id for vehicle_id in idle_vehicles.index
if current_time - self.updated_at.get(vehicle_id, 0) >= MIN_DISPATCH_CYCLE
]]
tbd_cruising_vehicles = cruising_vehicles.loc[[
vehicle_id for vehicle_id in cruising_vehicles.index
if current_time - self.updated_at.get(vehicle_id, 0) >= MAX_DISPATCH_CYCLE
]]
tbd_vehicles = tbd_idle_vehicles.append(tbd_cruising_vehicles)
max_n = int(len(vehicles) / MIN_DISPATCH_CYCLE * TIMESTEP)
if len(tbd_vehicles) > max_n:
p = np.random.permutation(len(tbd_vehicles))
tbd_vehicles = tbd_vehicles.iloc[p[:max_n]]
return tbd_vehicles
def record_dispatch(self, vehicle_ids, current_time):
for vehicle_id in vehicle_ids:
self.updated_at[vehicle_id] = current_time
def create_command(self, vehicle_id, destination=None, offduty=False, cache_key=None):
command = {}
command["vehicle_id"] = vehicle_id
if offduty:
command["offduty"] = True
elif cache_key is not None:
command["cache"] = cache_key
else:
command["destination"] = destination
return command
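# Hypothetical sketch (not from the source): a trivial policy that sends every
# to-be-dispatched vehicle to one fixed, made-up destination.
class FixedDestinationDispatchPolicy(DispatchPolicy):

    def get_commands(self, tbd_vehicles):
        destination = (35.68, 139.77)  # illustrative coordinates only
        return [self.create_command(vehicle_id, destination)
                for vehicle_id in tbd_vehicles.index]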
|
# Copyright 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""fMRI Simulator
Simulate fMRI data for a single subject.
This code provides a set of functions necessary to produce realistic
simulations of fMRI data. There are two main steps: characterizing the
signal and generating the noise model, which are then combined to simulate
brain data. Tools are included to support the creation of different types
of signal, such as region specific differences in univariate
activity. To create the noise model the parameters can either be set
manually or can be estimated from real fMRI data with reasonable accuracy
(works best when fMRI data has not been preprocessed).
Functions:
generate_signal
Create a volume with activity, of a specified shape and either multivariate
or univariate pattern, in a specific region to represent the signal in the
neural data.
generate_stimfunction
Create a timecourse of the signal activation. This can be specified using event
onsets and durations from a timing file. This is the time course before
convolution and therefore can be at any temporal precision.
export_3_column:
Generate a three column timing file that can be used with software like FSL
to represent event onsets and durations
export_epoch_file:
Generate an epoch file from the time course which can be used as an input to
brainiak functions
convolve_hrf
Convolve the signal timecourse with the HRF to model the expected evoked
activity
apply_signal
Combine the signal volume with the HRF, thus giving the signal the temporal
properties of the HRF (such as smoothing and lag)
calc_noise
Estimate the noise properties of a given fMRI volume. Prominently, estimate
the smoothing and SFNR of the data
generate_noise
Create the noise for this run. This creates temporal, spatial task and white
noise. Various parameters can be tuned depending on need
mask_brain
Create a mask volume that has similar contrast as an fMRI image. Defaults to
use an MNI grey matter atlas but any image can be supplied to create an
estimate.
compute_signal_change
Convert the signal function into useful metric units according to metrics
used by others (Welvaert & Rosseel, 2013)
Authors:
Cameron Ellis (Princeton & Yale) 2016-2018
Chris Baldassano (Princeton) 2016-2017
Mingbo Cai (Princeton) 2017
"""
import logging
from itertools import product
from statsmodels.tsa.arima_model import ARMA
import math
import numpy as np
# See pyflakes issue #248
# https://github.com/PyCQA/pyflakes/issues/248
import numpy.matlib # noqa: F401
from numpy.linalg import LinAlgError
from pkg_resources import resource_stream
from scipy import stats
from scipy import signal
import scipy.ndimage as ndimage
import copy
__all__ = [
"apply_signal",
"calc_noise",
"compute_signal_change",
"convolve_hrf",
"export_3_column",
"export_epoch_file",
"generate_signal",
"generate_stimfunction",
"generate_noise",
"mask_brain",
"generate_1d_gaussian_rfs",
"generate_1d_rf_responses",
]
logger = logging.getLogger(__name__)
def _generate_feature(feature_type,
feature_size,
signal_magnitude,
thickness=1):
"""Generate features corresponding to signal
Generate a single feature, that can be inserted into the signal volume.
A feature is a region of activation with a specific shape such as cube
or ring
Parameters
----------
feature_type : str
What shape signal is being inserted? Options are 'cube',
'loop' (aka ring), 'cavity' (aka hollow sphere), 'sphere'.
feature_size : int
How big is the signal in diameter?
signal_magnitude : float
Set the signal size, a value of 1 means the signal is one standard
deviation of the noise
thickness : int
How thick is the surface of the loop/cavity
Returns
----------
signal : 3 dimensional array
The volume representing the signal
"""
# If the size is equal to or less than 2 then all features are the same
if feature_size <= 2:
feature_type = 'cube'
# What kind of signal is it?
if feature_type == 'cube':
# Preset the size of the signal
signal = np.ones((feature_size, feature_size, feature_size))
elif feature_type == 'loop':
# First make a cube of zeros
signal = np.zeros((feature_size, feature_size, feature_size))
# Make a mesh grid of the space
seq = np.linspace(0, feature_size - 1,
feature_size)
xx, yy = np.meshgrid(seq, seq)
# Make a disk corresponding to the whole mesh grid
xxmesh = (xx - ((feature_size - 1) / 2)) ** 2
yymesh = (yy - ((feature_size - 1) / 2)) ** 2
disk = xxmesh + yymesh
# What are the limits of the rings being made
outer_lim = disk[int((feature_size - 1) / 2), 0]
inner_lim = disk[int((feature_size - 1) / 2), thickness]
# What is the outer disk
outer = disk <= outer_lim
# What is the inner disk
inner = disk <= inner_lim
# Subtract the two disks to get a loop
loop = outer != inner
# Check if the loop is a disk
        if not np.any(inner):
            logger.warning('Loop feature reduces to a disk because the loop '
                           'is too thick')
        # If there is complete overlap then make the signal just the
        # outer one
        if not np.any(loop):
            loop = outer
# store the loop
signal[0:feature_size, 0:feature_size, int(np.round(feature_size /
2))] = loop
elif feature_type == 'sphere' or feature_type == 'cavity':
# Make a mesh grid of the space
seq = np.linspace(0, feature_size - 1,
feature_size)
xx, yy, zz = np.meshgrid(seq, seq, seq)
# Make a disk corresponding to the whole mesh grid
signal = ((xx - ((feature_size - 1) / 2)) ** 2 +
(yy - ((feature_size - 1) / 2)) ** 2 +
(zz - ((feature_size - 1) / 2)) ** 2)
# What are the limits of the rings being made
outer_lim = signal[int((feature_size - 1) / 2), int((feature_size -
1) / 2), 0]
inner_lim = signal[int((feature_size - 1) / 2), int((feature_size -
1) / 2),
thickness]
# Is the signal a sphere or a cavity?
if feature_type == 'sphere':
signal = signal <= outer_lim
else:
# Get the inner and outer sphere
outer = signal <= outer_lim
inner = signal <= inner_lim
# Subtract the two disks to get a loop
signal = outer != inner
# Check if the cavity is a sphere
            if not np.any(inner):
                logger.warning('Cavity feature reduces to a sphere because '
                               'the cavity is too thick')
            # If there is complete overlap then make the signal just the
            # outer one
            if not np.any(signal):
                signal = outer
# Assign the signal magnitude
signal = signal * signal_magnitude
# Return the signal
return signal
def _insert_idxs(feature_centre, feature_size, dimensions):
"""Returns the indices of where to put the signal into the signal volume
Parameters
----------
feature_centre : list, int
List of coordinates for the centre location of the signal
feature_size : list, int
How big is the signal's diameter.
dimensions : 3 length array, int
What are the dimensions of the volume you wish to create
Returns
----------
x_idxs : tuple
The x coordinates of where the signal is to be inserted
y_idxs : tuple
The y coordinates of where the signal is to be inserted
z_idxs : tuple
The z coordinates of where the signal is to be inserted
"""
# Set up the indexes within which to insert the signal
x_idx = [int(feature_centre[0] - (feature_size / 2)) + 1,
int(feature_centre[0] - (feature_size / 2) +
feature_size) + 1]
y_idx = [int(feature_centre[1] - (feature_size / 2)) + 1,
int(feature_centre[1] - (feature_size / 2) +
feature_size) + 1]
z_idx = [int(feature_centre[2] - (feature_size / 2)) + 1,
int(feature_centre[2] - (feature_size / 2) +
feature_size) + 1]
# Check for out of bounds
# Min Boundary
if 0 > x_idx[0]:
x_idx[0] = 0
if 0 > y_idx[0]:
y_idx[0] = 0
if 0 > z_idx[0]:
z_idx[0] = 0
# Max Boundary
if dimensions[0] < x_idx[1]:
x_idx[1] = dimensions[0]
if dimensions[1] < y_idx[1]:
y_idx[1] = dimensions[1]
if dimensions[2] < z_idx[1]:
z_idx[1] = dimensions[2]
# Return the idxs for data
return x_idx, y_idx, z_idx
def generate_signal(dimensions,
feature_coordinates,
feature_size,
feature_type,
signal_magnitude=[1],
signal_constant=1,
):
"""Generate volume containing signal
Generate signal, of a specific shape in specific regions, for a single
volume. This will then be convolved with the HRF across time
Parameters
----------
dimensions : 1d array, ndarray
What are the dimensions of the volume you wish to create
feature_coordinates : multidimensional array
What are the feature_coordinates of the signal being created.
Be aware of clipping: features far from the centre of the
brain will be clipped. If you wish to have multiple features
then list these as a features x 3 array. To create a feature of
a unique shape then supply all the individual
feature_coordinates of the shape and set the feature_size to 1.
feature_size : list, int
How big is the signal. If feature_coordinates=1 then only one value is
accepted, if feature_coordinates>1 then either one value must be
supplied or m values
feature_type : list, string
What feature_type of signal is being inserted? Options are cube,
loop, cavity, sphere. If feature_coordinates = 1 then
only one value is accepted, if feature_coordinates > 1 then either
one value must be supplied or m values
signal_magnitude : list, float
What is the (average) magnitude of the signal being generated? A
value of 1 means that the signal is one standard deviation from the
noise
signal_constant : list, bool
Is the signal constant across the feature (for univariate activity)
or is it a random pattern of a given magnitude across the feature (for
multivariate activity)
Returns
----------
volume_signal : 3 dimensional array, float
Creates a single volume containing the signal
"""
# Preset the volume
volume_signal = np.zeros(dimensions)
feature_quantity = round(feature_coordinates.shape[0])
# If there is only one feature_size value then make sure to duplicate it
# for all signals
if len(feature_size) == 1:
feature_size = feature_size * feature_quantity
# Do the same for feature_type
if len(feature_type) == 1:
feature_type = feature_type * feature_quantity
if len(signal_magnitude) == 1:
signal_magnitude = signal_magnitude * feature_quantity
# Iterate through the signals and insert in the data
for signal_counter in range(feature_quantity):
# What is the centre of this signal
if len(feature_size) > 1:
feature_centre = np.asarray(feature_coordinates[signal_counter, ])
else:
feature_centre = np.asarray(feature_coordinates)[0]
# Generate the feature to be inserted in the volume
signal = _generate_feature(feature_type[signal_counter],
feature_size[signal_counter],
signal_magnitude[signal_counter],
)
# If the signal is a random noise pattern then multiply these ones by
# a noise mask
if signal_constant == 0:
signal = signal * np.random.random([feature_size[signal_counter],
feature_size[signal_counter],
feature_size[signal_counter]])
# Pull out the idxs for where to insert the data
x_idx, y_idx, z_idx = _insert_idxs(feature_centre,
feature_size[signal_counter],
dimensions)
# Insert the signal into the Volume
volume_signal[x_idx[0]:x_idx[1], y_idx[0]:y_idx[1], z_idx[0]:z_idx[
1]] = signal
return volume_signal
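# Example sketch (commented out so the module's import-time behaviour is
# unchanged; all numbers are illustrative): a single 3-voxel cube of unit
# signal centred in a 10x10x10 volume.
#
#   vol = generate_signal(dimensions=np.array([10, 10, 10]),
#                         feature_coordinates=np.array([[5, 5, 5]]),
#                         feature_size=[3],
#                         feature_type=['cube'],
#                         signal_magnitude=[1])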
def generate_stimfunction(onsets,
event_durations,
total_time,
weights=[1],
timing_file=None,
temporal_resolution=100.0,
):
"""Return the function for the timecourse events
When do stimuli onset, how long for and to what extent should you
resolve the fMRI time course. There are two ways to create this, either
by supplying onset, duration and weight information or by supplying a
timing file (in the three column format used by FSL).
Parameters
----------
onsets : list, int
What are the timestamps (in s) for when an event you want to
generate onsets?
event_durations : list, int
What are the durations (in s) of the events you want to
generate? If there is only one value then this will be assigned
to all onsets
total_time : int
How long (in s) is the experiment in total.
weights : list, float
What is the weight for each event (how high is the box car)? If
there is only one value then this will be assigned to all onsets
timing_file : string
The filename (with path) to a three column timing file (FSL) to
make the events. Still requires total_time to work
temporal_resolution : float
How many elements per second are you modeling for the
timecourse. This is useful when you want to model the HRF at an
arbitrarily high resolution (and then downsample to your TR later).
Returns
----------
stim_function : 1 by timepoint array, float
The time course of stimulus evoked activation. This has a temporal
resolution of temporal resolution / 1.0 elements per second
"""
    # If a timing file is supplied then use it to acquire the onsets, durations and weights
if timing_file is not None:
# Read in text file line by line
with open(timing_file) as f:
            text = f.readlines()  # Pull out the file as an array of lines
# Preset
onsets = list()
event_durations = list()
weights = list()
# Pull out the onsets, weights and durations, set as a float
for line in text:
onset, duration, weight = line.strip().split()
# Check if the onset is more precise than the temporal resolution
upsampled_onset = float(onset) * temporal_resolution
# Because of float precision, the upsampled values might
            # not round as expected.
# E.g. float('1.001') * 1000 = 1000.99
            if not np.allclose(upsampled_onset, np.round(upsampled_onset)):
                logger.warning('Your onset: ' + str(onset) + ' has more '
                               'decimal points than the specified temporal '
                               'resolution can resolve. This means that '
                               'events might be missed. Consider increasing '
                               'the temporal resolution.')
onsets.append(float(onset))
event_durations.append(float(duration))
weights.append(float(weight))
# If only one duration is supplied then duplicate it for the length of
# the onset variable
if len(event_durations) == 1:
event_durations = event_durations * len(onsets)
if len(weights) == 1:
weights = weights * len(onsets)
# Check files
if np.max(onsets) > total_time:
raise ValueError('Onsets outside of range of total time.')
# Generate the time course as empty, each element is a millisecond by
# default
stimfunction = np.zeros((int(round(total_time * temporal_resolution)), 1))
# Cycle through the onsets
for onset_counter in list(range(len(onsets))):
# Adjust for the resolution
onset_idx = int(np.floor(onsets[onset_counter] * temporal_resolution))
# Adjust for the resolution
offset_idx = int(np.floor((onsets[onset_counter] + event_durations[
onset_counter]) * temporal_resolution))
# Store the weights
stimfunction[onset_idx:offset_idx, 0] = [weights[onset_counter]]
return stimfunction
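# Example sketch (commented; values are illustrative only): three 2 s events in
# a 100 s run, sampled at the default 100 elements per second.
#
#   stimfunc = generate_stimfunction(onsets=[10, 40, 70],
#                                    event_durations=[2],
#                                    total_time=100)
#   stimfunc.shape  # (10000, 1)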
def export_3_column(stimfunction,
filename,
temporal_resolution=100.0
):
""" Output a tab separated three column timing file
This produces a three column tab separated text file, with the three
columns representing onset time (s), event duration (s) and weight,
respectively. Useful if you want to run the simulated data through FEAT
analyses. In a way, this is the reverse of generate_stimfunction
Parameters
----------
stimfunction : timepoint by 1 array
The stimulus function describing the time course of events. For
instance output from generate_stimfunction.
filename : str
The name of the three column text file to be output
temporal_resolution : float
How many elements per second are you modeling with the
stimfunction?
"""
# Iterate through the stim function
stim_counter = 0
event_counter = 0
while stim_counter < stimfunction.shape[0]:
# Is it an event?
if stimfunction[stim_counter, 0] != 0:
# When did the event start?
event_onset = str(stim_counter / temporal_resolution)
# The weight of the stimulus
weight = str(stimfunction[stim_counter, 0])
# Reset
event_duration = 0
# Is the event still ongoing?
            while stim_counter < stimfunction.shape[0] and \
                    stimfunction[stim_counter, 0] != 0:
# Add one millisecond to each duration
event_duration = event_duration + 1
# Increment
stim_counter = stim_counter + 1
# How long was the event in seconds
event_duration = str(event_duration / temporal_resolution)
# Append this row to the data file
with open(filename, "a") as file:
file.write(event_onset + '\t' + event_duration + '\t' +
weight + '\n')
# Increment the number of events
event_counter = event_counter + 1
# Increment
stim_counter = stim_counter + 1
def export_epoch_file(stimfunction,
filename,
tr_duration,
temporal_resolution=100.0
):
""" Output an epoch file, necessary for some inputs into brainiak
This takes in the time course of stimulus events and outputs the epoch
file used in Brainiak. The epoch file is a way to structure the timing
information in fMRI that allows you to flexibly input different stimulus
sequences. This is a list with each entry a 3d matrix corresponding to a
participant. The dimensions of the 3d matrix are condition by epoch by
time. For the i-th condition, if its k-th epoch spans time points t_m to
t_n-1, then [i, k, t_m:t_n] are 1 in the epoch file.
Parameters
----------
stimfunction : list of timepoint by condition arrays
The stimulus function describing the time course of events. Each
list entry is from a different participant, each row is a different
timepoint (with the given temporal precision), each column is a
different condition. export_epoch_file is looking for differences in
the value of stimfunction to identify the start and end of an
epoch. If epochs in stimfunction are coded with the same weight and
there is no time between blocks then export_epoch_file won't be able to
label them as different epochs
filename : str
The name of the epoch file to be output
tr_duration : float
How long is each TR in seconds
temporal_resolution : float
How many elements per second are you modeling with the
stimfunction?
"""
# Cycle through the participants, different entries in the list
epoch_file = [0] * len(stimfunction)
for ppt_counter in range(len(stimfunction)):
# What is the time course for the participant (binarized)
stimfunction_ppt = np.abs(stimfunction[ppt_counter]) > 0
# Down sample the stim function
stride = tr_duration * temporal_resolution
stimfunction_downsampled = stimfunction_ppt[::int(stride), :]
# Calculates the number of event onsets. This uses changes in value
# to reflect different epochs. This might be false in some cases (the
# weight is non-uniform over an epoch or there is no break between
# identically weighted epochs).
epochs = 0 # Preset
conditions = stimfunction_ppt.shape[1]
for condition_counter in range(conditions):
weight_change = (np.diff(stimfunction_downsampled[:,
condition_counter], 1, 0) != 0)
# If the first or last events are 'on' then make these
# represent a epoch change
if stimfunction_downsampled[0, condition_counter] == 1:
weight_change[0] = True
if stimfunction_downsampled[-1, condition_counter] == 1:
weight_change[-1] = True
epochs += int(np.max(np.sum(weight_change, 0)) / 2)
# Get other information
trs = stimfunction_downsampled.shape[0]
# Make a timing file for this participant
epoch_file[ppt_counter] = np.zeros((conditions, epochs, trs))
# Cycle through conditions
epoch_counter = 0 # Reset and count across conditions
tr_counter = 0
while tr_counter < stimfunction_downsampled.shape[0]:
for condition_counter in range(conditions):
# Is it an event?
if tr_counter < stimfunction_downsampled.shape[0] and \
stimfunction_downsampled[
tr_counter, condition_counter] == 1:
# Add a one for this TR
epoch_file[ppt_counter][condition_counter,
epoch_counter, tr_counter] = 1
# Find the next non event value
end_idx = np.where(stimfunction_downsampled[tr_counter:,
condition_counter] == 0)[
0][0]
tr_idxs = list(range(tr_counter, tr_counter + end_idx))
# Add ones to all the trs within this event time frame
epoch_file[ppt_counter][condition_counter,
epoch_counter, tr_idxs] = 1
# Start from this index
tr_counter += end_idx
# Increment
epoch_counter += 1
# Increment the counter
tr_counter += 1
# Convert to boolean
epoch_file[ppt_counter] = epoch_file[ppt_counter].astype('bool')
# Save the file
np.save(filename, epoch_file)
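# Example sketch (commented; the file name is illustrative): write the epoch
# file for one participant whose stimfunction was sampled for a 2 s TR.
#
#   export_epoch_file(stimfunction=[stimfunc], filename='epoch_file.npy',
#                     tr_duration=2.0)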
def _double_gamma_hrf(response_delay=6,
undershoot_delay=12,
response_dispersion=0.9,
undershoot_dispersion=0.9,
response_scale=1,
undershoot_scale=0.035,
temporal_resolution=100.0,
):
"""Create the double gamma HRF with the timecourse evoked activity.
    Default values are based on Glover, 1999 and Welvaert, Durnez,
Moerkerke, Verdoolaege and Rosseel, 2011
Parameters
----------
response_delay : float
How many seconds until the peak of the HRF
undershoot_delay : float
How many seconds until the trough of the HRF
response_dispersion : float
How wide is the rising peak dispersion
undershoot_dispersion : float
How wide is the undershoot dispersion
response_scale : float
How big is the response relative to the peak
    undershoot_scale : float
        How big is the undershoot relative to the trough
temporal_resolution : float
How many elements per second are you modeling for the stimfunction
Returns
----------
hrf : multi dimensional array
A double gamma HRF to be used for convolution.
"""
hrf_length = 30 # How long is the HRF being created
# How many seconds of the HRF will you model?
hrf = [0] * int(hrf_length * temporal_resolution)
# When is the peak of the two aspects of the HRF
response_peak = response_delay * response_dispersion
undershoot_peak = undershoot_delay * undershoot_dispersion
for hrf_counter in list(range(len(hrf) - 1)):
# Specify the elements of the HRF for both the response and undershoot
resp_pow = math.pow((hrf_counter / temporal_resolution) /
response_peak, response_delay)
resp_exp = math.exp(-((hrf_counter / temporal_resolution) -
response_peak) /
response_dispersion)
response_model = response_scale * resp_pow * resp_exp
undershoot_pow = math.pow((hrf_counter / temporal_resolution) /
undershoot_peak,
undershoot_delay)
        undershoot_exp = math.exp(-((hrf_counter / temporal_resolution) -
                                    undershoot_peak) /
                                  undershoot_dispersion)
undershoot_model = undershoot_scale * undershoot_pow * undershoot_exp
# For this time point find the value of the HRF
hrf[hrf_counter] = response_model - undershoot_model
return hrf
def convolve_hrf(stimfunction,
tr_duration,
hrf_type='double_gamma',
scale_function=True,
temporal_resolution=100.0,
):
""" Convolve the specified hrf with the timecourse.
The output of this is a downsampled convolution of the stimfunction and
the HRF function. If temporal_resolution is 1 / tr_duration then the
output will be the same length as stimfunction. This time course assumes
that slice time correction has occurred and all slices have been aligned
to the middle time point in the TR.
Be aware that if scaling is on and event durations are less than the
duration of a TR then the hrf may or may not come out as anticipated.
This is because very short events would evoke a small absolute response
after convolution but if there are only short events and you scale then
this will look similar to a convolution with longer events. In general
scaling is useful, which is why it is the default, but be aware of this
edge case and if it is a concern, set the scale_function to false.
Parameters
----------
stimfunction : timepoint by feature array
What is the time course of events to be modelled in this
experiment. This can specify one or more timecourses of events.
The events can be weighted or binary
tr_duration : float
How long (in s) between each volume onset
hrf_type : str or list
Takes in a string describing the hrf that ought to be created.
Can instead take in a vector describing the HRF as it was
specified by any function. The default is 'double_gamma' in which
an initial rise and an undershoot are modelled.
scale_function : bool
Do you want to scale the function to a range of 1
temporal_resolution : float
How many elements per second are you modeling for the stimfunction
Returns
----------
signal_function : timepoint by timecourse array
The time course of the HRF convolved with the stimulus function.
This can have multiple time courses specified as different
columns in this array.
"""
# Check if it is timepoint by feature
if stimfunction.shape[0] < stimfunction.shape[1]:
logger.warning('Stimfunction may be the wrong shape')
# How will stimfunction be resized
stride = int(temporal_resolution * tr_duration)
duration = int(stimfunction.shape[0] / stride)
# Generate the hrf to use in the convolution
if hrf_type == 'double_gamma':
hrf = _double_gamma_hrf(temporal_resolution=temporal_resolution)
elif isinstance(hrf_type, list):
hrf = hrf_type
# How many timecourses are there
list_num = stimfunction.shape[1]
# Create signal functions for each list in the stimfunction
for list_counter in range(list_num):
# Perform the convolution
signal_temp = np.convolve(stimfunction[:, list_counter], hrf)
# Down sample the signal function so that it only has one element per
# TR. This assumes that all slices are collected at the same time,
# which is often the result of slice time correction. In other
# words, the output assumes slice time correction
signal_temp = signal_temp[:duration * stride]
signal_vox = signal_temp[int(stride / 2)::stride]
# Scale the function so that the peak response is 1
if scale_function:
signal_vox = signal_vox / np.max(signal_vox)
# Add this function to the stack
if list_counter == 0:
signal_function = np.zeros((len(signal_vox), list_num))
signal_function[:, list_counter] = signal_vox
return signal_function
def apply_signal(signal_function,
volume_signal,
):
"""Combine the signal volume with its timecourse
Apply the convolution of the HRF and stimulus time course to the
volume.
Parameters
----------
signal_function : timepoint by timecourse array, float
The timecourse of the signal over time. If there is only one column
then the same timecourse is applied to all non-zero voxels in
volume_signal. If there is more than one column then each column is
paired with a non-zero voxel in the volume_signal (a 3d numpy array
generated in generate_signal).
volume_signal : multi dimensional array, float
The volume containing the signal to be convolved with the same
dimensions as the output volume. The elements in volume_signal
indicate how strong each signal in signal_function are modulated by
in the output volume
Returns
----------
signal : multidimensional array, float
        The convolved signal volume with the same 3d shape as volume_signal and
the same 4th dimension as signal_function
"""
# How many timecourses are there within the signal_function
timepoints = signal_function.shape[0]
timecourses = signal_function.shape[1]
# Preset volume
signal = np.zeros([volume_signal.shape[0], volume_signal.shape[
1], volume_signal.shape[2], timepoints])
# Find all the non-zero voxels in the brain
idxs = np.where(volume_signal != 0)
if timecourses == 1:
# If there is only one time course supplied then duplicate it for
# every voxel
signal_function = np.matlib.repmat(signal_function, 1, len(idxs[0]))
elif len(idxs[0]) != timecourses:
raise IndexError('The number of non-zero voxels in the volume and '
'the number of timecourses does not match. Aborting')
# For each coordinate with a non zero voxel, fill in the timecourse for
# that voxel
for idx_counter in range(len(idxs[0])):
x = idxs[0][idx_counter]
y = idxs[1][idx_counter]
z = idxs[2][idx_counter]
# Pull out the function for this voxel
signal_function_temp = signal_function[:, idx_counter]
# Multiply the voxel value by the function timecourse
signal[x, y, z, :] = volume_signal[x, y, z] * signal_function_temp
return signal
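# Example sketch (commented; continues the illustrative 'vol' and 'stimfunc'
# values sketched above): convolve the boxcar with the default HRF and paint
# it onto every non-zero voxel of the signal volume.
#
#   sig_func = convolve_hrf(stimfunction=stimfunc, tr_duration=2.0)
#   brain = apply_signal(signal_function=sig_func, volume_signal=vol)
#   brain.shape  # (10, 10, 10, 50)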
def _calc_fwhm(volume,
mask,
voxel_size=[1.0, 1.0, 1.0],
):
""" Calculate the FWHM of a volume
Estimates the FWHM (mm) of a volume's non-masked voxels
Parameters
----------
volume : 3 dimensional array
Functional data to have the FWHM measured.
mask : 3 dimensional array
A binary mask of the brain voxels in volume
voxel_size : length 3 list, float
Millimeters per voxel for x, y and z.
Returns
-------
fwhm : float, list
Returns the FWHM of each TR in mm
"""
# What are the dimensions of the volume
dimensions = volume.shape
# Iterate through the TRs, creating a FWHM for each TR
# Preset
v_count = 0
v_sum = 0
v_sq = 0
d_sum = [0.0, 0.0, 0.0]
d_sq = [0.0, 0.0, 0.0]
d_count = [0, 0, 0]
# Pull out all the voxel coordinates
coordinates = list(product(range(dimensions[0]),
range(dimensions[1]),
range(dimensions[2])))
# Find the sum of squared error for the non-masked voxels in the brain
for i in list(range(len(coordinates))):
# Pull out this coordinate
x, y, z = coordinates[i]
# Is this within the mask?
if mask[x, y, z] > 0:
# Find the the volume sum and squared values
v_count += 1
v_sum += np.abs(volume[x, y, z])
v_sq += volume[x, y, z] ** 2
# Get the volume variance
v_var = (v_sq - ((v_sum ** 2) / v_count)) / (v_count - 1)
for i in list(range(len(coordinates))):
# Pull out this coordinate
x, y, z = coordinates[i]
# Is this within the mask?
if mask[x, y, z] > 0:
# For each xyz dimension calculate the squared
# difference of this voxel and the next
in_range = (x < dimensions[0] - 1)
in_mask = in_range and (mask[x + 1, y, z] > 0)
included = in_mask and (~np.isnan(volume[x + 1, y, z]))
if included:
d_sum[0] += volume[x, y, z] - volume[x + 1, y, z]
d_sq[0] += (volume[x, y, z] - volume[x + 1, y, z]) ** 2
d_count[0] += 1
in_range = (y < dimensions[1] - 1)
in_mask = in_range and (mask[x, y + 1, z] > 0)
included = in_mask and (~np.isnan(volume[x, y + 1, z]))
if included:
d_sum[1] += volume[x, y, z] - volume[x, y + 1, z]
d_sq[1] += (volume[x, y, z] - volume[x, y + 1, z]) ** 2
d_count[1] += 1
in_range = (z < dimensions[2] - 1)
in_mask = in_range and (mask[x, y, z + 1] > 0)
included = in_mask and (~np.isnan(volume[x, y, z + 1]))
if included:
d_sum[2] += volume[x, y, z] - volume[x, y, z + 1]
d_sq[2] += (volume[x, y, z] - volume[x, y, z + 1]) ** 2
d_count[2] += 1
# Find the variance
d_var = np.divide((d_sq - np.divide(np.power(d_sum, 2),
d_count)), (np.add(d_count, -1)))
o_var = np.divide(-1, (4 * np.log(1 - (0.5 * d_var / v_var))))
fwhm3 = np.sqrt(o_var) * 2 * np.sqrt(2 * np.log(2))
fwhm = np.prod(np.multiply(fwhm3, voxel_size)) ** (1 / 3)
return fwhm
def _calc_sfnr(volume,
mask,
):
""" Calculate the the SFNR of a volume
Calculates the Signal to Fluctuation Noise Ratio, the mean divided
by the detrended standard deviation of each brain voxel. Based on
Friedman and Glover, 2006
Parameters
----------
volume : 4d array, float
Take a volume time series
mask : 3d array, binary
A binary mask the same size as the volume
Returns
-------
snr : float
The SFNR of the volume
"""
# Make a matrix of brain voxels by time
brain_voxels = volume[mask > 0]
# Take the means of each voxel over time
mean_voxels = np.nanmean(brain_voxels, 1)
# Detrend (second order polynomial) the voxels over time and then
# calculate the standard deviation.
order = 2
seq = np.linspace(1, brain_voxels.shape[1], brain_voxels.shape[1])
detrend_poly = np.polyfit(seq, brain_voxels.transpose(), order)
# Detrend for each voxel
detrend_voxels = np.zeros(brain_voxels.shape)
for voxel in range(brain_voxels.shape[0]):
trend = detrend_poly[0, voxel] * seq ** 2 + detrend_poly[1, voxel] * \
seq + detrend_poly[2, voxel]
detrend_voxels[voxel, :] = brain_voxels[voxel, :] - trend
std_voxels = np.nanstd(detrend_voxels, 1)
# Calculate the sfnr of all voxels across the brain
sfnr_voxels = mean_voxels / std_voxels
# Return the average sfnr
return np.mean(sfnr_voxels)
def _calc_snr(volume,
mask,
dilation=5,
reference_tr=None,
):
""" Calculate the the SNR of a volume
Calculates the Signal to Noise Ratio, the mean of brain voxels
divided by the standard deviation across non-brain voxels. Specify a TR
value to calculate the mean and standard deviation for that TR. To
calculate the standard deviation of non-brain voxels we can subtract
any baseline structure away first, hence getting at deviations due to the
system noise and not something like high baseline values in non-brain
parts of the body.
Parameters
----------
volume : 4d array, float
Take a volume time series
mask : 3d array, binary
A binary mask the same size as the volume
dilation : int
How many binary dilations do you want to perform on the mask to
determine the non-brain voxels. If you increase this the SNR
increases and the non-brain voxels (after baseline subtraction) more
closely resemble a gaussian
reference_tr : int or list
Specifies the TR to calculate the SNR for. If multiple are supplied
then it will use the average of them.
Returns
-------
snr : float
The SNR of the volume
"""
# If no TR is specified then take all of them
if reference_tr is None:
reference_tr = list(range(volume.shape[3]))
# Dilate the mask in order to ensure that non-brain voxels are far from
# the brain
if dilation > 0:
mask_dilated = ndimage.morphology.binary_dilation(mask,
iterations=dilation)
else:
mask_dilated = mask
# Make a matrix of brain and non_brain voxels, selecting the timepoint/s
brain_voxels = volume[mask > 0][:, reference_tr]
nonbrain_voxels = (volume[:, :, :, reference_tr]).astype('float64')
# If you have multiple TRs
if len(brain_voxels.shape) > 1:
brain_voxels = np.mean(brain_voxels, 1)
nonbrain_voxels = np.mean(nonbrain_voxels, 3)
nonbrain_voxels = nonbrain_voxels[mask_dilated == 0]
# Take the means of each voxel over time
mean_voxels = np.nanmean(brain_voxels)
# Find the standard deviation of the voxels
std_voxels = np.nanstd(nonbrain_voxels)
# Return the snr
return mean_voxels / std_voxels
def _calc_ARMA_noise(volume,
mask,
auto_reg_order=1,
ma_order=1,
sample_num=100,
):
""" Calculate the the ARMA noise of a volume
This calculates the autoregressive and moving average noise of the volume
over time by sampling brain voxels and averaging them.
Parameters
----------
volume : 4d array or 1d array, float
Take a volume time series to extract the middle slice from the
middle TR. Can also accept a one dimensional time course (mask input
is then ignored).
mask : 3d array, binary
A binary mask the same size as the volume
    auto_reg_order : int
        What order of the autoregression do you want to estimate
    ma_order : int
        What order of the moving average do you want to estimate
sample_num : int
How many voxels would you like to sample to calculate the AR values.
The AR distribution of real data is approximately exponential maxing
at 1. From analyses across a number of participants, to get less
than 3% standard deviation of error from the true mean it is
necessary to sample at least 100 voxels.
Returns
-------
auto_reg_rho : list of floats
Rho of a specific order for the autoregression noise in the data
    ma_rho : list of floats
Moving average of a specific order for the data
"""
# Pull out the non masked voxels
if len(volume.shape) > 1:
brain_timecourse = volume[mask > 0]
else:
# If a 1 dimensional input is supplied then reshape it to make the
# timecourse
brain_timecourse = volume.reshape(1, len(volume))
# Identify some brain voxels to assess
voxel_idxs = list(range(brain_timecourse.shape[0]))
np.random.shuffle(voxel_idxs)
# If there are more samples than voxels, take all of the voxels
if len(voxel_idxs) < sample_num:
sample_num = len(voxel_idxs)
auto_reg_rho_all = np.zeros((sample_num, auto_reg_order))
ma_all = np.zeros((sample_num, ma_order))
for voxel_counter in range(sample_num):
# Get the timecourse and demean it
timecourse = brain_timecourse[voxel_idxs[voxel_counter], :]
demeaned_timecourse = timecourse - timecourse.mean()
# Pull out the ARMA values (depends on order)
try:
model = ARMA(demeaned_timecourse, [auto_reg_order, ma_order])
model_fit = model.fit(disp=False)
params = model_fit.params
except (ValueError, LinAlgError):
params = np.ones(auto_reg_order + ma_order + 1) * np.nan
# Add to the list
auto_reg_rho_all[voxel_counter, :] = params[1:auto_reg_order + 1]
ma_all[voxel_counter, :] = params[auto_reg_order + 1:]
# Average all of the values and then convert them to a list
auto_reg_rho = np.nanmean(auto_reg_rho_all, 0).tolist()
ma_rho = np.nanmean(ma_all, 0).tolist()
# Return the coefficients
return auto_reg_rho, ma_rho
def calc_noise(volume,
mask,
template,
noise_dict=None,
):
""" Calculates the noise properties of the volume supplied.
This estimates what noise properties the volume has. For instance it
determines the spatial smoothness, the autoregressive noise, system
noise etc. Read the doc string for generate_noise to understand how
these different types of noise interact.
Parameters
----------
volume : 4d numpy array, float
Take in a functional volume (either the file name or the numpy
array) to be used to estimate the noise properties of this
mask : 3d numpy array, binary
A binary mask of the brain, the same size as the volume
template : 3d array, float
A continuous (0 -> 1) volume describing the likelihood a voxel is in
the brain. This can be used to contrast the brain and non brain.
noise_dict : dict
The initialized dictionary of the calculated noise parameters of the
provided dataset (usually it is only the voxel size)
Returns
-------
noise_dict : dict
Return a dictionary of the calculated noise parameters of the provided
dataset
"""
# Check the inputs
if template.max() > 1.1:
raise ValueError('Template out of range')
# Create the mask if not supplied and set the mask size
if mask is None:
raise ValueError('Mask not supplied')
# Update noise dict if it is not yet created
if noise_dict is None:
noise_dict = {'voxel_size': [1.0, 1.0, 1.0]}
elif 'voxel_size' not in noise_dict:
noise_dict['voxel_size'] = [1.0, 1.0, 1.0]
# What is the max activation of the mean of this voxel (allows you to
# convert between the mask and the mean of the brain volume)
noise_dict['max_activity'] = np.nanmax(np.mean(volume, 3))
# Calculate the temporal variability of the volume
noise_dict['auto_reg_rho'], noise_dict['ma_rho'] = _calc_ARMA_noise(
volume, mask)
# Set it such that all of the temporal variability will be accounted for
# by the AR component
noise_dict['auto_reg_sigma'] = 1
# Preset these values to be zero, as in you are not attempting to
# simulate them
noise_dict['physiological_sigma'] = 0
noise_dict['task_sigma'] = 0
noise_dict['drift_sigma'] = 0
# Calculate the sfnr
noise_dict['sfnr'] = _calc_sfnr(volume,
mask,
)
# Calculate the fwhm on a subset of volumes
if volume.shape[3] > 100:
# Take only 100 shuffled TRs
trs = np.random.choice(volume.shape[3], size=100, replace=False)
else:
trs = list(range(0, volume.shape[3]))
# Go through the trs and pull out the fwhm
fwhm = [0] * len(trs)
for tr in range(len(trs)):
fwhm[tr] = _calc_fwhm(volume[:, :, :, trs[tr]],
mask,
noise_dict['voxel_size'],
)
# Keep only the mean
noise_dict['fwhm'] = np.mean(fwhm)
noise_dict['snr'] = _calc_snr(volume,
mask,
)
# Return the noise dictionary
return noise_dict
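# Hedged usage sketch for calc_noise: `func_volume` is a hypothetical 4d
# functional dataset; the mask and template would typically come from
# mask_brain (defined further down in this module).
#
#     mask, template = mask_brain(func_volume, mask_self=True)
#     noise_dict = calc_noise(func_volume, mask, template,
#                             noise_dict={'voxel_size': [3.0, 3.0, 3.0]})
#     # noise_dict now contains entries such as 'snr', 'sfnr', 'fwhm',
#     # 'auto_reg_rho' and 'ma_rho' describing the measured noise.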
def _generate_noise_system(dimensions_tr,
spatial_sd,
temporal_sd,
spatial_noise_type='gaussian',
temporal_noise_type='gaussian',
):
"""Generate the scanner noise
Generate system noise, either rician, gaussian or exponential, for the
scanner. Generates a distribution with a SD of 1. If you look at the
distribution of non-brain voxel intensity in modern scans you will see
it is rician. However, depending on how you have calculated the SNR and
whether the template is being used you will want to use this function
differently: the voxels outside the brain tend to be stable over time and
usually reflect structure in the MR signal (e.g. the
baseline MR of the head coil or skull). Hence the template captures this
rician noise structure. If you are adding the machine noise to the
template, as is done in generate_noise, then you are likely doubling up
on the addition of machine noise. In such cases, machine noise seems to
be better modelled by gaussian noise on top of this rician structure.
Parameters
----------
dimensions_tr : n length array, int
What are the dimensions of the volume you wish to insert
noise into. This can be a volume of any size
spatial_sd : float
What is the standard deviation in space of the noise volume to be
generated
temporal_sd : float
What is the standard deviation in time of the noise volume to be
generated
    spatial_noise_type : str
        String specifying the spatial noise type. If you aren't specifying
        the noise template then 'rician' is the appropriate model of noise.
        However, if you are subtracting the template, as is the default,
        then you should use 'gaussian'. (If the dilation parameter of
        _calc_snr is <10 then gaussian is only an approximation)
    temporal_noise_type : str
        String specifying the temporal noise type, with the same options as
        above.
Returns
----------
system_noise : multidimensional array, float
Create a volume with system noise
"""
def noise_volume(dimensions,
noise_type,
):
if noise_type == 'rician':
# Generate the Rician noise (has an SD of 1)
noise = stats.rice.rvs(b=0, loc=0, scale=1.527, size=dimensions)
elif noise_type == 'exponential':
# Make an exponential distribution (has an SD of 1)
noise = stats.expon.rvs(0, scale=1, size=dimensions)
elif noise_type == 'gaussian':
noise = np.random.randn(np.prod(dimensions)).reshape(dimensions)
# Return the noise
return noise
    # Use the xyz dimensions with a singleton time axis so that the spatial
    # noise is constant across time
dimensions = np.asarray([dimensions_tr[0],
dimensions_tr[1],
dimensions_tr[2],
1])
# Generate noise
spatial_noise = noise_volume(dimensions, spatial_noise_type)
temporal_noise = noise_volume(dimensions_tr, temporal_noise_type)
# Make the system noise have a specific spatial variability
spatial_noise *= spatial_sd
# Set the size of the noise
temporal_noise *= temporal_sd
# The mean in time of system noise needs to be zero, so subtract the
# means of the temporal noise in time
temporal_noise_mean = np.mean(temporal_noise, 3).reshape(dimensions[0],
dimensions[1],
dimensions[2],
1)
temporal_noise = temporal_noise - temporal_noise_mean
# Save the combination
system_noise = spatial_noise + temporal_noise
return system_noise
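# Minimal sketch of the system-noise generator above; the dimensions and
# standard deviations are illustrative values, not recommendations.
#
#     sys_noise = _generate_noise_system(dimensions_tr=(64, 64, 36, 200),
#                                        spatial_sd=5.0,
#                                        temporal_sd=10.0)
#     # sys_noise sums a time-constant spatial draw (scaled by spatial_sd)
#     # and zero-mean-in-time temporal noise (scaled by temporal_sd).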
def _generate_noise_temporal_task(stimfunction_tr,
motion_noise='gaussian',
):
"""Generate the signal dependent noise
Create noise specific to the signal, for instance there is variability
in how the signal manifests on each event
Parameters
----------
stimfunction_tr : 1 Dimensional array
This is the timecourse of the stimuli in this experiment,
each element represents a TR
motion_noise : str
What type of noise will you generate? Can be gaussian or rician
Returns
----------
noise_task : one dimensional array, float
Generates the temporal task noise timecourse
"""
# Make the noise to be added
stimfunction_tr = stimfunction_tr != 0
if motion_noise == 'gaussian':
noise = stimfunction_tr * np.random.normal(0, 1,
size=stimfunction_tr.shape)
elif motion_noise == 'rician':
noise = stimfunction_tr * stats.rice.rvs(0, 1,
size=stimfunction_tr.shape)
noise_task = stimfunction_tr + noise
# Normalize
noise_task = stats.zscore(noise_task).flatten()
return noise_task
def _generate_noise_temporal_drift(trs,
tr_duration,
basis="discrete_cos",
period=150,
):
"""Generate the drift noise
Create a trend (either sine or discrete_cos), of a given period and random
phase, to represent the drift of the signal over time
Parameters
----------
trs : int
How many volumes (aka TRs) are there
tr_duration : float
        How long in seconds is each volume acquisition
basis : str
What is the basis function for the drift. Could be made of discrete
cosines (for longer run durations, more basis functions are
created) or a sine wave.
period : int
How many seconds is the period of oscillation of the drift
Returns
----------
noise_drift : one dimensional array, float
The drift timecourse of activity
"""
# Calculate drift differently depending on the basis function
if basis == 'discrete_cos':
# Specify each tr in terms of its phase with the given period
timepoints = np.linspace(0, trs - 1, trs)
timepoints = ((timepoints * tr_duration) / period) * 2 * np.pi
# Specify the other timing information
duration = trs * tr_duration
        basis_funcs = int(np.floor(duration / period))  # How many bases you have
if basis_funcs == 0:
err_msg = 'Too few timepoints (' + str(trs) + ') to accurately ' \
'model drift'
logger.warning(err_msg)
basis_funcs = 1
noise_drift = np.zeros((timepoints.shape[0], basis_funcs))
for basis_counter in list(range(1, basis_funcs + 1)):
# What steps do you want to take for this basis function
timepoints_basis = (timepoints/basis_counter) + (np.random.rand()
* np.pi * 2)
# Store the drift from this basis func
noise_drift[:, basis_counter - 1] = np.cos(timepoints_basis)
# Average the drift
noise_drift = np.mean(noise_drift, 1)
elif basis == 'sine':
# Calculate the cycles of the drift for a given function.
cycles = trs * tr_duration / period
# Create a sine wave with a given number of cycles and random phase
timepoints = np.linspace(0, trs - 1, trs)
phaseshift = np.pi * 2 * np.random.random()
phase = (timepoints / (trs - 1) * cycles * 2 * np.pi) + phaseshift
noise_drift = np.sin(phase)
# Normalize so the sigma is 1
noise_drift = stats.zscore(noise_drift)
# Return noise
return noise_drift
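# Sketch of the drift generator: with 300 TRs at a 2 s TR and the default
# 150 s period, the discrete cosine basis has floor(300 * 2 / 150) = 4
# cosines with random phases, which are averaged and z-scored.
#
#     drift = _generate_noise_temporal_drift(trs=300, tr_duration=2.0)
#     # drift is a 300-element array with a standard deviation of 1.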
def _generate_noise_temporal_autoregression(timepoints,
noise_dict,
dimensions,
mask,
):
"""Generate the autoregression noise
Make a slowly drifting timecourse with the given autoregression
parameters. This can take in both AR and MA components
Parameters
----------
timepoints : 1 Dimensional array
What time points are sampled by a TR
noise_dict : dict
A dictionary specifying the types of noise in this experiment. The
noise types interact in important ways. First, all noise types
ending with sigma (e.g. motion sigma) are mixed together in
_generate_temporal_noise. The sigma values describe the proportion of
mixing of these elements. However critically, SFNR is the
parameter that describes how much noise these components contribute
to the brain. If you set the noise dict to matched then it will fit
the parameters to match the participant as best as possible.
dimensions : 3 length array, int
What is the shape of the volume to be generated
mask : 3 dimensional array, binary
The masked brain, thresholded to distinguish brain and non-brain
Returns
----------
noise_autoregression : one dimensional array, float
Generates the autoregression noise timecourse
"""
# Pull out the relevant noise parameters
auto_reg_rho = noise_dict['auto_reg_rho']
ma_rho = noise_dict['ma_rho']
# Specify the order based on the number of rho supplied
auto_reg_order = len(auto_reg_rho)
ma_order = len(ma_rho)
    # This code assumes that the AR order is at least as high as the MA order
if ma_order > auto_reg_order:
msg = 'MA order (%d) is greater than AR order (%d). Cannot run.' % (
ma_order, auto_reg_order)
raise ValueError(msg)
# Generate a random variable at each time point that is a decayed value
# of the previous time points
noise_autoregression = np.zeros((dimensions[0], dimensions[1],
dimensions[2], len(timepoints)))
err_vols = np.zeros((dimensions[0], dimensions[1], dimensions[2],
len(timepoints)))
for tr_counter in range(len(timepoints)):
# Create a brain shaped volume with appropriate smoothing properties
noise = _generate_noise_spatial(dimensions=dimensions,
mask=mask,
fwhm=noise_dict['fwhm'],
)
# Store all of the noise volumes
err_vols[:, :, :, tr_counter] = noise
if tr_counter == 0:
noise_autoregression[:, :, :, tr_counter] = noise
else:
# Preset the volume to collect the AR estimated process
AR_vol = np.zeros((dimensions[0], dimensions[1], dimensions[2]))
# Iterate through both the AR and MA values
for pCounter in list(range(1, auto_reg_order + 1)):
past_TR = int(tr_counter - pCounter)
if tr_counter - pCounter >= 0:
# Pull out a previous TR
past_vols = noise_autoregression[:, :, :, past_TR]
# Add the discounted previous volume
AR_vol += past_vols * auto_reg_rho[pCounter - 1]
# If the MA order has at least this many coefficients
# then consider the error terms
if ma_order >= pCounter:
# Pull out a previous TR
past_noise = err_vols[:, :, :, past_TR]
# Add the discounted previous noise
AR_vol += past_noise * ma_rho[pCounter - 1]
noise_autoregression[:, :, :, tr_counter] = AR_vol + noise
# Z score the data so that all of the standard deviations of the voxels
# are one (but the ARMA coefs are unchanged)
noise_autoregression = stats.zscore(noise_autoregression, 3)
return noise_autoregression
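# The volumetric recursion above is the 3d analogue of a 1d ARMA process.
# A hedged 1d illustration of the same update rule (one AR and one MA term,
# coefficients chosen arbitrarily):
#
#     rho, theta = 0.5, 0.1
#     err = np.random.randn(200)
#     series = np.zeros(200)
#     series[0] = err[0]
#     for t in range(1, 200):
#         series[t] = rho * series[t - 1] + theta * err[t - 1] + err[t]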
def _generate_noise_temporal_phys(timepoints,
resp_freq=0.2,
heart_freq=1.17,
):
"""Generate the physiological noise.
Create noise representing the heart rate and respiration of the data.
Default values based on Walvaert, Durnez, Moerkerke, Verdoolaege and
Rosseel, 2011
Parameters
----------
timepoints : 1 Dimensional array
What time points, in seconds, are sampled by a TR
resp_freq : float
What is the frequency of respiration (in Hz)
heart_freq : float
What is the frequency of heart beat (in Hz)
Returns
----------
noise_phys : one dimensional array, float
Generates the physiological temporal noise timecourse
"""
resp_phase = (np.random.rand(1) * 2 * np.pi)[0]
heart_phase = (np.random.rand(1) * 2 * np.pi)[0]
# Find the rate for each timepoint
resp_rate = (resp_freq * 2 * np.pi)
heart_rate = (heart_freq * 2 * np.pi)
# Calculate the radians for each variable at this
# given TR
resp_radians = np.multiply(timepoints, resp_rate) + resp_phase
heart_radians = np.multiply(timepoints, heart_rate) + heart_phase
# Combine the two types of noise and append
noise_phys = np.cos(resp_radians) + np.sin(heart_radians)
# Normalize
noise_phys = stats.zscore(noise_phys)
return noise_phys
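# Sketch: physiological noise for 200 TRs sampled every 2 s, combining the
# default 0.2 Hz respiration and 1.17 Hz cardiac oscillations.
#
#     timepoints = np.linspace(0, (200 - 1) * 2.0, 200)
#     phys = _generate_noise_temporal_phys(timepoints)
#     # phys is a z-scored 200-element timecourse.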
def _generate_noise_spatial(dimensions,
mask=None,
fwhm=4.0,
):
"""Generate code for Gaussian Random Fields.
Adapted from code found here:
http://andrewwalker.github.io/statefultransitions/post/gaussian-fields/
with permission from the author:
https://twitter.com/walkera101/status/785578499440377858. Original code
comes from http://mathematica.stackexchange.com/questions/4829
/efficiently-generating-n-d-gaussian-random-fields with a WTFPL (
http://www.wtfpl.net).
Parameters
----------
dimensions : 3 length array, int
        What is the shape of the volume to be generated. If the x, y and z
        dimensions are not all equal then the field would be compressed
        along the smaller axes, so the noise is generated at the largest
        dimension and then cropped back down to the requested shape.
mask : 3 dimensional array, binary
The masked brain, thresholded to distinguish brain and non-brain
fwhm : float
What is the full width half max of the gaussian fields being created.
This is converted into a sigma which is used in this function.
However, this conversion was found empirically by testing values of
sigma and how it relates to fwhm values. The relationship that would be
found in such a test depends on the size of the brain (bigger brains
can have bigger fwhm).
However, small errors shouldn't matter too much since the fwhm
generated here can only be approximate anyway: firstly, although the
distribution that is being drawn from is set to this value,
this will manifest differently on every draw. Secondly, because of
the masking and dimensions of the generated volume, this does not
        behave simply: wrapping effects matter (the outputs are
closer to the input value if you have no mask).
Use _calc_fwhm on this volume alone if you have concerns about the
accuracy of the fwhm.
Returns
----------
noise_spatial : 3d array, float
Generates the spatial noise volume for these parameters
"""
# Check the input is correct
if len(dimensions) == 4:
logger.warning('4 dimensions have been supplied, only using 3')
dimensions = dimensions[0:3]
    # If the dimensions are not all equal then generate at the largest size
if dimensions[0] != dimensions[1] or dimensions[1] != dimensions[2]:
max_dim = np.max(dimensions)
new_dim = (max_dim, max_dim, max_dim)
else:
new_dim = dimensions
def _logfunc(x, a, b, c):
"""Solve for y given x for log function.
Parameters
----------
x : float
x value of log function
a : float
x shift of function
b : float
rate of change
c : float
y shift of function
Returns
----------
float
y value of log function
"""
return (np.log(x + a) / np.log(b)) + c
def _fftIndgen(n):
"""# Specify the fft coefficents
Parameters
----------
n : int
Dim size to estimate over
Returns
----------
array of ints
fft indexes
"""
# Pull out the ascending and descending indexes
ascending = np.linspace(0, int(n / 2), int(n / 2 + 1))
elements = int(np.ceil(n / 2 - 1)) # Round up so that len(output)==n
descending = np.linspace(-elements, -1, elements)
return np.concatenate((ascending, descending))
def _Pk2(idxs, sigma):
"""# Specify the amplitude given the fft coefficents
Parameters
----------
idxs : 3 by voxel array int
fft indexes
sigma : float
spatial sigma
Returns
----------
amplitude : 3 by voxel array
amplitude of the fft coefficients
"""
# The first set of idxs ought to be zero so make the first value
# zero to avoid a divide by zero error
amp_start = np.array((0))
# Compute the amplitude of the function for a series of indices
amp_end = np.sqrt(np.sqrt(np.sum(idxs[:, 1:] ** 2, 0)) ** (-1 * sigma))
amplitude = np.append(amp_start, amp_end)
# Return the output
return amplitude
    # Convert from fwhm to sigma (relationship discovered empirically, only an
# approximation up to sigma = 0 -> 5 which corresponds to fwhm = 0 -> 8,
# relies on an assumption of brain size).
spatial_sigma = _logfunc(fwhm, -0.36778719, 2.10601011, 2.15439247)
noise = np.fft.fftn(np.random.normal(size=new_dim))
# Create a meshgrid of the object
fft_vol = np.meshgrid(_fftIndgen(new_dim[0]), _fftIndgen(new_dim[1]),
_fftIndgen(new_dim[2]))
# Reshape the data into a vector
fft_vec = np.asarray((fft_vol[0].flatten(), fft_vol[1].flatten(), fft_vol[
2].flatten()))
# Compute the amplitude for each element in the grid
amp_vec = _Pk2(fft_vec, spatial_sigma)
# Reshape to be a brain volume
amplitude = amp_vec.reshape(new_dim)
# The output
noise_fft = (np.fft.ifftn(noise * amplitude)).real
# Fix the dimensionality of the data (if necessary)
noise_spatial = noise_fft[:dimensions[0], :dimensions[1], :dimensions[2]]
# Mask or not, then z score
if mask is not None:
# Mask the output
noise_spatial *= mask
# Z score the specific to the brain
noise_spatial[mask > 0] = stats.zscore(noise_spatial[mask > 0])
else:
# Take the grand mean/std and use for z scoring
grand_mean = (noise_spatial).mean()
grand_std = (noise_spatial).std()
noise_spatial = (noise_spatial - grand_mean) / grand_std
return noise_spatial
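# Hedged usage sketch: a single smooth 3d noise volume. `brain_mask` is a
# hypothetical binary mask with the same dimensions.
#
#     spatial = _generate_noise_spatial(dimensions=(64, 64, 36),
#                                       mask=brain_mask,
#                                       fwhm=4.0)
#     # Voxels inside the mask are z-scored; voxels outside are zero.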
def _generate_noise_temporal(stimfunction_tr,
tr_duration,
dimensions,
template,
mask,
noise_dict
):
"""Generate the temporal noise
Generate the time course of the average brain voxel. To change the
relative mixing of the noise components, change the sigma's specified
below.
Parameters
----------
stimfunction_tr : 1 Dimensional array
This is the timecourse of the stimuli in this experiment,
each element represents a TR
tr_duration : int
How long is a TR, in seconds
dimensions : 3 length array, int
What is the shape of the volume to be generated
template : 3d array, float
A continuous (0 -> 1) volume describing the likelihood a voxel is in
the brain. This can be used to contrast the brain and non brain.
mask : 3 dimensional array, binary
The masked brain, thresholded to distinguish brain and non-brain
noise_dict : dict
A dictionary specifying the types of noise in this experiment. The
noise types interact in important ways. First, all noise types
ending with sigma (e.g. motion sigma) are mixed together in
_generate_temporal_noise. The sigma values describe the proportion of
mixing of these elements. However critically, SFNR is the
parameter that describes how much noise these components contribute
to the brain. If you set the noise dict to matched then it will fit
the parameters to match the participant as best as possible.
Returns
----------
noise_temporal : one dimensional array, float
Generates the temporal noise timecourse for these parameters
"""
# Set up common parameters
# How many TRs are there
trs = len(stimfunction_tr)
# What time points are sampled by a TR?
timepoints = list(np.linspace(0, (trs - 1) * tr_duration, trs))
# Preset the volume
noise_volume = np.zeros((dimensions[0], dimensions[1], dimensions[2], trs))
# Generate the drift noise
if noise_dict['drift_sigma'] != 0:
# Calculate the drift time course
noise = _generate_noise_temporal_drift(trs,
tr_duration,
)
# Create a volume with the drift properties
volume = np.ones(dimensions)
# Combine the volume and noise
noise_volume += np.multiply.outer(volume, noise) * noise_dict[
'drift_sigma']
# Generate the physiological noise
if noise_dict['physiological_sigma'] != 0:
# Calculate the physiological time course
noise = _generate_noise_temporal_phys(timepoints,
)
# Create a brain shaped volume with similar smoothing properties
volume = _generate_noise_spatial(dimensions=dimensions,
mask=mask,
fwhm=noise_dict['fwhm'],
)
# Combine the volume and noise
noise_volume += np.multiply.outer(volume, noise) * noise_dict[
'physiological_sigma']
# Generate the AR noise
if noise_dict['auto_reg_sigma'] != 0:
# Calculate the AR time course volume
noise = _generate_noise_temporal_autoregression(timepoints,
noise_dict,
dimensions,
mask,
)
# Combine the volume and noise
noise_volume += noise * noise_dict['auto_reg_sigma']
# Generate the task related noise
if noise_dict['task_sigma'] != 0 and np.sum(stimfunction_tr) > 0:
# Calculate the task based noise time course
noise = _generate_noise_temporal_task(stimfunction_tr,
)
# Create a brain shaped volume with similar smoothing properties
volume = _generate_noise_spatial(dimensions=dimensions,
mask=mask,
fwhm=noise_dict['fwhm'],
)
# Combine the volume and noise
noise_volume += np.multiply.outer(volume, noise) * noise_dict[
'task_sigma']
# Finally, z score each voxel so things mix nicely
noise_volume = stats.zscore(noise_volume, 3)
# If it is a nan it is because you just divided by zero (since some
# voxels are zeros in the template)
noise_volume[np.isnan(noise_volume)] = 0
return noise_volume
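# Hedged sketch of the temporal-noise mixer above: `stim_tr`, `template`,
# `brain_mask` and `noise_dict` are hypothetical inputs with the shapes
# described in the docstring (noise_dict as produced by _noise_dict_update).
#
#     temporal = _generate_noise_temporal(stimfunction_tr=stim_tr,
#                                         tr_duration=2.0,
#                                         dimensions=(64, 64, 36),
#                                         template=template,
#                                         mask=brain_mask,
#                                         noise_dict=noise_dict)
#     # temporal is x by y by z by len(stim_tr), z-scored over time per voxel.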
def mask_brain(volume,
template_name=None,
mask_threshold=None,
mask_self=True,
):
""" Mask the simulated volume
This creates a mask specifying the approximate likelihood that a voxel is
part of the brain. All values are bounded to the range of 0 to 1. An
appropriate threshold to isolate brain voxels is >0.2. Critically,
the data that should be used to create a template shouldn't already be
    masked/skull stripped. If it is then it will give inaccurate estimates
    of non-brain noise and corrupt estimates of SNR.
Parameters
----------
volume : multidimensional array
Either numpy array of a volume or a tuple describing the dimensions
of the mask to be created
template_name : str
What is the path to the template to be loaded? If empty then it
defaults to an MNI152 grey matter mask. This is ignored if mask_self
is True.
mask_threshold : float
What is the threshold (0 -> 1) for including a voxel in the mask? If
        None then the program will try to identify the last wide peak in a
        histogram of the template (assumed to be the brain voxels) and take
        the minimum before that peak as the threshold. This won't work when
        the data are not bimodal.
mask_self : bool or None
If set to true then it makes a mask from the volume supplied (by
averaging across time points and changing the range). If it is set
to false then it will use the template_name as an input.
Returns
----------
mask : 3 dimensional array, binary
The masked brain, thresholded to distinguish brain and non-brain
template : 3 dimensional array, float
A continuous (0 -> 1) volume describing the likelihood a voxel is in
the brain. This can be used to contrast the brain and non brain.
"""
# If the volume supplied is a 1d array then output a volume of the
# supplied dimensions
if len(volume.shape) == 1:
volume = np.ones(volume)
# Load in the mask
if mask_self is True:
mask_raw = volume
elif template_name is None:
mask_raw = np.load(resource_stream(__name__, "grey_matter_mask.npy"))
else:
mask_raw = np.load(template_name)
    # Make the mask 3d
if len(mask_raw.shape) == 3:
mask_raw = np.array(mask_raw)
elif len(mask_raw.shape) == 4 and mask_raw.shape[3] == 1:
mask_raw = np.array(mask_raw[:, :, :, 0])
else:
mask_raw = np.mean(mask_raw, 3)
    # Find the max value (so you can calculate these as proportions)
mask_max = mask_raw.max()
# Make sure the mask values range from 0 to 1 (make out of max of volume
# so that this is invertible later)
mask_raw = mask_raw / mask_max
    # If there is only one brain volume then make this a fourth dimension
if len(volume.shape) == 3:
temp = np.zeros([volume.shape[0], volume.shape[1], volume.shape[2], 1])
temp[:, :, :, 0] = volume
volume = temp
    # Reshape the mask to be the same size as the brain
brain_dim = volume.shape
mask_dim = mask_raw.shape
zoom_factor = (brain_dim[0] / mask_dim[0],
brain_dim[1] / mask_dim[1],
brain_dim[2] / mask_dim[2],
)
# Scale the mask according to the input brain
# You might get a warning if the zoom_factor is not an integer but you
# can safely ignore that.
template = ndimage.zoom(mask_raw, zoom_factor, order=2)
template[template < 0] = 0
# If the mask threshold is not supplied then guess it is a minima
# between the two peaks of the bimodal distribution of voxel activity
if mask_threshold is None:
# How many bins on either side of a peak will be compared
order = 5
# Make the histogram
template_vector = template.reshape(brain_dim[0] * brain_dim[1] *
brain_dim[2])
template_hist = np.histogram(template_vector, 100)
# Zero pad the values
binval = np.concatenate([np.zeros((order,)), template_hist[0]])
bins = np.concatenate([np.zeros((order,)), template_hist[1]])
# Identify the first two peaks
peaks = signal.argrelmax(binval, order=order)[0][0:2]
# What is the minima between peaks
minima = binval[peaks[0]:peaks[1]].min()
# What is the index of the last idx with this min value (since if
# zero, there may be many)
minima_idx = (np.where(binval[peaks[0]:peaks[1]] == minima) + peaks[
0])[-1]
# Convert the minima into a threshold
mask_threshold = bins[minima_idx][0]
# Mask the template based on the threshold
mask = np.zeros(template.shape)
mask[template > mask_threshold] = 1
return mask, template
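# Hedged usage sketch for mask_brain: either derive the mask and template
# from the data itself or from the bundled grey matter template.
#
#     mask, template = mask_brain(func_volume, mask_self=True)
#     # or, resampling the packaged MNI grey matter mask to the data shape:
#     mask, template = mask_brain(func_volume, mask_self=False)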
def _noise_dict_update(noise_dict):
"""
Update the noise dictionary parameters with default values, in case any
were missing
Parameters
----------
noise_dict : dict
A dictionary specifying the types of noise in this experiment. The
noise types interact in important ways. First, all noise types
ending with sigma (e.g. motion sigma) are mixed together in
_generate_temporal_noise. These values describe the proportion of
mixing of these elements. However critically, SFNR is the
parameter that describes how much noise these components contribute
to the brain. If you set the noise dict to matched then it will fit
the parameters to match the participant as best as possible.
The noise variables are as follows:
snr [float]: Ratio of MR signal to the spatial noise
sfnr [float]: Ratio of the MR signal to the temporal noise. This is the
total variability that the following sigmas 'sum' to:
task_sigma [float]: Size of the variance of task specific noise
drift_sigma [float]: Size of the variance of drift noise
auto_reg_sigma [float]: Size of the variance of autoregressive
noise. This is an ARMA process where the AR and MA components can be
separately specified
physiological_sigma [float]: Size of the variance of physiological
noise
auto_reg_rho [list]: The coefficients of the autoregressive
components you are modeling
        ma_rho [list]: The coefficients of the moving average components you
are modeling
max_activity [float]: The max value of the averaged brain in order
to reference the template
voxel_size [list]: The mm size of the voxels
fwhm [float]: The gaussian smoothing kernel size (mm)
matched [bool]: Specify whether you are fitting the noise parameters
The volumes of brain noise that are generated have smoothness
specified by 'fwhm'
Returns
-------
noise_dict : dict
Updated dictionary
"""
# Create the default dictionary
default_dict = {'task_sigma': 0, 'drift_sigma': 0, 'auto_reg_sigma': 1,
'auto_reg_rho': [0.5], 'ma_rho': [0.0],
'physiological_sigma': 0, 'sfnr': 90, 'snr': 50,
'max_activity': 1000, 'voxel_size': [1.0, 1.0, 1.0],
'fwhm': 4, 'matched': 1}
# Check what noise is in the dictionary and add if necessary. Numbers
# determine relative proportion of noise
for default_key in default_dict:
if default_key not in noise_dict:
noise_dict[default_key] = default_dict[default_key]
return noise_dict
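# Sketch: only the keys you supply are kept, everything else falls back to
# the defaults listed above.
#
#     nd = _noise_dict_update({'sfnr': 60, 'snr': 30})
#     # nd['sfnr'] == 60 and nd['snr'] == 30, while nd['fwhm'],
#     # nd['auto_reg_rho'], etc. take the default values.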
def _fit_spatial(noise,
noise_temporal,
mask,
template,
spatial_sd,
temporal_sd,
noise_dict,
fit_thresh,
fit_delta,
iterations,
):
"""
Fit the noise model to match the SNR of the data
Parameters
----------
noise : multidimensional array, float
Initial estimate of the noise
noise_temporal : multidimensional array, float
The temporal noise that was generated by _generate_temporal_noise
template : 3d array, float
A continuous (0 -> 1) volume describing the likelihood a voxel
is in the brain. This can be used to contrast the brain and non
brain.
mask : 3d array, binary
The mask of the brain volume, distinguishing brain from non-brain
spatial_sd : float
What is the standard deviation in space of the noise volume to be
generated
temporal_sd : float
What is the standard deviation in time of the noise volume to be
generated
noise_dict : dict
A dictionary specifying the types of noise in this experiment. The
noise types interact in important ways. First, all noise types
ending with sigma (e.g. motion sigma) are mixed together in
_generate_temporal_noise. These values describe the proportion of
mixing of these elements. However critically, SFNR is the
parameter that describes how much noise these components contribute
to the brain. If you set the noise dict to matched then it will
fit the parameters to match the participant as best as possible.
fit_thresh : float
What proportion of the target parameter value is sufficient
error to warrant finishing fit search.
fit_delta : float
How much are the parameters attenuated during the fitting process,
in terms of the proportion of difference between the target
parameter and the actual parameter
    iterations : int
        How many steps of fitting the SNR value will be performed. Usually
        converges after < 5 iterations.
Returns
-------
noise : multidimensional array, float
Generates the noise volume given these parameters
"""
# Pull out information that is needed
dim_tr = noise.shape
base = template * noise_dict['max_activity']
base = base.reshape(dim_tr[0], dim_tr[1], dim_tr[2], 1)
mean_signal = (base[mask > 0]).mean()
target_snr = noise_dict['snr']
# Iterate through different parameters to fit SNR and SFNR
spat_sd_orig = np.copy(spatial_sd)
iteration = 0
for iteration in list(range(iterations)):
# Calculate the new metrics
new_snr = _calc_snr(noise, mask)
# Calculate the difference between the real and simulated data
diff_snr = abs(new_snr - target_snr) / target_snr
# If the AR is sufficiently close then break the loop
if diff_snr < fit_thresh:
logger.info('Terminated SNR fit after ' + str(
iteration) + ' iterations.')
break
        # Convert the new SNR into a new spatial standard deviation
spat_sd_new = mean_signal / new_snr
# Update the variable
spatial_sd -= ((spat_sd_new - spat_sd_orig) * fit_delta)
# Prevent these going out of range
if spatial_sd < 0 or np.isnan(spatial_sd):
spatial_sd = 10e-3
# Set up the machine noise
noise_system = _generate_noise_system(dimensions_tr=dim_tr,
spatial_sd=spatial_sd,
temporal_sd=temporal_sd,
)
# Sum up the noise of the brain
noise = base + (noise_temporal * temporal_sd) + noise_system
# Reject negative values (only happens outside of the brain)
noise[noise < 0] = 0
# Failed to converge
if iterations == 0:
logger.info('No fitting iterations were run')
    elif iteration == iterations - 1:
logger.warning('SNR failed to converge.')
# Return the updated noise
return noise, spatial_sd
def _fit_temporal(noise,
mask,
template,
stimfunction_tr,
tr_duration,
spatial_sd,
temporal_proportion,
temporal_sd,
noise_dict,
fit_thresh,
fit_delta,
iterations,
):
"""
Fit the noise model to match the SFNR and AR of the data
Parameters
----------
noise : multidimensional array, float
Initial estimate of the noise
mask : 3d array, binary
The mask of the brain volume, distinguishing brain from non-brain
template : 3d array, float
A continuous (0 -> 1) volume describing the likelihood a voxel
is in the brain. This can be used to contrast the brain and non
brain.
stimfunction_tr : Iterable, list
When do the stimuli events occur. Each element is a TR
tr_duration : float
What is the duration, in seconds, of each TR?
spatial_sd : float
What is the standard deviation in space of the noise volume to be
generated
    temporal_proportion : float
What is the proportion of the temporal variance (as specified by
the SFNR noise parameter) that is accounted for by the system
noise. If this number is high then all of the temporal
variability is due to system noise, if it is low then all of the
temporal variability is due to brain variability.
temporal_sd : float
What is the standard deviation in time of the noise volume to be
generated
noise_dict : dict
A dictionary specifying the types of noise in this experiment. The
noise types interact in important ways. First, all noise types
ending with sigma (e.g. motion sigma) are mixed together in
_generate_temporal_noise. These values describe the proportion of
mixing of these elements. However critically, SFNR is the
parameter that describes how much noise these components contribute
to the brain. If you set the noise dict to matched then it will
fit the parameters to match the participant as best as possible.
fit_thresh : float
What proportion of the target parameter value is sufficient
error to warrant finishing fit search.
fit_delta : float
How much are the parameters attenuated during the fitting process,
in terms of the proportion of difference between the target
parameter and the actual parameter
    iterations : int
        How many steps of fitting the SFNR and AR values will be performed.
        This is more time consuming than the SNR fit (a new timecourse has
        to be made on each iteration) so be careful about setting this
        appropriately.
Returns
-------
noise : multidimensional array, float
Generates the noise volume given these parameters
"""
    # Pull out the information that is needed
dim_tr = noise.shape
dim = dim_tr[0:3]
base = template * noise_dict['max_activity']
base = base.reshape(dim[0], dim[1], dim[2], 1)
mean_signal = (base[mask > 0]).mean()
# Iterate through different parameters to fit SNR and SFNR
temp_sd_orig = np.copy(temporal_sd)
# Make a copy of the dictionary so it can be modified
new_nd = copy.deepcopy(noise_dict)
# What SFNR do you want
target_sfnr = noise_dict['sfnr']
# What AR do you want?
target_ar = noise_dict['auto_reg_rho'][0]
# Iterate through different MA parameters to fit AR
for iteration in list(range(iterations)):
# If there are iterations left to perform then recalculate the
# metrics and try again
# Calculate the new SFNR
new_sfnr = _calc_sfnr(noise, mask)
# Calculate the AR
new_ar, _ = _calc_ARMA_noise(noise,
mask,
len(noise_dict['auto_reg_rho']),
len(noise_dict['ma_rho']),
)
# Calculate the difference between the real and simulated data
sfnr_diff = abs(new_sfnr - target_sfnr) / target_sfnr
# Calculate the difference in the first AR component
ar_diff = new_ar[0] - target_ar
# If the SFNR and AR is sufficiently close then break the loop
if (abs(ar_diff) / target_ar) < fit_thresh and sfnr_diff < fit_thresh:
msg = 'Terminated AR fit after ' + str(iteration) + ' iterations.'
logger.info(msg)
break
# Otherwise update the noise metrics. Get the new temporal noise value
temp_sd_new = mean_signal / new_sfnr
temporal_sd -= ((temp_sd_new - temp_sd_orig) * fit_delta)
# Prevent these going out of range
if temporal_sd < 0 or np.isnan(temporal_sd):
temporal_sd = 10e-3
# Set the new system noise
temp_sd_system_new = np.sqrt((temporal_sd ** 2) * temporal_proportion)
# Get the new AR value
new_nd['auto_reg_rho'][0] -= (ar_diff * fit_delta)
# Don't let the AR coefficient exceed 1
if new_nd['auto_reg_rho'][0] >= 1:
new_nd['auto_reg_rho'][0] = 0.99
        # Generate the temporal noise with the updated noise parameters
noise_temporal = _generate_noise_temporal(stimfunction_tr,
tr_duration,
dim,
template,
mask,
new_nd,
)
# Set up the machine noise
noise_system = _generate_noise_system(dimensions_tr=dim_tr,
spatial_sd=spatial_sd,
temporal_sd=temp_sd_system_new,
)
# Sum up the noise of the brain
noise = base + (noise_temporal * temporal_sd) + noise_system
# Reject negative values (only happens outside of the brain)
noise[noise < 0] = 0
# Failed to converge
if iterations == 0:
logger.info('No fitting iterations were run')
    elif iteration == iterations - 1:
logger.warning('AR failed to converge.')
# Return the updated noise
return noise
def generate_noise(dimensions,
stimfunction_tr,
tr_duration,
template,
mask=None,
noise_dict=None,
temporal_proportion=0.5,
iterations=None,
fit_thresh=0.05,
fit_delta=0.5,
):
""" Generate the noise to be added to the signal.
Default noise parameters will create a noise volume with a standard
deviation of 0.1 (where the signal defaults to a value of 1). This has
built into estimates of how different types of noise mix. All noise
values can be set by the user or estimated with calc_noise.
Parameters
----------
dimensions : nd array
What is the shape of the volume to be generated
stimfunction_tr : Iterable, list
When do the stimuli events occur. Each element is a TR
tr_duration : float
What is the duration, in seconds, of each TR?
template : 3d array, float
A continuous (0 -> 1) volume describing the likelihood a voxel is in
the brain. This can be used to contrast the brain and non brain.
mask : 3d array, binary
The mask of the brain volume, distinguishing brain from non-brain
noise_dict : dictionary, float
This is a dictionary which describes the noise parameters of the
data. If there are no other variables provided then it will use
default values. The noise variables are as follows:
snr [float]: Ratio of MR signal to the spatial noise
sfnr [float]: Ratio of the MR signal to the temporal noise. This is the
total variability that the following sigmas 'sum' to:
task_sigma [float]: Size of the variance of task specific noise
drift_sigma [float]: Size of the variance of drift noise
auto_reg_sigma [float]: Size of the variance of autoregressive
noise. This is an ARMA process where the AR and MA components can be
separately specified
physiological_sigma [float]: Size of the variance of physiological
noise
auto_reg_rho [list]: The coefficients of the autoregressive
components you are modeling
        ma_rho [list]: The coefficients of the moving average components you
are modeling
max_activity [float]: The max value of the averaged brain in order
to reference the template
voxel_size [list]: The mm size of the voxels
fwhm [float]: The gaussian smoothing kernel size (mm)
matched [bool]: Specify whether you are fitting the noise parameters
The volumes of brain noise that are generated have smoothness
specified by 'fwhm'
    temporal_proportion : float
What is the proportion of the temporal variance (as specified by the
SFNR noise parameter) that is accounted for by the system noise. If
this number is high then all of the temporal variability is due to
system noise, if it is low then all of the temporal variability is
due to brain variability.
iterations : list, int
The first element is how many steps of fitting the SFNR and SNR values
will be performed. Usually converges after < 5. The second element
is the number of iterations for the AR fitting. This is much more
time consuming (has to make a new timecourse on each iteration) so
be careful about setting this appropriately.
fit_thresh : float
What proportion of the target parameter value is sufficient error to
warrant finishing fit search.
fit_delta : float
How much are the parameters attenuated during the fitting process,
in terms of the proportion of difference between the target
parameter and the actual parameter
Returns
----------
noise : multidimensional array, float
Generates the noise volume for these parameters
"""
# Check the input data
if template.max() > 1.1:
raise ValueError('Template out of range')
# Change to be an empty dictionary if it is None
if noise_dict is None:
noise_dict = {}
# Take in the noise dictionary and add any missing information
noise_dict = _noise_dict_update(noise_dict)
# How many iterations will you perform? If unspecified it will set
# values based on whether you are trying to match noise specifically to
# this participant or just get in the ball park
if iterations is None:
if noise_dict['matched'] == 1:
iterations = [20, 20]
else:
iterations = [0, 0]
if abs(noise_dict['auto_reg_rho'][0]) - abs(noise_dict['ma_rho'][0]) < 0.1:
logger.warning('ARMA coefs are close, may have trouble fitting')
# What are the dimensions of the volume, including time
dimensions_tr = (dimensions[0],
dimensions[1],
dimensions[2],
len(stimfunction_tr))
# Get the mask of the brain and set it to be 3d
if mask is None:
mask = np.ones(dimensions)
# Create the base (this inverts the process to make the template)
base = template * noise_dict['max_activity']
# Reshape the base (to be the same size as the volume to be created)
base = base.reshape(dimensions[0], dimensions[1], dimensions[2], 1)
base = np.ones(dimensions_tr) * base
# What is the mean signal of the non masked voxels in this template?
mean_signal = (base[mask > 0]).mean()
# Generate the noise
noise_temporal = _generate_noise_temporal(stimfunction_tr=stimfunction_tr,
tr_duration=tr_duration,
dimensions=dimensions,
template=template,
mask=mask,
noise_dict=noise_dict,
)
# Convert SFNR into the size of the standard deviation of temporal
# variability
temporal_sd = (mean_signal / noise_dict['sfnr'])
# Calculate the temporal sd of the system noise (as opposed to the noise
# attributed to the functional variability).
temporal_sd_system = np.sqrt((temporal_sd ** 2) * temporal_proportion)
# What is the standard deviation of the background activity
spat_sd = mean_signal / noise_dict['snr']
spatial_sd = np.sqrt((spat_sd ** 2) * (1 - temporal_proportion))
# Set up the machine noise
noise_system = _generate_noise_system(dimensions_tr=dimensions_tr,
spatial_sd=spatial_sd,
temporal_sd=temporal_sd_system,
)
# Sum up the noise of the brain
noise = base + (noise_temporal * temporal_sd) + noise_system
# Reject negative values (only happens outside of the brain)
noise[noise < 0] = 0
# Fit the SNR
noise, spatial_sd = _fit_spatial(noise,
noise_temporal,
mask,
template,
spatial_sd,
temporal_sd_system,
noise_dict,
fit_thresh,
fit_delta,
iterations[0],
)
# Fit the SFNR and AR noise
noise = _fit_temporal(noise,
mask,
template,
stimfunction_tr,
tr_duration,
spatial_sd,
temporal_proportion,
temporal_sd,
noise_dict,
fit_thresh,
fit_delta,
iterations[1],
)
# Return the noise
return noise
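# Hedged end-to-end sketch for generate_noise. `stim_tr`, `template`,
# `brain_mask` and `measured_noise_dict` are hypothetical inputs, e.g. the
# outputs of generate_stimfunction, mask_brain and calc_noise.
#
#     noise = generate_noise(dimensions=np.array([64, 64, 36]),
#                            stimfunction_tr=stim_tr,
#                            tr_duration=2.0,
#                            template=template,
#                            mask=brain_mask,
#                            noise_dict=measured_noise_dict)
#     # noise is x by y by z by len(stim_tr) and can be added to a signal
#     # volume scaled with compute_signal_change below.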
def compute_signal_change(signal_function,
noise_function,
noise_dict,
magnitude,
method='PSC',
):
""" Rescale the signal to be a given magnitude, based on a specified
metric (e.g. percent signal change). Metrics are heavily inspired by
Welvaert & Rosseel (2013). The rescaling is based on the maximal
activity in the timecourse. Importantly, all values within the
signal_function are scaled to have a min of -1 or max of 1, meaning that
the voxel value will be the same as the magnitude.
Parameters
----------
signal_function : timepoint by voxel array
The signal time course to be altered. This can have
multiple time courses specified as different columns in this
array. Conceivably you could use the output of
generate_stimfunction as the input but the temporal variance
will be incorrect. Critically, different values across voxels are
considered relative to each other, not independently. E.g., if the
voxel has a peak signal twice as high as another voxel's, then this
means that the signal after these transformations will still be
twice as high (according to the metric) in the first voxel relative
to the second
noise_function : timepoint by voxel numpy array
The time course of noise (a voxel created from generate_noise)
for each voxel specified in signal_function. This is necessary
for computing the mean evoked activity and the noise variability
noise_dict : dict
A dictionary specifying the types of noise in this experiment. The
noise types interact in important ways. First, all noise types
ending with sigma (e.g. motion sigma) are mixed together in
_generate_temporal_noise. The sigma values describe the proportion of
mixing of these elements. However critically, SFNR is the
parameter that describes how much noise these components contribute
to the brain. If you set the noise dict to matched then it will
fit the parameters to match the participant as best as possible.
magnitude : list of floats
        This specifies the size, in terms of the metric chosen below,
        of the signal being generated. This can be a single number,
        and thus apply to all signal timecourses, or it can be an array and
        thus differ for each voxel.
method : str
Select the procedure used to calculate the signal magnitude,
some of which are based on the definitions outlined in Welvaert &
Rosseel (2013):
- 'SFNR': Change proportional to the temporal variability,
as represented by the (desired) SFNR
- 'CNR_Amp/Noise-SD': Signal magnitude relative to the temporal
noise
- 'CNR_Amp2/Noise-Var_dB': Same as above but converted to decibels
- 'CNR_Signal-SD/Noise-SD': Standard deviation in signal
relative to standard deviation in noise
- 'CNR_Signal-Var/Noise-Var_dB': Same as above but converted to
decibels
- 'PSC': Calculate the percent signal change based on the
average activity of the noise (mean / 100 * magnitude)
Returns
----------
signal_function_scaled : 4d numpy array
The new signal volume with the appropriately set signal change
"""
# If you have only one magnitude value, duplicate the magnitude for each
# timecourse you have
assert type(magnitude) is list, '"magnitude" should be a list of floats'
if len(magnitude) == 1:
magnitude *= signal_function.shape[1]
    # Scale all signals to have a range of -1 to 1. This is
# so that any values less than this will be scaled appropriately
signal_function /= np.max(np.abs(signal_function))
# Iterate through the timecourses and calculate the metric
signal_function_scaled = np.zeros(signal_function.shape)
for voxel_counter in range(signal_function.shape[1]):
# Pull out the values for this voxel
sig_voxel = signal_function[:, voxel_counter]
noise_voxel = noise_function[:, voxel_counter]
magnitude_voxel = magnitude[voxel_counter]
# Calculate the maximum signal amplitude (likely to be 1,
# but not necessarily)
max_amp = np.max(np.abs(sig_voxel))
# Calculate the scaled time course using the specified method
if method == 'SFNR':
# How much temporal variation is there, relative to the mean
# activity
temporal_var = noise_voxel.mean() / noise_dict['sfnr']
# Multiply the timecourse by the variability metric
new_sig = sig_voxel * (temporal_var * magnitude_voxel)
elif method == 'CNR_Amp/Noise-SD':
# What is the standard deviation of the noise
noise_std = np.std(noise_voxel)
            # Multiply the signal timecourse by the CNR and noise
            # (rearranging the equation)
new_sig = sig_voxel * (magnitude_voxel * noise_std)
elif method == 'CNR_Amp2/Noise-Var_dB':
# What is the standard deviation of the noise
noise_std = np.std(noise_voxel)
# Rearrange the equation to compute the size of signal change in
# decibels
scale = (10 ** (magnitude_voxel / 20)) * noise_std / max_amp
new_sig = sig_voxel * scale
elif method == 'CNR_Signal-SD/Noise-SD':
# What is the standard deviation of the signal and noise
sig_std = np.std(sig_voxel)
noise_std = np.std(noise_voxel)
            # Multiply the signal timecourse by the CNR and noise
            # (rearranging the equation)
new_sig = sig_voxel * ((magnitude_voxel / max_amp) * noise_std
/ sig_std)
elif method == 'CNR_Signal-Var/Noise-Var_dB':
# What is the standard deviation of the signal and noise
sig_std = np.std(sig_voxel)
noise_std = np.std(noise_voxel)
# Rearrange the equation to compute the size of signal change in
# decibels
scale = (10 ** (magnitude_voxel / 20)) * noise_std / (max_amp *
sig_std)
new_sig = sig_voxel * scale
elif method == 'PSC':
# What is the average activity divided by percentage
scale = ((noise_voxel.mean() / 100) * magnitude_voxel)
new_sig = sig_voxel * scale
signal_function_scaled[:, voxel_counter] = new_sig
# Return the scaled time course
return signal_function_scaled
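# Sketch: scale one signal timecourse to a 2% signal change relative to the
# mean of its noise timecourse (PSC method). `sig` and `noise_tc` are
# hypothetical timepoint by voxel arrays with a single column.
#
#     scaled = compute_signal_change(sig, noise_tc, noise_dict,
#                                    magnitude=[2.0], method='PSC')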
def generate_1d_gaussian_rfs(n_voxels, feature_resolution, feature_range,
rf_size=15, random_tuning=True, rf_noise=0.):
"""
Creates a numpy matrix of Gaussian-shaped voxel receptive fields (RFs)
along one dimension. Can specify whether they are evenly tiled or randomly
tuned along the axis. RF range will be between 0 and 1.
Parameters
----------
n_voxels : int
Number of voxel RFs to create.
feature_resolution : int
Number of points along the feature axis.
feature_range : tuple (numeric)
A tuple indicating the start and end values of the feature range. e.g.
(0, 359) for motion directions.
rf_size : numeric
Width of the Gaussian receptive field. Should be given in units of the
feature dimension. e.g., 15 degrees wide in motion direction space.
random_tuning : boolean [default True]
Indicates whether or not the voxels are randomly tuned along the 1D
feature axis or whether tuning is evenly spaced.
rf_noise : float [default 0.]
Amount of uniform noise to add to the Gaussian RF. This will cause the
generated responses to be distorted by the same uniform noise for a
given voxel.
Returns
----------
voxel_rfs : 2d numpy array (float)
The receptive fields in feature space. Dimensions are n_voxels by
feature_resolution.
voxel_tuning : 1d numpy array (float)
The centers of the voxel RFs, in feature space.
"""
range_start, range_stop = feature_range
if random_tuning:
# Voxel selectivity is random
voxel_tuning = np.floor((np.random.rand(n_voxels) * range_stop)
+ range_start).astype(int)
else:
# Voxel selectivity is evenly spaced along the feature axis
voxel_tuning = np.linspace(range_start, range_stop, n_voxels + 1)
voxel_tuning = voxel_tuning[0:-1]
voxel_tuning = np.floor(voxel_tuning).astype(int)
gaussian = signal.gaussian(feature_resolution, rf_size)
voxel_rfs = np.zeros((n_voxels, feature_resolution))
for i in range(0, n_voxels):
voxel_rfs[i, :] = np.roll(gaussian, voxel_tuning[i] -
((feature_resolution // 2) - 1))
voxel_rfs += np.random.rand(n_voxels, feature_resolution) * rf_noise
voxel_rfs = voxel_rfs / np.max(voxel_rfs, axis=1)[:, None]
return voxel_rfs, voxel_tuning
def generate_1d_rf_responses(rfs, trial_list, feature_resolution,
feature_range, trial_noise=0.25):
"""
Generates trial-wise data for a given set of receptive fields (RFs) and
a 1d array of features presented across trials.
Parameters
----------
    rfs : 2d numpy array (float)
The receptive fields in feature space. Dimensions must be n_voxels
by feature_resolution.
trial_list : 1d numpy array (numeric)
The feature value of the stimulus presented on individual trials.
        Array size should be n_trials.
feature_resolution : int
Number of points along the feature axis.
feature_range : tuple (numeric)
A tuple indicating the start and end values of the feature range. e.g.
(0, 359) for motion directions.
trial_noise : float [default 0.25]
Amount of uniform noise to inject into the synthetic data. This is
generated independently for every trial and voxel.
Returns
----------
trial_data : 2d numpy array (float)
The synthetic data for each voxel and trial. Dimensions are n_voxels by
n_trials.
"""
range_start, range_stop = feature_range
stim_axis = np.linspace(range_start, range_stop,
feature_resolution)
if range_start > 0:
trial_list = trial_list + range_start
elif range_start < 0:
trial_list = trial_list - range_start
one_hot = np.eye(feature_resolution)
indices = [np.argmin(abs(stim_axis - x)) for x in trial_list]
stimulus_mask = one_hot[:, indices]
trial_data = rfs @ stimulus_mask
trial_data += np.random.rand(rfs.shape[0], trial_list.size) * \
(trial_noise * np.max(trial_data))
return trial_data
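# Hedged sketch combining the two receptive-field helpers: 50 voxels tuned
# to motion direction (0-359 degrees at 1 degree resolution), probed on 100
# randomly drawn trials.
#
#     rfs, tuning = generate_1d_gaussian_rfs(50, 360, (0, 359), rf_size=15)
#     trials = np.random.randint(0, 360, 100)
#     data = generate_1d_rf_responses(rfs, trials, 360, (0, 359),
#                                     trial_noise=0.25)
#     # data has shape (50, 100): one synthetic response per voxel and trial.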
|
from Configuration.AlCa.autoCond import *
|
from setuptools import setup
def build_native(spec):
# build rust library
build = spec.add_external_build(cmd=["cargo", "build", "--release"], path=".")
spec.add_cffi_module(
module_path="arrow_odbc._native",
dylib=lambda: build.find_dylib("native", in_path="/target/release"),
header_filename=lambda: build.find_header("native.h", in_path="rust"),
rtld_flags=["NOW", "NODELETE"],
)
def readme():
with open("README.md") as f:
return f.read()
setup(
name="arrow-odbc",
packages=["arrow_odbc"],
zip_safe=False,
platforms="any",
setup_requires=["milksnake"],
install_requires=["pyarrow", "milksnake"],
extras_require={
"test": ["pytest"],
},
milksnake_tasks=[build_native],
url="https://github.com/pacman82/arrow-odbc-py",
author="Markus Klein",
version="0.2.0",
license="MIT",
description="Read the data of an ODBC data source as sequence of Apache Arrow record batches.",
long_description=readme(),
long_description_content_type="text/markdown",
)
|
import schedule, subprocess, time, os
date_folder = time.strftime("%d%m%Y")
output = date_folder + "/" + "output.txt"
if not os.path.isdir(date_folder):
os.mkdir(date_folder)
if not os.path.isfile(output):
    open(output, "w+").close()
def job(output, date):
    print("Running the transcriber script on {0} folder.".format(date))
    subprocess.call(["python", "files_args.py", "-o", output, date])
schedule.every().day.at("23:45").do(job, output, date_folder)
while 1:
date_folder = time.strftime("%d%m%Y")
schedule.run_pending()
time.sleep(1)
|
# -*- coding: utf-8 -*-
import csv
import json
import os
import traceback
from io import StringIO
from tempfile import NamedTemporaryFile
import gevent
import requests
from pyquery import PyQuery as pq
import locust
from locust import constant
from locust.argument_parser import get_parser, parse_options
from locust.user import User, task
from locust.env import Environment
from locust.runners import Runner
from locust import stats
from locust.stats import StatsCSVFileWriter
from locust.web import WebUI
from .testcases import LocustTestCase
from .util import create_tls_cert
class _HeaderCheckMixin:
def _check_csv_headers(self, headers, exp_fn_prefix):
# Check common headers for csv file download request
self.assertIn("Content-Type", headers)
content_type = headers["Content-Type"]
self.assertIn("text/csv", content_type)
self.assertIn("Content-disposition", headers)
disposition = headers[
"Content-disposition"
] # e.g.: 'attachment; filename=requests_full_history_1597586811.5084946.csv'
self.assertIn(exp_fn_prefix, disposition)
class TestWebUI(LocustTestCase, _HeaderCheckMixin):
def setUp(self):
super().setUp()
parser = get_parser(default_config_files=[])
self.environment.parsed_options = parser.parse_args([])
self.stats = self.environment.stats
self.web_ui = self.environment.create_web_ui("127.0.0.1", 0)
self.web_ui.app.view_functions["request_stats"].clear_cache()
gevent.sleep(0.01)
self.web_port = self.web_ui.server.server_port
def tearDown(self):
super().tearDown()
self.web_ui.stop()
self.runner.quit()
def test_web_ui_reference_on_environment(self):
self.assertEqual(self.web_ui, self.environment.web_ui)
def test_web_ui_no_runner(self):
env = Environment()
web_ui = WebUI(env, "127.0.0.1", 0)
gevent.sleep(0.01)
try:
response = requests.get("http://127.0.0.1:%i/" % web_ui.server.server_port)
self.assertEqual(500, response.status_code)
self.assertEqual("Error: Locust Environment does not have any runner", response.text)
finally:
web_ui.stop()
def test_index(self):
self.assertEqual(200, requests.get("http://127.0.0.1:%i/" % self.web_port).status_code)
def test_index_with_spawn_options(self):
html_to_option = {
"user_count": ["-u", "100"],
"spawn_rate": ["-r", "10.0"],
}
for html_name_to_test in html_to_option.keys():
# Test that setting each spawn option individually populates the corresponding field in the html, and none of the others
self.environment.parsed_options = parse_options(html_to_option[html_name_to_test])
response = requests.get("http://127.0.0.1:%i/" % self.web_port)
self.assertEqual(200, response.status_code)
d = pq(response.content.decode("utf-8"))
for html_name in html_to_option.keys():
start_value = d(f".start [name={html_name}]").attr("value")
edit_value = d(f".edit [name={html_name}]").attr("value")
if html_name_to_test == html_name:
self.assertEqual(html_to_option[html_name][1], start_value)
self.assertEqual(html_to_option[html_name][1], edit_value)
else:
self.assertEqual("", start_value)
self.assertEqual("", edit_value)
def test_stats_no_data(self):
self.assertEqual(200, requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port).status_code)
def test_stats(self):
self.stats.log_request("GET", "/<html>", 120, 5612)
response = requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port)
self.assertEqual(200, response.status_code)
data = json.loads(response.text)
self.assertEqual(2, len(data["stats"])) # one entry plus Aggregated
self.assertEqual("/<html>", data["stats"][0]["name"])
self.assertEqual("/<html>", data["stats"][0]["safe_name"])
self.assertEqual("GET", data["stats"][0]["method"])
self.assertEqual(120, data["stats"][0]["avg_response_time"])
self.assertEqual("Aggregated", data["stats"][1]["name"])
self.assertEqual(1, data["stats"][1]["num_requests"])
self.assertEqual(120, data["stats"][1]["avg_response_time"])
def test_stats_cache(self):
self.stats.log_request("GET", "/test", 120, 5612)
response = requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port)
self.assertEqual(200, response.status_code)
data = json.loads(response.text)
self.assertEqual(2, len(data["stats"])) # one entry plus Aggregated
# add another entry
self.stats.log_request("GET", "/test2", 120, 5612)
data = json.loads(requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port).text)
self.assertEqual(2, len(data["stats"])) # old value should be cached now
self.web_ui.app.view_functions["request_stats"].clear_cache()
data = json.loads(requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port).text)
self.assertEqual(3, len(data["stats"])) # this should no longer be cached
def test_stats_rounding(self):
self.stats.log_request("GET", "/test", 1.39764125, 2)
self.stats.log_request("GET", "/test", 999.9764125, 1000)
response = requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port)
self.assertEqual(200, response.status_code)
data = json.loads(response.text)
self.assertEqual(1, data["stats"][0]["min_response_time"])
self.assertEqual(1000, data["stats"][0]["max_response_time"])
def test_request_stats_csv(self):
self.stats.log_request("GET", "/test2", 120, 5612)
response = requests.get("http://127.0.0.1:%i/stats/requests/csv" % self.web_port)
self.assertEqual(200, response.status_code)
self._check_csv_headers(response.headers, "requests")
def test_request_stats_full_history_csv_not_present(self):
self.stats.log_request("GET", "/test2", 120, 5612)
response = requests.get("http://127.0.0.1:%i/stats/requests_full_history/csv" % self.web_port)
self.assertEqual(404, response.status_code)
def test_failure_stats_csv(self):
self.stats.log_error("GET", "/", Exception("Error1337"))
response = requests.get("http://127.0.0.1:%i/stats/failures/csv" % self.web_port)
self.assertEqual(200, response.status_code)
self._check_csv_headers(response.headers, "failures")
def test_request_stats_with_errors(self):
self.stats.log_error("GET", "/", Exception("Error1337"))
response = requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port)
self.assertEqual(200, response.status_code)
self.assertIn("Error1337", response.text)
def test_reset_stats(self):
try:
raise Exception("A cool test exception")
except Exception as e:
tb = e.__traceback__
self.runner.log_exception("local", str(e), "".join(traceback.format_tb(tb)))
self.runner.log_exception("local", str(e), "".join(traceback.format_tb(tb)))
self.stats.log_request("GET", "/test", 120, 5612)
self.stats.log_error("GET", "/", Exception("Error1337"))
response = requests.get("http://127.0.0.1:%i/stats/reset" % self.web_port)
self.assertEqual(200, response.status_code)
self.assertEqual({}, self.stats.errors)
self.assertEqual({}, self.runner.exceptions)
self.assertEqual(0, self.stats.get("/", "GET").num_requests)
self.assertEqual(0, self.stats.get("/", "GET").num_failures)
self.assertEqual(0, self.stats.get("/test", "GET").num_requests)
self.assertEqual(0, self.stats.get("/test", "GET").num_failures)
def test_exceptions(self):
try:
raise Exception("A cool test exception")
except Exception as e:
tb = e.__traceback__
self.runner.log_exception("local", str(e), "".join(traceback.format_tb(tb)))
self.runner.log_exception("local", str(e), "".join(traceback.format_tb(tb)))
response = requests.get("http://127.0.0.1:%i/exceptions" % self.web_port)
self.assertEqual(200, response.status_code)
self.assertIn("A cool test exception", response.text)
response = requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port)
self.assertEqual(200, response.status_code)
def test_exceptions_csv(self):
try:
raise Exception("Test exception")
except Exception as e:
tb = e.__traceback__
self.runner.log_exception("local", str(e), "".join(traceback.format_tb(tb)))
self.runner.log_exception("local", str(e), "".join(traceback.format_tb(tb)))
response = requests.get("http://127.0.0.1:%i/exceptions/csv" % self.web_port)
self.assertEqual(200, response.status_code)
self._check_csv_headers(response.headers, "exceptions")
reader = csv.reader(StringIO(response.text))
rows = []
for row in reader:
rows.append(row)
self.assertEqual(2, len(rows))
self.assertEqual("Test exception", rows[1][1])
self.assertEqual(2, int(rows[1][0]), "Exception count should be 2")
def test_swarm_host_value_specified(self):
class MyUser(User):
wait_time = constant(1)
@task(1)
def my_task(self):
pass
self.environment.user_classes = [MyUser]
response = requests.post(
"http://127.0.0.1:%i/swarm" % self.web_port,
data={"user_count": 5, "spawn_rate": 5, "host": "https://localhost"},
)
self.assertEqual(200, response.status_code)
self.assertEqual("https://localhost", response.json()["host"])
self.assertEqual(self.environment.host, "https://localhost")
def test_swarm_host_value_not_specified(self):
class MyUser(User):
wait_time = constant(1)
@task(1)
def my_task(self):
pass
self.environment.user_classes = [MyUser]
response = requests.post(
"http://127.0.0.1:%i/swarm" % self.web_port,
data={"user_count": 5, "spawn_rate": 5},
)
self.assertEqual(200, response.status_code)
self.assertEqual(None, response.json()["host"])
self.assertEqual(self.environment.host, None)
def test_host_value_from_user_class(self):
class MyUser(User):
host = "http://example.com"
self.environment.user_classes = [MyUser]
response = requests.get("http://127.0.0.1:%i/" % self.web_port)
self.assertEqual(200, response.status_code)
self.assertIn("http://example.com", response.content.decode("utf-8"))
self.assertNotIn("setting this will override the host on all User classes", response.content.decode("utf-8"))
def test_host_value_from_multiple_user_classes(self):
class MyUser(User):
host = "http://example.com"
class MyUser2(User):
host = "http://example.com"
self.environment.user_classes = [MyUser, MyUser2]
response = requests.get("http://127.0.0.1:%i/" % self.web_port)
self.assertEqual(200, response.status_code)
self.assertIn("http://example.com", response.content.decode("utf-8"))
self.assertNotIn("setting this will override the host on all User classes", response.content.decode("utf-8"))
def test_host_value_from_multiple_user_classes_different_hosts(self):
class MyUser(User):
host = None
class MyUser2(User):
host = "http://example.com"
self.environment.user_classes = [MyUser, MyUser2]
response = requests.get("http://127.0.0.1:%i/" % self.web_port)
self.assertEqual(200, response.status_code)
self.assertNotIn("http://example.com", response.content.decode("utf-8"))
self.assertIn("setting this will override the host on all User classes", response.content.decode("utf-8"))
def test_report_page(self):
self.stats.log_request("GET", "/test", 120, 5612)
r = requests.get("http://127.0.0.1:%i/stats/report" % self.web_port)
self.assertEqual(200, r.status_code)
self.assertIn("<title>Test Report</title>", r.text)
self.assertIn("charts-container", r.text)
self.assertIn(
'<a href="?download=1">Download the Report</a>',
r.text,
"Download report link not found in HTML content",
)
def test_report_page_empty_stats(self):
r = requests.get("http://127.0.0.1:%i/stats/report" % self.web_port)
self.assertEqual(200, r.status_code)
self.assertIn("<title>Test Report</title>", r.text)
self.assertIn("charts-container", r.text)
def test_report_download(self):
self.stats.log_request("GET", "/test", 120, 5612)
r = requests.get("http://127.0.0.1:%i/stats/report?download=1" % self.web_port)
self.assertEqual(200, r.status_code)
self.assertIn("attachment", r.headers.get("Content-Disposition", ""))
self.assertNotIn("Download the Report", r.text, "Download report link found in HTML content")
def test_report_host(self):
self.environment.host = "http://test.com"
self.stats.log_request("GET", "/test", 120, 5612)
r = requests.get("http://127.0.0.1:%i/stats/report" % self.web_port)
self.assertEqual(200, r.status_code)
self.assertIn("http://test.com", r.text)
def test_report_host2(self):
class MyUser(User):
host = "http://test2.com"
@task
def my_task(self):
pass
self.environment.host = None
self.environment.user_classes = [MyUser]
self.stats.log_request("GET", "/test", 120, 5612)
r = requests.get("http://127.0.0.1:%i/stats/report" % self.web_port)
self.assertEqual(200, r.status_code)
self.assertIn("http://test2.com", r.text)
def test_report_exceptions(self):
try:
raise Exception("Test exception")
except Exception as e:
tb = e.__traceback__
self.runner.log_exception("local", str(e), "".join(traceback.format_tb(tb)))
self.runner.log_exception("local", str(e), "".join(traceback.format_tb(tb)))
self.stats.log_request("GET", "/test", 120, 5612)
r = requests.get("http://127.0.0.1:%i/stats/report" % self.web_port)
# self.assertEqual(200, r.status_code)
self.assertIn("<h2>Exceptions Statistics</h2>", r.text)
# Prior to 088a98bf8ff4035a0de3becc8cd4e887d618af53, the "nodes" field for each exception in
# "self.runner.exceptions" was accidentally mutated in "get_html_report" to a string.
# This assertion reproduces the issue and it is left there to make sure there's no
# regression in the future.
self.assertTrue(
isinstance(next(iter(self.runner.exceptions.values()))["nodes"], set), "exception object has been mutated"
)
class TestWebUIAuth(LocustTestCase):
def setUp(self):
super().setUp()
parser = get_parser(default_config_files=[])
options = parser.parse_args(["--web-auth", "john:doe"])
self.runner = Runner(self.environment)
self.stats = self.runner.stats
self.web_ui = self.environment.create_web_ui("127.0.0.1", 0, auth_credentials=options.web_auth)
self.web_ui.app.view_functions["request_stats"].clear_cache()
gevent.sleep(0.01)
self.web_port = self.web_ui.server.server_port
def tearDown(self):
super().tearDown()
self.web_ui.stop()
self.runner.quit()
def test_index_with_basic_auth_enabled_correct_credentials(self):
self.assertEqual(
200, requests.get("http://127.0.0.1:%i/?ele=phino" % self.web_port, auth=("john", "doe")).status_code
)
def test_index_with_basic_auth_enabled_incorrect_credentials(self):
self.assertEqual(
401, requests.get("http://127.0.0.1:%i/?ele=phino" % self.web_port, auth=("john", "invalid")).status_code
)
def test_index_with_basic_auth_enabled_blank_credentials(self):
self.assertEqual(401, requests.get("http://127.0.0.1:%i/?ele=phino" % self.web_port).status_code)
class TestWebUIWithTLS(LocustTestCase):
def setUp(self):
super().setUp()
tls_cert, tls_key = create_tls_cert("127.0.0.1")
self.tls_cert_file = NamedTemporaryFile(delete=False)
self.tls_key_file = NamedTemporaryFile(delete=False)
with open(self.tls_cert_file.name, "w") as f:
f.write(tls_cert.decode())
with open(self.tls_key_file.name, "w") as f:
f.write(tls_key.decode())
parser = get_parser(default_config_files=[])
options = parser.parse_args(
[
"--tls-cert",
self.tls_cert_file.name,
"--tls-key",
self.tls_key_file.name,
]
)
self.runner = Runner(self.environment)
self.stats = self.runner.stats
self.web_ui = self.environment.create_web_ui("127.0.0.1", 0, tls_cert=options.tls_cert, tls_key=options.tls_key)
gevent.sleep(0.01)
self.web_port = self.web_ui.server.server_port
def tearDown(self):
super().tearDown()
self.web_ui.stop()
self.runner.quit()
os.unlink(self.tls_cert_file.name)
os.unlink(self.tls_key_file.name)
def test_index_with_https(self):
        # Suppress only the InsecureRequestWarning that urllib3 emits for the unverified HTTPS request.
from urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
self.assertEqual(200, requests.get("https://127.0.0.1:%i/" % self.web_port, verify=False).status_code)
class TestWebUIFullHistory(LocustTestCase, _HeaderCheckMixin):
STATS_BASE_NAME = "web_test"
STATS_FILENAME = "{}_stats.csv".format(STATS_BASE_NAME)
STATS_HISTORY_FILENAME = "{}_stats_history.csv".format(STATS_BASE_NAME)
STATS_FAILURES_FILENAME = "{}_failures.csv".format(STATS_BASE_NAME)
def setUp(self):
super().setUp()
self.remove_files_if_exists()
parser = get_parser(default_config_files=[])
self.environment.parsed_options = parser.parse_args(["--csv", self.STATS_BASE_NAME, "--csv-full-history"])
self.stats = self.environment.stats
self.stats.CSV_STATS_INTERVAL_SEC = 0.02
locust.stats.CSV_STATS_INTERVAL_SEC = 0.1
self.stats_csv_writer = StatsCSVFileWriter(
self.environment, stats.PERCENTILES_TO_REPORT, self.STATS_BASE_NAME, full_history=True
)
self.web_ui = self.environment.create_web_ui("127.0.0.1", 0, stats_csv_writer=self.stats_csv_writer)
self.web_ui.app.view_functions["request_stats"].clear_cache()
gevent.sleep(0.01)
self.web_port = self.web_ui.server.server_port
def tearDown(self):
super().tearDown()
self.web_ui.stop()
self.runner.quit()
self.remove_files_if_exists()
def remove_file_if_exists(self, filename):
if os.path.exists(filename):
os.remove(filename)
def remove_files_if_exists(self):
self.remove_file_if_exists(self.STATS_FILENAME)
self.remove_file_if_exists(self.STATS_HISTORY_FILENAME)
self.remove_file_if_exists(self.STATS_FAILURES_FILENAME)
def test_request_stats_full_history_csv(self):
self.stats.log_request("GET", "/test", 1.39764125, 2)
self.stats.log_request("GET", "/test", 999.9764125, 1000)
self.stats.log_request("GET", "/test2", 120, 5612)
greenlet = gevent.spawn(self.stats_csv_writer.stats_writer)
gevent.sleep(0.01)
self.stats_csv_writer.stats_history_flush()
gevent.kill(greenlet)
response = requests.get("http://127.0.0.1:%i/stats/requests_full_history/csv" % self.web_port)
self.assertEqual(200, response.status_code)
self._check_csv_headers(response.headers, "requests_full_history")
self.assertIn("Content-Length", response.headers)
reader = csv.reader(StringIO(response.text))
rows = [r for r in reader]
self.assertEqual(4, len(rows))
self.assertEqual("Timestamp", rows[0][0])
self.assertEqual("GET", rows[1][2])
self.assertEqual("/test", rows[1][3])
self.assertEqual("/test2", rows[2][3])
self.assertEqual("", rows[3][2])
self.assertEqual("Aggregated", rows[3][3])
|
from .collect import StatsCollector
from .config import Config
__version__ = '0.4.2'
|
load("@bazel_gazelle//:deps.bzl", "go_repository")
def go_modules():
go_repository(
name = "co_honnef_go_tools",
importpath = "honnef.co/go/tools",
sum = "h1:XJP7lxbSxWLOMNdBE4B/STaqVy6L73o0knwj2vIlxnw=",
version = "v0.0.0-20190102054323-c2f93a96b099",
)
go_repository(
name = "com_github_alecthomas_template",
importpath = "github.com/alecthomas/template",
sum = "h1:cAKDfWh5VpdgMhJosfJnn5/FoN2SRZ4p7fJNX58YPaU=",
version = "v0.0.0-20160405071501-a0175ee3bccc",
)
go_repository(
name = "com_github_alecthomas_units",
importpath = "github.com/alecthomas/units",
sum = "h1:qet1QNfXsQxTZqLG4oE62mJzwPIB8+Tee4RNCL9ulrY=",
version = "v0.0.0-20151022065526-2efee857e7cf",
)
go_repository(
name = "com_github_apparentlymart_go_textseg_v12",
importpath = "github.com/apparentlymart/go-textseg/v12",
sum = "h1:bNEQyAGak9tojivJNkoqWErVCQbjdL7GzRt3F8NvfJ0=",
version = "v12.0.0",
)
go_repository(
name = "com_github_armon_consul_api",
importpath = "github.com/armon/consul-api",
sum = "h1:G1bPvciwNyF7IUmKXNt9Ak3m6u9DE1rF+RmtIkBpVdA=",
version = "v0.0.0-20180202201655-eb2c6b5be1b6",
)
go_repository(
name = "com_github_beorn7_perks",
importpath = "github.com/beorn7/perks",
sum = "h1:HWo1m869IqiPhD389kmkxeTalrjNbbJTC8LXupb+sl0=",
version = "v1.0.0",
)
go_repository(
name = "com_github_burntsushi_toml",
importpath = "github.com/BurntSushi/toml",
sum = "h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=",
version = "v0.3.1",
)
go_repository(
name = "com_github_burntsushi_xgb",
importpath = "github.com/BurntSushi/xgb",
sum = "h1:1BDTz0u9nC3//pOCMdNH+CiXJVYJh5UQNCOBG7jbELc=",
version = "v0.0.0-20160522181843-27f122750802",
)
go_repository(
name = "com_github_cespare_xxhash",
importpath = "github.com/cespare/xxhash",
sum = "h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=",
version = "v1.1.0",
)
go_repository(
name = "com_github_client9_misspell",
importpath = "github.com/client9/misspell",
sum = "h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI=",
version = "v0.3.4",
)
go_repository(
name = "com_github_cockroachdb_apd",
importpath = "github.com/cockroachdb/apd",
sum = "h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=",
version = "v1.1.0",
)
go_repository(
name = "com_github_cockroachdb_apd_v2",
importpath = "github.com/cockroachdb/apd/v2",
sum = "h1:y1Rh3tEU89D+7Tgbw+lp52T6p/GJLpDmNvr10UWqLTE=",
version = "v2.0.1",
)
go_repository(
name = "com_github_coreos_bbolt",
importpath = "github.com/coreos/bbolt",
sum = "h1:wZwiHHUieZCquLkDL0B8UhzreNWsPHooDAG3q34zk0s=",
version = "v1.3.2",
)
go_repository(
name = "com_github_coreos_etcd",
importpath = "github.com/coreos/etcd",
sum = "h1:jFneRYjIvLMLhDLCzuTuU4rSJUjRplcJQ7pD7MnhC04=",
version = "v3.3.10+incompatible",
)
go_repository(
name = "com_github_coreos_go_semver",
importpath = "github.com/coreos/go-semver",
sum = "h1:3Jm3tLmsgAYcjC+4Up7hJrFBPr+n7rAqYeSw/SZazuY=",
version = "v0.2.0",
)
go_repository(
name = "com_github_coreos_go_systemd",
importpath = "github.com/coreos/go-systemd",
sum = "h1:Wf6HqHfScWJN9/ZjdUKyjop4mf3Qdd+1TvvltAvM3m8=",
version = "v0.0.0-20190321100706-95778dfbb74e",
)
go_repository(
name = "com_github_coreos_pkg",
importpath = "github.com/coreos/pkg",
sum = "h1:lBNOc5arjvs8E5mO2tbpBpLoyyu8B6e44T7hJy6potg=",
version = "v0.0.0-20180928190104-399ea9e2e55f",
)
go_repository(
name = "com_github_cpuguy83_go_md2man_v2",
importpath = "github.com/cpuguy83/go-md2man/v2",
sum = "h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=",
version = "v2.0.0",
)
go_repository(
name = "com_github_davecgh_go_spew",
importpath = "github.com/davecgh/go-spew",
sum = "h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=",
version = "v1.1.1",
)
go_repository(
name = "com_github_dgrijalva_jwt_go",
importpath = "github.com/dgrijalva/jwt-go",
sum = "h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=",
version = "v3.2.0+incompatible",
)
go_repository(
name = "com_github_dgryski_go_sip13",
importpath = "github.com/dgryski/go-sip13",
sum = "h1:RMLoZVzv4GliuWafOuPuQDKSm1SJph7uCRnnS61JAn4=",
version = "v0.0.0-20181026042036-e10d5fee7954",
)
go_repository(
name = "com_github_emicklei_proto",
importpath = "github.com/emicklei/proto",
sum = "h1:XbpwxmuOPrdES97FrSfpyy67SSCV/wBIKXqgJzh6hNw=",
version = "v1.6.15",
)
go_repository(
name = "com_github_fsnotify_fsnotify",
importpath = "github.com/fsnotify/fsnotify",
sum = "h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=",
version = "v1.4.7",
)
go_repository(
name = "com_github_ghodss_yaml",
importpath = "github.com/ghodss/yaml",
sum = "h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_gl_glfw_v3_3_glfw",
importpath = "github.com/go-gl/glfw/v3.3/glfw",
sum = "h1:WtGNWLvXpe6ZudgnXrq0barxBImvnnJoMEhXAzcbM0I=",
version = "v0.0.0-20200222043503-6f7a984d4dc4",
)
go_repository(
name = "com_github_go_kit_kit",
importpath = "github.com/go-kit/kit",
sum = "h1:Wz+5lgoB0kkuqLEc6NVmwRknTKP6dTGbSqvhZtBI/j0=",
version = "v0.8.0",
)
go_repository(
name = "com_github_go_logfmt_logfmt",
importpath = "github.com/go-logfmt/logfmt",
sum = "h1:MP4Eh7ZCb31lleYCFuwm0oe4/YGak+5l1vA2NOE80nA=",
version = "v0.4.0",
)
go_repository(
name = "com_github_go_stack_stack",
importpath = "github.com/go-stack/stack",
sum = "h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=",
version = "v1.8.0",
)
go_repository(
name = "com_github_gogo_protobuf",
importpath = "github.com/gogo/protobuf",
sum = "h1:/s5zKNz0uPFCZ5hddgPdo2TK2TVrUNMn0OOX8/aZMTE=",
version = "v1.2.1",
)
go_repository(
name = "com_github_golang_glog",
importpath = "github.com/golang/glog",
sum = "h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=",
version = "v0.0.0-20160126235308-23def4e6c14b",
)
go_repository(
name = "com_github_golang_groupcache",
importpath = "github.com/golang/groupcache",
sum = "h1:veQD95Isof8w9/WXiA+pa3tz3fJXkt5B7QaRBrM62gk=",
version = "v0.0.0-20190129154638-5b532d6fd5ef",
)
go_repository(
name = "com_github_golang_mock",
importpath = "github.com/golang/mock",
sum = "h1:G5FRp8JnTd7RQH5kemVNlMeyXQAztQ3mOWV95KxsXH8=",
version = "v1.1.1",
)
go_repository(
name = "com_github_golang_protobuf",
importpath = "github.com/golang/protobuf",
sum = "h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg=",
version = "v1.3.1",
)
go_repository(
name = "com_github_google_btree",
importpath = "github.com/google/btree",
sum = "h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_google_go_cmp",
importpath = "github.com/google/go-cmp",
sum = "h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4=",
version = "v0.4.0",
)
go_repository(
name = "com_github_gorilla_websocket",
importpath = "github.com/gorilla/websocket",
sum = "h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=",
version = "v1.4.0",
)
go_repository(
name = "com_github_grpc_ecosystem_go_grpc_middleware",
importpath = "github.com/grpc-ecosystem/go-grpc-middleware",
sum = "h1:Iju5GlWwrvL6UBg4zJJt3btmonfrMlCDdsejg4CZE7c=",
version = "v1.0.0",
)
go_repository(
name = "com_github_grpc_ecosystem_go_grpc_prometheus",
importpath = "github.com/grpc-ecosystem/go-grpc-prometheus",
sum = "h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho=",
version = "v1.2.0",
)
go_repository(
name = "com_github_grpc_ecosystem_grpc_gateway",
importpath = "github.com/grpc-ecosystem/grpc-gateway",
sum = "h1:bM6ZAFZmc/wPFaRDi0d5L7hGEZEx/2u+Tmr2evNHDiI=",
version = "v1.9.0",
)
go_repository(
name = "com_github_hashicorp_hcl",
importpath = "github.com/hashicorp/hcl",
sum = "h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_iancoleman_strcase",
importpath = "github.com/iancoleman/strcase",
sum = "h1:VHgatEHNcBFEB7inlalqfNqw65aNkM1lGX2yt3NmbS8=",
version = "v0.0.0-20191112232945-16388991a334",
)
go_repository(
name = "com_github_inconshreveable_mousetrap",
importpath = "github.com/inconshreveable/mousetrap",
sum = "h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=",
version = "v1.0.0",
)
go_repository(
name = "com_github_jonboulle_clockwork",
importpath = "github.com/jonboulle/clockwork",
sum = "h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo=",
version = "v0.1.0",
)
go_repository(
name = "com_github_julienschmidt_httprouter",
importpath = "github.com/julienschmidt/httprouter",
sum = "h1:TDTW5Yz1mjftljbcKqRcrYhd4XeOoI98t+9HbQbYf7g=",
version = "v1.2.0",
)
go_repository(
name = "com_github_kisielk_errcheck",
importpath = "github.com/kisielk/errcheck",
sum = "h1:ZqfnKyx9KGpRcW04j5nnPDgRgoXUeLh2YFBeFzphcA0=",
version = "v1.1.0",
)
go_repository(
name = "com_github_kisielk_gotool",
importpath = "github.com/kisielk/gotool",
sum = "h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_konsorten_go_windows_terminal_sequences",
importpath = "github.com/konsorten/go-windows-terminal-sequences",
sum = "h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk=",
version = "v1.0.1",
)
go_repository(
name = "com_github_kr_logfmt",
importpath = "github.com/kr/logfmt",
sum = "h1:T+h1c/A9Gawja4Y9mFVWj2vyii2bbUNDw3kt9VxK2EY=",
version = "v0.0.0-20140226030751-b84e30acd515",
)
go_repository(
name = "com_github_kr_pretty",
importpath = "github.com/kr/pretty",
sum = "h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=",
version = "v0.1.0",
)
go_repository(
name = "com_github_kr_pty",
importpath = "github.com/kr/pty",
sum = "h1:VkoXIwSboBpnk99O/KFauAEILuNHv5DVFKZMBN/gUgw=",
version = "v1.1.1",
)
go_repository(
name = "com_github_kr_text",
importpath = "github.com/kr/text",
sum = "h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=",
version = "v0.1.0",
)
go_repository(
name = "com_github_kylelemons_godebug",
importpath = "github.com/kylelemons/godebug",
sum = "h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=",
version = "v1.1.0",
)
go_repository(
name = "com_github_lib_pq",
importpath = "github.com/lib/pq",
sum = "h1:X5PMW56eZitiTeO7tKzZxFCSpbFZJtkMMooicw2us9A=",
version = "v1.0.0",
)
go_repository(
name = "com_github_magiconair_properties",
importpath = "github.com/magiconair/properties",
sum = "h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY=",
version = "v1.8.0",
)
go_repository(
name = "com_github_matttproud_golang_protobuf_extensions",
importpath = "github.com/matttproud/golang_protobuf_extensions",
sum = "h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=",
version = "v1.0.1",
)
go_repository(
name = "com_github_mitchellh_go_homedir",
importpath = "github.com/mitchellh/go-homedir",
sum = "h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=",
version = "v1.1.0",
)
go_repository(
name = "com_github_mitchellh_mapstructure",
importpath = "github.com/mitchellh/mapstructure",
sum = "h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=",
version = "v1.1.2",
)
go_repository(
name = "com_github_mpvl_unique",
importpath = "github.com/mpvl/unique",
sum = "h1:D5x39vF5KCwKQaw+OC9ZPiLVHXz3UFw2+psEX+gYcto=",
version = "v0.0.0-20150818121801-cbe035fff7de",
)
go_repository(
name = "com_github_mwitkow_go_conntrack",
importpath = "github.com/mwitkow/go-conntrack",
sum = "h1:F9x/1yl3T2AeKLr2AMdilSD8+f9bvMnNN8VS5iDtovc=",
version = "v0.0.0-20161129095857-cc309e4a2223",
)
go_repository(
name = "com_github_oklog_ulid",
importpath = "github.com/oklog/ulid",
sum = "h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=",
version = "v1.3.1",
)
go_repository(
name = "com_github_oneofone_xxhash",
importpath = "github.com/OneOfOne/xxhash",
sum = "h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE=",
version = "v1.2.2",
)
go_repository(
name = "com_github_pelletier_go_toml",
importpath = "github.com/pelletier/go-toml",
sum = "h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=",
version = "v1.2.0",
)
go_repository(
name = "com_github_pkg_errors",
importpath = "github.com/pkg/errors",
sum = "h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=",
version = "v0.8.1",
)
go_repository(
name = "com_github_pmezard_go_difflib",
importpath = "github.com/pmezard/go-difflib",
sum = "h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=",
version = "v1.0.0",
)
go_repository(
name = "com_github_prometheus_client_golang",
importpath = "github.com/prometheus/client_golang",
sum = "h1:9iH4JKXLzFbOAdtqv/a+j8aewx2Y8lAjAydhbaScPF8=",
version = "v0.9.3",
)
go_repository(
name = "com_github_prometheus_client_model",
importpath = "github.com/prometheus/client_model",
sum = "h1:S/YWwWx/RA8rT8tKFRuGUZhuA90OyIBpPCXkcbwU8DE=",
version = "v0.0.0-20190129233127-fd36f4220a90",
)
go_repository(
name = "com_github_prometheus_common",
importpath = "github.com/prometheus/common",
sum = "h1:7etb9YClo3a6HjLzfl6rIQaU+FDfi0VSX39io3aQ+DM=",
version = "v0.4.0",
)
go_repository(
name = "com_github_prometheus_procfs",
importpath = "github.com/prometheus/procfs",
sum = "h1:sofwID9zm4tzrgykg80hfFph1mryUeLRsUfoocVVmRY=",
version = "v0.0.0-20190507164030-5867b95ac084",
)
go_repository(
name = "com_github_prometheus_tsdb",
importpath = "github.com/prometheus/tsdb",
sum = "h1:YZcsG11NqnK4czYLrWd9mpEuAJIHVQLwdrleYfszMAA=",
version = "v0.7.1",
)
go_repository(
name = "com_github_rogpeppe_fastuuid",
importpath = "github.com/rogpeppe/fastuuid",
sum = "h1:gu+uRPtBe88sKxUCEXRoeCvVG90TJmwhiqRpvdhQFng=",
version = "v0.0.0-20150106093220-6724a57986af",
)
go_repository(
name = "com_github_rogpeppe_testscript",
importpath = "github.com/rogpeppe/testscript",
sum = "h1:NxTsoOBQ1zibxf6NDtzrjPbK56hDAteIcOTSINZHtow=",
version = "v1.1.0",
)
go_repository(
name = "com_github_russross_blackfriday_v2",
importpath = "github.com/russross/blackfriday/v2",
sum = "h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=",
version = "v2.0.1",
)
go_repository(
name = "com_github_shurcool_sanitized_anchor_name",
importpath = "github.com/shurcooL/sanitized_anchor_name",
sum = "h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_sirupsen_logrus",
importpath = "github.com/sirupsen/logrus",
sum = "h1:juTguoYk5qI21pwyTXY3B3Y5cOTH3ZUyZCg1v/mihuo=",
version = "v1.2.0",
)
go_repository(
name = "com_github_soheilhy_cmux",
importpath = "github.com/soheilhy/cmux",
sum = "h1:0HKaf1o97UwFjHH9o5XsHUOF+tqmdA7KEzXLpiyaw0E=",
version = "v0.1.4",
)
go_repository(
name = "com_github_spaolacci_murmur3",
importpath = "github.com/spaolacci/murmur3",
sum = "h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ=",
version = "v0.0.0-20180118202830-f09979ecbc72",
)
go_repository(
name = "com_github_spf13_afero",
importpath = "github.com/spf13/afero",
sum = "h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI=",
version = "v1.1.2",
)
go_repository(
name = "com_github_spf13_cast",
importpath = "github.com/spf13/cast",
sum = "h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8=",
version = "v1.3.0",
)
go_repository(
name = "com_github_spf13_cobra",
importpath = "github.com/spf13/cobra",
sum = "h1:FfTH+vuMXOas8jmfb5/M7dzEYx7LpcLb7a0LPe34uOU=",
version = "v0.0.7",
)
go_repository(
name = "com_github_spf13_jwalterweatherman",
importpath = "github.com/spf13/jwalterweatherman",
sum = "h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk=",
version = "v1.0.0",
)
go_repository(
name = "com_github_spf13_pflag",
importpath = "github.com/spf13/pflag",
sum = "h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=",
version = "v1.0.3",
)
go_repository(
name = "com_github_spf13_viper",
importpath = "github.com/spf13/viper",
sum = "h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU=",
version = "v1.4.0",
)
go_repository(
name = "com_github_stretchr_objx",
importpath = "github.com/stretchr/objx",
sum = "h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A=",
version = "v0.1.1",
)
go_repository(
name = "com_github_stretchr_testify",
importpath = "github.com/stretchr/testify",
sum = "h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=",
version = "v1.2.2",
)
go_repository(
name = "com_github_tmc_grpc_websocket_proxy",
importpath = "github.com/tmc/grpc-websocket-proxy",
sum = "h1:LnC5Kc/wtumK+WB441p7ynQJzVuNRJiqddSIE3IlSEQ=",
version = "v0.0.0-20190109142713-0ad062ec5ee5",
)
go_repository(
name = "com_github_ugorji_go",
importpath = "github.com/ugorji/go",
sum = "h1:j4s+tAvLfL3bZyefP2SEWmhBzmuIlH/eqNuPdFPgngw=",
version = "v1.1.4",
)
go_repository(
name = "com_github_vmihailenco_msgpack",
importpath = "github.com/vmihailenco/msgpack",
sum = "h1:wapg9xDUZDzGCNFlwc5SqI1rvcciqcxEHac4CYj89xI=",
version = "v3.3.3+incompatible",
)
go_repository(
name = "com_github_xiang90_probing",
importpath = "github.com/xiang90/probing",
sum = "h1:eY9dn8+vbi4tKz5Qo6v2eYzo7kUS51QINcR5jNpbZS8=",
version = "v0.0.0-20190116061207-43a291ad63a2",
)
go_repository(
name = "com_github_xordataexchange_crypt",
importpath = "github.com/xordataexchange/crypt",
sum = "h1:ESFSdwYZvkeru3RtdrYueztKhOBCSAAzS4Gf+k0tEow=",
version = "v0.0.3-0.20170626215501-b2862e3d0a77",
)
go_repository(
name = "com_github_zclconf_go_cty",
importpath = "github.com/zclconf/go-cty",
sum = "h1:Xzr4m4utRDhHDifag1onwwUSq32HLoLBsp+w6tD0880=",
version = "v1.4.1",
)
go_repository(
name = "com_google_cloud_go",
importpath = "cloud.google.com/go",
sum = "h1:e0WKqKTd5BnrG8aKH3J3h+QvEIQtSUcf2n5UZ5ZgLtQ=",
version = "v0.26.0",
)
go_repository(
name = "com_shuralyov_dmitri_gpu_mtl",
importpath = "dmitri.shuralyov.com/gpu/mtl",
sum = "h1:VpgP7xuJadIUuKccphEpTJnWhS2jkQyMt6Y7pJCD7fY=",
version = "v0.0.0-20190408044501-666a987793e9",
)
go_repository(
name = "in_gopkg_alecthomas_kingpin_v2",
importpath = "gopkg.in/alecthomas/kingpin.v2",
sum = "h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=",
version = "v2.2.6",
)
go_repository(
name = "in_gopkg_check_v1",
importpath = "gopkg.in/check.v1",
sum = "h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=",
version = "v1.0.0-20180628173108-788fd7840127",
)
go_repository(
name = "in_gopkg_errgo_v2",
importpath = "gopkg.in/errgo.v2",
sum = "h1:0vLT13EuvQ0hNvakwLuFZ/jYrLp5F3kcWHXdRggjCE8=",
version = "v2.1.0",
)
go_repository(
name = "in_gopkg_resty_v1",
importpath = "gopkg.in/resty.v1",
sum = "h1:CuXP0Pjfw9rOuY6EP+UvtNvt5DSqHpIxILZKT/quCZI=",
version = "v1.12.0",
)
go_repository(
name = "in_gopkg_yaml_v2",
importpath = "gopkg.in/yaml.v2",
sum = "h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=",
version = "v2.2.2",
)
go_repository(
name = "in_gopkg_yaml_v3",
importpath = "gopkg.in/yaml.v3",
sum = "h1:Xe2gvTZUJpsvOWUnvmL/tmhVBZUmHSvLbMjRj6NUUKo=",
version = "v3.0.0-20200121175148-a6ecf24a6d71",
)
go_repository(
name = "io_etcd_go_bbolt",
importpath = "go.etcd.io/bbolt",
sum = "h1:Z/90sZLPOeCy2PwprqkFa25PdkusRzaj9P8zm/KNyvk=",
version = "v1.3.2",
)
go_repository(
name = "org_cuelang_go",
importpath = "cuelang.org/go",
sum = "h1:5Cl8QY142TQWZfkQ3X4ODxrA8P1u3eqmaPHNP1d0p0Q=",
version = "v0.2.0",
)
go_repository(
name = "org_golang_google_appengine",
importpath = "google.golang.org/appengine",
sum = "h1:igQkv0AAhEIvTEpD5LIpAfav2eeVO9HBTjvKHVJPRSs=",
version = "v1.1.0",
)
go_repository(
name = "org_golang_google_genproto",
importpath = "google.golang.org/genproto",
sum = "h1:Nw54tB0rB7hY/N0NQvRW8DG4Yk3Q6T9cu9RcFQDu1tc=",
version = "v0.0.0-20180817151627-c66870c02cf8",
)
go_repository(
name = "org_golang_google_grpc",
importpath = "google.golang.org/grpc",
sum = "h1:G+97AoqBnmZIT91cLG/EkCoK9NSelj64P8bOHHNmGn0=",
version = "v1.21.0",
)
go_repository(
name = "org_golang_x_crypto",
importpath = "golang.org/x/crypto",
sum = "h1:ObdrDkeb4kJdCP557AjRjq69pTHfNouLtWZG7j9rPN8=",
version = "v0.0.0-20191011191535-87dc89f01550",
)
go_repository(
name = "org_golang_x_exp",
importpath = "golang.org/x/exp",
sum = "h1:rMqLP+9XLy+LdbCXHjJHAmTfXCr93W7oruWA6Hq1Alc=",
version = "v0.0.0-20200513190911-00229845015e",
)
go_repository(
name = "org_golang_x_image",
importpath = "golang.org/x/image",
sum = "h1:+qEpEAPhDZ1o0x3tHzZTQDArnOixOzGD9HUJfcg0mb4=",
version = "v0.0.0-20190802002840-cff245a6509b",
)
go_repository(
name = "org_golang_x_lint",
importpath = "golang.org/x/lint",
sum = "h1:XQyxROzUlZH+WIQwySDgnISgOivlhjIEwaQaJEJrrN0=",
version = "v0.0.0-20190313153728-d0100b6bd8b3",
)
go_repository(
name = "org_golang_x_mobile",
importpath = "golang.org/x/mobile",
sum = "h1:4+4C/Iv2U4fMZBiMCc98MG1In4gJY5YRhtpDNeDeHWs=",
version = "v0.0.0-20190719004257-d2bd2a29d028",
)
go_repository(
name = "org_golang_x_mod",
importpath = "golang.org/x/mod",
sum = "h1:GgiSbuUyC0BlbUmHQBgFqu32eiRR/CEYdjOjOd4zE6Y=",
version = "v0.1.1-0.20191107180719-034126e5016b",
)
go_repository(
name = "org_golang_x_net",
importpath = "golang.org/x/net",
sum = "h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI=",
version = "v0.0.0-20190620200207-3b0461eec859",
)
go_repository(
name = "org_golang_x_oauth2",
importpath = "golang.org/x/oauth2",
sum = "h1:vEDujvNQGv4jgYKudGeI/+DAX4Jffq6hpD55MmoEvKs=",
version = "v0.0.0-20180821212333-d2e6202438be",
)
go_repository(
name = "org_golang_x_sync",
importpath = "golang.org/x/sync",
sum = "h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU=",
version = "v0.0.0-20190423024810-112230192c58",
)
go_repository(
name = "org_golang_x_sys",
importpath = "golang.org/x/sys",
sum = "h1:R8bzl0244nw47n1xKs1MUMAaTNgjavKcN/aX2Ss3+Fo=",
version = "v0.0.0-20191001151750-bb3f8db39f24",
)
go_repository(
name = "org_golang_x_text",
importpath = "golang.org/x/text",
sum = "h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=",
version = "v0.3.2",
)
go_repository(
name = "org_golang_x_time",
importpath = "golang.org/x/time",
sum = "h1:SvFZT6jyqRaOeXpc5h/JSfZenJ2O330aBsf7JfSUXmQ=",
version = "v0.0.0-20190308202827-9d24e82272b4",
)
go_repository(
name = "org_golang_x_tools",
importpath = "golang.org/x/tools",
sum = "h1:5E4dL8+NgFOgjwbTKz+OOEGGhP+ectTmF842l6KjupQ=",
version = "v0.0.0-20200207183749-b753a1ba74fa",
)
go_repository(
name = "org_golang_x_xerrors",
importpath = "golang.org/x/xerrors",
sum = "h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=",
version = "v0.0.0-20191204190536-9bdfabe68543",
)
go_repository(
name = "org_uber_go_atomic",
importpath = "go.uber.org/atomic",
sum = "h1:cxzIVoETapQEqDhQu3QfnvXAV4AlzcvUCxkVUFw3+EU=",
version = "v1.4.0",
)
go_repository(
name = "org_uber_go_multierr",
importpath = "go.uber.org/multierr",
sum = "h1:HoEmRHQPVSqub6w2z2d2EOVs2fjyFRGyofhKuyDq0QI=",
version = "v1.1.0",
)
go_repository(
name = "org_uber_go_zap",
importpath = "go.uber.org/zap",
sum = "h1:ORx85nbTijNz8ljznvCMR1ZBIPKFn3jQrag10X2AsuM=",
version = "v1.10.0",
)
|
def main(number1, number2):
    # Convert both inputs to integers and return their sum as a message.
    num1 = int(number1)
    num2 = int(number2)
    out = num1 + num2
    return "Sum is " + str(out)
|
import pytest
from lineage.query_context import QueryContext
from lineage.bigquery_query import BigQueryQuery
from lineage.table_resolver import TableResolver
def create_bigquery_table_colum(project_id, dataset_id, table_id):
return {'project_id': project_id, 'dataset_id': dataset_id, 'table_id': table_id}
@pytest.mark.parametrize("json_table_column, full_table_names, expected_parsed_result", [
(create_bigquery_table_colum('db1', 'sc1', 't1'), True, 'db1.sc1.t1'),
(create_bigquery_table_colum('db1', 'sc1', 't1'), False, 't1'),
({'project': 'db1', 'dataset_id': 'sc1', 'table_id': 't1'}, True, None),
({}, True, None),
(None, True, None),
(create_bigquery_table_colum('db1', 'sc1', 'anont1'), True, None), #anon represents a temp cached table and therefore should be ignored
])
def test_bigquery_query_parse_table_json_column(json_table_column, full_table_names, expected_parsed_result):
table_resolver = TableResolver(full_table_names=full_table_names)
assert BigQueryQuery._parse_table_json_column(table_resolver, json_table_column) == expected_parsed_result
def test_bigquery_query_parse():
raw_query_text = ''
query_context = QueryContext(referenced_tables=[create_bigquery_table_colum('db1', 'sc1', 'source_table')],
destination_table=create_bigquery_table_colum('db1', 'sc1', 'target_table'))
reference = BigQueryQuery(raw_query_text, query_context)
reference.parse(full_table_names=False)
assert reference.source_tables == {'source_table'}
assert reference.target_tables == {'target_table'}
def test_bigquery_query_parse_with_drop_statement():
raw_query_text = 'drop table db1.sc1.target_table'
query_context = QueryContext(referenced_tables=[],
destination_table=create_bigquery_table_colum('db1', 'sc1', 'target_table'),
query_type='DROP_TABLE')
reference = BigQueryQuery(raw_query_text, query_context)
reference.parse(full_table_names=False)
assert reference.dropped_tables == {'target_table'}
def test_bigquery_query_parse_with_alter_statement():
raw_query_text = 'alter table db1.sc1.target_table rename to target_table_ng'
query_context = QueryContext(referenced_tables=[],
destination_table=create_bigquery_table_colum('db1', 'sc1', 'target_table'),
query_type='ALTER_TABLE')
reference = BigQueryQuery(raw_query_text, query_context)
reference.parse(full_table_names=False)
assert reference.renamed_tables == {('target_table', 'target_table_ng')}
|
# __init__.py
import os, sys
# Local imports
sys.path.insert(0,'..')
try:
import controller, cli, lunchscraper, translator
except ImportError:
from lunchscraper import controller, cli, lunchscraper, translator
|
import requests
JHUB_URL = 'http://127.0.0.1'
IMAGE_NAME = 'nielsbohr/slurm-notebook:edge'
if __name__ == "__main__":
# Make spawn request
with requests.Session() as session:
result = session.get(JHUB_URL)
# Get login page
login_url = JHUB_URL + "/hub/login"
page_resp = session.get(login_url)
if page_resp.status_code != 200:
print("Failed to GET the {} URL".format(page_resp))
exit(1)
# Login as test user
user = 'test_user'
login_resp = session.post(JHUB_URL + "/hub/login?next=",
data={"username": user,
"password": "password"})
if login_resp.status_code != 200 and login_resp.status_code != 302 \
and login_resp.status_code != 500:
print("Failed to login to {} as {}".format(JHUB_URL, user))
exit(1)
payload = {'dockerimage': IMAGE_NAME}
# Spawn a notebook with image name
spawn_resp = session.post(JHUB_URL + "/hub/spawn",
data=payload)
if spawn_resp.status_code != 200 and spawn_resp.status_code != 302:
print("Failed to spawn notebook {} at {}".format(
payload['dockerimage'], JHUB_URL))
exit(1)
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
import time
import numpy as np
import tensorflow as tf
import os
class NERTagger(object):
"""The NER Tagger Model."""
def __init__(self, is_training, config):
self.batch_size = batch_size = config.batch_size
self.seq_length = seq_length = config.seq_length
size = config.hidden_size
vocab_size = config.vocab_size
tag_size = config.tag_size
self._input_data = tf.placeholder(tf.int32, [batch_size, seq_length])
self._targets = tf.placeholder(tf.int32, [batch_size, seq_length])
# Check if Model is Training
self.is_training = is_training
lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(size, forget_bias=0.0, state_is_tuple=True)
if is_training and config.keep_prob < 1:
lstm_cell = tf.nn.rnn_cell.DropoutWrapper(
lstm_cell, output_keep_prob=config.keep_prob)
cell = tf.nn.rnn_cell.MultiRNNCell([lstm_cell] * config.num_layers, state_is_tuple=True)
self._initial_state = cell.zero_state(batch_size, tf.float32)
with tf.device("/cpu:0"):
embedding = tf.get_variable(
"embedding", [vocab_size, size], dtype=tf.float32)
inputs = tf.nn.embedding_lookup(embedding, self._input_data)
if is_training and config.keep_prob < 1:
inputs = tf.nn.dropout(inputs, config.keep_prob)
outputs = []
state = self._initial_state
with tf.variable_scope("ner_lstm"):
for time_step in range(seq_length):
if time_step > 0: tf.get_variable_scope().reuse_variables()
(cell_output, state) = cell(inputs[:, time_step, :], state)
outputs.append(cell_output)
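        # Concatenate the per-step outputs and flatten to [batch_size * seq_length, hidden_size] for the softmax projection.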
output = tf.reshape(tf.concat(outputs, 1), [-1, size])
softmax_w = tf.get_variable(
"softmax_w", [size, tag_size], dtype=tf.float32)
softmax_b = tf.get_variable("softmax_b", [tag_size], dtype=tf.float32)
logits = tf.matmul(output, softmax_w) + softmax_b
loss = tf.contrib.legacy_seq2seq.sequence_loss_by_example(
logits=[logits],
targets=[tf.reshape(self._targets, [-1])],
weights=[tf.ones([batch_size * seq_length], dtype=tf.float32)])
        # Fetch results in session.run()
self._cost = cost = tf.reduce_sum(loss) / batch_size
self._final_state = state
self._logits = logits
self._lr = tf.Variable(0.0, trainable=False)
tvars = tf.trainable_variables()
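        # Clip gradients by global norm before applying the SGD updates.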
grads, _ = tf.clip_by_global_norm(tf.gradients(cost, tvars),
config.max_grad_norm)
optimizer = tf.train.GradientDescentOptimizer(self._lr)
self._train_op = optimizer.apply_gradients(zip(grads, tvars))
self._new_lr = tf.placeholder(
tf.float32, shape=[], name="new_learning_rate")
self._lr_update = tf.assign(self._lr, self._new_lr)
self.saver = tf.train.Saver(tf.global_variables())
def assign_lr(self, session, lr_value):
session.run(self._lr_update, feed_dict={self._new_lr: lr_value})
@property
def input_data(self):
return self._input_data
@property
def targets(self):
return self._targets
@property
def initial_state(self):
return self._initial_state
@property
def cost(self):
return self._cost
@property
def final_state(self):
return self._final_state
@property
def logits(self):
return self._logits
@property
def lr(self):
return self._lr
@property
def train_op(self):
return self._train_op
def predict_tag(self, sess, tags, text):
x = np.array(text)
feed = {self._input_data: x}
        logits = sess.run(self._logits, feed_dict=feed)
        results = np.argmax(logits, 1)
id2labels = dict(zip(tags.values(), tags.keys()))
labels = map(id2labels.get, results)
return labels
def run(session, model, dataset, eval_op, ner_train_dir, epoch):
"""Runs the model on the given data."""
start_time = time.time()
costs = 0.0
iters = 0
step = 0
while dataset.has_next():
step = step + 1
(x, y) = dataset.next_batch(model.batch_size)
fetches = [model.cost, eval_op]
feed_dict = {}
feed_dict[model.input_data] = x
feed_dict[model.targets] = y
cost, _ = session.run(fetches, feed_dict)
costs += cost
iters += model.seq_length
# Save Model to CheckPoint when is_training is True
if model.is_training:
checkpoint_path = os.path.join(ner_train_dir, "lstm/model.ckpt")
model.saver.save(session, checkpoint_path)
print("Model Saved... at time step " + str(step))
return np.exp(costs / iters)
|
from django.shortcuts import render
from rest_framework import generics, permissions
from django.contrib.auth import get_user_model
from .serializers import ChoreListSerializer, ChoreDetailSerializer, UserSerializer
from .models import Chore
from rest_framework.parsers import MultiPartParser, FormParser
from rest_framework.views import APIView
from rest_framework.renderers import JSONRenderer
from .permissions import IsOwnerOrReadOnly
class UserCreateView(generics.CreateAPIView):
model = get_user_model()
parser_classes = [MultiPartParser]
serializer_class = UserSerializer
permission_classes = [permissions.AllowAny]
class ChoreListAPIView(generics.ListAPIView):
queryset = Chore.objects.all()
serializer_class = ChoreListSerializer
class ChoreRetrieveAPIView(generics.RetrieveAPIView):
lookup_field = "id"
queryset = Chore.objects.all()
serializer_class = ChoreDetailSerializer
class ChoreCreateAPIView(generics.CreateAPIView):
parser_classes = (MultiPartParser, FormParser)
queryset = Chore.objects.all()
serializer_class = ChoreDetailSerializer
class ChoreRetrieveUpdateAPIView(generics.RetrieveUpdateAPIView):
lookup_field = "id"
queryset = Chore.objects.all()
serializer_class = ChoreDetailSerializer
parser_classes = (MultiPartParser, FormParser)
class ChoreDestroyAPIView(generics.DestroyAPIView):
lookup_field = "id"
queryset = Chore.objects.all()
|
import cv2
import numpy as np
def contourColor(color_number):
if color_number == 0:
contour_color = (128, 255, 255)
elif color_number == 1:
contour_color = (0, 255, 0)
elif color_number == 2:
contour_color = (0, 0, 0)
elif color_number == 3:
contour_color = (255, 0, 0)
else:
contour_color = (0, 0, 255)
return contour_color
def colorNumber2color(color_number):
if color_number == 0:
return "yellow"
if color_number == 1:
return "green"
if color_number == 2:
return "black"
if color_number == 3:
return "blue"
return "orange"
def color2colorNumber(color):
if color == "yellow":
return 0
if color == "green":
return 1
if color == "black":
return 2
if color == "blue":
return 3
return 4
def constructionPlans2colorNumber(colors):
return [color2colorNumber(colors[0]),
color2colorNumber(colors[1]),
color2colorNumber(colors[2])]
def showPicture(name, img, ratio = 1):
cv2.namedWindow(name, cv2.WINDOW_NORMAL)
cv2.imshow(name, img)
#print(name, img.shape[1], img.shape[0])
cv2.resizeWindow(name, int(img.shape[1]*ratio), int(img.shape[0]*ratio))
def selectROI(im):
showCrosshair = False
fromCenter = False
r = cv2.selectROI("Image", im, fromCenter, showCrosshair)
imCrop = im[int(r[1]):int(r[1] + r[3]), int(r[0]):int(r[0] + r[2])]
cv2.imshow("Image", imCrop)
cv2.waitKey(0)
def drawSquare(img, contour, color_number):
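    # Fit a minimum-area rotated rectangle around the contour and draw its outline.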
rect = cv2.minAreaRect(contour)
box = cv2.boxPoints(rect)
box = np.int0(box)
cv2.drawContours(img, [box], -1, contourColor(color_number), 5)
|
from scoring.models import *
from django.shortcuts import render
def remove_all_data(request):
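    # Wipe all scoring data: judges, projects, students and judge assignments.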
Judge.objects.all().delete()
Project.objects.all().delete()
Student.objects.all().delete()
Judge_Assignment.objects.all().delete()
return render(request, 'home.html')
|
#!/usr/bin/env python
'''
Created on 27/10/2016
@author: sium
'''
from __future__ import print_function
__author__ = 'sium'
__licence__="""
MIT License
Copyright (c) 2017 Sinan Ugur Umu (SUU) sinanugur@gmail.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
__doc__="""Variant caller for HPV project.
Usage:
hpv-variant-call.py <BAM> [<OUTCSV>] (--chromosome <name> | --auto) [--discordant] [--reference <name>] [--start=<number>] [--end=<number>] [--transformed] [--cpu=<number>]
hpv-variant-call.py <BAM> <FASTA> <BED> [--chromosome <name> | --auto] [--reference <name>] [--start=<number>] [--end=<number>]
hpv-variant-call.py (-h | --help)
hpv-variant-call.py --version
Arguments:
BAM BAM or SAM File name.
FASTA Output FASTA file name for soft clipped sequences.
    BED                             Output tab-separated BED file name for soft clipped sequences.
OUTCSV Write regular CSV output into a file, not STDOUT.
-c <name>, --chromosome <name> The name of the chromosome.
-r <name>, --reference <name> Reference FASTA file.
    -s <number>, --start <number>   Start position [default: 0]
-e <number>, --end <number> End position
    -j <number>, --cpu <number>     The number of CPUs for parallel processing. [default: 1]
Options:
-a --auto Autodetect chromosome name (with highest coverage) to be fetched.
-t --transformed Mapped HPV genomes are transformed.
-h --help Show this screen.
--version Show version.
"""
#prevent sigpipe error
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE,SIG_DFL)
#########
import pysam
from collections import Counter
from docopt import docopt
import sys
from math import floor
from Bio import SeqIO
from Bio.Seq import Seq
from re import search
from re import match
from re import compile
from pathos.multiprocessing import ProcessPool
from functools import reduce
from itertools import repeat
def auto_detect_chromosome_by_coverage(samfile,bam_file):
hpv_chromosomes = list(filter(lambda x: x.find("HPV") >= 0, samfile.references)) # find HPV chromosomes
the_list_of_chromosome_counts = list(
map(lambda chr: [chr, samfile.count(chr)], hpv_chromosomes)) # estimate HPV chromosome coverages
autodetected_chromosome = reduce(lambda x, y: x if x[1] > y[1] >= 0 else y,
the_list_of_chromosome_counts) # find the highest coverage
print("The contig with the highest coverage is %s for the BAM file, %s " % (autodetected_chromosome[0], bam_file),
file=sys.stderr)
return(autodetected_chromosome[0])
def auto_detect_hpv_type_from_file_name(samfile,bam_file):
hpv_name=search('(HPV[0-9]+)',bam_file).group(1)
    hpv_regex = compile(r"\(" + hpv_name + r"\)")
autodetected_chromosome = list(filter(lambda x: search(hpv_regex,x), samfile.references)) # find HPV chromosome
print("The HPV name detected is %s for the BAM file, %s " % (autodetected_chromosome[0], bam_file),
file=sys.stderr)
return (autodetected_chromosome[0])
def function_position_counter(pileupread,position_counter,quality_counter):
if not pileupread.is_refskip:
if not pileupread.is_del:
base = pileupread.alignment.query_sequence[pileupread.query_position]
position_counter[base] += 1
quality_counter[base] += pileupread.alignment.query_qualities[pileupread.query_position]
else:
position_counter["deletion"] += 1
else:
position_counter["skip"] += 1
def function_merge_two_dicts(x, y):
"""Given two dicts, merge them into a new dict as a shallow copy."""
z = x.copy()
z.update(y)
return(z)
def function_reduce(x,y):
return((x[0]+y[0],x[1]+y[1]))
def function_parallel_count(position,bam_file,chromosome):
samfile = pysam.AlignmentFile(bam_file)
position_counter = Counter()
discordant_counter = Counter()
quality_counter = Counter()
discordant_quality_counter = Counter()
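    # Count base calls and summed base qualities at this single position; with --discordant, only reads whose mate maps to a different reference are counted.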
if arguments['--discordant']:
for pileupcolumn in samfile.pileup(chromosome, position, position + 1, truncate=True, max_depth=1000000000):
for pileupread in pileupcolumn.pileups:
if (pileupread.alignment.reference_name != pileupread.alignment.next_reference_name):
function_position_counter(pileupread, discordant_counter, discordant_quality_counter)
else:
for pileupcolumn in samfile.pileup(chromosome, position, position + 1, truncate=True, max_depth=1000000000):
for pileupread in pileupcolumn.pileups:
function_position_counter(pileupread, position_counter, quality_counter)
samfile.close()
return({position:(position_counter,quality_counter,discordant_counter,discordant_quality_counter)})
def hpv_variant_table_create(bam_file,chromosome,reference_filename,start,end,csv1):
samfile = pysam.AlignmentFile(bam_file)
if arguments['--auto']:
try:
chromosome = auto_detect_hpv_type_from_file_name(samfile,bam_file)
except:
chromosome = auto_detect_chromosome_by_coverage(samfile, bam_file)
if reference_filename is None:
sequence = None
else:
for record in SeqIO.parse(reference_filename,"fasta"):
if record.id == chromosome:
sequence=str(record.seq)
break
start= int(0 if start is None else start) #start position of the fetched location
end= int(samfile.lengths[samfile.references.index(chromosome)]) if end is None else int(end) #calculate the end by using the chromosome name
length=int(samfile.lengths[samfile.references.index(chromosome)])
second_half=length - floor(length/2) +1
first_half=floor(length/2 -1)
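    # Convert a 0-based position on the half-shifted ("transformed") genome back to a 1-based position in the original coordinates.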
function_transformed_position = lambda position: int(
position + 1 + first_half) if position + 1 <= second_half else int(position + 1 - second_half)
print("chr\tposition\treference\tcoverage\tA\tG\tC\tT\tdeletion\tskip\tqA\tqG\tqC\tqT",
file= csv1 if csv1 else sys.stdout)
samfile.close()
with ProcessPool(int(arguments['--cpu'])) as pool:
res = pool.map(function_parallel_count, range(start,end),repeat(bam_file),repeat(chromosome))
results=reduce(function_merge_two_dicts,res)
for position in range(start,end):
if not arguments['--transformed']: # is this a shifted genome, no
pos = position + 1
else:
pos = function_transformed_position(position)
if arguments['--discordant']:
print_variant_csv_files(results[position][2],results[position][3],chromosome,sequence,position,pos,csv1 if csv1 else sys.stdout)
else:
print_variant_csv_files(results[position][0],results[position][1],chromosome,sequence,position,pos,csv1 if csv1 else sys.stdout)
def print_variant_csv_files(position_counter,quality_counter,chromosome,sequence,position,pos,where_to_print):
print("{chromosome}\t{position}\t{reference}\t{coverage}\t{A}\t{G}\t{C}\t{T}\t{deletion}\t{skip}\t{qA:.2f}\t{qG:.2f}\t{qC:.2f}\t{qT:.2f}".format(
chromosome=chromosome, position=pos,
reference='NA' if sequence is None else sequence[position],
coverage=position_counter["A"] + position_counter["G"] + position_counter["C"] + position_counter["T"],
A=position_counter["A"],
G=position_counter["G"],
C=position_counter["C"],
T=position_counter["T"],
deletion=position_counter["deletion"],
skip=position_counter['skip'],
qA=quality_counter["A"] / (position_counter["A"] +0.000000000001),
qG=quality_counter["G"] / (position_counter["G"] +0.000000000001),
qC=quality_counter["C"] / (position_counter["C"] +0.000000000001),
qT=quality_counter["T"] / (position_counter["T"] +0.000000000001)
),file=where_to_print)
def fetch_soft_clipped(bam_file,chromosome,start,end,fasta_file,tsv_file):
samfile = pysam.AlignmentFile(bam_file)
if arguments['--auto']:
try:
chromosomes = list(auto_detect_hpv_type_from_file_name(samfile,bam_file))
except:
chromosomes = list(auto_detect_chromosome_by_coverage(samfile, bam_file))
elif chromosome is None:
chromosomes = samfile.references
else:
chromosomes = list(chromosome)
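    # Match soft-clip CIGAR operations of length >= 10 (two or more digits followed by 'S').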
cigarsoft = compile("([1-9][0-9]+)S")
with open(fasta_file,"w") as fasta,open(tsv_file,"w") as tsv:
for chromosome in chromosomes:
start = int(0 if start is None else start) # start position of the fetched location
end = int(samfile.lengths[samfile.references.index(chromosome)]) if end is None else int(
end) # calculate the end by using the chromosome name
for read in samfile.fetch(chromosome,start,end):
if not read.is_unmapped and search(cigarsoft,read.cigarstring):
#seq_position=0
#read_aligned_pairs=read.get_aligned_pairs()
#for i in read.cigartuples:
#if i[0] == 4 and i[1] >= 10: #detect soft clipped, 4 is for soft clip
if match(cigarsoft, read.cigarstring): #if soft clipping at the beginning
size=int(match(cigarsoft, read.cigarstring).group(1))
sequence=read.seq[0:size]
else: #if soft clipping at the end
size = int(search(cigarsoft, read.cigarstring).group(1))
sequence = read.seq[-size:]
if read.is_reverse:
sequence=str(Seq(sequence).reverse_complement()) #take reverse complement if on opposite strand
print (">{read_id}\n{sequence}".format(read_id=read.query_name,sequence=sequence),file=fasta)
feat_start = read.reference_start if match(cigarsoft,read.cigarstring) else read.reference_end
print ("{ref_id}\t{feat_start}\t{feat_end}\t{name}\t{score}\t{strand}".format(ref_id=read.reference_name,
feat_start=feat_start,
feat_end=feat_start+size,
name=read.query_name,score=1,strand="."),file=tsv)
#break
#elif i[0] != 3: #3 is for Ns
#elif i[0] != 3: # 3 is for Ns
# seq_position=seq_position + i[1]
else:
pass
def main():
if arguments['<FASTA>']:
fetch_soft_clipped(arguments['<BAM>'],arguments['--chromosome'],arguments['--start'],arguments['--end'],arguments['<FASTA>'],arguments['<BED>'])
else:
if arguments['<OUTCSV>']:
with open(arguments["<OUTCSV>"], "w") as csv1:
hpv_variant_table_create(arguments['<BAM>'], arguments['--chromosome'], arguments['--reference'],
arguments['--start'], arguments['--end'], csv1)
else:
hpv_variant_table_create(arguments['<BAM>'], arguments['--chromosome'], arguments['--reference'],
arguments['--start'], arguments['--end'], csv1=None)
if __name__ == '__main__':
arguments = docopt(__doc__, version='0.95')
main()
|
# This program is free software: you can redistribute it and/or modify it under the
# terms of the Apache License (v2.0) as published by the Apache Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the Apache License for more details.
#
# You should have received a copy of the Apache License along with this program.
# If not, see <https://www.apache.org/licenses/LICENSE-2.0>.
"""Manage database."""
# external libs
from cmdkit.app import ApplicationGroup
from cmdkit.cli import Interface
# commands
from . import init, check, dump
COMMANDS = {
'init': init.InitDatabaseApp,
'check': check.CheckDatabaseApp,
'dump': dump.DumpDatabaseApp,
}
PROGRAM = 'streamkit database'
USAGE = f"""\
usage: {PROGRAM} [-h] <command> [<args>...]
{__doc__}\
"""
HELP = f"""\
{USAGE}
commands:
init {init.__doc__}
check {check.__doc__}
dump {dump.__doc__}
options:
-h, --help Show this message and exit.
Use the -h/--help flag with the above groups/commands to
learn more about their usage.\
"""
class DatabaseApp(ApplicationGroup):
"""Application class for database command group."""
interface = Interface(PROGRAM, USAGE, HELP)
interface.add_argument('command')
command = None
commands = COMMANDS
|
from aces import Aces
class sub(Aces):
def submit(self):
opt = dict(
units="metal",
species="glassC",
method="greenkubo",
nodes=1,
procs=12,
queue="q1.1",
runTime=10000000,
runner="phonopy")
app = dict(enforceThick=True,supercell=[2, 2, 2], kpoints=[5, 5, 5], strain=.3)
self.commit(opt, app)
if __name__ == '__main__':
sub().run()
|
from sim.scheduler import Scheduler
from sim.queue import Queue
from sim.genjob import GenJob
from sim.tokenbucket import TokenBucket
from sim.server import Server
|
# -*- coding: utf-8 -*-
from .libs import log, storage
import sublime
class S6Setting(object):
def __init__(self):
super(S6Setting, self).__init__()
self._setting = storage.StorageSetting("sublime_666")
def get_git_remote(self):
remote = self._setting.get("git_remote", None)
if remote is None:
window = sublime.active_window()
def on_done(remote):
self.set_git_remote(remote)
window.show_input_panel("git remote url:", "", on_done, None, None)
return remote
def set_git_remote(self, remote):
self._setting.set("git_remote", remote)
self._setting.save()
|
from tinydb import TinyDB, Query
import os
import json
import youtube_metadata
import logging
from googleapiclient.errors import HttpError
from time import sleep
def store_metadata(db_fn: str, video: dict):
"""
Store the given videos_metadata
:param db_fn:
:param video:
:return:
"""
logging.info("store_metadata >>>")
db = TinyDB(db_fn)
q = Query()
db.upsert(video, q.video_id == video['video_id'])
db.close()
logging.info("store_metadata <<<")
def retrieve_already_parsed_ids(db_path: str):
db = TinyDB(db_path)
q = Query()
query_result = db.search(q.video_id.exists())
video_ids = []
for item in query_result:
video_ids.append(item['video_id'])
return video_ids
def get_videos(db_path:str):
db = TinyDB(db_path)
q = Query()
query_result = db.search(q.video_id.exists())
return query_result
def get_all_video_ids(base_path: str):
logging.info("get_all_video_ids >>>")
subdirectories = os.listdir(base_path)
video_ids = []
for subdir in subdirectories:
subdir_complete_path = os.path.join(base_path, subdir)
if os.path.isdir(subdir_complete_path):
current_video_ids = os.listdir(subdir_complete_path)
video_ids = video_ids + current_video_ids
video_ids_set = set(video_ids)
video_ids = [*video_ids_set]
logging.info("get_all_video_ids <<<")
return video_ids
def get_delta(ids_in_dir, ids_already_parsed):
logging.info("get_delta >>>")
if not ids_already_parsed:
return ids_in_dir
delta = [item for item in ids_in_dir if item not in ids_already_parsed]
logging.info("{} video to download!".format(len(delta)))
logging.info("get_delta <<<")
return delta
if __name__ == "__main__":
logging.root.handlers = []
#log_path = "/Users/kappa/repositories/enrich_youtube_videos/yt_metadata.log"
log_path="/home/khaled/enrich_yt_videos/yt_metadata.log"
logging.basicConfig(format='%(asctime)s|%(name)s|%(levelname)s| %(message)s',
level=logging.INFO,
filename=log_path)
# set up logging to console
console = logging.StreamHandler()
console.setLevel(logging.INFO)
# set a format which is simpler for console use
formatter = logging.Formatter(fmt='%(asctime)s|%(name)s|%(levelname)s| %(message)s',
datefmt="%d-%m-%Y %H:%M:%S")
console.setFormatter(formatter)
logging.getLogger("").addHandler(console)
logging.info("App started")
with open("config/settings.json") as f:
settings = json.load(f)
logging.info("Settings loaded")
db_fn = settings['db_path']
credential = settings['api_key']
yt_video_ids = get_all_video_ids(settings['video_id_basefolder'])
already_parsed_ids = retrieve_already_parsed_ids(db_fn)
yt_video_ids = get_delta(yt_video_ids, already_parsed_ids)
api = youtube_metadata.load_api_instance(credential)
for video_id in yt_video_ids:
try:
metadata_current_video = youtube_metadata.get_complete_video_infos(video_id, False, True, api)
store_metadata(db_fn, metadata_current_video)
logging.debug("{} APPENDED SUCCESSFULLY!".format(metadata_current_video))
sleep(0.4)
except HttpError as http_error:
logging.error(http_error)
sleep(3)
except Exception as ex:
logging.error("Unable to retrieve infos for the following video id: {video_id}".format(video_id=video_id))
logging.error(ex)
message = {"video_id": video_id,
"title": "",
"description": "",
"publishing_date": "",
"person": [],
"year_in_title": ""}
store_metadata(db_fn, message)
sleep(2)
|
transliteration_to_bw_map = {
'Alif': 'A',
'hamza': 'A',
'Ayn': 'E',
'Ba': 'b',
'Dad': 'D',
'Daad': 'D',
'Dal': 'd',
'dal': 'd',
'Fa': 'f',
'Gh': 'g',
'Ghayn': 'g',
'ha': 'h',
'Ha': 'H',
'Haa': 'H',
'Jiim': 'j',
'Kaf': 'k',
'Kha': 'x',
'Kh': 'x',
'Lam': 'l',
'La': 'l',
'Miim': 'm',
'Meem': 'm',
'Nun': 'n',
'Qaf': 'q',
'Ra': 'r',
'Sad': 'S',
'Saad': 'S',
'Sh': '$',
'Shiin': '$',
'Siin': 's',
'Ta': 't',
'Tay': 'T',
'Tha': 'v',
'Thaa': 'v',
'Thal': '*',
'Dhal': '*',
'Waw': 'w',
'Ya': 'y',
'Za': 'Z',
'Zay': 'z'
}
with open('../content/original/Root_d.txt', 'r') as in_file:
with open('dictionary.v2.txt', 'w') as out_file:
line = in_file.readline()
while line:
parts = line.split(' =')
key = parts[0].split(' ')[0]
meaning = ''
if len(parts) > 1:
meaning = parts[1]
transliterated_key_list = key.split("-")
new_key = ''
for transliterated_key in transliterated_key_list:
new_key = new_key + transliteration_to_bw_map[transliterated_key.strip()]
out_file.write(new_key + "|" + meaning.strip() + "\n");
line = in_file.readline()
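# Illustrative sketch (added; the sample root below is made up): each hyphen-
# separated transliterated letter is looked up in the map above and concatenated
# into a Buckwalter key, e.g. Kaf -> k, Ta -> t, Ba -> b.
_example_root = 'Kaf-Ta-Ba'
_example_key = ''.join(transliteration_to_bw_map[part.strip()] for part in _example_root.split('-'))
assert _example_key == 'ktb'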
|
# ==== import libraries we are using ====
import requests # for http requests (e.g., pulling the RSS feed)
import re # for searching with regular expressions
import json # for reading config.json file
import os # for file system path names
import smtplib # for sending email
import ssl # for making encrypted connection to send email
import socket # for obtaining our host IP address
# ==== pull the RSS feed ====
r = requests.get('https://www.baaqmd.gov/Feeds/AlertRSS.aspx')
matches = re.findall("<description>(.*)</description", r.text) # match text inside <description> tags (there are two)
status = matches[1] # grab the second match
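# Illustrative sanity check (the feed snippet below is made up, not the live RSS
# content): findall returns the text of each <description> element in document
# order, so the second element is the alert status used below.
_demo_feed = "<description>BAAQMD Alerts</description>\n<description>No Alert</description>"
assert re.findall("<description>(.*)</description", _demo_feed)[1] == "No Alert"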
if status == "No Alert":
color = "green"
elif status == "Alert In Effect":
color = "red"
else:
color = "no match" # in future, only send email on no match
# ==== get config details ====
path_to_config = os.path.dirname(os.path.realpath(__file__)) + '/../config.json'
with open(path_to_config) as f:
config = json.load(f)
# ==== write result to file ====
f = open(config["path_to_www_file"], "w")
f.write(color)
f.close()
# ==== get our IP address (see https://cutt.ly/Dh10wYW) ====
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(('10.255.255.255', 1))
host_ip = s.getsockname()[0]
# ==== send an email with the result ====
www_file = os.path.basename(config["path_to_www_file"])
message = f"""\
Subject: Fireplace Alert!
The light should be {color}.
Color has been written to: http://{host_ip}/{www_file}"""
context = ssl.create_default_context()
with smtplib.SMTP_SSL(config["smtp_server"], config["smtp_port"], context=context) as server:
server.login(config["sender_email"], config["password"])
server.sendmail(config["sender_email"], config["recipient_email"], message)
|
#!/usr/bin/python
# Copyright 2015 Google.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import encoder
import glob
import mpeg_settings
import optimizer
import os
class Error(Exception):
pass
def ChooseRates(width, framerate):
# pylint: disable=too-many-return-statements
if width >= 1920 and framerate >= 50:
return [3000, 4500, 7000, 10000]
if width >= 1920 and framerate >= 24:
return [1600, 2500, 4000, 6000]
if width >= 1280 and framerate >= 60:
return [384, 512, 850, 1500] # To match MPEG rates for Johnny
if width >= 832 and framerate >= 50:
return [512, 768, 1200, 2000]
if width >= 416:
return [384, 512, 850, 1500]
if width >= 352:
    # CIF video. No standards activity is behind these choices of rate.
return [256, 384, 512, 850]
if width >= 176:
# QCIF video.
return [256, 384, 512, 850, 1500]
raise Error('Unhandled width/framerate combo: w=%d rate=%d' %
(width, framerate))
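# Illustrative mapping (added, not part of the original script): a 1920-wide clip
# at 60 fps falls into the first branch above, so ChooseRates(1920, 60) returns
# [3000, 4500, 7000, 10000].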
def GenerateFilesetFromDirectory(name):
"""Returns a FileAndRateSet containing all the YUV files in the directory."""
yuvfiles = glob.glob(os.path.join(os.getenv('WORKDIR'),
'video', name, '*.yuv'))
my_set = optimizer.FileAndRateSet()
for yuvfile in yuvfiles:
videofile = encoder.Videofile(yuvfile)
my_set.AddFilesAndRates([videofile.filename],
ChooseRates(videofile.width, videofile.framerate))
return my_set
def PickFileset(name):
if name == 'mpeg_video':
return mpeg_settings.MpegFiles()
elif os.path.isdir(os.path.join('video', name)):
return GenerateFilesetFromDirectory(name)
else:
raise Error('Fileset %s not found' % name)
|
sd = 'icons/' # Icon sub-directory name.
# The forecast includes an icon index number. Below is an ordered list that maps
# each icon index to the corresponding icon filename.
icons = [
'thunderstorms.png', # 0 - Thunderstorms
'windy.png', # 1 - Windy Rain
'unknown.png', # 2 - Windy Rain
'thunderstorms.png', # 3 - Thunderstorms
't-storms2.png', # 4 - T-Storms
'rain_snow.png', # 5 - Rain Snow
'rain_snow.png', # 6 - Rain Sleet
'rain_snow.png', # 7 - Snow/Rain Icy Mix
'rain_snow.png', # 8 - Freezing Drizzle
'rain.png', # 9 - Drizzle
'rain.png', #10 - Freezing Rain
't-showers.png', #11 - T-Showers
#'unknown.png', #11 - Showers
#'unknown.png', #11 - Light Rain
'rain.png', #12 - Heavy Rain
'snow.png', #13 - Snow Flurries
'snow.png', #14 - Light Snow
'snow.png', #15 - Snowflakes
'snow.png', #16 - Heavy Snow
'thunderstorms.png', #17 - Thunderstorms
'unknown.png', #18 - Hail
'unknown.png', #19 - Dust
'fog.png', #20 - Fog
'fog.png', #21 - Haze
'fog.png', #22 - Smoke
'windy.png', #23 - Windy
'windy.png', #24 - Windy
'sunny.png', #25 - Frigid
'partly_cloudy.png', #26 - Cloudy
'partly_cloudy.png', #27 - Mostly Cloudy Night (the "Night" will not be included)
'mostly_cloudy.png', #28 - Mostly Cloudy
'partly_cloudy.png', #29 - Partly Cloudy Night (the "Night" will not be included)
'partly_cloudy.png', #30 - Partly Cloudy
'clear_night.png', #31 - Clear Night (the "Night" will not be included)
'sunny.png', #32 - Sunny
'sunny.png', #33 - Fair
#'unknown.png', #33 - Mostly Clear Night (the "Night" will not be included)
'sunny.png', #34 - Fair
#'unknown.png', #34 - Mostly Sunny
'thunderstorms.png', #35 - Thunderstorms
'sunny.png', #36 - Hot
'scattered_tstorms.png',#37 - Isolated Thunder
'scattered_tstorms.png',#38 - Scattered T-Storms
'rain.png', #39 - Scattered Rain
'rain.png', #40 - Heavy Rain
'snow.png', #41 - Scattered Snow
'snow.png', #42 - Heavy Snow
'snow.png', #43 - Windy/Snowy
'mostly_cloudy.png', #44 - Partly Cloudy Day
'rain.png', #45 - Scattered Showers Night (the "Night" will not be included)
'snow.png' , #46 - Snowy Night
'scattered_tstorms.png' #47 - Scattered T-Storms Night (the "Night" will not be included)
]
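# Illustrative lookup (added, not part of the original data): a forecast icon index
# selects a filename from the ordered list above; index 32 ("Sunny") resolves to
# 'icons/sunny.png'.
_example_icon_path = sd + icons[32]
assert _example_icon_path == 'icons/sunny.png'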
|
from django.utils.translation import gettext_lazy as _
from oscar.defaults import * # noqa
from psycopg2cffi import compat
import os
compat.register()
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
DEBUG = True
SECRET_KEY = "li0$-gnv)76g$yf7p@(cg-^_q7j6df5cx$o-gsef5hd68phj!4"
SITE_ID = 1
USE_I18N = True
LANGUAGE_CODE = "en-us"
LANGUAGES = (
("en-us", _("English")),
("es", _("Spanish")),
)
ROOT_URLCONF = "urls"
ALLOWED_HOSTS = ["*"]
# Configure JUnit XML output
TEST_RUNNER = "xmlrunner.extra.djangotestrunner.XMLTestRunner"
_tox_env_name = os.environ.get("TOX_ENV_NAME")
if _tox_env_name:
TEST_OUTPUT_DIR = os.path.join(BASE_DIR, f"../junit-{_tox_env_name}/")
else:
TEST_OUTPUT_DIR = os.path.join(BASE_DIR, "../junit/")
# Used to encrypt secure acceptance profiles in the database
FERNET_KEYS = [
"epM8Bk2YJlLVLsHqUlriW0Ma7rDpPfHMrAhmxmwdbVqqdgPNEqzeYYxheLdKLPe",
]
INSTALLED_APPS = [
# Core Django
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.postgres",
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.flatpages",
# django-oscar
"oscar",
"oscar.apps.analytics",
"oscar.apps.communication",
"oscar.apps.checkout",
"oscar.apps.address",
"oscar.apps.shipping",
"oscar.apps.catalogue",
"oscar.apps.catalogue.reviews",
"oscar.apps.partner",
"oscar.apps.basket",
"payment", # 'oscar.apps.payment',
"oscar.apps.offer",
"order", # 'oscar.apps.order',
"oscar.apps.customer",
"oscar.apps.search",
"oscar.apps.voucher",
"oscar.apps.wishlists",
"oscar.apps.dashboard",
"oscar.apps.dashboard.reports",
"oscar.apps.dashboard.users",
"oscar.apps.dashboard.orders",
"oscar.apps.dashboard.catalogue",
"oscar.apps.dashboard.offers",
"oscar.apps.dashboard.partners",
"oscar.apps.dashboard.pages",
"oscar.apps.dashboard.ranges",
"oscar.apps.dashboard.reviews",
"oscar.apps.dashboard.vouchers",
"oscar.apps.dashboard.communications",
"oscar.apps.dashboard.shipping",
# 3rd-party apps that oscar depends on
"widget_tweaks",
"haystack",
"treebeard",
"sorl.thumbnail",
"django_tables2",
# django-oscar-api
"rest_framework",
"oscarapi",
"oscarapicheckout",
"cybersource",
]
MIDDLEWARE = (
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.locale.LocaleMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"oscar.apps.basket.middleware.BasketMiddleware",
)
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
"django.template.context_processors.i18n",
"oscar.apps.search.context_processors.search_form",
"oscar.apps.checkout.context_processors.checkout",
"oscar.apps.communication.notifications.context_processors.notifications",
"oscar.core.context_processors.metadata",
],
},
},
]
DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql_psycopg2",
"NAME": "postgres",
"USER": "postgres",
"PASSWORD": "",
"HOST": "postgres",
"PORT": 5432,
}
}
HAYSTACK_CONNECTIONS = {
"default": {
"ENGINE": "haystack.backends.simple_backend.SimpleEngine",
},
}
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"LOCATION": "cybersource-testing-sandbox",
}
}
# Static files (CSS, JavaScript, Images)
STATIC_URL = "/static/"
STATIC_ROOT = os.path.join(BASE_DIR, "public", "static")
MEDIA_URL = "/media/"
MEDIA_ROOT = os.path.join(BASE_DIR, "public", "media")
# Order Status Pipeline
# Needed by oscarapicheckout
ORDER_STATUS_PENDING = "Pending"
ORDER_STATUS_PAYMENT_DECLINED = "Payment Declined"
ORDER_STATUS_AUTHORIZED = "Authorized"
# Other statuses
ORDER_STATUS_SHIPPED = "Shipped"
ORDER_STATUS_CANCELED = "Canceled"
OSCAR_INITIAL_ORDER_STATUS = ORDER_STATUS_PENDING
OSCARAPI_INITIAL_ORDER_STATUS = ORDER_STATUS_PENDING
OSCAR_ORDER_STATUS_PIPELINE = {
ORDER_STATUS_PENDING: (
ORDER_STATUS_PAYMENT_DECLINED,
ORDER_STATUS_AUTHORIZED,
ORDER_STATUS_CANCELED,
),
ORDER_STATUS_PAYMENT_DECLINED: (ORDER_STATUS_CANCELED,),
ORDER_STATUS_AUTHORIZED: (
ORDER_STATUS_SHIPPED,
ORDER_STATUS_CANCELED,
ORDER_STATUS_PAYMENT_DECLINED,
),
ORDER_STATUS_SHIPPED: (),
ORDER_STATUS_CANCELED: (),
}
OSCAR_INITIAL_LINE_STATUS = ORDER_STATUS_PENDING
OSCAR_LINE_STATUS_PIPELINE = {
ORDER_STATUS_PENDING: (ORDER_STATUS_SHIPPED, ORDER_STATUS_CANCELED),
ORDER_STATUS_SHIPPED: (),
ORDER_STATUS_CANCELED: (),
}
OSCAR_ALLOW_ANON_CHECKOUT = True
OSCAR_DEFAULT_CURRENCY = "USD"
OSCARAPI_BLOCK_ADMIN_API_ACCESS = False
# Cybersource Config. Test key expires on 2022-04-10
CYBERSOURCE_ORG_ID = "someorg"
CYBERSOURCE_PROFILE = "2A37F989-C8B2-4FEF-ACCF-2562577780E2"
CYBERSOURCE_ACCESS = "47ba466bbd223f7097b94d8e18bd654c"
CYBERSOURCE_SECRET = "78b76f8cd5d14b0cad48fcb79107fb84bca61697151445fe82e98832193cd998760a05da6ae94983a94615c98555e2a29599b16afb044c7cb8007e9b68df560ee771e6499a4242fdab0f8302a698bf65c4e21470f7e04863afe00c54c634b263b6b796cc3e1647b19af4cf51e2a52f59dd8b0be725d549e4b81747f4361f900d" # NOQA
CYBERSOURCE_REDIRECT_PENDING = "checkout:index"
CYBERSOURCE_REDIRECT_SUCCESS = "checkout:thank-you"
CYBERSOURCE_REDIRECT_FAIL = "checkout:index"
# Cybersource SOAP Config
CYBERSOURCE_MERCHANT_ID = os.environ.get("CYBERSOURCE_MERCHANT_ID")
CYBERSOURCE_SOAP_KEY = os.environ.get("CYBERSOURCE_SOAP_KEY")
# Configure payment methods
API_ENABLED_PAYMENT_METHODS = [
{
"method": "cybersource.methods.Cybersource",
"permission": "oscarapicheckout.permissions.Public",
},
{
"method": "cybersource.methods.Bluefin",
"permission": "oscarapicheckout.permissions.Public",
},
]
CYBERSOURCE_SHIPPING_METHOD_MAPPING = {
"free-shipping": "lowcost",
"ups-ground": "threeday",
"ups-2-day": "twoday",
"ups-next-day": "oneday",
}
|
from starry_process import StarryProcess
from starry_process.latitude import beta2gauss
import numpy as np
from tqdm import tqdm
import matplotlib.pyplot as plt
import pymc3 as pm
import exoplanet as xo
import theano
import theano.tensor as tt
from tqdm import tqdm
from corner import corner
from scipy.stats import gaussian_kde
from scipy.stats import median_abs_deviation as mad
def test_jacobian(plot=False):
# Compile the PDF
_x = tt.dvector()
_a = tt.dscalar()
_b = tt.dscalar()
pdf = theano.function(
[_x, _a, _b], StarryProcess(a=_a, b=_b).latitude.pdf(_x)
)
with pm.Model() as model:
# Uniform sampling in `a` and `b`
a = pm.Uniform("a", 0, 1)
b = pm.Uniform("b", 0, 1)
# Likelihood w/ no data: just the prior!
sp = StarryProcess(a=a, b=b)
m1, m2 = 0, 80
s1, s2 = 0, 45
xmin = -90
xmax = 90
pm.Potential("jacobian", sp.log_jac())
# Sample
# NOTE: Sampling straight from this prior is really tough because
# it has really high curvature in some places. Typically
# about half of the samples end in divergences! (This is much less of
# an issue when we have data.) Despite these issues, the test still
# works: the posterior density in `mu` and `sigma` is quite uniform.
trace = pm.sample(
tune=1000,
draws=25000,
chains=4,
step=xo.get_dense_nuts_step(target_accept=0.9),
)
# Transform samples to `mu`, `sigma`
samples = np.array(pm.trace_to_dataframe(trace))
a, b = samples.T
mu, sigma = beta2gauss(a, b)
tr_samples = np.transpose([mu, sigma])
if plot:
corner(tr_samples, plot_density=False, plot_contours=False)
plt.figure()
ndraws = 1000
idx = np.random.choice(len(samples), size=(ndraws,))
x = np.linspace(xmin, xmax, 1000)
p = np.empty((ndraws, len(x)))
for i in tqdm(range(ndraws)):
p[i] = pdf(x, a[idx[i]], b[idx[i]])
plt.plot(x, p[i], color="C0", lw=1, alpha=0.1)
plt.show()
# Approximate the density with a Gaussian KDE
# and check that the variation is < 10%
kernel = gaussian_kde(tr_samples.T)
X, Y = np.mgrid[m1:m2:100j, s1:s2:100j]
positions = np.vstack([X.ravel(), Y.ravel()])
density = np.reshape(kernel(positions).T, X.shape).T
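    # Note (added for clarity): 1.4826 below is the usual consistency factor that
    # rescales the median absolute deviation to a Gaussian-equivalent standard
    # deviation, i.e. 1 / Phi^{-1}(0.75).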
std = 1.4826 * mad(density.flatten())
mean = np.mean(density.flatten())
assert std / mean < 0.1
if __name__ == "__main__":
test_jacobian(plot=True)
|
from pytest import fixture, mark, raises
from pytest_asyncio import fixture as async_fixture
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import sessionmaker
from sqlmodel import SQLModel
import todo_app.models as models
@async_fixture
async def engine():
engine = create_async_engine("sqlite+aiosqlite:///:memory:", future=True)
async with engine.begin() as connection:
await connection.run_sync(SQLModel.metadata.create_all)
return engine
@fixture
def session_factory(engine):
return sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
@async_fixture
async def session(session_factory):
async with session_factory() as sess:
yield sess
async def _add_model(session, model, **kwargs):
session.add(obj := model(**kwargs))
await session.commit()
await session.refresh(obj)
return obj
async def add_user(session, **kwargs) -> models.User:
return await _add_model(session, models.User, **kwargs)
async def add_project(session, **kwargs) -> models.Project:
return await _add_model(session, models.Project, **kwargs)
async def add_task(session, **kwargs) -> models.Task:
return await _add_model(session, models.Task, **kwargs)
@mark.asyncio
async def test_user_projects(session):
user = await add_user(session, name="Bob")
await add_project(
session, name="Test", description="Test project", owner_id=user.id
)
projects = await user.get_projects(session)
assert len(projects) == 1
assert isinstance(projects[0], models.Project)
@mark.asyncio
async def test_user_multiple_projects(session):
user = await add_user(session, name="Bob")
await add_project(
session, name="Test", description="Test project", owner_id=user.id
)
await add_project(
session, name="Test 2", description="Test project 2", owner_id=user.id
)
projects = await user.get_projects(session)
assert len(projects) == 2
assert isinstance(projects[0], models.Project)
assert isinstance(projects[1], models.Project)
assert projects[0].name == "Test"
assert projects[1].name == "Test 2"
@mark.asyncio
async def test_user_inbox_project(session):
user = await add_user(session, name="Bob")
project = await add_project(
session,
name="Inbox",
description="Bob's inbox",
owner_id=user.id,
inbox=True,
)
assert (await user.get_inbox(session)).id == project.id
@mark.asyncio
async def test_user_no_inbox_project(session):
user = await add_user(session, name="Bob")
project = await add_project(
session,
name="Inbox",
description="Bob's inbox",
owner_id=user.id,
)
assert (await user.get_inbox(session)).id == project.id
@mark.asyncio
async def test_user_inbox_no_projects(session):
user = await add_user(session, name="Bob")
with raises(ValueError):
await user.get_inbox(session)
@mark.asyncio
async def test_project_owner(session):
user = await add_user(session, name="Bob")
project = await add_project(
session, name="Test", description="Test project", owner_id=user.id
)
assert project.owner.id is not None
assert project.owner.id == user.id
@mark.asyncio
async def test_project_tasks(session):
user = await add_user(session, name="Bob")
project = await add_project(
session, name="Test", description="Test project", owner_id=user.id
)
await add_task(
session,
name="Test",
description="Test task",
author_id=user.id,
project_id=project.id,
)
tasks = await project.get_tasks(session)
assert len(tasks) == 1
@mark.asyncio
async def test_task_author(session):
user = await add_user(session, name="Bob")
project = await add_project(
session, name="Test", description="Test project", owner_id=user.id
)
task = await add_task(
session,
name="Test",
description="Test task",
author_id=user.id,
project_id=project.id,
)
assert task.author.id is not None
assert task.author.id == user.id
@mark.asyncio
async def test_task_project(session):
user = await add_user(session, name="Bob")
project = await add_project(
session, name="Test", description="Test project", owner_id=user.id
)
task = await add_task(
session,
name="Test",
description="Test task",
author_id=user.id,
project_id=project.id,
)
task_project = task.project
assert task_project
assert task_project.id == project.id
|
import pytest
class TestFiltersView:
def test_basic_filters(self, client):
res = client.get("/works/filters/is_oa:true")
json_data = res.get_json()
filter_1 = json_data["filters"][0]
assert filter_1["key"] == "is_oa"
assert filter_1["type"] == "BooleanField"
assert filter_1["is_negated"] == False
assert filter_1["values"][0]["value"] == "true"
assert filter_1["values"][0]["display_name"] == "true"
assert filter_1["values"][0]["count"] == 7709
def test_filter_with_search(self, client):
res = client.get("/works/filters/display_name.search:science,is_oa:true")
json_data = res.get_json()
filter_1 = json_data["filters"][0]
assert filter_1["key"] == "display_name.search"
assert filter_1["type"] == "SearchField"
assert filter_1["values"][0]["value"] == "science"
assert filter_1["values"][0]["display_name"] == "science"
assert filter_1["values"][0]["count"] == 32
filter_2 = json_data["filters"][1]
assert filter_2["key"] == "is_oa"
assert filter_2["type"] == "BooleanField"
assert filter_2["values"][0]["display_name"] == "true"
assert filter_2["values"][0]["count"] == 32
def test_filter_with_full_search(self, client):
res = client.get("/works/filters/is_oa:true?search=science")
json_data = res.get_json()
filter_1 = json_data["filters"][0]
assert filter_1["key"] == "is_oa"
assert filter_1["type"] == "BooleanField"
assert filter_1["values"][0]["display_name"] == "true"
assert filter_1["values"][0]["count"] == 32
filter_2 = json_data["filters"][1]
assert filter_2["key"] == "search"
assert filter_2["type"] == "FullSearchField"
assert filter_2["values"][0]["value"] == "science"
assert filter_2["values"][0]["display_name"] == "science"
assert filter_2["values"][0]["count"] == 32
def test_filter_with_search_negation(self, client):
res = client.get("/works/filters/display_name.search:science,oa_status:!gold")
json_data = res.get_json()
filter_1 = json_data["filters"][0]
assert filter_1["key"] == "display_name.search"
assert filter_1["is_negated"] == False
assert filter_1["values"][0]["display_name"] == "science"
assert filter_1["values"][0]["count"] == 2
filter_2 = json_data["filters"][1]
assert filter_2["key"] == "oa_status"
assert filter_2["is_negated"] == True
assert filter_2["values"][0]["display_name"] == "gold"
assert filter_2["values"][0]["count"] == 2
def test_filter_convert_id(self, client):
res = client.get("/works/filters/host_venue.id:V90590500")
json_data = res.get_json()
filter_1 = json_data["filters"][0]
assert filter_1["key"] == "host_venue.id"
assert filter_1["values"][0]["value"] == "V90590500"
assert filter_1["values"][0]["display_name"] == "Brazilian Journal of Biology"
def test_filter_convert_country(self, client):
res = client.get("/works/filters/institutions.country_code:ca")
json_data = res.get_json()
filter_1 = json_data["filters"][0]
assert filter_1["key"] == "institutions.country_code"
assert filter_1["values"][0]["value"] == "ca"
assert filter_1["values"][0]["display_name"] == "Canada"
def test_filter_multiple(self, client):
res = client.get(
"/works/filters/display_name.search:" ",concepts.id:C556758197%7CC73283319"
)
json_data = res.get_json()
filter_1 = json_data["filters"][0]
assert filter_1["values"][0]["count"] == 33
filter_2 = json_data["filters"][1]
assert filter_2["key"] == "concepts.id"
assert filter_2["values"][0]["value"] == "C556758197"
assert filter_2["values"][0]["count"] == 24
assert filter_2["values"][1]["value"] == "C73283319"
assert filter_2["values"][1]["count"] == 12
def test_filter_multiple_negated(self, client):
res = client.get(
"/works/filters/display_name.search:the,concepts.id:!C556758197%7CC73283319"
)
json_data = res.get_json()
filter_2 = json_data["filters"][1]
assert filter_2["key"] == "concepts.id"
assert filter_2["is_negated"] == True
assert filter_2["values"][0]["value"] == "C556758197"
assert filter_2["values"][0]["count"] == 0
assert filter_2["values"][1]["value"] == "C73283319"
assert filter_2["values"][1]["count"] == 0
def test_filter_invalid_id(self, client):
res = client.get("/works/filters/concepts.id:fff")
json_data = res.get_json()
assert json_data["error"] == "Invalid query parameters error."
assert (
json_data["message"]
== "https://openalex.org/fff is not a valid OpenAlex ID."
)
@pytest.mark.skip
def test_filter_url_single(self, client):
res = client.get("/works/filters/concepts.id:C73283319")
json_data = res.get_json()
filter_1 = json_data["filters"][0]
assert (
filter_1["values"][0]["url"]
== "http://localhost/works?filter=concepts.id:C73283319"
)
@pytest.mark.skip
def test_filter_url_with_search(self, client):
res = client.get(
"/works/filters/display_name.search:the,concepts.id:!C556758197"
)
json_data = res.get_json()
filter_1 = json_data["filters"][0]
assert (
filter_1["values"][0]["url"]
== "http://localhost/works?filter=display_name.search:the"
)
filter_2 = json_data["filters"][1]
assert (
filter_2["values"][0]["url"]
== "http://localhost/works?filter=display_name.search:the,concepts.id:C556758197"
)
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import traceback
if len(sys.argv) != 3:
print('Usage: "generate_spirv_offline.py <compilation_cache_dir> <cl_device_info_file>"')
exit(1)
compilation_cache_dir = sys.argv[1]
cl_device_info_filename = sys.argv[2]
def generate_spirv():
print("Generating SPIR-V files")
build_options = ''
if os.path.exists(compilation_cache_dir):
for root, dirs, files in os.walk(compilation_cache_dir):
for file in files:
if file.endswith('.cl'):
options_file_name = file[:-2] + "options"
if os.path.exists(os.path.join(root, options_file_name)):
optFile = open (os.path.join(root, options_file_name), 'r')
build_options = optFile.readline().strip()
print(build_options)
source_filename = os.path.join(root, file)
output_filename = os.path.join(root, file[:-2]) + "spv"
command_line = ("cl_offline_compiler" +
" --source=" + source_filename +
" --output=" + output_filename +
" --cl-device-info=" + cl_device_info_filename +
" --mode=spir-v -- " +
'"' + build_options + '"')
print(command_line)
os.system(command_line)
return 0
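# Example of the command line assembled in generate_spirv() above (all paths,
# file names and build options here are hypothetical):
#   cl_offline_compiler --source=/cache/foo/kernel.cl --output=/cache/foo/kernel.spv \
#       --cl-device-info=device.info --mode=spir-v -- "-cl-std=CL2.0"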
def main():
try:
generate_spirv()
except Exception:
traceback.print_exc(file=sys.stdout)
sys.exit(0)
if __name__ == "__main__":
main()
|
import word2vec
word2vec.word2phrase('text8', 'text8-phrases', verbose=True)
word2vec.word2vec('text8-phrases', 'text8.bin', size=100, verbose=True)
model = word2vec.load('text8.bin')
indexes, metrics = model.analogy(pos=['king', 'woman'], neg=['man'])
for i in model.generate_response(indexes, metrics).tolist():
print(i)
|
"""
# ==================================================================================
# Author: Marc Sanchez Net
# Date: 03/14/2019
# Copyright (c) 2019, Jet Propulsion Laboratory.
# ==================================================================================
"""
from collections import defaultdict
from copy import deepcopy
import numpy as np
from simulator.connections.DtnAbstractConnection import DtnAbstractConnection, TransmissionError
from uuid import uuid1
from warnings import warn
from simulator.core.DtnSemaphore import DtnSemaphore
# Dictionary with all broadcast connection instances
# {origin: DtnScheduledBroadcastConnection}
_instances = {}
class DtnScheduledBroadcastConnection(DtnAbstractConnection):
""" A scheduled connection that, if in view at the same time, propagates data to one or
multiple destinations.
This class manages internally all connections from an origin to one or multiple
destinations. Therefore, it re-implements __new__ to return an already existing
instance if you request an already existing broadcast connection
"""
def __new__(cls, env, cid, orig, *args):
# If you have an instance for this origin already,
# return it
if orig in _instances: return _instances[orig]
# Create new instance
instance = super().__new__(cls)
# Store new instance
_instances[orig] = instance
return instance
def __init__(self, env, cid, orig, dest, props):
# Call parent constructor
super(DtnScheduledBroadcastConnection, self).__init__(env, cid, orig, dest, props)
        # Track, for each potential neighbor, whether the link is currently active
self.active = {n: False for n in env.nodes}
# Map of messages in transit {dest: set(uuid1, uuid2, ...)}
self.in_transit = defaultdict(set)
# Flags to only initialize once
self.initialized = False
self.initialized_contacts = False
def initialize(self, **kwargs):
# If this connection is already running, skip
if self.initialized: return
# Call parent initializer
super(DtnScheduledBroadcastConnection, self).initialize()
# Mark as initialized
self.initialized = True
def initialize_contacts_and_ranges(self):
# If this connection is already running, skip
if self.initialized_contacts: return
# Call parent
super(DtnScheduledBroadcastConnection, self).initialize_contacts_and_ranges()
# Initialize variables
db = self.mobility_model.contacts_df
# Get a copy of the contact plan for this contact
cp = db.loc[db.orig == self.orig.nid].copy()
# If no range intervals are found, invert orig, dest since connection is assumed
# to be symmetric
if cp.empty:
cp = db.loc[db.dest == self.orig.nid]
cp.rename({'orig':'dest', 'dest':'orig'}, axis=1, inplace=True)
# If no range intervals are available at this point, exit
if cp.empty: return
# Sort range intervals
cp = cp.sort_values(by=['tstart', 'tend'])
# Drop rows with no duration
self.contact_plan = cp[cp.duration != 0.0]
# Create the broadcast intervals
        self.process_broadcast_opportunities()
# Mark the connection as initialized
self.initialized_contacts = True
    def process_broadcast_opportunities(self):
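        # (Added note) The loop below is a sweep over contact start/end events:
        # at every event time it records the set of contact ids currently in view,
        # which run() later uses to open or close the broadcast connection.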
# Initialize variables
time = []
contacts = []
# Transform for fast processing
df = self.contact_plan.copy().to_dict(orient='list')
df['index'] = self.contact_plan.index.tolist()
        # Create a list with the contact ids active at any point in time
while not np.isnan(df['tstart']).all() or not np.isnan(df['tend']).all():
# Get the index for the next contact start. Catch the case where
# you are just processing contact ends
try:
ts_idx = np.nanargmin(df['tstart'])
except ValueError:
ts_idx = np.nan
# Get the index for the next contact end
te_idx = np.nanargmin(df['tend'])
# Grab the list of current contacts in view
inview = set() if len(contacts) == 0 else deepcopy(contacts[-1])
# Next event is contact end
if np.isnan(ts_idx) or df['tstart'][ts_idx] > df['tend'][te_idx]:
# Next event is a contact end
time.append(df['tend'][te_idx])
# Remove the contact that ends
contacts.append(inview - {df['index'][te_idx]})
# Set this event to NaN
df['tend'][te_idx] = np.nan
continue
# Record the event time
time.append(df['tstart'][ts_idx])
# Add a new entry
contacts.append(inview | {df['index'][ts_idx]})
# Set this event to NaN
df['tstart'][ts_idx] = np.nan
# Initialize variables
prev_t = -1
to_remove = np.zeros(len(time), dtype=bool)
# Figure out where duplicates are located
for i, t in enumerate(time):
if t > prev_t:
prev_t = t
continue
to_remove[i - 1] = True
# Eliminate duplicates
self.time = np.array(time)[~to_remove]
self.contacts = np.array(contacts)[~to_remove]
# If empty, issue warnings
if len(self.time) != len(self.contacts):
            warn(f'Broadcast connection for {self.orig.nid} has mismatched time/contact lists')
if len(self.time) == 0:
warn(f'Broadcast connection for {self.orig.nid} is empty')
def run(self):
# If you do not have a destination, return
if len(self.time) == 0: yield self.env.exit()
# If no contact information is available, return
if len(self.contacts) == 0: yield self.env.exit()
# Iterate over list of active contacts
for t, cts in zip(self.time, self.contacts):
# Wait until the next event
yield self.env.timeout(max(0.0, t-self.t))
# If at this point in time no one is in view, close
if len(cts) == 0:
self.current_contacts = {}
self.current_dests = {}
self.in_transit = defaultdict(set)
self.close_connection()
continue
# Store current contacts
self.current_contacts = cts
self.current_dests = {self.contact_plan.dest[cid] for cid in cts}
# Eliminate all messages that were still in transit. This happens because
# a message might not be completely delivered before the state of the
# connection changes. See ``self.tx_to_neighbor``
ended = set(self.in_transit.keys()) - self.current_dests
for dest in ended:
self.in_transit[dest] = set()
# Open the connection
self.open_connection()
def set_contact_properties(self):
# Initialize variables
cp = self.contact_plan
# Save connection properties
self.prop_delay = {cp.dest[cid]: cp.range[cid] for cid in self.current_contacts}
def do_transmit(self, peer_duct, message, BER, direction):
# Hack to transform this function to a generator
yield self.env.timeout(0)
# Initialize variables
valid_ducts = []
# If the peer duct's parent is not the list of current destinations, then this message
# is effectively lost since all routers will discard it.
if peer_duct.parent.nid not in self.current_dests:
self.lost.append(message)
# Find all ducts where this message should be delivered
for dest in self.current_dests:
# Get the ducts for this destination towards this node
ducts = self.env.nodes[dest].ducts[self.orig.nid]
# If more than one duct, throw error. This is not allowed because you
# don't have a criteria to choose between them
if len(ducts) > 1:
raise RuntimeError('Only one duct allowed in DtnScheduledBroadcastConnection')
# Get the duct id
duct_id = list(ducts.keys())[0]
# Get another peer duct
peer_duct2 = ducts[duct_id]['induct'] if direction == 'fwd' else ducts[duct_id]['outduct']
# Add duct to list of valid ducts
valid_ducts.append(peer_duct2)
# Log start of transmission
self.disp('{} starts being propagated', message)
# Monitor the start of transmission
self.monitor_tx_start(message)
# Create a new UUID for this message
m_uuid = uuid1()
# Put the messages in transit
for duct in valid_ducts:
self.in_transit[duct.parent.nid].add(m_uuid)
self.env.process(self.tx_to_neighbor(m_uuid, message, duct, BER, direction))
def tx_to_neighbor(self, m_uuid, message, duct, BER, direction):
# The duct's parent is the destination of this connection.
# (duct.neighbor == self.orig)
dest = duct.parent.nid
# Do the actual transmission (This is a blocking call)
try:
yield from self.propagate(message, dest=dest)
except TransmissionError:
pass
# If the uuid is not in the in-transit map, this message got lost
if m_uuid not in self.in_transit[dest]:
self.lost.append(message)
return
# Monitor end of transmission
self.monitor_tx_end(message)
# Remove the record for this message
self.in_transit[dest].remove(m_uuid)
# Create copy
message = deepcopy(message)
# Decide if message has error
MER = (1 - (1 - BER) ** message.num_bits)
if MER > 0: message.has_errors = (np.random.random() < MER)
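        # Worked example (added for illustration) of the formula above: with
        # BER = 1e-6 and a 10,000-bit message, MER = 1 - (1 - 1e-6)**10000 ~ 0.01,
        # i.e. roughly a 1% chance of flagging this copy as corrupted.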
# Put the message in the destination node
        # Note: This is a non-blocking call since the in_queue
# of a duct has infinite capacity
if direction == 'fwd':
duct.send(message)
elif direction == 'ack':
duct.ack(message)
else:
raise ValueError('Direction can only be "fwd" or "ack"')
def transmission_error(self, message):
err = '\n****** Cannot send message while connection is closed ******\n'
err += 't={:.3f}:\tPropagation delay for connection ({}, {}) is None\n'.format(self.t, self.orig, self.dest)
err += '\n' + repr(message) + '\n'
err += 'Range interval table:\n'
err += str(self.contact_plan)
raise TransmissionError(err)
|
import time
import unittest
from cache_dependencies import interfaces, locks, utils
from cache_dependencies.tests import helpers
try:
from unittest import mock
except ImportError:
import mock
class AbstractDependencyLockTestCase(unittest.TestCase):
"""Abstract class.
See http://stackoverflow.com/questions/1323455/python-unit-test-with-base-and-sub-class
and self.run()
"""
delay = 0
lock_factory = None
def setUp(self):
self.transaction = mock.Mock(interfaces.ITransaction)
self.transaction.get_start_time.return_value = time.time() - 2
self.cache = helpers.CacheStub()
self.lock = self.lock_factory(lambda: self.cache, self.delay)
self.dependency = self._make_dependency()
self.tag_versions = {
'tag1': utils.generate_tag_version(),
'tag2': utils.generate_tag_version(),
'tag3': utils.generate_tag_version(),
}
self._set_tag_versions()
def _set_tag_versions(self):
self.cache.set_many(
{utils.make_tag_key(tag): tag_version for tag, tag_version in self.tag_versions.items()},
3600
)
@staticmethod
def _make_dependency():
return mock.Mock(spec=interfaces.IDependency)
def test_evaluate(self):
self.lock.evaluate(self.dependency, self.transaction, 1)
self.dependency.evaluate.assert_called_once_with(self.cache, self.transaction, 1)
def run(self, result=None):
if self.__class__.__name__.startswith('Abstract'):
return
super(AbstractDependencyLockTestCase, self).run(result)
class ReadUncommittedDependencyLockTestCase(AbstractDependencyLockTestCase):
lock_factory = locks.ReadUncommittedDependencyLock
def test_acquire(self):
self.lock.acquire(self.dependency, self.transaction, 1)
self.dependency.acquire.assert_not_called()
def test_release(self):
self.lock.release(self.dependency, self.transaction, 1)
self.dependency.release.assert_not_called()
class ReadUncommittedDependencyLockDelayedTestCase(ReadUncommittedDependencyLockTestCase):
delay = 1
def test_release(self):
super(ReadUncommittedDependencyLockDelayedTestCase, self).test_release()
time.sleep(2)
self.dependency.invalidate.assert_called_once_with(self.cache, 1)
class ReadCommittedDependencyLockTestCase(AbstractDependencyLockTestCase):
lock_factory = locks.ReadCommittedDependencyLock
def test_acquire(self):
self.lock.acquire(self.dependency, self.transaction, 1)
self.dependency.acquire.assert_not_called()
def test_release(self):
self.lock.release(self.dependency, self.transaction, 1)
self.dependency.invalidate.assert_called_once_with(self.cache, 1)
class ReadCommittedDependencyLockDelayedTestCase(ReadCommittedDependencyLockTestCase):
delay = 1
def test_release(self):
super(ReadCommittedDependencyLockDelayedTestCase, self).test_release()
time.sleep(2)
self.dependency.invalidate.assert_called_with(self.cache, 1)
class RepeatableReadDependencyLockTestCase(AbstractDependencyLockTestCase):
lock_factory = locks.RepeatableReadDependencyLock
def test_acquire(self):
self.lock.acquire(self.dependency, self.transaction, 1)
self.dependency.acquire.assert_called_once_with(self.cache, self.transaction, 1)
def test_release(self):
self.lock.release(self.dependency, self.transaction, 1)
self.dependency.release.assert_called_once_with(self.cache, self.transaction, self.delay, 1)
class RepeatableReadDependencyLockDelayedTestCase(RepeatableReadDependencyLockTestCase):
delay = 1
class SerializableDependencyLockTestCase(RepeatableReadDependencyLockTestCase):
lock_factory = locks.SerializableDependencyLock
class SerializableDependencyLockDelayedTestCase(SerializableDependencyLockTestCase):
delay = 1
|
"""Auto-generated file, do not edit by hand. CH metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_CH = PhoneMetadata(id='CH', country_code=41, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='[2-9]\\d{8}|860\\d{9}', possible_number_pattern='\\d{9}(?:\\d{3})?'),
fixed_line=PhoneNumberDesc(national_number_pattern='(?:2[12467]|3[1-4]|4[134]|5[256]|6[12]|[7-9]1)\\d{7}', possible_number_pattern='\\d{9}', example_number='212345678'),
mobile=PhoneNumberDesc(national_number_pattern='7[5-9]\\d{7}', possible_number_pattern='\\d{9}', example_number='781234567'),
toll_free=PhoneNumberDesc(national_number_pattern='800\\d{6}', possible_number_pattern='\\d{9}', example_number='800123456'),
premium_rate=PhoneNumberDesc(national_number_pattern='90[016]\\d{6}', possible_number_pattern='\\d{9}', example_number='900123456'),
shared_cost=PhoneNumberDesc(national_number_pattern='84[0248]\\d{6}', possible_number_pattern='\\d{9}', example_number='840123456'),
personal_number=PhoneNumberDesc(national_number_pattern='878\\d{6}', possible_number_pattern='\\d{9}', example_number='878123456'),
voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
pager=PhoneNumberDesc(national_number_pattern='74[0248]\\d{6}', possible_number_pattern='\\d{9}', example_number='740123456'),
uan=PhoneNumberDesc(national_number_pattern='5[18]\\d{7}', possible_number_pattern='\\d{9}', example_number='581234567'),
voicemail=PhoneNumberDesc(national_number_pattern='860\\d{9}', possible_number_pattern='\\d{12}', example_number='860123456789'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
national_prefix='0',
national_prefix_for_parsing='0',
number_format=[NumberFormat(pattern='([2-9]\\d)(\\d{3})(\\d{2})(\\d{2})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['[2-7]|[89]1'], national_prefix_formatting_rule='0\\1'),
NumberFormat(pattern='([89]\\d{2})(\\d{3})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['8[047]|90'], national_prefix_formatting_rule='0\\1'),
NumberFormat(pattern='(\\d{3})(\\d{2})(\\d{3})(\\d{2})(\\d{2})', format='\\1 \\2 \\3 \\4 \\5', leading_digits_pattern=['860'], national_prefix_formatting_rule='0\\1')],
mobile_number_portable_region=True)
|
"""
doxybridge.renderer.rst.doxygen.base
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright (c) 2011 The Department of Arts and Culture, The Government
of the Republic of South Africa.
:copyright: Copyright (c) 2009, Michael Jones
:license: MIT, see LICENSE for details.
"""
class Renderer(object):
def __init__(self,
project_info,
data_object,
renderer_factory,
node_factory,
state,
document,
directive
):
self.project_info = project_info
self.data_object = data_object
self.renderer_factory = renderer_factory
self.node_factory = node_factory
self.state = state
self.document = document
self.directive = directive
|
"""
Test cases to validate dashboard base image configurations
"""
import subprocess
import pytest
import requests
import testinfra
DOCKER_IMAGE_NAME = 'dashboard:latest'
# scope='session' uses the same container for all the tests;
# scope='function' uses a new container per test function.
@pytest.fixture(scope='session')
def host():
"""
Pytest fixture to manage the lifecycle of a container of interest using the specified DOCKER_IMAGE_NAME
:return: testinfra connection to the container
"""
# run a container
docker_id = subprocess.check_output(
[
'docker',
'run',
'-p'
'3527:3527',
'-d',
DOCKER_IMAGE_NAME,
]
).decode().strip()
# return a testinfra connection to the container
yield testinfra.get_host("docker://" + docker_id)
# at the end of the test suite, destroy the container
subprocess.check_call(['docker', 'rm', '-f', docker_id])
# Scenario: Check if all the python packages are installed
def test_pip_packages(host):
"""
Test case to check if all the python packages are installed as per requirements.txt
:param host: pytest.fixture to access the docker container of our interest
:return:
"""
expected_python_pkg = ['requests', 'Flask']
installed_python_pkg = (host.pip_package.get_packages()).keys()
    print(installed_python_pkg)
for expected_pkg in expected_python_pkg:
assert expected_pkg in installed_python_pkg
# Scenario: Check if the application files are copied to the docker
def test_application_artifacts(host):
"""
Test case to verify if all the application files are copied to the image
:param host: pytest.fixture to access the docker container of our interest
:return:
"""
# TEST: CHECK IF THE WORKDIR EXISTS
application_directory = host.file('/dashboard/')
assert application_directory.exists
# TEST: CODE ARTIFACTS ARE COPIED TO THE WORKDIR
work_dir_contents = host.run('ls -R /dashboard/').stdout
    print(work_dir_contents)
expected_work_dir_contents = ['static', 'templates', 'requirements.txt', 'app.py', 'Chart.js', 'chart.html']
for artifact in expected_work_dir_contents:
assert artifact in work_dir_contents
# Scenario: Check if PYTHONPATH is set in the container
def test_env_var(host):
"""
Test case to verify if the PYTHONPATH env variable is set
:param host: pytest.fixture to access the docker container of our interest
:return:
"""
python_path = host.run('echo $PYTHONPATH').stdout
assert str(python_path).strip() == '/dashboard'
# Scenario: Check if Dashboard is Running
def test_webapp(host):
"""
    Test case to verify that the dashboard web application is running and serving the expected page
:param host: pytest.fixture to access the docker container of our interest
:return:
"""
# TEST: Check if port 3527 is open and up for listening any requests
# host.run('yum install -y net-tools')
# netstat_op = host.run('netstat -anlp | grep 3527').stdout
# assert 'tcp 0 0 0.0.0.0:3527 0.0.0.0:* LISTEN' in netstat_op
# TEST: Check if the application is running - Verify the page title & request is successful
curl_op = host.run('curl -vs http://0.0.0.0:3527').stdout
assert '<title>Sentiment Analysis Summary</title>' in curl_op
response = requests.get('http://0.0.0.0:3527')
assert response.status_code == 200
|
"""
Entradas
Horas trabajadas-->int-->H_T
Precio de la hora-->int-->P_h
Salidas
Salario neto-->int-->S_n
"""
#Entradas
P_h=float(input("Precio por cada hora trabajada: "))
H_t=int(input("Numero de horas trabajadas: "))
#Caja negra
S=float((H_t*P_h))
S_n=int((S-S*0.20))
#Salida
print("El salario neto es de:",S_n)
|
#! /usr/bin/env python
# Configuration for the project.
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
config = AttrDict()
config.print_info = False
config.data_root = '../dataSets/'
config.val_ratio = 0.2
# Configuration for training feature.
config.add_weather = True
config.weather_attr = ['']
config.add_history = True
config.window_num = 6
# Parameters for xgb model.
xgb_params = {}
xgb_params["objective"] = "reg:linear"
xgb_params["eta"] = 0.02
xgb_params["min_child_weight"] = 8
xgb_params["subsample"] = 0.9
xgb_params["colsample_bytree"] = 0.8
xgb_params["max_depth"] = 8
xgb_params["seed"] = 1
xgb_params["silent"] = 1
use_pretrained = False
config.xgb_model = '../model/model_xgb_reg:linear_0.8_8_history6_iter5000.bin'
if use_pretrained:
xgb_params['pretrained_model'] = config.xgb_model
config.xgb_params = xgb_params
config.xgb_num_round = 1500
|
from api.decorators import api_view, request_data, setting_required
from api.permissions import IsAdminOrReadOnly
from api.mon.vm.api_views import VmMonitoringView, VmSLAView, VmHistoryView
__all__ = ('mon_vm_define', 'mon_vm_sla', 'mon_vm_history')
#: vm_status: GET:
#: vm_status: PUT: notcreated, running, stopped, stopping
@api_view(('GET', 'PUT'))
@request_data(permissions=(IsAdminOrReadOnly,)) # get_vm() = IsVmOwner
def mon_vm_define(request, hostname_or_uuid, data=None):
"""
Show (:http:get:`GET </mon/vm/(hostname_or_uuid)/monitoring>`) or
update (:http:put:`PUT </mon/vm/(hostname_or_uuid)/monitoring>`)
a VM's monitoring interface definition.
.. note:: A VM's monitoring interface is automatically created for \
every :py:func:`monitored VM <api.vm.define.views.vm_define>`.
.. http:get:: /mon/vm/(hostname_or_uuid)/monitoring
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-no|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg data.active: Display currently active VM monitoring definition in the monitoring system (default: false)
:type data.active: boolean
:status 200: SUCCESS
:status 403: Forbidden
:status 404: VM not found
.. http:put:: /mon/vm/(hostname_or_uuid)/monitoring
.. note:: Please use :http:put:`/vm/(hostname_or_uuid)` to update the monitoring interface definition of \
an already deployed VM after changing any of the VM's monitoring interface attributes.
.. note:: By setting the value of ``port``, ``dns``, ``useip`` and/or ``proxy`` parameter(s) to ``null``, the \
parameter(s) will be set to a default value.
:DC-bound?:
* |dc-yes|
:Permissions:
* |Admin|
:Asynchronous?:
* |async-no|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg data.ip: IPv4 address used for monitoring (automatically updated \
by :py:func:`~api.vm.define.views.vm_define_nic`)
:type data.ip: string
:arg data.port: Port number used for monitoring (default: 10050)
:type data.port: integer
:arg data.dns: Server hostname (FQDN) used for monitoring connections when ``useip`` is false \
or ``ip`` is not set (default: ``hostname``)
:type data.dns: string
:arg data.useip: Whether the monitoring connection should be made via the IP address (default: true)
:type data.useip: boolean
:arg data.proxy: Name or ID of the monitoring proxy that is used to monitor the host (default: '' => disabled)
:type data.proxy: string
:arg data.hostgroups: Custom VM monitoring hostgroups; same as ``monitoring_hostgroups`` in \
:py:func:`~api.vm.define.views.vm_define` (default: [])
:type data.hostgroups: array
:arg data.templates: Custom VM monitoring templates; same as ``monitoring_templates`` in \
:py:func:`~api.vm.define.views.vm_define` (default: [])
:type data.templates: array
:status 200: SUCCESS
:status 400: FAILURE
:status 403: Forbidden
:status 404: VM not found
:status 423: VM is not operational / VM is locked or has slave VMs
"""
return VmMonitoringView(request, hostname_or_uuid, data).response()
#: vm_status: GET: Vm.STATUS_OPERATIONAL
# noinspection PyUnusedLocal
@api_view(('GET',))
@request_data() # get_vm() = IsVmOwner
@setting_required('MON_ZABBIX_ENABLED', default_dc=True)
@setting_required('MON_ZABBIX_VM_SLA')
def mon_vm_sla(request, hostname_or_uuid, yyyymm, data=None):
"""
Get (:http:get:`GET </mon/vm/(hostname_or_uuid)/sla/(yyyymm)>`) SLA for
requested server and month.
.. http:get:: /mon/vm/(hostname_or_uuid)/sla/(yyyymm)
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-yes| - SLA value is retrieved from monitoring server
* |async-no| - SLA value is cached
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:arg yyyymm: **required** - Time period in YYYYMM format
:type yyyymm: integer
:status 200: SUCCESS
:status 201: PENDING
:status 400: FAILURE
:status 403: Forbidden
:status 404: VM not found
:status 412: Invalid yyyymm
:status 417: Monitoring data not available
:status 423: VM is not operational
"""
return VmSLAView(request, hostname_or_uuid, yyyymm, data).get()
#: vm_status: GET: Vm.STATUS_OPERATIONAL
@api_view(('GET',))
@request_data() # get_vm() = IsVmOwner
@setting_required('MON_ZABBIX_ENABLED', default_dc=True)
def mon_vm_history(request, hostname_or_uuid, graph, data=None):
"""
Get (:http:get:`GET </mon/vm/(hostname_or_uuid)/history/(graph)>`) monitoring history
for requested server and graph name.
.. http:get:: /mon/vm/(hostname_or_uuid)/history/(graph)
:DC-bound?:
* |dc-yes|
:Permissions:
* |VmOwner|
:Asynchronous?:
* |async-yes|
:arg hostname_or_uuid: **required** - Server hostname or uuid
:type hostname_or_uuid: string
:type graph: string
    :arg graph: **required** - Graph identifier. One of:
| *cpu-usage* - Total compute node CPU consumed by the VM.
| *cpu-waittime* - Total amount of time spent in CPU run queue by the VM.
| *cpu-load* - 1-minute load average.
| *mem-usage* - Total compute node physical memory consumed by the VM.
| *swap-usage* - Total compute node swap space used by the VM.
| *net-bandwidth* - The amount of received and sent network traffic through \
the virtual network interface. *requires data.nic_id*
| *net-packets* - The amount of received and sent packets through the virtual \
network interface. *requires data.nic_id*
| *disk-throughput* (KVM only) - The amount of written and read data on \
the virtual hard drive. *requires data.disk_id*
| *disk-io* (KVM only) - The amount of write and read I/O operations performed on \
the virtual hard drive. *requires data.disk_id*
| *fs-throughput* (SunOS and Linux Zone only) - The amount of written and read data \
on the virtual hard drive. *requires data.disk_id*
| *fs-io* (SunOS and Linux Zone only) - The amount of write and read I/O operations performed on \
the virtual hard drive. *requires data.disk_id*
| *vm-disk-logical-throughput* - Aggregated disk throughput on the logical layer \
(with acceleration mechanisms included).
| *vm-disk-logical-io* - Aggregated amount or read and write I/O operations on the logical layer \
(with acceleration mechanisms included).
| *vm-disk-physical-throughput* - Aggregated disk throughput on the physical (disk) layer.
| *vm-disk-physical-io* - Aggregated amount of read and write I/O operations on the physical (disk) layer.
| *vm-disk-io-operations* - Aggregated amount of disk I/O operations by latency on the logical layer \
(with acceleration mechanisms included).
:arg data.since: Return only values that have been received after the given timestamp (default: now - 1 hour)
:type data.since: integer
:arg data.until: Return only values that have been received before the given timestamp (default: now)
:type data.until: integer
:arg data.disk_id: used only with *disk-throughput*, \
*disk-io*, *fs-throughput*, *fs-io* graphs to specify ID of the disk for which graph should be retrieved.
:type data.disk_id: integer
:arg data.nic_id: only used with *net-bandwidth*, *net-packets* \
graphs to specify ID of the NIC for which graph should be retrieved.
:type data.nic_id: integer
:status 200: SUCCESS
:status 201: PENDING
:status 400: FAILURE
:status 403: Forbidden
:status 404: VM not found
:status 412: Invalid graph / Invalid OS type
:status 417: VM monitoring disabled
:status 423: VM is not operational
"""
return VmHistoryView(request, hostname_or_uuid, graph, data).get()
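# Illustrative client call for the history endpoint documented above (the host,
# the "/api" prefix and any authentication details are assumptions and not part
# of this module; the parameter names follow the docstring):
#
#   GET /api/mon/vm/web01.example.com/history/cpu-usage?since=1609455600&until=1609459200
#
# A 200 response carries the finished task result, while 201 means the
# monitoring task is still pending.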
|
# -*- coding: utf-8 -*-
"""
Core
====
Provides a collection of functions that are core to **e13Tools** and are
imported automatically.
"""
# %% IMPORTS
# Built-in imports
from pkg_resources import parse_version
# All declaration
__all__ = ['InputError', 'ShapeError', 'compare_versions']
# %% CLASSES
# Define Error class for wrong inputs
class InputError(Exception):
"""
Generic exception raised for errors in the function input arguments.
General purpose exception class, raised whenever the function input
arguments prevent the correct execution of the function without specifying
    the type of error (e.g. ValueError, TypeError, etc.).
"""
pass
# Define Error class for wrong shapes
class ShapeError(Exception):
"""
Inappropriate argument shape (of correct type).
"""
pass
# %% FUNCTIONS
# Function that compares two versions with each other
def compare_versions(a, b):
"""
Compares provided versions `a` and `b` with each other, and returns *True*
if version `a` is later than or equal to version `b`.
"""
    if a:
        return parse_version(a) >= parse_version(b)
    else:
        return False
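# Minimal usage sketch (illustrative only, not part of the e13Tools API): an
# empty or None first argument is treated as "not newer".
if __name__ == '__main__':
    assert compare_versions('1.2.0', '1.1.9')   # later version -> True
    assert not compare_versions('0.9', '1.0')   # earlier version -> False
    assert not compare_versions(None, '1.0')    # missing version -> False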
|
# ============================================================================
# FILE: commands.py
# AUTHOR: Qiming Zhao <chemzqm@gmail.com>
# License: MIT license
# ============================================================================
# pylint: disable=E0401,C0411
import os
import json
from .base import Base
from denite import util
from ..kind.base import Base as BaseKind
from operator import itemgetter
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = 'commands'
self.vars = {
"config": '~/.vim/command.json'
}
self.kind = Kind(vim)
def on_init(self, context):
context['__config'] = util.expand(self.vars['config'])
def highlight(self):
self.vim.command('highlight link uniteSource__CommandSign Type')
self.vim.command('highlight link uniteSource__CommandTrigger Identifier')
self.vim.command('highlight link uniteSource__CommandDescription Statement')
def define_syntax(self):
self.vim.command(r'syntax match deniteSource__CommandHeader /^.*$/ '
r'containedin=' + self.syntax_name)
self.vim.command(r'syntax match uniteSource__CommandSign /\v^.{2}/ contained '
r'contained containedin=deniteSource__CommandHeader')
self.vim.command(r'syntax match uniteSource__CommandTrigger /\%6c.*\%18c/ contained '
r'contained containedin=deniteSource__CommandHeader')
self.vim.command(r'syntax match uniteSource__CommandDescription /\%19c.*$/ contained '
r'contained containedin=deniteSource__CommandHeader')
def gather_candidates(self, context):
if not os.access(context['__config'], os.R_OK):
return []
candidates = []
with open(context['__config']) as fp:
try:
config = json.loads(fp.read())
for obj in config:
candidates.append({
'word': obj['command'],
'abbr': '▷ %-12s %s' % (obj['command'], obj['description']),
'source__command': obj['command'],
'source__args': obj['args'],
'source__config': context['__config'],
})
except json.JSONDecodeError:
util.error(self.vim, 'Decode error for %s' % context['__config'])
candidates = sorted(candidates, key=itemgetter('source__command'))
return candidates
class Kind(BaseKind):
def __init__(self, vim):
super().__init__(vim)
self.default_action = 'execute'
self.name = 'commands'
self.persist_actions = []
def action_execute(self, context):
target = context['targets'][0]
command = target['source__command']
args = target['source__args']
if args:
util.clear_cmdline(self.vim)
self.vim.call('denite#extra#feedkeys', ':%s' % command)
else:
self.vim.call('denite#util#execute_command', command, False)
def action_edit(self, context):
target = context['targets'][0]
command = target['source__command']
config = target['source__config']
self.vim.command('silent edit +/"%s %s' % (command, config))
self.vim.command('normal! zz')
cursor = self.vim.call('getcurpos')
cursor[2] = 15
self.vim.call('setpos', '.', cursor)
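# Example of the expected ~/.vim/command.json layout (the keys follow
# gather_candidates() above; the concrete commands are only illustrative):
#
#   [
#     {"command": "Gstatus", "args": false, "description": "git status window"},
#     {"command": "Denite",  "args": true,  "description": "open a denite source"}
#   ]
#
# Entries with "args": true only pre-fill the command line so extra arguments
# can be typed; entries with "args": false are executed directly.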
|
import os
import re
from collections import defaultdict, namedtuple
from junit_xml import TestSuite, TestCase
module_dir = os.path.abspath(os.path.dirname(__file__))
Suite = namedtuple('Suite', 'name tests msgs fails times lines')
class GUnit:
def __init__(self, build_dir='.', executable='gdb'):
self.build_dir = os.path.abspath(os.path.join(build_dir, 'gunit'))
# Create output directory
os.makedirs(self.build_dir, exist_ok=True)
self.logging_file = os.path.join(self.build_dir, 'gdblog')
self.report_file = os.path.join(self.build_dir, 'report.xml')
self.log = f'-ex "set logging file {self.logging_file}"'
self.remote = ''
self.load = ''
self.gdb = executable
# Runs automatic testing on given binary/test suite
def test(self, path=None):
# Get scripts
setup_path = os.path.join(module_dir, 'setup.gdb')
execute_path = os.path.join(module_dir, 'execute.gdb')
# Activate GDB and connect to server
command = f'{self.gdb} -batch-silent {self.log} -x {setup_path} {self.remote} '
if path is not None:
command += f'-ex "file {path}" {self.load} '
command += f'-x {execute_path}'
os.system(command)
def junit(self):
suites = []
with open(self.logging_file, "r") as gdblog:
lines = gdblog.readlines()
suites_raw = []
for line in lines:
if line[0] == '$':
suites_raw += [line]
data = [None] * 6
data_i = 0
for sr in suites_raw:
data[data_i] = sr.split(" = ")[1][0:-1]
if data_i != 0:
data[data_i] = data[data_i][1:-1]
if data_i == 5:
suite_name = data[0][1:-1]
suite_tests = re.sub(r'("| *)', '', data[1]).split(',')
suite_messages = re.sub(r'"', '', data[2]).split(',')
suite_fails = re.sub(r'("| *)', '', data[3]).split(',')
suite_elapsed = re.sub(r' *', '', data[4]).split(',')
suite_lines = re.sub(r' *', '', data[5]).split(',')
suites += [Suite(suite_name, suite_tests, suite_messages, suite_fails, suite_elapsed, suite_lines)]
data_i = 0
data_i += 1
test_suites = []
for suitei, suite in enumerate(suites):
suite_name = suite.name
# Get the number of test cases in the suite
testn = len(suite.tests)
# Create a test case for each test
cases = []
for testi in range(testn):
name = suite.tests[testi]
message = suite.msgs[testi]
fail = suite.fails[testi] == 'true'
elapsed = suite.times[testi]
fline = suite.lines[testi]
tc = TestCase(name, suite_name, float(elapsed), "", "", allow_multiple_subelements=True)
if fail:
tc.add_failure_info(f"Affirmation failed at {suite_name}:{fline} with the following message: {message}")
cases += [tc]
test_suites += [TestSuite(suite_name, cases)]
with open(self.report_file, "w") as outxml:
outxml.write(TestSuite.to_xml_string(test_suites))
#### STATIC FUNCTIONS ####
# Generate GUnit.h
def get_header(build_dir='.'):
source_path = os.path.join(module_dir, 'GUnit.h')
destination_path = os.path.abspath(os.path.join(build_dir, 'GUnit.h'))
with open(source_path, "r") as src, open(destination_path, "w") as dst:
dst.write(src.read())
# Configure GUnit for OpenOCD server
def openOCD(gdb_uri, build_dir='.', executable='gdb'):
target = GUnit(build_dir, executable)
target.remote = f'-ex "target remote {gdb_uri}"'
target.load = '-ex load -ex "monitor reset init"'
return target
# Configure GUnit for regular GDB server
def gdbserver(gdb_uri, build_dir='.'):
target = GUnit(build_dir)
target.remote = f'-ex "target remote {gdb_uri}"'
return target
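# Minimal usage sketch (the GDB URI, build directory and binary path are
# placeholders; whether these helpers live at module level or as "static"
# functions on GUnit depends on the original indentation):
#
#   runner = gdbserver('localhost:3333', build_dir='build')
#   runner.test('build/tests.elf')   # drives GDB with setup.gdb / execute.gdb
#   runner.junit()                   # parses the GDB log into build/gunit/report.xml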
|
#!/usr/bin/env python3
#
# Copyright (c) 2021 Xiaomi Corp. (authors: Fangjun Kuang, Daniel Povey)
#
# See ../../../LICENSE for clarification regarding multiple authors
# To run this single test, use
#
# ctest --verbose -R expand_ragged_attributes_tests_py
import unittest
import k2
import torch
import _k2
class TestExpandArcs(unittest.TestCase):
def test(self):
devices = [torch.device('cpu')]
if torch.cuda.is_available() and k2.with_cuda:
devices.append(torch.device('cuda', 0))
for device in devices:
for need_map in [True, False]:
s = '''
0 1 2 10
0 1 1 20
1 2 -1 30
2
'''
src = k2.Fsa.from_str(s).to(device).requires_grad_(True)
src.float_attr = torch.tensor([0.1, 0.2, 0.3],
dtype=torch.float32,
requires_grad=True,
device=device)
src.int_attr = torch.tensor([1, 2, 3],
dtype=torch.int32,
device=device)
src.ragged_attr = k2.RaggedInt('[[1 2 3] [5 6] []]').to(device)
src.attr1 = 'src'
src.attr2 = 'fsa'
if need_map:
dest, arc_map = k2.expand_ragged_attributes(
src, ret_arc_map=True)
else:
dest = k2.expand_ragged_attributes(src)
assert torch.allclose(
dest.float_attr,
torch.tensor([0.1, 0.2, 0.0, 0.0, 0.0, 0.3],
dtype=torch.float32,
device=device))
assert torch.all(
torch.eq(
dest.scores,
torch.tensor([10, 20, 0, 0, 0, 30],
dtype=torch.float32,
device=device)))
assert torch.all(
torch.eq(
dest.int_attr,
torch.tensor([1, 2, 0, 0, 0, 3],
dtype=torch.int32,
device=device)))
assert torch.all(
torch.eq(
dest.ragged_attr,
torch.tensor([1, 5, 2, 3, 6, -1],
dtype=torch.float32,
device=device)))
# non-tensor attributes...
assert dest.attr1 == src.attr1
assert dest.attr2 == src.attr2
# now for autograd
scale = torch.tensor([10, 20, 10, 10, 10, 30], device=device)
(dest.float_attr * scale).sum().backward()
(dest.scores * scale).sum().backward()
expected_grad = torch.tensor([10, 20, 30],
dtype=torch.float32,
device=device)
assert torch.all(torch.eq(src.float_attr.grad, expected_grad))
assert torch.all(torch.eq(src.scores.grad, expected_grad))
def test_final(self):
devices = [torch.device('cpu')]
if torch.cuda.is_available() and k2.with_cuda:
devices.append(torch.device('cuda', 0))
for device in devices:
for need_map in [True, False]:
s = '''
0 1 2 10
0 1 1 20
1 2 -1 30
2
'''
src = k2.Fsa.from_str(s).to(device).requires_grad_(True)
src.float_attr = torch.tensor([0.1, 0.2, 0.3],
dtype=torch.float32,
requires_grad=True,
device=device)
src.int_attr = torch.tensor([1, 2, 3],
dtype=torch.int32,
device=device)
src.ragged_attr = k2.RaggedInt('[[1 2 3] [5 6] [1]]').to(
device)
src.attr1 = 'src'
src.attr2 = 'fsa'
if need_map:
dest, arc_map = k2.expand_ragged_attributes(
src, ret_arc_map=True)
else:
dest = k2.expand_ragged_attributes(src)
assert torch.allclose(
dest.float_attr,
torch.tensor([0.1, 0.2, 0.0, 0.0, 0.0, 0.3, 0.0],
dtype=torch.float32,
device=device))
assert torch.all(
torch.eq(
dest.scores,
torch.tensor([10, 20, 0, 0, 0, 30, 0],
dtype=torch.float32,
device=device)))
assert torch.all(
torch.eq(
dest.int_attr,
torch.tensor([1, 2, 0, 0, 0, 3, 0],
dtype=torch.int32,
device=device)))
_k2.fix_final_labels(dest.arcs, dest.int_attr)
assert torch.all(
torch.eq(
dest.int_attr,
torch.tensor([1, 2, 0, 0, 0, 3, -1],
dtype=torch.int32,
device=device)))
assert torch.all(
torch.eq(
dest.ragged_attr,
torch.tensor([1, 5, 2, 3, 6, 1, -1],
dtype=torch.float32,
device=device)))
# non-tensor attributes...
assert dest.attr1 == src.attr1
assert dest.attr2 == src.attr2
# now for autograd
scale = torch.tensor([10, 20, 10, 10, 10, 30, 10],
device=device)
(dest.float_attr * scale).sum().backward()
(dest.scores * scale).sum().backward()
expected_grad = torch.tensor([10, 20, 30],
dtype=torch.float32,
device=device)
assert torch.all(torch.eq(src.float_attr.grad, expected_grad))
assert torch.all(torch.eq(src.scores.grad, expected_grad))
if __name__ == '__main__':
unittest.main()
|
import os
import sys
import unittest
import warnings
from gym_minigrid import *
# Make sure the path of the framework is included in the import path
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../src/')))
# Framework imports
from mdp import GymMinigrid, GymMinigridBuilder, GymMinigridCustomLevelBuilder
class TestGymMinigrid(unittest.TestCase):
def setUp(self):
# At time of development, some deprecation warnings occurred in the gym_minigrid package.
        # They are not useful for unit testing, so we suppress them here for all unit tests in this class.
warnings.simplefilter('ignore', category=DeprecationWarning)
def test_available_actions(self):
# Available actions are the same for any state.
mdp = GymMinigridBuilder('MiniGrid-Empty-5x5-v0').build_mdp()
self.assertSetEqual({'left', 'right', 'forward', 'pickup', 'drop', 'toggle', 'done'},
mdp.available_actions)
def test_state(self):
"""
Simple environment:
#####
#A #
# #
# G#
#####
"""
mdp = GymMinigridBuilder('MiniGrid-Empty-5x5-v0').build_mdp()
should_be_state = { f'obj(wall(grey),({x},0))' for x in range(0, 5) } \
| { f'obj(wall(grey),({x},4))' for x in range(0, 5) } \
| { f'obj(wall(grey),(0,{y}))' for y in range(0, 5) } \
| { f'obj(wall(grey),(4,{y}))' for y in range(0, 5) } \
| { 'obj(agent(east),(1,1))',
'obj(goal,(3,3))'
}
self.assertSetEqual(should_be_state, mdp.state)
self.assertSetEqual(should_be_state, mdp.ground_state)
        # TODO: Test if state also works for keys, doors, lava, floors, etc...
def test_state_transition(self):
"""
Simple environment:
##### ##### ##### ##### ##### ##### #####
#> # # > # # ># # ># # v# # # # #
# # # # # # # # # # # v# # #
# G# # G# # G# # G# # G# # G# # v#
##### ##### ##### ##### ##### ##### #####
forward forward forward right forward forward
"""
static_state = { f'obj(wall(grey),({x},0))' for x in range(0, 5) } \
| { f'obj(wall(grey),({x},4))' for x in range(0, 5) } \
| { f'obj(wall(grey),(0,{y}))' for y in range(0, 5) } \
| { f'obj(wall(grey),(4,{y}))' for y in range(0, 5) } \
| { 'obj(goal,(3,3))' }
mdp = GymMinigridBuilder('MiniGrid-Empty-5x5-v0').build_mdp()
next_state, next_reward = mdp.transition('forward')
self.assertEqual(static_state | {'obj(agent(east),(2,1))'},
next_state)
self.assertEqual(0, next_reward)
self.assertEqual(static_state | {'obj(agent(east),(2,1))'},
mdp.state)
next_state, next_reward = mdp.transition('forward')
self.assertEqual(static_state | {'obj(agent(east),(3,1))'},
next_state)
self.assertEqual(0, next_reward)
self.assertEqual(static_state | {'obj(agent(east),(3,1))'},
mdp.state)
next_state, next_reward = mdp.transition('forward')
self.assertEqual(static_state | {'obj(agent(east),(3,1))'},
next_state)
self.assertEqual(0, next_reward)
self.assertEqual(static_state | {'obj(agent(east),(3,1))'},
mdp.state)
next_state, next_reward = mdp.transition('right')
self.assertEqual(static_state | {'obj(agent(south),(3,1))'},
next_state)
self.assertEqual(0, next_reward)
self.assertEqual(static_state | {'obj(agent(south),(3,1))'},
mdp.state)
next_state, next_reward = mdp.transition('forward')
self.assertEqual(static_state | {'obj(agent(south),(3,2))'},
next_state)
self.assertEqual(0, next_reward)
self.assertEqual(static_state | {'obj(agent(south),(3,2))'},
mdp.state)
next_state, next_reward = mdp.transition('forward')
self.assertEqual((static_state | {'obj(agent(south),(3,3))', 'terminal'}) - {'obj(goal,(3,3))'},
next_state)
step_count = 6
max_steps = 100.0
self.assertEqual(1 - 0.9 * (step_count / max_steps), next_reward)
self.assertEqual((static_state | {'obj(agent(south),(3,3))', 'terminal'}) - {'obj(goal,(3,3))'},
mdp.state)
# Check if trajectory is correct: S0, A0, R1, S1, A1, R2, S2 ...
self.assertEqual(static_state | {'obj(agent(east),(1,1))'}, mdp.state_history[0]) # S0
self.assertEqual('forward', mdp.action_history[0]) # A0
self.assertEqual(0, mdp.reward_history[1]) # R1
self.assertEqual(static_state | {'obj(agent(east),(2,1))'}, mdp.state_history[1]) # S1
self.assertEqual('forward', mdp.action_history[1]) # A1
self.assertEqual(0, mdp.reward_history[2]) # R2
self.assertEqual(static_state | {'obj(agent(east),(3,1))'}, mdp.state_history[2]) # S2
self.assertEqual('forward', mdp.action_history[2]) # A2
self.assertEqual(0, mdp.reward_history[3]) # R3
self.assertEqual(static_state | {'obj(agent(east),(3,1))'}, mdp.state_history[3]) # S3
self.assertEqual('right', mdp.action_history[3]) # A3
self.assertEqual(0, mdp.reward_history[4]) # R4
self.assertEqual(static_state | {'obj(agent(south),(3,1))'}, mdp.state_history[4]) # S4
self.assertEqual('forward', mdp.action_history[4]) # A4
self.assertEqual(0, mdp.reward_history[5]) # R5
self.assertEqual(static_state | {'obj(agent(south),(3,2))'}, mdp.state_history[5]) # S5
self.assertEqual('forward', mdp.action_history[5]) # A5
self.assertEqual(1 - 0.9 * (step_count / max_steps), mdp.reward_history[6]) # R6
def test_returns(self):
mdp = GymMinigridBuilder('MiniGrid-Empty-5x5-v0').build_mdp()
mdp.transition('forward')
mdp.transition('forward')
mdp.transition('right')
mdp.transition('forward')
mdp.transition('forward')
step_count = 5
max_steps = 100.0
final_reward = 1 - 0.9 * (step_count / max_steps)
# G[t] = R[t+1] + R[t+2] + R[t+3] + ...
self.assertEqual(mdp.return_history[0], 0 + 0 + 0 + 0 + final_reward)
self.assertEqual(mdp.return_history[1], 0 + 0 + 0 + final_reward)
self.assertEqual(mdp.return_history[2], 0 + 0 + final_reward)
self.assertEqual(mdp.return_history[3], 0 + final_reward)
self.assertEqual(mdp.return_history[4], final_reward)
self.assertEqual(mdp.return_history[5], 0) # Return is zero in terminal state
def test_adjusted_reward_system(self):
# In the original MDP, there is no discounting (gamma = 1) and the reward in the last episode
# is proportional to the time it took to get to the goal.
        # i.e. the ground MDP is not Markovian!
        # To remedy this, we experimented with changing the return.
        # We introduced a discount factor of 0.9 and set the reward for reaching the goal to 1.
# This behavior can be turned on via a parameter in the minigrid builder!
# By default, the original reward system is used.
mdp = GymMinigridBuilder('MiniGrid-Empty-5x5-v0', use_alternative_reward_system=True).build_mdp()
_, next_reward = mdp.transition('forward')
self.assertEqual(0, next_reward)
_, next_reward = mdp.transition('forward')
self.assertEqual(0, next_reward)
_, next_reward = mdp.transition('right')
self.assertEqual(0, next_reward)
_, next_reward = mdp.transition('forward')
self.assertEqual(0, next_reward)
_, next_reward = mdp.transition('forward')
self.assertEqual(1, next_reward)
# G[t] = R[t+1] + R[t+2] + R[t+3] + ...
self.assertEqual(mdp.return_history[0], 0 + 0 + 0 + 0 + 1*0.9*0.9*0.9*0.9)
self.assertEqual(mdp.return_history[1], 0 + 0 + 0 + 1*0.9*0.9*0.9)
self.assertEqual(mdp.return_history[2], 0 + 0 + 1*0.9*0.9)
self.assertEqual(mdp.return_history[3], 0 + 1*0.9)
self.assertEqual(mdp.return_history[4], 1)
self.assertEqual(mdp.return_history[5], 0) # Return is zero in terminal state
def test_custom_level_creation(self):
should_be_state = { f'obj(wall(grey),({x},0))' for x in range(0, 5) } \
| { f'obj(wall(grey),({x},4))' for x in range(0, 5) } \
| { f'obj(wall(grey),(0,{y}))' for y in range(0, 5) } \
| { f'obj(wall(grey),(4,{y}))' for y in range(0, 5) } \
| { 'obj(agent(south),(2,2))',
'obj(goal,(3,3))'
}
mdp = GymMinigridCustomLevelBuilder('room_5x5').build_mdp()
self.assertSetEqual(should_be_state, mdp.state)
def test_all_objects(self):
world = """
rW rF gK pA
yB < rDo bDc
gDl G ~ _
"""
should_be_state = {
'obj(wall(red),(0,0))',
'obj(floor(red),(1,0))',
'obj(key(green),(2,0))',
'obj(ball(purple),(3,0))',
'obj(box(yellow),(0,1))',
'obj(agent(west),(1,1))',
'obj(door(red,open),(2,1))',
'obj(door(blue,closed),(3,1))',
'obj(door(green,locked),(0,2))',
'obj(goal,(1,2))',
'obj(lava,(2,2))',
}
mdp = GymMinigridCustomLevelBuilder(ascii_encoding=world).build_mdp()
self.assertSetEqual(should_be_state, mdp.state)
def test_tiny_custom_world(self):
        # World size needs to be at least 3x3.
# If too small, automatically pad the world with empty spaces.
world = """
> G
"""
should_be_state = {
'obj(agent(east),(0,0))',
'obj(goal,(1,0))',
}
mdp = GymMinigridCustomLevelBuilder(ascii_encoding=world).build_mdp()
self.assertSetEqual(should_be_state, mdp.state)
def test_final_state_after_reaching_goal(self):
# After reaching the goal, the MDP should be finished (no more actions available)
world = """
> G
"""
mdp = GymMinigridCustomLevelBuilder(ascii_encoding=world).build_mdp()
mdp.transition('forward')
should_be_state = {
'obj(agent(east),(1,0))',
'terminal'
}
self.assertSetEqual(should_be_state, mdp.state)
self.assertSetEqual(set(), mdp.available_actions)
self.assertTrue(mdp.done)
def test_final_state_after_touching_danger(self):
# After falling into lava, the MDP should be finished (no more actions available)
world = """
> ~
"""
mdp = GymMinigridCustomLevelBuilder(ascii_encoding=world).build_mdp()
mdp.transition('forward')
should_be_state = {
'obj(agent(east),(1,0))',
'terminal'
}
self.assertSetEqual(should_be_state, mdp.state)
self.assertSetEqual(set(), mdp.available_actions)
self.assertTrue(mdp.done)
def test_internal_step_limit(self):
mdp = GymMinigridBuilder('MiniGrid-Empty-5x5-v0').build_mdp()
# Calculation from minigrid:
step_limit = 4*5*5
for s in range(step_limit):
self.assertSetEqual({'left', 'right', 'forward', 'pickup', 'drop', 'toggle', 'done'},
mdp.available_actions)
mdp.transition('right')
# After this amount of steps, the environment should be done.
self.assertTrue(mdp.done)
self.assertSetEqual(set(), mdp.available_actions)
|
import streamlit as st
import random
from cursor import loader
from cursor import renderer
from cursor import path
from cursor import filter
from cursor import data
from cursor import device
@st.cache(allow_output_mutation=True)
def inputs1():
return [0, 1, 2, 3, 4]
@st.cache(allow_output_mutation=True)
def offsets1():
return [0, 100, 200, 300, 400]
@st.cache(
hash_funcs={
loader.Loader: lambda _: None,
path.PathCollection: lambda _: None,
path.Path: lambda _: None,
}
)
def load_data():
p = data.DataDirHandler().recordings()
ll = loader.Loader(directory=p, limit_files=None)
return ll.all_paths()
@st.cache(
hash_funcs={
loader.Loader: lambda _: None,
path.PathCollection: lambda _: None,
path.Path: lambda _: None,
filter.EntropyMinFilter: lambda _: None,
}
)
def apply_filter(all_paths, _min, _max):
entropy_min_filter = filter.EntropyMinFilter(_min, _min)
entropy_max_filter = filter.EntropyMaxFilter(_max, _max)
r = all_paths.filtered(entropy_min_filter)
return r.filtered(entropy_max_filter)
def composition57(pc):
folder = data.DataDirHandler().jpg("composition57")
jpeg_renderer = renderer.JpegRenderer(folder)
xspacing = 1
coll = path.PathCollection()
offsets = offsets1()
line_count = st.sidebar.number_input("Line count", 1, 200, 100)
counter = 0
for p in pc:
offsets[counter] = st.sidebar.number_input(
f"offset {p.hash}", 0, 3000, offsets[counter]
)
for i in range(line_count):
xfrom = xspacing * i + offsets[counter]
yfrom = 0
xto = xspacing * i + offsets[counter]
yto = 200
morphed = p.morph((xfrom, yfrom), (xto, yto))
coll.add(morphed)
if counter < 4:
offsets[counter + 1] = offsets[counter] + line_count
counter += 1
coll.fit(device.Paper.sizes[device.PaperSize.LANDSCAPE_A1], padding_mm=100)
filename = f"composition57_{pc.hash()}"
jpeg_renderer.render(coll, scale=1.0, frame=True)
st.write(f"Image size: {jpeg_renderer.img.size}")
if st.sidebar.checkbox("render bb"):
jpeg_renderer.render_bb(coll.bb())
st.image(
jpeg_renderer.img, caption=f"Composition #57 {pc.hash()}", use_column_width=True
)
if st.sidebar.button("save"):
device.SimpleExportWrapper().ex(
coll,
device.PlotterType.DIY_PLOTTER,
device.PaperSize.LANDSCAPE_A1,
90,
"composition57",
pc.hash(),
)
device.SimpleExportWrapper().ex(
coll,
device.PlotterType.ROLAND_DPX3300,
device.PaperSize.LANDSCAPE_A1,
90,
"composition57",
pc.hash(),
)
st.write(f"Saving {filename}")
def main():
st.title("Composition #57")
all_paths = load_data()
st.sidebar.markdown("EntropyMinFilter")
min_slider = st.sidebar.slider("min entropy", 0.0, 10.0, 3.5)
max_slider = st.sidebar.slider("max entropy", 0.0, 10.0, 5.0)
all_p = apply_filter(all_paths, min_slider, max_slider)
st.sidebar.text(f"Before filtering: {len(all_paths)}")
st.sidebar.text(f"After filtering: {len(all_p)}")
inputs = inputs1()
if st.sidebar.button("randomize 1"):
inputs[0] = int(random.randint(0, len(all_p) - 1))
if st.sidebar.button("randomize 2"):
inputs[1] = int(random.randint(0, len(all_p) - 1))
if st.sidebar.button("randomize 3"):
inputs[2] = int(random.randint(0, len(all_p) - 1))
if st.sidebar.button("randomize 4"):
inputs[3] = int(random.randint(0, len(all_p) - 1))
if st.sidebar.button("randomize 5"):
inputs[4] = int(random.randint(0, len(all_p) - 1))
i1 = st.sidebar.number_input("i1", 0, len(all_p), inputs[0])
i2 = st.sidebar.number_input("i2", 0, len(all_p), inputs[1])
i3 = st.sidebar.number_input("i3", 0, len(all_p), inputs[2])
i4 = st.sidebar.number_input("i4", 0, len(all_p), inputs[3])
i5 = st.sidebar.number_input("i5", 0, len(all_p), inputs[4])
pc = path.PathCollection()
pc.add(all_p[i1])
pc.add(all_p[i2])
pc.add(all_p[i3])
pc.add(all_p[i4])
pc.add(all_p[i5])
composition57(pc)
if __name__ == "__main__":
main()
|
from dataclasses import dataclass
from typing import Optional, Type, TypeVar
from commanderbot.ext.automod.automod_action import AutomodAction, AutomodActionBase
from commanderbot.ext.automod.automod_event import AutomodEvent
from commanderbot.lib import AllowedMentions, JsonObject
ST = TypeVar("ST")
@dataclass
class ReplyToMessage(AutomodActionBase):
"""
Reply to the message in context.
Attributes
----------
content
The content of the message to send.
allowed_mentions
The types of mentions allowed in the message. Unless otherwise specified, only
"everyone" mentions will be suppressed.
"""
content: str
allowed_mentions: Optional[AllowedMentions] = None
@classmethod
def from_data(cls: Type[ST], data: JsonObject) -> ST:
allowed_mentions = AllowedMentions.from_field_optional(data, "allowed_mentions")
return cls(
description=data.get("description"),
content=data.get("content"),
allowed_mentions=allowed_mentions,
)
async def apply(self, event: AutomodEvent):
if message := event.message:
content = event.format_content(self.content)
allowed_mentions = self.allowed_mentions or AllowedMentions.not_everyone()
await message.reply(
content,
allowed_mentions=allowed_mentions,
)
def create_action(data: JsonObject) -> AutomodAction:
return ReplyToMessage.from_data(data)
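# Illustrative rule data for this action (the keys mirror from_data() above; the
# content string is made up and allowed_mentions is left out):
#
#   {
#     "description": "Acknowledge reports",
#     "content": "Thanks, a moderator will take a look."
#   }
#
# allowed_mentions is optional; when omitted, apply() falls back to
# AllowedMentions.not_everyone(), which suppresses "everyone" mentions.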
|
# Copyright (c) 2019 Leiden University Medical Center
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from pathlib import Path
import pytest
from wdl_packager.git import get_commit_version, get_file_last_commit_timestamp
from . import TEST_DATA_DIR
TIMESTAMP_FILES = [
(Path(TEST_DATA_DIR, "gatk-variantcalling", "tasks", "gatk.wdl"),
1574753755),
(Path(TEST_DATA_DIR, "gatk-variantcalling", "gatk-variantcalling.wdl"),
1574768480)
]
@pytest.mark.parametrize(["repo_file", "result"], TIMESTAMP_FILES)
def test_get_commit_timestamp(repo_file, result):
assert get_file_last_commit_timestamp(repo_file) == result
def test_get_commit_version():
assert get_commit_version(
Path(TEST_DATA_DIR, "gatk-variantcalling")) == "v1.0.0-1-g43b8475"
|
import codecs
import csv
import datetime
def parsedate(str):
try:
if len(str) > 1:
splitted = str.split("/")
if len(splitted) == 3:
if len(splitted[-1]) == 2:
return datetime.datetime.strptime(str, "%d/%m/%y")
else:
return datetime.datetime.strptime(str, "%d/%m/%Y")
return None
except IndexError:
return None
def import_inmate_file(csvid, is_recovery=False):
import django
django.setup()
from mainapp.models import Person, CsvBulkUpload
upload = CsvBulkUpload.objects.get(id=csvid)
try:
upload.csv_file.open(mode="rb")
new_data = csv.DictReader(codecs.iterdecode(upload.csv_file.file, "utf-8"))
for datum in new_data:
"""
try:
identifier_str = (datum.get("phone", "") + datum.get("name","") + datum.get("age",0)).encode('utf-8')
identifier = md5(identifier_str).hexdigest()
#this will fail. we should deal with the removed unique_identifier
p = Person.objects.get(unique_identifier=identifier)
except ValueError as e:
print("Invalid camp ID. row = "+ str(datum))
except RescueCamp.DoesNotExist as e:
print("Camp does not exist. row = "+ str(datum))
except Person.DoesNotExist:
"""
empty = 0
header = [
"name",
"phone",
"address",
"notes",
"district",
"checkin_date",
"checkout_date",
"gender",
"age",
]
for i in header:
if not datum.get(i, ""):
empty += 1
continue
if datum.get(i, "").strip() == "":
empty += 1
if empty == len(header):
continue
gender = 2
if len(datum.get("gender", "")) > 0:
if datum.get("gender", "")[0] == "m" or datum.get("gender", "")[0] == "M":
gender = 0
elif datum.get("gender", "")[0] == "f" or datum.get("gender", "")[0] == "F":
gender = 1
age = "-1"
if datum.get("age", ""):
if datum.get("age", "").strip() != "":
age = datum.get("age", "").strip()
district = ""
if datum.get("district", ""):
                district = datum.get("district", "").lower()
Person(
name=datum.get("name", "")[:50],
phone=datum.get("phone", ""),
age=int(float(age)),
gender=gender,
address=datum.get("address", ""),
notes=datum.get("notes", ""),
camped_at=upload.camp,
                district=district,
status="new",
checkin_date=parsedate(datum.get("checkin_date", None)),
checkout_date=parsedate(datum.get("checkout_date", None)),
).save()
if is_recovery:
csv_name = CsvBulkUpload.objects.get(id=csvid).name
CsvBulkUpload.objects.filter(id=csvid).update(
is_completed=True, failure_reason="", name="rec-" + csv_name[:15]
)
else:
CsvBulkUpload.objects.filter(id=csvid).update(is_completed=True, failure_reason="")
except Exception as e:
CsvBulkUpload.objects.filter(id=csvid).update(failure_reason=(getattr(e, "message", repr(e))))
# For Shell Testing
# exec(open('mainapp/csvimporter.py').read())
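# Expected CSV layout (column names are taken from the `header` list above; the
# data row is purely illustrative):
#
#   name,phone,address,notes,district,checkin_date,checkout_date,gender,age
#   Jane Doe,9999999999,Example Street,needs medication,ernakulam,17/08/18,,F,42
#
# Dates are parsed as dd/mm/yy or dd/mm/yyyy by parsedate(); gender is mapped to
# 0 (male), 1 (female) or 2 (unknown/other) and district is lower-cased.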
|
# -*- coding: utf-8 -*-
import vk_api
from vk_api.keyboard import VkKeyboard, VkKeyboardColor
from vk_api.utils import get_random_id
def main():
""" Пример создания клавиатуры для отправки ботом """
vk_session = vk_api.VkApi(token='bot_api_token')
vk = vk_session.get_api()
keyboard = VkKeyboard(one_time=True)
    keyboard.add_button('White button', color=VkKeyboardColor.DEFAULT)
    keyboard.add_button('Green button', color=VkKeyboardColor.POSITIVE)
    keyboard.add_line()  # Move to the second row
    keyboard.add_button('Red button', color=VkKeyboardColor.NEGATIVE)
    keyboard.add_line()
    keyboard.add_button('Blue button', color=VkKeyboardColor.PRIMARY)
vk.messages.send(
peer_id=123456,
random_id=get_random_id(),
keyboard=keyboard.get_keyboard(),
        message='Keyboard example'
)
if __name__ == '__main__':
main()
|
# -----------------------------------------------------------------------------
# Copyright (c) 2009-2016 Nicolas P. Rougier. All rights reserved.
# Distributed under the (new) BSD License.
# -----------------------------------------------------------------------------
import numpy as np
from glumpy import app, gl, glm, gloo
def cube():
vtype = [('a_position', np.float32, 3), ('a_texcoord', np.float32, 2),
('a_normal', np.float32, 3), ('a_color', np.float32, 4)]
itype = np.uint32
# Vertices positions
p = np.array([[1, 1, 1], [-1, 1, 1], [-1, -1, 1], [1, -1, 1],
[1, -1, -1], [1, 1, -1], [-1, 1, -1], [-1, -1, -1]], dtype=float)
# Face Normals
n = np.array([[0, 0, 1], [1, 0, 0], [0, 1, 0],
[-1, 0, 1], [0, -1, 0], [0, 0, -1]])
    # Vertex colors
c = np.array([[0, 1, 1, 1], [0, 0, 1, 1], [0, 0, 0, 1], [0, 1, 0, 1],
[1, 1, 0, 1], [1, 1, 1, 1], [1, 0, 1, 1], [1, 0, 0, 1]])
# Texture coords
t = np.array([[0, 0], [0, 1], [1, 1], [1, 0]])
faces_p = [0, 1, 2, 3, 0, 3, 4, 5, 0, 5, 6, 1,
1, 6, 7, 2, 7, 4, 3, 2, 4, 7, 6, 5]
faces_c = [0, 1, 2, 3, 0, 3, 4, 5, 0, 5, 6, 1,
1, 6, 7, 2, 7, 4, 3, 2, 4, 7, 6, 5]
faces_n = [0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5]
faces_t = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3,
3, 2, 1, 0, 0, 1, 2, 3, 0, 1, 2, 3]
vertices = np.zeros(24, vtype)
vertices['a_position'] = p[faces_p]
vertices['a_normal'] = n[faces_n]
vertices['a_color'] = c[faces_c]
vertices['a_texcoord'] = t[faces_t]
filled = np.resize(
np.array([0, 1, 2, 0, 2, 3], dtype=itype), 6 * (2 * 3))
filled += np.repeat(4 * np.arange(6, dtype=itype), 6)
outline = np.resize(
np.array([0, 1, 1, 2, 2, 3, 3, 0], dtype=itype), 6 * (2 * 4))
outline += np.repeat(4 * np.arange(6, dtype=itype), 8)
vertices = vertices.view(gloo.VertexBuffer)
filled = filled.view(gloo.IndexBuffer)
outline = outline.view(gloo.IndexBuffer)
return vertices, filled, outline
def checkerboard(grid_num=8, grid_size=32):
""" Checkerboard pattern """
row_even = grid_num // 2 * [0, 1]
row_odd = grid_num // 2 * [1, 0]
Z = np.row_stack(grid_num // 2 * (row_even, row_odd)).astype(np.uint8)
return 255 * Z.repeat(grid_size, axis=0).repeat(grid_size, axis=1)
vertex = """
uniform mat4 u_model; // Model matrix
uniform mat4 u_view; // View matrix
uniform mat4 u_projection; // Projection matrix
attribute vec4 a_color; // Vertex color
attribute vec3 a_position; // Vertex position
attribute vec3 a_normal; // Vertex normal
attribute vec2 a_texcoord; // Vertex texture coordinates
varying vec4 v_color; // Interpolated fragment color (out)
varying vec3 v_normal; // Interpolated normal (out)
varying vec3 v_position; // Interpolated position (out)
varying vec2 v_texcoord; // Interpolated fragment texture coordinates (out)
void main()
{
// Assign varying variables
v_color = a_color;
v_normal = a_normal;
v_position = a_position;
v_texcoord = a_texcoord;
// Final position
gl_Position = u_projection * u_view * u_model * vec4(a_position,1.0);
}
"""
fragment = """
uniform mat4 u_model; // Model matrix
uniform mat4 u_view; // View matrix
uniform mat4 u_normal; // Normal matrix
uniform mat4 u_projection; // Projection matrix
uniform vec4 u_color; // Global color
uniform sampler2D u_texture; // Texture
uniform vec3 u_light_position; // Light position
uniform vec3 u_light_intensity; // Light intensity
varying vec4 v_color; // Interpolated fragment color (in)
varying vec3 v_normal; // Interpolated normal (in)
varying vec3 v_position; // Interpolated position (in)
varying vec2 v_texcoord; // Interpolated fragment texture coordinates (in)
void main()
{
// Calculate normal in world coordinates
vec3 normal = normalize(u_normal * vec4(v_normal,1.0)).xyz;
// Calculate the location of this fragment (pixel) in world coordinates
vec3 position = vec3(u_view*u_model * vec4(v_position, 1));
// Calculate the vector from this pixels surface to the light source
vec3 surfaceToLight = u_light_position - position;
// Calculate the cosine of the angle of incidence (brightness)
float brightness = dot(normal, surfaceToLight) /
(length(surfaceToLight) * length(normal));
brightness = max(min(brightness,1.0),0.0);
// Calculate final color of the pixel, based on:
// 1. The angle of incidence: brightness
// 2. The color/intensities of the light: light.intensities
// 3. The texture and texture coord: texture(tex, fragTexCoord)
// Get texture color
vec4 t_color = vec4(vec3(texture2D(u_texture, v_texcoord).r), 1.0);
// Final color
vec4 color = u_color * t_color * mix(v_color, t_color, 0.25);
gl_FragColor = color * brightness * vec4(u_light_intensity, 1);
}
"""
window = app.Window(width=1024, height=1024,
color=(0.30, 0.30, 0.35, 1.00))
@window.event
def on_draw(dt):
    global phi, theta
window.clear()
# Filled cube
gl.glDisable(gl.GL_BLEND)
gl.glEnable(gl.GL_DEPTH_TEST)
gl.glEnable(gl.GL_POLYGON_OFFSET_FILL)
cube['u_color'] = 1, 1, 1, 1
cube.draw(gl.GL_TRIANGLES, I)
# Outlined cube
gl.glDisable(gl.GL_POLYGON_OFFSET_FILL)
gl.glEnable(gl.GL_BLEND)
gl.glDepthMask(gl.GL_FALSE)
cube['u_color'] = 0, 0, 0, 1
cube.draw(gl.GL_LINES, O)
gl.glDepthMask(gl.GL_TRUE)
# Rotate cube
theta += 0.5 # degrees
phi += 0.5 # degrees
view = cube['u_view'].reshape(4,4)
model = np.eye(4, dtype=np.float32)
glm.rotate(model, theta, 0, 0, 1)
glm.rotate(model, phi, 0, 1, 0)
cube['u_model'] = model
cube['u_normal'] = np.array(np.matrix(np.dot(view, model)).I.T)
@window.event
def on_resize(width, height):
cube['u_projection'] = glm.perspective(45.0, width / float(height), 2.0, 100.0)
@window.event
def on_init():
gl.glEnable(gl.GL_DEPTH_TEST)
gl.glPolygonOffset(1, 1)
gl.glEnable(gl.GL_LINE_SMOOTH)
V,I,O = cube()
cube = gloo.Program(vertex, fragment)
cube.bind(V)
cube["u_light_position"] = 2,2,2
cube["u_light_intensity"] = 1,1,1
cube['u_texture'] = checkerboard()
cube['u_model'] = np.eye(4, dtype=np.float32)
cube['u_view'] = glm.translation(0, 0, -5)
phi, theta = 40, 30
app.run()
|
"""
Parse to and from a CSV string/file format for isotherms.
The _parser_version variable documents any changes to the format,
and is used to check for any deprecations.
"""
from io import StringIO
import pandas
from pygaps import logger
from pygaps.core.baseisotherm import BaseIsotherm
from pygaps.core.modelisotherm import ModelIsotherm
from pygaps.core.pointisotherm import PointIsotherm
from pygaps.modelling import model_from_dict
from pygaps.utilities.exceptions import ParsingError
from pygaps.utilities.string_utilities import _from_bool
from pygaps.utilities.string_utilities import _from_list
from pygaps.utilities.string_utilities import _is_bool
from pygaps.utilities.string_utilities import _is_float
from pygaps.utilities.string_utilities import _is_list
from pygaps.utilities.string_utilities import _to_string
_parser_version = "3.0"
def isotherm_to_csv(isotherm, path=None, separator=','):
"""
Convert isotherm into a CSV representation.
If the path is specified, the isotherm is saved as a file,
otherwise it is returned as a string.
Parameters
----------
isotherm : Isotherm
Isotherm to be written to csv.
path : str, None
Path to the file to be written.
separator : str, optional
        Separator used in the CSV file. Defaults to ','.
Returns
-------
str: optional
String representation of the CSV, if path not provided.
"""
output = StringIO()
iso_dict = isotherm.to_dict()
iso_dict['file_version'] = _parser_version # version
# Parse material
material = iso_dict['material']
if isinstance(material, dict):
iso_dict['material'] = material.pop('name')
iso_dict.update({f"_material_{key}": val for key, val in material.items()})
output.writelines([x + separator + _to_string(y) + '\n' for (x, y) in iso_dict.items()])
if isinstance(isotherm, PointIsotherm):
# We get data and replace adsorption terminology
data = isotherm.data_raw.copy()
data['branch'] = data['branch'].replace(0, 'ads').replace(1, 'des')
output.write('data:[pressure,loading,branch,(otherdata)]\n')
data.to_csv(output, sep=separator, index=False, header=True)
elif isinstance(isotherm, ModelIsotherm):
output.write('model:[name and parameters]\n')
output.write(('name' + separator + isotherm.model.name + '\n'))
output.write(('rmse' + separator + _to_string(isotherm.model.rmse) + '\n'))
output.write(
('pressure range' + separator + _to_string(isotherm.model.pressure_range) + '\n')
)
output.write(
('loading range' + separator + _to_string(isotherm.model.loading_range) + '\n')
)
output.writelines([
param + separator + str(isotherm.model.params[param]) + '\n'
for param in isotherm.model.params
])
if path:
with open(path, mode='w', newline='\n', encoding='utf-8') as file:
file.write(output.getvalue())
else:
return output.getvalue()
def isotherm_from_csv(str_or_path, separator=',', **isotherm_parameters):
"""
Load an isotherm from a CSV file.
Parameters
----------
str_or_path : str
The isotherm in a CSV string format or a path
to where one can be read.
separator : str, optional
        Separator used in the CSV file. Defaults to `,`.
isotherm_parameters :
Any other options to be overridden in the isotherm creation.
Returns
-------
Isotherm
The isotherm contained in the csv string or file.
"""
try:
with open(str_or_path, encoding='utf-8') as f:
raw_csv = StringIO(f.read())
except OSError:
try:
raw_csv = StringIO(str_or_path)
except Exception as err:
raise ParsingError(
"Could not parse CSV isotherm. "
"The `str_or_path` is invalid or does not exist. "
) from err
line = raw_csv.readline().rstrip()
raw_dict = {}
try:
while not (line.startswith('data') or line.startswith('model') or line == ""):
values = line.strip().split(sep=separator)
if len(values) > 2:
raise ParsingError(f"The isotherm metadata {values} contains more than two values.")
key, val = values
if not val:
val = None
elif _is_bool(val):
val = _from_bool(val)
elif val.isnumeric():
val = int(val)
elif _is_float(val):
val = float(val)
elif _is_list(val):
val = _from_list(val)
raw_dict[key] = val
line = raw_csv.readline().rstrip()
except Exception as err:
raise ParsingError(
"Could not parse CSV isotherm. "
f"The format may be wrong, check for errors in line {line}."
) from err
# version check
version = raw_dict.pop("file_version", None)
if not version or float(version) < float(_parser_version):
logger.warning(
f"The file version is {version} while the parser uses version {_parser_version}. "
"Strange things might happen, so double check your data."
)
# check if material needs parsing
material = {}
for key, val in raw_dict.items():
if key.startswith("_material_"):
material[key.replace("_material_", "")] = val
if material:
for key in material.keys():
raw_dict.pop("_material_" + key)
material['name'] = raw_dict['material']
raw_dict['material'] = material
# Update dictionary with any user parameters
raw_dict.update(isotherm_parameters)
# Now read specific type of isotherm (Point, Model, Base)
if line.startswith('data'):
data = pandas.read_csv(raw_csv, sep=separator)
# process isotherm branches if they exist
if 'branch' in data.columns:
data['branch'] = data['branch'].apply(lambda x: 0 if x == 'ads' else 1)
else:
raw_dict['branch'] = 'guess'
isotherm = PointIsotherm(
isotherm_data=data,
pressure_key=data.columns[0],
loading_key=data.columns[1],
**raw_dict
)
elif line.startswith('model'):
model = {}
line = raw_csv.readline().rstrip()
model['name'] = line.split(sep=separator)[1]
line = raw_csv.readline().rstrip()
model['rmse'] = line.split(sep=separator)[1]
line = raw_csv.readline().rstrip()
model['pressure_range'] = _from_list(line.split(sep=separator)[1])
line = raw_csv.readline().rstrip()
model['loading_range'] = _from_list(line.split(sep=separator)[1])
line = raw_csv.readline().rstrip()
model['parameters'] = {}
while line != "":
values = line.split(sep=separator)
model['parameters'][values[0]] = float(values[1])
line = raw_csv.readline().rstrip()
isotherm = ModelIsotherm(
model=model_from_dict(model),
**raw_dict,
)
else:
isotherm = BaseIsotherm(**raw_dict)
return isotherm
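# Illustrative shape of the CSV handled above (the metadata keys depend on the
# isotherm; the values and the pressure/loading numbers are made up):
#
#   material,silica
#   adsorbate,nitrogen
#   temperature,77.0
#   file_version,3.0
#   data:[pressure,loading,branch,(otherdata)]
#   pressure,loading,branch
#   0.1,0.55,ads
#   0.2,0.96,ads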
|
#!/usr/bin/env python3
# Copyright 2010 Joao Henriques <jotaf (no spam) at hotmail dot com>.
#
# This file is part of name-gen.
#
# name-gen is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# name-gen is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with name-gen. If not, see
# <http://www.gnu.org/licenses/>.
# Updated by Curtis Sand for use in Space, 2015
# Changes made:
# - Included a default language file data in the module itself.
# - Updates for python 3, code clarity, pep 8
# - Unix newlines
"""
Namegen.py - A basic name generator script.
"""
import sys
import random
import locale
from argparse import ArgumentParser
try:
from StringIO import StringIO
except ImportError: # python 3
from io import StringIO
class NameGen(object):
"""
name-gen: Free python name generator module that analyzes sample text and
produces similar words.
Usage:
1. Initialize with path to language file (generated using
'namegen_training.py').
2. Call gen_word() method, returns generated string.
Optional:
- Change min_syl and max_syl to control number of syllables.
- Pass the sample file as 2nd parameter at initialization to set it as
the list of forbidden words. No words from the sample will be
replicated.
- Pass True as the 1st parameter to name_gen() to add the generated
word to the list of forbidden words. The word will not occur again.
"""
def __init__(self, language_file=None, forbidden_file=None):
self.min_syl = 2
self.max_syl = 4
# load language file
if language_file is None:
f = StringIO(LANG_STR)
else:
f = open(language_file, 'r')
try:
lines = [line.strip() for line in f.readlines()]
# first line, list of syllables
self.syllables = lines[0].split(',')
# next 2 lines, start syllable indexes and counts
starts_ids = [int(n) for n in lines[1].split(',')]
starts_counts = [int(n) for n in lines[2].split(',')]
# zip into a list of tuples
self.starts = list(zip(starts_ids, starts_counts))
# next 2, same for syllable ends
ends_ids = [int(n) for n in lines[3].split(',')]
ends_counts = [int(n) for n in lines[4].split(',')]
self.ends = list(zip(ends_ids, ends_counts))
# starting with the 6th and 7th lines, each pair of lines holds ids
# and counts of the "next syllables" for a previous syllable.
self.combinations = []
for (ids_str, counts_str) in list(zip(lines[5:None:2],
lines[6:None:2])):
if len(ids_str) == 0 or len(counts_str) == 0: # empty lines
self.combinations.append([])
else:
line_ids = [int(n) for n in ids_str.split(',')]
line_counts = [int(n) for n in counts_str.split(',')]
self.combinations.append(list(zip(line_ids, line_counts)))
finally:
f.close()
# load forbidden words file if needed
if forbidden_file is None:
self.forbidden = ''
else:
self.forbidden = _load_sample(forbidden_file)
def gen_word(self, no_repeat=False):
# random number of syllables, the last one is always appended
num_syl = random.randint(self.min_syl, self.max_syl - 1)
# turn ends list of tuples into a dictionary
ends_dict = dict(self.ends)
# we may have to repeat the process if the first "min_syl" syllables
# were a bad choice and have no possible continuations; or if the word
# is in the forbidden list.
word = []
word_str = ''
while len(word) < self.min_syl or self.forbidden.find(word_str) != -1:
# start word with the first syllable
syl = _select_syllable(self.starts, 0)
word = [self.syllables[syl]]
for i in range(1, num_syl):
                # don't end yet if we don't have the minimum number of syllables
if i < self.min_syl:
end = 0
else: # probability of ending for this syllable
end = ends_dict.get(syl, 0)
# select next syllable
syl = _select_syllable(self.combinations[syl], end)
if syl is None:
break # early end for this word, end syllable was chosen
word.append(self.syllables[syl])
else: # add an ending syllable if the loop ended without one
syl = _select_syllable(self.ends, 0)
word.append(self.syllables[syl])
word_str = ''.join(word)
# to ensure the word doesn't repeat, add it to the forbidden words
if no_repeat:
self.forbidden = self.forbidden + '\n' + word_str
return word_str.capitalize()
def _select_syllable(counts, end_count):
if len(counts) == 0:
return None # no elements to choose from
# "counts" holds cumulative counts, so take the last element in the list
# (and 2nd in that tuple) to get the sum of all counts
chosen = random.randint(0, counts[-1][1] + end_count)
for (syl, count) in counts:
if count >= chosen:
return syl
return None
def _load_sample(filename):
# get sample text
with open(filename, 'r') as f:
sample = ''.join(f.readlines()).lower()
# convert accented characters to non-accented characters
sample = locale.strxfrm(sample)
# remove all characters except letters from A to Z
a = ord('a')
z = ord('z')
sample = ''.join(c if (ord(c) >= a and ord(c) <= z) else ' '
for c in sample)
return sample
def main():
args = parse_args()
generator = NameGen()
sep = ', '
if args.newline:
sep = '\n'
print(sep.join(generator.gen_word() for _ in range(args.count)))
return 0
def parse_args():
default_count = 5
epilog = 'Stay awesome, spacelings!'
parser = ArgumentParser(description=__doc__, epilog=epilog)
parser.add_argument(
'-c', '--count', default=default_count, dest='count',
type=int, help='How many names to generate. [default: %s]' %
default_count)
parser.add_argument(
'-n', '--newline', default=False,
dest='newline', action='store_true', help='Use newlines to make '
'the output tall instead of wide.')
return parser.parse_args()
LANG_STR = """un,ul,ua,nh,nc,oc,ge,fo,um,ed,fa,yn,ab,ik,bi,tu,rr,ja,av,wa,ti,od,pi,vi,gu,zi,di,ai,br,il,be,ne,gr,he,ss,ck,at,ir,im,lh,pr,go,io,ou,id,ho,va,rz,rt,ce,it,mi,ac,ve,za,ol,ba,ci,so,pe,ek,ew,ga,ur,ka,si,lo,ec,ha,li,zy,ze,ca,ak,me,mo,ri,sa,ei,pa,ni,om,ns,tr,us,wi,is,ch,sz,ic,le,na,po,on,ko,am,no,el,ad,ow,ws,em,ia,co,ie,la,nd,st,in,se,to,ro,cz,al,nt,da,ta,re,ue,en,ma,or,te,qu,es,ar,do,er,de,an,sk,as,ra,os,ki,slo,tka,ajd,azi,gla,iek,lav,lod,rti,uco,vai,var,ves,bed,bic,dal,ego,fan,fro,iol,sce,tus,uid,atr,lag,nal,nau,omb,tid,tou,gur,kas,koz,nus,raj,rez,rme,ycz,zym,bla,erb,erv,ite,kop,kuc,mul,ntu,rci,tir,uli,uto,wol,zys,div,edi,ide,koc,lej,oci,ona,usc,uta,chu,cob,kra,mic,ngu,ote,rak,rud,vas,aje,alu,amp,enf,erg,iem,jew,mel,via,bac,dis,far,had,kot,lgu,luc,ucz,vor,zio,ain,bur,ceb,did,mag,mia,one,ord,rmo,ryn,aka,alm,cav,ilu,kaw,onh,red,ebe,epo,iam,jak,log,nav,ock,pon,rro,unc,ank,bat,dem,kub,ouv,pad,sla,aco,alk,ask,chn,edz,els,erm,med,nhe,niu,nko,owa,scu,van,chi,gun,ixa,ize,lma,nat,omi,ors,rga,ure,zem,cre,ejo,gri,iej,lto,pes,ral,spo,zni,don,ial,kus,ova,rni,son,ude,mis,ouc,ous,sam,cap,ces,fim,fin,ger,ilo,rko,rot,sik,sni,sus,wen,zyc,cur,dom,etr,fre,odz,cab,iad,ret,uga,cad,eda,obi,oce,sab,vel,cel,eix,ili,lad,nin,rai,zik,mes,nun,rid,shi,sit,was,aws,ior,iss,jun,leg,lka,nsa,ped,rno,tru,adz,bin,gam,ler,lor,nem,sub,szu,abr,bri,mei,roz,sek,swi,mba,ndr,orz,vis,fic,fri,nom,onc,rig,rin,stu,inf,mur,ono,pit,zal,arn,awi,gre,kul,orq,pen,ena,nov,ocz,vir,alc,alg,atu,erz,ied,jas,law,nad,ong,pac,pra,diz,eio,eng,nis,oli,pou,rba,tig,rel,rod,uba,ace,emi,gas,grz,hei,lko,rar,unt,dec,gol,lat,prz,aci,emb,ncz,nfi,dam,edr,iko,ila,mpa,oto,ses,sua,ako,kon,met,ner,ovo,uit,arg,bus,cri,han,has,mou,nge,aus,dow,nim,sol,esi,iar,jar,nes,sco,zel,ang,pol,cen,ori,ric,rza,sci,sej,tel,trz,ych,asa,ato,ene,kur,rqu,rtu,arr,bas,bia,cla,lei,mon,uas,api,fei,ies,sca,woj,bem,egr,lta,osi,ubi,ari,emo,hal,lim,mpr,nar,ryc,tud,eni,eve,lac,nek,tek,zka,esa,nca,sar,dro,foi,reg,ack,egu,fra,orm,dra,gad,gro,her,imi,seg,mac,mig,mil,war,ate,kol,nga,poi,spa,tri,ane,hum,inc,int,pin,ham,kal,mui,omp,zen,bro,fam,mun,ual,uel,abi,apa,ave,krz,uda,pei,sko,zki,ecz,eme,rim,tko,alo,ein,iet,iuk,lcz,jan,rat,ris,ago,eia,cam,raz,val,vin,iga,win,elh,eza,kor,rav,eva,kar,eis,fal,lis,sal,gal,ius,oni,tav,aga,amb,gor,ndi,awa,cio,ech,ede,gie,olo,udo,zes,ban,cin,roc,wia,ico,imo,tow,zin,esz,lik,zer,olh,rab,rom,sti,usi,zar,fil,pri,ami,bel,vid,alh,eta,lek,mpo,ogo,ota,den,deu,gar,lev,mer,anc,ens,bor,eli,rde,rys,vam,dur,abe,bie,rus,dar,ers,ing,orr,rac,faz,tin,aze,eno,erc,ini,als,mie,orn,arm,sad,edo,lec,vos,eci,zuk,enc,gue,ien,nce,ars,car,sim,cos,enh,hos,kos,nci,obe,ois,rda,zia,bre,del,len,low,pal,czk,how,ich,our,sso,ysz,har,lon,erd,lak,sob,wal,bal,elo,ese,cid,dan,ime,mat,tad,pos,cas,isz,cie,gua,oda,ons,can,ode,tom,eja,pan,san,ilh,alt,spe,uer,ete,lan,pod,ssi,eri,rto,ame,ern,gos,ian,rre,ard,lic,ola,hor,rta,los,rma,tur,sza,ans,aro,oro,czu,usa,arz,nic,ess,mad,rna,tal,ole,nik,sas,tam,ust,nho,odo,gan,aba,sia,oss,sos,dei,ula,uan,dia,nti,sin,ron,che,pas,dor,szy,uma,tor,ast,les,oma,osz,ima,ata,ega,row,igo,qui,rem,zie,zko,anh,ren,uem,min,ero,usz,osa,sie,mal,pel,emp,ios,rek,ssa,ana,tod,aqu,ind,ino,lew,arc,ica,obr,und,asi,sen,ias,inh,och,szk,pro,seu,ntr,utr,eus,omo,out,fer,pie,rio,ert,sem,ore,cer,cia,lus,lho,rcz,ber,nas,wie,zek,iro,oso,uch,tar,ani,bar,ser,amo,lar,ten,ece,ome,ali,acz,esc,ira,now,pre,lha,rad,lsk,tre,sse,lin,tas,ick,ist,bra,rec,tem,der,cor,ina,mai,asz,ven,nia,las,rsk,ita,ach,eit,tro,esp,cze,zak,dad,rze,szc,sze,zcz,ver,ier,nie,uro,cza,sto,rzy,mor,qua,ont,art,eck,nha,gra,ost,zew,cho,
eir,iel,are,gen,ram,ere,iec,ond,ade,mos,ora,tos,ass,ale,ido,rra,rte,yns,nos,ort,lhe,ais,cha,tes,ama,ura,nda,for,ele,ria,ida,kie,per,zyn,das,ano,men,owi,ito,err,ras,dzi,zyk,iak,ela,str,mas,mar,ala,ava,nta,rei,con,par,tan,era,ste,iew,end,ada,ews,man,res,ros,ter,ran,ewi,tra,dos,des,nto,czy,ara,nde,ins,sta,por,kow,ado,com,cki,ant,wic,icz,est,ndo,and,nte,nsk,ent,ows,wsk,que,ski
0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,139,141,142,143,145,146,147,148,149,150,152,153,154,155,156,158,159,160,161,162,163,164,165,166,167,168,169,170,173,174,175,176,178,179,180,182,183,184,186,187,188,189,190,191,192,193,194,195,197,198,199,200,201,203,204,205,207,208,209,210,212,213,214,215,216,217,218,219,221,222,223,224,225,226,227,228,229,230,231,232,234,235,236,237,238,239,241,242,243,244,245,246,247,249,252,253,254,255,256,257,258,259,262,264,266,270,271,272,273,274,277,278,279,280,281,282,283,284,286,288,289,290,291,292,293,295,296,297,298,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,320,321,322,324,326,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,352,353,354,356,357,358,359,360,361,362,363,364,365,366,367,368,371,372,373,374,375,377,378,379,380,381,382,383,384,385,387,388,390,391,392,393,394,395,396,399,400,401,402,403,404,405,406,408,409,410,411,412,413,414,415,416,417,419,420,421,422,423,425,426,427,428,429,430,432,433,436,437,438,439,440,441,442,443,444,447,448,449,450,451,452,453,454,455,456,457,460,461,463,464,465,466,467,468,469,470,471,472,473,475,476,477,478,481,482,483,484,485,486,488,489,490,491,492,493,494,495,496,498,499,500,501,503,504,505,506,508,510,511,513,514,515,516,517,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,547,548,549,550,553,554,555,556,557,558,559,562,563,564,565,566,569,570,571,572,573,575,576,577,578,580,581,583,585,586,587,588,589,590,591,593,594,595,596,597,598,600,601,603,604,606,607,608,609,610,611,612,613,614,616,617,618,619,620,621,623,624,625,626,627,628,629,631,633,634,635,636,637,638,639,640,641,642,643,644,645,647,648,649,650,651,653,655,656,657,658,659,660,661,662,663,664,665,666,668,669,671,672,673,676,677,678,679,681,682,683,684,687,688,689,690,691,692,694,695,696,697,699,700,701,702,703,704,705,706,708,709,710,711,712,713,714,716,717,718,720,722,724,725,726,727,728,730,731,732,733,734,736,737,738,739,740,741,742,743,744,746,747,748,749,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,767,768,769,770,771,772,773,774,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,795,796,797,799,800,801,802,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,827,828,829,830,832,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,862,863,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,902,904,905,906,907,908,909,910,912,913,914,915,916,917,918,919,921,922,923,924,926,927,928,929,930,931,932,933,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,957,958,959,960,961,963,964,965,966,967,968,970,971,972,973,974,975,977,978,980,981,982,983,984,985,986,987,988,990,991,993,994,995,996,997,998,1000,1001,1003,1005,1006,1008,1009,1011,1012
53,96,97,103,234,326,848,1511,1810,1830,2460,2465,2600,2605,2883,3157,3178,3594,3677,4015,4270,4300,4779,5368,5733,5848,6399,6448,6876,7020,7406,7820,8541,8733,8799,8800,8916,9034,9162,9585,10379,10687,10703,11165,11218,11434,11656,11719,11728,12039,12053,12620,12828,13615,13883,14119,14714,15219,15835,16723,16730,16735,17184,17239,17747,18155,18530,18544,18798,19152,19228,19316,20150,20163,20785,21502,22001,22615,22670,23886,24258,24275,24283,24843,24939,25231,25339,25852,26481,26494,26996,27814,29456,29702,30522,30870,31810,32012,32165,32193,32201,32804,32829,34935,34945,35400,35450,35929,36457,38655,39355,39752,39978,40647,40658,41722,42314,43256,43263,43809,45935,46107,47166,50693,51959,52470,53718,53868,56680,57095,57293,58118,58453,59408,59588,59627,59632,59670,59693,59699,59707,59776,59792,59816,59836,59851,59872,59886,59946,59950,59951,59957,59992,60018,60035,60105,60110,60115,60126,60142,60191,60265,60283,60305,60310,60313,60382,60384,60394,60452,60517,60555,60582,60625,60637,60696,60704,60776,60791,60795,60856,60861,60876,60877,60880,60907,60939,61007,61060,61074,61082,61126,61147,61154,61158,61234,61253,61255,61306,61351,61397,61470,61536,61545,61596,61617,61618,61620,61631,61663,61717,61722,61725,61771,61789,61790,61820,61832,61833,61865,61913,61981,62018,62038,62040,62041,62046,62088,62166,62218,62257,62261,62317,62353,62415,62455,62494,62523,62570,62573,62575,62635,62638,62639,62642,62656,62665,62667,62714,62717,62721,62722,62729,62743,62784,62827,62829,62879,62886,62890,62893,62949,63000,63001,63009,63056,63126,63127,63159,63203,63281,63289,63315,63365,63428,63429,63434,63451,63454,63471,63539,63553,63565,63599,63677,63726,63730,63797,63814,63847,63853,63876,63968,64056,64101,64102,64103,64123,64181,64207,64208,64253,64341,64344,64368,64389,64450,64452,64454,64467,64525,64536,64544,64617,64618,64632,64646,64702,64704,64714,64805,64901,64971,65030,65042,65074,65104,65121,65212,65226,65305,65381,65451,65546,65551,65559,65571,65634,65694,65697,65719,65741,65755,65795,65853,65931,65932,66021,66027,66112,66157,66238,66239,66308,66319,66353,66354,66428,66506,66609,66697,66734,66749,66834,66851,66854,66881,66923,66924,66972,67009,67103,67116,67121,67130,67147,67231,67268,67360,67367,67394,67395,67439,67441,67445,67450,67455,67561,67562,67648,67675,67695,67711,67784,67880,67906,67915,68011,68012,68034,68039,68059,68158,68237,68308,68330,68349,68434,68452,68484,68540,68548,68567,68599,68610,68651,68664,68665,68672,68772,68827,68869,68915,68999,69059,69133,69135,69224,69226,69232,69343,69445,69446,69448,69466,69478,69479,69515,69542,69567,69592,69678,69681,69708,69713,69717,69719,69761,69804,69929,69990,69991,70098,70104,70146,70170,70248,70317,70341,70449,70561,70574,70681,70740,70756,70851,70853,70905,70912,71016,71022,71107,71173,71221,71272,71284,71352,71484,71488,71551,71663,71773,71777,71856,71894,71990,71991,72094,72148,72150,72155,72157,72163,72166,72168,72259,72276,72288,72348,72468,72505,72588,72663,72688,72689,72809,72811,72913,72935,73061,73101,73203,73278,73283,73285,73313,73352,73402,73442,73523,73525,73568,73570,73572,73663,73693,73707,73721,73723,73740,73743,73746,73751,73759,73873,73887,73956,73969,74002,74152,74275,74321,74409,74479,74502,74510,74516,74518,74548,74678,74738,74859,74933,74941,74971,75062,75074,75075,75085,75202,75214,75289,75314,75393,75433,75460,75618,75701,75702,75711,75712,75780,75781,75834,75847,75968,75995,76011,76134,76138,76190,76266,76269,76360,76367,76453,76464,76594,76706,76751,76760,76796,76919,76959,76964,77083,77085,77086,77122,77127,77169
,77265,77268,77277,77454,77550,77606,77610,77674,77784,77791,77958,77967,78078,78214,78329,78476,78478,78577,78758,78801,78931,78989,79116,79147,79183,79248,79440,79494,79502,79521,79523,79564,79566,79570,79631,79694,79698,79759,79781,79782,79829,79942,79943,79944,79978,80021,80023,80044,80062,80104,80108,80186,80236,80247,80258,80370,80373,80382,80425,80461,80485,80493,80519,80633,80643,80744,80749,80810,80815,80913,81109,81145,81250,81305,81453,81472,81516,81517,81522,81543,81557,81563,81568,81635,81691,81759,81761,81790,81791,81849,81852,81862,81965,82167,82381,82403,82404,82417,82418,82423,82670,82851,82928,82937,82962,82983,82986,83052,83058,83067,83228,83230,83254,83281,83512,83774,83776,83998,84200,84391,84543,84545,84763,84766,84918,84970,85073,85083,85087,85141,85270,85366,85369,85377,85393,85405,85464,85506,85670,85926,86032,86092,86187,86188,86306,86454,86495,86544,86687,86693,86755,86759,86803,86806,86838,86843,86844,86880,87056,87158,87451,87546,87834,87838,88149,88151,88425,88454,88514,88529,88530,88583,88584,88666,88879,88953,88968,88994,89030,89127,89166,89402,89520,89521,89594,89646,89654,89847,90194,90254,90255,90260,90585,90611,90614,90717,90730,90766,91104,91132,91133,91355,91361,91485,91512,91516,91692,91821,91826,92027,92028,92433,92435,92562,92576,92676,92682,92685,93080,93183,93391,93414,93482,93748,93752,93915,93933,94042,94044,94063,94077,94242,94243,94290,94402,94738,95161,95191,95216,95561,96061,96526,96745,96819,96911,96917,96966,97230,97333,97417,97818,97823,97824,98049,98284,98842,98861,98900,98954,99128,99943,99973,100028,100978,101147,101164,101849,101966,101969,102171,102181,105284,105301
0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,139,140,142,143,144,145,146,147,148,149,150,151,152,154,155,156,158,159,160,161,162,163,164,165,166,168,169,170,171,172,173,174,175,176,177,178,179,180,183,185,186,187,188,189,190,191,192,194,196,197,199,200,201,202,203,204,205,206,208,209,210,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,230,231,232,233,234,235,236,237,240,241,242,243,244,245,246,248,249,250,251,252,253,254,255,258,259,260,261,263,265,267,269,270,272,273,274,275,277,278,280,281,282,283,285,286,287,288,289,290,291,293,294,295,296,297,298,299,300,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,322,323,324,325,327,328,329,330,331,332,334,335,336,337,338,339,341,342,343,344,346,347,348,350,351,353,354,355,356,357,358,359,360,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,387,388,391,392,393,394,395,398,400,401,402,403,404,405,406,407,409,410,411,412,413,414,416,417,418,419,420,421,422,423,424,425,426,427,428,432,433,434,436,437,438,439,440,441,442,443,444,445,447,448,450,451,452,453,454,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,481,482,483,485,486,487,488,489,490,491,493,494,495,496,497,498,499,500,501,502,503,504,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,528,530,531,532,533,534,536,537,538,539,540,541,542,543,544,545,546,547,548,549,551,552,554,555,557,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,582,583,584,585,586,587,588,589,590,591,592,593,595,596,597,598,599,600,601,602,604,605,606,607,608,609,611,612,614,615,616,617,619,620,621,623,624,626,627,628,629,630,631,632,633,634,635,636,637,639,640,641,642,643,644,645,646,647,648,650,651,652,653,654,655,656,657,659,660,661,664,665,666,667,668,669,671,672,673,674,675,676,677,678,679,681,682,683,684,685,686,687,688,689,690,691,692,693,695,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,754,755,756,757,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,798,799,800,801,802,803,804,805,807,808,809,810,811,812,813,815,816,817,818,819,820,821,822,823,824,825,827,828,829,830,831,832,833,834,836,838,839,840,841,842,843,844,845,846,847,848,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,893,894,895,896,897,898,901,902,903,904,905,906,907,908,909,910,911,912,913,914,915,916,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,937,938,939,940,941,942,943,944,945,946,947,948,949,951,952,953,954,955,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,997,998,999,1000,1001,1002,1003,1004,1005,1006,1008,1011,1012
34,86,277,288,333,360,438,442,818,851,903,1063,1088,1688,1718,1786,1834,2008,2020,2192,2247,2268,2288,2333,2382,2395,2427,2594,2615,2768,2863,2956,3034,3461,3508,3515,3681,3896,4148,4155,4235,4610,5110,5441,5479,5756,6215,6387,6479,6698,6836,6868,6893,7033,7230,7362,7567,7589,8030,8095,9356,9364,9709,9750,10452,10525,10865,11078,11614,11753,11779,11810,12206,13511,13843,14177,14249,14728,15063,15200,15247,15895,16037,16130,16909,16920,17884,18271,18523,18564,18750,19652,19800,20068,20403,21185,22127,22477,22512,22521,22525,24158,25318,25676,25685,26615,26662,26875,27021,28228,29690,30521,31678,32214,32303,34065,35230,35700,38640,38835,39406,40606,42150,42171,43633,44577,47483,48535,50884,51348,51381,54773,56840,60769,66576,66584,66649,66656,66687,66688,66690,66744,66811,66843,66869,66870,66871,66880,66903,66913,66927,66952,66982,66989,67001,67035,67052,67054,67063,67115,67116,67123,67175,67193,67194,67221,67232,67234,67237,67242,67243,67268,67272,67276,67282,67298,67335,67339,67341,67342,67350,67363,67366,67387,67425,67470,67471,67472,67475,67486,67492,67538,67545,67595,67596,67599,67600,67606,67608,67614,67662,67663,67668,67707,67708,67723,67725,67728,67729,67757,67770,67783,67785,67786,67795,67801,67804,67808,67830,67847,67849,67850,67851,67858,67880,67881,67947,67991,67993,67996,67997,68052,68053,68057,68081,68121,68123,68127,68156,68159,68166,68171,68176,68184,68208,68217,68221,68251,68254,68274,68310,68330,68334,68377,68379,68424,68489,68496,68519,68570,68584,68616,68617,68632,68652,68679,68717,68720,68734,68758,68766,68780,68790,68791,68817,68903,68911,68936,68972,68988,68999,69077,69078,69094,69141,69143,69145,69165,69168,69174,69175,69177,69184,69208,69276,69296,69331,69333,69345,69361,69363,69367,69373,69384,69462,69498,69500,69521,69528,69540,69588,69594,69595,69689,69704,69756,69770,69776,69784,69800,69855,69874,69964,69965,69966,69968,69978,70060,70061,70085,70102,70120,70127,70130,70138,70139,70142,70148,70158,70159,70162,70163,70189,70251,70263,70269,70292,70293,70347,70357,70367,70414,70420,70447,70470,70478,70479,70484,70487,70492,70524,70612,70630,70631,70634,70672,70673,70681,70714,70727,70728,70773,70774,70782,70804,70864,70867,70870,70883,70903,70904,70911,70932,70933,70963,70981,71042,71134,71203,71204,71209,71212,71258,71333,71335,71366,71384,71483,71499,71552,71617,71658,71662,71671,71683,71700,71724,71749,71767,71775,71781,71791,71800,71804,71811,71812,71831,71833,71878,71910,71940,71955,71956,71972,72009,72041,72076,72099,72197,72200,72208,72215,72232,72347,72348,72418,72419,72421,72426,72446,72458,72468,72469,72506,72511,72512,72604,72609,72729,72845,72961,73034,73126,73167,73192,73315,73317,73318,73319,73321,73322,73347,73348,73361,73390,73391,73440,73443,73453,73455,73479,73491,73492,73511,73550,73567,73572,73575,73608,73612,73637,73640,73654,73657,73659,73759,73775,73791,73809,73879,73912,73955,73957,73979,73992,74029,74057,74062,74067,74197,74199,74209,74229,74244,74279,74378,74392,74450,74459,74461,74537,74550,74650,74652,74655,74687,74690,74825,74830,74858,74870,74893,75030,75032,75072,75150,75156,75170,75265,75326,75365,75420,75455,75573,75631,75646,75650,75656,75738,75778,75780,75790,75909,75969,75974,75976,75987,76007,76014,76023,76061,76067,76069,76156,76278,76395,76510,76612,76617,76650,76703,76735,76746,76803,76823,76826,76850,76893,77049,77056,77117,77158,77241,77251,77273,77281,77283,77349,77361,77430,77452,77459,77467,77473,77474,77536,77574,77672,77827,77839,77887,77923,77934,77936,77978,78086,78165,78315,78330,78331,78332,78491,78542,78596,78698,78
727,78745,78746,78756,78758,78810,78897,78924,78950,78972,78977,79134,79138,79150,79164,79240,79243,79245,79270,79292,79302,79339,79377,79435,79436,79506,79638,79650,79683,79724,79737,79837,79839,79849,79852,79858,79861,79916,79945,80009,80010,80021,80040,80161,80167,80171,80305,80357,80389,80401,80404,80453,80495,80556,80671,80692,80696,80716,80719,80807,80834,80900,81014,81018,81032,81091,81193,81197,81370,81563,81605,81612,81724,81808,81842,81867,81900,81909,82100,82104,82224,82226,82350,82352,82363,82408,82413,82429,82510,82513,82615,82643,82653,82753,82799,82876,82979,83063,83133,83135,83258,83359,83451,83551,83558,83569,83808,83823,83892,83915,84035,84115,84127,84132,84313,84550,84646,84764,84767,84774,84935,84939,84944,85107,85108,85120,85122,85144,85359,85438,85446,85451,85608,85628,85634,85840,86039,86048,86206,86262,86427,86433,86485,86642,86699,86842,86854,86913,87099,87369,87530,87703,87785,87881,87902,87936,88062,88077,88106,88120,88240,88355,88468,88568,88569,88717,88718,88812,88977,88987,89118,89304,89313,89550,89551,89595,89661,89664,89788,89926,89966,90149,90154,90198,90203,90284,90469,90470,90595,90660,90806,90825,90827,91164,91165,91166,91201,91323,91350,91353,91563,91568,91791,91793,91898,91903,91914,91935,91936,92197,92219,92227,92243,92251,92346,92349,92351,92656,92685,92823,92824,93069,93253,93495,93850,93860,93874,94133,94455,94782,95133,95146,95554,96013,96117,96555,96774,97064,97310,97456,97613,97939,98142,98158,98200,98659,98896,98898,99218,99222,99591,99597,100078,100598,100916,100932,101370,101613,101892,102230,102450,102688,102697,102706,102729,103000,103218,103222,103641,103643,103721,104053,104409,104546,104589,104598,104788,105481,105645,106202,106208,106625,107113,107128,107444,108018,108575,109155,110124,110149,110150,111132,111264,112442,112461,113272,113314,116200,120942
3,6,7,9,10,13,19,20,26,27,32,36,42,44,45,49,60,64,67,68,72,73,80,84,89,91,96,98,99,102,104,107,108,110,112,113,115,116,121,125,126,127,128,130,131,134,152,164,206,217,228,286,287,308,433,435,451,503,543,592,594,595,613,657,698,704,709,712,720,730,753,764,810,817,820,836,839,854,875,876,881,883,897,913,925,930,933,937,949,950,953,956,960,962,973,974,976,978,987,990,997,1009,1012
1,2,6,12,21,25,26,27,32,33,41,43,45,54,55,57,60,62,63,72,150,151,152,154,156,158,159,161,168,175,182,183,187,221,223,224,261,330,331,332,523,525,527,536,538,541,547,549,550,551,553,554,555,556,558,560,561,562,565,566,570,573,574,575,576,577,585,589,590,591,592,621,622,624,625,626,627,632,636,637,639,641,643,644,645,649,656,657,659,662,673,674,675,680,685,686,691,692,700,701,702,708,717
6,13,14,15,17,18,20,21,22,23,24,28,33,36,38,41,44,45,50,51,52,54,56,57,60,61,62,64,66,67,69,70,72,73,74,76,79,80,84,85,86,87,89,90,93,94,95,98,99,101,102,105,108,110,112,116,119,120,124,125,129,130,131,133,134,136,149,163,164,215,253,257,262,307,315,316,321,345,347,356,357,358,364,365,374,386,400,419,427,430,435,441,448,468,475,476,504,512,523,525,542,561,579,592,595,599,608,612,615,630,637,642,665,673,676,695,712,715,720,723,724,730,736,739,748,750,755,762,768,780,781,785,790,794,802,805,813,819,820,823,825,844,852,854,855,864,876,877,878,885,891,895,906,913,921,922,930,949,950,954,956,968,977,979,980,985,990,991,993,996,997,1000,1001,1002,1005,1008,1009,1012
3,22,25,32,33,34,44,45,54,56,57,60,72,75,78,80,81,82,84,85,89,90,94,95,98,125,145,158,160,170,171,172,180,203,204,205,224,225,237,238,252,254,263,265,266,277,279,280,286,287,290,295,312,329,345,354,364,367,372,396,409,466,483,493,494,495,497,506,507,508,509,512,513,514,516,517,518,521,528,529,530,531,532,533,534,537,538,539,541,542,548,551,552,553,554,555,556,557,560,572,573,574,576,577,578,580,581,582,584,586,592,593,594,595,596,598,601,602,610,611,612,613,614,616,623,624,627,628,630,631,635,636,637,639,640,643,644,647,648,650,651,657,660,661,662,665,668,669,670,674,675,677,679,682,691,694,696,697,698,702,704,705,706,712,714,735,745,747,763,772,773,774,775,778,782,790,794,851
6,23,26,46,48,53,69,71,74,76,77,83,86,90,91,105,106,109,110,114,115,120,123,128,141,147,150,522,606,628,644,686,735,745,776,777,785,893,935,948,955,969,989,1004
2,3,6,7,17,29,35,36,37,38,39,47,69,70,74,83,190,209,210,318,321,322,323,325,328,331,332,336,337,338,340,370,376,378,380,385,386,394,396,397,398,408,501,608
8,18,27,29,43,67,78,95,98,102,121,124,125,129,131,133,467,582,670,736,784,786,805,825,841,852,858,861,919,921,929,931,940,968,978,991,997,1000,1005
45,49,51,56,58,92,130,164,183,211,268,269,282,293,341,385,400,404,423,425,432,433,435,436,453,456,460,475,511,512,518,519,520,524,530,535,548,549,558
1,9,13,18,20,27,29,33,38,42,44,45,50,54,55,60,61,63,68,69,70,71,78,81,84,86,90,93,94,95,97,98,101,102,108,111,117,119,121,124,125,127,129,131,133,134,189,242,294,296,334,346,358,376,384,385,398,450,451,459,463,499,512,525,602,616,623,641,654,667,671,672,684,699,706,707,710,715,719,734,743,750,755,757,786,803,819,821,825,826,839,850,852,855,862,874,890,892,896,911,916,917,921,924,929,933,949,950,958,961,962,968,974,977,978,981,985,991,996,1000,1003,1005
3,36,39,44,46,47,48,67,73,80,102,111,118,142,143,146,157,162,169,193,240,252,258,261,262,264,265,276,277,279,288,289,293,390,392,393,396,403,422,436,446,502,506,511,521,526,531,534,535,536,537,538,539,540,541,542,545,548,550,551,552,553,559,562,568,569,574,575,580,602,623,624,626,627,629,630,636,637,638,641,642,643,646,649,650,654,655,658,667,668,690,693,694,695,698,714,721,727,745,754,757,765,769,770,784,803,806,807,812,857,860,863,866,871,872,875,886,887,888,889,892,893
0,1,9,13,17,18,21,29,31,33,38,42,43,44,45,49,50,54,55,60,63,66,68,70,71,76,85,86,87,89,90,91,93,94,95,97,98,101,102,104,105,108,113,116,119,121,124,125,127,129,131,132,133,134,140,154,175,187,190,197,211,282,287,292,294,323,347,349,384,399,401,430,435,436,438,444,450,490,500,511,525,547,562,568,570,571,572,580,592,594,602,610,623,654,674,676,691,694,699,706,715,723,739,744,774,786,787,798,810,820,822,823,825,844,849,850,896,903,905,917,925,932,944,949,952,957,961,962,963,968,978,993,996,997,1005,1008
6,15,16,23,24,32,33,34,35,48,49,53,54,66,123,124,125,137,138,145,150,151,215,240,260,261,264,266,267,268,275,276,279,284,285,293,300,302,311,326,329,347,349,351,355,366,377,381,383,387,391,392,396,471,473,474,475,476,483,484,485,486,487,488,489,490,491,492,493,494,497,498,499,500,501,502,512,513,525,526,527,529,530,531,532,533,534,535,536,538,539,540,544,555,557,558,559,588,591,594,595,596,599,600,602,603,609,626,629,630,635,636,637,638,639,654,655,656,657,660,663,664,666,670,685,686,688,689,691,699,701,718,721,726,729,733
3,14,30,31,36,37,41,48,51,54,55,56,62,65,66,72,74,80,82,86,96,102,105,106,107,109,111,114,115,116,117,120,121,122,126,128,132,134,136,149,150,213,226,258,290,312,335,337,367,369,379,392,423,444,448,461,508,515,534,550,618,627,639,642,644,650,705,731,747,760,766,768,770,777,782,802,847,851,856,866,870,887,907,923,942,945,959,965,969,982,984,989,992,1004,1006
19,20,22,37,38,86,87,90,92,93,94,101,102,105,106,107,114,117,119,120,122,123,136,140,177,178,184,535,537,538,539,540,541,542,550,551,586,587,588,589,590,591,592,593,598,599,609,613,614,619,620,621,623,636,637,638,639,640,641,642,643,644,645,646,647,648,651,652,656,657,676,677,678,714,722,724,725,749,750,760,762,763,797,798,799,800,802,803,805,807,808,813,815,816,1123
6,13,30,34,39,41,48,50,57,62,72,74,75,82,85,88,90,96,100,105,107,114,116,117,120,122,132,171,233,297,339,392,480,495,504,510,523,603,631,645,714,731,735,738,745,747,759,796,846,847,869,873,889,899,923,928,935,975,983,1001,1006,1010
15,16,17,38,41,84,230,231,232,235,236,243,246,275,278,279,280,281,282,283,285,300,301,304,305,307,378,390,439,440,444,445,470,472,473,474,475,476,477,480,481,482,483,484,496,522,526,529,530,531,534,555,557,558,587,590,689,691,692,695,710,711
1,12,18,27,28,29,36,40,44,50,52,53,56,60,62,63,73,80,84,86,89,91,94,95,98,99,102,104,105,108,112,113,119,121,124,125,127,129,130,131,133,268,309,323,405,436,438,506,568,579,592,595,607,625,630,637,674,726,731,734,739,750,752,769,774,791,803,813,817,825,841,855,856,878,883,887,888,890,891,902,920,925,933,944,954,956,967,968,974,978,990,991,993,996,997,1000,1005,1008,1009,1012
8,9,10,11,12,26,28,35,37,38,39,40,44,45,46,49,50,52,53,54,55,57,58,59,69,72,79,87,89,103,104,106,121,133,149,150,156,246,248,260,261,262,264,265,266,267,269,270,271,272,273,274,275,281,288,289,292,294,295,296,299,301,302,303,307,309,311,339,343,345,347,353,354,355,356,357,358,359,360,361,363,365,367,370,415,416,418,419,422,425,426,427,434,435,442,444,445,459,461,463
1,11,18,25,37,38,44,47,50,53,54,56,59,60,63,64,66,67,69,71,73,76,78,80,84,85,86,89,90,91,93,94,96,97,98,99,101,102,104,105,108,111,112,113,117,119,121,124,127,129,131,132,133,134,177,184,203,207,224,240,244,290,310,338,347,358,423,468,469,503,504,508,560,574,580,581,589,592,609,612,614,616,631,634,645,650,652,655,668,670,674,687,698,702,705,710,731,734,742,752,753,754,756,757,762,779,793,797,800,803,805,808,811,816,820,841,852,859,861,863,866,870,874,877,879,883,887,898,908,926,927,929,933,936,949,950,959,968,977,982,996,997,1000,1003,1008,1009
1,7,8,72,74,77,95,105,108,109,111,114,115,119,120,123,124,152,159,160,162,163,164,167,170,172,174,178,181,189,202,206,208,212,225,231,237,245,254,260,262,298,299,301,305,309,363,389,401,402,407,444,475,477,479,480,482,483,489,497,498,500,501,509,510,511,514,515,516,524,531,532,537,538,539,540,541,542,543,544,562,565,566,567,573,578,579,581,582,586,595,606,607,609,612,614,615,616,617,620,621,624,625,626,627,634,640,649,650,651,656,657,658,659,660,667,672,673,697,698,699,700,702,703,704,705,706,713,715,719,730,736,745,748,756,757,773,774,775,783,785,787,788,789,791,797
4,10,14,16,24,25,26,28,30,31,35,41,46,47,48,49,51,57,62,66,69,71,74,75,76,80,82,88,90,91,96,97,102,105,106,107,108,110,111,112,114,115,116,117,120,126,132,141,149,153,180,217,223,234,266,268,269,283,305,307,339,356,358,364,370,409,412,422,427,431,470,483,495,508,509,547,548,571,589,606,616,617,631,645,668,669,687,690,691,731,735,738,745,747,756,758,760,762,779,802,836,838,846,847,848,859,867,871,874,883,887,888,889,897,899,928,937,945,948,965,966,969,970,982,987,994,999,1004,1006,1007
2,3,21,22,23,26,30,38,39,42,43,45,46,47,51,55,57,64,65,71,75,171,179,216,222,228,229,231,250,253,262,264,265,300,313,315,318,319,321,322,343,344,350,355,437,454,459,461,463,464,466,467,494,498,502,503,504,527,529,530,531,532,533,540,541,542,543,546,547,549,550,556,577,578,579,621,622,623,625,627,628,655,656,658,669,670,673,674,677,686,687,689,692,695,696,704,710,711,712,714,715,716,717,718,719,723,725,729,730,733,735,736,739,740,741,778,780,782,787,791,793,803,808,809,815,816,817,827,835,836
7,9,13,17,31,41,42,49,55,60,64,67,72,73,75,78,84,86,88,89,94,99,102,104,112,113,115,121,125,130,133,134,270,459,460,568,586,676,677,715,755,819,876,895,900,906,909,913,919,921,925,929,946,950,956,962,976,990,996,1009,1012
4,6,7,9,11,12,13,14,15,26,28,29,30,33,35,37,38,41,43,45,47,73,101,113,119,122,127,128,137,581,582,587,588,591,592,597,598,601,602,603,604,606,607,608,609,612,614,615,617,618,624,625,629,634,647,664,670,672,674,685,1129
0,1,5,9,11,13,17,18,25,27,29,36,42,43,44,50,52,54,55,58,60,63,64,66,67,69,73,76,78,80,81,84,86,88,89,90,93,94,96,97,98,99,101,102,104,105,107,108,111,112,113,115,117,119,121,124,125,127,129,130,131,132,133,134,144,169,235,244,263,264,279,280,287,294,327,345,346,358,377,378,382,422,429,436,457,459,468,476,499,504,532,557,563,565,570,571,574,587,592,599,609,614,621,629,645,650,652,661,662,664,667,668,676,686,691,695,700,701,702,706,715,734,739,742,743,750,752,753,755,756,774,786,789,791,793,796,804,818,819,820,822,823,825,827,838,839,841,853,861,863,864,874,876,883,885,890,895,903,906,913,916,920,921,923,924,925,926,931,933,944,947,948,949,950,953,954,956,957,959,962,963,967,968,977,978,981,982,984,990,991,993,996,997,1002,1005,1008,1009,1012
13,22,23,30,32,39,41,43,44,54,59,72,73,75,85,104,107,108,110,111,120,124,129,137,141,143,148,180,184,185,188,195,209,210,222,226,233,237,239,266,269,282,289,321,329,332,333,353,364,376,456,458,463,467,479,480,507,543,553,562,587,625,629,632,635,637,638,641,643,644,647,649,650,653,655,656,657,658,659,669,670,671,672,676,677,679,686,687,689,690,692,695,696,698,700,702,703,704,705,706,707,708,709,771,775,776,779,782,783,784,787,788,789,790,792,795,798,799,800,813,819,820,827,828,829,831,833,834,835,836,839,850,852,855,861,863,864,866,867,868,880,881,883,884,885,893,896,897,899,900,902,903,910,911,917,932,934,937,939,941,942,943,950,958,959,960,961,965,972,973,974,975,978,981,982,983,989,991,997,1000,1001,1012,1014,1017,1018,1019,1021,1030,1037,1043,1056,1060,1062,1063,1064,1065,1071,1080
0,1,9,13,21,36,50,55,65,66,69,77,84,88,89,90,93,95,97,99,104,105,108,110,121,125,129,133,184,196,280,312,327,358,382,481,496,557,589,593,599,616,626,676,702,718,721,743,749,770,774,785,789,792,804,818,819,852,853,874,883,888,890,903,920,929,956,976,991,1009
1,12,13,14,17,18,21,32,34,35,40,43,50,51,52,54,58,59,60,140,155,162,163,169,172,176,177,184,186,187,188,189,190,191,192,193,194,195,196,197,210,211,212,213,214,215,217,228,229,230,234,240,242,243,244,245,246,247,249,252,253,258,259,260,264,265,266,276,277,356
3,9,14,15,17,20,21,24,25,26,30,31,34,35,36,41,52,56,57,61,62,65,66,67,69,71,72,73,74,77,80,82,84,87,88,90,91,94,95,96,97,98,99,103,105,106,109,110,112,113,114,115,116,117,119,121,123,124,125,126,127,128,129,130,131,133,138,156,164,168,207,215,224,245,252,260,263,264,268,269,291,327,367,395,398,409,434,455,458,464,476,477,491,494,504,506,509,512,519,565,570,594,600,601,613,615,616,631,640,643,652,657,668,670,672,679,707,712,714,718,731,736,737,738,741,742,749,750,755,764,770,783,792,803,806,818,819,843,847,853,863,864,867,871,873,874,875,879,881,885,887,888,895,899,900,904,906,913,914,917,918,924,927,932,945,947,953,954,955,960,963,967,973,974,978,979,987,990,992,996,999,1004,1007,1009,1011
1,11,12,13,19,20,22,23,24,25,26,27,28,63,67,70,71,72,75,78,80,84,85,107,111,112,118,132,133,136,147,202,206,217,247,253,257,260,266,270,308,312,313,316,325,334,336,352,365,399,400,414,440,441,463,466,472,491,496,540,572,573,584,587,593,597,598,599,600,601,602,603,604,605,606,611,614,618,619,620,621,623,624,627,631,632,636,637,640,641,645,646,647,648,649,660,661,662,664,675,678,681,683,686,687,697,698,699,701,704,710,711,712,713,715,716,717,719,721,723,726,728,732,734,735,736,737,738,741,748,751,754,763,766,767,769,770,771,772,773,775,776,779,783,784,785,786,787,788,790,795,796,798,800,805,809,810,816,817,818,820,822,823,827,830,840,844,846,847,849,864,870,872,881,883,886,902,905,906,909,944,949,1004,1005,1011
18,20,24,25,26,27,30,35,40,44,47,51,56,59,62,64,65,66,69,72,74,76,77,79,80,82,87,88,90,91,94,95,96,98,105,106,109,110,111,112,113,115,116,117,120,126,128,129,130,131,132,133,136,142,169,170,180,192,215,290,292,297,310,312,335,336,337,339,350,353,367,369,390,402,412,423,462,469,479,482,508,530,591,616,635,647,657,673,677,691,693,702,709,736,742,746,749,758,759,762,764,770,778,779,782,783,793,802,805,806,811,813,818,828,829,844,846,847,852,867,870,871,874,875,879,884,887,889,899,900,908,918,930,933,940,941,955,959,960,968,970,978,982,988,990,996,999,1004,1007,1012
1,4,56,58,60,62,64,67,68,69,71,74,82,86,97,99,111,119,123,126,146,147,149,153,160,163,177,199,203,227,228,229,252,253,272,275,278,288,307,310,321,322,326,348,363,458,477,478,481,482,593,604,605,606,607,618,621,623,624,635,636,638,641,644,645,649,660,672,674,678,680,684,688,690,692,698,699,700,703,704,705,706,717,718,720,721,723,758,759,760,761,762,763,764,765,767,769,779,781,782,783,788,790,791,792,796,798,799,802,804,805,806,808,811,812,815,821,823,830,831,836,842,844,845,846,847,849,852,854,858,859,861,862,863,865,870,871,878,879,880,888,889,899,900,902,903,906,909,912,914
5,9,27,37,38,42,43,44,45,52,67,78,81,93,95,98,101,102,108,113,117,119,120,121,124,127,129,131,133,162,376,391,407,417,475,477,546,562,573,574,592,602,612,630,640,659,667,670,730,736,786,790,792,816,858,861,894,919,924,927,931,933,943,949,954,974,977,978,980,997,1000,1003,1005,1008
3,10,12,13,18,19,20,30,31,41,52,73,78,80,90,106,120,127,128,129,130,196,197,201,211,229,237,308,316,317,319,320,323,332,333,336,340,349,350,356,357,358,361,362,366,381,387,391,393,394,395,397,405,406,407,414,415,427,428,439,440,445,453,454,456,460,498,499,500,504,505,506,507,516
4,16,19,24,31,34,35,41,46,47,48,49,51,57,58,64,65,66,68,71,72,74,75,77,80,82,84,85,86,87,88,90,91,94,96,100,105,106,107,109,111,112,114,115,116,117,120,126,128,130,132,134,139,168,179,197,205,233,255,262,268,269,273,283,295,297,298,310,311,312,313,353,367,409,422,431,442,455,466,469,504,509,520,544,556,559,564,580,582,603,609,610,620,631,645,646,650,655,675,691,693,714,738,746,747,756,762,770,778,779,780,793,796,802,806,811,828,846,848,849,859,867,871,873,879,881,884,887,889,895,898,899,900,904,908,918,923,928,934,937,941,945,950,953,955,964,969,975,981,982,984,987,989,990,996,999,1004,1007,1010,1012
14,16,18,22,31,34,39,42,53,69,70,74,78,79,80,82,104,110,111,114,116,117,119,120,139,148,149,155,160,180,200,201,208,213,239,240,242,254,260,263,286,293,296,312,314,323,325,332,333,347,357,367,369,380,381,382,383,385,399,401,403,407,415,418,421,423,424,425,426,428,432,433,436,441,442,454,455,456,458,462,463,467,468,469,470,476,478,481,482,487,491,492,493,494,495,500,501,503,504,507,509,510,511,512,513,518,521,523,526,530,531,532,535,536,540,541,542,549,556,557,558,572,574,577,578,579,580,582,587,590,596,607,610,611,614,616,620,622,623,629,630,631,636,638,639,644,646,647,651,663,664,665,666,667,671,673,681,690,691,692
18,27,29,42,44,50,69,76,84,86,95,97,98,102,113,117,119,121,124,125,127,128,129,131,132,133,134,151,244,391,396,436,481,565,582,584,592,630,654,726,734,737,739,742,751,757,792,800,810,841,870,921,923,932,938,949,959,963,968,977,978,984,991,997,1000,1008
1,2,18,20,27,28,29,30,31,39,184,185,190,201,246,247,274,312,333,345,360,361,372,376,419,426,427,429,430,432,434,435,437,455,458,459,461,462,466,481,482,485,486,487,492,496,542,543,545,558,562,563,564,589,594,599,632,633,634,638,640,641,643,646,657,678
4,14,17,19,22,24,30,31,32,35,47,48,56,58,62,64,65,66,68,69,70,75,76,77,80,82,83,86,87,88,90,91,94,96,100,103,105,106,107,108,109,110,111,112,114,115,116,117,122,126,128,130,132,134,136,149,181,182,203,233,253,262,268,269,278,281,297,310,311,312,313,335,342,349,350,353,356,358,367,381,388,401,420,422,425,427,431,455,470,484,504,508,509,510,511,524,542,559,564,569,589,590,603,613,616,631,644,645,650,655,668,686,690,691,693,699,700,702,731,735,742,745,746,749,756,770,778,783,785,787,793,806,811,818,828,837,844,846,847,856,867,871,874,879,881,887,888,889,893,895,898,899,900,904,907,908,918,935,941,945,948,950,952,953,960,973,975,986,989,990,996,999,1004,1007,1012
4,7,8,9,17,18,19,23,26,36,46,57,60,62,64,66,95,106,108,131,133,135,136,144,157,163,166,167,180,193,234,235,238,245,247,248,284,291,295,302,306,313,317,336,339,345,351,360,361,365,367,372,387,388,391,392,394,396,398,399,400,402,403,404,405,406,407,408,409,412,421,422,423,424,427,428,429,434,437,439,441,443,444,452,453,454,458,461,462,468,470,473,475,476,477,479,481,485,487,500,505,506,508,512,515,522,526,527,528,529,538,545,558,560,561,563,564,566,568,569,575,580,583,584,586,587,590,591,593,595,598,606,612,615,620,621,625,634,635,636,640,651,657,659,660,664,673,678,679,684,685,689,690,696,697,703,709,710,712,714,715,716,717,718,720,722,723,725,726,733,736,745,747,753,754
3,6,15,20,24,28,31,32,34,39,41,46,53,54,57,62,66,69,72,74,75,80,81,82,84,90,91,92,93,95,96,97,98,103,105,106,107,110,111,113,114,115,117,120,124,126,128,129,131,132,133,141,146,147,156,201,231,233,237,268,335,337,357,373,387,392,402,419,423,453,489,523,573,584,600,608,637,644,657,669,679,688,714,731,738,748,758,766,768,782,788,844,848,869,873,874,884,887,897,902,909,914,923,927,928,930,932,953,955,965,966,969,970,975,980,981,982,984,987,988,989,1003,1004,1007
73,74,82,83,88,89,91,94,98,101,159,163,190,194,198,235,240,246,271,289,298,300,301,303,325,326,347,348,351,353,370,371,372,395,406,416,417,420,433,435,445,466,471,510,511,554,555,560,562,598,604,606,607,614,615,616,619,620,621,622,623,624,630,637,640,641,644,646,656,660,661,664,667,668,669,671,674,675,676,677,681,682,693,699,716,717,721,722,724,725,726,727,731,734,738,740,741,742,749,766,767,839,844,845,847,849,850,854,870,875,877,883,885,886,887,888,893,905,919,920,924,925,935,936
0,1,5,9,11,14,25,29,42,43,45,47,50,52,54,55,60,61,62,63,64,65,66,67,68,69,70,71,73,76,78,80,84,85,90,91,92,93,94,95,97,98,99,101,102,105,107,108,111,112,113,117,119,121,124,125,127,128,129,130,131,132,133,134,177,192,203,224,264,280,282,309,315,328,338,345,377,407,410,441,463,468,474,499,503,511,518,544,547,560,574,576,580,587,592,599,614,615,641,649,668,676,687,691,702,719,734,737,743,744,752,755,756,761,762,770,774,779,787,793,797,798,803,804,806,810,813,818,819,841,850,853,858,874,879,887,888,891,896,898,904,906,908,911,913,921,924,936,943,944,947,950,952,956,959,967,974,977,978,981,990,991,994,995,996,997,1003,1009
2,5,6,7,13,14,79,80,82,83,84,91,92,95,96,106,114,115,117,122,126,132,135,141,142,144,146,150,155,160,171,185,187,188,195,197,198,201,213,216,220,225,244,253,265,272,276,278,285,287,288,292,297,313,322,335,412,413,415,416,438,443,519,525,526,527,529,532,533,534,535,536,538,539,546,548,551,554,555,556,557,558,559,561,562,563,565,566,568,581,587,589,590,592,593,594,615,616,618,619,620,621,625,628,629,630,632,634,636,637,642,644,651,652,655,660,662,663,666,668,688,689,703,704,705,707,708,710,715,716,717,720,721,723,725,727,733,734,735,737,738,739,744,745,751,753,758,761,763,764,765,770,772,775,776,777,780,782,784,785,786,788,792,793,802,805,806,820
3,4,5,9,15,16,17,24,31,35,36,40,41,49,55,57,58,60,61,62,65,66,67,69,71,72,73,74,76,77,80,82,84,85,87,88,90,91,92,93,94,95,96,97,103,105,106,107,109,110,111,112,113,114,115,116,117,119,121,122,124,125,126,127,128,129,130,131,132,133,139,168,181,212,250,252,261,264,267,269,280,295,305,317,319,327,350,359,367,370,388,398,401,402,422,423,427,431,458,462,496,503,506,509,510,514,532,534,538,559,561,569,570,571,575,580,583,584,589,601,602,603,604,615,630,643,647,652,660,666,667,668,670,675,679,682,691,693,702,706,712,723,725,737,742,743,748,750,755,760,761,763,766,780,786,792,793,800,803,806,811,818,819,827,828,848,857,859,861,868,870,871,874,885,888,899,900,911,913,917,923,937,945,947,953,954,955,959,963,969,973,974,979,982,983,987,988,989,992,996,999,1003,1004,1006,1007,1008
2,4,5,27,31,33,36,37,38,45,49,50,51,52,56,57,59,77,86,88,89,113,161,170,171,178,188,191,192,205,209,264,265,266,273,287,290,302,303,309,313,314,315,353,357,370,378,379,381,384,391,393,397,431,438,452,455,469,475,493,509,512,527,562,564,570,579,594,614,621,622,623,633,635,637,640,641,642,643,646,647,648,649,650,699,710,713,714,715,716,722,735,737,740,741,742,744,745,749,750,751,752,756,757,758,763,764,766,767,773,783,784,786,790,791,792,793,798,800,821,822,823,824,830,832,833,834,839,840,841,851,852,853,854,857,859,860,861,862,866,867,869,871,876,878,879,880,881,883,884,885,886,887,888,891,892,893,895,899,900,901,903,904,905,906,910,911,915,916,917,918,919,920,922,924,929,930,932,936,937,940,941,943,948,951,952,955,957,970,991,998,1004,1008,1011,1028,1033,1034,1036,1038,1039,1046,1049,1051,1052,1107,1108
3,14,15,18,23,25,26,28,34,39,41,46,48,49,53,55,57,58,65,69,74,75,76,77,79,83,84,90,91,95,96,98,101,105,106,107,109,110,111,114,115,116,117,122,124,126,127,128,129,131,132,133,147,156,160,168,201,205,214,228,254,303,311,343,354,370,433,466,472,480,499,514,515,534,572,589,597,600,614,635,646,666,669,692,743,757,760,761,766,776,782,785,796,843,848,854,869,870,873,878,886,888,897,902,907,914,923,928,937,945,948,953,954,955,968,970,974,975,984,987,992,994,997,1004,1005,1006
43,44,53,54,57,73,79,81,90,108,109,121,140,141,161,165,169,172,177,180,193,200,207,216,217,218,226,228,255,264,279,280,282,293,316,378,379,380,381,382,475,477,480,481,485,525,533,541,543,552,572,575,576,577,578,581,582,584,587,588,589,590,591,596,597,598,599,600,601,620,622,623,624,644,645,646,647,650,666,669,670,671,676,677,678,682,683,686,689,691,692,693,695,696,700,701,719,721,730,732,733,737,744,752,762,802,812,819,820,828,834,849,850,863,864,865,872,882,884,902,903,945,946,959,960,961
9,17,24,25,27,30,34,35,37,38,39,42,44,47,48,51,54,56,57,62,65,66,69,70,71,75,78,80,82,87,88,90,91,96,98,101,102,105,106,107,108,110,111,113,114,115,116,119,120,122,124,125,126,127,128,130,131,132,133,136,180,203,224,244,258,273,292,323,338,384,385,392,412,423,455,459,470,477,498,510,512,530,547,564,569,576,587,591,602,607,613,614,616,617,623,644,663,668,686,687,689,702,705,707,731,735,739,741,745,757,770,783,790,802,806,816,817,818,825,828,843,844,845,847,850,859,866,870,871,874,883,887,889,899,900,907,908,917,919,923,930,932,933,940,945,949,953,958,959,965,967,969,977,978,982,984,987,994,999,1004,1012
1,2,4,11,13,14,15,19,24,26,27,29,45,46,48,54,57,60,63,65,68,69,71,72,76,77,79,81,104,120,131,134,135,136,140,144,170,199,232,245,260,263,284,304,319,328,329,331,349,350,389,429,432,507,509,511,564,601,602,603,607,608,610,613,615,631,632,635,637,638,643,644,646,648,649,651,654,655,656,657,671,672,673,675,677,678,679,680,681,682,683,684,685,686,687,688,692,693,695,696,697,698,699,720,721,722,723,757,758,759,761,762,764,765,766,772,778,780,782,783,784,786,787,788,789,797,798,804,811,812,814,816,818,819,824,829,830,831,832,833,834,835,844,846,848,855,858,931,937,945,954,968,969,971,975,981,982,983,987,1018,1020
0,1,3,4,5,9,12,19,35,36,51,52,55,60,61,63,64,66,67,72,73,74,75,77,80,81,82,85,87,88,91,93,94,95,96,97,98,99,101,105,106,112,113,115,119,120,121,124,125,126,127,128,129,130,131,132,133,134,175,194,210,231,258,260,263,264,267,280,285,355,385,398,412,416,418,430,431,455,486,497,506,532,557,562,564,565,580,582,596,601,604,610,615,630,643,652,663,664,665,670,676,684,706,723,730,731,743,752,755,761,766,769,774,789,802,819,821,827,841,853,862,867,885,887,891,895,900,904,905,906,909,913,914,921,927,929,932,941,943,944,947,950,953,954,963,967,974,977,978,979,985,987,991,996,997,999,1004,1007,1008,1009,1012
1,2,20,21,25,32,33,34,49,52,65,66,84,90,116,128,129,130,151,152,196,201,209,210,212,215,333,337,342,349,354,360,372,383,384,431,444,445,479,481,482,486,501,504,527,529,553,554,578,597,619,620,624,625,627,628,630,633,635,637,639,640,641,643,650,654,655,657,658,659,662,674,675,681,693,700,701,702,705,707,714,716,717,719,720,723,724,725,726,731,732,733,734,740,743,746,752,754,755,765,769,771,776,780,782,783,786,789,792,797,807,808,809,811,816,817,820,824,827,828,830,831,832,833,834,835,838,839,840,841,842,845,852,856,857,862,866,868,869,872,882,884,886,889,900,904,905,906,907,913,929,939,940,952,956,971,972,1090,1094,1095,1096
3,5,12,23,25,26,34,41,46,49,53,57,62,66,69,71,72,73,74,75,76,80,81,82,83,84,91,93,95,96,97,103,105,106,107,110,115,116,117,119,122,126,129,131,132,133,147,168,205,231,267,283,291,319,372,373,402,409,419,427,441,453,462,470,534,537,547,575,578,608,617,621,628,633,669,687,697,714,728,776,813,836,848,852,862,873,875,902,907,928,930,937,943,948,953,954,955,970,973,975,983,987,1000,1004,1005,1008
6,7,10,63,87,89,133,139,144,146,162,165,173,174,186,235,241,243,247,248,249,251,253,254,255,262,278,283,296,305,306,312,319,325,334,362,390,400,425,434,438,487,542,566,567,577,578,579,581,584,590,603,606,607,611,622,625,627,629,635,636,637,641,643,644,645,669,670,710,712,721,724,733,734,735,759,763,764,766,767,779,795,797,800,802,842,843,858,863,864,879,881,887,888,894,896,900,925,927,928,930,945,983,989,990,999
3,31,33,35,36,52,54,56,64,66,71,72,76,77,82,91,96,98,103,106,109,110,111,113,114,115,116,117,121,128,131,132,133,134,164,203,239,342,417,421,532,617,650,669,679,746,824,841,848,856,857,897,914,927,945,948,981,982,997,999,1004,1007
8,11,12,13,14,15,16,21,23,24,25,26,27,28,31,45,46,48,52,87,89,93,95,97,100,106,107,115,134,142,156,158,172,173,177,178,180,181,182,183,184,185,186,187,188,189,190,194,196,197,198,202,210,211,242,243,249,250,251,252,256,259
0,1,5,11,13,18,21,27,36,37,38,42,43,44,50,52,54,55,60,61,67,70,71,73,78,81,84,89,93,95,97,98,99,101,102,104,108,113,115,119,124,125,129,131,133,150,162,172,184,185,187,193,194,196,206,248,252,270,279,294,299,316,321,345,376,382,393,429,430,435,441,459,464,474,475,496,507,518,519,532,538,540,547,555,561,562,579,582,593,596,599,601,605,606,611,612,618,622,626,630,633,640,662,670,695,698,718,719,721,736,739,743,750,767,788,789,794,804,805,810,816,820,823,827,850,853,855,858,861,866,883,885,890,891,892,896,917,920,926,932,933,936,943,952,954,956,961,963,968,977,978,985,991,997,1000,1003,1005,1009
1,2,11,26,27,60,74,75,83,95,97,101,103,106,107,116,119,123,125,128,140,158,176,181,182,187,194,206,237,257,258,287,305,312,319,320,337,339,341,343,362,371,459,499,529,530,531,533,534,544,546,547,548,558,559,562,563,566,567,569,571,577,582,583,585,588,589,591,592,593,595,596,597,605,610,613,628,630,631,632,633,636,637,640,642,643,669,677,688,689,690,691,692,695,696,698,699,700,704,705,707,726,727,730,732,737,741,743,744,745,746,747,749,751,752,758,761,762,763,765,777,785,786,789,790,792,795,799,804,807,808,810,811,816,817,818,819,820,822,823,826,827,831,836,838,841,845,846,857,858,873,874,876,890,900,901,943,954
0,5,11,13,17,18,20,21,24,31,33,36,38,41,42,44,45,50,53,60,61,62,64,66,67,68,69,72,73,74,78,80,81,84,86,87,89,90,93,94,95,98,99,102,105,108,112,115,119,120,121,124,125,126,127,128,129,130,131,133,134,147,185,218,226,244,295,299,307,313,315,345,355,358,371,374,382,438,441,442,444,450,451,464,476,477,533,570,582,592,599,612,641,676,677,681,682,693,695,699,715,718,738,739,744,751,753,755,756,762,765,787,789,791,804,819,827,844,855,858,862,876,883,885,890,895,897,906,913,921,949,950,954,955,956,957,968,978,979,980,985,988,990,993,996,1000,1005,1009,1012
2,6,8,11,15,17,18,19,20,22,24,32,37,39,40,41,123,129,137,141,156,157,165,167,171,277,279,280,284,293,294,297,298,372,379,380,385,390,392,406,407,416,431,437,438,450,472,475,478,482,483,488,501,502,503,512,530,539,551,557,561,567,568,571,572,573,574,575,576,577,581,582,583,587,588,589,590,611,612,613,615,616,642,643,644,646,647,651,654,666,667,669,670,675,676,699,702,703,706,708,710,711,712,717,721,722,728,729,731,732,799,800,802,807,809,810,814,817,818,819,820,822,824,826,830,833,835,840,843,844,845,849,853,862,865,867,869,877,884,887,894,895,900,909,917,925,927,939,948
6,14,17,20,26,30,31,34,35,36,37,46,48,49,54,56,64,66,69,75,76,80,82,83,86,88,90,96,98,99,102,105,106,107,111,112,113,114,115,116,117,121,126,127,128,130,132,134,136,158,166,170,182,188,241,269,272,288,290,295,308,310,324,332,335,336,350,375,378,400,409,412,422,425,460,480,482,493,508,509,522,534,564,569,589,598,603,621,627,631,644,650,655,664,668,686,690,691,693,700,731,735,742,745,746,747,759,782,787,800,811,825,828,846,847,851,863,866,870,871,873,874,875,879,888,889,895,896,899,906,907,923,928,945,948,950,959,960,973,975,978,981,984,992,994,999,1004,1012
1,4,11,13,29,43,47,50,68,70,78,80,124,128,129,131,132,156,177,180,184,194,197,199,200,204,210,211,213,215,219,256,270,276,278,282,284,286,289,300,320,322,325,326,356,358,398,400,401,402,403,409,410,411,412,413,415,416,419,420,421,424,425,426,427,428,429,430,431,432,433,454,456,484,485,486,487,488,489,491,492,494,495,497,499,501,502,504,505,506,508,509,510,511,512,520,521,522,524,525,526,548,562,573,584,585,596,599,600,602,607,608,609,610,611,612,613,620,622,628,631,634,638,639,645,655,656,657,658,659,660,664,667,668,670,671,672,673,674,675,677,681,702,703,704,720,728,729
3,8,10,15,24,28,32,33,34,35,36,37,41,42,44,48,49,53,54,57,58,59,62,63,64,66,69,72,74,75,76,80,83,84,90,92,94,96,97,102,105,107,109,110,111,112,113,114,115,116,117,120,122,125,126,129,130,131,132,133,147,152,171,190,226,268,286,305,310,332,335,357,365,374,378,416,423,439,443,473,523,524,548,564,565,588,592,637,645,673,693,708,713,733,768,784,787,793,808,810,825,843,851,866,870,871,873,888,889,893,895,902,907,915,923,928,930,935,937,960,964,970,975,982,984,986,987,994,999,1005,1006
45,47,56,71,75,76,106,108,114,140,141,167,170,171,173,184,200,210,213,219,222,223,240,242,244,245,246,247,250,252,254,255,267,268,284,285,286,294,295,302,335,412,422,426,442,448,449,457,459,466,475,476,479,480,486,487,488,494,509,512,513,521,522,524,525,526,527,539,540,541,542,543,544,545,546,547,548,558,560,561,564,582,583,584,585,586,587,590,592,596,597,602,603,604,606,608,613,614,615,616,623,624,627,646,648,650,655,665,668,669,670,672,681,692,693,694,695,698,701,702,703,706,741,753,761,771,772,803,829,830,838
0,1,5,9,11,12,18,21,27,36,38,43,44,45,50,52,54,55,61,67,70,71,78,81,84,86,89,93,95,97,98,99,101,102,108,113,119,124,125,128,129,131,133,137,151,172,173,194,202,206,225,231,242,248,263,283,285,316,321,327,355,376,382,410,416,424,429,430,438,454,463,474,476,496,497,506,518,519,547,553,555,557,562,566,585,601,605,606,611,615,622,634,635,648,667,670,684,698,706,719,721,723,761,767,769,771,774,788,789,790,792,794,804,813,817,822,825,827,834,852,853,855,858,862,863,864,883,885,890,913,927,931,932,943,947,949,952,954,957,961,963,967,968,977,978,997,1003,1005,1009
3,4,25,36,41,77,98,120,122,125,145,146,149,150,169,179,186,188,189,192,216,287,288,300,310,311,312,324,332,337,368,371,380,403,412,421,425,445,448,449,680,686,712,713,724,731,735,736,737,740,741,742,743,744,745,746,747,748,760,761,766,767,771,772,773,774,775,776,777,778,789,791,792,794,796,797,799,800,803,809,818,819,827,828,829,830,831,848,849,850,851,852,853,855,863,864,869,872,873,874,876,878,879,883,894,902,903,908,914,933,936,937,945,951,952,953,954,974,976,977,980,985,987,989,991,996,1004,1006,1018,1019,1028,1029,1032,1033,1035,1037,1043,1044,1051,1053,1055,1057,1065,1067,1076,1081,1087,1305,1308
1,16,19,20,24,28,29,30,31,34,35,36,37,38,42,44,47,48,49,50,51,54,56,57,62,65,66,69,75,76,80,82,84,85,86,87,90,91,96,102,103,105,106,107,108,109,111,112,113,114,115,116,117,121,122,125,126,129,132,136,148,171,230,233,269,277,281,297,300,305,307,357,370,378,412,415,419,422,453,466,509,510,523,564,608,627,631,637,644,673,686,690,693,702,708,712,727,735,741,742,747,752,756,759,780,787,796,800,802,810,811,825,830,842,844,846,851,866,871,874,883,887,889,898,907,908,923,928,935,948,949,954,957,975,981,984,992,999,1001,1004,1006,1007
3,4,5,6,8,10,12,14,17,18,25,26,80,84,104,105,110,121,136,138,148,152,154,174,225,226,227,234,237,239,241,243,244,249,250,253,261,262,263,275,280,281,292,294,297,298,300,301,303,306,308,317,338,341,342,344,345,346,354,356,357,358,360,361,364,365,366,367,374,375,377,384,386,387,388,389,393,394,395,398,400,401,422,424,425,426,427,436,441,442,445,447,448,449,467,471,472,473,475,477,483,486,487,488,489,490,491,493,494,499,503,505,506,508,509,512,563,566,569,571,572,573,574,575,576,578,580,582,584,585,586,587,589,590,597,598,599,605,610,617,620,621
18,31,38,42,43,44,55,60,63,73,74,78,81,84,86,91,93,95,97,98,99,101,102,108,113,119,121,124,125,127,129,131,133,151,161,162,194,195,325,382,407,410,441,444,498,519,565,573,574,592,593,612,634,639,662,706,727,730,751,771,790,792,833,841,858,883,905,921,931,944,949,963,968,974,978,991,997,1000,1005,1008,1009
10,11,166,168,172,179,180,181,191,195,196,200,201,206,209,210,218,226,230,269,272,324,325,334,347,378,383,399,428,433,461,480,532,535,536,537,541,542,543,545,546,547,549,550,552,555,564,566,568,569,570,600,601,602,603,604,606,607,608,609,632,642,644,646,649,658,660,661,663,670,677,679,689,692,702,711,740,753,766,797,800
1,97,99,104,125,127,755,774,839,956,976,1009
1,2,23,37,38,40,41,42,43,48,62,78
1,2,5,11,18,19,27,29,31,33,43,44,45,47,52,55,60,63,64,66,67,68,69,73,76,78,80,83,84,85,86,89,90,91,93,94,95,97,98,99,101,102,105,108,111,112,113,116,117,119,121,124,125,127,129,130,131,132,133,134,185,194,203,206,243,244,248,264,280,282,314,334,349,361,371,378,386,391,398,401,410,436,444,450,469,474,496,503,538,551,565,568,574,580,584,592,596,609,622,629,652,655,660,676,691,698,700,702,719,723,726,731,734,736,737,743,744,746,750,751,753,755,756,757,762,774,779,789,793,804,813,816,819,820,825,827,833,841,849,855,864,883,888,898,906,908,913,921,924,929,931,933,936,940,943,944,949,950,954,956,958,959,963,967,968,974,977,978,991,996,997,1005,1008,1009
8,21,24,34,40,42,44,49,50,52,58,60,62,67,71,77,96,144,159,164,169,172,173,176,179,182,186,187,217,223,229,233,234,236,245,278,290,304,323,358,359,364,373,379,401,406,438,440,459,476,489,498,527,563,587,588,595,613,624,632,633,636,638,640,641,642,643,645,648,659,662,665,666,667,668,669,670,672,673,674,675,676,677,679,680,681,682,683,687,698,700,703,706,707,712,717,719,725,728,749,751,752,753,755,756,761,762,763,764,766,767,770,772,776,786,790,791,792,793,794,795,796,798,799,801,804,805,810,811,826,829,831,835,837,838,840,842,843,847,850,854,858,860,861,865,868,873,878,881,883,885,886,891,892,894,928,929,937,938,943,949,952,954,957,963,965,966,971,978,1005,1019,1028,1039,1069
0,5,6,15,18,20,27,36,37,41,42,43,44,46,49,52,56,58,59,60,62,63,67,69,70,71,73,74,75,78,79,84,86,87,88,90,91,93,94,95,98,99,101,102,104,105,107,108,110,111,112,113,119,120,122,124,125,128,129,130,131,133,134,159,192,194,235,236,244,248,262,295,305,315,376,382,391,477,505,518,574,577,582,587,592,593,595,616,626,633,634,719,738,739,758,772,789,791,805,816,817,827,844,852,855,885,892,913,928,930,931,940,942,943,949,950,952,954,961,966,968,975,978,980,988,991,996,997,1005,1009,1012
1,4,6,25,32,33,39,44,45,46,51,55,56,57,59,62,63,65,67,73,74,75,78,80,83,84,87,109,112,145,146,147,148,149,152,153,154,159,164,190,220,225,227,239,240,243,244,250,251,253,254,258,263,267,269,278,290,292,308,317,365,467,468,469,470,471,474,476,477,479,480,481,482,486,487,488,491,493,510,511,512,513,514,517,518,519,520,521,523,524,526,527,528,530,531,532,533,538,542,543,544,545,546,559,563,564,582,585,587,588,589,591,593,594,595,596,597,603,605,606,612,613,620,621,623,625,628,650,656,661,669
0,1,5,9,14,18,21,27,28,30,36,37,40,42,43,44,50,54,55,59,60,63,67,73,74,78,79,80,84,89,91,92,93,94,95,96,97,98,99,101,102,104,105,108,112,113,115,118,119,121,125,127,129,130,131,133,134,150,154,177,235,352,364,376,427,438,475,477,568,579,593,602,625,627,639,641,649,713,718,736,739,743,750,753,756,774,791,794,805,808,822,827,829,841,852,866,878,883,885,887,890,895,903,906,913,919,920,925,931,933,938,940,944,947,950,951,954,956,962,967,968,974,978,990,991,993,995,996,997,1000,1005,1008,1009,1012
2,5,8,9,10,15,16,31,35,36,37,40,44,47,48,50,67,68,70,87,94,100,102,105,107,160,169,172,173,175,176,195,203,207,210,211,213,236,254,255,260,271,273,280,289,298,300,301,399,416,422,423,435,441,477,523,525,527,530,534,535,543,545,546,547,558,560,562,570,585,589,590,591,592,594,601,602,609,610,611,612,614,615,619,620,621,622,674,676,677,679,682,683,685,694,695,697,699,701,702,715,716,722,724,726,779,783,784,785,787,798,812,818,819,821,826,829,839,843,845,849,850,863,865,866,871,874,877,887,888,892,981,989,995
18,27,43,44,78,89,95,97,98,101,102,108,119,121,124,125,127,129,131,133,244,407,498,499,574,587,813,826,841,852,858,919,921,924,933,940,954,968,977,978,991,997,1005
10,17,21,25,50,51,52,53,96,97,100,101,105,129,133,148,164,194,239,351,352,367,369,370,377,378,382,383,391,395,396,398,400,406,410,411,428,438,442,449,453,489,496
0,5,9,11,12,18,21,27,29,36,37,38,42,44,50,52,54,55,60,61,67,70,71,73,78,81,84,86,89,93,98,99,101,102,104,108,113,119,121,124,125,129,131,133,151,172,173,187,193,202,244,248,261,279,296,299,316,325,328,329,345,346,347,355,376,382,393,408,418,463,464,474,475,476,499,512,518,519,526,540,547,582,587,593,595,599,604,606,612,615,617,622,630,634,640,653,658,667,670,680,698,707,710,730,752,754,757,771,789,790,791,792,804,805,810,825,827,833,834,850,853,861,862,864,885,896,911,916,917,927,931,933,936,949,952,956,957,961,967,968,977,978,985,991,997,1000,1003,1005,1009
1,30,35,40,41,45,54,81,82,89,90,161,180,189,191,196,201,202,204,205,215,283,316,317,320,352,371,377,378,395,401,408,420,431,432,455,457,468,469,587,589,600,607,622,627,628,631,638,639,643,646,647,648,649,676,681,690,691,692,698,700,701,703,705,706,707,708,709,713,714,715,716,717,718,722,749,751,752,755,777,778,793,796,798,799,801,802,806,810,811,813,817,822,826,827,828,835,836,840,841,844,878,939,956,957,961,991,994,996,999,1003,1008,1012,1013,1021,1022,1031,1032,1037,1040,1046,1052,1078,1081,1082,1084,1096,1098,1106,1107,1109,1114,1115,1119,1120,1124,1126,1132,1133,1137,1146,1147,1148,1149,1151,1157,1164,1167,1170
3,4,10,13,16,18,24,25,26,31,34,35,41,46,47,48,49,53,54,58,62,63,65,66,69,71,72,75,77,82,85,87,88,90,91,92,95,96,98,100,105,106,107,109,110,111,112,114,115,116,117,120,122,125,126,128,130,131,132,152,161,164,169,180,188,221,234,258,272,282,290,291,312,332,353,367,370,401,422,426,427,444,455,465,469,470,484,501,504,508,509,510,564,569,580,589,606,613,614,616,631,645,655,665,668,675,683,690,700,702,731,745,746,763,766,772,778,779,793,806,811,818,828,844,846,867,874,875,879,888,889,898,899,900,902,905,907,908,914,934,960,965,968,969,978,980,982,987,988,992,994,997,999,1001,1007,1010
8,10,11,12,14,15,16,18,19,23,24,32,38,39,64,66,68,89,92,95,96,98,101,112,126,127,129,133,136,140,143,144,163,180,182,183,186,188,194,239,259,262,288,289,293,305,306,311,319,322,326,328,329,330,332,334,336,341,406,407,409,410,413,417,418,421,423,424,425,426,427,429,430,431,432,433,434,435,437,443,444,445,446,447,451,454,455,456,458,459,460,461,462,464,465,469,470,473,474,475,476,477,479,480,484,485,486,490,493,494,497,498,501,502,505,506,507,510,515,516,519,521,522,523,532,534,538,539,540,546,548,562,565,566,587,588,608,611,616,618,631,632,633,638,641,642,645,646,647,648,652,653,661,662,664,709
6,15,28,30,31,39,41,46,47,56,58,59,62,64,65,66,69,74,77,79,83,85,87,88,90,91,96,100,105,107,108,109,111,112,114,115,116,117,120,122,126,128,129,130,132,135,160,247,266,269,272,293,310,312,314,350,367,422,455,461,473,509,510,548,558,564,569,609,616,631,650,653,690,691,702,731,759,763,772,781,782,793,811,845,846,847,849,867,888,889,893,895,899,900,918,922,941,966,975,981,986,1006,1010
1,2,5,7,26,29,30,31,32,34,69,72,73,77,78,86,87,91,113,115,124,125,139,144,166,172,175,192,203,204,205,207,215,216,220,222,223,235,237,240,243,244,246,247,256,257,258,259,261,263,264,265,266,267,268,271,273,278,279,281,282,290,293,296,298,299,302,303,304,323,325,326,329,332,333,334,335,344,359,360,361,367,375,378,379,380,381,383,384,388,391,392,393,395,396,397,398,399,400,404,405,408,425
22,23,30,46,53,56,58,68,72,76,77,79,83,87,90,93,96,103,109,110,111,115,116,117,120,122,123,128,132,134,146,147,214,223,273,372,381,394,425,514,537,542,578,628,646,666,671,679,699,714,763,781,800,824,847,856,870,872,893,902,930,970,982,986,1011
1,40,49,62,81,93,101,102,123,124,189,192,442,445,446,447,451,516,518,522,686,691,694,700,701,713,714,715,724,725,730,731,732,745,747,749,750,760,761,762,763,764,771,780,781,812,813,826,827,829,848,850,854,856,860,866,871,882,1033,1040,1041,1043,1096,1183,1184
1,11,18,19,25,27,43,44,55,60,63,66,67,70,73,76,78,80,81,84,85,90,93,94,95,97,98,99,101,102,105,108,111,112,113,117,119,121,124,125,127,129,131,132,133,184,203,224,235,264,265,280,315,338,345,358,395,504,519,560,572,574,580,587,592,599,604,614,643,652,687,691,702,704,743,752,756,798,805,816,823,833,841,852,853,859,862,885,906,913,924,927,929,933,936,940,943,944,952,956,968,970,974,978,990,991,996,997,1005,1008,1009
1,2,10,12,28,29,31,32,36,39,44,50,52,53,54,55,60,61,63,65,66,67,68,69,73,83,267,273,274,277,280,282,283,285,287,288,303,320,331,333,346,348,423,426,569,570,571,573,574,575,576,577,583,585,586,587,588,589,590,591,592,593,594,596,600,601,602,609,612,615,618,619,620,621,623,626,627,628,630,631,632,634,637,646,649,650,651,652,653,654,655,820,821,822,823,824,825,828,829,832,840,841,844,850,851,852,853,866,868,882,885
0,16,17,19,22,23,31,35,47,48,51,53,54,56,57,58,59,65,66,69,74,75,76,77,79,80,82,85,87,88,90,96,100,105,106,107,108,111,112,115,116,117,120,126,128,132,142,200,203,230,234,242,245,297,312,342,350,353,358,377,409,412,415,423,443,455,461,486,557,569,570,571,576,580,608,610,616,619,620,621,631,639,644,647,651,655,668,691,700,702,704,709,716,728,740,763,772,782,793,811,818,837,838,849,856,859,867,870,871,874,883,886,888,889,898,899,900,906,918,923,935,945,955,959,960,965,970,981,982,984,993,999,1001,1007,1010
1,27,30,36,37,38,45,59,63,64,74,76,77,78,82,90,91,92,100,103,132,134,158,160,161,162,167,221,226,230,246,247,362,369,372,373,375,402,403,408,414,442,446,454,455,489,492,498,499,500,501,502,503,504,505,507,508,509,510,511,512,515,520,522,527,530,537,539,541,543,544,545,550,551,554,566,568,570,571,572,575,579,580,583,584,585,587,590,591,593,595,596,597,598,619,620,623,627,631,634,644,645,660,664,665,666,667,668,671,672,673,674,675,679,681,682,683,684,688,690,691,693,708,713,715,716,720,741,747,751,752,766,802,807,922
3,6,16,20,25,34,39,41,44,46,54,58,62,66,69,72,74,75,76,90,92,97,106,107,108,111,114,115,117,122,126,132,181,224,250,358,359,462,485,495,637,644,646,650,690,740,746,768,772,787,796,800,812,845,869,873,907,922,923,928,934,942,948,949,953,955,959,969,970,981,982,987,989,994,995,1004,1006
2,5,11,14,16,30,34,37,41,59,60,66,75,113,123,124,127,128,137,200,202,203,217,220,221,228,292,306,312,313,341,350,351,353,355,374,392,396,421,422,426,427,432,433,445,446,458,460,463,466,467,468,478,481,482,486,487,488,492,493,494,495,504,508,513,516,517,562,563,566,568,574,577,579,581,593,608
0,5,9,11,12,14,19,21,22,29,36,37,41,44,51,52,55,56,57,60,61,62,64,66,67,69,73,80,81,84,85,87,91,93,94,97,98,99,101,102,105,108,110,113,119,124,127,129,131,133,134,149,151,162,172,194,196,197,222,231,235,243,244,252,260,261,263,264,285,327,345,355,356,376,386,402,416,418,430,436,458,474,475,494,506,512,530,538,561,562,580,591,599,600,601,608,610,615,616,622,630,643,648,658,663,667,670,672,691,698,706,718,723,730,750,761,762,769,786,792,804,809,813,826,827,853,863,864,866,885,887,894,904,913,916,918,927,932,933,936,941,947,949,950,954,963,967,974,977,978,979,985,996,1000,1001,1003,1009
1,2,8,108,115,119,120,123,126,127,129,130,131,134,138,144,147,148,152,165,220,222,223,224,264,265,295,299,301,307,311,320,322,332,336,372,384,387,405,406,407,408,409,420,437,497,499,517,531,543,546,548,562,563,566,567,568,569,570,572,573,574,575,577,579,584,586,591,593,595,598,601,603,604,605,606,607,616,619,620,632,641,643,644,654,656,657,658,670,672,673,674,677,678,684,686,687,713,714,715,716,719,721,722,724,725,731,732,733,753,754,756,759,760,763,764,766,767,771,774,776,778,779,780,781,785,786,790,791,793,794,795,796,806,809,812,813,817,819,888,891,900,901,904,911,922,925,926,927,928,960,969,972,973,975,977,980
0,1,5,11,12,18,21,27,29,31,36,37,38,42,43,44,52,55,60,64,66,67,73,76,78,80,84,85,86,89,90,93,94,95,96,97,98,99,101,102,108,112,113,119,120,121,124,125,127,129,131,133,134,299,321,325,347,363,418,444,458,467,474,476,496,561,573,582,592,611,612,622,641,643,653,662,670,698,726,736,756,757,762,774,789,815,816,818,823,826,827,838,858,863,864,867,883,887,892,906,913,919,931,932,933,936,940,949,950,956,963,967,968,974,978,980,991,993,996,997,1000,1001,1005,1009
28,29,31,39,40,54,55,68,75,76,78,83,86,87,94,110,111,114,120,124,126,130,133,134,136,146,151,153,154,159,160,162,165,173,175,183,202,215,221,229,238,239,260,262,265,269,346,357,359,382,404,447,450,467,468,479,480,481,483,484,487,489,490,491,495,496,497,501,504,506,508,510,511,512,513,514,515,516,518,520,522,523,527,528,537,538,539,540,541,542,544,545,547,548,549,551,554,558,559,560,562,563,564,576,587,588,600,605,608,612,614,615,629,631,638,641,644,645,648,660,661,662,682,691
3,4,11,14,16,17,18,20,22,24,25,26,28,29,30,34,37,41,42,46,47,48,50,53,56,58,62,64,66,69,74,75,76,77,79,82,85,86,87,90,91,92,96,98,102,103,105,106,107,108,109,111,114,117,122,126,128,129,132,133,139,143,169,192,203,221,228,247,250,254,307,341,350,358,369,370,412,437,439,469,479,483,493,514,515,544,563,570,576,577,592,598,603,635,636,645,673,683,690,697,733,735,738,745,746,763,768,772,776,777,782,787,793,796,811,812,831,847,851,852,859,866,873,874,878,881,889,890,907,908,914,918,923,928,934,935,937,942,945,948,954,955,957,959,968,969,970,975,981,983,984,988,989,992,1001,1004,1006
1,3,4,28,39,40,41,43,44,45,46,53,54,58,95,131,141,152,163,167,174,268,286,287,288,296,301,302,310,311,312,314,324,337,339,340,368,369,372,424,431,433,443,444,448,450,456,509,515,517,518,527,563,582,584,611,639,661,677,679,683,689,690,692,693,694,700,701,702,706,708,712,713,715,717,719,720,721,722,723,724,729,736,738,740,741,742,743,744,745,746,757,760,761,766,768,769,770,790,800,812,861,863,894,898,900,901,902,903,904,925,934,937,941,943,945,950,961,966,968,970,975,982,983,984,987,989,994,996,997,998,999,1004,1006,1012,1017,1021,1022,1023,1026,1047,1048,1061,1062,1063,1072,1074,1078,1084,1085,1087,1088,1097,1115,1143,1164,1175
1,2,11,18,19,27,31,33,43,44,47,60,61,63,64,67,69,73,76,80,81,83,84,85,91,93,94,95,97,98,99,102,105,108,111,112,113,117,120,121,122,124,125,127,129,130,131,133,134,157,178,194,225,235,409,444,467,468,469,474,498,510,517,537,568,579,580,592,650,652,661,662,676,682,736,737,750,755,756,762,774,789,793,804,805,811,813,823,841,852,853,855,858,863,874,883,885,888,906,908,913,921,924,929,931,933,936,940,944,950,954,956,958,963,967,968,970,974,978,980,983,990,991,996,997,1000,1001,1005,1009
7,8,9,25,27,33,34,36,46,47,50,59,60,68,75,78,79,83,86,89,90,91,95,102,103,106,124,137,140,157,168,169,170,172,174,176,182,188,189,200,204,210,232,260,366,367,404,558,561,562,563,564,567,568,569,570,571,572,573,574,575,576,577,578,580,581,586,589,590,591,592,593,594,595,598,601,602,603,604,605,610,612,614,615,626,627,652,653,659,673,676,681,682,683,684,685,687,688,689,690,693,694,698,699,701,702,703,706,712,715,761,762,764,765,767,781,784,787,792,793,796,797,802,813,825,830,832,852,862
3,4,9,15,17,19,20,21,24,25,31,34,35,36,39,41,47,48,49,52,54,55,57,60,62,63,64,65,66,67,68,69,71,72,73,75,77,80,82,83,84,87,88,90,91,93,94,95,96,97,98,105,106,107,109,110,111,112,113,114,115,116,117,119,120,122,124,125,126,127,128,129,130,131,132,133,134,136,138,150,159,163,168,179,181,192,197,202,212,224,231,247,252,258,260,261,262,263,264,268,269,281,290,295,304,310,311,336,350,354,358,360,379,381,388,398,400,402,409,413,419,422,431,444,450,453,455,473,506,509,510,514,533,534,538,559,561,564,565,569,591,594,601,603,604,608,609,613,614,615,616,617,621,630,631,636,640,641,643,647,652,655,658,660,668,670,672,675,682,687,691,693,697,699,706,708,712,716,721,723,730,731,735,737,738,743,746,748,750,751,756,758,759,760,761,762,764,766,767,770,778,779,780,782,785,786,792,793,803,806,811,813,818,823,828,839,843,845,848,854,855,857,859,864,867,869,870,872,874,875,879,881,885,887,888,889,893,895,896,898,899,900,913,914,918,921,923,924,928,930,931,932,937,941,942,944,945,947,950,953,954,963,964,969,973,975,982,983,987,989,991,994,996,997,999,1004,1007,1008,1012
74,76,79,82,86,87,92,98,100,103,114,118,126,127,134,220,221,222,224,225,236,244,250,252,282,283,285,304,314,344,346,355,358,362,376,381,385,408,455,463,466,511,529,537,578,583,599,600,612,660,661,679,681,697,703,707,721,729,735,739,750,771,775,795,796,804,812,818,842,880,884,908,914,924,936,940,948,949,950,951,962,963,964,965,966,968,971,973,974,975,977,980,981,982,984,988,997,1000,1001,1004,1006,1007,1009,1013,1014,1015,1016,1018,1019,1022,1023,1024,1030,1031,1036,1045,1046,1052,1057,1058,1067,1069,1070,1072,1073,1074,1076,1078,1090,1092,1093,1095,1105,1106,1108,1111,1117,1119,1121,1127,1129,1130,1137,1142,1143,1144,1145,1146,1147,1155,1156,1159,1160,1165,1166,1168,1171,1173,1178,1179,1186,1187,1189,1200,1201,1207,1208,1209,1212,1213,1216,1217,1220,1221,1225,1227,1231,1232,1233,1234,1236,1238,1239,1241,1287,1288,1292,1294,1295,1296,1298,1299,1300,1301,1303,1306,1307,1339,1341,1345,1346,1350,1353,1355,1356,1358,1360,1363,1366,1369,1371,1372,1375,1376,1381,1382,1383,1387,1393,1394,1396,1402,1403,1404,1405,1408,1411,1412,1413,1414,1416,1418,1419,1423,1426,1431,1433,1437,1438,1439,1444,1446,1448,1490,1494,1495,1496,1497,1502,1503,1504,1505,1512,1532,1534,1535,1536,1547,1550,1553,1558,1566,1567,1568,1576,1579,1583,1585,1591,1593,1595,1599,1601,1602,1610,1611,1657,1658,1660
0,1,5,8,9,11,12,13,15,20,21,29,33,36,38,40,42,44,45,54,55,59,60,61,63,68,70,71,73,76,78,81,83,84,86,87,91,93,94,95,97,98,99,102,104,105,108,109,110,111,112,113,116,117,119,121,122,124,127,129,131,132,133,134,154,190,197,218,232,238,287,292,294,299,309,325,334,346,384,391,401,421,428,435,450,451,459,500,511,512,525,541,543,546,557,560,568,572,592,602,605,606,609,618,648,659,671,674,681,694,698,699,701,715,719,721,730,744,754,769,774,790,791,798,804,810,817,822,824,825,827,829,834,839,850,883,885,892,896,903,906,911,916,917,930,933,936,943,944,950,951,952,954,959,961,962,963,967,973,974,977,978,981,985,986,993,996,1003,1008,1009
7,15,16,17,20,25,47,57,58,59,63,73,111,112,114,121,134,139,211,221,224,227,236,242,244,334,394,456,459,464,469,486,487,490,493,494,496,511,529,530,531,532,533,554,571,580,596,597,602,606,607,611,612,619,657,666,668,688,699,702,707,709,715,822,824,826,827,829,838,839,854,856,857,859,860,861,862,866,868,873,882,884,889,890,893,895,896,904,916,921,927,929,939,954,955,964,970,971,972,973,974,977,978,981,986,995,1000,1001,1003,1043,1045,1078,1083,1085,1091,1095,1096,1102,1104,1118,1120,1122,1123,1151,1152,1156,1157,1163,1164,1169,1170,1172,1174,1177,1208,1209,1210,1215,1220,1221,1222,1229,1232,1260,1262,1263,1264,1265,1267,1273,1276,1308,1310,1311,1338,1345,1346,1349,1350,1351,1372,1373,1380,1386,1387,1399,1412,1413,1416,1417
3,4,15,16,17,21,26,31,34,37,39,41,42,48,50,54,62,66,71,74,75,76,80,82,86,91,102,105,106,107,108,109,111,114,115,116,117,122,128,131,132,143,156,168,171,181,233,254,281,341,353,357,400,439,444,522,523,571,598,606,621,637,644,650,675,683,686,740,746,759,766,768,777,796,800,812,825,831,845,848,851,866,869,873,888,890,897,904,914,923,928,935,945,948,955,957,964,969,970,975,981,983,988,989,992,994,1004,1006
12,99,100,102,132,133,134,155,169,176,206,208,222,237,245,249,295,309,341,346,350,354,357,365,375,377,380,413,485,515,516,517,519,655,658,659,694,695,717,718,742,746,747,748,753,800,802,803,806,807,818,826,828,829,840,841,845,846,847,879,897,902,917,918,974,1003,1060,1061,1071,1088,1090,1108,1111,1113,1114,1118,1121,1123,1146,1147,1151,1154,1160,1170,1191,1195,1197,1199,1209,1227,1231,1242,1244,1247,1252,1256,1259,1269,1296,1303,1305,1306,1317,1389,1393,1395,1459,1461
10,14,17,19,22,24,25,28,30,31,32,35,37,40,41,45,47,48,51,56,57,58,59,62,64,65,66,69,71,72,74,75,76,77,79,80,82,84,85,87,88,89,90,91,92,94,96,97,100,103,105,106,107,108,109,110,111,112,114,115,116,117,120,121,122,126,128,130,132,134,135,149,150,153,166,174,192,212,217,221,230,246,253,257,262,269,280,297,299,310,311,349,350,353,356,367,400,405,415,422,425,427,442,448,465,469,492,504,508,509,510,515,520,524,531,533,542,544,548,556,559,564,571,576,580,589,591,607,610,613,616,627,631,642,644,647,649,655,666,668,682,687,690,691,692,693,702,704,705,731,735,744,747,753,759,762,768,778,781,782,785,787,793,796,811,818,836,837,844,846,848,849,853,867,870,871,874,876,879,881,887,888,889,895,898,899,900,904,907,908,909,915,917,918,941,948,950,951,959,960,966,971,972,973,975,982,984,987,988,990,992,993,994,995,996,999,1001,1004,1006,1007,1010,1012
3,10,15,41,47,48,49,50,54,60,67,70,71,73,76,77,91,93,95,106,107,108,109,119,122,123,125,132,133,134,135,140,141,147,155,162,177,178,196,220,222,223,249,252,258,268,282,283,288,289,321,324,334,335,337,343,352,369,371,375,378,401,403,404,405,417,430,438,456,466,467,468,469,470,471,473,474,476,479,480,482,483,484,485,487,488,489,495,496,502,503,505,508,515,516,517,518,520,521,526,527,529,530,531,532,534,535,536,537,541,542,545,550,554,556,557,558,559,560,562,565,568,570,571,573,574,576,578,579,580,583,584,592,596,597,600,603,604,608,612,613,614,615,616,620,626,627,630,632,634,636,637,638,639,641,642,643,644,645,647,649,654,658,660,673,678,679,681,682,692,693,696,697,711,712,718,720,721,723,724,726,729,744,748,754,755,756,760,761,764,767,770,771,777,787,788,793,794,795,798,799,800,801,804,806,807,810,812,813,818,820,821,824,827,834,837,838,839,840,855,860,862
0,5,8,9,11,12,13,14,15,18,19,21,23,28,30,32,33,36,37,38,44,45,50,52,53,55,56,57,59,60,61,62,64,66,67,68,73,75,79,80,81,83,84,85,86,87,88,89,91,93,94,96,98,99,101,102,103,105,108,110,112,115,116,119,120,121,122,124,125,126,127,128,129,130,131,133,134,147,151,158,162,164,194,196,206,218,240,252,261,263,270,289,295,315,321,327,345,355,356,359,364,371,393,398,403,404,421,427,429,430,435,441,448,450,451,458,475,476,477,496,497,499,506,518,520,522,523,525,531,538,548,561,580,593,594,601,612,613,615,628,630,634,637,640,641,642,648,649,651,667,676,679,681,695,698,699,700,702,709,715,719,723,730,739,743,749,750,753,755,756,762,771,783,788,789,791,792,803,816,819,823,825,827,847,852,853,854,855,862,864,866,867,875,876,883,885,886,887,890,891,895,897,899,900,902,904,906,913,918,921,927,930,933,936,941,949,950,954,956,957,960,968,973,977,978,979,980,983,985,987,990,993,996,997,1000,1003,1005,1008,1009,1012
5,11,14,19,30,33,53,55,59,61,63,81,84,89,93,94,113,123,124,136,140,196,200,212,223,224,227,229,239,294,333,337,349,353,376,446,471,472,475,494,502,503,508,511,522,526,543,551,556,567,584,585,590,617,619,623,625,629,659,663,688,710,726,753,755,758,763,794,819,824,826,827,855,890,912,939,942,943,944,947,948,949,950,951,955,958,959,962,965,967,968,976,977,980,989,991,993,994,995,996,997,1000,1003,1004,1005,1006,1007,1008,1010,1011,1012,1014,1016,1020,1022,1029,1030,1031,1032,1034,1036,1037,1041,1043,1044,1045,1046,1050,1052,1053,1054,1056,1059,1060,1062,1068,1071,1072,1081,1084,1085,1088,1089,1092,1094,1095,1097,1098,1099,1100,1104,1106,1159,1164,1165,1170,1171,1172,1174,1179,1180,1183,1184,1186,1187,1192,1196,1199,1205,1211,1216,1217,1219,1220,1228,1231,1233,1234,1235,1241,1245,1246,1249,1253,1256,1266,1270,1271,1273,1280,1281,1282,1283,1285,1287,1291,1297,1301,1302,1303,1305,1315,1316,1317,1318,1322,1326,1335,1337,1340,1341,1342,1344,1348,1350,1352,1355,1362,1370,1372,1373,1375,1376,1383,1384,1404,1405,1406,1416,1420,1435,1458,1471,1472,1473,1480,1481,1490,1501,1536
3,4,7,8,14,15,16,17,19,20,25,28,29,30,31,32,34,35,39,41,46,47,48,49,54,56,57,58,62,64,65,66,69,70,71,72,74,75,76,77,80,82,83,86,87,88,89,90,91,94,96,100,102,103,105,106,107,109,110,111,112,115,116,117,118,122,126,128,130,132,134,139,149,156,159,163,182,186,197,221,245,275,278,288,309,310,312,332,342,350,353,356,367,370,378,388,401,412,414,425,431,455,463,469,470,472,480,483,484,504,509,514,524,544,564,569,570,571,589,598,600,603,608,611,613,616,619,621,623,627,635,644,645,646,647,655,675,686,687,689,693,700,702,705,712,720,729,731,733,735,740,742,746,748,749,753,756,759,760,762,768,770,778,779,782,783,784,785,793,796,806,811,812,818,820,825,828,843,845,846,848,859,860,867,869,870,871,873,874,878,879,880,887,888,889,893,895,896,899,900,904,908,914,918,923,928,934,935,937,941,942,945,948,950,953,955,960,964,975,981,982,983,984,990,992,994,996,999,1004,1007,1010,1012
17,20,21,26,39,41,50,52,53,60,62,63,64,69,75,76,82,88,150,151,154,161,215,216,219,222,230,231,232,240,255,260,265,270,271,274,275,276,279,284,297,309,310,311,324,346,349,350,374,380,388,389,396,412,464,508,535,536,541,557,586,594,616,631,636,655,666,670,672,731,735,736,738,739,740,742,746,748,749,751,752,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,781,791,793,794,795,797,798,799,800,807,812,815,816,817,822,823,824,828,830,832,833,858,859,860,861,862,864,866,867,868,869,870,871,875,876,878,880,881,884,886,888,893,894,895,899,901,903,904,905,906,911,913,915,939,941,942,943,944,945,946,947,950,967,969,970,973,977,978,982,984,986,989,990,993,995,997,998,999,1000,1001,1004,1006,1053,1065,1080,1081,1082,1085,1100,1101,1110,1112,1113,1114,1115,1116,1121,1123,1133,1134,1142,1143,1145,1155,1160,1162,1179,1184,1187,1188,1195,1200,1201,1203,1206,1217,1219,1223,1225,1226,1229,1233,1236,1241,1243,1246,1257,1263,1282,1300,1306,1312,1320,1331,1332,1334
1,3,4,5,9,14,19,22,23,25,26,30,31,32,34,35,36,40,46,49,52,55,56,58,59,60,62,63,66,67,69,72,73,74,75,77,79,80,81,82,84,85,87,88,90,91,93,94,95,96,97,98,101,103,105,106,107,109,110,111,113,114,115,116,119,120,121,122,124,125,126,127,128,129,130,131,133,134,135,160,162,168,194,197,205,222,224,231,257,258,261,264,268,269,282,285,298,317,327,335,365,367,372,373,382,383,393,398,409,416,418,430,431,441,453,458,462,483,497,498,506,509,512,524,534,538,554,559,561,564,573,577,580,582,591,594,595,601,604,609,615,627,634,635,642,643,652,658,669,670,672,683,690,691,692,697,706,707,712,723,724,737,743,749,750,752,755,761,764,774,777,780,786,789,803,804,805,807,813,827,828,829,836,841,842,844,852,853,854,859,867,875,885,887,891,897,900,904,913,914,916,918,921,924,929,930,932,937,941,944,947,950,953,954,955,963,967,969,971,972,973,974,975,978,983,987,988,989,991,992,996,999,1007,1008,1012
2,3,17,25,26,30,32,38,40,41,42,44,46,47,50,59,60,62,65,66,67,78,79,81,96,102,103,114,119,149,153,154,207,227,231,232,249,260,261,342,357,359,391,410,412,423,427,434,438,444,468,469,477,479,491,492,493,494,511,513,520,528,622,639,656,672,684,690,727,740,816,832,844,852,855,877,924,925,930,931,932,933,934,935,936,938,939,940,941,943,944,947,950,951,952,953,954,955,956,963,964,965,967,968,969,970,971,974,975,978,980,981,983,984,986,994,995,996,997,1002,1006,1008,1009,1010,1011,1012,1015,1016,1017,1018,1019,1020,1021,1023,1024,1027,1029,1046,1047,1048,1059,1060,1061,1070,1077,1079,1080,1081,1082,1088,1089,1091,1093,1097,1104,1105,1106,1107,1108,1112,1113,1114,1115,1116,1118,1120,1124,1128,1129,1130,1133,1135,1137,1139,1140,1142,1150,1151,1152,1159,1162,1164,1165,1167,1168,1169,1177,1185,1187,1188,1191,1193,1195,1197,1198,1262,1272,1274,1279,1280,1281,1299,1300,1301,1303,1306,1307,1309,1317,1320,1327,1328,1338,1339,1357,1360,1361,1363,1364,1365,1368,1369,1370,1371,1376,1399,1402,1404,1405,1406,1413,1422,1503,1506,1507
3,7,8,10,14,16,17,18,19,22,23,28,30,31,32,34,35,40,43,48,54,56,57,59,63,64,65,66,69,74,75,79,80,84,85,86,87,88,90,91,94,95,96,98,100,103,105,106,107,108,110,111,112,114,116,117,122,125,128,129,130,131,134,139,142,148,149,214,221,226,239,268,288,289,307,312,313,320,350,356,374,388,405,443,448,453,461,482,493,495,510,524,533,542,544,580,588,589,607,609,616,621,627,631,644,645,649,668,688,689,690,691,692,700,702,704,705,722,731,742,754,759,762,766,778,783,784,808,811,828,829,847,848,849,859,867,871,873,874,878,882,895,905,906,914,918,921,928,931,935,937,941,955,968,978,981,989,990,992,993,997,999,1000,1001,1005,1010,1012
20,22,23,26,49,51,52,55,58,59,62,98,183,189,191,204,208,219,220,251,252,259,260,263,265,268,271,278,285,315,320,322,324,325,339,350,363,365,372,378,385,388,416,418,440,453,466,467,468,469,471,474,479,480,484,487,488,489,490,494,495,499,500,501,502,504,505,508,509,510,511,512,514,515,516,517,520,538,539,540,546,547,555,559,560,561,567,570,573,583,584,586,593,594,596,597,600,601,603,604,605,606,607,610,611,612,629,630,664,665,668,671,672,673,674,675,676,677,678,679,680,681,682,695,697,698,699,700,701,702,705,772,773,775,783,784,785,798,800,801,812,813,815,816,823,825,826,831,832,863,864,875,901,904,905,907,908,910,911,912,913,916,918,928,930,952,953
3,15,16,17,20,24,25,26,31,34,35,39,41,42,47,48,49,50,57,58,59,62,64,66,69,72,74,76,77,80,82,83,87,88,90,91,96,98,102,103,105,106,107,109,110,111,112,114,115,116,117,122,123,126,128,130,132,136,143,156,170,182,192,203,216,224,228,241,250,254,278,297,310,326,336,337,338,350,351,353,354,377,378,400,422,423,455,462,466,468,469,472,491,501,503,514,515,517,522,523,537,544,559,563,569,584,592,598,600,603,613,616,644,655,666,668,686,691,693,697,709,725,735,745,746,747,759,763,766,772,776,779,780,783,787,793,796,811,812,818,828,838,843,844,845,846,848,859,870,871,874,879,888,889,890,893,895,900,906,908,918,923,935,937,945,948,955,957,959,960,964,970,981,982,983,984,988,990,992,994,997,999,1004,1006,1011,1012
2,5,6,8,12,13,15,37,55,75,86,88,89,90,94,142,144,268,277,287,288,291,296,382,391,399,400,476,499,500,527,536,542,545,577,594,597,598,602,603,699,727,735,736,741,763,767,771,783,787,815,820,848,850,871,873,997,998,1002,1003,1007,1008,1031,1032,1034,1035,1040,1041,1042,1043,1044,1045,1047,1053,1054,1055,1056,1058,1085,1086,1088,1135,1136,1137,1138,1144,1146,1148,1149,1150,1151,1153,1157,1158,1160,1166,1177,1184,1204,1205,1206,1209,1210,1225,1228,1240,1242,1245,1246,1247,1248,1249,1274,1275,1281,1282,1285,1286,1287,1298,1299,1301,1325,1339,1365,1367,1375,1376,1378,1379,1386,1387,1389,1390,1391,1393,1395,1400,1403,1404,1405,1406,1414,1416,1418,1423,1425,1426,1438,1441,1443,1446,1480,1485,1498,1500,1501,1502,1503,1505,1507,1515,1516,1517,1518,1519,1520,1631,1638,1643,1644,1647,1649,1659,1661,1681,1685,1686,1695,1701,1702,1713,1724,1728,1756,1757
0,1,19,50,52,63,64,65,66,69,77,84,85,86,90,93,99,104,105,107,108,113,116,117,121,125,127,129,131,132,133,134,185,221,312,344,345,371,385,386,438,470,518,599,662,668,674,676,715,719,725,742,750,755,760,770,774,778,789,806,819,822,839,855,864,874,875,879,883,888,890,903,907,912,920,921,929,954,959,967,975,991,1001,1009
1,5,7,8,12,13,14,24,26,29,41,45,46,47,48,49,55,71,73,75,77,88,91,92,94,103,106,114,116,117,121,122,123,124,125,126,127,128,129,130,131,132,133,135,140,141,142,144,145,146,151,152,156,158,159,160,163,164,166,171,172,173,174,175,178,180,181,182,183,184,185,194,195,196,202,203,204,205,206,211,212,213,214,220
1,13,21,42,47,50,52,55,60,64,66,67,69,73,80,84,88,89,91,93,94,96,97,98,102,104,108,112,113,115,116,121,125,127,129,130,133,184,206,280,321,323,346,355,399,441,459,640,652,653,662,709,718,719,741,743,749,750,753,756,762,767,774,789,839,864,867,874,876,883,887,903,906,912,913,925,954,962,967,990,993,996,1002,1005,1012
3,6,9,13,14,15,16,17,21,32,34,35,36,39,50,51,52,684,685,687,691,692,701,702,712,721,734,750,754,755,756,757,759,766,771,1404,1407,1408,1409,1410,1411,1415,1418,1419,1420,1421,1424,1425,1426,1428,1429,1430,1431,1432,1433,1434,1435,1436,1438,1442,1445,1446,1447,1450,1453,1454,1455,1456,1463,1464,1467,1471,1474,1475,1476,1480,1481,1483,1484,1495,1505,1508,2133,2135,2768
3,4,7,13,14,17,19,22,24,26,28,30,31,34,35,39,41,42,44,46,48,49,51,54,56,57,58,62,63,65,66,68,69,71,72,74,75,76,77,79,80,82,85,86,87,88,90,91,94,96,102,103,105,106,107,108,109,110,111,112,114,115,116,117,120,122,126,128,130,132,134,150,164,192,196,212,245,247,267,268,281,312,315,318,324,335,342,349,350,356,358,364,367,370,378,402,409,412,422,425,426,427,431,453,455,463,486,508,509,510,513,531,534,542,564,569,580,590,591,597,600,606,608,613,616,627,631,637,642,644,646,650,668,686,693,699,700,705,728,731,742,746,762,766,777,778,782,783,787,796,800,806,807,810,811,818,825,828,832,843,844,846,847,848,849,854,856,859,867,871,873,874,875,887,888,889,895,899,900,907,914,923,928,937,944,948,949,953,955,960,969,970,971,975,981,982,987,990,992,993,994,999,1004,1006,1007
13,20,21,22,23,25,38,39,40,41,51,53,57,63,71,72,75,76,77,97,98,100,102,103,106,110,111,115,116,128,133,135,144,146,147,162,172,175,191,192,199,201,206,209,210,222,244,258,259,314,316,317,365,396,405,407,409,411,415,432,461,493,500,518,568,572,652,654,655,671,672,673,674,675,676,680,681,682,683,684,688,689,690,691,693,694,696,699,700,701,704,705,706,707,708,711,713,724,725,726,728,730,733,743,745,746,747,748,750,752,755,757,758,759,760,762,765,779,780,781,782,783,784,785,787,788,790,791,792,794,803,804,808,812,813,815,816,818,819,823,825,827,829,830,831,834,836,837,838,841,843,846,847,848,851,854,855,856,862,864,867,872,873,876,877,878,879,880,885,902,908,913,914,915,919,922,923,927,931,932,942,953,962,976,977,978,979,983,997,1001,1003,1004,1005,1006,1013,1015,1031,1033,1034,1035,1042,1049,1078,1104,1106
1,6,11,14,17,18,20,21,22,24,26,27,32,36,41,42,44,46,49,50,51,52,54,55,56,58,60,61,62,64,66,67,69,70,71,72,73,77,78,79,80,81,84,85,86,87,88,89,90,91,93,94,95,96,97,98,99,101,102,103,104,105,108,110,112,113,115,116,119,120,121,122,123,125,126,128,129,130,131,132,133,134,150,187,192,196,219,228,229,230,235,268,270,278,290,293,295,315,316,323,336,345,381,386,391,426,427,441,463,482,483,498,510,518,528,538,542,544,561,574,580,582,591,592,599,603,607,613,616,634,640,643,649,662,670,674,679,689,691,698,702,705,712,718,724,736,739,750,753,755,756,762,763,768,772,776,781,789,791,795,805,810,823,825,837,841,848,852,854,855,858,861,864,874,877,885,887,890,891,895,896,900,906,913,915,917,919,922,929,937,940,941,943,949,950,952,954,956,960,961,967,968,971,973,978,980,985,987,990,991,996,997,1000,1005,1009,1012
1,7,8,12,13,14,15,16,24,26,30,43,44,50,52,70,71,77,78,87,89,94,108,109,128,134,158,160,172,182,184,197,201,229,236,238,249,252,263,266,272,273,278,279,280,289,291,292,299,304,306,323,330,335,336,357,371,373,383,389,392,397,400,402,436,451,463,474,479,498,524,525,528,540,543,549,572,597,643,644,750,759,760,761,766,769,772,773,774,775,777,778,779,781,782,783,784,786,787,791,792,797,803,809,810,811,812,814,815,818,819,820,821,822,824,825,826,827,829,830,831,846,851,854,856,859,870,871,873,874,875,876,878,881,884,887,889,891,892,893,894,895,896,897,898,899,901,904,905,906,908,909,910,911,913,916,917,919,921,924,926,943,949,950,953,954,956,960,961,962,964,970,972,974,975,981,982,990,992,993,997,998,1008,1010,1011,1012,1022,1025,1037,1038,1049,1057,1058,1059,1068,1086,1087,1093,1099,1103,1105,1106,1107,1108,1114,1127,1129,1130,1150,1152,1161,1175,1177,1182,1189,1214
4,10,15,16,17,19,22,24,25,27,30,31,34,35,38,40,47,48,51,54,56,57,58,59,62,64,65,66,68,69,71,72,74,75,77,79,80,82,83,85,86,87,88,90,91,94,96,100,105,106,107,109,110,111,112,114,115,116,117,120,128,130,132,134,161,164,171,192,212,213,215,217,221,224,233,245,253,262,268,269,276,277,290,297,310,312,336,338,342,344,350,353,362,365,367,381,383,384,402,405,409,412,414,422,427,437,448,451,503,509,533,536,542,569,580,586,589,610,616,619,631,639,644,645,655,663,668,686,689,690,691,692,693,699,704,705,711,728,731,742,745,747,749,753,756,759,762,770,778,779,780,783,793,797,802,806,808,811,818,828,829,844,846,848,849,850,854,859,867,870,871,874,879,887,888,889,895,896,898,899,900,904,906,908,930,945,950,951,959,960,973,975,986,989,990,992,996,999,1001,1004,1006,1007,1010,1012
1,3,6,7,9,62,67,68,78,79,80,84,85,91,92,94,103,105,122,130,137,138,139,144,145,146,156,159,161,186,187,190,194,200,204,207,222,230,231,240,241,243,268,280,290,293,301,309,348,356,357,359,362,373,412,419,422,432,438,439,441,443,460,463,464,465,466,467,468,469,470,472,473,474,475,476,478,479,481,482,483,484,485,488,492,493,494,495,499,501,504,505,509,510,511,513,514,515,517,518,519,520,521,523,525,526,527,528,529,530,532,533,535,536,538,539,550,551,553,555,556,557,558,559,560,565,570,574,575,576,577,579,581,582,590,592,593,601,602,606,607,610,612,614,615,618,620,623,625,626,627,631,632,633,636,637,640,643,644,645,646,648,658,659,662,663,664,668,672,673,688,694,695,697,698,705,708,709,712,716,721,725,728,731,732,733,734,735,743,748,750,751,752,756,759,761,762,768,769,770,771,778,786,787
0,1,3,4,6,9,14,15,19,20,21,22,24,31,32,35,36,39,41,46,48,51,52,55,56,57,58,59,60,61,62,63,64,66,67,69,72,73,75,79,80,81,82,84,85,87,88,90,91,92,93,94,95,96,97,98,99,101,103,105,106,107,110,111,112,113,114,115,116,117,119,120,121,122,123,125,126,127,128,129,131,132,133,160,165,175,180,196,201,210,231,235,242,247,252,260,264,268,269,280,282,289,299,310,350,355,356,357,370,382,398,410,418,422,430,441,442,453,476,477,486,503,506,509,510,518,524,534,542,557,561,562,564,565,569,580,582,583,589,594,595,604,605,608,609,610,621,631,641,642,643,652,655,658,662,667,669,670,672,673,675,676,679,689,691,693,702,712,717,723,731,737,743,750,752,755,756,761,766,779,782,783,788,800,802,803,813,818,831,841,843,844,848,849,853,854,855,863,867,871,881,885,887,895,897,904,912,913,914,921,927,928,929,930,932,935,943,944,947,956,963,965,967,973,974,975,978,979,985,989,991,994,996,997,999,1001,1005,1007,1009
1,2,8,15,17,26,34,35,39,40,43,46,50,51,58,75,78,79,97,98,100,104,106,117,119,122,124,125,149,225,228,233,234,239,256,258,266,376,412,413,420,421,522,525,531,532,534,537,591,593,608,619,620,629,654,675,678,696,698,701,702,729,740,742,745,755,763,780,849,852,871,894,914,915,916,928,934,966,969,983,986,987,988,1015,1017,1021,1022,1025,1033,1034,1041,1042,1043,1044,1046,1047,1053,1054,1055,1057,1059,1060,1061,1062,1063,1065,1066,1067,1068,1070,1073,1074,1087,1089,1091,1092,1094,1096,1097,1098,1100,1103,1109,1110,1111,1113,1114,1119,1120,1123,1129,1130,1132,1134,1136,1137,1139,1141,1143,1145,1146,1148,1149,1151,1152,1153,1163,1164,1165,1166,1168,1170,1171,1172,1173,1174,1176,1177,1178,1179,1186,1187,1188,1189,1192,1193,1194,1196,1197,1199,1200,1201,1202,1203,1206,1210,1212,1217,1218,1219,1225,1226,1227,1230,1231,1238,1242,1244,1247,1249,1250,1251,1253,1255,1256,1260,1263,1264,1269,1270,1272,1273,1276,1278,1295,1296,1297,1306,1311,1312,1316,1326,1327,1330,1331,1333,1334,1336,1341,1342,1348,1349,1351,1411,1420,1422,1426,1431,1499,1503,1504,1509,1516,1517,1534,1535,1536,1637,1639
3,4,7,14,15,16,19,22,25,26,28,30,31,32,34,35,37,41,47,49,51,54,56,57,58,59,62,63,64,65,66,70,71,74,75,76,77,79,80,82,84,85,87,88,90,91,92,96,97,100,105,106,107,109,110,111,112,114,115,116,117,120,122,123,126,128,130,132,134,136,149,197,201,212,213,215,230,268,269,282,289,305,310,312,336,341,344,350,356,367,370,404,409,425,427,431,448,453,455,486,509,510,514,534,542,559,564,572,580,606,608,610,613,614,636,643,645,650,651,663,693,697,704,749,756,763,770,772,776,778,781,783,800,802,806,811,828,838,846,849,851,854,866,867,875,879,887,895,899,900,904,905,907,910,915,918,923,930,941,948,950,952,960,980,981,982,984,994,999,1001,1007,1010,1011,1012
1,3,4,7,8,9,17,20,22,23,25,27,37,47,48,60,62,130,138,141,149,153,156,159,183,188,189,211,218,231,232,234,236,239,240,259,266,275,286,294,295,339,380,406,408,415,418,422,423,538,539,540,542,549,575,577,585,587,597,608,640,643,647,653,660,670,678,684,688,691,692,693,695,697,698,700,703,705,709,714,715,716,718,721,722,723,724,725,726,731,732,735,736,737,738,739,740,777,783,784,793,797,798,818,819,823,829,830,835,837,839,841,842,843,849,850,851,852,858,860,864,865,866,867,868,869,870,874,875,880,881,888,892,895,898,901,902,915,918,928,929,930,931,935,936,937,938,942,946,954,958,975,976,977,987,993,994,999,1009,1013,1015,1016,1036,1038,1062,1064,1066,1067,1077,1100,1108,1223,1225,1226
0,1,9,11,13,29,33,38,42,43,44,45,54,55,60,61,63,64,68,69,70,71,73,74,78,81,83,84,85,86,89,90,93,94,95,97,98,99,101,102,104,105,108,110,117,119,124,127,129,131,133,134,139,154,196,218,231,242,244,287,292,294,316,324,334,346,384,388,401,407,421,435,450,451,459,500,511,525,546,547,568,574,586,587,592,593,602,616,617,618,623,639,674,694,695,699,710,715,744,757,774,785,787,798,810,813,818,821,822,849,850,853,855,859,874,876,883,885,892,896,903,911,916,917,919,920,924,925,930,933,943,944,949,950,952,961,962,963,970,974,977,981,985,986,993,996,997,1003,1008,1009
1,2,4,5,12,13,21,29,38,39,106,148,156,166,169,170,171,174,200,202,219,244,245,246,270,273,274,275,276,277,279,281,284,296,297,299,311,312,341,384,394,406,420,424,443,456,477,507,509,511,513,885,886,888,890,892,893,924,927,929,933,935,938,951,952,953,954,955,964,975,977,979,984,986,991,993,1005,1007,1009,1010,1011,1015,1016,1017,1018,1019,1021,1022,1023,1033,1036,1037,1038,1072,1073,1075,1083,1084,1085,1101,1102,1103,1104,1137,1140,1141,1142,1143,1144,1145,1166,1167,1168,1171,1172,1173,1176,1177,1180,1183,1185,1186,1187,1189,1191,1194,1198,1200,1201,1246,1247,1248,1270,1272,1283,1284,1295,1296,1299,1304,1315,1317,1318,1319,1330,1332,1344,1346,1348,1349
1,4,14,16,17,19,20,23,25,28,30,31,34,35,46,47,48,51,53,54,56,57,62,65,66,69,74,75,76,80,82,84,86,87,90,91,92,96,97,100,102,105,106,107,108,109,112,114,115,116,117,120,126,130,131,132,150,168,170,197,212,213,214,221,226,229,233,249,266,269,277,290,310,312,335,336,342,350,356,365,402,409,419,427,431,433,452,453,469,482,485,501,508,509,527,534,548,569,571,576,589,608,616,619,627,628,631,639,646,649,655,663,668,691,693,700,702,742,747,758,760,765,770,782,796,797,800,811,812,818,843,844,846,847,848,856,867,870,871,873,874,878,884,886,887,888,889,895,898,902,904,908,923,928,937,941,948,953,955,959,960,965,966,970,975,980,984,987,990,999,1004,1006,1007,1010,1012
1,5,27,29,33,35,36,48,49,55,62,66,70,77,93,105,109,111,127,129,134,136,138,141,146,157,172,186,193,198,208,210,212,216,230,239,241,263,265,266,267,299,322,323,324,325,343,344,363,366,384,448,501,502,504,574,575,579,583,586,588,589,599,600,601,602,605,607,610,611,612,613,617,618,621,622,625,627,629,630,631,632,634,637,640,642,643,644,645,647,648,649,650,653,654,657,659,664,667,669,670,671,674,677,678,680,684,690,697,699,700,702,705,709,711,712,713,715,716,726,727,728,729,730,736,737,738,741,742,744,745,747,756,761,764,765,770,774,793,795,799,802,803,807,809,813,823,828,832,843,844,846,855,867,870,871,877,884,886,892,893,894,901,903,904,910,911,933,934,941,961,962,971,972,973
3,4,6,9,10,14,16,17,19,22,23,24,25,26,30,31,34,35,36,41,46,49,51,53,54,56,57,58,59,62,64,65,71,72,73,74,75,77,79,80,82,84,85,87,88,91,92,93,94,95,96,97,98,100,103,106,107,109,110,111,112,114,115,116,120,121,122,123,124,125,126,129,130,131,132,133,134,136,145,152,168,201,212,213,231,247,262,269,278,292,295,298,303,312,313,326,338,362,367,370,371,402,419,422,437,453,458,462,466,470,475,486,503,509,510,523,538,542,559,564,570,580,594,598,603,607,608,610,613,632,640,642,675,679,692,693,711,714,725,749,750,756,758,763,770,772,778,780,782,783,789,806,807,813,828,837,843,847,848,849,854,867,870,873,875,882,885,887,895,896,897,899,900,904,906,907,914,917,918,922,928,930,937,941,945,950,951,953,954,955,964,965,966,969,971,973,975,978,980,983,987,990,991,992,994,996,999,1000,1001,1004,1007,1010,1011,1012
5,7,32,33,34,36,37,41,50,64,65,66,67,68,89,92,100,152,154,158,159,167,176,177,182,183,187,197,203,215,217,232,234,268,269,291,302,311,313,322,492,518,525,565,608,647,655,658,679,680,696,697,698,701,744,784,794,797,807,808,823,824,848,867,907,908,911,914,918,919,925,943,947,952,960,963,966,969,970,971,972,974,975,976,978,981,983,986,987,989,990,994,995,997,998,1003,1004,1006,1008,1009,1010,1014,1015,1016,1025,1026,1029,1031,1037,1038,1039,1042,1044,1045,1046,1048,1050,1055,1056,1058,1060,1064,1065,1073,1074,1075,1078,1079,1083,1094,1095,1096,1098,1114,1115,1120,1121,1122,1123,1125,1126,1129,1140,1141,1146,1147,1149,1150,1151,1155,1156,1161,1162,1164,1169,1173,1174,1195,1196,1197,1201,1206,1207,1214,1216,1224,1227,1231,1235,1237,1250,1257,1272,1274,1275,1279,1284,1286,1295,1307,1309,1312,1317,1321,1338,1341,1342,1345,1348,1349,1350,1357,1358,1359,1360,1361,1362,1363,1366,1367,1369,1370,1371,1379,1381,1401,1453,1454,1455,1460,1630,1633,1636,1639
4,6,14,15,17,19,22,24,25,30,31,35,40,41,51,54,56,57,59,62,64,65,66,69,70,71,72,74,75,79,80,82,85,87,88,90,91,92,94,96,105,106,107,110,112,115,116,117,120,122,126,128,130,132,134,139,149,160,180,186,197,199,200,203,213,221,226,239,268,269,292,312,316,338,350,356,358,379,381,388,404,412,431,455,483,486,509,510,515,522,523,528,531,533,542,544,559,564,580,603,608,613,642,663,668,689,691,693,702,704,705,753,768,770,782,783,798,802,806,807,811,818,828,837,844,847,849,850,854,859,867,871,874,887,888,889,895,896,899,900,904,917,918,922,941,945,950,951,952,955,960,961,965,975,980,990,994,995,996,999,1001,1007,1012
2,4,8,10,11,19,26,28,29,34,39,86,87,93,100,108,111,113,118,123,128,132,139,140,146,151,157,165,175,178,183,582,592,615,660,663,683,685,715,724,732,734,744,747,757,759,761,764,787,789,793,795,809,813,815,816,817,818,819,821,822,823,825,826,827,828,830,831,832,833,834,835,837,838,839,841,842,843,844,845,846,848,850,852,853,859,863,865,866,867,869,870,871,872,874,876,883,885,888,890,891,892,895,896,897,898,902,907,911,912,914,915,916,917,919,935,936,938,940,945,948,949,956,957,958,960,964,966,967,968,975,976,977,980,981,982,984,987,992,995,996,999,1007,1008,1010,1011,1013,1015,1016,1017,1019,1022,1023,1026,1039,1041,1046,1047,1076,1123,1126,1523,1527
4,7,10,14,17,19,22,25,28,29,30,31,32,34,35,41,47,51,56,57,58,59,62,64,65,66,69,71,72,74,75,77,79,80,85,87,88,90,92,94,96,100,105,106,107,111,112,114,115,116,117,120,121,122,126,130,132,134,159,178,192,197,212,219,245,258,262,268,269,277,280,291,292,297,310,312,313,350,356,362,369,404,412,420,426,431,436,455,469,470,472,483,508,509,523,542,544,548,559,569,580,589,591,597,600,603,608,614,616,620,627,631,632,637,642,649,663,668,690,691,692,693,702,705,716,724,725,731,742,746,747,768,770,778,783,787,796,802,806,811,818,828,836,837,843,844,846,849,850,870,871,873,874,878,879,887,888,889,895,899,900,904,907,918,923,928,941,945,950,960,964,972,975,980,981,984,989,990,992,999,1001,1004,1006,1010,1012
2,4,5,20,26,27,39,45,48,49,50,52,53,57,72,88,89,99,103,111,112,116,124,133,146,151,160,161,162,164,166,170,189,201,228,246,279,302,304,306,308,659,695,773,779,781,796,812,815,816,826,828,830,832,833,839,863,864,867,868,869,871,881,882,885,887,888,889,891,892,893,894,900,917,919,921,932,933,938,942,943,944,947,948,949,951,952,953,954,955,957,961,962,963,964,967,971,972,974,978,981,982,983,994,995,996,999,1003,1004,1005,1006,1009,1010,1013,1014,1019,1025,1032,1035,1036,1037,1043,1048,1050,1051,1063,1064,1066,1067,1068,1069,1070,1072,1075,1076,1077,1080,1083,1085,1088,1094,1098,1099,1104,1107,1108,1109,1110,1111,1112,1117,1121,1124,1126,1127,1130,1133,1137,1139,1146,1154,1160,1161,1168,1181,1183,1185,1202,1203,1205,1206,1207,1211,1213,1214,1216,1218,1220,1222,1237,1259,1317,1331,1681,1684
0,4,9,15,16,17,20,22,23,26,28,30,31,34,35,37,40,46,47,48,49,51,53,56,58,59,63,64,65,66,67,69,74,75,76,77,79,80,82,83,84,86,87,88,90,91,92,93,96,97,101,102,105,106,107,108,110,111,114,115,116,117,120,122,124,126,127,128,132,133,168,171,180,185,189,196,205,233,250,257,268,277,281,297,300,303,320,335,336,341,350,351,369,383,404,409,411,413,422,427,437,482,491,493,503,512,514,530,537,544,557,577,584,585,589,598,607,616,621,627,628,631,632,643,645,646,650,666,668,669,670,688,689,692,697,705,724,733,740,745,746,754,758,763,764,772,773,777,779,781,782,796,800,802,808,811,812,816,826,829,844,846,848,853,856,867,870,873,874,879,881,889,900,902,918,923,928,945,948,953,955,964,965,966,969,970,972,975,982,983,987,989,992,994,1004,1006
1,2,3,17,35,36,44,110,111,112,113,161,164,175,177,184,185,261,269,271,272,319,322,345,382,387,389,390,391,393,395,403,405,408,411,436,448,452,481,482,505,507,509,511,523,537,539,540,561,562,565,568,581,606,676,679,681,694,754,787,788,801,807,811,812,857,859,866,894,895,896,898,899,903,904,908,909,910,919,921,922,927,928,930,931,932,934,935,937,938,939,961,970,1035,1036,1037,1039,1040,1041,1042,1044,1046,1050,1051,1052,1053,1056,1058,1059,1060,1061,1106,1107,1108,1111,1112,1114,1115,1137,1150,1151,1153,1204,1205,1206,1215,1217,1220,1229,1230,1231,1232,1233,1234,1236,1237,1238,1241,1250,1252,1253,1276,1277,1282,1283,1296,1300,1301,1302,1303,1311,1313,1315,1359,1363,1367,1369,1370,1371,1372,1374,1376,1379,1380,1384,1385,1387,1391,1392,1394,1395,1397,1399,1402,1403,1413,1416,1417,1419,1425,1427,1433,1434,1436,1466,1468,1472,1512,1515,1517,1526,1552,1553,1555,1577,1580
0,1,5,8,9,12,19,27,36,38,45,47,50,52,55,63,65,69,73,77,78,81,84,86,90,93,95,98,99,101,104,108,109,111,113,119,124,125,129,131,133,196,231,279,303,312,327,382,391,393,416,476,497,499,518,526,557,567,593,595,599,609,612,626,634,662,676,678,742,743,750,751,761,769,774,778,784,787,788,789,791,804,805,817,824,853,855,858,885,890,898,903,920,921,943,944,954,956,967,976,978,991,1009
1,9,11,13,15,16,17,21,25,33,34,37,38,40,44,47,52,53,55,60,62,68,75,76,77,85,92,93,172,174,205,206,208,209,220,222,223,230,234,239,246,247,249,251,253,254,256,257,259,260,262,263,268,269,271,274,275,277,281,282,290,291,294,297,298,299,300,303,304,305,306,307,308,309,313,314,315,316,317,319,322,324,325,326,327,329,330,332,333,334,337,340,348,350,351,354,356,370,379,397,398,399,463
4,6,16,17,18,20,26,29,31,34,35,37,39,42,43,44,48,49,54,57,62,63,64,65,66,69,70,72,74,75,76,77,80,82,83,84,86,87,88,89,90,91,96,97,98,102,103,104,105,106,107,108,109,110,111,112,114,115,116,117,122,125,126,128,132,142,163,171,182,221,228,241,268,290,293,305,308,334,336,341,342,347,350,351,358,370,422,439,461,472,479,510,515,522,544,564,586,608,645,655,661,668,675,686,693,697,700,712,731,745,747,760,763,764,768,773,777,796,800,810,811,812,820,828,842,845,848,851,860,866,867,869,870,871,873,874,875,879,880,881,889,895,903,909,937,942,945,948,949,952,955,960,961,964,968,969,973,978,981,982,983,987,988,989,990,992,994,997,998,999,1004,1006
1,2,3,4,5,21,32,33,50,65,81,140,167,184,185,186,189,198,201,203,204,206,208,212,214,231,233,238,241,246,249,254,264,299,300,318,320,322,323,324,329,333,385,386,394,404,405,406,415,439,452,456,457,461,486,490,848,852,885,923,976,977,1009,1023,1033,1034,1043,1044,1045,1046,1052,1056,1058,1059,1068,1069,1070,1071,1079,1080,1081,1082,1084,1100,1103,1104,1105,1106,1113,1116,1117,1118,1129,1130,1131,1132,1133,1134,1135,1136,1138,1139,1140,1141,1142,1144,1145,1146,1148,1150,1152,1159,1162,1163,1164,1165,1174,1175,1177,1180,1182,1192,1193,1194,1195,1218,1220,1261,1270,1284,1285,1289,1291,1292,1294,1307,1309,1332,1336,1337,1341,1343,1344,1348,1389,1391,1398,1399,1400,1401,1404,1405,1406,1418,1419,1455,1461,1465,1466,1486,1505,1508,1511,1649,1650,1658,1659,1663,1664,1680,1686,1854
4,16,23,26,31,34,35,39,41,46,47,48,49,51,53,57,58,62,63,65,66,69,71,72,74,75,76,77,80,82,84,85,87,88,90,91,94,96,100,105,106,107,109,110,111,112,114,115,116,117,122,124,126,128,130,132,134,136,141,147,159,169,182,192,214,221,234,250,258,268,273,281,293,297,298,307,310,337,350,359,362,366,367,369,392,414,423,431,470,503,504,509,510,514,515,522,556,563,564,569,572,584,589,591,597,603,613,616,617,628,631,644,645,655,668,677,682,683,686,689,690,691,693,700,702,708,731,735,740,742,745,746,753,756,760,762,763,770,772,778,779,783,787,793,800,802,806,811,812,828,838,846,848,859,867,869,870,871,874,875,881,886,887,888,889,895,897,899,902,905,906,907,908,918,923,930,935,939,941,944,945,955,959,960,964,969,975,981,982,983,984,987,988,989,990,992,996,999,1001,1003,1006,1007,1010
3,22,43,44,47,48,50,52,55,59,62,168,169,171,192,196,238,239,334,343,347,356,358,359,365,366,391,449,463,468,469,491,505,510,530,535,537,540,568,586,592,693,695,729,741,754,809,813,829,859,861,862,881,889,896,947,949,950,951,952,954,955,957,958,962,965,966,967,968,972,973,979,980,982,983,984,985,986,989,990,991,992,993,1000,1001,1002,1004,1006,1008,1009,1010,1012,1013,1014,1015,1017,1019,1031,1032,1034,1035,1037,1039,1040,1042,1044,1046,1048,1049,1058,1059,1061,1063,1064,1065,1066,1067,1068,1072,1074,1078,1079,1082,1083,1086,1088,1089,1111,1128,1129,1152,1153,1154,1155,1156,1157,1173,1174,1192,1194,1195,1196,1204,1208,1209,1211,1214,1218,1219,1220,1221,1222,1223,1227,1229,1230,1257,1259,1263,1267,1272,1274,1278,1280,1285,1286,1288,1289,1293,1386,1389,1392,1395,1399,1400,1403,1461,1462,1465,1467,1468,1474,1475,1476,1572,1577,1578,1602,1608,1609,1610,1615,1616,1618,1623,1628,1629,1630,1652,1653,1694,1696,1724
0,3,4,6,9,10,15,17,20,22,24,26,27,30,31,34,35,36,41,43,46,49,51,55,56,57,58,62,64,65,66,68,71,72,73,74,75,83,84,87,91,93,94,95,96,97,98,103,106,107,109,110,113,114,115,116,119,120,121,122,123,125,126,128,129,131,132,133,160,168,180,194,233,237,239,242,267,271,298,308,317,326,342,365,370,373,434,439,453,475,476,489,491,498,513,523,542,594,595,597,602,606,607,608,621,635,640,673,675,679,682,683,699,714,720,736,738,743,748,750,766,768,770,772,778,780,782,785,786,791,805,807,848,852,853,854,856,872,873,875,885,897,907,909,911,921,923,928,930,953,954,955,965,969,973,975,978,980,987,988,994,997,1004,1005,1008,1011
13,23,49,52,54,56,60,61,62,64,67,68,69,77,79,84,85,90,146,149,161,163,166,167,183,186,194,221,223,226,228,229,230,295,296,371,382,386,400,410,428,437,438,454,457,458,470,494,514,615,616,644,647,649,682,707,736,767,768,770,788,793,841,844,866,953,957,1057,1059,1060,1062,1065,1066,1067,1069,1070,1078,1079,1081,1082,1083,1087,1088,1140,1141,1143,1144,1145,1146,1147,1150,1151,1152,1153,1155,1160,1161,1162,1163,1168,1169,1170,1172,1174,1177,1179,1182,1184,1189,1194,1195,1209,1210,1215,1224,1227,1256,1257,1261,1262,1264,1265,1267,1268,1269,1270,1274,1275,1277,1278,1291,1294,1296,1315,1316,1317,1321,1325,1329,1331,1332,1343,1366,1370,1372,1374,1377,1382,1392,1403,1404,1427,1446,1447,1459,1490,1494,1495,1510,1511,1537,1544,1563,1566,1592,1608
1,4,6,10,13,14,15,16,17,19,20,22,26,28,30,31,32,34,35,37,39,41,42,46,47,49,50,51,56,59,62,64,65,66,68,69,74,75,76,77,79,80,82,85,86,87,90,91,92,94,96,101,102,103,105,106,107,108,110,111,112,114,115,116,117,118,120,122,126,128,132,134,136,148,159,164,180,201,212,213,217,245,253,268,269,281,286,288,292,297,307,310,311,323,335,349,350,353,356,364,370,378,381,401,412,422,425,427,433,437,449,455,469,478,483,486,493,495,508,542,557,580,590,598,605,610,616,627,631,642,646,647,657,663,678,686,689,692,693,699,700,702,704,705,711,729,731,742,746,748,753,759,764,768,770,776,777,778,782,785,796,806,811,816,818,837,842,845,846,847,869,870,871,873,874,878,882,888,889,895,898,904,906,908,909,915,923,928,934,937,941,948,950,953,955,957,960,964,969,972,975,981,982,983,984,987,989,990,992,996,999,1001,1004,1006
1,10,26,27,29,51,54,64,66,76,83,86,100,102,160,162,172,181,183,192,206,208,209,224,232,238,241,244,249,250,258,266,272,283,286,291,319,344,347,350,353,355,356,360,361,364,369,376,377,386,395,399,405,407,438,466,468,471,474,477,483,559,606,607,615,616,618,619,698,702,726,738,739,742,743,744,750,789,791,792,793,794,796,797,799,800,801,802,803,806,807,808,809,810,811,812,813,814,815,816,821,822,823,824,826,827,828,830,834,839,843,847,848,849,860,862,868,885,891,892,893,894,895,896,900,901,902,903,904,907,914,916,919,920,921,922,923,924,925,927,928,929,930,931,932,933,934,935,940,943,944,946,947,949,951,965,971,973,974,975,976,977,982,985,986,987,992,994,1000,1019,1031,1032,1037,1045,1047,1048,1049,1050,1054,1055,1056,1057,1058,1059,1072,1080,1094,1098,1107,1109,1112,1113,1115,1124,1150,1153,1158,1159,1189,1190,1191,1193,1195,1196,1197,1224,1249,1250,1254,1262,1264,1266,1284,1297
16,17,24,27,30,31,34,48,69,71,72,74,75,76,80,82,83,84,87,90,91,95,96,105,107,110,111,115,116,117,120,122,127,128,131,132,133,160,164,170,247,337,378,379,423,444,452,460,485,564,584,594,617,689,712,758,764,780,785,836,838,847,854,870,875,881,889,897,930,937,940,942,948,953,959,965,966,973,980,981,982,983,984,988
4,8,10,11,13,18,21,22,23,24,25,26,27,40,46,47,48,54,60,61,65,67,169,170,172,441,666,676,756,758,773,779,781,785,821,960,977,978,981,982,983,984,988,989,990,995,996,998,999,1000,1001,1004,1005,1006,1012,1015,1017,1020,1027,1029,1034,1036,1045,1046,1053,1055,1056,1060,1141,1157,1158,1161,1165,1169,1210,1211,1212,1218,1223,1224,1302,1303,1304,1305
1,3,4,7,14,16,17,19,20,22,24,25,26,31,34,35,39,40,41,42,46,47,48,49,51,54,57,58,59,62,65,66,69,71,72,75,76,79,80,82,83,84,85,86,87,88,90,91,94,96,100,103,105,106,107,109,110,111,112,114,115,116,117,122,126,128,130,132,134,136,141,143,149,155,168,180,182,184,191,192,197,212,221,223,224,241,245,258,262,267,268,269,277,298,312,313,317,338,341,349,350,351,356,358,367,379,388,414,419,422,423,425,427,428,431,432,470,472,473,478,483,484,501,508,509,510,513,522,523,534,542,559,564,569,572,580,589,600,606,608,613,616,617,631,637,644,645,646,650,663,668,673,675,682,686,690,691,693,697,700,702,716,720,728,735,738,742,745,746,747,749,753,756,759,760,762,770,776,778,779,780,783,785,796,797,800,806,808,812,818,828,829,837,842,843,844,845,846,848,859,860,867,869,870,871,873,874,879,881,887,888,889,893,895,899,900,904,907,908,914,918,928,935,937,939,941,945,950,953,960,969,973,975,981,982,983,984,986,987,989,990,992,994,996,999,1001,1004,1006,1007,1010,1012
9,93,99,100,108,109,112,113,114,123,125,128,129,133,264,273,287,292,298,301,304,316,489,494,495,496,505,506,507,516,534,540,550,554,557,558,564,565,577,581,589,590,592,606,640,653,664,672,679,697,698,699,768,780,807,809,817,826,852,885,913,924,1038,1062,1076,1087,1090,1314,1315,1316,1352,1386,1387,1393,1394,1395,1396,1397,1398,1399,1403,1404,1408,1409,1410,1413,1414,1415,1416,1436,1438,1440,1459,1460,1462,1466,1467,1468,1469,1470,1475,1476,1477,1478,1480,1489,1491,1516,1517,1519,1521,1528,1529,1530,1531,1532,1533,1537,1541,1542,1544,1545,1546,1549,1551,1553,1555,1556,1558,1559,1560,1561,1562,1566,1569,1570,1571,1573,1576,1577,1578,1579,1580,1581,1585,1587,1589,1591,1593,1594,1596,1598,1601,1603,1604,1607,1609,1613,1634,1636,1637,1640,1642,1643,1648,1649,1652,1672,1673,1674,1675,1677,1679,1680,1683,1685,1686,1687,1689,1690,1696,1697,1698,1699,1700,1711,1720,1721,1822,1823,1829,1833,1839,1840,1841,1849,1851,1853,1857,1859,1861,1865,1876,1880,1883,1892,1895,1967,1969,1974,1979,1980,1982,1988,1989,1992,1993,2004,2008,2043,2061,2062,2167,2172,2173,2175,2177,2178,2183,2184,2199,2202,2207,2209,2210,2222,2223,2225,2229,2244,2247,2251,2252,2256,2263,2264,2270,2273,2276,2277,2278
3,5,6,7,9,17,19,24,25,28,34,35,36,41,49,50,51,52,53,54,55,56,57,59,60,61,62,63,64,65,66,67,70,72,73,74,75,77,82,84,85,87,88,90,91,92,93,94,95,96,97,98,99,101,103,105,107,109,110,112,115,116,117,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,136,151,168,200,201,224,237,239,245,258,263,264,292,305,308,320,327,342,345,350,355,373,375,391,392,398,418,425,441,442,458,466,470,476,499,510,518,519,529,533,559,561,564,569,580,582,586,599,601,608,614,615,643,658,660,667,668,670,676,679,687,691,693,697,705,714,716,720,723,731,743,750,753,755,758,761,766,783,785,791,803,804,806,807,818,819,827,843,848,853,864,865,875,878,884,885,890,894,895,897,899,900,902,906,907,912,913,918,921,924,927,928,930,937,944,946,947,950,952,953,959,963,964,965,970,974,977,978,979,985,987,988,990,994,996,997,999,1003,1007,1008,1009,1012
11,13,14,15,42,43,45,46,52,53,55,177,182,183,188,189,252,256,259,260,267,269,274,275,285,410,411,412,415,422,432,543,544,565,756,757,780,782,784,818,819,822,852,855,867,872,874,901,902,916,934,944,957,973,984,985,1006,1007,1020,1057,1074,1086,1087,1093,1118,1119,1122,1123,1135,1168,1176,1193,1195,1196,1200,1212,1214,1227,1232,1233,1236,1239,1241,1251,1254,1255,1256,1257,1262,1279,1281,1283,1284,1285,1287,1288,1302,1306,1307,1309,1310,1311,1314,1315,1318,1324,1325,1329,1331,1333,1334,1335,1337,1338,1339,1341,1343,1403,1404,1405,1417,1419,1421,1422,1423,1425,1426,1430,1431,1432,1433,1435,1436,1439,1443,1444,1450,1457,1458,1460,1462,1464,1466,1467,1468,1470,1472,1473,1474,1476,1477,1480,1495,1497,1498,1509,1514,1515,1516,1519,1525,1531,1535,1537,1540,1542,1543,1547,1549,1551,1554,1557,1558,1571,1574,1575,1576,1584,1597,1600,1615,1618,1635,1649,1650,1691,1692,1698,1700,1702,1712,1715,1718,1719,1720,1723,1727,1728,1730,1732,1736,1740,1742,1743,1746,1747,1749,1815,1862,1866,1867,1868,1871,1892,1894,2015,2016,2018,2020,2033,2035
0,1,8,9,13,14,18,21,22,25,27,28,29,30,36,37,40,42,43,44,50,52,54,55,56,59,60,63,64,65,67,73,74,77,78,79,80,86,89,92,93,94,95,97,98,99,101,102,104,107,108,112,113,119,121,124,125,127,129,130,131,132,133,134,137,194,235,252,253,257,261,264,280,287,294,309,321,327,334,338,355,356,364,376,384,385,390,391,405,438,467,475,476,477,482,496,498,538,540,548,557,558,561,567,568,574,595,602,625,627,630,634,639,640,641,642,659,661,662,664,665,692,705,713,718,724,730,734,739,743,750,751,752,753,756,757,770,774,781,786,794,803,805,808,813,817,820,823,841,855,856,858,861,863,868,876,878,883,885,890,891,895,903,906,911,913,921,925,929,931,932,933,940,944,950,951,954,956,962,963,967,968,974,977,978,990,991,993,994,995,996,997,1002,1005,1008,1009,1012
2,7,8,12,15,19,21,24,28,29,32,54,56,70,72,76,89,90,102,103,107,108,113,116,146,177,184,188,193,194,201,202,204,205,211,287,291,293,302,306,319,325,332,336,364,366,372,384,394,395,426,436,444,500,513,528,573,583,639,670,699,700,714,716,717,719,720,722,729,731,732,733,735,738,739,740,741,743,744,745,747,748,750,751,754,755,767,768,773,774,778,779,780,782,784,785,788,790,791,797,798,800,801,802,803,809,811,813,819,821,830,831,833,837,850,851,852,860,861,863,864,865,866,868,870,936,975,976,982,984,988,990,992,993,995,998,999,1001,1004,1006,1010,1013,1014,1017,1022,1025,1026,1031,1032,1039,1047,1050,1053,1055,1056,1058,1072,1075,1089,1091,1092,1094,1097,1098,1104,1107,1111,1114,1115,1117,1118,1119,1120,1124,1126,1129,1143,1144,1148,1150,1153,1155,1158,1165,1179,1185,1196,1204,1205,1207,1212,1223,1226,1239,1271,1272,1303
0,1,2,7,8,15,18,20,22,27,42,44,55,57,60,67,69,71,78,79,83,86,87,93,95,97,98,104,108,116,119,121,124,125,127,129,131,133,134,176,190,194,285,346,354,424,541,551,581,592,616,641,684,736,743,761,774,794,813,817,839,877,883,893,903,919,938,946,954,968,972,973,978,986,997,1005,1008
2,5,6,9,13,14,16,19,23,26,27,29,34,38,39,41,42,43,44,47,63,67,68,70,95,122,137,138,156,177,196,200,202,206,208,221,223,227,1613,1614,1616,1617,1618,1619,1631,1633,1634,1635,1661,1662,1663,1664,1665,1689,1691,1693,1695,1711,1717,1720,1721,1723,1737,1740,1741,1742,1746,1749,1754,1756,1759,1777,1782,1783,1793,1795,1813
1,5,8,9,11,12,18,19,27,36,37,38,42,43,44,50,52,54,55,60,61,67,70,71,73,78,81,84,86,89,93,94,95,97,98,99,101,102,108,113,118,119,124,125,129,131,133,137,138,162,172,177,190,193,194,222,248,283,301,316,325,329,376,382,384,386,393,417,418,430,441,445,454,459,463,464,474,476,497,498,499,507,518,541,547,553,555,557,560,562,573,574,582,585,593,595,599,602,606,622,630,640,643,658,667,670,695,698,704,710,719,730,743,750,754,769,774,786,789,791,792,794,798,799,804,805,810,815,823,825,826,827,833,850,852,853,858,862,864,877,883,885,891,892,896,911,917,921,926,931,933,940,943,952,956,961,963,967,968,977,978,985,991,996,997,1000,1005,1009
2,26,37,40,43,113,142,143,148,181,182,183,194,214,217,222,239,260,262,267,268,269,293,344,352,376,388,413,443,446,466,468,478,505,526,542,588,628,635,639,640,642,656,692,805,883,959,960,986,995,996,998,1000,1001,1002,1003,1004,1009,1010,1013,1017,1021,1030,1031,1036,1040,1041,1043,1044,1046,1047,1048,1049,1050,1059,1067,1074,1089,1090,1092,1103,1111,1113,1114,1116,1117,1121,1122,1123,1132,1139,1143,1145,1149,1152,1154,1155,1156,1164,1169,1171,1175,1176,1193,1195,1196,1199,1202,1203,1204,1205,1206,1207,1218,1219,1281,1282,1284,1285,1301,1316,1317,1318,1375,1384,1387,1391,1393,1396,1397,1398,1406,1407,1408,1409,1413,1415,1417,1425,1455,1459,1470,1474,1492,1497,1498,1501,1503,1505,1509,1510,1512,1513,1518,1525,1536,1560,1561,1585,1587,1597,1598,1611,1613,1624,1628,1651,1659
10,13,15,18,20,22,29,41,42,43,50,51,52,54,57,59,60,61,63,64,66,69,70,71,72,73,80,83,84,85,86,89,90,91,94,95,98,99,102,103,104,105,108,109,110,113,116,119,120,122,124,125,127,129,131,133,134,136,154,156,163,219,237,292,294,309,316,346,367,384,401,441,463,472,503,510,511,533,537,544,547,558,560,564,575,588,594,600,608,613,616,657,661,663,668,674,676,677,691,714,715,716,720,755,757,762,798,817,819,837,843,850,854,860,864,872,874,875,876,887,890,893,896,901,904,917,920,921,925,930,931,950,952,956,961,962,968,974,976,978,980,983,985,986,991,993,996,997,1003,1008,1009
1,9,14,16,31,41,43,44,50,54,122,130,131,150,168,180,192,195,196,199,201,203,228,283,339,347,354,445,449,451,452,453,454,456,464,466,528,530,559,562,577,579,600,602,627,628,656,658,659,691,728,746,751,759,797,798,827,828,830,831,832,833,838,844,852,854,855,856,857,861,863,864,867,868,869,870,885,886,888,889,890,902,923,924,928,929,930,931,935,938,939,943,944,950,951,952,953,978,979,981,982,984,996,998,1000,1001,1019,1022,1024,1025,1029,1041,1042,1067,1068,1121,1122,1124,1125,1128,1189,1205,1216,1261,1264,1300,1302,1304,1306,1312,1315,1317,1335,1336,1341,1361,1363,1367,1377,1397,1398,1403,1406,1422,1427,1442,1445,1473,1474,1476,1477
4,9,15,17,19,31,35,36,39,41,52,54,57,60,61,62,64,65,66,67,69,72,73,80,82,87,88,90,91,93,94,96,97,98,105,106,107,109,110,111,112,113,114,115,116,117,119,121,122,124,125,126,127,128,129,130,131,132,133,134,135,136,139,156,158,202,210,220,235,252,258,260,262,263,268,269,277,280,310,311,313,318,319,350,379,398,400,417,422,431,441,455,458,475,476,501,503,506,509,510,518,522,532,561,564,569,586,591,601,608,613,615,635,643,647,652,658,660,670,672,675,676,683,689,690,693,701,706,707,722,723,730,731,737,741,749,751,755,760,770,778,780,783,791,792,793,803,811,818,839,841,843,844,846,861,864,867,871,874,875,881,887,889,899,900,904,906,907,913,918,927,929,930,939,941,945,947,950,959,960,963,969,973,974,975,978,985,990,991,994,996,997,999,1006,1007,1008,1012
6,13,17,20,21,33,83,108,110,111,116,117,120,126,132,133,135,137,138,257,262,266,357,367,443,459,472,474,477,479,485,491,514,525,528,529,537,538,548,550,1519,1521,1524,1526,1537,1538,1545,1552,1556,1566,1572,1575,1647,1659,1661,1664,1665,1668,1669,1671,1673,1674,1675,1676,1681,1682,1683,1684,1685,1686,1688,1689,1690,1696,1697,1699,1702,1703,1705,1706,1714,1715,1719,1720,1721,1745,1746,1747,1754,1757,1758,1760,1761,1762,1765,1768,1769,1770,1773,1776,1778,1779,1784,1793,1800,1808,1809,1810,1820,1821,1824,1825,1826,1829,1831,1837,1838,1854,1858,1860,1862,1863,1864,1868,1869,1870,1871,1875,1877,1878,1883,1887,1888,1890,1891,1892,1893,1895,1897,1898,1899,1902,1903,1904,1905,1906,1907,1908,1909,1915,1916,1917,1918,1919,1920,1921,1924,1926,1928,1932,1938,1945,1947,1949,1951,1952,1954,1956,1996,1997,2003,2004,2008,2010,2012,2013,2018,2020,2021,2026,2028,2030,2032,2036,2038,2039,2044,2045,2046,2047,2053,2057,2107,2108,2183,2184,2187
1,7,10,13,15,18,20,22,29,31,42,43,50,54,57,58,59,60,61,64,66,68,69,70,71,72,73,74,78,79,80,83,87,89,90,92,93,94,95,97,98,99,102,103,104,105,108,109,110,113,116,119,120,122,123,124,125,127,129,131,132,133,134,140,154,163,164,167,183,199,240,252,284,287,292,294,298,314,335,346,358,388,390,401,403,435,439,444,459,461,463,472,473,510,511,542,543,544,556,560,568,583,595,599,613,616,617,656,657,671,674,676,679,691,701,702,712,713,724,726,739,748,756,757,760,762,774,780,786,798,819,825,826,830,840,843,850,854,857,858,875,876,882,887,890,893,896,901,904,917,919,920,921,924,925,930,942,943,946,950,952,954,955,956,957,962,966,967,968,973,974,976,978,979,983,986,991,993,996,997,1003,1005,1009,1011
1,3,6,8,16,17,34,35,36,41,45,46,52,57,61,84,86,91,93,95,103,105,106,120,176,178,184,185,186,193,202,209,211,212,213,217,224,236,238,240,242,248,266,274,295,307,312,377,473,474,569,570,576,630,635,640,648,668,671,684,686,688,701,702,704,705,706,707,714,715,717,718,724,725,727,728,730,731,732,733,734,735,737,744,745,746,751,753,755,756,757,758,759,761,765,766,767,768,769,776,779,784,785,788,792,793,794,795,801,803,804,805,808,811,814,817,818,821,828,829,830,836,837,839,859,861,862,864,865,869,871,872,873,874,879,881,888,893,897,898,900,901,905,906,911,916,919,945,947,994,995,1000,1002,1006,1007,1010,1022,1023,1025,1029,1043,1044,1045,1047,1048,1059,1060,1061,1062,1069,1081,1093,1094,1096,1100,1101,1102,1106,1109,1110,1111,1112,1116,1121
1,5,9,11,12,13,18,19,21,29,31,36,38,43,44,45,47,51,52,55,57,61,63,66,67,69,78,80,81,84,85,89,90,91,93,95,96,97,98,99,101,102,104,105,108,110,111,113,116,117,119,120,121,122,124,125,127,129,131,132,133,142,172,185,193,196,206,207,208,242,244,248,252,260,264,268,270,279,280,299,321,327,334,335,344,345,355,358,363,371,381,382,393,395,398,407,410,416,429,444,454,469,472,477,496,497,498,499,506,508,518,519,526,531,538,553,555,557,565,574,580,595,599,601,604,605,608,610,612,616,619,620,626,630,634,640,641,643,645,648,650,652,660,662,663,665,667,670,676,691,695,700,701,702,704,706,710,718,719,721,730,736,741,742,743,746,750,752,755,756,761,762,767,769,774,779,788,789,792,793,802,807,813,818,819,826,827,834,839,841,843,848,849,853,854,855,858,860,862,863,864,867,874,883,887,888,891,898,904,908,912,913,919,920,921,923,924,929,931,932,936,943,944,947,949,954,956,966,967,968,970,973,974,977,978,980,982,983,984,985,991,993,997,1001,1003,1005,1008,1009
17,41,43,45,64,68,71,96,125,131,134,136,157,161,166,167,176,193,220,252,267,274,285,298,318,329,357,412,427,433,449,450,471,484,502,580,593,623,631,809,813,817,818,832,839,841,864,945,967,968,983,1004,1055,1061,1067,1149,1186,1222,1230,1241,1248,1249,1250,1251,1255,1260,1264,1271,1272,1273,1274,1288,1289,1292,1296,1297,1303,1309,1312,1315,1320,1321,1324,1325,1329,1330,1331,1332,1335,1338,1341,1342,1343,1347,1350,1355,1357,1358,1359,1361,1362,1368,1369,1371,1372,1373,1376,1377,1378,1380,1387,1388,1393,1395,1399,1404,1405,1409,1413,1418,1419,1421,1423,1426,1427,1428,1431,1434,1439,1443,1444,1449,1451,1460,1466,1468,1469,1470,1471,1475,1486,1487,1490,1508,1522,1523,1524,1525,1535,1537,1538,1539,1540,1542,1557,1558,1560,1565,1570,1571,1572,1577,1579,1580,1585,1586,1595,1613,1625,1632,1644,1661,1669,1671,1676,1679,1680,1683,1734,1739,1740,1743,1744,1754,1765,1766,1771,1772,1780,1783,1787,1788,1795,1798,1799,1802,1811,1812,1813,1820,1835,1844,1849,1854,1869,1871,1874,1875,1884,1886,1887,1894,1910,1911,1921,1922,1928,1948,1949,1958,1959,2016,2025,2026,2031,2036,2090,2093,2111,2113,2114,2116,2121,2125,2127,2141,2145,2149,2150,2155,2166,2167,2171,2172,2173,2177,2180,2295
1,5,8,9,11,12,15,19,22,31,36,40,51,52,55,60,61,63,64,66,67,73,74,79,80,81,84,85,90,91,92,93,94,95,96,97,98,99,101,105,110,111,112,113,116,117,119,120,121,122,124,125,127,129,131,132,133,134,136,137,159,162,169,178,180,184,185,193,194,196,202,206,213,229,241,249,259,260,270,279,282,324,345,358,381,383,401,410,416,418,429,444,472,474,476,478,488,497,508,529,531,533,542,544,561,565,586,596,599,615,620,631,634,643,648,652,662,665,676,684,690,692,693,698,702,704,706,723,724,730,743,746,748,750,751,753,755,756,760,761,762,765,769,774,784,785,786,787,788,791,796,804,807,809,829,837,849,853,854,855,858,863,880,887,888,891,895,904,906,913,916,921,923,924,930,932,936,944,947,950,954,956,963,967,970,972,973,977,979,980,983,985,990,991,995,996,1001,1009
23,25,42,43,144,154,162,183,193,196,202,203,208,214,225,327,536,540,598,600,607,652,653,662,675,679,686,688,693,698,703,706,810,814,815,836,840,858,861,879,895,896,1266,1307,1328,1332,1339,1351,1354,1360,1362,1390,1397,1403,1405,1410,1415,1489,1490,1491,1494,1495,1496,1500,1503,1508,1509,1511,1513,1516,1518,1521,1522,1524,1525,1528,1529,1535,1538,1540,1541,1542,1543,1544,1545,1547,1548,1549,1553,1554,1555,1557,1560,1566,1567,1569,1570,1571,1577,1579,1583,1585,1586,1588,1589,1590,1593,1596,1599,1601,1602,1603,1606,1608,1609,1610,1611,1612,1614,1615,1616,1618,1624,1630,1631,1632,1633,1635,1637,1638,1639,1640,1642,1645,1649,1690,1692,1693,1699,1702,1705,1711,1713,1716,1719,1720,1722,1723,1724,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1741,1742,1748,1751,1755,1822,1823,1895,1899,1904,1908,1912,1913,1914,1923,1982,1983,1985,2058,2059,2062,2070,2086,2087,2089,2091,2094,2287,2291,2292,2302,2440,2445,2446,2489,2490,2501
0,1,13,18,31,33,42,43,44,45,54,55,56,60,61,63,66,68,70,71,78,90,92,93,95,98,102,104,108,110,113,119,121,124,125,127,129,131,133,134,140,187,194,231,249,282,287,346,384,391,444,450,451,459,500,511,525,568,592,661,671,681,694,699,743,744,774,785,786,787,798,803,805,810,813,822,850,852,856,883,896,917,919,920,921,927,931,944,949,950,952,954,961,962,967,968,978,985,991,993,997,1005,1008
1,5,6,19,20,34,43,59,61,96,118,124,125,127,129,132,133,169,176,198,199,200,201,204,211,226,234,238,254,258,262,270,278,280,297,298,311,333,367,686,687,688,689,690,691,692,694,695,696,697,698,699,700,701,720,726,733,734,739,741,744,748,762,766,771,775,778,779,780,781,787,788,794,801,808,809,828,834,835,841,862,865,866,867,870,871,872,873,875,878,881,884,886,892,893,906,913,915,923,930,937,940,941
3,4,6,8,10,14,17,19,20,22,23,24,25,27,28,30,31,32,34,35,37,41,44,46,47,48,49,50,51,53,54,55,56,57,59,62,64,65,66,69,70,71,74,75,76,77,79,80,82,83,84,85,86,87,88,90,91,92,93,94,96,97,98,100,102,106,107,108,109,110,111,112,113,114,115,116,117,120,122,125,126,128,130,131,136,144,146,147,148,149,168,177,181,190,194,205,212,214,217,219,224,231,239,257,258,262,272,286,291,292,305,312,313,316,338,353,369,370,376,382,387,400,404,426,431,437,439,448,455,460,503,504,510,513,524,528,531,534,559,570,580,591,592,593,603,607,608,610,613,614,631,646,649,663,683,687,688,690,693,700,704,716,738,745,749,756,762,763,766,768,770,777,778,783,784,793,796,797,806,812,818,819,828,843,850,851,874,877,878,881,883,887,889,890,895,896,899,900,904,906,907,908,914,915,918,928,940,941,945,948,953,955,957,960,969,975,978,980,982,986,987,989,990,992,993,994,996,999,1001,1004,1006,1007,1010,1012
10,21,23,24,25,34,63,79,80,86,87,89,96,100,123,129,133,193,195,253,281,290,291,414,418,424,434,459,462,495,517,518,529,533,537,556,560,584,586,588,589,590,607,609,615,622,630,649,654,660,661,743,759,784,826,829,837,838,851,855,856,857,858,982,983,1005,1023,1028,1031,1036,1053,1077,1089,1156,1172,1179,1180,1188,1191,1192,1219,1220,1233,1235,1236,1237,1249,1250,1251,1253,1255,1260,1261,1262,1265,1272,1278,1279,1280,1282,1284,1286,1288,1289,1290,1293,1351,1373,1374,1377,1378,1380,1393,1394,1395,1397,1402,1403,1405,1406,1436,1437,1439,1440,1443,1445,1448,1449,1450,1456,1463,1464,1465,1466,1467,1468,1469,1472,1474,1479,1483,1484,1485,1489,1491,1496,1497,1498,1499,1501,1502,1507,1511,1514,1516,1517,1524,1525,1533,1535,1536,1538,1541,1542,1547,1549,1551,1552,1561,1565,1570,1575,1582,1586,1587,1590,1591,1592,1595,1596,1597,1598,1607,1608,1609,1634,1635,1636,1652,1653,1654,1659,1662,1665,1672,1676,1693,1696,1704,1706,1709,1710,1711,1718,1726,1727,1731,1734,1743,1744,1746,1756,1773,1782,1792,1801,1802,1809,1822,1828,1835,1853,1856,1858,1859,1864,1865,1922,1997,2002,2033,2038,2161,2169
6,8,10,14,15,16,17,19,20,22,23,24,25,26,28,30,31,32,34,35,37,46,47,48,49,50,51,53,54,56,57,58,59,62,64,65,69,72,74,75,76,77,79,80,82,84,85,86,87,88,90,92,94,96,100,102,105,106,107,109,110,111,112,114,115,116,117,122,123,126,127,128,130,132,134,136,139,146,155,156,166,182,192,196,212,220,253,288,305,312,323,335,366,379,414,419,433,442,448,470,479,483,492,510,515,522,523,524,531,542,564,569,588,589,590,603,608,613,616,623,642,646,650,651,663,668,689,692,693,704,708,709,735,748,749,768,770,778,782,783,785,793,806,811,812,818,828,833,837,842,843,846,849,859,866,867,871,873,881,888,889,895,898,899,900,904,906,909,915,918,923,928,934,942,948,950,953,955,957,960,973,975,987,989,990,992,994,995,996,999,1004,1006,1007,1010,1011,1012
2,5,7,10,52,56,57,62,64,72,74,76,78,79,80,84,86,88,100,114,122,154,157,163,173,174,178,226,233,237,243,245,246,263,282,300,303,304,311,327,331,332,333,334,337,382,383,400,408,426,436,438,443,447,449,451,462,497,507,508,519,534,539,549,595,596,602,606,620,667,668,670,673,682,683,685,687,689,709,711,713,715,716,720,721,723,725,726,728,730,731,736,740,741,742,744,745,749,750,763,764,766,768,770,771,772,773,774,775,776,777,778,779,780,781,782,783,791,793,799,800,810,811,813,814,815,816,817,818,820,821,824,829,869,872,874,881,882,883,884,887,895,899,903,904,909,911,912,917,918,919,926,927,929,930,931,935,946,947,948,962,963,964,966,974,975,976,990,991,995,1000,1002,1003,1004,1005,1006,1010,1017,1018,1020,1021,1024,1032,1033,1035,1036,1040,1042,1043,1057,1090,1099,1102,1104,1118,1119
3,4,15,17,19,20,22,23,25,26,28,31,32,34,35,40,41,47,48,51,54,57,58,59,62,64,65,66,69,70,74,75,77,79,80,83,84,85,86,87,88,90,91,92,94,96,100,105,106,107,109,110,111,112,114,115,116,117,120,122,124,126,127,128,130,132,144,169,182,221,230,241,254,268,290,292,295,297,313,316,336,337,358,366,368,379,390,400,401,405,420,422,427,431,437,449,458,473,479,480,486,503,508,510,522,524,542,556,569,576,583,589,597,598,608,613,616,623,631,635,642,645,651,663,666,668,669,688,691,697,700,702,704,716,722,724,731,733,735,742,745,748,754,756,762,763,766,768,770,773,776,777,780,783,785,787,793,796,797,811,812,818,824,837,843,844,846,849,859,860,870,871,873,874,875,880,881,887,888,889,898,904,906,907,908,909,918,924,928,935,937,942,945,953,955,959,960,964,965,966,969,970,972,975,982,983,986,987,988,989,992,994,999,1001,1004,1006,1010
1,2,4,6,12,13,30,31,32,43,53,56,58,108,110,112,117,122,231,235,237,240,241,243,244,245,246,265,286,287,288,295,305,309,323,327,344,359,488,495,498,514,517,521,525,526,538,559,604,670,677,688,698,701,715,718,724,736,739,774,776,782,783,905,906,920,985,986,987,988,990,991,998,999,1000,1001,1002,1003,1004,1005,1006,1008,1009,1010,1012,1015,1016,1017,1018,1020,1021,1022,1023,1024,1026,1027,1029,1031,1133,1199,1201,1203,1206,1207,1209,1210,1211,1215,1217,1218,1221,1223,1225,1233,1234,1235,1236,1238,1239,1241,1242,1243,1245,1247,1248,1249,1259,1269,1273,1278,1280,1283,1287,1288,1289,1290,1294,1295,1320,1324,1339,1340,1350,1352,1355,1361,1362,1363,1364,1366,1377,1378,1379,1380,1381,1385,1391,1394,1395,1400,1426,1433,1440,1451,1452,1453,1456,1459,1460,1482,1484,1494,1495,1498,1500,1501,1570,1571,1575,1578,1579,1585,1586,1626,1627,1628,1629,1630,1632,1633,1634,1639,1640,1641,1642,1643,1644,1645,1646,1648,1657,1658,1659,1661,1663,1667,1668,1669,1673,1676,1705,1724,1726,1737,1743,1744,1756
5,6,7,9,10,13,15,17,18,19,20,21,23,24,26,31,33,35,36,41,42,44,45,49,53,55,57,58,60,61,62,64,65,67,68,71,72,73,77,80,81,83,84,85,86,87,89,91,93,94,96,97,99,102,103,104,107,108,109,110,111,112,113,115,116,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,140,150,153,163,164,165,168,190,193,217,218,227,228,235,237,254,270,273,298,305,307,315,327,328,332,346,357,367,376,382,399,413,419,435,441,450,457,459,461,463,470,505,510,515,522,523,533,561,564,568,578,580,592,593,594,613,621,628,630,635,636,651,657,669,676,679,681,689,695,698,707,709,712,715,718,721,738,741,752,753,755,761,762,764,772,776,778,780,786,789,795,819,820,824,839,842,843,854,857,860,870,872,875,876,880,881,882,883,885,886,893,895,897,902,906,910,913,916,921,922,925,929,930,941,942,946,949,950,953,956,959,960,962,963,964,968,973,974,976,982,986,987,988,990,993,994,996,999,1000,1003,1008,1009,1012
1,44,51,54,55,68,78,79,80,81,102,104,114,116,140,141,183,184,187,221,224,226,249,291,311,312,315,351,382,383,403,428,448,457,473,485,490,495,499,500,501,603,604,605,618,624,631,632,636,655,656,657,688,708,723,767,784,793,840,868,874,899,900,958,1038,1040,1041,1051,1185,1198,1216,1229,1241,1243,1544,1550,1596,1602,1635,1645,1654,1656,1657,1658,1659,1662,1663,1664,1665,1666,1667,1672,1676,1686,1687,1688,1691,1692,1693,1694,1695,1697,1703,1704,1705,1707,1708,1714,1715,1716,1717,1718,1724,1726,1730,1731,1732,1734,1741,1742,1749,1753,1763,1766,1767,1775,1777,1779,1783,1784,1796,1803,1804,1817,1818,1822,1825,1826,1829,1831,1843,1846,1850,1863,1867,1871,1874,1886,1888,1889,1891,1894,1895,1921,1928,1929,1930,1941,1943,1950,1954,1955,1956,1957,1961,1963,1972,1973,1975,1976,1977,1989,1990,1993,2011,2012,2027,2028,2041,2043,2074,2079,2080,2085,2088,2090,2100,2105,2111,2113,2117,2119,2123,2126,2142,2148,2149,2154,2156,2157,2158,2185,2188,2193,2196,2225,2232,2233,2240,2285,2301,2317,2321,2328,2329,2333,2334,2343,2344,2358,2363,2446,2447,2450,2461,2462,2474,2486,2487,2492,2502,2504,2517,2563
0,4,8,13,14,15,19,22,24,25,28,30,31,34,35,40,44,47,48,49,51,54,56,57,58,59,62,64,65,66,68,69,70,71,72,74,75,77,79,80,82,83,85,86,87,88,90,91,92,94,96,100,103,105,106,107,109,110,111,112,114,115,116,117,119,120,122,126,128,130,132,142,156,186,190,191,219,221,224,230,239,254,258,266,268,269,273,277,278,281,288,290,292,297,298,300,310,312,313,316,317,332,335,338,350,353,359,362,367,370,375,379,384,404,405,409,412,422,427,431,433,437,439,444,448,465,469,470,472,478,480,483,486,501,503,504,506,508,509,510,531,542,557,564,569,580,589,590,600,608,610,613,614,616,617,619,623,627,631,644,649,650,655,663,668,675,678,682,686,687,690,691,692,693,697,699,700,702,703,704,705,711,716,720,724,731,745,746,747,749,753,756,758,759,760,762,770,778,779,780,783,787,793,797,800,802,806,808,811,812,818,828,829,837,842,843,844,846,847,848,849,854,859,867,868,871,874,875,879,887,888,889,895,896,898,900,904,906,907,908,909,918,928,941,945,951,955,960,964,965,966,971,973,975,980,983,989,990,992,994,996,999,1001,1004,1006,1007,1010,1012
3,7,8,9,17,21,46,58,60,84,86,90,100,104,107,112,113,130,133,137,145,161,165,182,188,203,205,206,230,255,256,263,266,268,271,274,286,295,305,337,344,345,449,450,466,507,521,525,529,540,560,1282,1286,1330,1347,1375,1382,1386,1396,1415,1418,1424,1444,1451,1452,1467,1475,1476,1478,1481,1489,1499,1500,1502,1503,1504,1505,1507,1510,1512,1513,1515,1517,1518,1523,1525,1526,1527,1528,1529,1531,1533,1534,1537,1538,1540,1541,1543,1544,1545,1547,1549,1550,1552,1553,1555,1556,1557,1558,1566,1567,1571,1572,1576,1577,1579,1584,1585,1587,1589,1590,1592,1593,1594,1597,1598,1599,1621,1622,1623,1624,1625,1626,1627,1629,1633,1634,1641,1642,1643,1644,1647,1649,1651,1654,1656,1657,1658,1659,1662,1703,1706,1708,1710,1712,1713,1716,1718,1721,1724,1731,1733,1734,1735,1739,1740,1741,1747,1751,1757,1759,1762,1764,1765,1766,1767,1768,1771,1773,1794,1795,1796,1802,1803,1806,1816,1818,1822,1823,1827,1828,1829,1831,1839,1840,1842,1845,1848,1852,1853,1856,1858,1860,1866,1867,1870,1877,1878,1881,1883,1885,1889,1891,1893,1894,1895,1896,1903,1904,1905,1918,1925,1928,1931,1932,1935,1937,1939,1940,1944,1949,1952,1955,1958,1967,1974,1991,1993,1996,1999,2007,2008,2010,2020,2021,2029,2031,2033,2041,2043,2047,2049,2052,2057,2060,2064,2065,2070,2073,2074,2077,2080,2128,2129,2130,2133,2855,2856
1,5,9,13,14,18,21,22,27,28,29,30,36,37,38,42,43,44,49,50,52,54,55,56,58,59,60,63,64,67,71,72,73,78,79,84,86,87,89,91,92,93,94,95,97,98,99,101,102,104,105,108,109,111,112,113,118,119,120,121,124,125,127,129,130,131,132,133,134,138,149,154,158,193,194,196,225,226,231,235,248,280,289,298,311,366,376,382,391,434,438,448,458,459,467,475,481,482,498,538,541,542,548,552,557,561,567,568,571,579,602,617,622,627,642,652,659,660,661,674,676,677,698,705,713,718,719,726,727,739,743,750,751,752,753,755,761,774,781,786,790,793,794,799,805,808,813,816,819,820,823,826,841,847,851,852,853,858,863,883,885,890,891,895,903,906,911,913,916,920,921,924,925,929,931,933,938,940,944,949,950,954,956,957,962,963,968,972,974,978,990,991,993,996,997,1000,1003,1005,1008,1009,1012
10,12,25,28,42,52,53,54,67,78,80,83,89,90,93,98,99,106,107,110,111,113,118,133,135,140,147,157,160,164,165,167,170,172,185,189,190,191,192,193,244,255,258,272,276,326,336,337,344,356,357,412,413,427,438,440,441,583,584,705,708,741,751,820,826,868,870,989,990,991,993,996,997,998,999,1000,1009,1011,1012,1013,1014,1016,1017,1019,1021,1029,1030,1031,1033,1039,1048,1049,1050,1052,1064,1066,1068,1074,1075,1076,1078,1079,1088,1089,1090,1091,1093,1094,1096,1102,1109,1110,1111,1112,1124,1126,1132,1133,1134,1139,1140,1142,1144,1145,1165,1166,1167,1168,1169,1171,1173,1178,1180,1187,1190,1192,1193,1197,1198,1201,1202,1204,1231,1258,1276,1279,1280,1281,1282,1283,1285,1329,1348,1349,1350,1369,1370,1372,1373,1374,1378,1379,1380,1381,1382,1383,1384,1387,1396,1397,1402,1403,1406,1423,1424,1428,1429,1432,1442,1445,1446,1449,1454,1455,1456,1458,1468,1474,1476,1501,1507,1515,1520,1523,1548,1570,1571,1578,1712,1717,1723
4,6,10,14,15,16,17,19,20,22,23,24,28,30,32,34,35,41,46,47,48,49,50,51,53,54,56,57,58,59,62,65,68,70,71,74,75,77,79,80,83,85,87,88,90,91,92,96,100,103,107,108,109,110,111,112,115,116,117,118,122,128,130,132,164,177,205,268,310,312,331,339,365,367,381,404,419,431,457,462,465,466,514,524,531,542,558,559,580,588,590,591,594,603,610,628,635,646,669,688,690,697,699,704,712,747,748,763,770,772,779,793,808,811,812,828,832,847,849,850,854,856,867,870,875,880,881,886,896,897,899,900,917,918,922,935,941,952,959,971,973,981,982,984,986,990,993,994,999,1001,1010,1012
1,2,3,6,9,10,11,25,26,31,35,36,99,100,101,140,143,144,189,190,194,197,213,214,216,218,220,221,235,241,254,257,260,261,263,357,359,379,381,382,384,468,478,487,491,492,493,496,675,676,678,679,681,692,703,704,714,730,733,734,737,740,745,758,760,776,792,793,794,796,797,805,811,812,813,814,815,816,817,820,821,823,824,825,826,830,831,834,836,837,840,843,845,846,847,849,850,852,865,928,932,956,959,962,963,964,967,974,975,977,979,980,981,982,996,999,1013,1014,1019,1020,1022,1023,1024,1025,1026,1027,1028,1029,1030,1033,1035,1036,1037,1038,1039,1042,1044,1045,1048,1049,1050,1052,1058,1062,1064,1065,1066,1068,1070,1144,1323,1325
0,5,9,11,13,14,19,21,22,24,27,28,30,31,33,36,37,38,41,42,43,44,45,46,49,50,51,52,54,56,60,61,62,64,66,67,68,69,72,73,74,75,76,77,78,79,80,83,84,85,86,87,88,89,90,91,92,93,94,98,99,101,102,105,107,108,110,112,115,116,117,119,120,121,122,124,125,126,127,128,129,130,131,133,134,144,149,178,194,200,205,218,219,225,231,235,241,261,278,285,306,307,314,323,325,329,345,346,356,358,371,374,391,408,421,430,441,444,448,450,451,454,455,458,468,476,477,482,485,506,507,520,523,528,531,542,543,562,573,579,580,582,585,589,592,599,601,605,611,613,615,622,639,640,641,652,653,658,661,672,676,677,681,691,692,695,696,699,710,715,723,734,738,739,744,746,750,753,754,755,756,762,783,789,790,791,792,794,802,803,804,805,810,816,817,819,820,825,827,833,839,842,844,852,853,855,861,864,866,874,876,878,883,885,887,890,891,892,893,894,895,897,906,907,913,918,921,927,928,933,949,950,956,957,962,965,977,978,979,980,983,985,990,993,994,996,997,1000,1001,1002,1003,1005,1008,1009,1012
2,11,13,19,32,35,37,38,39,40,43,44,45,46,48,53,54,57,62,65,72,79,156,163,164,174,178,184,186,188,203,243,251,271,274,307,369,376,379,408,411,416,417,419,437,439,464,465,474,476,486,487,488,522,531,536,537,548,564,578,587,596,606,615,617,659,661,694,698,699,701,745,758,760,763,851,862,863,873,875,888,966,1091,1151,1153,1154,1155,1156,1158,1159,1161,1175,1176,1179,1181,1182,1184,1186,1187,1188,1189,1190,1191,1192,1195,1196,1197,1198,1199,1206,1208,1210,1211,1212,1214,1215,1216,1217,1218,1241,1245,1246,1247,1248,1249,1250,1251,1253,1254,1259,1260,1261,1266,1268,1269,1270,1271,1278,1279,1284,1285,1286,1289,1290,1296,1299,1305,1306,1319,1321,1324,1325,1326,1328,1329,1330,1331,1333,1334,1335,1337,1340,1346,1347,1348,1354,1361,1362,1363,1365,1387,1389,1391,1393,1413,1414,1415,1417,1419,1422,1432,1439,1440,1441,1442,1445,1450,1451,1454,1455,1456,1457,1458,1471,1472,1474,1482,1485,1493,1494,1495,1496,1501,1515,1520,1521,1522,1523,1524,1525,1529,1530,1532,1535,1543,1549,1550,1568,1569,1570,1571,1574,1586,1587,1599,1600,1601,1602,1604,1607,1611,1613,1614,1617,1618,1626,1627,1629,1658,1660,1663,1673,1690,1711,1712,1720,1731,1733,1734,1735,1745,1746,1767,1775,1853
0,1,5,10,11,17,19,25,29,30,31,41,43,47,50,51,52,53,54,55,60,61,62,63,64,66,67,69,70,71,73,75,76,78,80,81,84,85,90,93,94,95,96,97,98,99,101,102,105,108,111,112,113,115,117,119,121,123,124,125,127,129,131,132,133,134,152,173,203,206,221,224,226,270,279,282,315,329,338,345,358,366,377,384,386,447,467,469,474,504,518,521,538,550,560,571,574,579,580,582,593,604,614,616,626,637,641,645,662,664,670,687,691,698,700,702,704,723,736,737,739,743,746,753,755,756,762,774,788,789,793,795,797,804,816,817,823,825,827,833,841,847,853,858,861,863,864,865,874,885,887,890,891,897,898,902,905,906,908,913,917,919,924,929,936,943,944,947,948,950,952,954,956,959,963,967,974,979,990,996,997,1000,1005,1009
1,8,16,20,23,24,27,92,93,94,95,96,97,102,103,110,123,130,134,140,159,161,162,176,186,206,229,233,237,238,242,243,247,300,305,314,317,320,322,327,344,377,378,385,389,414,415,446,450,452,458,463,475,476,498,500,587,590,628,633,652,658,775,785,1001,1006,1008,1009,1010,1013,1014,1019,1020,1021,1022,1023,1024,1025,1036,1037,1039,1041,1042,1043,1044,1047,1051,1053,1055,1057,1058,1064,1065,1066,1082,1084,1085,1089,1090,1091,1092,1093,1110,1113,1118,1119,1120,1122,1123,1129,1130,1136,1139,1141,1143,1145,1146,1148,1156,1158,1170,1171,1172,1174,1175,1176,1178,1180,1181,1185,1187,1190,1205,1206,1207,1208,1210,1217,1221,1223,1253,1254,1257,1259,1270,1272,1278,1279,1280,1285,1286,1287,1293,1294,1295,1302,1303,1304,1306,1317,1318,1370,1375,1393,1394,1399,1408,1409,1411,1415,1417,1418,1422,1424,1427,1435,1438,1440,1442,1454,1458,1459,1462,1480
1,5,9,11,13,21,22,27,36,44,52,54,55,60,64,65,67,72,73,76,80,81,88,89,91,97,98,99,102,104,105,108,111,112,113,119,121,124,125,127,129,130,131,132,133,190,193,207,225,260,279,287,311,312,328,366,385,391,395,410,435,441,490,496,503,518,565,595,603,609,630,634,652,662,674,676,677,695,739,750,752,756,762,770,774,778,779,791,805,817,819,841,855,863,864,876,883,887,888,899,903,906,913,921,925,932,956,962,967,974,978,990,991,993,1002,1012
4,8,9,10,24,25,26,29,32,34,45,46,49,51,52,62,63,66,69,71,86,87,88,440,444,446,448,449,516,572,573,589,598,616,649,653,657,658,668,672,678,2365,2366,2367,2373,2374,2377,2378,2381,2383,2384,2386,2388,2389,2390,2391,2392,2394,2398,2399,2401,2402,2403,2406,2408,2411,2413,2414,2415,2416,2417,2418,2419,2424,2425,2427,2428,2429,2430,2431,2433,2435,2443,2451,2452,2453,2458,2459,2462,2463,2465,2466,2471,2476,2484,2498,2499,2504,2505,2506,2509,2517,2518,2520,2569,2574,2575,2641,2647,2648,2650,2660,2662,2674,3011,4698
13,20,54,71,102,108,134,463,657,739,901,962,993
1,3,4,8,16,17,2432,2434,2436,2443,2446,2447,2448
0,1,9,13,14,21,22,27,28,29,30,31,37,40,42,44,51,52,54,55,56,59,60,63,64,66,67,69,73,74,78,79,80,84,86,89,90,91,92,94,95,96,97,98,101,102,104,105,107,108,109,112,113,119,121,124,125,127,129,130,131,132,133,134,149,154,169,174,190,215,244,289,325,327,334,347,356,383,407,438,454,461,465,482,483,485,490,506,538,542,548,568,574,581,607,616,627,630,634,639,640,641,642,645,649,653,659,661,674,695,705,713,715,730,734,739,750,753,756,757,761,767,774,799,803,808,818,819,820,822,823,825,829,837,841,851,856,857,858,861,866,868,874,877,878,883,885,887,891,895,903,904,906,913,920,924,925,929,933,940,944,947,949,950,951,954,967,972,977,978,990,993,994,996,997,1002,1005,1008,1012
4,6,7,8,19,21,34,35,56,57,66,67,72,170,174,185,186,187,190,191,239,252,256,258,259,260,264,266,268,269,271,279,284,285,299,302,304,305,387,395,396,397,402,405,406,427,449,452,453,476,477,491,492,536,561,571,572,603,625,639,643,645,692,693,697,698,699,702,704,705,706,711,712,713,714,717,719,722,723,733,734,735,736,737,738,739,741,742,743,747,759,760,761,763,765,766,768,769,782,783,784,785,789,791,792,793,794,799,810,811,818,822,824,827,828,839,840,845,846,856,857,858,860,863,879,881,882,883,884,885,887,888,889,891,892,894,911,912,913,914,915,1012,1013,1014,1019,1020,1021,1024,1025,1026,1027,1028,1031,1034,1037,1038,1042,1044,1052,1053,1055,1056,1057,1058,1061,1062,1063,1066,1073,1074,1079,1086,1087,1091,1092,1093,1101,1135,1149
4,6,17,19,20,25,31,34,35,41,46,47,48,49,54,56,57,62,64,65,66,69,70,71,74,75,76,77,79,80,82,83,86,87,88,90,91,92,94,96,100,105,106,107,109,110,111,112,114,115,116,117,120,122,126,128,130,132,134,136,150,168,221,224,268,269,293,297,310,312,318,336,338,350,353,367,419,422,425,431,455,470,473,482,508,509,510,511,534,544,559,560,564,569,608,613,616,631,646,647,668,683,686,691,692,700,702,742,746,747,762,782,793,811,828,846,848,850,867,871,873,881,884,888,889,893,895,896,899,900,901,907,922,923,928,930,937,948,950,952,953,955,960,961,966,969,970,975,980,983,986,987,988,989,990,994,995,996,999,1004,1006,1007,1010,1012
9,17,18,19,20,22,28,30,36,41,49,82,84,86,87,88,89,93,95,96,114,117,119,125,130,131,137,140,143,148,157,162,163,165,182,195,215,216,224,249,253,266,275,284,291,304,306,311,352,371,374,389,398,407,439,455,461,471,475,476,479,480,481,482,483,487,488,489,494,495,498,499,500,502,504,508,509,517,519,527,531,532,533,534,536,542,548,549,550,552,555,556,565,568,569,577,579,590,595,596,597,598,599,605,607,608,609,610,613,614,615,616,618,631,632,638,641,647,660,661,663,667,668,670,683,684,686,687,690,695,696,699,700,701,702,703,708,713,716,717,721,726,728,729,730,731,732,735,741,742,743,749,752,753,754,755,756,763,769,778,815,823,827,829
1,3,4,10,14,15,16,22,25,26,28,30,31,34,38,39,46,48,50,51,53,57,58,63,66,74,75,76,77,80,82,84,86,87,91,96,97,100,105,106,107,110,111,113,114,117,120,128,131,132,136,143,146,181,185,205,230,250,267,273,290,297,331,336,351,353,364,369,379,423,432,437,443,453,461,502,509,513,515,529,550,581,584,588,598,603,614,629,632,639,675,683,686,688,708,735,740,745,746,748,754,763,772,777,785,802,831,838,847,859,869,870,878,886,888,890,909,934,935,937,939,941,944,945,955,959,969,970,980,981,982,989,992,994,1006,1010
2,44,95,96,116,117,214,216,217,218,256,276,278,279,280,300,307,356,357,364,366,368,375,376,394,470,673,675,682,683,779,803,820,824,825,828,833,835,843,900,982,984,1002,1008,1225,1244,1246,1250,1254,1307,1308,1309,1310,1320,1321,1324,1325,1335,1377,1380,1381,1382,1383,1384,1387,1396,1416,1423,1457,1458,1479,1549,1582,1586,1587,1600,1601,1603,1605,1609,1611,1616,1618,1619,1634,1635,1636,1642,1644,1646,1684,1685,1692,1709,1710,1711,1775,1810,1814,1815,1839,1841,1844,1864,1865,1866,1968,1969,1989,1991,1992,1994,1995,1996,1997,1998,2000,2001,2013,2016,2030,2031,2032,2033,2038,2043,2074,2076,2077,2094,2096,2106,2140,2188,2232,2234
2,4,6,10,14,15,17,19,22,24,25,28,30,31,32,34,35,40,41,47,48,49,51,54,56,57,59,62,64,65,66,69,70,71,72,74,75,77,79,80,82,83,85,87,88,90,91,94,100,103,105,106,107,109,110,111,112,114,115,116,117,120,122,126,128,130,132,134,135,136,149,152,156,159,160,169,171,181,182,186,192,197,199,200,203,212,219,220,221,226,230,245,246,258,262,268,269,274,281,290,291,295,297,308,310,311,312,313,335,338,350,353,354,356,358,359,362,367,368,369,400,401,405,409,412,415,422,427,428,431,442,455,469,470,472,473,478,484,503,504,508,509,510,511,515,522,537,544,560,564,569,571,580,583,586,589,591,600,603,608,609,610,613,616,623,631,635,642,649,650,655,663,668,675,682,683,686,689,690,691,692,693,700,702,704,714,716,720,731,742,745,747,748,749,756,759,760,768,770,775,778,779,780,781,782,783,787,793,798,802,806,807,811,818,828,837,843,846,847,849,850,854,859,870,871,872,873,874,875,879,887,888,889,893,895,896,897,898,899,900,904,906,908,915,918,923,928,930,935,941,942,945,950,951,959,960,961,964,965,969,971,973,975,980,981,982,984,986,987,989,990,999,1001,1006,1007,1010,1012
2,9,13,15,22,26,45,53,59,63,66,68,70,72,73,77,188,194,197,272,283,286,319,325,329,364,365,378,398,419,440,467,468,470,474,477,480,482,486,534,538,592,1088,1155,1196,1263,1266,1271,1358,1359,1437,1439,1446,1450,1465,1494,1539,1586,1603,1630,1643,1647,1657,1678,1682,1686,1746,1753,1754,1757,1758,1760,1761,1763,1764,1767,1768,1769,1775,1776,1777,1781,1782,1783,1786,1798,1799,1800,1804,1806,1817,1822,1823,1830,1833,1839,1848,1849,1850,1858,1859,1860,1871,1875,1879,1880,1881,1883,1884,1886,1892,1893,1909,1910,1911,1913,1914,1917,1918,1923,1924,1928,1929,1933,1934,1936,1941,1944,1949,1953,1954,1955,1963,1966,1967,1977,1978,1981,1982,1984,1987,1988,1994,1995,2000,2001,2002,2003,2004,2007,2022,2024,2026,2027,2035,2037,2040,2043,2046,2054,2057,2064,2067,2070,2072,2075,2076,2078,2082,2084,2088,2098,2118,2119,2121,2122,2124,2126,2132,2137,2138,2151,2155,2171,2172,2173,2180,2181,2184,2191,2192,2195,2196,2200,2203,2207,2209,2211,2215,2217,2223,2231,2234,2235,2240,2244,2247,2256,2258,2262,2267,2268,2275,2301,2311,2316,2323,2352,2353,2361,2363,2365,2373,2382,2419,2421,2425,2435,2438,2439,2455,2463,2499,2505,2509,2511,2522,2531,2541,2547,2566,2569,2583,2584,2607,2611,2613,2614,2617,2630,2631,2632,2638,2639,2644,2667,2668,2672,2673,2683,2684,2687,2688,2689,2692,2696,2699,2715,2727,2728,2735,2835,3305,3339,3343,3430,3433
0,4,6,9,14,15,16,17,19,20,21,22,23,24,25,26,28,30,31,32,34,35,40,41,42,46,47,48,49,51,54,56,57,58,59,62,64,65,68,69,70,71,72,74,75,76,77,79,80,82,83,84,85,87,88,90,91,92,94,96,97,100,102,105,106,107,108,109,110,111,112,114,115,116,117,119,120,122,124,126,128,130,132,133,134,136,139,143,146,148,149,150,156,168,171,196,197,212,218,219,223,226,254,262,268,269,274,278,281,293,295,298,307,310,312,315,336,338,341,350,356,362,364,367,370,387,391,404,419,425,431,437,448,453,455,470,482,483,486,509,510,511,522,523,524,528,534,542,544,557,559,560,564,572,580,594,597,603,607,608,610,613,614,616,617,623,627,637,643,645,646,650,657,671,673,675,687,693,697,702,709,712,718,738,749,756,758,760,764,768,770,778,780,782,783,793,798,802,806,810,811,812,818,828,837,843,846,848,849,850,854,856,867,875,879,881,889,893,895,896,897,899,900,904,906,907,908,909,917,918,923,928,930,935,937,941,945,950,952,953,955,959,960,961,965,966,973,980,981,982,983,984,986,987,990,992,994,996,999,1001,1004,1006,1007,1010,1012
2,12,13,15,26,30,31,32,42,46,47,52,53,55,63,64,65,72,78,89,100,106,107,130,131,156,189,191,194,196,211,232,241,245,248,269,272,297,298,299,302,317,320,324,332,339,349,360,368,379,384,387,404,456,522,523,527,528,545,565,566,609,610,613,641,645,648,657,675,727,766,785,807,838,869,870,893,904,905,935,944,972,1012,1013,1017,1021,1022,1023,1024,1027,1030,1031,1034,1035,1036,1037,1041,1044,1045,1046,1047,1048,1049,1051,1053,1056,1057,1058,1083,1084,1085,1086,1087,1088,1093,1095,1096,1098,1101,1102,1105,1106,1107,1114,1115,1125,1126,1127,1128,1129,1134,1137,1140,1142,1146,1149,1152,1154,1157,1159,1161,1162,1163,1165,1166,1167,1168,1171,1172,1173,1182,1184,1189,1191,1195,1196,1197,1199,1201,1207,1212,1217,1221,1222,1223,1232,1233,1234,1235,1236,1243,1244,1247,1248,1249,1251,1252,1260,1265,1266,1268,1269,1270,1272,1275,1276,1278,1279,1280,1283,1287,1299,1306,1307,1309,1311,1313,1314,1315,1316,1328,1333,1334,1345,1346,1348,1370,1371,1372,1375,1377,1390,1406,1407,1416,1418,1440,1441,1449,1451,1452,1476,1496,1500,1502,1503,1504,1505,1509,1518,1525,1527,1532,1533,1534,1539,1542,1545,1546,1548,1549,1554,1558,1559,1567,1568,1572,1574,1584,1589,1596,1599,1603,1608,1617,1623,1625,1636,1640,1641,1656,1672,1683,1726,1739
0,1,18,25,27,31,32,36,37,38,42,43,44,47,50,52,54,55,60,63,67,69,71,73,76,78,81,84,86,89,90,93,94,95,97,98,99,101,102,105,108,111,113,117,119,121,124,125,127,129,131,132,133,151,172,203,207,224,241,244,264,279,282,286,290,338,358,378,391,410,414,417,444,463,469,470,498,499,504,560,562,565,571,582,609,611,620,645,650,652,653,655,662,670,676,687,698,710,731,736,739,755,757,761,765,774,789,793,797,803,805,810,811,813,816,817,819,820,852,853,858,861,883,888,890,891,898,912,913,919,921,924,927,929,931,933,940,944,947,949,954,956,959,967,968,974,977,978,981,982,991,996,997,1000,1003,1005,1008,1009
1,5,31,49,51,52,54,56,57,59,62,69,108,117,118,120,121,127,134,147,165,166,169,174,176,197,198,199,200,206,207,208,209,232,245,276,283,309,363,365,382,402,413,427,450,456,525,568,645,663,735,750,762,766,767,769,771,777,778,784,786,787,790,791,792,795,796,797,805,807,808,809,810,813,814,815,816,817,819,821,825,828,832,837,838,842,843,853,859,863,868,870,871,873,875,879,880,882,883,891,895,897,899,901,902,904,905,912,915,917,919,922,923,925,927,929,930,932,935,936,939,949,954,955,956,957,965,971,977,997,1000,1005,1014,1016,1019,1046,1048,1056,1057,1069,1073,1074,1078,1079,1105,1126,1131,1136,1138,1141,1147,1148,1165,1173,1174,1177,1186,1191
1,5,8,10,11,12,18,19,21,27,29,36,37,38,42,43,44,47,50,52,55,60,61,63,64,67,73,76,78,81,84,89,93,94,95,97,98,99,101,102,108,111,113,117,119,121,124,125,127,129,131,132,133,134,170,172,185,190,196,203,206,222,236,242,243,248,252,260,261,290,294,299,309,328,334,337,341,345,363,366,371,378,385,386,397,407,410,414,416,417,423,429,436,441,464,467,469,488,496,504,507,517,532,538,541,546,550,557,561,565,566,571,579,581,582,584,587,588,592,595,599,601,604,605,611,619,620,622,633,640,643,645,648,650,652,653,655,660,662,664,665,670,672,676,680,696,706,710,723,737,739,741,743,750,751,752,755,761,767,774,779,784,789,790,793,794,796,799,803,804,805,809,811,813,816,817,819,820,823,826,827,835,838,853,855,858,863,866,870,879,883,885,891,898,908,912,913,919,921,923,924,927,929,932,933,936,940,944,948,949,950,954,956,958,959,963,967,968,970,974,977,978,979,981,982,984,985,991,996,997,1000,1005,1008,1009
5,14,52,53,68,71,136,139,144,150,158,164,174,195,197,212,227,275,280,308,325,349,353,363,371,390,401,411,422,425,434,443,446,455,460,519,557,581,597,607,639,706,741,867,924,944,1012,1081,1123,1239,1333,1541,1574,1578,1580,1582,1591,1592,1594,1596,1597,1598,1599,1601,1602,1604,1609,1610,1613,1616,1617,1619,1624,1627,1628,1630,1633,1635,1636,1637,1638,1640,1641,1651,1655,1656,1657,1681,1682,1683,1708,1709,1710,1712,1714,1715,1725,1726,1731,1738,1743,1745,1746,1749,1751,1752,1753,1754,1758,1759,1767,1768,1778,1798,1806,1818,1819,1820,1821,1824,1830,1831,1834,1835,1838,1840,1843,1846,1847,1850,1852,1854,1855,1859,1861,1862,1868,1870,1873,1874,1876,1879,1880,1881,1882,1884,1887,1889,1896,1907,1912,1913,1919,1923,1934,1935,1946,1947,1951,1954,1958,1973,1975,1994,1998,2011,2015,2016,2020,2024,2026,2038,2041,2042,2048,2055,2059,2064,2077,2080,2085,2095,2096,2100,2122,2123,2135,2138,2152,2153,2161,2178,2203,2229,2239,2247,2255,2256,2258,2266,2267,2281,2283,2286,2298,2301,2306,2314,2317,2319,2323,2325,2329,2338,2354,2374,2375,2440,2460,2463,2490,2492,2493,2506,2517,2591,2593,2597,2605,2624,2653,2681,2705,2725
0,5,6,7,8,10,12,13,15,17,18,20,21,23,24,26,27,29,32,33,36,37,38,41,42,43,45,49,50,52,53,55,57,58,60,62,64,65,68,72,76,77,78,80,81,82,83,84,86,87,89,90,91,93,94,95,98,99,101,102,103,104,107,108,109,110,112,113,115,116,119,121,122,123,125,126,127,128,129,130,131,133,134,139,140,152,154,156,164,166,176,216,236,237,244,265,278,279,293,305,307,308,315,329,332,357,372,373,374,379,399,417,419,421,435,438,450,451,457,459,466,468,476,485,498,503,507,522,523,526,543,549,553,568,582,592,594,595,600,603,608,611,613,621,622,626,635,637,642,647,657,662,673,674,676,679,681,682,689,699,709,712,715,717,718,720,725,730,748,753,756,760,761,762,764,768,769,776,782,784,785,786,795,805,817,820,822,824,825,836,839,842,852,854,858,860,866,876,877,881,883,886,890,895,902,906,922,925,930,931,940,941,942,946,950,953,956,957,962,964,967,968,971,973,976,978,983,986,988,990,991,994,996,997,1000,1002,1005,1008,1009,1011,1012
5,10,31,42,48,95,97,100,111,112,123,128,130,134,157,186,199,201,205,221,222,227,289,290,294,299,361,391,399,400,423,426,450,455,466,499,509,513,698,706,708,719,721,724,725,726,733,747,757,761,773,774,778,779,792,802,823,847,850,877,895,907,929,931,932,951,959,997,1091,1128,1134,1135,1178,1189,1200,1299,1310,1326,1337,2069,2107,2148,2157,2163,2164,2169,2171,2172,2173,2174,2176,2177,2180,2181,2182,2183,2184,2185,2186,2194,2202,2206,2210,2212,2213,2214,2215,2216,2217,2218,2219,2220,2221,2237,2239,2240,2245,2258,2261,2270,2273,2274,2275,2276,2277,2278,2279,2280,2282,2344,2362,2368,2369,2372,2373,2382,2383,2385,2387,2388,2389,2393,2394,2396,2397,2398,2399,2405,2406,2407,2408,2410,2411,2412,2414,2415,2431,2433,2435,2436,2442,2450,2453,2455,2457,2459,2470,2472,2482,2485,2486,2488,2491,2492,2494,2506,2507,2511,2512,2518,2519,2520,2530,2535,2536,2538,2543,2544,2545,2554,2560,2575,2578,2580,2581,2588,2590,2594,2598,2601,2602,2613,2618,2620,2625,2627,2634,2640,2642,2645,2655,2659,2661,2672,2680,2682,2688,2691,2707,2719,2728,2739,2746,2752,2754,2759,2777,2780,2781,2782,2786,2791,2801,2817,2818,2819,2829,2835,2853,2854,3584
3,6,16,17,18,20,22,23,24,31,32,34,35,37,39,41,42,44,46,47,48,50,51,53,62,64,66,69,72,74,76,79,80,82,84,85,86,87,90,91,94,98,100,101,102,103,105,106,107,110,111,112,114,115,116,117,122,123,128,130,132,133,134,165,190,200,213,241,268,269,274,278,284,310,350,365,369,370,387,388,400,412,422,437,453,460,502,510,515,523,534,544,570,592,603,609,635,637,644,673,686,690,693,700,702,714,728,735,745,747,766,777,793,796,800,810,811,845,860,866,869,871,873,875,890,902,908,910,923,934,935,939,945,948,953,955,960,964,968,969,970,975,978,981,982,984,989,992,994,996,999,1001,1004,1006,1007,1010,1011,1012
55,56,75,152,153,155,156,159,261,262,270,275,280,282,308,312,321,322,323,324,333,339,345,350,361,363,366,367,371,380,383,384,385,388,498,506,507,508,509,510,511,518,523,524,526,531,536,576,581,582,591,594,726,734,754,800,811,818,826,827,854,857,861,899,900,901,904,907,908,909,942,943,953,954,955,956,959,961,969,973,979,980,981,982,983,986,1069,1070,1071,1080,1082,1083,1084,1085,1086,1088,1090,1091,1095,1104,1105,1106,1107,1108,1109,1110,1116,1117,1119,1120,1175,1213,1214,1219,1244,1246,1247,1266,1267,1269,1274,1275,1280,1293,1299,1304,1305,1308,1313,1332,1333,1335,1338,1341,1343,1345,1349,1350,1351,1384,1389,1391,1398,1403,1407,1413,1438,1445,1447,1448,1453,1461,1489,1525,1526,1531,1535,1536
4,7,10,14,16,17,19,22,28,30,35,39,41,47,51,58,62,63,64,65,66,69,71,72,74,75,76,77,79,80,82,85,87,88,90,91,92,94,96,97,100,103,105,106,107,108,109,111,112,115,116,117,120,126,132,149,168,169,171,182,196,200,203,230,237,245,258,268,290,297,312,326,335,341,350,351,353,356,359,362,367,392,409,412,431,452,465,469,483,486,530,531,542,549,564,570,572,575,586,616,624,632,639,642,645,649,668,677,691,693,702,704,713,720,740,749,758,759,760,762,763,770,772,778,779,780,782,783,793,802,806,807,811,828,832,838,845,846,847,849,850,859,867,869,871,874,884,887,889,895,898,899,900,904,905,918,928,934,939,941,944,946,947,953,959,960,965,966,972,980,981,987,990,992,993,994,995,996,999,1001,1007,1010
2,6,7,14,25,27,30,31,32,34,41,49,50,56,64,80,81,91,99,113,120,124,125,144,153,168,176,189,192,203,206,240,254,281,283,291,301,303,306,307,418,419,431,434,436,437,438,445,457,563,569,582,703,848,855,856,857,858,875,877,878,881,883,884,891,892,894,898,899,900,906,910,911,914,915,916,928,929,930,931,932,933,934,935,937,949,953,954,956,957,958,960,961,962,963,964,965,966,972,974,975,977,978,980,981,983,984,986,987,988,990,991,992,994,1004,1005,1024,1082,1087,1088,1093,1096,1100,1102,1103,1104,1105,1106,1111,1112,1114,1119,1124,1127,1137,1138,1139,1140,1142,1143,1144,1149,1150,1151,1152,1154,1157,1158,1164,1166,1169,1174,1184,1187,1196,1200,1201,1202,1206,1211,1212,1214,1215,1234,1235,1236,1252,1283,1284,1302,1310,1379,1383,1386,1387,1389,1394,1396,1402,1433,1435,1546
3,4,6,9,10,14,15,17,19,22,23,24,25,26,28,30,31,34,35,39,40,41,45,46,49,51,53,54,56,57,58,59,62,64,65,66,69,70,71,72,74,75,77,79,80,82,85,87,88,89,90,91,92,94,95,96,98,100,101,102,103,105,106,107,108,109,110,111,112,113,114,115,116,117,120,122,123,125,126,128,129,130,131,132,134,135,141,146,150,152,156,178,180,186,191,197,205,212,213,215,226,230,239,245,249,258,262,266,268,269,272,273,292,293,305,306,312,313,317,332,336,338,339,350,356,362,367,369,372,373,384,404,409,416,418,425,431,433,437,443,448,455,457,465,466,470,478,482,484,486,509,510,513,523,531,542,547,559,560,564,569,570,571,580,591,598,602,603,606,607,608,610,613,614,616,625,627,628,631,635,637,639,642,646,649,651,663,666,675,677,679,682,687,693,697,700,702,705,708,711,713,716,728,738,739,749,756,758,762,763,768,770,772,773,778,780,783,797,798,802,806,807,808,812,818,828,836,837,842,843,845,849,850,853,867,871,873,874,875,876,878,881,887,895,896,899,900,902,904,906,907,914,917,918,922,930,937,941,952,953,960,965,966,969,975,978,980,981,983,988,989,990,992,993,994,995,996,997,999,1001,1004,1005,1006,1007,1010,1011,1012
2,28,29,30,35,48,53,62,69,76,81,92,104,105,106,115,118,131,151,152,154,170,171,198,211,222,232,235,259,270,350,354,377,384,404,405,409,419,432,434,471,478,533,545,570,588,666,722,776,777,786,797,801,819,821,837,840,980,983,987,997,1003,1009,1055,1057,1060,1066,1067,1087,1088,1137,1154,1201,1202,1251,1262,1264,1265,1274,1307,1309,1312,1319,1332,1336,1341,1342,1351,1352,1356,1357,1364,1365,1366,1367,1368,1370,1373,1375,1378,1379,1380,1381,1382,1383,1391,1392,1393,1397,1402,1404,1409,1414,1415,1418,1419,1423,1427,1430,1434,1435,1438,1441,1442,1446,1448,1449,1450,1451,1452,1454,1455,1458,1470,1473,1477,1487,1488,1490,1513,1518,1525,1527,1528,1530,1534,1535,1539,1541,1546,1548,1553,1554,1555,1557,1558,1559,1560,1561,1563,1567,1573,1574,1577,1583,1584,1585,1586,1588,1593,1597,1604,1606,1609,1611,1613,1618,1622,1623,1624,1627,1628,1629,1631,1634,1635,1636,1638,1640,1641,1644,1649,1652,1656,1659,1660,1662,1663,1665,1666,1667,1668,1669,1673,1677,1678,1679,1683,1684,1712,1717,1720,1740,1749,1753,1756,1760,1762,1764,1770,1775,1776,1779,1784,1791,1804,1805,1807,1808,1810,1811,1817,1824,1825,1837,1839,1842,1843,1849,1850,1851,1852,1857,1862,1863,1871,1885,1886,1895,1896,1925,1927,1929,1952,1953,1955,1957,1966,1973,1974,1986,1988,1990,2002,2007,2009,2037,2038,2039,2042,2053,2060,2063,2065,2070,2071,2075,2076,2096,2156,2158,2160,2183,2201,2340,2342,2343
0,1,5,9,11,12,14,19,21,27,31,36,51,52,55,56,57,60,61,63,64,66,67,69,73,74,75,80,84,91,93,94,95,97,98,99,101,105,110,113,116,119,120,121,124,125,127,129,131,133,134,175,184,185,206,225,235,243,260,261,265,282,299,327,345,356,385,386,398,416,430,441,444,454,458,474,476,488,492,497,503,506,512,531,532,554,557,561,599,601,613,615,639,642,643,652,660,662,663,664,670,672,676,682,698,700,737,741,743,750,752,755,762,769,774,786,807,809,819,827,841,853,855,864,865,874,887,891,905,912,921,924,927,929,936,943,944,947,950,954,966,967,974,979,980,985,991,996,997,1003,1009
1,9,13,14,257,262,263,265,267,269,270,272,274,282,287,288,289,417,517,534,585,586,601,602,838,839,841,851,852,869,875,991,992,997,1001,1007,1011,1013,1014,1022,1023,1029,1048,1061,1083,1135,1189,1201,1205,1212,1223,1231,1232,1233,1234,1235,1236,1237,1239,1240,1242,1243,1244,1245,1246,1247,1262,1263,1264,1272,1273,1276,1277,1278,1279,1280,1281,1284,1285,1286,1287,1288,1290,1292,1293,1294,1295,1297,1298,1309,1310,1314,1315,1316,1317,1319,1320,1321,1322,1324,1326,1327,1336,1337,1354,1355,1376,1377,1378,1381,1383,1387,1394,1398,1402,1404,1405,1408,1414,1418,1421,1429,1431,1433,1434,1435,1437,1443,1444,1446,1452,1455,1456,1457,1690,1691,1699,1701,1711,1713,1726,1731,1732,1829,1832,1834,1837,1910,1911,1912,1918
0,2,5,8,9,10,12,13,14,15,17,18,19,20,22,23,24,26,27,28,29,30,33,36,38,41,43,44,45,46,49,50,51,52,53,55,56,57,58,59,60,61,62,64,65,66,67,68,69,72,73,74,75,77,78,79,81,83,84,85,86,87,89,90,93,94,95,98,99,101,102,103,104,105,107,108,110,111,112,113,115,116,119,120,121,122,123,124,125,126,127,128,129,130,131,133,134,138,144,146,151,158,188,202,205,213,217,218,223,231,252,261,263,270,273,274,277,293,295,302,303,319,325,344,345,356,357,365,383,398,401,403,406,408,416,421,429,430,435,447,448,450,451,455,458,474,475,481,486,490,494,497,498,499,506,508,512,518,522,523,528,532,538,553,554,557,561,573,574,578,579,580,582,585,593,594,595,599,601,607,608,612,613,615,622,626,630,631,634,637,641,642,646,649,652,658,661,663,670,672,676,677,681,682,688,690,695,698,699,708,709,711,712,715,720,723,734,737,738,739,741,748,750,751,752,753,755,758,761,763,764,768,769,772,773,780,786,789,790,791,792,795,803,804,813,817,819,820,821,823,826,827,834,836,839,842,844,847,849,853,854,855,856,858,861,864,875,876,877,882,883,885,890,891,895,897,906,912,913,919,920,921,924,927,930,933,941,943,949,950,953,954,955,956,965,967,968,972,973,975,977,978,979,980,983,985,986,990,991,993,996,997,1000,1002,1005,1008,1009,1011,1012
4,5,10,13,16,19,39,71,80,83,85,126,134,141,143,145,222,229,231,233,234,237,261,278,287,295,296,308,365,387,398,401,402,427,428,430,434,440,461,462,518,552,570,597,605,607,664,736,739,770,785,798,802,815,842,843,844,846,870,873,904,912,935,939,954,991,1003,1015,1038,1048,1060,1062,1066,1073,1074,1128,1190,1192,1230,1231,1250,1343,1380,1440,1458,1477,1496,1515,1549,1557,1589,1590,1613,1738,1777,1809,1821,1822,1823,1830,1833,1834,1835,1836,1842,1843,1844,1867,1877,1879,1880,1883,1884,1885,1887,1910,1911,1912,1916,1917,1919,1920,1924,1927,1929,1934,1935,1937,1939,1941,1942,1946,1948,1949,1952,1974,1977,1980,1983,1989,1991,1993,2005,2009,2011,2012,2015,2016,2017,2020,2080,2082,2084,2085,2089,2090,2092,2093,2096,2105,2106,2107,2108,2109,2111,2112,2118,2121,2122,2123,2124,2126,2139,2168,2169,2172,2182,2184,2186,2187,2188,2189,2192,2197,2200,2201,2203,2204,2207,2210,2211,2213,2215,2216,2218,2219,2221,2222,2223,2228,2231,2233,2259,2260,2261,2263,2267,2272,2279,2282,2283,2284,2286,2302,2331,2332,2335,2337,2339,2341,2342,2345,2347,2348,2350,2356,2361,2366,2368,2371,2378,2380,2397,2399,2400,2403,2404,2410,2411,2415,2420,2421,2436,2446,2447,2457,2461,2465,2467,2473,2475,2480,2481,2482,2483,2493,2495,2497,2498,2506,2512,2513,2515,2518,2527,2530,2543,2554,2559,2561,2572,2582,2583,2597,2603,2611,2618,2619,2639,2664,2665,2666,2667,2668,2689,2690,2693,2697,2706,2717,2721,2723,2733,2738,2755,2756,2761,2762,2768,2769,2773,2776,2794,2796,2802,2812,2814,2833,2834,2861,2892,2896,2898,2899,2909,2929,2946,2964,3089
1,2,5,11,18,27,29,33,36,37,38,42,43,44,50,55,60,63,64,67,76,78,84,86,89,93,94,95,97,98,99,101,102,108,111,112,113,117,119,121,124,125,127,129,131,132,133,134,176,244,280,282,285,324,337,347,376,382,391,397,410,423,436,494,496,498,517,571,572,574,579,584,592,593,605,611,618,626,653,659,665,676,710,719,736,737,743,752,755,775,786,789,790,794,799,809,810,816,817,819,823,827,838,841,851,852,853,855,858,861,866,870,877,883,905,906,913,919,921,923,924,927,931,933,940,944,949,950,956,958,959,968,974,977,978,981,982,984,991,996,997,1000,1003,1005,1008,1009
2,3,4,6,38,43,51,53,54,71,88,115,131,145,147,152,160,218,219,227,229,250,253,260,262,276,281,331,332,403,415,423,440,461,480,482,492,582,658,665,942,1008,1031,1080,1143,1280,1410,1411,1413,1414,1415,1423,1424,1426,1427,1429,1431,1433,1434,1435,1436,1443,1445,1446,1449,1452,1464,1467,1468,1471,1497,1506,1510,1517,1527,1529,1531,1532,1533,1536,1537,1539,1553,1554,1586,1589,1590,1591,1592,1593,1594,1601,1604,1649,1654,1656,1660,1661,1671,1675,1684,1685,1686,1689,1691,1692,1693,1695,1696,1703,1708,1714,1715,1718,1721,1723,1724,1741,1748,1750,1755,1773,1780,1787,1791,1838,1845,1846,1853,1856,1859,1891,1892,1924,1940,1941,1942,1956,1968,1973,2010,2015,2018,2055,2091,2095
4,9,13,16,19,20,23,28,30,31,34,35,46,47,48,49,50,51,56,57,62,64,65,66,69,71,72,74,75,76,80,82,85,86,87,88,89,90,91,94,96,100,105,106,107,111,112,114,115,116,117,120,122,123,126,128,130,132,171,192,212,220,230,233,269,273,277,281,310,327,350,353,362,402,409,422,461,486,508,509,510,548,559,564,576,606,611,616,628,631,635,646,647,663,668,675,686,689,693,702,705,728,745,746,749,762,770,773,779,782,783,793,795,796,800,806,811,818,828,833,838,844,846,849,867,871,873,874,889,890,895,899,900,904,908,918,923,928,934,935,937,948,953,955,959,965,966,970,975,980,982,987,988,990,996,999,1001,1004,1006,1007,1010,1011
2,3,4,5,8,9,11,17,18,24,29,35,84,89,95,97,98,105,106,107,108,109,113,116,130,131,132,149,158,166,191,198,208,215,228,246,248,253,270,273,317,322,341,383,386,389,398,413,439,440,448,488,489,532,606,869,870,884,885,886,890,893,895,897,898,900,901,905,906,907,908,909,910,916,919,920,921,923,924,926,927,932,933,934,935,936,938,939,941,943,944,950,951,953,954,956,957,958,959,960,961,963,964,966,967,970,972,1002,1003,1005,1007,1008,1026,1028,1029,1031,1033,1034,1036,1037,1038,1039,1048,1050,1056,1064,1069,1071,1079,1080,1081,1082,1089,1102,1103,1106,1112,1115,1116,1121,1133,1140,1148,1162,1164,1187,1188,1191,1194,1196,1197,1217,1246,1247,1250,1255,1256,1298,1313,1320,1325,1350
3,4,8,14,15,16,17,19,20,22,23,24,28,30,31,34,35,37,39,40,41,46,47,48,49,51,54,56,57,59,62,63,64,65,66,68,69,72,74,75,76,77,79,80,82,85,86,87,88,90,91,92,94,96,100,101,103,105,106,107,108,110,111,112,114,115,116,117,118,120,122,123,126,128,130,132,134,136,145,156,159,166,168,174,197,203,205,212,214,215,233,234,239,245,253,262,268,269,277,281,282,307,310,312,326,339,342,350,353,356,357,358,372,381,409,412,422,423,456,461,469,479,482,486,508,509,510,514,517,531,548,559,564,580,610,613,616,619,627,631,636,642,644,646,655,671,675,683,686,690,691,693,699,700,717,745,746,747,748,749,756,759,762,766,768,770,777,778,779,780,782,783,784,793,796,802,806,811,818,828,829,843,845,846,847,848,849,854,859,866,867,869,871,873,874,878,883,887,888,889,895,898,899,900,904,905,906,908,914,918,923,928,934,935,937,941,945,948,950,953,955,959,960,969,970,975,980,981,982,984,987,988,989,990,992,995,996,999,1001,1004,1006,1007,1010,1011,1012
7,10,11,14,20,21,24,32,33,34,38,41,43,46,54,66,69,70,95,98,99,239,254,265,266,268,269,277,280,281,285,288,298,331,335,337,356,357,404,409,414,416,418,462,473,484,537,567,595,614,650,653,656,712,714,715,716,738,865,866,869,871,884,894,1140,1181,1191,1207,1208,1238,1239,1240,1342,1379,1382,1442,1445,1446,1447,1449,1450,1452,1454,1455,1458,1459,1460,1461,1462,1463,1465,1466,1467,1468,1470,1474,1475,1478,1480,1481,1482,1485,1487,1491,1492,1493,1495,1496,1497,1498,1499,1501,1502,1505,1511,1513,1514,1515,1516,1517,1518,1520,1521,1522,1523,1528,1529,1530,1531,1535,1536,1538,1539,1541,1544,1545,1547,1550,1551,1552,1554,1557,1558,1595,1597,1598,1599,1600,1609,1611,1612,1614,1616,1618,1619,1620,1621,1622,1625,1628,1630,1635,1639,1640,1643,1654,1656,1661,1662,1670,1678,1680,1681,1685,1688,1689,1695,1701,1703,1708,1711,1712,1715,1728,1729,1736,1737,1747,1752,1753,1758,1780,1786,1798,1810,1811,1814,1821,1823,1836,1837,1841,1846,1855,1860,1861,1862,1866,1872,1877,1901,1905,1906,1915,1933,1937,1948,1952,1954,1969,2015,2026,2027,2079,2084,2085,2114,2115,2122,2124,2153,2157,2316,2317,2320,2322,2325,2328,2329,2442,2473,2484,2486,2487,2488
3,4,6,7,10,14,15,17,19,20,22,23,24,25,26,27,28,30,31,32,34,35,37,38,39,40,41,42,44,46,49,50,51,53,54,56,57,58,59,62,64,65,66,67,69,70,71,72,74,75,76,77,79,80,82,83,84,85,86,87,88,89,90,91,92,94,96,97,100,102,103,104,105,106,107,108,109,110,112,113,114,115,116,117,119,120,122,123,124,125,126,128,130,131,132,133,134,136,147,149,155,156,163,166,173,177,180,183,198,200,205,212,215,223,227,230,239,244,245,253,262,266,267,268,269,271,273,276,284,286,291,292,293,295,300,305,308,317,320,326,339,341,347,350,357,362,369,374,383,390,392,400,411,419,422,427,437,443,453,462,463,466,468,470,483,484,491,510,514,523,531,536,547,548,550,559,564,575,580,583,597,598,599,600,603,606,613,614,616,621,623,627,628,631,637,638,639,646,647,649,656,657,673,675,679,682,689,691,693,695,697,702,707,708,713,728,738,742,746,748,753,758,763,764,768,770,772,777,778,780,782,783,785,790,796,802,808,810,812,817,818,819,823,824,825,828,829,836,842,843,844,845,847,848,850,851,857,860,861,866,868,869,873,874,875,877,881,883,886,888,890,895,899,900,902,903,904,914,922,923,928,931,937,940,941,942,945,946,949,950,955,957,960,964,969,971,972,973,975,977,980,983,986,987,988,989,990,992,994,996,998,999,1001,1004,1006,1010,1011,1012
3,9,25,32,33,39,45,53,57,62,65,74,81,83,84,91,93,101,109,116,157,188,214,216,243,252,269,286,290,297,472,511,523,557,608,615,672,687,692,738,741,767,772,773,786,787,795,803,838,887,889,919,933,937,942,945,948,966,1046,1056,1075,1079,1086,1097,1112,1119,1168,1169,1172,1221,1236,1237,1272,1362,1387,1494,1530,1542,1562,1570,1609,1617,1643,1645,1646,1663,1686,1690,1692,1695,1724,1735,1739,1753,1758,1766,1767,1768,1772,1773,1793,1794,1796,1797,1798,1799,1801,1802,1803,1804,1805,1806,1808,1809,1852,1854,1856,1857,1858,1859,1860,1861,1862,1864,1866,1867,1868,1869,1875,1882,1921,1922,1926,1927,1932,1934,1937,1938,1950,1956,1964,1965,1966,1967,1968,1969,1974,1977,1979,1980,1984,1985,1999,2000,2001,2003,2004,2008,2009,2018,2020,2021,2022,2025,2026,2027,2037,2038,2039,2049,2050,2055,2057,2059,2060,2061,2065,2066,2070,2071,2076,2083,2084,2088,2090,2091,2095,2097,2098,2099,2101,2105,2109,2110,2115,2119,2121,2122,2123,2124,2125,2126,2127,2128,2129,2131,2132,2133,2140,2144,2146,2148,2149,2174,2175,2186,2191,2194,2196,2197,2199,2202,2206,2207,2214,2217,2222,2223,2224,2226,2235,2236,2239,2240,2242,2246,2247,2249,2267,2369,2371,2372,2373,2407,2422,2429,2431,2434,2448,2467,2468,2473,2474,2475,2476,2490,2492,2508,2509,2521,2528,2550,2557,2558,2565,2566,2569,2571,2572,2587,2600,2604,2607,2610,2613,2614,2615,2617,2620,2625,2636,2637,2645,2652,2654,2655,2669,2674,2677,2678,2689,2713,2720,2721,2722,2723,2734,2738,2748,2749,2751,2754,2757,2765,2771,2773,2776,2797,2810,2813,2815,2845,2856,2901,2932,2934,2938,2941
6,16,20,28,31,34,37,38,49,54,57,62,76,77,86,90,91,96,105,106,109,110,111,114,115,117,122,126,128,133,181,275,308,341,381,439,479,495,612,686,689,708,710,740,763,779,787,790,796,800,842,848,866,878,888,934,937,942,948,953,969,970,981,989,1004,1006
1,74,75,90,91,92,103,118,126,146,154,155,176,190,191,268,279,298,334,335,356,357,366,379,383,412,417,420,421,422,423,425,426,434,435,456,459,460,461,463,464,468,469,480,482,483,513,526,535,538,541,542,552,567,575,637,644,648,661,662,664,667,675,676,677,686
2,6,9,13,15,17,18,20,21,26,29,30,31,32,33,35,36,41,42,44,45,49,50,52,53,54,56,57,58,60,62,64,65,67,68,71,72,73,76,77,78,80,81,83,84,86,87,88,89,90,91,93,94,95,96,97,98,99,102,103,104,105,108,109,110,111,112,113,115,116,117,119,121,122,123,124,125,126,127,128,129,130,131,132,133,134,136,140,150,156,163,164,183,198,228,235,241,254,268,273,284,295,296,303,305,306,308,317,319,320,323,332,336,417,425,426,433,439,440,451,455,459,463,466,468,471,472,476,494,506,507,510,515,519,520,524,532,537,539,543,544,564,568,574,582,591,592,594,613,635,636,638,647,651,652,657,659,667,674,677,679,681,689,703,706,707,708,712,714,715,718,720,725,727,741,744,748,753,756,760,762,763,764,765,768,773,776,778,780,782,785,786,792,795,803,805,806,810,817,819,820,821,825,833,834,836,841,842,843,847,852,854,858,860,861,872,875,876,880,881,882,883,887,890,891,893,895,897,902,906,910,912,915,922,925,930,931,938,942,950,953,956,957,960,962,963,968,971,973,974,976,978,981,983,986,987,988,990,991,996,997,998,999,1002,1005,1011,1012
1,24,31,33,83,84,90,183,186,227,228,231,233,244,245,249,251,253,257,260,359,425,429,431,435,436,437,470,477,485,548,559,579,582,613,617,622,624,631,681,688,703,705,864,868,875,893,894,900,904,908,909,931,932,935,938,943,944,970,987,1023,1024,1034,1044,1338,1345,1354,1355,1424,1596,1598,1607,1615,2415,2446,2454,2466,2766,2792,2968,2970,2983,2997,2998,3064,3071,3072,3075,3080,3081,3087,3093,3110,3115,3139,3141,3142,3160,3161,3162,3165,3166,3170,3192,3195,3257,3258,3263,3273,3278,3279,3283,3285,3286,3287,3288,3292,3293,3294,3298,3302,3305,3309,3313,3314,3315,3316,3317,3318,3319,3320,3324,3329,3330,3335,3338,3339,3341,3386,3393,3395,3397,3406,3413,3415,3417,3419,3430,3435,3445,3453,3456,3461,3465,3466,3468,3476,3482,3484,3487,3490,3498,3505,3506,3507,3510,3532,3549,3550,3553,3556,3557,3558,3559,3561,3618,3666,3667,3671,3679,3683,3684,3694,3695,3752,3753,3765,3782,3798,3800,3801,3802,3805,3806,3819,3827,3828,3829,3837,3839,3840,3846,3849,3850,3852,3853,3855,3878,3881,3884,3887,3913,3914,3947,3949,4038,4049,4052,4056,4105,4106,4108,4109,4110,4111,4128,4130,4133,4134,4137,4166,4170,4177,4196,4215,4289,4291,4292,4429,4436,4440,4441,4442,4448,4458,4459,4465,4469,4483,4495,4507,4508,4509,4513,4564,4565,4575,4577,4579,4589,4593,4596,4599,4600,4602,4603,4616
3,4,6,7,8,9,10,13,15,16,17,20,24,25,26,31,32,34,35,37,39,41,42,44,45,46,47,48,49,51,54,56,57,60,62,63,64,65,66,68,69,71,72,74,75,76,77,80,82,83,84,86,87,88,90,91,92,94,96,97,102,105,106,107,108,109,110,111,112,114,115,116,117,120,122,123,126,127,128,130,132,134,135,136,143,150,155,156,159,164,168,179,181,182,192,197,200,212,213,221,223,233,234,239,241,245,247,249,250,258,262,268,273,281,286,293,297,310,312,313,317,327,338,341,342,346,350,351,362,370,378,381,388,401,402,409,414,419,422,425,427,431,443,455,462,463,472,478,486,491,495,508,509,514,515,520,522,533,534,543,544,549,564,569,571,584,586,589,590,591,598,600,603,608,613,616,617,621,631,639,643,645,646,647,650,655,668,675,682,687,693,700,702,712,716,731,735,738,742,748,756,762,764,766,770,773,776,777,778,779,780,782,783,787,793,795,796,802,806,811,812,817,818,828,842,843,846,848,850,853,854,859,860,867,869,870,871,873,874,875,879,881,883,887,888,889,893,895,898,899,900,904,905,907,908,914,917,918,923,928,930,934,935,937,941,944,945,948,949,950,953,955,959,960,969,970,973,975,981,982,983,984,986,987,989,990,992,994,996,999,1004,1006,1007,1012
56,83,84,85,91,92,96,97,122,128,130,132,136,143,147,173,181,191,199,203,208,222,242,243,250,269,288,327,332,334,338,339,380,381,394,418,430,448,453,456,477,482,483,488,497,527,534,568,609,614,621,924,975,1014,1036,1079,1080,1091,1169,1174,1175,1224,1396,1411,1421,1422,1436,1449,1460,1507,1609,1649,1694,1696,1721,1723,1789,1790,1807,1808,1854,1857,1861,1862,1866,1867,1868,1885,1887,1890,1892,1894,1896,1911,1912,1920,1921,1923,1924,1926,1931,1932,1936,1937,1939,1940,1942,1943,1946,1953,1961,1965,1966,1968,1974,1975,1976,1982,1985,1986,1987,1988,1989,1999,2000,2019,2020,2028,2030,2033,2041,2043,2046,2047,2048,2051,2055,2056,2059,2060,2061,2071,2072,2073,2074,2075,2077,2078,2079,2081,2085,2090,2095,2097,2099,2100,2103,2104,2105,2108,2109,2113,2114,2120,2122,2137,2143,2145,2146,2151,2152,2160,2165,2166,2168,2172,2173,2174,2177,2178,2182,2183,2187,2191,2199,2201,2207,2224,2228,2234,2237,2238,2241,2243,2257,2259,2260,2261,2263,2264,2265,2269,2271,2285,2292,2300,2304,2317,2318,2322,2329,2333,2334,2341,2344,2346,2349,2350,2353,2359,2363,2364,2367,2376,2377,2388,2400,2407,2408,2409,2416,2421,2423,2438,2443,2444,2448,2452,2460,2462,2465,2466,2474,2480,2482,2488,2489,2491,2497,2504,2518,2524,2531,2536,2540,2578,2579,2587,2594,2600,2603,2605,2630,2652,2664,2673,2780,2790,2791,2793,2807,2809,2812,2815,2816,2817,2822,2828,2854,2857,2866,2867,2871,2888,2894,2897,2913,2916,2922,2925,2967,2990,3016,3017
0,1,5,6,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,27,29,30,31,36,38,41,42,43,44,46,49,50,51,52,53,54,55,56,57,59,60,62,64,66,67,69,70,71,72,73,74,75,76,77,78,79,80,81,84,85,86,87,88,90,91,92,93,94,95,96,97,98,99,101,102,103,104,105,109,110,111,112,113,115,116,117,118,119,120,122,123,124,125,126,128,129,130,131,132,133,134,136,138,146,149,150,152,154,156,160,164,172,173,186,187,193,194,196,200,206,213,215,222,240,247,248,251,254,270,272,275,278,289,293,295,308,315,316,321,327,328,336,339,341,345,355,360,362,376,378,382,384,386,393,402,416,419,425,426,430,433,441,448,452,454,455,461,463,466,472,474,482,483,489,496,503,509,511,518,529,547,553,560,561,574,577,578,580,581,587,591,592,593,594,599,603,606,607,608,612,614,616,620,622,627,634,635,640,641,649,651,658,662,667,670,672,674,682,684,689,691,692,698,705,708,709,712,713,719,721,731,738,739,740,742,750,753,756,758,760,761,762,764,768,788,789,796,798,800,802,804,805,808,810,818,825,827,847,848,850,852,853,854,858,860,861,864,865,867,874,875,879,884,886,887,890,891,895,896,897,899,900,902,906,913,917,918,922,928,930,932,936,937,940,941,942,948,949,950,952,953,955,956,960,961,965,966,967,968,970,973,978,980,981,987,990,991,995,996,997,1000,1003,1005,1008,1009,1011,1012
6,8,15,17,23,25,32,36,38,44,136,137,139,146,150,151,157,158,160,162,163,166,167,174,193,198,210,216,249,251,255,268,271,275,281,292,294,301,309,318,323,337,395,406,408,411,425,430,448,491,492,499,536,585,600,601,608,612,629,630,638,639,643,645,654,655,744,777,796,806,841,870,873,906,973,979,987,988,1015,1016,1018,1067,1078,1126,1135,1153,1241,1357,1358,1365,1394,1605,1707,1867,1872,1890,1923,1937,2005,2026,2030,2066,2074,2076,2077,2079,2081,2083,2085,2086,2087,2091,2092,2096,2098,2100,2101,2103,2105,2106,2107,2108,2110,2112,2113,2115,2120,2121,2122,2125,2127,2128,2129,2133,2135,2138,2140,2141,2145,2150,2151,2152,2156,2157,2160,2163,2167,2170,2172,2174,2176,2177,2179,2181,2185,2186,2204,2205,2206,2207,2210,2212,2213,2214,2216,2217,2218,2220,2222,2236,2237,2238,2240,2241,2242,2243,2244,2258,2259,2260,2263,2264,2271,2272,2273,2274,2280,2282,2284,2285,2287,2288,2289,2291,2292,2298,2300,2301,2304,2306,2308,2309,2310,2313,2317,2319,2321,2322,2346,2347,2349,2351,2352,2354,2355,2360,2361,2362,2388,2389,2390,2392,2394,2397,2401,2402,2404,2405,2409,2415,2417,2420,2421,2423,2425,2426,2427,2428,2436,2443,2446,2465,2466,2470,2471,2477,2478,2489,2494,2495,2541,2542,2543,2553,2554,2571,2572,2573,2576,2577,2586,2589,2596,2598,2603,2604,2605,2613,2618,2622,2626,2627,2637,2641,2642,2643,2646,2648,2649,2653,2655,2656,2657,2660,2661,2667,2671,2677,2678,2679,2727,2744,2745,2746,2751,2754,2755,2791,2798,2802,2808,2814,2818,2836,2853,2854,2855,2856,2861,2862,2869,2871,2885,2893,2899,2907,2908,2934,2938,2941,2950,2972,2977,2982,2989,2990,3033,3135,3203
3,4,5,10,15,16,17,21,22,30,31,32,34,35,36,37,39,42,47,49,50,51,53,54,56,57,58,59,62,65,66,69,70,74,75,76,79,82,84,85,86,87,88,89,90,91,92,93,95,96,98,100,102,105,106,107,108,109,110,111,112,114,116,117,118,120,122,129,130,131,132,134,152,158,203,230,233,245,250,269,277,286,290,297,300,305,313,316,335,339,349,350,353,378,384,400,412,414,437,444,482,484,504,542,550,564,569,572,598,607,616,621,622,627,632,639,644,645,668,671,675,686,690,693,695,702,708,724,729,731,740,746,759,766,767,768,770,781,787,793,796,800,811,818,825,830,831,837,842,843,844,846,848,851,866,869,870,871,873,875,879,884,888,889,895,906,909,914,923,928,934,937,941,945,948,950,951,955,959,969,970,975,978,981,982,983,989,992,994,997,999,1001,1004,1005,1006,1010,1012
29,30,31,46,47,360,369,373,375,377,379,380,385,410,411,439,462,464,465,477,478,492,518,535,541,551,555,558,561,566,570,574,575,616,647,654,660,680,694,697,698,700,701,702,708,713,715,717,718,722,727,728,737,772,877,887,895,897,898,903,910,1017,1019,1057,1058,1061,1063,1065,1067,1071,1102,1107,1122,1123,1124,1128,1130,1131,1137,1139,1149,1150,1152,1158,1160,1161,1162,1163,1164,1166,1167,1169,1191,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1205,1206,1207,1208,1222,1227,1230,1232,1233,1235,1316,1344,1345,1346,1347,1348,1349,1350,1351,1355,1356,1358,1366,1368,1369,1372,1389,1393,1427,1439,1443,1444,1447,1448,1449,1451,1455,1457,1463,1464,1465,1467,1468,1469,1472,1473,1474,1477,1481,1500,1509,1532,1536,1542,1546,1548,1549,1550,1551,1557,1559,1560,1581,1598,1602,1610,1897,1898,1899,1903,1907,1911,1913,1917,1918,1967,1976,1978,1980,1992,1993,1995,2013,2077,2083,2086,2110,2113,2136,2138,2177,2178,2180
9,27,29,36,37,38,44,50,67,78,86,89,97,98,101,102,104,108,113,119,124,125,127,129,131,133,158,177,309,327,333,334,391,436,476,540,541,567,582,611,652,658,659,661,667,670,701,710,726,730,734,803,813,825,826,852,861,863,877,883,912,919,924,933,940,947,963,1000,1005,1008
6,24,47,56,60,61,63,66,81,110,163,165,278,287,522,530,550,565,666,706,709,717,829,1047,1066,1068,1076,1077,1089,1091,1094,1096,1107,1109,1110,1112,1115,1135,1148,1150,1153,1154,1171,1172,1175,1183,1185,1186,1187,1192,1213,1222,1225,1229,1232,1233,1240,1246,1260,1266,1274,1284,1313,1315,1333,1410,1446,1554,1661,1673
1,5,7,8,10,13,15,17,20,22,23,24,26,27,31,36,40,41,42,43,46,49,52,53,54,55,56,57,58,59,60,62,64,65,66,69,70,71,72,73,74,75,77,79,80,83,84,85,86,89,90,91,92,93,94,95,96,97,98,99,101,102,103,104,105,108,109,110,113,116,119,120,121,122,123,124,125,127,128,129,131,133,134,136,140,146,154,163,164,176,183,198,208,214,219,221,231,232,249,257,259,266,284,285,287,289,298,304,314,317,323,326,346,352,354,358,374,384,390,400,401,404,408,413,430,435,439,444,449,459,463,465,466,472,473,484,489,498,505,510,511,514,520,535,537,542,554,558,560,564,568,590,594,595,600,604,605,613,616,617,623,625,639,656,657,666,671,674,676,677,679,684,692,696,700,701,702,712,713,714,715,717,720,723,724,725,738,742,743,754,755,756,761,762,763,764,772,781,785,786,791,795,798,807,808,809,818,820,824,825,833,836,839,840,842,843,849,850,854,856,857,859,860,863,867,868,872,874,875,876,877,880,886,887,893,894,896,901,903,904,910,917,920,921,925,930,931,940,942,946,950,951,952,961,962,963,965,971,972,973,976,978,983,986,991,993,995,996,997,1002,1005,1008,1009,1011
1,2,47,49,55,62,71,72,141,154,162,163,165,166,168,172,182,186,201,203,204,249,250,254,263,267,290,301,327,453,459,460,464,477,478,484,495,516,564,566,573,581,631,740,767,946,952,953,980,987,994,999,1076,1084,1090,1093,1095,1096,1102,1103,1107,1127,1218,1229,1240,1257,1347,1421,1422,1787,1824,1837,1838,2132,2162,2205,2215,2220,2224,2227,2241,2248,2274,2276,2277,2278,2282,2295,2309,2311,2314,2354,2357,2361,2362,2363,2366,2367,2399,2426,2427,2432,2442,2478,2479,2492,2493,2503,2504,2544,2546,2549,2551,2560,2586,2589,2591,2592,2593,2594,2597,2602,2604,2605,2608,2609,2616,2618,2627,2628,2629,2630,2649,2652,2656,2664,2665,2666,2669,2670,2678,2680,2681,2700,2704,2706,2707,2718,2723,2724,2728,2729,2791,2792,2794,2795,2796,2797,2800,2801,2802,2804,2806,2811,2826,2831,2833,2834,2835,2838,2839,2841,2855,2857,2858,2859,2863,2889,2907,2913,2914,2915,2930,2975,3020,3023,3027,3029,3030,3034,3035,3037,3038,3042,3048,3049,3052,3053,3061,3062,3063,3070,3089,3095,3097,3098,3099,3100,3101,3102,3103,3107,3109,3129,3136,3140,3141,3151,3176,3199,3200,3202,3229,3230,3232,3240,3302,3303,3352,3357,3381,3386,3390,3401,3409,3415,3419,3460,3461,3468,3475,3482,3486,3487,3488,3491,3492,3493,3545,3590,3594,3675,3685,3686,3697,3698,3700,3736,3741,3767,3769,3774,3787,3860,3861,3875,3878,3879,3880,3881,3883,3912,3913,3928
0,1,3,4,5,6,9,11,12,14,15,17,18,19,20,21,22,23,24,25,26,27,30,31,36,40,41,42,43,44,46,49,50,51,52,53,54,55,56,57,59,60,61,62,64,65,66,67,69,70,71,72,73,74,75,76,78,79,80,81,83,84,85,86,87,88,90,91,92,93,94,95,96,97,98,99,101,102,103,104,105,106,107,108,110,111,112,113,115,116,117,119,120,122,123,124,125,126,128,129,130,131,132,133,134,136,138,147,149,156,163,164,172,180,183,186,187,191,192,193,194,197,207,213,215,216,219,223,228,229,235,237,241,244,248,252,253,261,273,274,276,278,279,283,292,293,295,316,323,324,325,326,332,337,339,344,347,355,356,357,360,375,384,388,404,410,419,425,426,427,430,441,443,448,452,454,455,461,466,472,474,482,486,492,496,497,499,509,510,513,517,518,528,532,538,541,542,550,553,554,555,558,562,564,567,570,574,575,577,578,580,581,582,587,591,592,593,594,595,602,603,607,608,609,613,616,620,622,634,635,637,640,642,649,651,655,657,659,670,677,679,680,682,687,689,691,692,698,702,704,705,706,709,711,712,714,718,719,723,736,738,739,742,743,746,749,750,753,756,758,760,761,762,764,768,769,773,774,776,778,779,780,781,782,783,785,789,791,792,795,796,800,802,804,805,808,814,821,823,825,826,827,837,843,848,850,852,853,854,855,856,860,861,864,867,874,875,877,879,880,881,885,887,890,891,895,896,897,899,900,902,904,906,913,916,917,918,922,927,928,930,933,936,941,942,943,949,950,952,953,954,955,956,959,960,961,962,963,965,966,970,973,975,977,978,979,980,982,984,985,987,990,991,996,997,998,1001,1005,1008,1009,1011,1012
1,4,5,7,11,17,22,31,42,48,53,54,70,83,128,129,139,140,148,155,167,169,170,201,235,236,284,286,287,302,303,308,309,310,322,325,335,348,375,402,407,490,496,536,544,550,559,652,660,694,726,744,750,756,772,776,808,814,833,834,835,853,859,869,902,925,928,944,945,960,993,1128,1145,1178,1197,1227,1260,1329,1345,1346,1350,1351,1353,1364,1410,1431,1577,1579,1642,1679,1721,1752,1879,2077,2084,2153,2156,2175,2238,2266,2407,2453,2485,2527,2545,2546,2549,2551,2552,2555,2567,2571,2572,2575,2578,2579,2580,2583,2586,2587,2589,2594,2595,2596,2602,2604,2606,2611,2612,2615,2616,2618,2619,2623,2624,2625,2627,2629,2631,2632,2633,2634,2635,2637,2638,2639,2642,2649,2654,2657,2660,2661,2663,2665,2667,2669,2670,2674,2676,2680,2681,2682,2683,2684,2685,2686,2692,2698,2701,2703,2705,2706,2707,2708,2709,2710,2714,2722,2723,2726,2729,2730,2732,2733,2737,2738,2739,2740,2741,2743,2751,2753,2755,2756,2759,2760,2761,2762,2764,2766,2767,2769,2770,2773,2774,2775,2787,2788,2789,2790,2793,2818,2821,2829,2831,2834,2835,2837,2838,2841,2848,2849,2863,2866,2874,2877,2878,2879,2882,2924,2925,2928,2929,2931,2934,2944,2948,2958,2977,2982,2989,2991,2993,2994,2996,2999,3000,3004,3006,3007,3009,3013,3025,3030,3038,3041,3042,3044,3045,3053,3063,3069,3071,3072,3075,3084,3087,3102,3108,3154,3155,3159,3163,3165,3167,3172,3174,3175,3180,3186,3188,3198,3200,3205,3206,3209,3221,3248,3249,3255,3264,3268,3269,3277,3278,3279,3280,3281,3294,3303,3312,3314,3317,3319,3324,3325,3327,3331,3334,3339,3345,3346,3409,3414,3423,3426,3428,3434,3441,3447,3448,3455,3460,3461,3466,3476,3479,3485,3490,3494,3495,3497,3508,3519,3522,3536,3538,3543,3544,3552,3568,3580,3581,3588,3625,3626,3629,3644,3663,3676,3680,3681,3691,3694,3703,3707,3708,3709,3768,3769,3776,3779,3780,3784,3786,3787,3790,3792,3797,3802,3803,3904,3905,3925,3933,3934,3936,3939,3953,3971,3972,4112
3,14,15,16,17,19,22,28,30,31,35,44,46,49,51,56,58,59,62,63,66,69,71,74,75,76,77,79,80,82,84,85,86,87,88,90,91,95,96,100,103,105,106,109,110,111,112,114,116,117,119,120,122,125,127,130,132,134,156,171,185,196,212,215,229,230,234,278,297,299,305,311,349,353,356,364,366,367,381,409,422,427,486,503,508,509,528,547,548,559,569,572,580,584,609,610,616,619,639,649,663,668,688,690,691,702,712,734,746,756,758,759,763,766,768,772,793,802,807,811,818,832,837,846,848,849,871,874,878,889,895,900,904,905,909,914,918,923,928,933,937,941,942,944,945,948,950,955,959,965,966,972,980,981,982,984,992,999,1001,1004,1006,1007,1008,1010
8,15,16,17,18,19,21,51,52,53,54,55,56,98,115,123,142,144,145,157,165,172,186,192,197,202,213,214,217,223,235,257,274,281,287,297,303,304,309,388,389,401,436,437,438,466,467,468,470,513,517,544,549,550,551,552,582,583,584,591,593,594,595,596,598,599,600,601,602,603,610,611,615,616,618,619,622,623,625,626,627,632,633,635,636,637,638,639,648,649,650,651,652,654,657,658,659,660,662,666,667,669,671,673,675,678,679,680,684,685,688,694,702,708,709,718,727,739,741,743,744,762,764,773,774,776,783,788,796,798,799,800,801,808,809,811,812,813,814,815,816,818,819,824,826,830,831,832,836,841,846,847,850,890,897,898,929,930,947,949,950,951,952,1031
1,5,6,8,9,11,14,15,17,18,19,20,21,22,23,24,26,27,29,30,31,36,37,38,40,41,42,43,44,46,49,50,51,52,53,54,56,57,58,59,60,61,62,64,65,66,67,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,101,102,103,105,107,108,109,110,111,112,113,115,116,117,119,120,122,123,124,125,126,127,128,129,130,131,132,133,134,136,146,149,151,156,160,163,164,166,168,172,187,189,193,194,197,205,206,213,214,219,223,225,227,228,236,239,241,242,244,247,248,249,252,272,274,278,279,292,294,298,300,308,309,321,326,327,328,329,339,340,341,344,345,347,355,356,357,372,376,391,393,394,402,404,408,418,419,425,426,427,440,441,444,448,455,458,461,470,472,474,477,482,483,484,485,489,494,499,503,506,508,509,510,512,518,527,537,542,561,562,570,574,575,577,578,579,580,582,587,590,592,594,600,601,603,608,610,611,612,613,615,616,621,627,628,630,634,635,636,637,639,641,646,647,649,653,657,659,662,668,669,670,672,673,677,679,682,690,691,698,702,705,710,712,714,718,719,720,723,727,734,736,739,742,747,750,753,756,757,760,762,763,764,767,772,773,774,776,780,785,790,794,798,800,802,804,805,810,813,816,823,825,837,838,840,843,847,848,849,850,852,853,854,858,861,863,864,866,867,868,870,874,875,876,881,883,886,887,888,891,895,896,897,899,900,902,904,906,911,913,917,919,923,928,930,931,933,936,937,940,941,942,949,950,952,953,954,956,959,960,961,963,965,967,968,970,973,975,977,978,979,980,981,982,983,984,985,988,990,991,996,997,1000,1001,1005,1008,1009,1011,1012
4,13,14,16,26,34,43,44,47,61,72,88,91,102,135,168,188,193,194,201,212,215,220,238,242,252,253,254,289,311,333,337,373,380,395,407,436,452,483,484,531,539,559,579,581,583,634,650,675,686,723,730,739,752,771,785,838,841,879,882,883,884,891,904,906,911,917,924,929,1023,1025,1046,1062,1172,1216,1218,1252,1280,1321,1358,1368,1380,1383,1395,1397,1496,1545,1590,1609,1688,1753,1789,1849,1871,1898,1903,1964,1973,1983,1990,2044,2109,2199,2225,2629,2724,2731,2732,2733,2736,2737,2738,2743,2747,2749,2750,2752,2753,2754,2755,2758,2760,2761,2766,2767,2772,2773,2774,2775,2778,2793,2808,2810,2811,2817,2818,2825,2829,2832,2833,2834,2835,2849,2856,2858,2860,2861,2866,2867,2868,2869,2870,2889,2892,2893,2895,2896,2897,2901,2902,2904,2905,2914,2917,2918,2919,2920,2926,2927,2932,2934,2935,2936,2938,2939,2940,2941,2942,2945,2946,2950,2951,2953,2956,2958,2960,2964,2967,2968,2972,2973,2975,2976,2983,2990,2995,2998,3000,3002,3003,3005,3008,3011,3012,3013,3015,3017,3019,3020,3023,3026,3028,3031,3032,3036,3042,3077,3079,3083,3089,3090,3093,3098,3099,3100,3101,3102,3106,3114,3119,3121,3122,3128,3129,3130,3134,3136,3139,3140,3143,3144,3147,3148,3151,3156,3174,3179,3181,3183,3198,3200,3207,3211,3214,3215,3216,3218,3219,3221,3222,3234,3240,3241,3242,3245,3248,3249,3250,3252,3254,3256,3259,3260,3263,3267,3276,3281,3282,3294,3300,3305,3306,3323,3329,3332,3333,3334,3335,3339,3385,3386,3402,3433,3434,3451,3452,3463,3466,3470,3471,3473,3474,3475,3478,3480,3490,3493,3495,3534,3536,3538,3541,3567,3568,3571,3572,3573,3577,3593,3601,3611,3612,3620,3624,3628,3635,3638,3639,3642,3647,3709,3710,3712,3714,3721,3733,3734,3737,3741,3744,3754,3763,3772,3773,3788,3790,3793,3795,3796,3798,3813,3820,3836,3838,3848,3864,3932,3936,3942,3944,3946,3950,3964,3976,3981,3982,3989,3995,3996,4012,4014,4020,4022,4025,4027,4043,4066,4068,4081,4097,4099,4101,4118,4131,4143,4148,4238
4,6,14,16,17,19,20,23,24,26,28,30,31,32,34,35,37,39,40,41,42,43,44,46,49,50,51,53,54,56,57,58,62,64,65,66,69,71,72,74,75,76,77,79,80,82,83,84,85,86,87,88,90,91,92,94,95,96,98,100,102,103,105,106,107,108,109,111,114,115,117,120,122,125,128,129,130,131,132,134,136,147,149,153,155,159,163,170,203,205,212,213,214,226,230,234,244,245,247,250,253,262,269,271,275,284,291,298,303,305,310,314,350,351,353,356,378,402,405,409,414,422,439,443,462,466,468,470,471,483,484,485,491,501,503,509,517,535,536,537,564,584,589,590,606,616,621,622,627,628,631,649,650,657,661,686,697,702,703,704,708,720,728,740,742,746,747,754,759,763,779,787,793,796,800,804,810,811,812,818,824,825,831,836,840,842,843,844,848,851,857,860,866,868,869,871,873,874,878,879,888,889,890,899,907,915,922,923,928,934,948,949,950,955,957,959,964,965,966,969,970,975,978,980,981,982,983,989,994,996,997,999,1004,1005,1006,1007,1010,1012
1,2,6,48,51,52,57,66,68,69,72,75,80,82,113,148,226,227,233,237,240,241,245,248,298,318,321,335,343,361,370,382,385,386,389,390,404,405,406,412,423,435,450,451,453,460,464,583,584,589,590,594,661,688,746,747,748,751,758,760,777,778,817,832,1000,1002,1101,1119,1208,1209,1242,1251,1288,1289,1290,1294,1296,1300,1348,1349,1350,1351,1352,1357,1382,1383,1384,1385,1386,1389,1390,1391,1396,1397,1399,1403,1404,1405,1410,1411,1413,1414,1415,1430,1501,1504,1527,1535,1538,1541,1543,1547,1550,1551,1553,1554,1555,1560,1561,1563,1564,1567,1573,1574,1623,1631,1633,1636,1719,1720,1723,1741,1756,1757,1759,1760,1761,1819,1827,1828,1830,1832,1833,1834,1835,1836,1846,1847,1848,1852,1855,1856,1857,1860,1862,1863,1864,1865,1867,1868,1875,1876,1896,1902,1907,1908,1909,1934,1936,1937,1938,1951,1954,1956,1961,1962,1964,1973,1976,1979,1982,1985,1996,1997,2001,2008,2009,2010,2012,2049,2052,2056,2097,2102,2103,2109,2136,2137,2140,2141,2150,2161,2179,2182,2185,2187,2188,2203,2211,2231,2232,2236,2237,2241,2243,2244,2274,2276,2277,2281,2282,2343,2344,2350,2356,2370,2402,2405,2468,2469,2475,2510,2524,2528,2597,2599,2601,2602
1,2,5,6,9,10,12,13,15,17,18,19,20,24,26,27,29,31,32,33,35,36,38,41,42,43,44,45,49,52,56,57,58,60,62,64,65,67,68,69,71,72,73,76,77,78,80,84,86,87,89,91,93,94,95,96,97,98,99,102,103,104,107,108,109,110,112,115,116,118,119,121,122,123,124,125,126,128,129,130,131,133,134,139,140,150,152,154,156,160,163,164,218,227,235,237,238,248,254,261,264,268,295,303,305,308,309,315,318,319,326,327,332,387,397,413,417,419,421,425,433,435,439,450,451,453,454,455,459,460,463,474,476,496,510,514,515,518,522,523,536,538,543,550,552,554,561,564,568,575,579,592,594,603,608,612,613,622,624,630,634,637,647,648,651,657,666,673,677,679,681,689,693,698,699,709,710,712,714,715,725,736,738,752,753,763,764,768,772,773,774,780,782,785,789,790,794,795,804,805,810,817,820,823,825,827,842,843,852,853,854,860,861,875,876,877,880,881,882,883,885,887,891,895,897,906,913,918,919,920,921,925,929,930,937,940,942,949,950,953,954,956,960,962,963,968,973,976,978,983,987,988,990,994,996,997,999,1002,1005,1009,1011,1012
4,5,7,29,30,31,35,62,67,76,79,80,177,216,223,226,233,234,239,264,265,270,301,305,311,313,319,349,381,387,388,393,402,453,464,479,480,495,641,642,643,666,673,675,706,725,728,757,786,796,816,820,821,850,852,860,868,885,1006,1053,1065,1126,1130,1136,1139,1506,1565,1780,1992,1997,1999,2013,2279,2289,2291,2293,2909,3182,3190,3334,3396,3510,3536,3537,3539,3541,3542,3546,3547,3548,3555,3559,3573,3587,3589,3590,3591,3592,3597,3598,3600,3602,3603,3606,3608,3609,3611,3616,3617,3618,3619,3620,3622,3625,3626,3696,3697,3698,3719,3736,3753,3757,3758,3763,3793,3794,3795,3798,3799,3808,3809,3810,3811,3813,3814,3818,3832,3833,3839,3840,3843,3845,3847,3848,3853,3854,3856,3858,3873,3875,3876,3879,3891,3895,3896,3903,3905,3908,3909,3914,3915,3916,3923,3926,3961,3965,3980,4011,4015,4018,4026,4028,4029,4030,4042,4045,4046,4062,4066,4080,4081,4083,4084,4085,4100,4101,4106,4108,4110,4130,4132,4133,4136,4141,4146,4169,4170,4174,4187,4190,4191,4192,4195,4205,4206,4207,4210,4211,4215,4218,4238,4243,4244,4287,4297,4299,4301,4314,4319,4320,4333,4334,4338,4341,4364,4374,4386,4395,4414,4428,4429,4462,4466,4511,4512,4513,4622,4628,4654,4664,4667,4698,4705,4726,4731,4734,4749,4761,4770,4771,4773,4828,4858,4862,4884,4890,4891,4896,4898,4985,4991,5135
1,5,12,19,29,37,38,47,50,52,55,63,66,73,81,84,85,93,99,104,105,108,111,113,117,121,125,132,133,142,177,184,193,196,231,236,248,260,279,280,324,335,345,359,371,386,393,410,415,416,469,497,526,531,593,599,609,619,634,662,684,743,752,755,769,774,779,788,789,816,834,841,849,853,863,864,879,898,903,908,920,929,932,944,956,967,976,1009
8,16,17,26,27,28,29,56,57,58,64,70,73,74,78,79,86,93,134,166,169,170,184,203,207,221,228,232,233,234,235,236,237,238,241,243,244,245,246,247,250,253,255,256,258,260,266,268,269,270,271,276,277,284,287,288,289,292,299,301,302,304,305,309,310,313,314,315,316,317,318,320,323,324,327,328,329,334,340,361,362,364,367,369,382,387,411,432
0,5,12,13,15,17,19,20,22,24,27,29,38,40,42,44,49,51,52,54,57,58,59,60,61,64,65,66,67,69,70,71,72,73,77,79,80,83,84,86,89,91,93,94,95,96,97,98,99,102,103,104,105,108,109,110,113,116,120,122,123,125,129,131,133,134,136,140,154,156,191,219,237,238,239,248,292,295,303,309,314,316,323,334,335,346,355,384,388,401,413,428,435,439,459,463,466,472,473,481,508,510,511,514,526,544,547,557,558,560,561,564,568,570,586,590,594,599,608,612,613,629,634,661,662,666,671,674,677,678,691,695,700,702,704,708,712,714,725,731,736,739,748,755,756,760,762,763,764,772,778,781,785,792,798,802,817,819,820,824,826,827,829,840,842,843,850,854,864,867,872,874,876,877,883,887,893,896,901,903,904,913,917,920,925,931,937,942,950,951,952,954,956,961,962,967,971,972,973,976,978,980,983,985,991,993,996,997,1002,1009,1011
2,4,5,37,58,60,61,86,88,89,91,104,113,120,133,134,161,162,163,182,203,246,268,297,299,309,437,442,448,449,468,575,581,596,725,728,748,766,767,769,781,784,788,818,819,827,829,838,844,894,899,948,958,1012,1132,1157,1177,1222,1224,1300,1302,1311,1312,1328,1344,1376,1378,1382,1383,1388,1389,1391,1392,1393,1396,1397,1399,1400,1407,1409,1411,1413,1418,1420,1422,1429,1431,1432,1435,1436,1459,1463,1466,1470,1473,1474,1481,1523,1529,1530,1531,1536,1546,1572,1574,1576,1577,1579,1581,1602,1604,1605,1622,1624,1625,1637,1638,1640,1641,1644,1649,1663,1664,1665,1667,1701,1706,1708,1710,1812,1813,1814,1817,1818,1819,1828,1833,1834,1843,1848,1849,1851,1852,1853,1858,1862,1866,1868,1872,1882,1891,1893,1904,1905,1927,1928,1930,1931,1936,1966,1971,1972,1975,1996,1998,1999,2041,2045,2046,2048,2050,2051,2056,2057,2060,2062,2072,2086,2162,2167,2174,2178,2240,2246,2250,2252,2254,2258,2270,2284,2297,2298,2299,2303,2331,2333,2334,2335,2347,2375,2378,2379,2387,2389,2391,2432,2449,2452,2453,2458,2460
0,3,4,6,9,10,14,15,17,19,20,22,23,24,25,26,30,31,32,33,34,35,37,41,42,44,46,47,49,50,51,53,54,56,57,58,59,62,64,65,67,68,69,70,71,72,74,75,76,77,79,80,82,84,85,86,87,88,90,91,94,95,96,97,100,102,103,105,106,107,108,109,110,111,112,114,115,116,117,118,120,122,123,126,128,130,132,134,136,146,147,148,149,150,156,168,191,196,197,205,212,215,226,230,233,239,258,267,268,269,274,278,283,288,292,293,294,295,298,305,307,312,313,314,316,318,325,331,338,342,350,356,362,367,370,402,404,409,421,425,427,431,433,442,448,453,455,466,482,483,484,509,510,511,513,514,523,525,534,536,541,542,543,547,557,559,560,564,569,570,580,588,590,594,598,600,603,606,607,608,610,613,614,616,617,622,626,627,628,639,646,649,666,668,669,675,678,679,683,687,689,692,693,697,702,705,709,711,712,714,729,730,733,738,749,753,756,762,763,764,766,768,770,773,776,778,780,782,783,796,798,802,806,810,818,825,826,828,837,844,848,849,850,851,854,856,860,867,871,873,874,875,887,895,896,897,899,900,904,906,907,914,917,918,923,928,930,937,941,942,945,948,950,953,955,960,961,966,969,973,975,980,983,987,989,990,992,993,994,996,999,1001,1004,1006,1007,1010,1011,1012
1,58,107,108,110,120,139,144,147,157,161,166,182,186,217,223,230,239,240,243,257,281,282,295,313,314,394,395,407,408,418,435,437,521,528,531,532,541,548,581,582,585,589,593,622,636,651,685,691,703,713,743,770,778,797,817,848,895,901,921,939,940,969,971,990,1019,1034,1049,1381,1387,1399,1407,1427,1433,1486,1516,1589,1651,1653,1654,1677,1684,1697,1834,1858,1865,1895,1897,1898,1899,1905,1906,1908,1909,1914,1916,1917,1919,1922,1923,1928,1929,1931,1933,1934,1938,1940,1944,1945,1949,1950,1952,1958,1959,1961,1962,1964,1967,1968,1970,1978,1982,1984,1985,1986,1991,1992,1993,1994,1995,1996,2003,2004,2010,2013,2017,2021,2023,2024,2031,2032,2041,2042,2043,2044,2053,2057,2066,2072,2078,2079,2086,2087,2088,2104,2106,2107,2109,2111,2113,2116,2120,2121,2129,2130,2131,2132,2136,2137,2139,2143,2150,2152,2156,2159,2160,2161,2162,2165,2166,2169,2172,2175,2177,2187,2188,2189,2190,2191,2192,2213,2216,2217,2218,2223,2227,2235,2238,2241,2262,2263,2265,2277,2280,2281,2344,2345,2348,2362,2363,2364,2365,2366,2368,2372,2374,2378,2380,2383,2386,2400,2402,2406,2417,2423,2431,2433,2450,2451,2453,2454,2456,2461,2473,2478,2490,2497,2510,2511,2512,2517,2524,2525,2526,2533,2539,2540,2549,2560,2569,2570,2573,2582,2593,2594,2596,2602,2617,2624,2626,2627,2665,2668,2672,2690,2706,2707,2715,2720,2724,2744,2750,2751,2767,2778,2807,2808,2811,2821,2824,2828,2830,2831,2858,2862,2876,3082,3083,3084,3099,3121,3125,3199,3211,3226,3245,3258,3263
1,5,11,13,15,19,20,22,30,31,36,38,42,44,51,52,54,55,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,77,80,83,84,85,89,90,91,92,93,94,96,97,98,99,102,103,104,105,108,109,110,113,116,118,119,120,122,124,125,129,131,133,134,136,137,140,154,156,180,191,200,219,230,235,260,264,287,292,294,303,314,323,346,352,354,355,358,383,384,399,401,435,441,442,444,457,459,463,472,473,478,508,509,510,511,518,519,527,528,537,542,544,550,552,560,564,568,580,592,594,599,600,604,608,610,613,623,637,639,653,661,662,671,676,677,682,700,702,704,711,712,716,722,725,730,731,732,748,755,756,760,761,762,763,772,785,798,802,807,819,820,827,837,840,843,844,847,849,850,853,854,867,872,874,875,876,887,893,896,901,903,904,913,917,920,925,930,944,950,951,952,956,961,962,963,967,973,976,978,979,980,983,985,986,993,996,1005,1008,1009
1,2,4,28,69,70,72,81,83,85,86,87,94,95,99,101,109,112,144,228,230,250,254,255,264,274,275,281,282,283,285,304,354,355,366,367,369,441,450,569,570,574,580,582,584,586,588,604,607,610,619,632,654,661,689,704,727,765,866,870,967,968,975,987,1005,1006,1009,1010,1100,1187,1198,1200,1201,1207,1208,1209,1210,1211,1212,1213,1214,1216,1217,1218,1220,1221,1222,1223,1224,1230,1233,1234,1235,1238,1239,1240,1241,1242,1250,1252,1254,1255,1256,1257,1258,1260,1261,1268,1269,1270,1271,1280,1288,1290,1291,1307,1308,1309,1310,1314,1315,1316,1323,1327,1329,1330,1331,1332,1334,1335,1336,1341,1343,1351,1353,1354,1355,1356,1357,1358,1362,1363,1364,1365,1370,1372,1373,1374,1377,1386,1388,1404,1405,1407,1409,1411,1412,1414,1416,1417,1419,1430,1469,1470,1484,1485,1489,1490,1491,1492,1497,1501,1505,1507,1508,1509,1532,1534,1535,1537,1539,1540,1550,1555,1557,1569,1571,1606,1609,1611,1612,1637,1639,1642,1658,1666,1668,1669,1679,1686,1692,1703,1704,1705,1711,1722,1725,1727,1731,1733,1735,1828,1846,1853,1854,1861,1867
9,14,17,19,20,24,31,35,47,51,56,57,59,61,64,65,67,68,69,71,72,74,75,77,82,85,87,88,91,94,96,97,105,106,107,109,110,112,115,116,117,119,120,122,124,126,127,132,135,136,149,160,179,212,213,230,239,245,264,265,273,293,327,335,342,356,381,398,400,434,472,510,561,570,589,590,600,608,613,615,630,643,652,660,663,667,682,693,699,701,706,730,731,737,742,764,770,802,803,806,828,839,843,848,859,867,874,899,900,907,908,913,917,924,947,951,963,973,974,975,979,985,990,992,996,999,1007
4,6,11,12,14,15,16,18,19,23,24,29,30,389,390,397,404,405,415,416,417,418,421,423,425,426,428,438,445,447,450,510,515,517,520,521,522,524,526,537,538,540,541,543,547,548,592,593,594,595,596,598,600,602,603,604,605,606,609,611,613,614,615,616,619,620,621,625,626,628,629,630,631,634,636,637,638,640,641,643,644,650,657,658,659,660,661,662,663,664,665,667,669,671,672,673,674,675,676,681,684,686,687,688,690,691,692,693,695,696,697,698,699,702,708,709,719,720,729,731,734,1089,1090,1092,1093,1095,1097
31,35,51,57,64,77,80,82,85,91,100,116,120,126,212,230,336,455,509,514,580,608,610,802,904,960,999,1001,1007,1010
2,4,8,9,10,11,16,18,25,28,37,38,39,41,42,45,46,48,49,50,51,52,53,54,57,58,60,63,65,74
80,82,112,753,895,904,990,1007
1,2,6,7,8,9,10,11
25,60,67,73,90,95,97,99,101,121,127,129,131,133,222,325,338,358,614,750,853,855,858,885,913,924,1009
8,12,14,17,19,21,22,25,26,27,33,37,40,41,43,44,48,50,53,54,56,57,59,60,62,63,66
35,51,61,73,74,82,95,97,119,121,125,126,663,676,755,819,921,979,987,999,1007
1,7,8,11,12,18,23,24,25,26,31,36,42,43,44,45,46,47,48,49,55
4,9,14,22,25,28,47,54,71,76,79,88,96,109,115,117,122,128,139,149,150,338,356,364,367,425,542,617,811,846,867,889,899,917,983
1,2,5,7,8,9,14,16,18,19,20,21,22,23,25,26,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,48,49,50,56
52,63,66,69,84,93,104,108,113,121,125,129,132,133,185,221,371,385,676,719,742,750,755,789,819,855,864,883,903,912,920,921,954,959,967,991
2,3,5,6,7,8,14,15,16,17,25,31,32,34,35,36,37,38,39,40,41,44,46,47,48,49,51,52,57,58,59,60,61,62,63,64
18,84,95,98,117,121,125,132,584,592,800,870,923,959,968,978,984
1,2,9,10,11,12,13,56,57,58,59,63,64,97,98,99,100
11,25,60,64,67,80,94,99,102,112,125,134,315,338,345,614,676,687,755,756,797,798,819,853,913,921,936,950,990,996,1009
1,21,23,24,25,27,33,39,40,41,47,50,52,55,57,63,64,65,66,68,78,79,81,82,83,84,85,87,88,92,97
3,34,39,57,75,82,84,91,103,106,115,117,126,233,373,766,869,873,928,981,987,1004
1,2,4,5,7,8,11,14,16,17,22,24,35,36,43,44,46,47,48,49,52,53
110
1
115,897
4,8
40,78,84,95,101,102,111,117,119,124,128,131,132,133,244,323,800,825,1008
1,2,3,7,8,17,19,20,21,24,25,26,27,29,31,35,36,37,38
20,22,77,83,109,116,122,124,163,657,725,840,872,880
18,19,23,26,36,38,45,46,59,62,63,65,68,70
11,25,67,91,94,96,104,105,121,132,503,560,614,670,674,841,861,936,996
1,2,30,38,39,40,49,50,53,54,62,63,64,68,77,78,102,103,104
0,42,45,54,70,71,102,131,133,134,187,786,810,822,850,896,917,961,962
1,3,5,6,9,11,12,14,15,50,51,52,54,55,56,57,58,60,61
13,24,52,60,61,64,67,73,84,94,102,108,120,123,130,133,274,711,795,891,913,979,993,996,1012
1,4,5,7,8,9,10,11,12,13,14,16,17,18,26,28,30,31,32,33,34,35,37,38,46
35,47,75,85,88,90,95,100,105,111,116,117,122,469,510,668,793,811,846,889,898,908,999,1010
1,7,8,9,10,11,13,20,21,24,26,29,30,31,32,33,36,39,42,43,46,47,48,55
13,60,80,86,89,94,96,99,102,112,115,116,119,122,126,130,131,133,134,268,568,715,753,854,875,876,897,942,950,956,962,990,1009,1012
1,2,3,4,5,6,7,9,11,13,15,25,27,35,45,46,47,49,50,51,52,53,54,55,64,65,66,70,71,72,73,74,75,76
4,80,85,88,106,107,114,115,116,431,675,875,887,969,989,992,1001,1006
5,6,7,8,10,11,41,42,70,73,74,77,78,83,84,86,87,109
11,13,45,60,64,73,94,99,112,119,129,131,134,681,950,954,990,996,1008,1009
2,3,6,25,28,29,34,37,40,43,44,45,46,49,50,51,54,58,61,64
75,96,98,106,114,117,132,796,923,928,969,970,992,1004,1006
1,2,3,15,28,30,31,32,33,34,43,44,51,56,60
13,54,60,61,70,71,102,103,104,108,134,292,511,560,674,798,850,896,901,917,952,961,962,976,985,993
3,5,7,8,12,16,21,22,24,26,28,31,32,33,34,35,38,40,42,43,46,47,52,53,54,56
18,43,78,95,98,121,124,125,129,133,587,852,927,968,978,991,997,1005
8,10,14,16,40,41,42,44,46,50,52,53,58,66,72,73,86,88
5,18,52,54,70,71,73,93,99,108,131,194,329,384,507,518,555,585,833,850,885,961,968,1009
6,11,12,13,16,17,19,20,21,22,25,26,30,31,39,40,44,45,46,47,48,51,52,53
0,5,19,21,78,98,99,104,105,111,117,118,121,125,129,131,133,248,321,574,718,789,916,954,976,978,981,982,991,997,1005,1009
1,2,3,6,7,9,12,14,15,16,26,27,28,29,32,33,35,36,38,39,40,41,42,43,45,46,48,49,50,51,52,55
13,24,61,67,74,86,98,110,112,115,125,130,131,261,435,561,613,642,658,953,955,978,990,997,1012
2,4,9,10,13,14,19,20,21,23,25,29,30,31,32,33,34,36,37,39,42,43,44,48,52
20,51,75,116,875
1,2,3,7,11
13,36,60,76,95,97,108,111,113,121,125,131,132,133,532,630,741,789,793,826,838,870,959,967,982,991
3,10,13,15,16,18,19,25,26,27,35,37,51,52,59,68,75,76,77,78,80,81,87,88,93,94
98,121,124,131,133,841,927
7,8,9,13,27,28,35
72,83,111,116,132,714,893,982,986
1,11,20,21,22,23,30,33,36
6,14,24,56,70,73,95,98,110,115,116,125,128,129,130,131,133,498,613,649,705,736,952,960,978,997,1012
1,3,4,6,7,8,9,15,16,18,19,21,22,28,30,36,40,41,42,43,44,45,46,47,49,53,55
13,17,27,40,54,59,60,70,71,73,79,89,99,102,104,108,122,134,292,316,428,459,472,568,570,599,671,798,820,829,850,876,896,901,917,951,952,961,962,972,976,993,1009
1,3,4,11,13,21,22,26,31,32,33,35,36,39,40,42,43,44,45,47,51,52,53,54,56,57,58,59,60,63,64,65,66,70,74,82,83,84,86,87,88,90,91
13,14,37,38,42,51,52,66,69,73,80,84,89,93,102,104,105,108,113,120,125,127,128,129,137,154,172,287,346,416,435,441,459,483,515,518,522,538,599,612,616,652,674,676,691,702,742,755,802,804,820,853,859,864,874,876,883,904,920,962,974,993
2,3,4,5,8,9,14,17,20,23,24,25,27,28,34,40,42,44,45,46,49,51,52,53,54,56,57,59,60,61,62,63,67,68,70,71,73,74,75,76,77,78,79,80,82,83,84,85,86,87,88,90,91,95,96,97,98,99,102,103,104,105
50,54,60,70,71,102,104,110,134,560,798,850,890,896,901,917,930,961,962,976
1,2,3,4,10,11,13,14,15,19,20,21,22,23,24,29,30,31,32,34
19,30,49,54,60,61,64,71,72,80,87,94,112,115,116,120,122,126,127,128,129,130,134,617,677,762,780,842,847,854,887,895,906,950,954,979,980,983,985,987,990,996,1012
1,2,3,4,8,13,14,15,17,19,22,25,32,33,34,35,36,37,39,40,42,46,47,48,49,50,52,53,54,55,56,57,59,60,62,66,67,68,69,70,74,76,80
31,43,55,60,74,78,80,90,97,98,99,105,108,119,124,125,129,131,358,444,639,659,702,750,756,761,818,947,978,991,993,997,1005,1008,1009
1,2,3,4,5,6,7,9,11,16,17,18,20,22,23,25,27,30,31,32,33,34,35,36,37,38,39,40,41,42,44,48,49,50,51
39,49,54,65,69,74,105,114,305,564,842,845,869,955,969,989,1006
5,10,13,15,16,19,20,38,39,40,41,44,46,49,60,63,64
11,56,60,61,64,73,93,94,113,119,124,129,260,345,512,936,979,996
9,10,20,25,36,37,38,54,55,56,57,59,60,61,62,70,75,86
1,5,13,52,60,63,67,73,84,89,93,94,97,99,104,105,108,112,113,119,129,130,131,134,137,231,252,382,435,538,561,593,622,640,642,662,695,719,750,753,774,804,827,855,885,891,895,906,913,925,944,950,954,967,977,990,993,996,1002,1005,1009,1012
1,2,4,5,9,10,12,13,17,19,25,31,32,35,36,37,39,44,49,50,63,66,67,68,69,70,71,72,73,74,75,77,78,81,83,84,85,86,87,88,89,90,91,95,96,97,98,99,100,101,102,103,105,109,110,112,114,119,120,121,124,127
15,19,25,64,65,70,71,87,88,114,115,124,126,128,197,262,312,338,455,544,560,614,687,749,770,778,783,798,828,899,917,918,941,952,1006
1,2,5,6,9,10,19,29,45,47,48,49,50,51,54,55,56,57,58,59,60,61,62,63,64,65,66,67,73,81,83,86,88,89,91
13,60,67,69,73,78,84,89,95,97,99,102,104,108,113,127,130,131,133,490,574,695,734,736,861,874,876,962,993,1009,1012
1,2,4,5,6,7,8,11,12,13,14,15,16,19,20,22,23,27,33,34,35,36,37,38,39,40,42,43,45,46,47
11,29,37,38,44,86,95,98,101,102,108,113,119,121,125,127,129,131,133,244,334,347,565,612,816,826,933,949,977,978,1000,1008
2,4,9,10,16,17,18,19,20,21,24,26,30,31,32,34,35,40,42,43,44,45,47,48,49,50,55,56,58,59,60,62
16,35,105,110,117,132,353,759,889,934,970,999
2,5,6,7,11,14,16,17,18,20,23,26
13,52,55,57,60,64,67,89,94,97,99,102,104,108,111,112,113,116,117,127,129,132,518,568,608,652,737,743,750,753,793,811,839,855,864,876,895,906,913,924,925,956,959,962,967,974,990,993,1002,1009
1,5,6,8,12,13,15,17,18,19,22,24,26,29,31,37,39,40,41,49,52,53,54,55,56,57,58,59,60,61,63,64,65,67,70,71,72,73,74,75,77,78,79,81,83,84,87,90,91,93
12,31,42,45,51,54,61,68,70,71,94,102,104,108,113,120,134,287,444,511,617,663,694,699,769,798,850,896,917,920,952,961,962,967,985,993
2,3,4,5,6,9,10,23,25,29,30,33,36,38,40,41,42,43,44,46,48,49,50,62,63,65,66,67,68,69,70,71,72,73,74,76
13,18,20,33,60,61,64,66,73,89,95,99,110,125,129,130,131,133,163,525,599,676,755,819,820,956,968,979,985,1005,1012
2,3,13,25,26,30,32,33,37,38,39,40,43,47,48,54,55,57,66,78,79,80,81,82,83,84,85,87,89,90,96
66,72,87,111,117,126,129,132,423,677,813,844,870,959,970,982
1,2,3,6,14,24,25,72,77,78,79,80,83,87,95,98
4,5,31,32,34,35,46,73,75,80,82,84,88,94,96,110,115,116,126,133,268,269,409,431,509,524,577,594,764,827,853,854,867,887,900,904,930,973,987,996,999,1007
2,3,5,6,7,8,9,14,15,20,24,26,30,31,32,48,49,56,57,58,59,60,61,63,65,66,67,68,69,70,71,73,74,75,78,80,83,84,85,86,87,91
18,27,43,58,59,75,78,95,124,125,128,129,133,772,805,855,928,954,968,988,991,1005
1,2,6,8,10,11,13,18,19,29,30,42,47,48,50,54,55,60,61,62,64,67
30,34,35,41,56,62,72,75,77,82,84,87,88,94,107,112,115,120,129,131,607,693,813,847,873,885,897,996,999,1007
1,8,11,13,14,16,17,18,19,35,36,38,39,45,46,49,50,51,53,54,55,57,58,59,66,67,68,73,76,92
76,85,96,100,107,120,341,994,1001,1010
4,8,9,22,23,24,27,28,32,45
5,11,13,21,60,73,80,88,89,91,94,99,108,112,115,122,125,129,130,133,321,496,520,640,709,749,750,753,755,756,762,789,827,852,855,876,887,895,899,904,936,954,956,983,990,993,996,1009,1012
1,3,5,9,12,14,19,22,23,24,27,32,35,39,41,42,43,49,54,65,66,67,68,69,71,72,73,74,75,76,77,84,85,86,87,88,90,91,92,93,94,95,98,99,101,104,107,108,113
13,15,70,71,94,102,104,116,122,134,140,316,510,511,560,760,798,850,853,901,917,925,952,962,973,996
1,2,18,21,28,29,31,36,39,43,44,45,47,48,50,52,56,58,59,64,65,66,81,82,84,85
1,44,86,108,124,127,131,133,190,652,817,826,883,924
1,10,14,54,55,70,72,73,77,91,104,106,131,132
26,34,53,57,72,74,75,84,91,95,106,107,115,119,122,126,132,373,575,608,776,873,902,928,953,955,975,983,987,1004,1008
1,2,3,6,7,9,10,11,12,13,14,15,23,32,34,43,44,54,55,57,58,59,60,61,62,64,65,67,68,69,78
24,35,69,84,86,102,114,117,132,350,584,747,811,825,844,871,889,969,999,1006
1,2,5,7,9,10,24,25,28,29,30,31,32,33,34,35,38,41,42,53
13,17,33,38,42,45,54,55,63,68,70,71,85,86,90,93,97,101,102,104,108,127,129,134,154,175,211,282,349,399,430,435,450,500,547,570,580,623,674,694,715,798,850,903,917,925,952,961,963,993
2,3,4,5,6,7,9,10,12,22,27,30,31,32,33,35,37,38,39,45,48,49,50,53,54,55,56,57,58,59,60,61,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,88
18,27,61,64,80,88,91,94,96,112,120,125,129,130,131,676,749,755,756,762,819,867,906,968,979,980,985,990,991,996,1005,1012
4,5,11,12,15,17,18,21,22,24,25,31,36,40,41,42,43,44,46,47,48,49,50,54,59,60,61,62,64,65,70,74
9,26,34,55,60,67,72,73,82,84,88,94,98,101,105,110,113,115,119,124,127,128,129,131,133,261,327,398,416,430,497,506,512,561,595,601,635,750,761,805,836,852,859,897,900,913,954,978,996,1007
1,2,3,4,6,9,10,11,28,29,30,31,32,33,34,35,36,40,42,43,44,51,54,55,58,59,60,61,62,63,64,65,66,67,68,69,76,77,78,79,80,82,83,87,88,89,90,91,92,109
46,47,64,88,94,107,114,117,196,479,811,846,889,898,900,996,1006
1,2,3,5,6,7,12,13,15,16,17,18,22,23,24,25,30
13,18,27,29,42,43,50,89,95,102,104,108,125,129,131,309,346,739,817,858,876,890,920,921,931,940,962,968,991,993,1005
4,9,10,11,12,13,14,15,19,23,25,29,54,66,68,69,70,71,72,74,75,76,78,79,80,81,84,89,90,93,105
20,46,74,80,91,106,115,126,132,412,646,923,955,1004
1,2,5,8,9,10,11,12,13,14,15,16,19,20
14,30,35,46,53,56,79,87,90,105,110,115,116,126,128,132,150,350,356,425,613,668,724,847,871,881,889,953,960,999
1,2,4,6,7,8,10,13,15,20,21,25,30,31,34,43,44,45,46,47,48,50,51,52,59,60,62,63,69,71
37,76,117,127,132,341,378,423,563,796,839,948,981
1,21,38,58,59,62,65,66,67,69,89,90,91
4,34,56,58,65,74,77,84,85,88,94,105,108,112,116,120,130,212,215,226,269,295,298,313,314,350,559,639,693,697,778,828,849,895,900,966,993,996,1012
1,2,4,6,9,11,13,15,17,21,26,27,28,30,31,34,38,39,40,41,42,43,44,46,47,48,49,50,51,52,54,55,57,58,60,63,64,69,71
1,33,44,45,60,61,68,71,102,104,108,113,134,451,500,511,525,592,694,774,850,917,920,949,950,962,985,993
1,4,6,10,11,12,32,36,37,38,39,40,48,49,67,68,69,70,72,73,76,77,78,80,81,82,83,84
78,105,108,124,131,132,587,757,816,817,859,883
1,9,18,19,21,22,23,24,25,31,39,41
35,42,62,79,85,105,107,114,117,132,134,290,412,437,811,959,994,999,1001,1006
2,3,4,5,6,7,12,34,36,40,41,43,44,45,47,48,53,55,56,78
5,65,77,81,84,93,95,99,104,393,593,599,626,778,956,976,1009
1,2,3,4,7,8,10,25,26,27,28,32,34,35,40,41,50
25,55,60,64,66,67,70,73,80,94,96,99,102,119,127,129,131,133,134,338,345,560,582,614,739,756,762,797,867,887,904,913,950,952,956,996,1000,1008,1009
7,9,11,13,15,16,17,18,25,28,29,32,33,38,39,40,41,45,46,47,48,49,50,53,54,56,58,61,62,64,65,66,67,68,69,71,72,77,79
77,103,109,110,116,123,133,439,590,725,840,971,973,1011
10,14,18,19,21,22,25,26,35,36,37,38,40,41
35,85,100,107,111,132,793,846,889,994,999,1001,1010
7,12,35,40,43,44,47,49,51,55,62,67,90
14,25,35,51,54,56,62,65,70,79,87,88,96,103,115,125,130,134,149,230,262,292,356,614,687,749,770,778,783,828,899,900,912,918,941,950,952,999,1012
2,6,9,12,13,16,20,23,24,26,34,44,46,47,48,49,51,52,53,56,57,58,59,60,63,64,65,67,68,72,74,76,77,78,80,81,82,85,87
27,64,94,97,108,124,125,131,133,676,751,853,883,912,940,991,996,1003
3,5,6,9,10,11,17,18,38,39,41,42,43,44,45,47,48,49
38,102,117,127,132,265,323,970
62,66,71,72,77,78,80,83
0,18,29,55,84,93,95,99,104,108,118,125,127,131,157,240,287,334,424,592,661,729,801,858,903,920,968,991,1009
14,16,17,18,19,27,28,29,34,35,42,45,46,47,56,64,65,66,80,81,82,84,85,86,87,90,92,93,94
14,37,42,44,54,56,60,67,69,73,80,89,90,91,94,102,104,108,112,113,129,130,131,133,134,149,174,356,674,695,705,739,753,756,818,825,851,866,874,885,887,895,903,904,906,913,925,933,950,967,990,993,996,1012
3,7,9,10,11,16,18,19,20,21,25,26,27,28,30,41,51,55,63,64,65,69,70,72,73,74,75,76,83,84,85,94,96,97,98,99,101,102,103,104,106,107,108,109,110,111,112,113,114,115,119,123,124,128
1,21,89,102,130,321,459,718,774,1002,1012
1,3,13,14,77,78,79,80,81,91,154
21,45,49,50,64,67,68,80,90,92,93,94,102,105,108,122,127,129,130,144,358,450,451,601,640,652,744,762,803,816,842,844,855,890,983,996,1012
1,24,25,26,28,29,33,34,35,36,37,38,39,40,53,54,57,59,60,61,62,64,65,66,67,68,88,89,90,103,104,105,106,107,108,109,110
6,46,96,106,126,646,922,1004
8,9,10,11,12,13,14,15
1,13,42,45,68,70,71,83,97,102,105,108,113,129,134,346,401,450,511,560,671,694,744,798,850,917,952,954,961,962,963,986,993
1,4,5,10,12,18,26,27,28,30,31,33,34,35,41,42,43,44,45,46,47,49,52,54,56,62,66,67,69,70,71,72,74
7,20,57,58,59,92,103,109,110,122,284,298,390,657,713,857,882,946
2,5,7,11,13,16,20,60,65,66,72,73,75,78,81,85,89,91
11,36,42,56,78,93,102,110,112,116,120,130,133,164,229,325,587,607,711,854,936,990,1012
4,5,6,7,12,13,18,19,20,23,26,29,30,31,33,34,38,39,40,42,43,44,47
25,70,76,95,113,121,131,133,338,504,626,645,841,853,948,952,967
1,2,3,5,6,7,14,36,37,38,39,41,42,43,44,45,46
1,11,19,60,61,66,67,69,73,80,84,85,90,93,94,97,99,105,113,125,127,129,131,134,175,344,349,409,645,676,690,691,719,742,750,756,774,818,819,855,859,874,913,921,936,950,956,963,967,974,985,991,996,1001,1009
3,5,7,8,9,10,11,13,14,18,19,21,24,25,27,28,31,35,36,43,47,50,52,53,54,55,56,57,58,60,61,62,63,64,65,68,70,71,73,75,79,80,81,82,83,84,85,86,87,88,89,90,91,93,95
35,82,120,965,999
1,24,42,50,51
1,29,45,54,68,70,71,104,108,117,125,133,134,187,292,324,334,511,560,568,671,674,694,755,798,850,896,917,950,952,961,993
1,2,7,9,17,24,27,29,31,32,33,34,43,44,46,47,48,51,52,53,54,56,61,62,65,67,68,69,70,72,75,77
11,57,60,61,64,73,80,91,94,119,120,127,134,506,642,698,762,807,936,950,966,974,979,980,996
11,12,24,26,32,35,38,39,52,53,58,60,61,62,63,64,66,67,78,79,80,81,83,84,92
9,67,117,122,124,131,667,670,861,879
5,13,17,22,35,36,41,42,49,53
28,30,31,56,66,74,79,87,88,90,91,100,105,111,117,132,350,422,509,548,631,759,793,811,847,889,900,1010
3,5,6,8,9,11,12,16,18,23,25,26,29,32,35,40,42,43,44,47,52,53,55,58,59,61,62,63
43,64,68,95,115,126,130,131,858,931,1012
1,2,10,11,42,46,49,51,52,54,57
52,54,56,60,62,64,69,70,71,73,77,80,87,88,91,94,96,102,103,112,115,116,126,128,129,130,131,134,219,278,293,345,441,603,689,739,756,864,874,896,906,937,941,950,952,960,961,971,973,990,996,1005,1012
2,5,6,9,11,13,14,18,19,22,23,24,28,29,30,32,33,34,35,40,44,45,46,48,49,54,55,56,57,58,59,60,61,62,63,64,65,67,68,69,70,71,75,76,78,81,83,84,85,89,90,91,96
1,38,44,101,102,119,124,127,132,133,499,571,701,710,757,774,924,933,949,974,977
3,4,22,29,34,37,40,51,52,55,56,57,58,59,61,63,64,77,82,85,88
102,131,133,592,853
1,7,22,23,24
1,27,52,69,76,80,86,93,96,99,101,104,108,113,115,131,133,168,629,645,662,853,864,867,874,883,903,920,937,960,967,1009
1,2,3,4,10,11,13,14,18,19,20,25,42,47,48,49,50,52,53,54,56,58,59,60,61,77,81,82,83,87,89,90
4,25,31,34,65,66,71,80,82,88,90,91,96,107,109,111,112,117,126,130,132,224,268,269,312,336,367,422,431,509,559,564,631,691,793,811,828,850,867,873,895,907,1007,1012
3,4,6,7,8,10,12,14,15,16,17,18,23,25,26,27,28,29,30,34,36,37,38,39,40,41,42,44,47,49,51,52,53,55,56,57,58,60,65,66,67,68,69,71
10,35,48,58,105,107,109,112,114,115,116,132,439,588,907,994,999,1006
1,6,7,10,11,21,24,28,30,31,33,34,37,38,44,48,53,55
1,25,43,54,60,67,73,84,93,95,97,98,99,101,113,119,125,129,131,133,391,477,614,641,662,676,774,885,896,913,997,1009
1,2,3,4,5,6,8,10,13,14,16,17,19,22,24,48,49,50,54,55,74,76,77,80,81,82,83,84,85,86,87,89
58,66,69,77,80,87,100,126,293,691,763,772,874,887,1010
13,14,15,50,52,53,55,56,57,58,69,76,77,79,81
17,42,49,60,72,75,86,89,94,99,102,104,112,115,130,134,459,460,568,677,715,876,895,906,909,925,950,956,962,976,996,1009,1012
2,3,4,5,6,8,9,10,11,16,25,33,35,38,46,49,51,52,54,55,56,57,58,59,61,63,66,69,74,80,81,83,91
19,24,35,51,54,56,62,68,75,80,82,88,96,105,109,110,111,115,116,117,120,132,134,342,381,409,451,699,711,793,811,859,871,889,900,930,973,999,1007
8,9,11,14,15,16,17,19,21,22,23,24,26,35,36,37,38,39,42,44,45,46,48,51,52,53,54,55,56,57,58,61,62,63,64,65,66,68,69
36,52,73,78,81,84,93,97,98,99,104,119,129,131,235,279,791,891,903,919,963,978,1008,1009
1,2,3,5,7,8,9,10,15,16,17,27,29,46,47,49,50,51,52,53,54,55,65,66
44,95,113,127,131,133,396,565,737,932,949
1,10,55,58,59,60,62,80,83,108,109
31,88,107,110,128,613,964
2,4,71,72,73,74,141
1,12,13,27,36,42,60,64,67,73,89,95,102,104,108,112,113,129,130,131,235,260,346,475,769,774,791,858,876,906,913,925,932,954,962,967,990,993,1012
2,3,4,5,6,7,8,13,15,16,17,20,21,24,26,28,36,38,46,48,49,51,52,53,54,55,56,57,58,59,61,64,68,70,71,73,74,76,84
43,67,98,129,133,670,805,852,861,978,997,1005
1,35,40,41,53,72,74,75,90,94,95,96
1,11,25,50,63,64,69,80,93,94,97,121,124,127,133,134,184,338,614,634,687,742,756,841,874,887,926,936,950,1003
1,3,10,11,12,14,16,18,22,23,24,33,39,42,50,52,53,55,58,59,60,61,62,64,65,66,70,71,72,73
31,34,35,48,69,75,102,105,106,113,117,128,132,335,509,644,735,871,873,874,923,928,959,981,999,1004
1,3,5,8,9,10,11,13,18,19,20,21,24,25,26,27,28,29,31,32,33,34,35,36,38,43
35,84,85,86,91,100,107,120,754,966,994,999,1001,1010
1,15,16,74,75,78,79,80,89,90,91,92,93,96
27,119,125,129,130,133,642,991,1000,1008,1012
1,6,7,13,14,15,16,17,23,28,29
38,47,52,65,77,84,93,99,104,109,113,312,416,497,518,526,593,612,662,804,824,898,920,976,1009
5,6,7,8,9,12,13,17,22,23,24,25,27,32,33,36,37,38,39,41,42,43,47,48,52
99,102,108,132,423,817,870,923,959,984,1009
1,2,3,13,16,17,18,20,21,22,23
42,95,125,151,481,792,810
2,12,14,16,18,64,66
99,1009
3,6
13,20,26,45,89,102,104,108,110,112,115,116,119,122,125,126,128,133,228,254,399,435,568,630,635,636,676,689,695,776,875,876,881,895,925,953,973,976,988,993,1008
3,4,12,13,15,16,22,23,26,27,28,37,39,40,42,48,77,78,79,81,82,83,84,85,87,89,90,91,92,99,103,104,106,107,108,109,110,114,115,116,118
4,17,62,103,369,523,570,679,768
2,3,7,10,11,12,13,16,19
61,70,129,671,855,961,985
1,2,3,4,5,6,7
0,84,90,99,104,129,605,818,855,956,976,1009
1,2,3,25,51,52,53,54,55,59,85,103
44,63,78,84,89,94,97,98,99,101,102,111,113,119,121,124,127,244,280,574,629,757,820,933,944,949,977,996,997,1009
2,3,4,5,6,8,15,16,17,18,19,20,37,38,41,44,47,48,49,50,67,68,69,70,71,72,73,74,75,76
14,21,30,44,55,94,97,102,112,119,125,129,130,131,133,149,356,730,739,753,761,767,819,823,933,996,1005,1008,1012
2,3,4,5,6,9,10,12,13,17,18,24,28,30,38,39,40,41,43,44,45,46,47,49,50,52,57,61,65
13,36,50,52,60,64,65,67,69,84,86,89,93,97,99,102,104,108,112,113,125,127,129,130,131,132,133,203,312,518,538,568,616,634,652,662,674,676,715,750,789,820,823,864,876,883,885,906,913,925,956,962,967,974,990,991,993,1002,1009,1012
3,4,5,8,10,11,12,13,14,15,28,33,34,36,39,42,46,48,51,54,56,58,61,63,71,72,73,74,75,76,77,78,79,81,82,83,85,86,94,95,96,97,100,102,103,104,106,108,109,111,113,115,117,118,119,120,121,123,124,126
18,37,38,44,86,95,98,101,102,108,121,124,125,127,499,612,757,816,841,877,921,933,949,968,974,978,997
1,11,12,21,23,24,26,29,30,37,50,51,56,63,65,66,67,74,78,80,81,89,90,91,96,97,98
5,25,55,61,64,66,67,76,84,102,117,127,131,132,133,329,338,377,804,825,853,861,924,959,974,979
1,2,3,4,5,8,15,16,17,18,37,39,44,45,49,50,51,52,53,54,55,62,63,64,65,66
14,54,56,59,70,80,85,87,91,94,100,112,115,126,128,130,149,269,356,448,455,483,580,610,693,782,846,896,897,904,961,996,1010,1012
4,5,6,7,8,9,12,13,14,15,35,37,38,41,43,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,81,83
39,75,82,87,100,107,114,128,379,437,686,939,941,1006,1010
3,5,11,12,13,16,23,27,30,45,50,52,53,60,61
84,99,104,108,133,789,804,956,976,993,1009
4,35,46,47,48,49,50,52,63,64,93
1,5,81,84,99,104,113,117,125,193,324,345,386,634,662,755,774,788,879,956,967,976,1009
3,4,5,6,23,35,37,39,40,41,42,43,44,45,46,47,48,49,50,55,56,68,80
13,42,52,73,78,89,99,102,104,127,131,435,459,518,568,786,876,956,962,976,1009
17,23,25,26,27,34,43,58,67,69,70,71,73,75,76,77,84,86,97,106,113
13,38,42,85,86,89,102,104,108,134,287,294,346,435,610,661,695,715,739,849,876,883,903,920,962,976,993,1002
8,9,15,23,24,31,42,51,69,74,75,79,83,85,86,87,89,90,91,98,101,102,104,108,112,114,130,131
31,70,84,110,116,134,444,613,961,973
1,2,3,4,5,83,84,85,86,87
21,42,50,60,73,86,97,108,124,127,129,131,133,177,347,538,581,750,767,805,817,852,883,885,954,993
1,2,5,6,9,10,15,46,47,48,64,66,75,76,77,79,84,85,86,89,90,93,118,120,125,127
11,25,44,47,50,53,54,78,84,89,93,108,111,117,124,127,132,133,177,240,290,338,469,574,652,754,779,800,811,879,883,908,936,949,982
1,2,8,10,12,13,14,15,18,20,29,30,41,44,47,48,49,52,54,62,63,64,65,66,67,70,75,76,77,78,79,80,81,87,93
37,49,54,57,305,466,708,842,851,866
36,51,55,74,75,78,96,98,133,134
64,87,88,132,783,828,900,941
1,3,9,10,11,12,13,14
85,88,90,100,610,818,1001,1010
6,8,9,59,61,62,66,116
35,62,66,69,80,82,90,94,105,108,112,115,116,117,132,134,297,350,422,511,564,569,589,616,631,668,691,700,702,742,811,818,846,871,874,887,889,895,904,950,990,996,999,1007
3,4,6,11,16,17,22,23,29,32,40,42,43,45,47,48,49,50,51,52,53,57,58,59,60,62,63,64,65,66,68,70,72,77,79,81,83,86,89,90,92,93,96,97
44,76,77,109,111,116,117,120,126,132,303,341,514,854,879,933,949,959,965,973,980,982,987
10,11,15,16,26,28,36,40,49,70,71,72,73,74,80,84,90,92,93,94,95,96,99
15,96,110,116,122,126,164,594,712,764,854,875,942,973
1,2,5,50,65,77,80,85,92,96,102,104,105,112
3,8,27,51,56,64,69,74,75,82,87,91,95,97,115,230,239,266,273,501,569,671,742,874,914,943,1007
1,2,3,8,9,12,15,17,22,23,24,29,30,31,36,39,41,42,43,44,45,46,47,48,49,50,51
3,91,102,115,116,126,503,594,712,854,953,973,987
1,2,4,6,20,51,52,54,55,57,58,60,61
34,46,75,76,86,106,115,126,132,646,782,873,923,928,948,1004
2,12,16,17,19,33,38,43,48,50,54,56,60,64,65,79
34,75,100,106,107,117,132,796,811,873,889,923,928,975,1004,1010
3,4,10,34,37,39,54,55,56,59,60,73,74,77,101,107
4,16,26,64,65,82,87,88,115,116,127,312,362,675,770,776,783,875,899,900,918,1007
1,2,6,7,9,10,13,19,20,22,23,24,25,26,27,31,32,33,35,37,38,39
60,63,64,68,84,94,99,116,121,124,129,280,282,450,538,568,944,956,996,1009
2,42,44,46,48,49,57,58,61,62,63,64,75,77,78,80,109,111,112,118
4,20,31,35,41,57,66,67,69,72,73,77,80,82,83,91,96,112,113,116,117,126,127,129,202,268,431,473,509,514,652,670,708,756,762,813,848,867,879,887,889,895,937,954,999,1007
1,2,4,6,10,12,13,16,17,18,22,24,32,40,41,44,47,50,51,53,54,55,58,64,65,66,67,68,70,72,75,76,78,79,81,82,83,84,85,87,89,91,92,93,95,103
71,73,134,547,901,917
3,4,72,73,74,76
31,34,35,46,65,74,75,80,88,91,105,108,112,114,132,409,509,646,778,832,871,873,900,923,928,955,969,999
1,2,3,5,6,9,10,12,13,14,17,18,20,22,24,26,27,28,29,30,32,33,34,36,37,40,42,43
35,37,49,54,57,69,85,91,102,112,131,133,466,693,708,843,851,866,999,1001
2,12,18,33,36,37,39,40,41,43,44,46,47,49,51,52,60,62,64,66
14,22,28,42,56,59,60,66,67,69,92,94,104,105,107,108,112,129,130,132,133,169,465,538,542,548,616,642,674,705,837,906,913,925,993,994,996,1012
1,4,8,9,10,11,12,13,14,15,16,17,23,25,26,29,30,32,33,34,36,37,38,39,40,42,43,44,48,49,50,51,52,54,57,58,59,60
20,22,28,34,42,44,46,53,102,106,110,116,131,132,147,155,291,383,470,536,548,646,764,825,873,875,886,902,923,931,1004
1,3,4,5,10,11,13,20,30,35,40,54,55,56,57,74,76,78,80,85,86,87,88,91,92,99,100,102,103,104,109
58,77,100,763,772,1010
5,7,12,13,14,19
22,41,91,96,103,110,115,116,120,131,160,542,594,597,848,854,897,930,965,973
1,2,10,11,12,19,21,33,52,53,55,56,57,58,59,60,61,67,86,90
60,61,64,73,94,98,99,112,130,131,753,885,906,979,985,990,1009,1012
2,14,16,21,26,27,29,41,45,46,50,51,55,66,67,71,73,77
58,85,100,132,203,1001,1010
1,2,8,10,12,13,19
27,58,70,71,72,77,83,94,98,103,116,125,129,131,133,134,136,326,677,712,798,850,872,921,931,940,952,978,991,997,1005
1,12,13,14,18,21,22,23,29,31,37,43,45,46,47,48,49,52,53,58,59,60,61,62,63,64,65,70,71,72,74
13,36,44,61,64,67,73,74,75,84,105,108,110,112,115,126,130,613,709,949,953,955,979,985,990,993,1012
2,3,4,9,10,11,12,14,16,17,19,20,21,22,27,29,40,41,42,43,45,47,51,52,53,54,65
57,58,77,86,90,106,107,114,115,117,132,598,608,645,651,763,811,818,907,945,959,989,992,994
1,2,11,30,31,62,71,72,74,76,78,86,87,88,90,96,98,99,101,102,103,104,126,133
35,36,52,61,72,73,88,94,99,112,125,134,441,476,518,676,693,755,791,895,899,906,921,950,979,996,999,1009
5,7,9,17,19,45,46,47,48,54,58,59,60,61,63,64,66,67,68,69,70,73,74,75,83,84,89,90
5,36,45,60,68,71,86,89,104,115,122,125,126,128,130,131,133,150,168,193,463,676,681,820,925,953,1012
1,2,8,9,11,18,19,20,21,23,24,26,28,59,60,61,62,63,64,65,72,73,76,77,78,79,80
13,43,52,60,64,66,67,73,89,90,93,94,99,112,130,131,133,134,270,302,634,690,789,821,823,853,891,906,913,950,956,990,996,1009,1012
2,3,5,16,18,19,20,21,22,24,25,33,39,42,43,45,48,49,50,51,52,54,56,58,59,60,62,63,64,65,66,68,75,79,80
13,22,42,51,54,60,64,70,71,73,80,83,85,94,99,102,108,110,113,122,134,346,463,511,663,837,896,901,904,917,930,952,961,962,983,986,993,996,1009
1,2,5,13,16,18,20,22,28,29,32,33,34,35,36,37,38,40,41,42,44,45,46,47,53,54,55,56,59,64,66,67,68,69,70,71,72,73,74
34,46,74,76,97,106,114,115,126,132,181,644,873,923,948,955,987,1004,1006
1,3,4,5,6,8,10,12,17,20,21,22,23,24,25,26,28,30,31
3,35,49,61,64,65,67,73,84,87,94,97,112,115,134,168,345,441,466,586,753,766,806,897,906,913,918,947,950,979,985,996,999
1,21,23,27,28,29,32,46,47,48,51,52,56,59,60,61,63,64,65,66,67,68,69,72,74,76,77,78,79,81,83,86,106
6,13,17,21,45,55,64,68,99,110,121,124,128,129,131,132,133,218,450,592,613,681,718,752,761,870,929,956,959,1000,1003
1,2,3,4,17,18,19,26,27,28,38,44,45,47,48,53,54,59,60,61,62,68,69,76,77,78,81,82,83,85,88
4,34,35,49,54,75,90,102,105,107,114,117,132,350,422,631,759,796,871,873,889,923,928,959,975,999,1006
1,9,11,12,14,15,16,20,21,23,29,30,38,39,41,42,43,44,46,54,56,62,63,64,66,68,73
15,20,29,42,58,71,83,94,102,104,105,108,116,122,125,127,134,140,154,287,346,401,459,473,511,560,568,726,748,760,819,825,854,901,917,921,950,962,974,976,983,993,996
6,7,8,11,14,16,17,20,24,27,28,29,33,36,38,44,46,47,48,49,50,51,52,53,54,56,58,59,65,66,67,68,69,74,76,77,78,79,85,86,88,89,91
131,133
2,15
18,43,98,119,125,131,931,968,978,997,1008
1,4,35,37,38,57,60,61,69,78,80
1,5,36,50,55,58,60,63,79,93,97,104,109,118,119,121,133,158,193,226,231,298,467,552,642,761,774,925,929,944,957,972,1008
6,7,8,10,13,15,16,17,22,24,25,26,27,28,54,67,71,72,73,74,75,77,89,90,92,93,95,96,97,98,99,102,128
1,18,43,50,52,60,97,98,111,113,125,129,131,196,417,579,774,890,921,931,957,963,967,968,997,1005
1,2,4,69,70,71,75,77,78,79,83,84,85,86,87,88,89,94,95,96,97,101,102,103,105,106
58,65,77,109,110,122,124,125,131,133,676,725,755,772,840,942
10,22,24,31,33,37,38,40,42,43,44,45,46,48,49,50
28,364,568
1,2,3
17,32,50,82,99,101,113,129,130,131,133,236,568,582,890,956,957,1005,1009,1012
1,2,10,11,13,14,17,18,26,29,30,33,34,35,40,41,44,45,46,54
1,8,14,19,27,43,69,84,95,98,105,109,113,120,125,128,129,131,133,225,236,508,649,742,774,823,840,940,980,988,991,997,1005
1,3,4,5,9,10,11,12,13,15,16,17,22,26,28,29,30,32,34,37,39,40,41,42,43,44,45,46,50,51,53,55,56
47,74,85,87,88,96,100,109,110,115,213,367,867,900,918,930,941,1001,1010
1,2,5,9,11,12,24,26,47,48,49,51,52,53,54,58,59,62,74
35,85,87,88,100,112,610,828,941,999,1001,1010
1,11,12,17,67,68,72,73,74,75,81,131
0,33,60,67,78,80,84,91,94,99,122,127,131,133,407,525,822,823,887,913,983,996,1009
3,4,9,10,11,13,14,15,17,19,20,21,27,29,30,31,34,36,38,39,40,42,44
93,99,121,280,929,1009
2,9,11,12,13,20
5,35,61,67,72,73,94,98,99,112,125,127,129,133,151,398,519,564,660,670,750,827,906,921,924,927,978,979,985,996,997,999,1009
2,9,26,29,31,48,50,55,59,60,61,65,66,67,69,71,73,74,75,77,78,80,81,82,83,84,86,99,103,105,106,113,117
10,20,22,57,59,60,66,69,70,71,73,98,122,134,163,316,511,558,588,616,657,798,850,860,901,952,978,997
1,5,14,15,27,28,29,30,32,33,34,38,63,65,66,67,68,80,81,82,85,86,87,112,114,115,118,119
31,35,49,50,65,71,91,115,116,126,127,128,320,332,463,652,806,881,999
2,3,4,6,7,8,10,14,15,21,22,26,28,29,30,31,32,36,37
38,45,68,70,71,108,134,511,671,694,798,850,917,952,993
1,9,11,13,15,16,63,65,66,71,74,75,76,78,79
18,20,24,42,44,46,58,60,67,70,78,95,102,108,110,116,131,132,133,290,574,592,670,772,810,858,861,949,968
1,2,3,9,10,16,22,23,29,31,32,34,35,36,37,41,43,44,45,46,47,48,49,51,56,57,62,63,64
28,30,37,50,52,54,60,63,64,93,98,99,102,104,108,113,119,121,125,127,129,130,131,133,384,477,540,548,595,661,739,750,751,817,823,855,863,883,885,891,903,921,929,932,944,954,956,974,978,991,993,997,1012
1,2,3,5,6,9,10,12,13,14,17,18,19,20,32,34,35,36,41,43,46,52,57,58,61,62,63,64,66,70,71,72,73,75,76,77,78,80,84,85,86,87,88,89,91,92,93,95,96,98,100,102,108
1,18,36,38,43,54,55,60,61,70,71,73,84,89,93,98,99,108,113,129,131,222,316,441,463,476,560,593,622,695,698,710,743,754,791,804,823,853,883,885,896,952,956,961,967,968,985,997,1000,1005,1009
1,2,5,6,7,9,10,11,12,19,20,23,35,36,38,39,43,44,45,50,67,68,69,70,71,73,74,75,78,79,80,81,82,83,84,89,92,94,95,102,104,106,108,112,113,114,115,116,120,121,122
4,42,50,69,74,84,85,86,87,90,102,104,106,115,117,123,125,126,130,271,462,491,559,599,616,631,675,742,780,782,796,810,825,877,890,903,941,1004,1011
1,10,11,13,14,15,17,20,24,25,31,32,34,35,36,38,40,42,43,44,53,63,64,65,66,67,68,69,71,72,73,75,76,77,78,79,81,83,85
5,11,13,38,42,60,64,73,86,89,93,94,97,102,104,108,119,121,134,140,211,280,287,435,568,674,710,715,739,827,876,883,925,936,947,962,976,993
1,3,10,11,14,15,16,17,22,25,26,27,28,32,52,73,75,76,89,90,92,93,103,104,105,107,108,110,112,113,114,115,117,119,120,122,124,144
18,27,43,67,73,97,98,101,113,117,125,129,131,706,750,861,931,967,968,978,981,991,997
2,4,6,8,9,22,25,26,27,28,32,34,36,48,49,51,53,54,56,57,58,59,61
0,25,31,52,60,63,64,66,67,69,73,81,93,94,99,105,112,113,121,124,127,131,133,134,224,338,616,737,788,789,853,864,891,913,924,944,950,956,967,990,996,1009
1,3,4,7,14,15,16,17,19,20,22,25,26,29,33,34,35,38,39,42,46,50,56,57,58,59,60,62,63,64,65,67,68,70,71,72,73,76,79,80,82,83
8,37,44,60,63,64,68,84,94,95,99,108,119,121,124,127,129,131,132,134,571,592,652,661,736,754,799,866,905,933,950,956,977,996,1000,1005,1009
4,5,7,8,9,10,11,12,16,17,21,22,38,41,42,45,49,56,59,61,64,65,66,67,68,69,70,71,72,74,75,77,93,97,99,100,102
46,49,68,80,88,96,105,106,112,115,117,126,132,212,646,699,762,811,871,900,981,987,1004
1,2,3,4,5,6,11,12,14,19,25,29,30,31,32,33,34,35,39,40,45,46,47
5,10,19,75,78,97,121,127,131,133,152,467,574,582,704,841,919
1,5,6,7,11,12,20,21,27,36,38,42,43,44,45,49,52
46,69,114,126,128,833,844,987,1006
1,2,3,5,16,17,18,19,20
9,17,26,35,66,67,82,88,91,97,112,113,119,124,126,127,131,260,263,264,398,455,458,494,601,615,652,803,871,885,895,913,924,960,963,967,974,990,999,1007
1,2,3,4,5,13,21,27,29,31,33,35,36,45,46,57,59,60,61,62,64,65,66,67,70,76,79,80,81,82,83,86,87,88,89,90,94,95,96,104
17,66,74,105,114,128,129,369,412,570,746,954,955,989,1006
1,4,5,7,11,12,34,35,36,37,40,61,62,64,66
9,36,44,73,76,78,97,101,102,108,113,119,121,124,127,132,244,378,476,499,557,574,587,661,667,701,734,752,841,933,949,963,967,977
3,5,15,16,17,20,21,27,38,39,40,42,45,46,65,66,69,70,71,73,74,75,76,77,80,81,82,83,84,91,94,95,96,98
5,45,68,90,105,131,133,329,451,485,605,681,844,852
1,24,30,31,32,53,63,64,66,67,68,71,72,82
11,64,66,74,77,105,108,119,124,125,127,133,314,639,746,805,844,852,853,921,993,1003,1008
2,3,4,5,7,8,9,29,38,40,42,46,47,48,49,50,51,53,54,55,56,65,85
8,18,27,43,95,98,101,124,125,129,131,133,498,499,858,931,940,968,978,991,997,1005
1,11,13,20,30,33,34,37,51,65,69,70,71,72,76,78,80,90,92,96,97,111
6,17,22,35,59,75,82,84,87,88,91,92,95,110,115,116,129,168,462,486,538,542,570,613,640,750,837,854,897,900,918,922,999,1007
11,13,17,19,22,24,33,45,48,50,52,56,57,59,60,64,69,70,72,74,75,76,78,80,81,82,83,87,88,90,92,103,105,114
11,25,52,60,61,64,67,80,84,93,94,96,98,99,101,108,113,121,127,129,131,132,133,134,474,538,614,687,698,789,817,864,887,913,936,950,954,967,979,996,997,1009
2,6,7,11,12,13,14,15,17,18,19,20,21,25,26,27,28,29,31,33,35,36,41,43,44,45,48,49,51,52,53,54,55,56,57,58,59,60,61,62,63,67
10,24,45,81,84,130,131,133,279,681,1012
36,46,57,58,61,63,67,70,71,73,75
3,31,82,111,113,114,131,133,342,669,914,1007
7,8,9,10,12,15,27,39,40,41,48,49
99,104,956,976,1009
12,14,15,17,28
17,58,75,77,83,86,109,116,120,123,124,131,400,511,590,725,763,795,840,872,965,986
1,3,11,21,33,35,37,38,45,48,49,52,53,54,55,56,58,61,62,71,73,76
32,72,96,126,937
1,78,79,84,85
98,119,121,124,131,133,315,841,927,1008
11,13,17,18,29,44,50,51,62,64
6,19,51,54,56,64,68,75,80,96,106,111,114,115,116,120,132,134,203,239,253,337,381,482,671,699,758,764,896,937,965,969,992
1,2,3,6,8,11,14,19,20,27,28,30,32,36,43,55,58,59,60,63,64,66,69,70,72,75,77,78,79,80,81,83,84
2,18,60,80,94,98,125,129,133,134,676,755,756,762,813,921,950,954,968,996,997
1,3,4,6,8,10,14,74,77,78,79,80,81,82,103,104,105,144,146,147,149
13,29,52,54,57,60,70,71,73,80,89,91,94,99,102,104,108,110,113,122,134,140,238,309,435,510,560,568,608,613,756,762,798,864,876,887,896,901,904,917,920,950,952,956,962,967,976,993,996,1009
3,12,13,14,15,18,19,20,24,33,34,35,39,41,42,50,53,54,55,56,57,58,59,60,61,62,64,65,66,67,69,70,73,74,75,76,77,81,84,85,89,90,91,92,93,94,97,100,103,104
20,134,657,901
2,94,96,98
0,5,60,84,91,94,99,112,124,130,134,827,950,990,996,1009,1012
2,3,11,13,14,15,21,22,26,30,31,32,33,34,35,41,45
31,78,84,93,101,124,127,194,444,574,593,833,974
1,3,4,6,11,16,20,21,22,24,25,26,29
13,32,73,104,110,116,164,594,712,764,854,875,930,973,976
1,2,3,4,34,89,91,93,96,125,128,132,136,139,140
36,55,66,76,86,93,97,99,101,104,111,113,117,129,132,133,337,386,584,743,789,796,903,923,925,948,956,967,981,1000,1009
1,2,3,25,26,27,28,31,32,34,35,36,66,70,77,80,81,82,83,84,85,93,94,95,96,118,119,120,129,133,135
96,112,867
1,2,3
6,32,42,46,74,91,96,102,106,115,126,323,646,915,937,953,955,987,1004
14,17,18,20,44,50,55,57,59,64,74,75,76,79,80,81,105,109,111
18,25,37,38,44,86,99,101,102,108,111,124,129,132,203,244,338,347,423,504,612,614,710,816,866,870,877,933,949,959,968,1000,1009
1,6,8,11,16,18,20,21,28,29,40,44,45,65,66,67,68,69,72,74,75,76,78,79,80,81,82,86,87,95,96,97,99
14,35,58,71,85,87,100,116,849,850,999,1001,1010
1,2,3,4,7,9,21,22,23,24,25,27,39
14,22,23,25,37,51,62,64,65,70,74,77,87,88,90,105,108,109,112,115,130,239,338,530,559,591,637,649,702,709,753,770,778,780,783,802,806,816,818,828,837,899,900,941,952,955,1012
1,2,3,4,9,11,15,16,21,23,32,33,37,46,47,48,49,51,52,53,55,56,57,58,59,61,62,63,64,65,66,67,69,70,71,72,74,75,76,80,81,83,84,86,88,97,98
11,13,38,42,56,61,86,89,102,104,108,113,129,134,154,211,287,294,435,459,526,567,568,612,715,817,876,883,920,936,962,967,976,985,993
2,13,17,22,23,24,26,27,33,48,65,66,67,83,85,86,92,93,95,96,98,99,100,101,103,104,105,106,112,114,116,117,118,119,134
60,68,84,86,94,99,102,104,115,126,128,130,134,140,399,459,592,674,715,825,881,925,950,953,956,962,996,1009,1012
1,2,3,4,5,8,13,17,20,25,26,81,82,83,84,85,86,87,88,89,90,91,92,93,94,97,98,100,155
30,93,101,111,113,119,132,133,311,571,863,1008
1,2,3,4,5,19,20,29,30,31,32,46
64,70,87,96,99,101,102,116,130,133,474,592,739,805,852,952,956,1012
1,2,3,5,8,9,10,11,14,27,28,30,31,36,44,45,48,51
14,42,44,60,67,84,86,94,95,102,112,119,124,129,133,134,640,641,649,672,739,753,805,810,852,858,861,865,906,949,950,990,996,1000,1003,1008
1,14,15,18,19,20,21,23,24,28,31,35,59,61,63,65,66,67,68,69,71,72,73,86,87,88,89,93,94,95,97,98,100,101,105,106
78,133,407,438,454
1,2,3,4,5
5,20,37,44,50,69,74,86,102,108,112,115,125,127,244,347,393,443,501,639,816,890,933,949,990,991
1,13,19,39,42,74,77,79,87,92,93,94,95,96,99,100,101,103,135,136,141,144,161,164,165,166
14,30,35,51,56,59,64,69,74,75,87,88,90,105,107,110,116,117,120,122,132,262,356,422,495,501,510,515,522,528,529,530,544,564,569,589,613,616,639,663,668,702,802,811,818,828,847,856,860,874,951,975,980,994,999
1,4,5,11,15,16,18,23,24,25,26,27,30,33,39,40,43,44,47,49,50,51,52,53,54,55,56,57,58,59,60,62,64,67,71,72,73,75,76,78,79,81,82,83,84,85,88,90,91,92,93,94,96,99,100
5,13,36,37,38,42,55,84,89,95,99,101,102,104,108,115,124,129,131,133,150,248,270,435,464,474,475,599,611,612,618,640,698,736,794,816,820,858,866,883,885,920,956,1000,1003,1005,1009
1,2,4,9,10,11,12,13,20,28,34,36,37,38,48,49,50,59,65,67,68,69,70,71,72,73,78,79,80,81,82,83,86,87,88,97,102,105,107,108,109,110,112,115,116,120,123
4,41,57,62,72,75,87,91,94,97,103,106,113,116,125,126,129,132,133,233,237,326,513,523,682,768,782,921,923,928,973,1004
3,6,7,33,41,43,45,46,47,48,49,61,63,64,65,68,69,72,74,75,76,80,82,87,88,89,92,93,95,97,98,110
34,69,72,76,80,111,115,127,132,133,838,953,959,982
1,2,3,7,9,50,51,52,66,69,73,74,75,81
8,11,13,14,19,31,51,59,60,61,73,79,80,89,91,92,94,96,99,102,104,105,108,113,115,119,120,124,126,128,134,149,150,230,249,352,444,455,477,508,642,662,682,704,762,867,876,887,936,950,956,960,962,966,967,972,976,979,985,993,1009
3,10,13,14,17,18,19,20,27,29,30,31,36,37,39,40,42,43,45,48,50,51,54,56,58,59,60,62,64,65,66,67,68,69,70,71,72,74,75,76,77,78,79,81,83,84,85,88,93,94,95,97,100,101,102,103,105,106,107,110,111
0,1,50,90,99,108,113,116,117,470,662,668,774,822,875,879,890,1009
1,5,6,7,8,9,10,11,12,13,14,15,18,19,20,21,22,23
31,36,41,52,67,72,80,88,91,96,98,107,110,115,116,119,122,124,127,128,135,210,398,476,503,510,518,522,532,561,564,601,647,652,660,670,730,760,791,846,881,887,889,899,907,913,927,960,974,975,994,1008
1,15,16,18,29,30,32,33,34,35,37,41,42,43,46,47,48,49,70,74,75,76,81,82,83,84,85,86,89,91,92,94,95,97,105,107,108,110,111,112,116,118,119,120,121,123,125,126,127,128,129,130
46,69,89,102,105,106,112,122,132,275,569,589,611,644,686,820,871,942,983,992,1004
1,2,5,10,16,19,20,27,31,38,39,40,42,44,49,50,51,54,56,58,59
1,5,9,36,52,54,60,71,73,81,84,97,99,108,113,124,131,133,222,235,429,474,476,496,518,532,622,698,786,804,853,862,883,885,896,956,963,1009
1,2,3,7,9,10,11,19,21,22,28,29,36,37,38,40,44,47,48,49,50,52,53,54,55,56,58,63,64,68,69,70,71,72,73,76,77,81
0,11,55,60,61,64,66,67,73,93,94,97,108,113,119,124,129,134,222,231,243,264,345,474,506,601,615,691,698,706,750,761,826,853,913,932,936,947,950,963,967,979,985,996
1,7,9,12,18,19,20,26,28,29,30,44,45,47,54,57,58,59,60,61,62,66,67,68,73,75,78,79,80,81,82,83,84,85,88,89,95,98,99,104,105,110,111,112
50,78,95,98,109,110,116,122,125,840,858,875,890,919,930,942,991,997
5,6,7,8,17,27,69,79,80,84,85,87,92,93,96,102,103,104
1,18,31,42,43,60,93,95,98,125,129,131,133,444,810,921,931,968,978,991,997,1005
1,11,12,14,29,30,31,36,42,58,60,61,62,63,64,67,68,78,80,88,92,94
9,65,72,74,115,126,131,133,434,720,770,953,955,987
1,2,28,29,30,33,38,41,42,51,52,53,54,55
63,78,124,130,438,574,634,944,1012
1,7,15,16,17,23,24,25,26
9,13,18,29,33,54,60,61,68,70,71,78,97,102,121,124,127,133,134,189,242,334,384,398,451,459,602,654,667,671,706,757,839,850,892,896,917,961,985
12,13,14,15,17,23,24,25,28,39,43,49,51,52,57,58,73,76,77,82,85,86,87,88,90,91,97,102,103,107,108,109,123,125,131,136,138,148,149
18,98,101,118,121,127,129,133,418,752,805,852,853,968,997,1000
4,9,11,13,18,19,20,49,50,53,56,58,59,63,68,69
6,33,45,49,57,72,84,87,103,113,116,126,129,131,237,305,308,421,679,681,720,941,1005
1,9,11,16,30,32,33,34,35,37,38,57,58,60,61,66,67,75,76,77,78,79,80
26,40,51,56,59,65,74,79,87,88,105,110,112,115,116,120,126,128,132,339,350,390,423,635,709,749,758,764,770,778,828,829,846,870,875,884,900,941,955,960,990
1,2,3,4,8,11,31,34,38,42,45,54,55,56,58,73,74,76,84,96,97,101,102,104,105,106,116,117,119,120,122,123,124,126,127,128,129,130,131,132,133
44,76,89,95,104,105,108,121,124,127,129,131,521,661,736,737,920,943,949,1000,1003,1005
1,2,3,9,10,14,22,33,34,43,48,68,79,87,91,100,101,103,104,108,109,110
27,32,36,52,60,66,70,73,87,95,98,102,112,119,125,129,130,131,133,235,345,381,391,518,599,750,791,825,885,891,915,941,943,952,954,990,1005,1012
1,2,5,7,9,10,15,17,18,19,20,21,22,23,24,30,31,34,53,55,56,62,63,64,65,66,67,68,69,70,71,72,73,78,79,80,84,85
4,10,14,19,22,57,59,79,80,85,88,90,96,100,107,111,115,128,132,268,404,431,462,542,588,690,704,808,828,867,881,959,982,994,1001,1010
1,2,3,4,5,6,7,9,10,26,27,28,29,42,44,51,52,53,56,57,58,59,62,63,64,65,66,67,68,69,70,71,77,79,95,108
1,27,60,63,73,98,113,127,129,225,737,774,853,855,940,944,967,974,997,1000,1005
1,5,6,7,8,9,10,27,34,37,40,41,43,45,46,47,48,49,50,53,54
13,14,36,52,60,61,64,67,73,84,85,86,94,99,108,119,124,129,130,131,134,252,325,458,506,580,601,649,715,750,786,791,792,891,913,950,979,985,993,996,1009,1012
3,4,6,7,15,20,23,27,28,29,30,31,36,37,39,40,45,47,53,56,58,59,60,61,62,63,64,65,66,67,68,69,71,72,75,77,80,81,83,88,89,95
5,11,13,23,32,36,52,55,60,67,72,78,81,89,99,101,102,104,124,131,185,248,345,518,562,599,634,670,677,707,827,853,876,913,919,925,956,962,976,1009
2,3,7,8,9,10,13,14,15,24,26,29,30,36,45,46,51,53,61,66,67,68,70,73,74,75,76,82,84,88,89,90,95,98,101,102,103,106,107,115
31,35,67,69,73,82,88,93,94,96,105,107,110,112,119,121,124,125,126,127,128,130,131,258,280,455,475,509,564,613,676,723,749,755,783,839,871,874,900,907,913,960,996,999,1007,1012
1,11,26,27,39,57,60,61,63,64,65,66,67,69,70,72,73,75,77,79,80,82,83,84,85,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,106,108,110,120,138,140
17,24,30,35,41,46,57,62,66,74,80,87,88,90,105,106,107,108,115,126,205,268,564,597,691,702,738,818,843,847,945,955,960,975,987,992,999
2,3,4,5,16,17,18,21,22,30,31,32,33,35,37,39,45,46,47,55,56,57,58,63,64,65,70,72,73,74,75,83,84,90,97,98,99
13,14,22,38,52,56,61,64,67,73,79,84,85,86,89,93,94,99,108,112,120,124,125,130,131,149,215,253,386,435,441,475,512,542,561,579,595,612,615,630,715,794,819,823,864,876,885,891,895,906,985,990,993,996,1001,1002,1009,1012
4,5,6,7,10,13,16,18,21,26,27,28,29,33,35,36,37,38,40,45,46,49,51,62,69,70,71,72,75,77,78,79,80,81,82,83,84,85,87,88,89,90,91,92,94,95,98,99,100,101,104,107,109,110,111,112,113,124
118
102
9,26,36,45,52,64,77,97,108,115,122,125,126,128,131,133,228,254,303,319,532,544,635,647,667,689,706,725,741,744,776,817,852,880,891,942
6,9,10,12,13,15,42,43,44,45,49,51,62,71,73,74,76,77,99,109,110,112,115,117,123,124,125,126,127,129,130,131,132,133,134,135
6,17,34,46,48,49,64,75,87,106,107,110,115,122,126,132,182,288,305,646,735,785,812,846,873,909,918,923,942,975,987,1004
1,2,4,10,14,15,17,18,19,21,22,23,24,25,29,31,32,33,34,36,39,40,41,42,43,44,45,46,47,48,50,52
18,44,50,95,97,98,119,131,133,736,949,968,978,1008
1,3,4,6,7,8,9,25,38,39,41,42,43,44
1,11,19,36,52,55,60,61,63,64,73,80,91,93,94,98,110,113,116,119,121,125,129,133,531,532,613,676,682,750,752,774,887,891,936,967,979,991,996
2,17,18,19,20,21,36,39,40,41,42,43,45,47,64,65,66,67,68,70,72,76,78,79,80,81,82,84,85,86,87,88,89,90,105,106,108,109,116
6,15,78,86,95,98,101,102,129,244,505,587,978,1005
2,21,22,23,33,35,37,43,45,46,63,64,66,68
33,44,54,61,68,70,71,74,108,121,125,127,129,190,398,450,451,525,652,671,699,803,817,896,917,921,924,952,955,961,974,985
3,6,13,14,17,36,42,43,44,46,48,58,87,90,93,94,95,98,99,105,106,109,110,117,122,123,124,131,132,144,145,146
0,8,21,36,95,98,99,108,125,129,133,532,653,784,978,997,1009
23,75,78,79,80,89,90,91,94,96,98,99,100,118,121,127,128
20,27,65,72,88,90,105,109,110,113,117,130,132,170,290,297,312,337,350,353,367,657,749,770,783,806,818,828,844,871,899,900,930,940,1012
2,4,7,8,23,24,27,30,31,42,53,54,88,99,107,108,109,120,121,123,125,127,128,129,131,132,133,134,135,137,139,142,143,145,146
11,14,19,41,56,60,61,62,67,73,80,85,87,94,129,149,197,345,356,580,591,599,610,762,792,887,913,918,936,941,954,979,996,1000,1001
16,20,21,22,23,25,29,31,32,37,39,43,52,53,56,58,59,60,62,63,64,65,66,67,69,70,71,74,87,90,92,96,97,98,100
25,47,56,64,66,69,73,76,90,97,98,99,105,111,119,121,132,133,358,468,508,560,589,614,616,668,687,702,705,731,752,779,793,797,805,852,859,908,927,1009
23,24,27,28,29,32,33,34,36,38,49,50,54,59,60,65,66,78,79,80,81,83,84,91,93,94,101,102,105,106,107,108,111,116,121,126,127,128,139,140
13,42,51,60,71,73,80,83,89,93,94,96,102,104,108,109,122,123,134,154,219,388,473,568,674,762,798,802,817,820,850,867,876,883,887,901,904,917,920,925,937,950,962,976,983,993,996,1002,1011
2,6,7,10,13,14,18,23,26,27,33,37,39,43,46,49,50,51,52,53,54,57,62,64,65,66,67,68,69,70,72,73,74,75,76,87,89,90,91,92,94,95,97,98,99,100,104,105,106
13,36,50,60,64,67,80,89,94,102,104,108,112,117,119,130,137,196,234,435,756,762,800,876,913,925,962,977,981,990,993,996,1012
5,6,7,12,14,17,22,23,24,29,30,34,36,38,40,83,84,85,86,87,90,92,93,94,97,98,103,105,106,108,112,113,156
1,25,29,55,66,95,97,111,121,125,127,129,131,132,133,571,700,736,755,793,797,1000,1005
1,3,4,6,7,8,9,10,11,13,14,17,21,22,30,31,32,33,34,35,37,38,40
18,63,84,95,97,98,119,131,133,736,944,963,968,978
1,2,3,9,13,15,16,18,29,35,36,40,41,43
1,5,11,29,33,42,45,60,61,63,68,70,71,73,84,94,102,104,105,108,110,127,129,134,238,287,346,401,511,568,606,674,694,730,774,798,804,827,850,917,930,936,944,952,954,985,993,996
2,3,6,7,13,15,33,34,35,36,38,41,47,48,49,51,52,55,56,57,60,61,63,70,71,73,75,76,78,79,82,83,86,87,88,90,91,92,94,95,97,98,99,102,103,104,105,106
4,19,20,34,35,46,62,67,71,77,88,100,102,105,106,109,110,112,114,115,116,126,128,130,132,367,427,431,534,547,590,617,646,689,697,730,825,899,930,953,960,969,989,990,999,1004,1010,1012
1,3,4,6,8,12,13,14,21,22,23,25,52,53,56,57,58,61,67,68,72,74,75,76,77,78,79,80,81,82,83,89,91,92,94,95,107,108,109,110,111,114,117,118,120,122,124,125
78,101,102,119,127,244,977
3,16,40,64,73,75,99
13,55,90,97,99,111,113,119,124,127,129,132,358,410,538,680,813,954,967,982,984,1003,1009
2,3,6,8,9,12,14,33,34,35,92,99,102,103,105,124,134,178,180,183,190,191,192
57,58,70,71,83,94,104,105,110,116,134,401,568,674,843,850,893,901,917,950,952,976
1,3,8,23,27,28,34,39,53,56,58,63,64,65,66,67,71,74,88,89,94,99
19,22,31,35,36,49,53,75,77,82,87,88,91,94,106,110,115,116,126,129,131,132,262,362,466,503,538,613,632,782,870,885,900,941,953,973,996,999,1004,1007
2,3,5,7,8,9,10,13,14,37,42,47,49,50,51,53,55,56,58,60,61,62,63,64,65,66,67,69,78,79,80,81,84,85,87,88,89,91,92,115
58,72,77,103,109,514,666,679,714,763,824
5,25,35,100,102,103,106,119,120,124,126
4,15,31,35,47,57,74,76,80,82,87,96,105,111,112,132,156,431,509,762,843,846,867,887,889,895,904,918,948,955,982,999,1007
1,2,4,5,7,9,10,12,16,18,19,21,23,37,41,43,44,45,47,48,49,50,52,53,54,56,58,59,61,62,64,65,67
93,95,98,117,125,129,131,133,157,231,736,981,997,1005
1,7,10,13,16,18,20,37,42,43,48,49,52,54
13,36,45,60,61,64,68,73,80,84,89,94,99,108,131,133,218,325,451,507,543,579,585,592,695,762,791,979,985,993,996,1009
1,3,8,9,11,12,34,36,37,38,41,42,43,44,59,61,72,73,74,75,76,77,80,81,84,85,87,88,89,90,91,92
11,25,50,52,60,61,67,71,73,76,78,80,93,95,99,113,119,124,125,127,131,134,177,338,377,407,511,518,547,560,574,614,641,687,734,756,762,797,819,858,913,936,950,956,967,1009
1,13,14,15,17,18,22,23,24,27,36,41,42,43,50,51,52,55,56,57,58,59,60,63,66,69,70,71,72,81,87,90,91,92,93,96,98,100,101,102,106,107,108,110,111,115
31,35,48,65,80,82,87,88,105,112,115,117,118,126,132,288,312,509,693,729,735,770,778,782,796,806,828,871,889,900,904,941,953,990,999,1007
1,4,6,10,11,14,16,22,29,32,33,35,36,40,42,43,44,45,46,47,48,49,50,53,55,56,57,58,60,65,66,67,68,69,72,75
48,50,58,77,85,91,96,105,106,107,114,122,128,132,247,254,598,735,745,772,890,923,937,942,945,957,975,989,992,1001,1006
3,8,16,21,27,32,41,42,63,64,67,68,69,70,71,72,82,83,85,86,87,88,92,93,94,98,99,100,110,116,118
31,34,36,55,67,69,72,80,82,87,88,91,93,95,97,107,111,112,115,119,124,126,127,128,129,131,132,168,231,252,268,304,336,455,506,538,561,601,604,615,621,636,640,641,643,672,697,750,792,793,855,885,889,900,954,987,1007
4,7,8,9,13,14,15,20,27,28,30,31,33,34,37,38,40,41,42,53,55,63,64,66,77,78,79,80,81,82,83,84,86,88,96,97,99,100,101,103,104,106,109,110,112,113,116,117,118,119,121,122,123,124,125,126,133
13,42,44,59,60,64,67,70,71,99,104,108,109,110,113,116,122,346,511,599,629,674,712,764,840,850,854,901,913,917,951,952,973,976,993,1009
1,5,6,7,8,9,10,11,15,16,19,22,28,29,43,50,51,55,56,57,71,72,74,76,78,79,80,84,85,88,89,90,91,93,96,97
9,11,51,67,93,97,98,101,119,124,129,151,194,252,263,327,355,458,494,506,530,561,601,615,643,913,936,947,963,977
3,9,10,16,19,30,35,36,39,56,58,70,71,73,75,76,79,86,87,88,89,90,93,98,99,100,101,104,108,109
91,110,111,115,131,132,133,160,785,982,984
1,3,54,55,57,60,65,66,68,95,96
82,85,88,90,92,100,465,610,631,900,1001,1007,1010
1,6,9,10,11,81,82,83,84,85,89,90,160
75,95,101,102,499,736,928
1,19,21,27,28,29,30
18,43,95,98,99,108,125,129,131,133,653,736,921,956,968,978,991,997,1005
4,6,35,39,40,41,46,51,55,59,60,87,88,89,93,94,96,99,104
44,69,78,99,101,102,105,117,119,124,127,129,134,324,587,592,757,855,859,874,933,974,977,1003,1008,1009
7,8,9,10,16,17,20,23,31,34,41,42,77,80,81,82,83,84,87,88,95,96,102,103,105,106
0,11,12,13,36,38,52,56,59,60,61,66,67,73,81,84,86,93,94,101,102,105,108,112,115,119,129,131,133,162,289,345,359,430,497,499,506,518,612,634,648,715,739,750,789,791,816,823,825,853,885,895,913,936,954,979,990,993,996
1,2,4,5,6,7,8,10,20,21,23,24,28,29,30,31,35,38,40,42,46,47,51,53,55,59,62,68,71,72,80,81,82,83,84,85,88,89,90,91,92,95,97,98,99,100,101,102,103,106,108,109,111,112,113,115,116,118,119
5,11,18,27,52,60,64,67,78,80,84,93,94,95,98,99,108,111,113,125,127,129,131,132,133,134,194,386,496,574,584,609,676,750,762,789,804,816,827,864,883,913,924,936,940,950,954,956,958,967,968,991,996,997,1005,1009
2,4,5,6,8,10,14,15,16,17,20,24,29,30,31,36,39,40,41,43,50,54,55,59,64,65,68,69,70,71,72,73,74,75,76,78,79,81,83,85,86,87,88,90,91,92,93,94,100,101,102,103,107,108,110,114
9,11,60,61,67,73,97,98,99,119,124,127,129,172,474,506,561,630,643,698,792,894,927,936,954,974,1003,1009
1,2,5,13,14,16,17,19,20,21,25,27,28,29,32,33,34,35,36,37,38,39,40,41,42,43,44,45
1,5,19,55,66,69,73,82,84,88,91,94,96,113,114,119,120,121,125,126,127,128,131,133,285,398,416,573,595,635,642,658,691,723,752,755,761,777,789,804,805,827,841,844,867,885,900,932,996,1007,1008
1,4,5,7,8,9,16,28,29,31,32,33,34,35,37,38,40,44,45,46,47,49,54,58,59,60,61,62,63,65,66,67,68,71,72,73,74,75,77,78,79,82,83,84,85,87,89,90,91,103,104
12,13,27,44,47,52,60,67,80,86,89,94,97,99,102,104,105,108,111,113,119,121,125,129,130,131,132,133,204,242,264,280,395,409,423,477,662,715,731,779,793,819,841,864,876,908,913,949,962,967,976,982,984,991,993,996,1002,1005,1009,1012
1,2,9,10,13,14,15,17,18,19,23,24,26,31,32,33,36,38,50,57,58,62,79,81,86,87,92,93,94,95,96,97,98,99,101,102,103,104,106,107,108,122,123,124,127,129,131,132,133,137,138,147,149,152,154,155,156,158,163,168
60,61,63,73,97,99,113,125,662,676,921,944,979,991,1009
3,11,14,32,33,34,35,40,41,43,44,46,54,55,56
18,57,80,83,91,95,97,98,115,129,537,600,858,953,968,978,997
1,2,10,12,14,15,16,25,26,35,37,38,39,40,41,43,50
13,49,60,67,84,93,94,99,102,104,112,119,125,129,130,131,132,133,134,138,194,561,568,571,622,739,753,786,823,885,895,906,911,925,950,956,962,990,991,996,1000,1008,1009,1012
2,3,5,6,8,10,11,15,17,19,23,37,38,40,42,65,66,69,70,71,72,73,74,75,76,77,78,80,81,84,85,86,87,89,90,93,94,95,96,97,99,113,114,116
5,52,54,60,67,70,71,73,84,89,93,94,98,99,113,119,125,131,133,172,316,474,496,498,518,599,622,695,698,789,804,805,823,827,852,853,885,891,896,911,913,956,967,978,996,1008,1009
1,3,4,5,6,8,15,17,22,23,30,31,32,38,40,41,44,60,68,71,73,76,77,80,81,82,84,85,87,88,91,92,94,95,100,101,105,106,107,112,113,114,115,116,117,118,123
58,65,80,85,88,96,100,105,115,128,132,254,268,867,889,1001,1010
1,2,3,4,6,7,86,97,98,100,101,103,104,105,106,107,186
19,32,46,47,75,80,88,94,96,100,106,110,111,114,115,116,117,168,268,387,524,594,613,646,793,811,846,854,867,889,900,953,973,996,1004,1006,1010
1,12,14,15,16,17,19,20,24,25,27,28,29,37,45,50,51,52,53,63,64,65,66,67,68,69,70,71,75,80,81,82,84,85,87,95,96
3,34,41,42,48,54,75,86,105,111,115,126,128,132,267,423,425,728,738,812,870,889,914,928,982,984,987
66,68,69,71,73,74,75,76,77,80,81,83,85,88,108,109,111,112,113,115,116,117,135,136,137,138,140
19,35,69,80,82,85,92,100,111,112,120,168,704,713,793,849,874,889,895,904,980,999,1001,1007,1010
1,4,5,6,7,11,12,19,20,25,27,28,29,30,31,32,33,34,35,36,38,41,43,44,51
59,83,109,116,122,134,605,824,875,951,986
6,7,12,14,16,17,18,19,20,26,27
23,26,53,123,128,147,628
1,2,14,15,17,20,21
31,51,72,77,80,85,88,90,96,100,105,112,116,120,230,268,375,470,610,663,818,849,871,904,990,1001,1010
2,4,5,6,10,24,26,27,28,92,93,94,95,96,97,98,99,100,102,103,104,108,109,112,113,121,185
13,19,35,36,42,49,60,61,64,67,81,85,86,89,93,94,96,99,102,104,107,109,110,112,115,122,125,126,127,128,130,131,132,270,346,367,376,382,435,459,510,522,533,561,568,580,592,593,613,676,715,755,839,870,876,881,885,895,906,913,925,953,956,960,976,994,999,1012
2,3,4,5,6,7,8,9,10,16,17,18,20,21,25,27,28,31,35,52,55,56,57,59,60,61,63,64,65,68,71,72,73,74,75,76,77,78,79,83,84,92,94,98,100,101,104,105,106,107,108,109,110,111,112,113,114,115,116,118,134,135,136,139,140,143,144,147
27,38,44,60,93,94,95,97,98,101,102,113,119,124,127,129,131,194,264,499,701,710,734,757,813,924,927,933,940,949,954,974,977
1,3,12,13,14,15,16,19,20,24,29,36,45,47,66,72,74,75,76,78,79,81,82,84,86,87,88,93,94,98,102,107,116
46,64,74,78,94,95,98,99,108,122,125,129,130,133,592,805,852,956,996,997,1005,1012
3,4,5,8,9,10,12,13,14,17,18,26,29,41,42,46,54,55,56,58,66,69
95,98,125,131,858,931,978,991,997
2,6,8,9,10,11,13,15,17
129,131,133,592,855
1,14,43,45,46
0,1,5,9,29,52,55,84,93,95,99,104,113,119,125,129,131,133,185,194,248,309,518,662,719,736,774,789,823,864,885,920,931,967,976,991,1000,1008,1009
1,6,7,8,9,11,14,15,18,22,23,30,32,37,38,40,46,56,57,59,60,61,62,63,64,67,69,70,71,72,73,79,80,81,82,83,85,90,91
15,66,71,72,94,103,108,109,110,116,134,156,237,511,560,575,677,691,714,720,798,850,917,930,950,993
1,2,6,61,62,64,65,66,69,71,74,75,80,81,82,86,111,112,114,126,127,128,131,132,133,134
15,43,58,65,95,96,98,107,109,110,116,125,126,129,131,373,439,621,640,748,770,785,786,875,907,921,987,994,997,1005
4,7,8,9,10,11,14,77,78,81,82,85,88,92,94,95,96,98,99,103,104,105,107,108,128,129,130,154,157,159
1,6,60,62,67,71,73,84,86,94,98,99,102,104,108,112,122,126,129,130,131,133,134,295,463,523,538,622,698,715,774,804,817,895,913,925,950,956,962,978,990,1009,1012
1,2,5,8,9,10,11,15,16,17,18,23,25,26,27,30,31,51,53,62,65,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,85,86,87,89,92,101
13,77,102,109,122,134,568,792,962
1,2,3,5,6,7,8,9,10
111,132,982
93,95,138
3,37,66,105,107,114,116,117,335,760,766,851,866,1006
19,26,27,31,32,39,40,41,44,45,64,70,71,78
42,83,84,120,122,125,131,537,893,980,983,986,991
1,15,16,17,20,24,30,31,40,41,44,48,51
13,80,89,91,102,104,108,112,127,130,503,603,762,925,962,990,993,1002,1012
1,3,20,22,23,25,26,28,29,108,110,111,113,115,116,118,119,136,215
18,27,89,98,113,118,131,133,438,579,794,852,940,968,978,997
2,15,17,19,23,24,26,36,37,50,97,103,116,118,119,120
5,13,21,60,64,67,73,80,86,89,93,99,108,110,115,116,119,122,125,129,130,164,196,477,594,676,695,755,756,819,827,853,854,897,913,921,930,956,973,993,1008,1009,1012
1,2,3,6,7,8,9,10,11,12,13,16,18,20,29,39,42,43,51,52,53,54,55,56,58,59,60,61,62,63,64,65,68,77,78,79,80,81,82,84,86,88,89
21,24,35,55,60,61,67,72,73,82,88,93,94,97,98,99,107,112,119,121,125,127,131,231,264,382,604,621,652,693,717,752,755,761,913,927,929,975,978,985,994,996,999,1007,1009
1,2,7,11,12,14,15,16,27,41,42,47,48,52,54,55,79,80,81,83,84,85,86,87,88,89,91,100,101,102,103,104,105,106,107,108,109,110,111,113,117,118,123,137,138
60,64,78,80,94,95,96,99,102,112,115,120,126,130,134,229,455,592,762,867,950,990,996,1009,1012
13,16,17,18,23,24,26,28,33,39,40,41,42,55,57,58,59,60,61,62,64,70,74,76,89
5,26,52,54,60,64,67,70,71,75,80,81,83,84,88,93,94,95,96,99,101,103,112,120,124,129,130,132,133,134,180,193,194,248,279,283,325,474,496,562,593,603,634,698,719,749,753,756,789,804,827,864,895,906,913,950,952,954,984,990,996,1009,1012
4,5,7,11,12,13,14,17,23,25,27,28,29,30,31,35,36,40,41,42,45,46,53,54,58,59,64,65,77,78,81,82,83,84,85,87,88,89,92,93,94,95,96,97,98,99,100,101,108,109,111,113,114,115,116,117,120,121,122,125,126,127,132
64,65,83,109,110,116,122,124,164,439,785,875,942,983,986
1,2,3,8,17,48,83,93,94,95,97,104,105,108,109
4,14,16,28,30,39,77,106,114,364,432,598,675,688,740,847,939,989,992,1006
4,5,9,35,48,52,53,80,83,100,103,114,118,127,131,144,145,146,162,164
5,13,14,42,52,60,61,64,67,73,80,84,86,89,99,105,108,112,119,120,124,129,130,131,133,149,235,261,325,346,441,573,579,601,640,658,695,756,794,827,852,864,883,906,913,979,980,985,990,993,1003,1008,1009,1012
1,2,3,4,5,8,14,15,22,27,28,29,30,31,32,33,36,40,43,44,45,47,52,55,56,57,58,59,60,61,62,63,64,65,66,68,69,70,71,72,73,74,75,77,79,84,85,86,88,90,91,92,93,98
27,31,36,56,68,78,90,97,98,99,102,104,105,117,118,124,125,127,129,131,133,244,324,358,444,498,699,789,856,859,879,903,919,920,954,976,1009
1,2,3,5,8,15,16,17,18,21,23,27,28,31,62,67,68,69,71,72,73,75,76,77,78,79,82,83,85,86,87,88,95,97,98,99,102
5,11,13,36,44,50,52,57,60,61,66,67,73,80,81,84,86,89,93,94,99,105,108,112,124,125,127,129,130,131,261,345,393,435,441,458,497,593,615,640,695,700,702,715,719,730,750,756,762,803,819,823,853,864,883,885,887,890,891,933,949,954,956,957,979,985,990,993,996,1012
1,2,3,4,8,12,15,16,17,24,26,27,30,33,36,37,39,43,45,46,50,51,54,56,60,61,62,66,76,80,81,82,83,84,85,86,87,88,90,91,94,95,96,97,98,99,100,101,102,103,104,105,106,108,109,111,112,113,114,116,118,119,120,122,125,126,128,130,131,141
18,20,26,57,58,68,83,103,110,115,116,122,126,128,129,133,228,254,515,564,594,635,636,776,843,854,881,882,893,930,942,968,973,1005
1,2,13,14,15,16,21,22,31,32,41,52,73,91,92,96,104,106,108,109,112,118,121,124,125,128,129,130,135,138,140,141,142,143
6,20,24,27,55,64,75,84,93,115,116,119,129,131,133,239,342,640,743,805,852,853,875,897,911,953,1008
1,2,3,4,5,7,8,10,12,16,18,44,46,47,64,66,67,68,69,77,83,84,85,88,90,91,117
18,33,43,55,63,90,98,108,129,131,133,525,592,743,787,813,883,944,954,968,978,997
1,2,3,4,5,6,10,12,21,26,31,32,33,34,35,42,44,45,47,48,50,52
19,30,31,56,64,69,71,72,77,82,85,88,90,91,96,100,105,107,109,112,114,115,128,130,150,269,350,367,422,509,544,559,564,580,616,631,693,705,867,895,899,900,907,917,960,975,990,1007,1010,1012
3,5,6,7,8,9,10,11,13,16,17,19,23,24,31,34,37,41,42,46,47,48,49,54,55,56,57,58,59,60,61,63,64,65,66,70,71,72,79,80,81,82,83,84,87,89,90,93,96,97
14,22,30,35,46,55,62,63,66,67,73,74,80,82,84,87,88,93,95,96,97,115,116,121,124,126,129,135,205,222,231,264,458,538,591,601,615,642,669,691,750,761,804,813,853,867,875,887,900,918,929,947,955,963,987,999,1007
2,3,4,9,11,12,13,14,16,17,25,29,30,40,41,42,45,46,47,48,57,59,60,63,66,73,78,81,82,84,85,86,87,88,89,90,92,94,95,97,98,99,100,101,102,103,104,105,107,108,109,111,115,116,119,124,134
18,27,43,78,95,98,125,129,131,133,805,852,931,940,968,978,991,997,1005
2,5,6,7,19,35,38,39,45,58,60,64,66,69,71,84,85,88,89
13,28,61,64,67,68,69,73,80,84,98,101,112,119,120,124,129,130,131,133,218,408,450,528,579,615,762,792,844,852,878,913,965,978,979,990,1000,1012
3,4,5,7,8,31,32,34,35,36,37,38,39,40,50,51,54,60,61,65,68,69,89,91,92,93,94,95,97,98,99,100,108,109,110,111,113,119
11,12,21,22,51,57,60,61,67,73,84,91,93,94,97,98,101,113,119,124,131,133,134,260,261,264,402,416,418,430,458,475,506,512,562,600,608,615,643,648,663,670,718,769,932,936,947,950,963,967,978,979,985,996
5,8,9,12,14,17,18,21,26,31,32,33,34,35,44,45,50,55,57,65,72,74,75,77,82,83,84,85,88,89,90,91,92,93,94,95,97,99,100,101,103,108,109,110,112,116,119,120,121,122,123,124,125,126
1,21,57,66,69,90,97,99,105,110,113,116,121,122,125,127,270,321,345,616,662,676,691,702,742,755,818,819,839,874,956,967,983,1009
1,4,5,8,12,15,17,37,39,40,44,49,50,51,57,58,60,63,64,65,67,69,70,71,72,74,77,79,80,82,88,90,91,103
16,19,32,34,39,47,68,74,96,105,112,115,126,132,134,286,292,642,699,845,846,869,871,873,955,960
1,3,4,6,20,21,23,24,25,27,28,31,34,36,42,43,44,45,47,49,50,62,64,66,67,68
35,47,62,64,65,69,82,85,87,88,91,100,105,109,117,132,335,367,616,778,811,828,889,898,900,941,999,1001,1007,1010
1,2,3,4,5,6,7,12,14,19,22,52,56,57,58,60,61,62,63,64,65,66,69,70,72,73,74,79,80,110
24,28,32,49,53,54,62,64,66,72,80,83,84,96,102,105,109,111,113,125,126,129,131,132,268,286,423,439,523,548,565,673,768,793,825,870,889,893,902,923,937,982,984,986,987,1005
1,2,3,5,6,8,11,12,13,14,15,25,26,34,41,42,45,58,59,60,66,67,68,80,81,82,83,86,88,89,90,91,92,93,100,101,102,103,104,105,108,120,128,137,138,139
14,22,27,36,49,50,52,54,60,62,64,66,70,71,73,77,79,80,88,90,91,93,94,99,102,105,112,113,115,120,128,129,130,131,133,134,192,278,295,427,463,542,634,649,691,724,756,762,768,781,789,791,805,823,837,885,890,891,895,896,900,906,917,950,952,956,960,961,980,990,996,1000,1009,1012
1,9,10,13,14,15,16,25,26,27,29,30,37,42,43,44,46,48,49,51,52,53,55,58,59,61,70,71,72,73,75,78,79,86,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,111,114,115,116,117,118,120,121,124,125,126,130,131,132,133,134,138,140,142,144,145
29,50,89,102,177,334,611
1,3,5,9,10,11,13
24,25,62,65,80,82,91,96,105,128,133,312,402,591,673,770,805,852,887,988,1007
52,53,64,66,67,68,92,94,95,112,114,115,117,128,163,164,165,166,167,168,169
5,9,27,52,67,78,81,93,101,119,129,131,133,162,376,407,417,475,477,562,573,574,592,602,630,640,730,786,792,861,977,1005
3,4,6,16,23,30,31,33,42,46,51,54,56,57,59,61,70,71,74,83,84,89,90,91,92,96,98,99,107,114,115,116
13,20,29,42,54,57,60,65,70,71,73,77,83,86,102,104,108,109,116,122,125,134,314,334,346,473,510,511,560,594,712,764,840,850,854,877,893,896,901,917,950,952,962,973,976,983,991,993
1,2,4,5,6,7,8,10,12,22,23,24,28,29,32,34,38,40,58,61,62,64,65,67,68,69,70,71,73,74,76,77,78,83,85,86,89,90,92,97,98,100,102,111,113,114,115,119
4,25,47,66,69,74,82,88,90,96,105,110,112,115,128,150,221,338,350,422,431,508,534,569,613,616,631,668,686,691,700,742,746,846,867,888,955,988,990,1007
2,3,6,17,19,21,22,24,28,30,36,40,41,43,44,45,46,47,49,51,53,54,55,58,62,63,66,67,68,69,70,71,73,74,76,78,80,81,82,83
76,79,85,100,111,117,132,341,610,879,923,959,982,984,1010
2,3,7,16,66,74,106,107,109,117,123,128,133,135,144
31,76,82,84,91,107,110,111,116,122,128,444,594,838,930,942,948,973,982,988
1,4,5,6,7,8,25,50,53,58,59,60,62,63,76,79,81,82,91,92
13,44,57,60,89,99,102,107,110,112,115,116,122,125,128,130,131,134,190,568,592,613,630,651,786,820,942,950,956,962,964,973,990,1012
1,2,3,5,6,8,12,15,17,19,20,25,66,67,72,74,75,76,77,80,81,82,83,84,85,86,108,109,111,113,116,121,123,125
61,62,65,72,73,76,82,87,88,91,96,97,109,110,116,119,124,127,131,367,398,652,723,737,803,806,848,900,937,973,974,979,1007
3,5,6,7,8,9,21,22,24,27,28,29,30,32,38,39,40,49,50,51,52,54,55,56,57,58,61,62,63,66,70,73,85
110,116,131,133,785,836,854,875,930
74,83,87,89,94,95,96,97,135
13,31,42,54,57,60,66,69,70,71,80,83,85,90,92,102,104,105,108,109,122,134,154,221,323,384,401,444,473,511,595,600,616,671,700,702,742,798,818,840,849,850,859,874,887,896,901,917,920,942,952,962,976,986,993,995
1,2,6,10,13,16,17,23,27,33,35,39,40,41,42,46,51,58,64,68,69,72,75,76,78,79,80,81,83,85,86,88,91,92,93,96,98,104,105,107,108,113,115,116,118,120,130,131,135,136,140,142,143,144,147,148
34,46,63,75,76,79,82,86,92,105,106,108,117,126,132,257,351,411,585,598,646,697,724,781,782,800,812,826,873,923,928,948,970,972,987,1004
4,9,10,12,13,21,34,35,37,41,55,56,59,62,64,65,74,76,77,78,80,82,83,84,85,87,88,89,90,91,93,94,95,98,100,113
15,20,52,57,61,73,77,80,88,91,94,96,110,112,115,116,119,122,125,126,130,131,150,156,258,268,441,455,503,510,518,522,564,608,613,676,709,716,753,756,760,854,864,867,875,887,960,973,979,983,990,996,1012
6,7,9,21,26,29,30,34,36,41,42,44,47,49,56,82,83,87,88,89,91,92,95,98,99,100,101,102,106,107,108,109,111,112,113,114,115,123,124,126,130,131,132,133,143,144,145,150,151,152,153,154,156
30,133
1,4
9,18,27,43,44,70,71,95,98,99,101,102,108,124,131,133,316,562,667,817,825,858,883,949,952,961,968,978,1009
8,9,10,11,13,21,22,23,25,26,34,56,63,72,73,80,81,89,97,98,99,100,106,108,114,115,116,118,119
46,47,74,82,106,115,126,128,132,646,889,898,923,955,1004,1007
5,6,7,8,14,15,18,19,22,24,25,26,29,30,36,37
9,22,35,52,55,56,60,61,64,67,72,73,80,82,84,92,93,94,97,98,113,121,125,127,175,231,264,355,442,518,534,667,676,752,761,841,887,913,927,974,978,979,985,996,999,1007
1,3,8,10,11,12,18,29,30,33,34,45,46,64,65,66,67,69,71,77,78,81,82,85,86,87,89,90,91,93,94,95,96,97,98,99,100,102,103,104,106,107,116,117,122,140
34,35,52,61,65,67,72,73,77,82,87,88,91,95,97,106,112,115,116,119,125,126,129,252,506,755,764,770,783,864,873,895,900,947,953,963,979,987,999,1004,1007
1,5,6,7,8,11,12,17,18,29,31,42,43,46,50,55,57,60,63,66,67,84,86,87,89,90,93,94,97,98,99,100,103,104,105,107,108,114,118,123,134
3,10,34,58,71,73,84,93,95,98,107,114,115,116,126,129,131,267,373,475,606,621,736,750,766,885,897,907,954,975,978,987,994,997
9,10,11,12,13,14,17,18,22,27,33,34,37,38,50,56,65,73,74,75,76,77,78,79,80,81,83,86,87,88,91,96,97,98
53,90,110,116,117,126,132,147,423,564,870,875,886,930,981
3,4,14,27,28,29,31,32,33,34,35,39,40,42,43
12,13,36,38,52,60,61,62,64,67,73,78,84,86,89,93,95,98,99,101,108,112,120,130,131,134,277,430,475,497,574,612,661,715,753,804,820,853,864,876,885,895,906,913,943,950,956,978,979,980,985,990,993,1009,1012
1,4,7,8,9,13,15,16,18,21,22,23,25,26,28,29,30,33,37,38,42,47,48,67,71,73,74,75,76,77,78,79,80,81,83,85,86,87,88,89,90,91,92,94,95,97,98,99,100,101,102,103,106,109,128
36,50,54,60,67,73,79,92,93,94,97,98,99,112,113,119,125,129,131,133,134,177,235,475,477,568,593,713,750,753,791,885,890,895,906,913,947,950,954,956,967,978,990,991,996,997,1009
1,9,10,11,12,14,23,34,37,40,41,52,58,62,63,64,65,68,75,77,78,82,83,84,85,86,89,91,92,93,94,96,100,101,102,103,104,105,106,107,108,113,114,115,117,123,128
37,38,44,86,93,98,101,124,125,299,347,512,519,593,658,790,792,933,949,978,991
1,2,11,13,14,15,16,29,31,33,35,48,49,50,51,52,53,58,62,63,64
50,55,57,60,61,65,66,71,84,95,97,99,102,110,112,123,126,130,132,134,276,492,592,613,753,761,778,795,804,890,917,950,956,979,990,1009,1012
1,3,5,9,10,14,15,16,17,22,23,31,32,37,44,45,47,61,62,64,65,66,67,72,74,75,79,80,81,82,83,85,89,90,95,99,113
12,33,45,54,68,70,71,73,81,102,108,120,127,134,162,398,500,511,525,568,648,652,694,699,798,850,917,950,952,962,980,993
1,4,11,14,22,23,28,30,31,35,36,37,39,50,51,52,53,58,59,61,62,63,70,76,82,86,87,88,89,92,93,94
25,42,50,63,80,119,121,124,129,614,672,756,810,887,890,1005,1008
1,2,3,4,7,9,10,11,12,13,14,15,16,18,19,20,21
35,49,52,60,61,65,67,72,73,84,88,91,96,98,99,110,112,116,119,125,131,133,345,355,441,658,755,785,804,806,853,864,885,900,912,913,937,979,985,999,1008,1009
3,4,5,8,20,22,41,42,52,54,57,59,67,68,71,72,73,74,76,79,82,83,84,85,86,87,89,90,91,92,93,94,96,97,98,107,110,118,122,125,127,130
34,74,75,76,86,106,107,109,111,117,132,341,439,621,650,800,873,923,928,955,1004
1,6,7,8,9,10,12,13,15,18,19,20,21,23,24,25,26,27,28,33,34
9,13,15,33,45,54,60,68,70,71,91,94,102,108,134,511,560,602,694,699,798,850,883,896,952,961,962,993,996
1,2,3,10,19,21,23,28,37,45,47,49,51,57,61,63,65,66,75,76,80,88,89,91,96,100,102,107,109
69,104,105,674,874,920
1,5,6,7,8,11
15,132,870,871,889
2,3,4,5,6
20,34,47,62,69,76,79,86,103,106,107,115,116,117,126,132,437,657,764,873,882,889,948,953,964,972,981,1004
3,4,5,6,7,8,9,10,11,13,14,19,20,21,22,23,26,29,30,31,32,33,34,35,36,37,38,40
23,46,47,80,82,102,106,107,114,115,117,132,185,196,205,268,297,628,646,669,811,846,879,923,964,969,1004
1,4,7,8,9,10,11,12,13,14,18,19,21,23,24,25,26,27,28,29,31,32,33,34,35,36,37
14,19,31,54,56,60,62,70,71,80,90,91,94,95,96,98,101,102,108,112,115,124,129,130,132,133,134,192,356,384,496,554,634,655,709,756,762,826,867,887,917,943,950,952,990,996,1012
1,2,3,4,6,11,12,13,14,17,18,20,21,35,36,37,38,39,40,46,47,49,50,54,63,65,66,67,68,69,70,71,72,81,82,83,84,85,86,87,88,89,90,91,97,98,102
14,19,22,30,41,45,54,58,62,65,70,74,77,80,85,87,88,100,107,113,114,126,212,266,312,356,455,542,738,763,770,828,887,896,899,941,952,969,1001,1010
6,7,8,9,12,13,14,19,21,24,25,26,27,28,33,34,37,53,54,55,60,61,62,63,64,68,69,70,72,73,74,75,76,77,79,80,81,86,91,107
1,9,18,27,29,38,44,69,76,95,98,99,102,104,108,118,119,121,125,127,129,131,133,378,667,701,710,742,752,774,841,874,920,921,925,933,949,968,977,978,997,1005,1008,1009
5,13,15,16,18,27,30,32,39,40,50,52,55,57,58,59,62,67,72,75,82,83,88,95,103,104,113,114,115,118,119,120,121,122,123,124,126,128,129,135,139,146,148,150
99,104,956,976,1009
13,19,23,29,38
35,37,78,87,101,102,105,106,108,111,124,132,244,273,423,564,816,844,866,870,871,919,923,945,959,982,984,999,1004
1,6,7,10,13,20,25,58,63,84,85,102,105,108,110,111,116,117,118,120,121,122,123,125,126,130,136,137,168
4,9,31,35,41,44,49,72,80,82,87,88,91,96,103,106,109,112,122,123,168,196,269,367,431,509,675,679,683,693,714,738,756,762,848,887,895,899,904,906,937,942,983,990,992,999,1007,1011
15,16,19,22,23,24,25,35,44,46,47,50,52,53,68,71,72,78,81,92,93,94,96,97,102,104,106,109,112,114,115,116,118,119,121,124,125,127,130,131,132,134,135,136,139,140,141,152
18,27,37,43,67,89,97,98,101,108,119,124,125,129,131,133,562,579,805,852,861,866,876,940,963,968,978,991,993,997,1000,1005,1008
1,2,4,5,10,11,13,16,17,18,36,39,44,50,51,99,100,101,133,143,148,149,150,151,152,153,154,156,157,159,160,163,180
9,14,15,19,20,22,24,30,33,35,41,51,54,56,57,62,64,65,70,71,76,79,84,87,88,100,105,107,110,112,115,122,149,156,274,312,325,356,404,421,511,542,560,613,692,693,738,749,768,770,798,802,837,850,856,860,899,900,907,917,918,941,948,953,961,975,990,999,1010
1,5,6,7,8,10,12,14,15,17,21,22,23,24,25,27,28,30,31,33,36,39,40,42,47,48,49,51,53,55,57,58,59,60,61,62,63,65,66,67,68,69,70,72,73,74,75,76,77,78,79,80,81,82,83,84,86,87,88,89,90,91,94,95,96,97,98,100,101
5,25,50,52,54,60,66,67,81,99,113,121,131,133,345,384,614,687,827,841,863,864,890,913,1009
1,5,6,7,9,10,13,14,15,17,18,20,24,40,41,42,45,46,47,48,49,50,51,52,54
4,30,31,34,35,56,58,65,69,74,75,82,87,88,90,91,96,100,110,116,120,122,126,128,180,197,213,269,425,431,455,509,510,564,613,616,627,642,697,758,770,780,783,807,812,818,828,867,900,918,937,941,960,999,1007,1010
1,6,7,15,16,17,18,20,21,23,25,26,46,52,53,54,58,61,62,63,68,69,73,75,76,77,78,79,80,81,85,86,87,88,89,90,95,96,99,103,104,105,106,107,111,112,113,115,118,129,131,134,138,139,140,143
17,21,31,51,56,60,61,62,64,70,71,74,75,80,90,91,95,101,102,103,108,110,112,115,119,120,122,124,127,128,129,130,242,339,448,458,461,472,485,499,570,653,734,739,747,762,767,773,848,906,917,952,953,960,961,980,990,1012
3,4,7,8,9,13,15,16,17,19,20,21,22,23,24,25,27,28,29,30,31,32,35,38,40,46,48,55,56,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,83,84,85,86,87,88,89,90,95,97,98
41,55,62,67,73,82,91,97,116,122,126,127,129,132,264,419,594,643,652,660,706,712,738,761,764,803,854,889,942,954,963,973,987,1007
52,55,70,71,75,80,82,86,99,103,105,111,112,113,114,118,119,120,121,124,125,129,162,163,164,166,167,168,169,170,171,177,178,183
0,63,81,84,98,104,117,118,125,241,567,599,801,822,834,905,944,997
33,71,72,73,82,83,91,100,101,109,110,111,114,147,148,169,186,195
1,9,13,33,42,45,60,61,63,68,70,71,86,94,102,104,108,109,112,113,127,134,154,287,292,294,325,346,435,500,511,525,557,568,592,694,698,715,774,798,810,824,825,903,906,917,944,950,952,962,967,985,993,996
2,4,6,13,21,29,30,31,32,44,47,49,50,54,65,79,80,81,82,83,84,87,89,102,103,104,105,106,107,113,114,117,118,120,121,126,127,128,129,131,134,135,140,141,142,144,145,147,150,151,152,153,154,156
52,84,90,93,99,104,113,115,131,133,150,662,818,823,864,920,967,1009
1,2,3,4,6,11,13,17,26,72,75,76,77,78,79,84,85,87
5,13,20,31,45,50,60,61,64,66,67,68,72,90,94,99,102,108,112,115,125,127,128,129,131,133,134,358,444,592,676,677,739,744,789,827,844,855,885,890,895,897,906,913,950,954,956,957,979,985,988,990,993,996,1009
1,2,3,4,8,13,14,17,18,19,20,23,24,25,27,30,31,32,38,40,41,42,47,49,52,56,57,58,59,60,61,62,63,67,69,70,71,72,73,77,78,80,84,85,86,87,88,89,90,91,92,93,94,96,98
19,24,26,41,51,52,57,59,60,62,65,67,70,71,73,75,79,80,84,87,88,94,96,99,102,110,112,115,116,122,126,128,130,134,136,172,191,197,293,316,344,425,441,455,474,518,608,613,698,749,756,760,776,778,781,802,804,825,843,895,900,906,913,918,941,950,952,953,960,987,990,1009,1012
1,2,3,4,5,6,8,9,15,16,17,19,25,26,28,29,30,31,32,37,40,41,42,45,46,47,56,63,68,69,74,78,83,84,85,86,87,88,89,91,92,95,96,98,99,100,101,102,103,105,106,107,108,109,110,111,112,113,114,115,116,117,119,121,123,124,126,127,131,132,138,139,144
16,17,34,35,47,76,84,91,92,98,102,105,106,114,117,132,203,245,269,350,353,378,569,622,759,800,811,825,848,871,873,889,934,969,978,989,999,1004,1006
6,8,9,14,15,17,19,21,22,23,26,28,29,40,43,45,46,47,49,50,55,57,58,59,63,64,66,67,69,71,72,74,80,87,88,90,95,96,98
5,9,13,15,21,30,36,38,52,56,60,61,64,67,73,75,79,80,81,84,86,93,94,98,99,105,108,110,112,115,119,120,121,124,125,128,129,130,131,133,206,252,261,321,327,371,393,404,427,429,441,448,458,475,496,518,538,561,612,613,634,640,676,750,755,756,788,791,819,823,827,847,852,853,864,913,954,956,977,978,979,980,990,993,996,1009,1012
2,3,5,9,19,21,23,24,31,32,35,37,38,42,45,46,49,51,53,54,55,56,57,58,61,62,64,66,69,70,72,73,74,76,79,80,90,93,98,102,106,109,111,119,120,121,122,123,124,125,126,128,129,130,131,132,133,134,135,136,137,138,139,140,141,143,144,145,146,148,149,151,152,153,158,160,162,163,165,166,168,169,171,173,174,176,179
24,31,42,46,74,76,86,91,96,105,106,111,115,126,132,247,402,646,686,782,810,832,848,923,937,955,1004
1,3,4,5,11,12,13,23,68,70,71,73,76,77,78,79,82,83,85,86,87,88,89,90,104,110,111
3,34,39,47,69,87,90,91,96,105,106,111,112,114,115,117,631,690,800,812,845,869,874,914,918,937,939,953,969,989,990,992,1004,1006
14,15,29,34,35,40,42,44,49,50,52,53,54,84,111,114,115,116,119,120,122,133,134,147,152,153,154,159,174,188,189,190,191,192
0,3,6,10,41,56,74,87,91,93,107,114,122,125,126,128,131,133,194,498,595,635,766,782,805,955,969,975,994
13,14,15,16,17,18,19,22,26,27,57,58,60,61,62,64,65,69,70,71,72,74,75,76,77,78,79,108,109
35,37,49,63,83,94,97,105,110,112,115,116,335,693,866,871,888,895,986,999
12,31,32,34,35,36,37,42,43,45,46,51,52,53,70,72,74,75,76,88
29,104,119,121,129,506,726,813,855,925,954
9,10,11,12,84,85,87,114,120,121,160
1,18,20,42,50,54,60,61,68,69,70,71,93,97,108,119,124,127,129,133,346,376,450,512,641,671,672,699,719,750,803,839,874,890,896,911,917,961,968,974,977,985,1003,1005
2,3,5,7,14,16,17,19,23,40,41,43,50,51,52,55,63,78,80,81,82,83,86,91,92,94,95,96,97,98,102,110,126,133,135,140,142,143,144,147,148,150,152,153
18,29,43,50,63,78,84,93,95,98,99,108,113,119,121,124,125,127,129,132,133,176,436,494,498,719,736,737,841,855,883,919,924,944,968,974,978,991,997,1003,1005,1008
1,3,4,6,16,30,31,32,34,42,43,44,46,53,54,56,58,76,82,85,87,89,91,92,93,94,95,98,99,101,102,116,121,131,132,133,138,139,142,144,148,155
15,33,45,49,60,62,64,84,94,98,110,112,115,116,122,125,126,128,130,131,133,134,164,166,332,421,498,522,594,637,647,682,689,712,748,760,764,782,785,854,895,950,973,983,996,997,1012
10,11,12,13,14,16,17,18,21,24,26,27,29,50,51,52,54,56,111,115,116,117,118,119,120,121,122,123,124,126,127,128,130,138,148,149,150,151,152,153,154,155,159,160,162,165,220
9,18,27,43,59,78,95,97,98,104,111,119,125,127,129,130,131,133,475,602,652,808,858,903,920,921,940,954,963,968,974,978,997,1000,1005,1008,1012
3,6,7,8,9,10,20,21,31,33,34,36,43,49,55,56,57,69,70,73,75,76,77,78,79,80,81,82,83,86,88,93,98,99,103,105,106
10,13,19,36,50,51,56,60,61,64,67,84,86,89,94,95,99,101,108,112,119,120,125,126,129,130,131,261,325,344,416,430,455,506,607,630,634,661,663,670,676,715,723,755,791,817,819,856,876,877,883,890,913,943,980,985,990,993,996,1005,1009,1012
1,3,7,9,11,12,14,15,16,19,24,26,37,41,43,44,45,46,52,53,54,55,59,60,61,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,96,97,98,99,100,101,102,104,105,106,107,109,110,111,112,113,115,117,118,119,134
110,116,875,930
88,107,120,149
0,29,44,76,78,97,98,101,113,117,119,121,127,129,131,132,133,334,341,407,423,576,641,643,701,730,752,796,799,870,916,927,931,933,949,967,974,977,1008
1,2,5,11,13,16,18,21,22,23,35,37,40,106,109,114,116,117,123,124,126,127,130,133,134,135,136,137,203,204,206,208,210,212,213,214,215,222,224
42,60,64,94,102,104,110,112,115,122,126,128,519,568,810,881,895,925,930,942,953,960,962,976,990,996
2,3,5,7,10,16,18,20,37,51,109,111,112,113,114,116,117,121,123,128,131,132,133,135,136,137
4,17,19,25,35,39,41,51,57,65,70,71,77,80,82,85,87,88,91,94,96,100,105,106,108,112,115,120,126,128,150,186,258,269,338,409,425,431,455,531,560,598,675,687,693,716,756,762,763,778,797,806,828,845,849,850,867,887,899,900,904,917,918,952,960,992,993,996,999,1001,1007,1010
2,4,7,10,13,14,16,17,18,21,24,27,28,39,41,44,47,53,54,55,58,69,70,72,73,74,78,79,81,82,83,84,85,86,87,89,90,91,93,95,96,97,98,99,100,101,102,103,104,105,106,107,109,110,111,113,116,118,120,121,126,127,129,131,135,136,137,138,141,143,145,156
1,9,95,98,108,124,125,131,133,541,602,774,805,852,978,991,997
2,6,7,15,17,18,20,24,61,63,67,68,86,104,108,109,113
26,44,45,68,89,102,110,115,126,127,451,613,820,949,953,962,974,987
1,2,3,4,5,6,7,9,113,114,115,116,117,118,120,121,122,124
18,44,95,123,949,968,1011
3,9,10,27,33,36,53
90,115,116,124,131,358,897
1,3,4,34,42,43,45
15,17,35,65,67,72,73,80,82,84,87,88,91,95,97,105,110,112,113,115,116,121,125,126,127,129,131,156,164,264,268,395,570,594,613,640,652,712,714,736,749,750,755,764,770,792,806,819,853,885,887,900,924,932,953,954,967,973,974,987,999,1007
1,4,11,14,15,16,19,22,35,36,39,45,46,49,50,54,56,57,60,63,78,79,81,88,91,98,100,101,102,103,104,105,108,111,112,114,115,117,118,120,121,122,123,127,129,131,132,133,134,135,137,138,139,140,141,143,144,146,147,149,156,169
3,19,34,35,46,48,49,62,66,96,105,106,112,116,117,122,126,155,313,350,414,510,523,646,691,735,745,796,867,871,873,879,914,935,987,999,1004
4,5,6,7,8,30,32,33,35,36,44,45,48,50,97,98,100,104,105,106,131,132,133,134,135,138,155,156,157,158,159,180,184,185,186,187,188
34,41,54,62,76,91,105,106,114,116,181,353,357,523,637,759,768,777,812,848,888,948,969,1004
4,6,9,55,56,58,59,63,84,85,104,105,113,117,122,124,142,143,147,148,149,150,151,155
11,36,52,57,60,61,67,73,99,101,110,124,129,131,133,172,418,458,474,561,615,698,786,809,864,936,954,979,985,1009
6,7,10,11,13,20,25,27,28,34,35,44,46,47,50,51,55,57,58,63,68,83,84,85,88,90,91,94,98,99
34,69,71,82,86,88,89,90,100,105,106,107,112,114,115,126,128,132,192,233,606,611,846,873,889,923,953,975,990,1004,1006,1007,1010
1,2,3,4,5,7,9,10,11,16,18,19,20,21,23,29,30,32,33,34,35,37,39,40,42,44,45,46,47,49,50,51,52
84,110,116,854,875,930
2,113,126,127,131,153
22,30,31,35,47,51,57,59,69,75,80,85,88,92,94,100,105,111,112,116,117,120,132,230,469,501,542,608,610,616,627,655,793,811,849,854,888,889,895,904,909,990,996,999,1001,1010
1,2,5,6,8,9,10,11,13,14,17,30,31,32,33,54,56,57,63,70,72,73,75,76,77,78,79,80,88,89,90,91,92,94,96,100,102,103,105,108,109,111,112,113,116,137
61,97,105,335,985
87,89,90,91,178
1,11,51,60,61,64,73,74,80,91,93,94,99,113,119,120,121,125,127,398,503,639,762,841,853,887,936,967,979,980,991,996,1009
1,12,13,34,36,48,51,52,56,68,69,102,103,104,105,106,109,112,113,114,115,116,119,120,121,122,133,134,136,137,138,154,155
18,20,34,35,39,42,49,76,80,82,90,96,98,102,106,110,111,114,115,116,117,122,125,268,341,697,777,796,812,867,869,873,968,969,982,983,989,992,997,999,1004,1006
1,2,12,15,17,18,19,20,21,22,23,24,25,26,33,34,50,84,85,86,87,90,91,92,93,94,95,96,102,103,105,107,108,109,125,126,133,135,136,139,144,169
1,37,44,78,80,113,119,121,129,131,133,438,841,919,933,1000,1008
2,3,5,57,58,61,84,88,89,108,113,114,116,168,170,171,194
85,100,112,610,849,1001,1010
11,96,97,104,105,108,193
7,19,28,31,62,70,75,80,84,85,87,88,90,100,111,116,122,130,136,268,344,486,509,510,534,783,849,899,900,941,982,1001,1010
1,2,4,6,7,8,9,10,11,16,21,27,29,46,48,51,52,53,54,55,56,57,59,60,62,64,66,67,69,72,74,77,94
18,37,42,76,113,127,133,154,315,468,642,839,968
1,3,7,9,10,15,31,33,37,38,39,42,43
5,47,60,63,64,66,67,69,73,76,80,84,93,94,97,99,105,111,113,117,124,125,129,131,132,133,134,203,252,264,290,468,469,593,601,616,650,676,742,750,779,793,811,813,827,874,898,904,908,913,944,950,959,967,991,996,1009
1,11,17,18,21,22,24,29,31,32,33,34,37,40,41,44,45,51,53,55,56,58,61,64,80,81,84,87,88,89,90,91,92,93,94,96,107,108,110,111,112,116,117,118,119,120,121,122,129,130,131,134,146,147,148,150,153
104,976
6,12
11,60,61,63,73,94,99,125,129,676,698,819,921,936,944,954,979,996,1009
23,25,36,37,57,58,59,67,69,72,73,75,77,99,100,101,112,113,114
1,5,13,49,60,64,67,84,86,89,94,97,99,102,104,110,112,115,116,126,130,131,133,134,435,496,561,568,579,622,630,634,647,715,753,789,804,820,823,827,853,876,885,895,906,913,925,930,950,956,962,963,990,996,1002,1009,1012
1,2,5,6,10,13,16,27,32,37,41,42,56,58,59,60,72,73,75,83,92,99,105,106,107,108,110,112,113,114,115,116,117,121,124,127,135,136,138,139,141,142,146,147,151,152,153,154,155,161,162,163,167,170,172,179,188
11,18,19,60,64,80,83,84,85,89,94,95,98,99,112,119,124,125,127,129,131,133,134,206,568,580,592,736,737,756,804,820,906,921,931,936,950,956,968,978,991,996,997,1005,1009
3,7,8,9,10,11,12,17,18,19,23,26,40,43,44,45,49,56,57,60,63,64,67,69,70,71,73,76,77,78,82,83,84,86,87,89,92,93,97,101,103,106,116,119,121
20,22,72,93,102,108,109,110,116,122,124,133,163,240,542,739,760,826,854,942,973
3,4,5,7,9,10,14,37,63,94,95,96,97,99,100,101,120,121,122,128,133
18,35,58,63,88,95,98,100,107,111,115,125,131,132,282,665,793,905,907,960,968,978,994,997,999,1010
1,2,3,5,8,9,13,21,23,24,26,27,28,85,86,87,88,89,90,92,93,96,97,98,99,107
86
1
1,22,29,56,59,79,86,91,92,98,108,119,120,125,131,133,289,391,542,705,713,726,781,826,978
1,2,4,13,14,16,17,18,69,70,114,116,117,119,120,123,124,126,127,128,148,149,150,194,195
13,38,44,56,60,61,64,73,75,80,91,94,101,102,108,115,119,124,127,134,172,244,853,887,904,924,933,950,977,979,985,993,996,1008
1,2,7,8,9,12,13,14,16,18,19,20,26,47,50,51,59,60,70,71,72,77,78,79,80,81,86,87,93,95,96,98,99,100
26,44,45,62,68,78,101,102,116,119,121,124,127,133,357,523,734,752,764,803,919,924,933,949,1008
1,3,6,9,10,35,36,44,45,57,75,78,97,109,110,111,112,114,115,128,153,154,155,156,168
24,41,45,62,68,84,113,115,122,126,128,131,133,293,357,438,451,523,543,637,673,681,699,768,988
1,2,5,25,65,68,69,77,78,91,92,96,97,98,99,100,101,103,113,117,118,120,121,133,134
46,65,74,75,88,105,106,112,114,126,342,777,782,806,871,889,900,928,955,987,1004,1006
4,5,10,11,12,15,17,19,28,38,39,40,41,42,43,44,45,46,51,55,57,65
49,57,60,80,94,99,102,104,112,115,116,122,125,130,134,374,459,466,568,600,676,753,756,760,762,895,925,950,956,962,983,996,1009,1012
2,3,6,9,11,14,21,22,25,26,28,29,30,105,106,107,113,114,115,116,117,119,120,121,122,123,124,125,126,127,128,130,132,207
18,43,78,89,95,98,121,125,129,131,133,813,841,858,919,954,968,978,991,997
5,6,8,9,10,24,44,45,68,72,78,82,88,89,91,108,113,115,116,128
14,82,105,106,115,117,126,132,782,811,846,987,1004,1007
1,3,4,5,6,7,12,13,14,15,16,17,18,20
1,10,12,14,15,22,24,30,31,41,52,54,55,56,60,62,67,70,71,77,79,80,84,88,91,92,93,94,96,99,112,113,115,116,119,120,126,128,130,150,152,156,172,194,270,278,295,316,382,416,425,426,448,455,482,509,518,547,607,622,627,649,682,689,692,705,756,761,848,850,867,875,887,895,900,906,913,932,952,953,960,980,987,990,1009,1012
2,3,4,6,7,8,9,12,16,17,18,19,23,28,31,32,33,36,45,46,50,53,55,57,65,66,70,71,73,75,82,84,88,90,91,92,94,97,100,101,102,103,104,105,106,107,109,110,111,112,113,114,115,116,117,118,119,125,127,128,130,131,132,133,136,137,138,139,142,143,144,145,146,147,148,150,151,152,153,155,156,157,158,160,161,164
29,44,95,97,98,124,125,131,133,134,582,726,757,949,963,978,997
15,16,37,38,42,48,49,50,55,56,59,74,78,79,80,81,84
34,46,75,106,107,114,115,126,132,646,812,873,923,928,953,969,989,994,1004,1006
4,18,19,28,30,79,80,84,87,90,91,94,97,98,99,142,145,147,156,159
12,14,19,22,24,30,41,52,55,56,59,60,61,66,67,69,70,71,74,75,79,80,85,87,88,91,92,93,94,96,97,98,99,105,112,113,115,116,120,128,130,131,191,207,213,261,316,344,356,404,410,426,427,542,553,580,616,682,689,691,692,702,704,742,743,756,761,769,823,837,848,856,864,867,874,875,885,887,895,913,917,927,952,953,980,985,990,996,1001,1009,1012
2,4,11,19,21,22,23,24,33,34,35,38,39,41,42,46,49,52,53,54,57,60,62,63,64,67,68,69,73,74,75,76,77,79,89,90,94,95,98,99,106,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,130,132,133,135,136,137,138,139,140,141,143,144,147,150,152,153,154,155,156,157,158,160,161,162,165,166,168,171,173,174,178,179,180,181,188
15,31,87,90,358,444,780
1,3,4,5,6,8,9
13,18,36,41,52,56,58,60,65,67,77,86,94,95,99,105,108,115,116,119,122,125,127,129,130,131,134,295,303,429,475,508,594,715,737,755,763,764,772,813,854,861,913,950,956,968,993,996,1005,1008,1009,1012
1,3,4,5,6,7,24,25,33,44,54,56,59,60,63,64,65,66,87,88,90,94,95,104,108,109,110,111,112,113,114,115,116,118,119,120,122,125,127,128,131,140,142,143,144,146,147,149,157,158,160,164
13,54,57,60,61,68,70,71,73,80,83,89,93,94,95,99,102,103,104,108,109,110,116,122,129,131,133,134,403,435,439,511,543,560,613,671,679,762,798,830,843,850,876,896,901,917,925,943,950,952,954,956,962,976,979,986,993,1009
2,4,5,7,8,10,14,29,32,33,34,35,39,40,41,46,51,53,58,60,67,71,73,74,75,76,77,80,81,82,86,88,89,91,92,94,96,97,98,99,100,105,106,107,114,124,125,126,127,131,132,134,139,143,144,145,147,150
13,19,23,36,45,46,52,53,58,60,61,62,64,68,73,77,81,85,89,93,94,95,98,105,108,110,116,122,129,130,131,133,146,205,218,319,429,518,523,554,557,580,593,594,595,599,626,637,646,681,699,750,763,764,791,792,823,834,844,854,858,876,883,891,943,973,979,993,996,1012
1,2,3,5,7,15,21,22,26,27,28,37,38,50,51,53,54,55,56,57,58,64,65,66,68,69,86,90,91,96,97,102,106,107,115,116,118,119,127,129,130,131,132,133,134,135,136,137,139,140,143,144,145,146,148,149,150,151,152,154,157,158,159,162,163,169,170,171,172,177
5,13,19,36,41,52,56,60,61,62,64,67,84,89,93,94,98,105,108,112,124,125,127,129,130,131,133,134,151,344,416,435,522,532,538,561,582,734,737,738,741,768,803,804,820,823,826,827,844,856,876,885,891,906,913,950,979,985,990,993,996,1000,1012
1,3,5,7,9,10,11,13,16,17,18,22,25,27,28,29,30,31,36,38,39,40,43,47,64,68,70,71,74,75,76,77,79,80,81,82,83,84,85,87,88,89,90,91,92,93,95,96,97,98,99,100,101,102,105,106,108,109,110,113,114,116,133
70,71,798,850,917
1,3,5,6,7
9,25,35,36,60,61,67,72,73,82,84,87,88,91,92,94,99,115,121,125,128,130,134,168,258,263,342,476,561,676,687,755,783,804,819,848,899,900,913,921,950,953,979,985,988,996,999,1007,1009
1,2,5,6,8,22,49,52,59,60,73,74,81,87,90,94,96,97,98,105,106,107,108,109,110,111,113,114,125,126,127,129,130,134,136,139,140,143,152,154,155,156,161,168,169,173,176,177,179
27,29,86,95,102,131,436,592,739
1,2,3,40,41,42,43,44,45
4,7,17,19,34,35,39,51,57,71,74,76,77,80,91,96,102,105,106,110,111,112,115,116,117,122,126,132,134,212,342,378,431,455,510,590,606,608,613,644,800,825,832,843,848,867,871,873,923,982,999,1004
1,2,3,7,8,10,11,12,14,15,16,17,30,31,34,39,41,45,48,49,51,53,55,59,61,62,66,69,70,71,72,73,74,75,76,88,89,90,91,92,94,95,96,97,99,101,104,105,108,110,112,115
27,47,63,78,84,86,89,97,111,113,121,127,131,133,280,366,467,574,652,765,793,841,908,963,982
8,9,11,12,13,14,16,17,26,27,39,40,46,55,59,65,66,67,68,69,70,72,73,74,76
11,19,41,49,52,54,55,60,67,69,70,71,90,93,96,99,112,113,116,126,130,132,133,172,206,293,316,384,426,454,463,511,547,560,616,698,719,738,753,798,805,818,850,852,867,895,896,913,917,936,952,956,990,1009,1012
2,3,7,8,10,14,15,18,19,20,23,37,38,41,42,47,56,57,58,60,62,64,67,68,69,70,72,74,76,77,86,87,88,89,90,91,92,93,95,96,97,98,99,101,102,103,104,105,108,109,110,112,117,120,122
72,74,91,95,96,103,106,115,116,117,126,129,133,168,402,453,534,714,813,875,953,954,955,970,987,1004
2,4,9,15,17,21,23,35,36,37,64,68,71,72,75,76,77,78,81,82,86,87,89,90,98,100
24,35,47,48,56,64,68,69,75,77,90,96,105,108,115,116,117,126,132,253,342,358,381,422,455,631,668,699,731,742,846,871,879,889,960,999
1,3,4,5,6,7,9,10,11,12,16,18,23,26,28,29,30,31,34,35,36,38,40,42,43,44,45,47,49,50,51,52,53,54,55,57
15,31,58,66,77,87,88,90,91,96,105,107,109,111,112,116,117,130,132,247,310,367,422,509,569,609,631,691,702,763,772,793,811,895,899,900,918,941,975
1,2,10,11,19,26,28,32,33,34,35,36,37,39,40,41,43,44,46,47,48,49,51,52,53,54,58,59,60,62,64,65,67,68,69,70,71,72,73
1,5,19,31,52,55,57,63,66,67,69,78,80,81,85,91,93,96,97,98,99,102,105,113,116,120,121,125,127,129,131,193,196,248,264,398,444,454,498,508,518,531,610,634,662,691,719,730,750,761,762,774,841,843,849,864,867,874,887,913,919,944,954,956,973,980,997,1005,1009
2,5,6,7,9,10,12,15,16,18,19,21,27,28,30,32,33,35,36,38,72,73,75,77,78,79,80,82,84,89,91,92,93,95,96,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,116,117,118,120,121,122,123,124,125,126,128,131,133,146,147,148,150,151,172
34,39,49,57,76,102,106,107,111,114,305,466,468,708,793,842,843,869,873,889,907,989,1004
1,2,26,30,31,32,33,34,35,37,40,42,43,46,47,51,52,53,54,55,56,58,59
48,49,66,71,80,82,85,88,90,105,111,117,132,136,171,203,268,290,350,427,515,564,569,571,655,668,747,779,793,811,818,846,850,870,871,889,899,904,959,1001,1007
1,2,4,5,7,8,13,14,19,25,28,31,42,43,44,45,46,47,49,50,51,52,53,54,55,56,57,58,59,61,65,70,71,72,85,89,90,91,94,99,100
3,19,34,47,58,65,74,80,82,85,87,88,91,100,105,109,112,116,120,269,409,427,534,580,693,697,778,783,828,849,895,899,960,980,1001,1007,1010
1,2,3,6,9,11,13,14,15,23,26,34,35,46,47,48,51,54,55,57,58,59,60,61,62,63,64,67,68,71,73,75,83,84,87,88,99
51,56,58,77,85,100,763,772,871,1001,1010
1,2,4,6,8,14,16,18,19,21,27
13,42,57,70,71,73,80,83,89,91,92,94,96,99,102,104,105,108,109,110,122,129,134,140,401,465,473,510,560,568,674,702,762,798,820,840,850,867,876,887,901,904,917,942,950,952,962,993,996,1009
2,3,4,5,13,15,26,27,29,31,32,34,36,37,39,41,43,46,50,51,55,56,59,60,61,62,63,64,65,66,67,68,69,72,73,76,78,80,81,86,93,97,101,102,103,104,106,109,110,111
13,26,36,49,52,60,68,73,89,99,102,104,107,112,115,123,126,128,130,131,133,134,218,261,450,515,518,647,699,773,823,842,876,885,891,925,950,956,962,976,987,990,994,1009,1011,1012
2,3,4,6,10,15,32,33,34,37,42,47,48,49,60,62,70,89,100,115,116,118,121,122,124,125,126,128,133,152,157,158,159,166,169,172,174,175,177,179,180,181,182,184,186,197
6,20,37,57,60,62,64,68,84,86,91,94,99,102,103,104,110,112,113,116,128,129,130,133,134,278,451,568,592,600,608,657,715,866,906,925,950,956,962,990,996,1005,1009,1012
2,5,8,10,12,13,14,15,16,17,19,20,23,25,35,37,39,41,42,44,45,46,127,129,130,131,132,133,135,136,137,138,139,141,142,144,145,146,148,149,150,151,153,234
1,9,13,21,31,33,44,45,54,60,63,68,70,71,86,87,90,97,102,104,105,108,119,124,125,127,133,134,140,197,292,323,347,401,444,500,511,525,547,602,694,706,787,798,844,850,896,905,944,949,952,962,963,993,1008
1,2,3,4,5,8,10,33,34,35,38,47,54,59,60,61,62,66,68,69,71,75,77,80,81,82,85,105,106,107,108,109,110,112,113,118,119,120,121,122,134,137,138,141,142,146,147,148,150,152,159,160,161,165,167
35,47,49,64,71,77,83,94,110,111,112,115,116,122,126,128,132,269,318,422,455,473,510,544,564,613,793,850,881,889,893,895,930,983,986,996,999
2,4,5,6,7,8,13,14,17,18,19,20,21,26,30,36,37,38,41,42,43,44,47,48,52,53,54,55,59,60,61,62,63,64,65,66,68
31,53,58,77,91,105,331,461,763,772
1,2,8,14,15,20,21,22,24,27
4,48,58,77,80,85,87,100,105,110,115,117,141,159,268,431,745,772,918,930,981,1001,1010
1,12,21,23,27,37,40,48,50,61,62,64,65,66,69,70,81,86,87,88,90,100,108
13,22,44,89,104,113,125,130,385,876,921,925,967,1002,1012
1,2,3,34,35,36,38,149,150,151,152,153,154,184,295
33,45,64,68,99,102,104,112,126,130,134,421,450,451,681,753,950,962,976,1009,1012
1,11,12,19,20,21,22,23,24,142,143,144,145,146,150,151,152,153,154,155,273
1,11,19,31,55,60,61,64,73,90,91,92,94,97,99,112,113,116,122,125,127,134,202,358,444,544,634,693,753,761,774,895,906,936,950,956,967,979,983,990,995,996,1009
1,11,12,13,14,24,31,39,43,44,45,46,65,66,70,111,112,114,115,116,117,122,123,124,125,126,127,129,135,136,137,140,147,156,161,163,164,171,172,190,191,198,200
36,65,84,99,104,129,312,770,976,1009
1,3,4,24,27,28,29,30,33,53
14,19,31,51,54,56,60,64,67,79,80,81,85,88,94,95,98,101,112,120,124,129,130,134,149,166,239,355,356,444,483,509,562,580,601,610,705,750,756,762,802,849,887,896,900,904,906,950,960,996,1000,1012
4,5,8,9,10,13,16,18,19,20,37,38,45,46,48,61,62,65,66,67,68,70,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,91,94,95,98,103,104,105,112,113,114,115,117,118,122
27,44,78,98,101,102,119,125,127,129,131,133,244,499,587,933,940,977,991,997,1005
7,11,12,15,16,19,23,28,32,36,38,91,92,93,94,98,99,103,104,107,111
1,9,13,52,60,63,64,84,88,89,90,93,94,97,99,102,104,108,112,113,115,119,125,129,130,131,133,144,287,294,327,629,662,676,753,755,774,818,819,825,864,876,883,885,895,906,921,925,956,977,990,991,993,996,1008,1009,1012
1,2,3,4,7,9,11,12,13,15,16,17,18,19,25,31,34,41,49,112,113,115,121,124,129,135,136,139,140,141,142,204,205,206,207,208,209,210,211,213,214,216,217,222,224,226,228,229,233,234,237,238,242,243,244,245,250
1,9,13,22,30,36,37,56,59,60,64,67,79,80,93,97,98,99,102,104,108,113,124,125,127,129,130,131,252,264,327,355,390,476,498,538,627,640,730,750,756,757,781,813,855,885,911,913,925,951,954,962,967,978,993,1009,1012
1,2,4,7,17,18,19,20,39,43,44,46,48,49,54,56,60,61,62,63,69,70,73,75,76,104,110,112,113,114,115,117,126,127,129,130,131,134,147,149,150,153,154,158,163,165,170,172,173,175,185,186,187,188,191,192,198
3,6,15,41,46,53,57,62,66,69,72,74,84,91,96,103,105,106,113,114,115,117,120,126,128,129,132,201,357,392,402,523,584,600,637,644,679,714,738,758,768,848,884,902,914,923,966,969,970,987,988,1004
3,4,8,21,22,24,25,35,40,41,46,48,49,57,64,67,68,74,75,77,79,80,99,111,112,117,120,121,122,123,124,127,128,129,132,133,134,138,140,144,146,148,149,151,154,155,157,159,160,167,168,174
31,35,36,55,56,61,66,67,73,80,82,88,93,96,97,113,116,119,121,260,264,410,476,509,565,662,691,712,783,863,867,887,913,932,963,967,973,985,999,1007
1,2,3,5,6,16,18,20,40,41,56,57,58,61,63,71,132,133,134,135,136,137,138,139,141,142,143,144,145,146,149,150,152,153,154,155,215,225,226,241
21,31,36,52,54,55,60,61,64,66,67,69,70,71,79,80,88,90,91,94,96,98,99,101,102,110,112,120,128,129,130,131,133,134,138,192,316,325,355,375,425,430,474,528,532,538,616,634,718,750,753,761,762,785,791,805,850,855,867,887,891,896,900,904,906,913,917,950,952,956,985,990,996,1009,1012
1,3,5,6,8,9,22,24,25,26,28,29,32,38,39,45,46,47,49,55,62,63,67,69,70,72,82,83,84,87,102,103,104,105,106,107,108,109,110,111,112,114,115,116,117,118,119,120,121,122,123,124,126,128,129,130,131,132,135,137,138,139,140,142,143,145,146,147,149,150,152,160,164,167,182
5,13,21,38,45,60,61,68,73,81,84,93,108,125,129,131,133,185,382,451,612,676,681,718,751,804,827,862,954,985,993
2,3,4,5,83,84,85,110,112,113,116,117,118,121,122,123,124,125,126,134,135,136,159,160,161,162,164,165,166,167,168
4,34,46,49,51,74,75,95,110,115,120,121,126,131,133,242,271,365,453,491,675,683,736,873,909,930,953,955
22,24,31,32,35,96,100,103,105,109,112,113,118,119,127,128,129,181,182,183,188,202,204,206,210,211,213,222
3,31,39,41,52,54,55,62,67,68,72,73,77,80,82,91,93,97,106,110,111,114,115,116,119,120,125,126,127,129,150,360,381,419,506,613,630,641,699,738,743,758,766,793,864,869,887,913,914,947,973,987,989,991,1004,1007
44,45,46,73,74,84,87,93,96,97,98,99,100,101,106,107,108,109,110,111,112,114,117,118,123,124,126,130,131,133,134,135,136,138,141,142,144,145,146,159,160,161,192,193,194,195,196,197,210,211,212,214,216,217,218,223
52,54,60,64,67,69,80,86,89,90,91,93,94,98,99,105,108,110,112,119,122,124,126,128,129,130,131,133,194,219,455,582,611,613,653,817,820,861,876,883,887,983,990,996,997,1009,1012
1,2,3,4,5,7,8,10,24,25,26,27,29,30,31,32,35,36,38,39,40,41,42,44,45,51,57,68,69,70,71,72,84,85,86,87,88,89,90,91,92,93,94,95,96,97,103
44,95,98,101,102,108,111,119,121,131,133,592,805,816,841,852,927,933,1008
1,2,9,10,12,13,14,17,18,33,51,52,54,55,56,64,71,72,75
18,43,78,98,125,129,131,133,407,574,852,921,968,978,991,997,1005
5,8,30,53,60,62,74,100,115,122,126,128,133,138,140,158,160
3,24,74,82,86,91,96,105,106,112,114,115,126,128,132,717,889,914,937,953,955,987,989,1004,1007
1,2,3,5,6,9,13,14,15,16,18,20,22,23,24,25,26,27,31,32,33,34,36,37,39
64,65,69,77,99,116,129,134,312,345,599,725,770,778,806,874,1009
1,11,12,21,22,23,24,25,26,27,28,33,34,35,40,41,42
34,48,107,111,221,480,733,745,793,907,994
5,8,10,11,12,14,15,16,17,18,19
25,31,47,54,66,69,82,85,88,90,96,100,105,115,469,509,614,818,867,874,898,960,1001,1010
1,2,8,10,11,13,14,15,17,18,20,27,28,29,30,31,32,33,35,37,42,43,44,51
19,46,65,79,80,82,94,105,106,112,117,126,132,412,778,806,811,846,889,895,923,987,996,1004,1007
1,3,6,7,11,12,13,15,16,17,18,20,22,23,24,25,26,28,32,33,34,35,36,37,38
18,21,35,60,64,77,83,86,94,98,110,116,122,125,126,129,130,131,134,494,718,760,786,872,893,930,931,942,950,968,978,983,991,996,997,999,1005,1012
5,8,9,10,11,12,23,25,26,31,34,38,107,109,123,124,126,128,129,130,133,134,135,137,146,147,148,168,169,174,175,176,178,179,183,184,185,187
11,65,77,88,109,439,763,770
2,3,28,29,63,97,103,104
1,4,14,19,24,26,31,41,49,53,56,60,62,69,78,84,88,90,95,101,102,107,108,112,115,119,124,126,128,130,133,147,149,186,192,215,253,419,425,426,466,482,509,513,550,587,591,649,659,705,742,768,774,776,852,856,874,895,906,953,960,975,990,1012
1,3,6,7,9,11,12,13,14,16,27,30,34,36,37,38,39,40,51,54,55,57,58,63,67,69,76,77,79,82,84,86,87,88,89,94,95,96,98,99,100,101,102,104,105,106,107,109,111,113,114,116,117,119,121,123,124,125,127,130,132,133,135,138
18,43,95,98,101,102,124,125,129,131,499,562,858,931,968,991,997,1000,1005
14,15,20,24,30,31,32,44,102,109,110,114,115,118,132,135,139,188,197
1,9,23,24,44,52,57,60,61,67,69,72,74,76,77,83,87,98,101,102,105,113,115,116,119,120,123,124,127,130,133,189,197,326,355,518,537,561,578,592,616,639,670,672,734,774,780,805,838,852,861,874,949,977,980,985,997,1011,1012
1,5,6,7,8,9,10,12,14,37,40,46,47,48,49,50,52,54,55,56,57,58,59,61,63,65,66,67,68,72,92,93,94,100,101,102,103,104,105,106,107,108,116,117,118,119,120,126,127,139,152,154,155,156,158,160,161,162,166
13,18,27,33,45,54,61,69,70,71,84,86,90,95,97,98,101,102,108,111,121,124,125,127,131,133,134,358,398,512,616,623,671,706,707,715,755,819,826,852,896,917,921,929,950,961,962,968,978,985,991
1,2,3,4,13,23,29,30,60,63,64,66,67,69,71,72,73,74,75,76,81,83,87,90,94,97,98,99,101,102,103,108,121,122,123,124,125,126,127,128,133,136,137,142,143,172,173,174,175,181,182
2,27,86,95,97,104,108,124,129,131,134,285,551,592,684,736,817,883,903,1005
1,4,5,27,28,29,46,47,48,49,62,63,64,65,66,88,91,105,106,107
0,5,11,21,29,36,41,60,67,70,71,73,80,84,88,93,94,99,112,113,119,122,124,126,130,133,134,172,173,187,196,270,316,321,345,376,472,474,561,622,641,662,698,719,738,753,804,827,853,887,899,913,950,952,956,967,990,996,1009,1012
1,2,4,5,6,8,9,16,18,23,24,26,27,29,30,33,35,53,59,62,63,64,67,74,97,98,99,101,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,134,135,139,141,149,172
22,35,36,41,49,62,64,65,72,73,74,77,80,82,84,87,88,91,96,98,103,106,110,112,115,116,120,121,123,126,129,130,131,132,133,213,303,326,371,475,542,559,679,693,711,770,789,885,887,897,899,900,918,930,937,945,971,978,987,992,999,1000,1007,1011
1,5,6,7,9,14,16,17,25,26,27,28,29,50,56,62,64,66,67,68,81,94,97,98,102,108,109,110,112,115,117,118,121,122,123,124,125,130,131,132,133,134,146,147,148,149,150,151,152,155,156,157,160,161,162,168,169,170,172,179,183,184,205,207
31,34,35,37,49,57,62,82,86,91,102,105,106,107,114,132,247,402,503,702,708,848,851,866,873,923,999,1004,1006
2,4,10,16,17,18,19,22,23,42,43,45,51,52,92,93,98,101,102,103,104,106,111,112,114,115,121,127,166
13,15,20,22,54,57,60,61,64,70,71,73,93,99,102,104,105,108,110,116,122,124,134,401,510,511,542,560,564,568,657,671,798,843,850,896,901,917,930,950,952,962,973,976,985,993,1009
3,4,7,8,15,16,19,20,22,24,34,37,38,39,51,54,55,57,59,63,66,67,71,72,74,76,77,81,82,84,86,87,92,93,95,101,111,119,121,122,124,136,139,142,143,145,146
119,133,1008
3,4,7
51,56,60,64,67,69,70,73,90,91,94,98,105,112,113,115,120,125,129,130,131,133,134,192,482,544,582,599,616,670,702,823,848,861,906,950,952,990,991,996,997,1012
1,3,6,9,12,13,14,15,16,17,19,21,22,25,26,27,29,31,32,39,48,60,61,62,63,64,65,66,67,69,70,71,72,73,74,75,76,77,78,79,81,88
20,35,48,49,66,75,76,86,90,102,105,106,114,115,117,126,644,668,735,745,746,782,811,825,871,889,928,945,948,999,1004
1,3,11,13,26,28,29,30,31,32,33,36,37,39,40,43,44,45,51,53,62,65,66,67,68,71,73,74,75,77,79
4,24,31,32,35,47,57,62,65,69,77,80,87,88,90,100,105,109,111,115,117,130,132,169,192,212,221,269,274,290,362,367,401,427,431,509,515,522,591,616,655,668,690,702,731,742,759,768,770,779,787,793,811,818,828,843,859,871,874,879,887,888,889,899,900,904,908,915,918,960,984,999,1010,1012
2,4,5,6,12,16,17,26,29,32,33,44,47,57,67,70,85,86,89,91,93,94,103,105,106,107,108,110,111,114,115,116,118,120,122,123,125,126,129,130,131,135,137,139,141,142,146,148,151,152,153,155,156,158,160,161,162,166,167,168,175,178,184,189,190,194,196,197,198,200,201,207,210,211
13,29,52,54,57,60,61,69,70,71,73,80,84,89,91,94,102,104,108,109,110,122,134,292,309,367,384,441,463,503,510,511,533,560,608,613,661,716,762,798,817,843,850,864,874,876,896,901,917,925,952,961,962,976,985,993
2,4,5,9,15,17,19,20,25,44,48,49,50,51,53,55,59,66,77,78,79,80,84,86,88,89,90,91,93,94,95,98,99,104,105,106,107,109,110,116,119,120,121,122,123,124,127,141,154,156,160,161,165,170,172,177
1,42,60,62,70,74,75,78,93,94,95,96,99,101,102,105,112,115,116,119,122,130,131,132,133,134,244,419,443,587,603,702,753,789,867,885,906,928,942,950,952,960,990,996,1009,1012
1,2,4,8,9,10,12,15,16,17,23,24,25,27,34,35,44,45,46,47,52,60,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,80,81,82,83,90,91,92,100
2,20,44,57,71,93,96,101,113,122,131,133,134,194,231,418,519,547,600,608,867,901,917,949,983
3,5,7,9,11,16,17,18,20,21,27,44,134,135,138,139,140,141,142,143,144,145,146,148,149
1,2,23,27,37,38,44,46,50,66,80,85,90,95,97,98,102,104,105,108,118,119,125,127,129,131,146,175,244,427,580,706,710,741,774,787,816,866,890,920,933,940,949,978,991,997,1005,1008
8,10,11,12,14,17,29,30,32,33,34,35,42,43,46,48,53,54,60,63,64,71,77,81,93,94,95,96,97,103,104,105,107,109,117,124,127,128,129,130,139,140,143,144,145,146,147,154
37,44,67,78,81,101,102,108,117,119,124,127,407,546,574,670,816,894,924,933,974,977,1008
1,4,8,10,14,19,26,27,28,74,82,100,101,105,106,110,111,112,113,116,120,157,166
1,13,33,42,45,54,60,68,70,71,84,86,87,94,105,108,110,113,119,124,131,133,134,197,391,401,450,500,511,560,568,659,671,694,715,754,798,810,817,850,896,917,950,952,961,967,996
1,2,4,5,9,11,12,17,31,42,43,45,46,48,49,50,51,52,61,63,64,67,89,90,91,92,93,94,95,98,99,107,108,110,111,112,120,121,122,127,129,135,136,141,149,150,152
78,101,102,119,124,127,499,924,977
2,25,28,70,72,99,100,101,143
6,24,45,52,68,95,98,102,110,116,119,126,130,131,244,417,451,543,592,764,997,1008
4,5,6,7,79,80,83,84,88,94,96,106,107,111,112,113,115,122,123,124,127,129
17,31,75,79,85,100,105,106,114,117,132,212,245,796,928,945,981,984,1001,1004,1006,1010
1,2,3,4,6,13,16,64,65,67,69,71,72,73,74,88,89,90,92,126,127,134
17,28,46,48,56,58,68,75,77,85,115,117,646,688,699,763,772,856,897,981,1001
1,7,11,12,13,16,17,19,27,28,31,33,34,40,41,44,45,46,49,51,52
1,13,17,33,42,50,54,68,70,71,78,84,86,95,98,102,105,108,125,129,131,133,242,245,500,585,671,715,774,850,859,890,896,917,919,931,952,961,962,978,991,993,997,1005
4,5,6,7,8,15,27,28,51,54,60,61,62,65,84,87,89,90,94,97,98,101,104,105,106,107,111,112,114,115,117,124,136,138,144,145,155,168,171,174,176,177,193,196
9,35,51,55,57,60,61,67,72,73,84,88,91,93,94,95,96,103,107,110,112,116,117,119,125,131,132,200,320,327,529,676,679,755,783,819,875,900,906,913,921,930,959,964,970,977,985,996,999
1,5,67,69,70,71,73,79,81,97,106,109,110,112,114,115,116,120,124,127,128,133,134,135,144,145,147,149,150,151,211,215,216,219,220,221,223,225,226,230,231,232,234,238,239,240,242,244,248
44,95,122,131,133,134,736,949
8,10,11,14,16,141,142,150
4,15,17,35,47,48,57,62,66,69,80,82,87,88,90,105,111,112,114,116,117,122,134,136,156,181,182,268,295,297,422,431,472,510,569,608,616,668,691,693,702,749,787,793,811,818,828,843,846,859,871,874,887,889,898,899,900,904,908,918,960,973,990,999,1007
1,2,3,5,14,15,21,23,27,32,44,46,53,61,72,80,83,89,90,95,96,98,99,100,101,102,103,104,105,107,108,109,110,111,114,117,118,123,126,127,129,130,131,134,135,140,142,143,153,156,157,159,164,171,172,174,176,182,183,187,190,193,194,196,198
5,13,18,29,36,43,52,60,67,73,78,89,95,97,98,99,101,102,108,113,124,125,129,131,133,238,328,395,407,435,476,498,499,518,595,643,662,826,827,858,863,876,885,913,931,956,962,968,978,991,997,1002,1005,1009
1,5,10,11,15,25,26,27,28,29,31,38,43,45,63,67,69,72,73,77,79,93,100,109,114,115,116,117,118,119,121,122,123,124,125,126,127,128,129,130,131,135,136,137,143,144,147,152,160,163,173,175,182,185
0,1,5,14,36,43,52,55,73,84,93,95,97,98,99,108,119,124,125,129,131,133,410,441,458,476,593,595,604,643,649,750,774,786,791,804,822,827,891,978,993,997,1005,1008,1009
1,4,5,6,11,12,14,16,17,20,22,23,24,70,72,73,74,76,77,80,139,147,148,149,151,152,154,155,156,157,158,159,160,161,163,165,166,167,169,208,209,215,216,217,219
5,25,54,60,63,66,67,69,70,80,94,95,97,99,102,105,121,131,133,270,338,560,614,616,691,700,702,736,746,762,825,827,841,913,944,996,1009
1,6,7,8,10,15,16,17,18,20,23,27,28,32,46,47,48,57,84,85,87,89,92,93,94,95,96,100,101,102,104,105,106,107,108,109,110
3,17,56,58,77,100,106,111,116,117,132,763,766,772,793,846,914,945,959,981,992,1004,1010
8,9,10,15,20,26,30,33,34,41,47,50,56,58,61,66,68,69,71,78,79,81,87
45,54,68,70,71,83,94,101,102,104,108,124,134,287,294,316,511,623,694,757,798,850,896,917,952,986,993
8,10,13,16,23,24,25,28,29,31,32,33,90,92,93,94,98,99,106,107,111,117,118,119,121,122,123
58,77,103,519,763,772
42,66,70,71,79,98
5,19,22,35,36,40,55,63,67,69,73,74,82,84,85,87,88,91,93,94,95,97,113,115,119,121,125,126,127,133,264,382,383,441,498,604,652,805,827,829,853,913,929,944,947,953,955,963,974,987,996,999,1007
1,2,3,4,5,6,8,9,11,12,23,31,42,43,44,45,46,49,50,52,55,58,60,82,83,84,89,134,136,139,140,141,142,143,148,149,150,153,154,155,158,160,161,162,163,170,178,179,180,191,193,194,205
99,104,111,113,662,793,967,976,1009
2,4,5,7,8,9,10,12,14
9,13,33,38,44,54,60,63,70,71,81,93,94,97,101,102,117,119,121,124,125,127,129,131,133,134,294,296,385,499,525,667,671,684,710,734,757,786,821,850,862,896,911,916,917,921,924,929,933,949,958,961,962,977,981,996,1000,1003
21,22,38,44,66,69,70,75,77,79,82,86,87,91,94,97,100,104,105,108,111,134,135,136,139,142,143,144,145,146,149,170,171,173,179,182,184,185,188,189,192,195,199,202,203,205,206,207,226,229,234,236,238,242,245,246,247,248
16,31,34,37,77,86,106,109,128,133,439,689,740,763,866,934,1004
73,74,75,76,90,91,92,113,114,115,136,137,148,150,151,213,214
15,49,57,60,64,80,86,87,89,91,94,102,104,110,112,113,115,116,122,125,130,134,136,140,156,295,323,459,510,568,592,715,727,753,756,760,762,820,825,843,854,876,887,906,912,925,942,950,960,962,976,990,996,1012
1,2,3,4,5,14,18,19,22,23,32,48,67,68,72,73,74,84,118,119,123,125,126,129,130,131,132,134,136,139,140,142,143,144,148,154,158,159,162,163,165,167,168,170,171,182,188,190,191,198,203,204,209,213
4,14,16,26,30,34,47,48,50,56,66,75,76,77,82,85,106,107,109,111,117,126,128,132,228,250,439,493,598,636,683,697,735,745,746,763,782,847,873,907,928,934,935,948,957,959,970,975,1001,1004
1,2,7,12,15,18,21,43,49,50,52,53,56,62,63,74,79,82,83,87,88,110,116,119,124,125,126,127,128,129,130,131,140,152,153,155,176,177,179,181,182,186,187,190,196,197,198,199,210,214
54,70,71,108,122,134,136,292,316,384,653,896,901,917,993
9,10,14,20,21,161,162,163,164,169,173,175,180,184,185
18,31,41,44,46,59,60,62,66,67,74,75,78,80,84,93,95,98,101,102,105,111,112,116,117,119,130,133,324,339,427,461,517,555,577,587,670,680,738,746,756,808,825,861,875,928,949,982,990,997,1012
1,18,24,25,26,27,31,32,35,37,39,40,42,44,46,47,57,59,61,63,64,73,75,77,86,88,90,93,94,96,97,101,109,110,111,112,113,115,116,119,120,121,122,123,124,125,126,128,130,132,134
1,93,95,99,105,108,112,131,133,427,568,593,736,753,774,956,993
1,3,4,7,8,9,10,11,21,22,25,26,27,28,29,32,33
15,116,131,133,314,594,712,875
34,82,84,87,88,89,91,98
8,27,95,121,124,131,133,467,784,841,929,940
45,46,53,110,111,115,123,138,145,162,168,169
11,13,20,22,59,68,71,131,133,352,383,901,917
2,3,4,5,6,7,8,9,12,13,14,15,16
13,15,36,38,42,44,51,52,54,57,58,60,61,64,67,70,71,73,75,77,80,83,89,93,96,97,99,102,104,105,108,110,113,116,120,122,124,134,136,140,156,200,235,260,264,292,346,354,384,399,435,441,459,463,473,508,510,511,518,528,560,564,600,608,613,623,711,716,748,762,785,798,802,807,843,850,876,893,903,913,917,925,952,956,961,962,963,973,976,979,980,983,993,1009
2,6,7,8,11,12,15,16,20,42,43,44,45,46,47,50,57,61,62,64,65,73,75,76,77,80,82,85,92,94,97,100,101,102,109,114,115,116,117,118,119,120,121,122,123,124,127,128,129,130,131,132,133,135,141,142,143,145,146,147,148,149,150,151,152,154,155,164,165,166,167,168,169,172,175,176,178,179,182,183,187,188,189,190,192,193,194,195,196,197,198,200,202,203
3,59,69,73,74,79,88,113,115,116,121,125,126,131,257,462,594,692,712,755,828,841,914,953,955,967,973,987
1,16,17,20,26,40,41,42,45,52,53,54,73,82,83,84,86,93,94,95,96,97,98,100,106,107,109,117
17,26,35,48,88,107,112,117,128,132,400,412,425,460,686,693,735,745,811,828,871,889,907,984,999
4,19,21,41,42,43,44,45,74,97,98,119,147,148,149,150,165,170,171,172,173,174,175,196,198
131
11
19,34,35,46,69,87,88,91,106,107,112,115,126,128,132,616,646,773,782,871,873,889,900,953,959,975,987,988,999,1004
1,2,3,6,7,9,11,12,17,19,20,22,28,89,90,91,92,122,123,124,125,126,127,128,129,131,132,133,134,139
17,47,56,66,87,88,90,96,105,109,115,117,120,122,126,128,132,293,297,310,353,367,422,455,482,508,510,631,647,691,702,746,747,811,846,867,889,900,960,966,987
1,4,5,8,9,10,14,17,21,22,23,27,28,30,34,35,36,37,38,41,43,44,46,48,49,50,52,56,57,58,59,60,61,65,66,69,73,74,75,76,77
23,24,49,50,53,54,57,72,74,85,87,105,106,107,112,116,130,580,628,693,708,895,945,955,973,994,1012
2,3,8,9,24,32,35,36,37,38,39,40,41,42,44,45,46,47,49,50,53,54,55,56,57,58,59
13,37,38,44,50,60,61,62,64,68,69,73,78,84,86,89,94,108,124,130,131,133,306,307,325,523,790,820,866,874,892,949,957,979,1012
1,2,3,6,7,10,13,19,22,23,24,25,43,44,45,50,53,54,67,73,82,83,84,85,86,91,92,97,98,99,117,120,121,124,130
13,15,20,30,35,42,45,56,57,62,68,72,84,89,102,104,110,112,115,116,121,122,123,125,126,128,130,134,455,459,510,613,681,714,741,782,819,843,847,875,876,881,895,906,912,925,930,938,942,950,960,962,976,999,1011,1012
2,3,8,9,11,12,21,22,24,27,28,29,31,33,38,46,64,67,76,86,87,118,119,126,131,133,138,140,143,144,145,146,148,149,150,151,153,154,155,156,157,159,160,161,164,167,170,171,185,187,191,193,198,199,200,205
13,52,76,80,89,102,104,108,112,113,129,130,131,287,435,518,674,855,864,876,887,925,962,967,990,993,1002,1012
4,8,10,15,38,40,50,55,57,58,59,174,175,176,177,178,179,180,183,186,191,199,201,202,204,208,228,343
13,18,22,33,36,38,45,52,60,61,62,64,67,68,73,74,75,84,86,89,93,94,99,108,112,119,120,125,127,130,131,133,213,218,231,365,383,408,416,451,497,506,561,579,622,652,695,699,715,755,768,789,790,792,804,819,820,853,864,876,885,891,895,906,955,965,985,990,993,996,1009,1012
1,37,39,40,41,42,44,47,48,49,50,55,57,68,69,72,73,81,82,85,87,89,91,94,98,101,120,122,123,126,131,132,133,139,140,141,143,144,145,146,147,148,150,151,152,153,154,157,158,159,160,161,162,163,169,170,171,174,175,176,178,179,180,181,182,188,189,191,194,196,198,201
21,99,104,133,718,956,976,1009
1,74,84,85,86,88,98,169
13,80,89,97,102,104,105,108,113,129,130,662,750,762,855,883,888,903,925,932,962,1002,1012
1,2,38,40,52,56,57,58,60,64,179,180,181,182,185,186,187,188,191,192,204,240,355
1,5,13,19,44,52,61,63,67,69,80,81,85,90,91,93,96,97,99,105,108,111,113,116,119,121,122,125,127,129,131,133,196,207,248,260,280,358,371,395,496,508,518,565,599,630,634,662,676,701,702,704,719,750,755,756,762,789,793,818,839,841,849,854,860,863,864,867,874,883,887,904,913,924,929,932,944,949,956,967,983,985,1009
2,4,6,8,9,13,15,16,17,18,24,25,26,29,30,36,41,43,57,59,60,63,82,86,87,91,94,98,105,106,107,111,112,113,115,117,118,119,120,124,125,126,127,129,130,131,132,133,135,136,137,138,140,141,142,143,144,146,149,150,154,155,156,157,158,160,163,167,168,169,170,173,174,175,176,181,182,183,184,185,187,189,201
78,98,119,133,919,978,997
2,7,11,64,66,68,71
23,34,40,69,80,91,96,100,116,117,129,131,214,268,405,829,848,873,874,937,981,1005,1010
3,4,12,13,14,18,20,23,24,26,27,30,33,34,41,42,43,44,45,46,48,49,52
11,60,61,64,66,67,73,80,81,94,99,105,110,112,116,121,127,134,279,474,599,702,753,887,895,906,913,930,936,950,979,990,996,1009
24,31,46,53,54,56,63,64,66,85,87,88,102,127,129,130,131,143,145,146,147,148,151,152,154,164,166,167,172,184,199,203,214,216
12,22,36,52,54,60,61,69,70,71,75,76,78,79,87,94,95,98,101,102,103,104,112,113,119,120,129,130,131,134,138,197,219,486,497,553,567,582,640,711,753,769,781,823,825,864,874,896,906,917,928,950,952,961,977,978,985,990,1012
3,5,8,9,10,13,15,16,18,22,25,27,29,30,31,35,44,48,54,60,61,62,71,72,73,74,75,85,91,92,94,95,97,99,100,101,102,105,106,107,112,113,114,119,122,123,124,125,126,130,131,132,133,134,135,136,138,141,151
6,24,41,42,60,62,64,67,86,89,94,99,102,104,108,112,130,134,165,357,419,459,523,592,738,820,883,906,913,925,950,956,976,996,1009,1012
37,39,72,73,82,102,110,111,112,113,117,121,122,126,127,128,136,138,139,145,147,148,150,152,163,164,165,166,167,169,171,172,174,175,178,186
11,25,44,60,63,67,73,78,89,98,99,102,119,120,124,127,131,202,338,474,587,614,757,786,861,919,927,933,936,944,949,977,980,997,1009
3,7,22,24,27,28,29,35,36,98,100,101,105,108,124,132,134,138,140,141,142,144,146,147,148,153,214,222,223,226,233,237,240,241,243
36,45,81,84,99,104,113,279,476,634,788,903,967,976,1009
1,2,7,8,19,22,23,25,26,27,28,29,30,32,43
5,9,13,29,55,60,63,64,67,84,86,89,93,94,97,101,104,107,108,112,113,117,124,127,129,131,132,134,175,189,193,248,285,309,393,490,561,593,602,634,674,715,723,743,750,753,761,855,885,903,906,913,925,944,950,967,974,990,993,996,1002,1003
4,6,7,8,11,12,13,14,16,17,19,20,22,26,27,30,47,48,49,53,54,88,89,156,158,161,162,163,208,209,211,212,215,216,217,224,225,226,227,228,229,230,234,235,236,237,238,239,240,245,246,247,250,251,252,253,275,277,278,281,282,283
5,9,13,29,36,52,60,61,64,67,73,84,86,89,93,94,98,99,105,108,112,115,119,122,124,126,127,129,130,131,133,134,202,231,263,293,295,325,345,401,455,458,512,561,622,641,652,672,682,695,715,734,753,789,792,803,804,820,823,827,855,876,883,885,891,895,906,913,927,950,977,979,983,985,990,993,996,1002,1008,1009,1012
2,3,6,7,8,11,18,21,24,33,35,39,44,50,51,59,60,62,63,69,82,83,96,97,99,103,108,110,121,130,133,134,135,136,137,138,140,141,142,143,145,146,147,149,150,151,152,156,157,159,163,164,167,169,170,171,172,173,175,177,178,180,181,186,189,192,193,197,198,199,203,205,206,207,213,218,223,224,228,230,241
2,12,13,19,45,49,61,64,68,72,73,77,85,86,87,89,93,95,108,110,111,112,125,126,129,130,131,133,138,218,303,344,448,450,455,475,595,599,613,677,681,699,769,842,849,853,876,877,941,979,990,993,1005,1012
1,3,4,5,52,62,63,64,79,81,83,84,85,86,87,88,90,91,92,93,95,97,98,99,101,110,112,113,114,118,119,120,121,123,124,125,126,127,128,130,155,156,157,166,167,169,170,171,172,173,175,176,177,186
26,31,47,49,58,59,63,71,77,85,87,88,91,100,123,197,269,305,509,606,772,776,849,900,905,941,1010,1011
1,3,5,6,20,21,28,30,31,32,40,41,43,51,55,56,57,58,60,62,65,66,67,68,75,76,84,86
17,48,84,105,114,369,400,437,460,534,570,686,702,728,745,969,1006
45,47,48,49,78,81,86,87,90,92,93,94,95,101,103,121,132
66,98,117,119,124,131,133,592,927,970,1008
1,65,66,75,78,88,111,112,176,177,186
13,27,29,49,60,67,73,86,94,97,98,99,104,122,124,126,130,133,134,238,264,309,474,554,715,805,852,913,920,925,940,942,950,956,976,978,996,997,1009,1012
3,4,10,12,14,15,16,19,20,22,28,34,47,62,63,105,112,124,126,127,128,130,131,132,134,135,136,137,147,149,150,161,163,166,167,169,170,174,177,184
31,35,37,82,90,114,128,132,351,668,851,866,969,988,989,999,1006
2,4,57,64,65,154,155,156,158,159,199,212,233,234,299,301,302
1,11,33,43,52,63,64,66,67,73,84,85,90,94,95,97,98,99,102,105,111,112,119,125,127,129,131,132,133,264,314,349,361,398,474,592,622,652,655,698,702,723,737,746,753,774,779,789,804,825,833,849,855,864,906,913,921,924,944,959,963,974,991,996,997,1005,1009
4,6,7,10,12,13,14,17,18,19,36,41,42,43,45,47,49,51,52,54,55,57,59,66,75,80,82,86,89,90,93,94,95,96,97,98,101,102,103,107,108,110,113,114,115,117,118,119,129,130,132,136,137,139,140,141,142,143,144,146,147,148,150,151,153,156,158
25,47,80,90,105,121,124,131,133,206,338,358,469,474,756,762,833,841,929
1,2,4,5,6,14,18,33,62,63,64,65,66,67,68,69,71,75,76
15,22,58,65,70,77,80,83,85,90,98,102,109,110,116,119,122,131,303,323,355,472,510,610,725,748,762,837,844,875,930,986,1008
3,8,13,14,15,41,42,43,45,46,47,48,49,89,108,115,117,123,124,125,126,127,128,130,145,146,147,152,154,155,161,162,169
19,20,58,65,71,72,77,83,95,98,109,116,122,125,131,133,237,413,439,472,704,714,725,736,772,840,850,893,978,997
1,23,25,28,30,34,36,42,43,46,50,52,92,95,100,113,114,136,137,177,178,179,180,181,182,183,185,191,192,194
11,60,61,64,73,79,80,94,97,99,112,116,122,127,129,134,270,472,478,724,753,760,895,904,906,936,950,979,990,996,1009
14,21,68,72,75,76,78,82,83,86,112,116,117,118,119,126,128,129,130,131,133,135,136,137,148,150,157,204,212,214,215
4,14,17,22,35,47,51,54,56,57,64,65,66,69,77,79,80,87,88,90,94,105,109,111,112,114,115,117,120,130,132,159,212,219,221,226,245,258,268,269,297,367,369,400,511,560,600,609,623,649,663,668,683,690,692,702,716,742,749,778,783,798,802,806,807,818,828,837,871,874,888,889,895,896,899,900,904,908,918,941,981,999,1006,1012
1,3,7,10,15,18,20,22,24,30,31,39,40,42,43,45,49,66,77,84,85,88,89,90,91,94,95,96,97,98,99,100,105,106,107,108,109,115,116,117,118,119,120,121,122,123,124,125,126,128,129,131,132,133,134,135,138,139,141,143,145,146,147,151,152,154,158,161,164,165,166,167,168,169,170,172,175,176,184,192,193,196,199,200
6,26,76,86,90,105,115,141,150,644,686,776,948
2,4,5,7,8,17,19,22,23,25,55,57,58
19,35,47,75,87,91,94,100,105,115,117,120,126,132,486,811,846,889,918,953,965,996,999,1010
1,3,5,6,7,8,9,11,12,14,16,18,21,23,24,26,31,32,33,35,36,37,39,41
8,20,22,42,44,55,57,60,83,93,97,108,116,119,121,127,133,134,176,190,346,541,581,743,794,839,893,938,973,986,1008
4,5,6,7,9,11,15,16,20,21,47,48,60,78,82,84,86,132,133,135,136,137,163,165,181,182,185,189,201,202,220
20,26,32,52,58,77,84,86,97,99,108,109,110,115,116,121,122,125,126,128,129,131,164,303,387,413,417,439,514,592,594,666,712,752,763,764,772,854,875,880,881,883,929,930,954,963,973,1009
1,2,3,4,8,30,31,32,34,35,37,39,65,66,96,103,106,107,129,130,133,136,137,138,139,140,141,142,146,147,154,168,171,172,173,174,176,180,181,182,183,184,188,189,192,194,202,203
19,35,42,43,75,76,82,86,88,90,98,102,106,107,111,117,125,129,132,244,350,353,468,584,704,796,810,811,818,825,871,907,928,948,981,982,997,999,1004,1005,1007
1,7,10,11,13,15,17,19,20,21,27,33,35,36,50,55,56,59,62,63,64,66,67,68,69,70,72,73,74,75,76,77,79,80,83,97,103,109,111,114,115
18,25,27,29,43,50,54,65,67,73,89,92,93,94,95,98,102,108,112,113,125,127,129,130,131,133,134,235,252,309,338,498,538,561,662,665,753,770,786,813,820,823,858,863,876,883,885,890,895,906,913,921,931,940,950,954,962,967,968,974,978,990,991,995,996,997,1002,1005,1012
2,3,6,7,19,21,22,23,25,26,29,31,32,35,42,61,62,63,70,75,106,107,125,132,148,149,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,168,171,172,173,174,182,184,185,186,187,189,191,192,194,196,197,199,201,202,213,217,225,227,230,238,239,251,258
34,46,47,75,86,88,106,115,126,132,646,846,873,889,923,928,953,987,1004
3,11,12,14,19,20,27,41,44,48,52,53,56,57,61,63,65,66,73
13,33,49,60,62,64,67,68,72,73,77,84,89,94,97,99,102,104,110,112,115,116,122,126,128,129,130,131,133,134,218,326,421,450,451,459,538,568,624,689,699,712,773,782,823,842,854,875,881,913,925,930,942,950,953,956,962,963,990,996,1002,1009,1012
2,22,25,27,28,30,31,66,68,69,70,71,73,75,76,80,85,86,101,102,104,119,122,128,132,133,136,140,145,147,158,159,179,181,182,183,184,186,187,189,191,195,196,197,199,201,202,203,204,205,206,207,208,210,211,214,217,218,219,221,222,223,226
5,19,20,24,26,67,87,94,104,110,116,122,126,128,133,134,163,248,318,319,425,453,515,522,592,673,785,854,875,881,913,918,942,950,976,996
1,2,8,47,48,49,58,60,61,86,116,129,147,151,153,154,156,157,158,159,160,161,163,165,166,197,198,202,203,206,207,216,226,227,228,229
97,98,108,121,125,129,131,133,498,826,841,997,1005
1,4,5,9,11,12,38,61,63,64,66,69,70
11,18,63,78,95,98,99,104,125,127,129,131,132,133,571,574,582,676,736,737,920,944,968,974,991,997,1005,1009
2,5,8,10,17,19,22,23,29,35,41,54,56,77,79,81,84,85,86,87,88,91,94,98,100,102,108,111
17,26,34,39,48,50,57,82,87,106,111,115,117,128,132,170,203,228,310,337,351,423,584,598,697,735,745,776,793,796,811,812,843,845,846,870,889,890,923,957,959,981,982,984,988,992
1,5,18,20,31,56,62,64,65,67,88,91,101,106,177,181,182,184,185,186,188,194,205,206,210,212,221,222,224,226,227,230,236,238,240,245,246,257,264,278,284,286,296,316,318,319
44,58,78,93,101,102,115,117,119,123,124,127,132,244,298,499,587,911,933,949,959,970,977,1011
8,11,14,15,24,37,39,50,53,56,64,69,131,133,136,138,141,142,148,150,162,173,176,179
3,5,34,35,53,75,76,87,102,105,106,114,117,130,132,297,353,504,572,598,644,759,766,796,831,871,873,889,914,923,928,941,948,992,999,1004,1012
3,4,6,7,8,10,12,13,14,16,41,42,43,44,50,53,66,67,68,71,72,90,91,92,93,97,99,100,102,104,106,107,108,120,121,131,132
6,32,36,41,49,60,62,64,84,89,94,99,104,108,112,115,122,126,128,130,131,133,134,308,332,397,425,455,515,568,603,630,738,753,789,876,925,942,950,956,976,990,996,1009,1012
2,3,4,7,9,11,12,13,14,15,18,34,38,39,44,47,63,78,84,95,96,97,101,102,103,104,110,111,112,114,116,120,121,122,123,124,127,132,136,138,139,143,145,159,170
9,14,25,45,47,55,62,64,65,68,69,78,80,90,92,94,101,102,105,107,112,117,119,124,125,127,128,130,131,132,133,192,328,499,544,576,587,615,649,668,734,743,744,761,770,787,797,803,806,818,819,874,879,888,904,906,908,921,924,977,994,995,996
1,2,3,4,5,8,10,12,18,19,20,22,23,29,30,33,40,51,57,61,62,65,67,71,73,142,143,144,145,147,148,149,150,152,153,155,157,158,159,160,161,162,163,164,169,172,173,187,188,189,190,191,193,199,200,201,202,203,207,209,213,214,217
14,46,59,75,84,91,115,120,160,289,669,897,928,965
1,2,3,33,34,43,50,73,82,83,85,92,102,103
31,34,35,41,46,57,62,66,74,75,83,84,91,95,103,106,113,115,120,126,129,131,132,133,317,608,640,738,807,848,872,873,897,923,928,953,955,980,987,1004
2,3,4,50,55,57,58,59,71,72,76,80,84,91,92,95,96,111,115,130,132,133,134,135,136,138,139,168,171,172,176,177,181,182,183,189,201,202,207,210
65,76,85,87,88,100,312,362,645,749,770,778,783,806,899,900,918,941,960,1001,1010
4,6,10,12,21,30,31,32,33,34,35,36,37,38,40,43,44,45,46,50,59
35,65,91,96,105,107,112,114,117,128,132,221,515,811,848,889,895,964,989,999,1006
3,4,6,12,14,15,16,150,151,158,160,161,166,167,169,171,172,173,218,221,308
0,13,18,27,36,43,44,52,60,67,73,84,89,95,98,99,102,104,107,113,124,125,127,129,131,133,287,429,435,476,518,532,695,736,741,852,864,876,913,920,925,931,940,949,962,968,976,997,1002,1003,1005,1009
1,13,17,22,29,30,33,40,41,43,48,50,59,62,64,65,72,79,80,85,87,90,94,108,118,121,123,126,128,129,132,134,135,138,139,140,141,148,150,151,153,154,159,162,167,171,173,174,175,176,189,190
13,57,60,64,69,71,92,104,105,108,110,116,131,133,401,511,564,608,722,732,854,874,901,917,930,973,976,993
1,4,5,6,7,8,10,11,12,13,33,37,38,39,40,41,42,44,46,48,49,50,53,54,60,61,62,63
13,57,60,62,64,84,91,93,94,96,99,108,110,112,116,121,122,126,130,131,133,568,608,753,942,956,990,996,1009,1012
1,2,8,9,13,14,15,16,20,24,37,38,39,47,48,50,87,96,104,107,112,113,114,116,119,120,126,128,140,148
22,31,37,42,49,56,57,59,62,74,91,96,102,106,107,114,116,117,122,126,128,132,133,253,266,291,339,390,523,708,768,782,796,825,851,866,923,942,945,964,1004,1006
1,4,16,19,26,27,31,32,40,49,50,65,71,109,110,119,120,121,123,129,130,134,135,136,137,138,146,147,153,157,158,164,165,166,172,178,182,183,186,187,222,231
5,11,18,25,27,29,43,44,52,54,67,70,71,73,78,86,93,97,98,99,102,108,117,119,121,125,127,129,131,133,241,248,338,384,574,599,614,670,687,739,786,789,792,797,813,825,827,841,864,885,891,913,917,919,931,933,936,952,962,963,968,974,977,978,991,993,997,1005,1008,1009
2,7,10,19,20,23,24,25,27,28,34,35,36,37,39,41,42,44,50,54,59,60,61,103,108,111,112,124,137,138,139,140,142,143,144,145,149,150,152,153,154,155,156,157,158,159,160,161,162,163,164,169,170,171,172,173,175,176,177,178,181,182,184,186,187,188,192,197,237,241
33,45,54,55,56,68,70,71,93,98,102,108,119,121,131,133,134,194,391,511,661,681,694,699,743,798,805,850,852,856,896,927,950,952,962,978,993,997
1,10,11,13,14,18,19,23,24,29,30,33,39,47,48,64,116,117,118,120,121,122,127,129,131,134,140,144,150,151,152,153,154,155,156,159,160,161
4,30,31,35,46,47,57,65,77,80,82,83,87,88,94,96,100,106,109,110,112,114,115,116,117,126,128,130,134,212,269,312,336,367,431,509,608,613,627,646,693,778,811,846,867,889,895,900,918,950,986,987,990,996,999,1004,1007,1010,1012
3,5,6,8,10,16,18,22,23,24,28,31,32,36,38,45,47,48,49,50,57,58,59,62,65,66,67,70,71,73,75,76,77,78,81,82,84,85,86,87,88,91,94,100,105,109,111,114,115,116,119,120,123,125,127,128,132,134,137
11,12,13,19,21,36,42,43,52,56,60,71,73,78,80,81,95,96,98,99,104,117,122,124,125,128,129,130,131,133,215,328,463,518,574,658,721,762,788,789,800,810,852,860,867,917,978,991,1005,1009,1012
1,3,4,5,10,11,26,28,29,32,35,37,38,42,43,44,46,47,48,52,53,74,75,96,98,99,103,107,112,118,120,121,122,123,127,129,132,133,134,138,159,162,163,164,165,166,167,169,173,177,181
28,34,41,46,54,65,69,74,75,86,90,105,106,110,112,114,115,123,126,132,168,486,548,613,646,778,782,871,873,923,928,945,953,955,987,989,1004,1011
1,2,3,17,18,19,20,22,24,36,48,49,69,70,71,72,79,80,92,95,96,97,98,99,104,105,108,109,110,112,113,114,119,121,124,125,144,145
13,54,58,60,61,64,70,71,94,99,102,104,108,110,116,125,131,133,134,140,219,442,511,568,613,819,850,901,917,952,956,961,962,976,985,993,996,1009
3,4,5,10,11,13,20,28,32,37,38,41,46,47,48,49,50,54,55,56,57,58,59,60,61,62,66,70,74,79,82,84,85,87,88,92,94,96
4,24,31,35,46,62,74,80,82,86,87,88,94,96,106,107,110,112,114,115,116,126,132,268,431,509,613,647,762,867,875,887,900,923,937,953,955,975,987,989,996,999,1004,1007
2,3,6,7,8,10,11,14,18,19,21,23,24,30,34,35,36,37,39,79,82,91,93,94,96,97,98,99,100,103,104,105,106,107,108,112,113,114,116,117,118,119,123,127
1,14,27,54,56,60,61,67,69,70,72,78,80,87,90,91,93,94,96,98,99,103,112,113,123,125,130,131,133,134,192,219,336,483,582,592,607,662,679,795,805,848,852,874,887,906,913,919,940,941,950,952,956,967,978,985,990,991,996,997,1009,1012
1,2,13,14,16,17,18,19,20,21,23,31,33,35,37,39,40,43,47,52,54,58,64,75,78,84,87,94,104,106,108,109,110,111,122,123,124,126,128,131,132,133,135,136,137,140,141,149,160,162,164,165,166,167,168,169,172,173,174,178,179,182
10,17,19,22,30,47,56,57,66,69,75,80,82,87,90,96,100,103,105,107,110,111,116,117,132,134,153,174,212,217,253,262,310,311,349,350,422,427,508,520,531,564,571,616,627,631,647,668,690,691,702,705,731,747,793,811,818,836,837,867,871,874,879,887,888,889,898,908,918,950,971,973,984,994,1007,1010
3,4,7,8,9,11,13,14,15,18,19,20,23,27,36,37,38,39,55,57,58,62,65,69,74,75,76,78,79,82,83,85,88,89,90,91,94,96,97,102,103,105,106,108,109,112,114,116,117,118,119,120,122,123,124,126,128,129,130,131,132,133,134,135,137,138,139,140,141,142,143,146,147,148,151,152
0,7,18,69,78,79,86,95,98,116,124,125,129,131,133,134,424,616,736,919,946,968,972,973,978,997,1005
2,5,7,8,9,12,13,16,31,35,36,40,41,42,44,188,190,191,193,194,197,199,202,203,208,218,219
28,35,51,57,58,65,69,77,80,82,85,87,88,90,91,100,105,107,115,116,258,292,312,362,470,569,608,616,702,772,778,783,802,806,818,828,871,878,899,900,941,975,994,999,1001,1007,1010
1,2,3,4,6,10,11,12,13,15,25,27,39,43,45,63,64,67,68,71,72,74,75,76,77,80,81,82,83,84,85,86,87,89,92,93,94,95,96,101,102,103,104,105,115,117,134
3,4,14,19,30,35,54,56,57,58,62,64,65,70,77,80,82,85,87,88,90,91,94,96,98,100,112,116,120,128,131,269,362,384,425,431,569,608,610,635,637,649,763,772,778,783,818,828,843,849,867,900,904,914,952,960,978,980,990,997,999,1001,1007,1010
2,4,8,10,11,12,13,15,17,24,25,26,27,29,39,40,42,59,62,73,74,76,77,79,82,125,126,127,128,131,133,134,135,136,138,140,141,142,143,144,145,147,153,157,158,159,160,164,165,167,169,172,173,175,177,178,180,181,182,183,184,197,199,242
14,30,35,40,56,59,64,69,79,87,94,105,110,111,115,117,128,130,132,134,203,215,262,310,356,482,564,613,627,692,702,705,793,811,847,871,874,941,950,951,959,996,999,1012
1,5,6,10,18,19,20,21,24,32,33,37,38,39,40,41,42,43,51,52,53,54,55,56,57,58,59,60,61,62,63,65,66,67,69,71,72,74,75,76,77,78,79,80
26,34,46,47,94,112,115,117,126,132,134,433,646,776,811,846,873,889,923,950,953,987,990,996
14,17,19,21,22,23,43,45,73,78,80,84,86,100,102,104,107,108,113,114,117,126,127,128
5,9,11,12,29,37,44,51,60,61,67,69,73,80,81,93,99,101,102,105,113,129,131,162,244,285,327,430,436,474,538,561,601,616,648,698,786,827,853,866,904,913,933,936,949,967,979,1009
1,2,21,25,26,27,30,31,32,46,56,57,60,61,63,64,65,67,68,69,70,71,72,73,74,75,76,78,79,82,83,85,86,87,88,89,90,91,92,93,94,98,100,117,118,119,133,134
18,45,54,55,61,70,71,78,92,102,104,108,119,124,129,131,133,134,249,287,511,592,671,694,743,798,850,896,919,954,962,968,985,993,1005
1,2,19,21,22,23,31,32,33,34,36,37,38,39,41,42,43,164,165,167,168,169,172,173,175,176,184,201,202,203,204,205,206,207,208
18,43,44,63,74,84,86,91,93,97,98,101,113,119,121,124,125,127,129,131,133,161,194,195,325,573,639,730,771,841,905,921,944,949,968,978,997,1005,1008
2,3,10,20,21,22,23,24,25,26,31,51,52,53,58,65,67,68,71,77,80,81,82,83,84,86,87,88,89,91,93,94,101,108,110,112,115,118,119
5,25,47,63,64,67,78,93,99,102,111,113,117,121,127,131,132,133,203,206,469,560,614,687,723,793,825,841,863,898,905,908,913,919,944,974,1009
1,4,7,11,12,13,21,22,23,27,29,30,33,37,38,52,55,72,73,74,75,76,77,79,81,82,86,89,90,91,92,93,94,102,105,106,107
19,34,35,46,57,58,62,69,74,75,76,86,88,100,106,110,112,115,122,126,132,510,569,613,616,646,782,843,871,873,900,923,928,948,955,987,990,992,999,1004,1010
1,8,15,24,27,28,29,30,33,34,35,40,41,43,63,67,68,76,78,89,92,93,94,98,99,103,104,105,106,113,114,116,117,118,121,122,123,124,131,150,152
13,44,45,52,60,61,64,68,74,93,94,99,102,104,105,108,113,119,120,125,127,130,131,133,218,261,398,403,451,490,506,528,582,595,676,739,751,817,819,883,891,921,949,955,967,979,985,993,996,1008,1009,1012
2,3,6,7,8,10,11,33,37,39,40,41,42,45,46,58,59,61,63,68,70,76,78,79,80,81,83,84,92,95,96,97,109,110,111,112,113,120,121,125,126,127,128,132,133,134,135,136,137,138,139,145
14,17,19,31,35,41,47,62,65,70,75,80,82,85,87,88,93,100,106,107,111,112,114,115,116,122,125,126,130,258,262,312,313,316,460,510,580,610,649,749,770,778,793,806,819,828,887,895,899,900,904,907,908,918,945,960,989,992,994,999,1001,1004,1006,1007,1010,1012
2,5,8,9,18,19,20,22,29,30,31,35,37,47,53,62,63,83,90,97,98,100,109,111,112,113,114,115,116,117,119,120,122,123,126,127,129,130,131,133,135,137,138,139,140,142,144,146,150,151,152,155,156,159,161,162,169,171,175,184,190,192,194,196,216,217
21,50,55,77,90,93,97,99,104,105,125,382,481,593,599,721,743,818,819,888,920,976,1009
2,3,4,5,6,8,9,30,34,35,37,38,39,40,41,43,44,45,46,47,48,51,72
38,73,83,102,104,108,109,113,122,323,817,840,872,920,962
1,3,4,6,7,9,17,18,19,20,21,23,24,25,26
42,63,93,108,119,124,129,133,154,382,458,642,799,944,1000,1008
1,4,5,6,52,53,82,86,87,88,89,94,121,124,125,171
1,11,18,44,60,64,73,76,84,94,97,99,108,111,117,119,125,127,131,132,133,134,290,337,378,423,441,496,592,680,774,789,803,817,826,838,870,885,923,933,936,950,959,968,974,981,982,996,1008,1009
3,10,11,12,13,14,17,19,20,21,22,25,29,45,98,123,124,129,131,147,153,154,157,158,159,161,162,163,164,165,166,167,169,170,173,174,178,179,181,182,184,185,187,188,189,198,202,203,227,230
58,76,77,117,132,763,772,838,959,970,981
3,18,20,37,43,44,45,60,62,66,79
14,15,56,69,71,82,85,90,100,103,105,109,111,114,117,156,356,367,547,569,668,702,793,811,846,849,871,874,889,1001,1006,1007,1010
1,2,3,4,5,6,9,10,24,25,27,28,33,34,35,36,37,38,39,40,41,43,48,49,51,52,55,56,57,59,60,61,75
6,13,18,20,32,33,43,45,60,62,68,72,86,98,99,102,112,116,121,122,126,128,130,131,133,134,139,421,425,450,451,592,637,677,699,805,852,877,906,942,950,968,978,990,997,1009,1012
10,12,13,14,15,16,17,18,20,21,31,34,36,39,44,45,48,50,51,77,106,107,109,112,126,127,128,129,130,131,132,133,134,137,142,144,147,148,150,162,163,164,165,166,168,173,175
19,20,34,39,46,47,57,64,65,66,69,82,88,96,100,102,105,106,107,110,112,115,116,117,122,126,132,163,275,598,608,613,646,733,778,811,825,845,846,859,867,869,871,873,889,899,900,923,964,975,981,983,990,994,1007,1010
1,3,7,69,71,72,73,74,75,77,78,80,86,87,88,90,101,102,113,114,116,117,120,127,129,131,137,139,142,143,144,145,146,148,149,151,152,199,200,201,202,217,218,220,221,222,223,224,227,228,233,234,235,242,244,245
35,47,48,54,62,64,66,69,70,71,80,82,88,90,91,94,105,106,110,115,117,120,126,128,132,136,150,269,310,425,511,544,560,564,613,616,631,691,762,811,848,850,871,889,896,900,901,961,996,999,1004
2,6,7,8,9,10,12,13,14,15,18,19,20,21,25,26,28,29,30,34,35,36,37,41,42,43,44,45,46,47,48,49,50,51,52,53,54,56,57,58,59,60,61,62,63,64,65,66,67,69,70
18,55,73,84,95,99,101,129,131,133,151,410,441,519,792,968,1000,1005,1009
1,2,5,6,7,9,13,29,40,79,82,83,85,88,98,99,112,113,115
1,13,20,70,80,91,96,108,109,134,511,527,756,867,887,904,952,993
1,2,3,4,7,8,9,11,24,25,26,39,40,41,42,43,44,45
80,95,110,111,115,116,131,132,133,164,423,594,712,764,854,870,897,959,973,982
2,3,5,42,46,64,67,108,110,112,113,114,120,121,124,125,129,145,149,158
19,46,47,62,65,74,77,80,82,85,87,88,91,94,96,100,106,109,110,112,114,117,126,132,171,367,610,613,646,693,749,756,778,846,848,849,867,889,900,923,945,981,996,1004,1006,1007,1010
2,3,6,7,9,10,11,12,13,16,17,21,22,25,29,32,36,37,38,41,42,51,52,54,55,56,57,58,59,60,61,62,63,66,67,68,71,72,75,76,77,86,89,92,93,94,97
20,110,116,126,163,875,930
5,98,108,215,220,226,237
91,94,96,106,109,114,120,367,848,937,969,980,996,1004,1006
12,13,15,16,17,55,61,62,64,66,67,73,74,75,112
15,24,31,34,41,62,74,84,91,95,96,103,105,106,107,111,115,117,120,126,132,133,141,357,419,573,731,738,848,873,897,923,955,965,970,975,980,982,987,1004
2,7,9,11,56,82,96,100,103,104,114,116,118,119,120,122,129,130,133,140,145,149,150,155,157,160,161,176,177,179,186,190,204,205,206,207,208,210,212,213
6,18,24,27,43,45,49,60,62,68,83,89,93,94,95,99,102,104,110,113,125,129,130,131,134,308,459,592,681,786,820,842,876,925,931,940,950,956,962,967,968,986,991,996,1005,1009,1012
2,3,11,21,24,25,32,33,38,43,46,48,49,50,51,56,59,60,64,91,94,96,197,199,200,201,202,203,204,205,206,213,214,215,216,225,226,227,229,238,239,242,244,245,247,251,352
13,26,49,60,64,67,68,72,73,86,87,89,94,99,102,103,104,108,110,112,116,122,126,128,130,133,134,140,315,328,332,435,441,459,568,592,635,679,712,715,753,780,876,881,883,895,906,913,916,925,930,950,956,962,976,990,996,1009,1012
1,2,4,6,9,10,12,13,14,17,19,20,25,37,42,52,61,62,73,83,95,118,120,123,141,143,146,147,151,152,153,154,155,157,158,161,163,166,173,176,178,180,181,182,183,185,187,188,190,194,198,201,207,208,212,216,220,226,244
19,24,30,31,35,37,42,47,48,56,57,62,66,69,75,80,82,85,87,90,102,103,106,107,114,115,116,117,122,126,132,136,148,269,357,370,419,422,509,510,523,564,608,631,637,673,686,712,735,780,800,810,811,825,846,851,866,871,874,887,889,898,908,923,928,975,999,1001,1004,1006,1007
1,2,3,5,12,28,33,36,44,45,46,97,98,99,100,101,102,107,110,111,116,121,125,126,129,130,139,145,146,147,152,154,155,158,165,166,170,171,173,174,195,197,198,199,208,209,210,214,215,216,217,218,222,223,226,241,242,245,246,247,248,249,250,252,253,254,260,265,268,271,272
13,15,42,49,54,57,58,59,60,64,70,71,77,80,89,93,94,102,104,105,108,109,110,116,120,122,134,140,156,303,401,435,439,466,514,547,560,568,666,695,725,748,756,762,763,764,772,785,798,840,842,843,850,896,901,903,904,917,925,950,952,962,976,983,993
2,11,12,18,19,20,41,42,44,45,46,49,150,154,155,156,157,158,167,168,170,179,190,191,192,197,198,199,204,211,212,213,214,218,243,244,246,247,281,282,289,290,291,293,295,296,303,312,315,316,317,318,319,320,321,325,326,327,330,331,332,333,334,338,340
5,11,12,18,51,60,74,80,86,88,91,95,96,99,102,112,124,129,130,131,133,193,247,248,328,402,496,553,599,634,684,753,789,825,853,887,900,906,956,968,990,1005,1009,1012
3,4,5,7,9,11,18,19,20,22,28,29,30,39,43,52,92,93,95,99,106,108,109,110,111,115,116,117,118,119,120,124,129,130,131,132,133,134,138,140,144,145,149,151
6,14,17,19,24,35,40,51,56,59,66,72,74,75,82,87,88,90,91,92,94,96,105,106,115,120,122,128,160,180,197,200,269,356,358,381,388,483,486,510,528,663,689,702,704,705,807,828,859,918,922,941,951,955,965,980,996,999,1007
1,3,4,6,7,10,11,16,18,20,21,24,27,33,92,98,104,105,109,110,111,112,115,116,117,137,138,139,140,141,142,144,145,146,147,148,149,150,155,156,157,158,159,161,162,163,165,169,170,172,173,174,176,177,178,191,192,195,253
46,56,62,80,82,87,91,96,105,106,114,115,116,126,132,780,848,937,953,973,987,1004,1006,1007
1,2,3,9,11,12,40,85,87,88,90,93,95,102,103,104,110,126,127,128,130,131,133,135
0,14,31,42,44,46,49,54,56,57,59,60,64,71,72,73,74,80,86,87,91,93,96,99,112,113,117,124,130,131,132,146,149,247,272,289,336,360,376,402,448,461,466,503,592,708,810,848,896,917,941,949,955,956,981,990,1009,1012
2,4,18,19,22,33,34,35,36,38,51,56,57,60,61,63,80,81,82,83,141,142,154,159,160,161,171,179,185,186,187,189,191,195,196,198,199,201,202,209,210,212,213,227,229,231,232,238,239,240,241,244,261,264,269,270,272,278
1,11,18,19,47,55,60,61,63,64,67,73,84,93,94,95,97,98,99,102,111,113,117,121,124,125,127,129,134,185,196,467,469,550,579,581,599,622,643,652,660,662,670,706,737,739,741,751,774,794,804,853,898,908,929,932,950,963,968,970,978,979,981,991,996,997,1000,1005,1009
2,3,4,5,11,12,17,18,19,21,22,23,28,29,30,34,76,81,86,88,98,102,104,115,119,123,131,143,144,153,155,156,158,159,169,189,190,191,192,193,194,195,196,199,203,205,206,207,209,222,226,228,231,232,234,236,237,254,255,256,258,259,260,261,262,265,271,274,279
13,31,54,64,70,71,77,80,86,89,91,92,94,102,104,105,108,110,116,122,124,131,134,219,249,287,401,444,511,560,568,594,623,712,715,756,762,798,850,876,887,896,901,904,917,925,942,950,952,961,962,976,993,1002
2,3,8,9,13,16,17,30,31,34,37,38,40,45,48,49,56,59,64,73,76,77,85,86,87,88,89,90,92,94,96,97,98,99,100,102,105,110,112,114,118,120,137,140,141,142,143,145,148,149,154,155,162,163
4,31,34,46,62,64,65,69,75,76,77,80,82,86,87,88,90,91,96,105,106,107,110,111,115,117,126,130,132,233,268,297,312,362,409,431,455,509,591,613,616,646,650,655,675,702,762,770,778,782,793,796,806,811,818,828,848,867,870,871,873,879,887,899,900,904,918,923,928,937,948,953,960,975,982,984,987,992,994,1004,1007,1012
4,6,8,10,12,13,18,20,23,26,27,34,38,39,40,54,55,60,74,77,90,92,93,95,109,113,124,125,136,137,138,139,141,142,143,146,147,149,151,152,154,155,158,159,160,161,162,163,164,165,166,168,169,170,171,174,175,178,179,180,182,183,184,187,193,196,197,201,204,209,212,213,214,215,216,217,219,220,221,233,237,238
11,19,60,61,64,73,80,84,94,97,99,112,113,122,125,134,196,260,676,753,762,765,895,906,921,936,950,963,979,985,990,996,1009
10,11,34,59,67,69,70,71,85,87,90,125,126,127,129,136,137,138,139,140,141,142,145,150,151,160,167,168,191,193,214,219,222
6,34,46,64,74,77,82,86,88,96,105,106,114,115,116,126,132,370,381,514,867,873,900,923,953,955,987,989,1004,1007
1,2,6,7,8,10,11,25,27,30,32,36,37,50,51,61,62,63,64,65,66,67,68,69,70,71,77,78,81,82
31,34,39,46,47,66,74,80,82,85,86,91,96,105,106,111,112,115,117,120,126,132,212,234,461,509,646,700,762,782,793,796,811,845,846,849,867,869,873,888,889,908,923,953,955,970,980,987,990,1004,1007
4,6,27,33,34,36,40,43,44,45,46,49,50,53,62,63,67,72,77,79,93,100,101,102,103,105,106,108,109,111,112,113,115,117,121,122,123,142,144,145,147,148,153,154,158,160,162,165,166,175,176
19,34,46,51,69,75,80,82,85,86,96,105,106,114,115,117,126,132,268,646,796,811,849,867,871,873,874,889,923,928,953,987,1004,1006,1007
1,5,14,15,16,22,23,24,25,27,28,31,48,63,76,79,114,122,123,127,129,130,131,132,133,137,138,139,144,150,154,164,181,196,197
13,55,72,88,89,91,102,104,108,112,125,129,130,676,819,855,876,899,906,925,962,990,993,1002,1012
2,3,4,5,65,66,73,79,82,84,86,87,226,227,228,229,235,236,237,243,250,251,254,308,447
31,35,58,65,77,100,111,128,763,772,778,982,988,999,1010
1,2,4,5,8,14,17,18,19,20,21,24,25,26,32
3,4,14,20,39,46,65,66,69,74,76,79,80,90,91,103,105,107,109,114,122,126,131,247,277,341,343,378,409,439,630,639,787,848,857,869,874,907,914,972,989,994
3,5,6,8,9,10,11,23,25,26,30,31,32,37,43,45,47,61,77,80,81,83,87,89,90,91,92,94,95,99,112,113,118,120,132,133,134,136,139,140,143,155
0,1,9,14,21,30,54,56,60,67,73,79,80,84,86,89,90,94,98,101,102,108,109,112,119,124,127,129,130,133,215,244,325,327,347,438,485,627,630,634,642,705,715,730,753,757,774,857,861,887,977,978,997,1002,1005,1008,1012
1,3,4,5,6,10,11,16,17,18,19,20,21,22,27,28,29,31,34,35,37,41,42,43,54,62,64,66,70,81,82,83,84,85,88,95,96,98,99,112,113,114,116,118,119,127,129,130,131,132,133,134,135,136,138,148,152
6,14,17,19,25,35,47,51,54,56,57,64,66,69,74,79,80,85,87,88,90,94,100,105,106,114,116,120,122,132,149,200,212,226,230,245,268,269,281,308,356,359,369,401,409,412,422,442,469,510,564,580,600,603,608,610,668,690,700,702,716,756,760,783,802,818,843,849,854,871,874,888,889,898,904,908,918,960,969,980,999,1001,1006,1010
1,3,7,9,10,13,25,37,38,40,50,53,58,61,63,64,71,87,92,93,103,104,110,121,122,126,132,134,136,137,138,139,143,144,152,153,154,156,157,158,159,160,163,164,168,169,170,171,175,177,178,179,181,183,184,185,187,190,194,197,199,200,202,203,205,208,210,212,214,218,221,222,225,228,229,230,232,239,242,243,246,256,257,263
22,59,85,88,100,105,115,542,610,960,1001,1010
2,3,15,16,130,131,133,134,141,142,147,261
18,29,43,78,95,98,102,125,129,131,133,582,736,786,825,852,919,921,931,968,978,991,997,1005
2,3,4,25,27,41,69,81,86,116,124,128,130,131,132,134,154,155,156,158,160,165,177,182
6,20,26,33,41,42,49,64,65,68,80,94,104,109,110,115,116,122,126,128,131,133,134,228,254,268,296,332,433,439,451,568,613,635,636,651,689,792,805,852,881,925,930,942,950,953,987
1,2,9,10,11,12,13,14,15,17,18,20,21,22,24,38,39,70,115,136,137,145,147,154,155,156,160,161,162,163,165,167,168,169,172,176,178,181,188,189,192,193,194,210,212,213,214
63,944
1,2
6,13,15,18,29,45,49,60,62,68,78,84,86,89,94,95,98,99,101,102,108,110,112,115,116,119,121,126,127,129,130,131,133,134,156,176,435,450,451,459,543,592,595,626,681,715,805,817,852,877,883,906,930,950,962,968,978,996,997,1000,1002,1005,1008,1009,1012
1,4,5,8,10,42,43,45,46,88,90,91,96,98,100,105,111,112,113,117,119,121,122,123,124,128,129,130,132,134,180,186,193,195,196,198,200,204,209,210,211,214,215,216,221,222,224,225,228,232,233,234,236,237,240,243,244,245,250,251,252,253,257,258,304
4,14,35,49,51,56,57,58,64,71,77,82,85,91,100,112,114,115,116,126,258,305,431,606,608,610,708,763,772,802,960,990,999,1001,1006,1007,1010
1,2,5,7,9,10,13,52,53,55,72,75,91,93,105,106,107,108,109,110,111,113,114,116,117,118,120,130,143,145,146,147,150,164,165,168,180
11,22,31,60,61,64,67,73,80,91,93,94,97,112,113,116,119,120,122,129,134,194,383,444,472,561,586,662,693,750,753,756,760,762,863,887,895,906,913,936,950,967,979,980,985,990,996
18,20,21,33,69,84,87,98,104,106,107,125,128,173,177,179,180,181,182,183,204,205,207,208,209,210,211,212,213,214,215,216,217,218,219,222,226,242,243,260,281,283,316,317,320,337,342
19,35,47,64,94,105,106,115,134,136,350,598,846,871,889,898,950,960,996,999
2,4,5,7,9,10,11,14,17,18,19,20,21,22,23,24,25,26,28,30
2,4,5,8,9,16,17,24,25,32,34,38,39,40,43,45,46,47,48,49,51,52,53,54,55,56,57,59,60,61,62,63,64,65,68,70,72,73,80,84
6,22,35,54,59,66,70,71,72,79,82,87,88,91,92,94,105,110,117,130,132,134,139,203,292,316,412,533,542,559,603,613,691,783,798,811,888,896,899,900,917,950,961,995,996,999,1007,1012
1,6,13,14,15,18,21,22,23,24,257,258,275,283,284,297,298,299,301,303,304,305,306,307,308,309,311,312,313,314,316,317,319,333,334,336,337,338,339,340,341,342,344,345,357,364,596,597
4,6,14,23,24,25,31,35,53,56,62,64,65,74,79,80,82,84,87,88,94,96,106,109,112,115,116,120,122,126,128,130,132,134,226,239,307,331,342,356,367,409,431,448,455,482,509,536,557,594,607,614,622,678,712,753,762,770,778,780,806,828,844,854,867,875,887,895,900,904,923,941,942,955,960,973,987,999,1004,1007,1012
3,4,6,21,22,25,28,30,31,36,37,38,51,59,61,66,69,70,75,82,83,87,90,94,99,101,128,129,130,138,139,141,142,143,144,145,149,150,151,153,156,157,160,161,162,166,169,170,171,172,173,175,176,182,192,193,194,195,197,198,200,203,204,208,212,213,214,215,218,220,221,222,223,230,232,234,237,239,242,245,247
34,35,37,41,49,54,57,74,75,90,91,95,98,106,107,111,114,117,129,132,203,310,402,466,503,708,793,800,811,812,818,836,842,851,866,871,873,889,923,928,955,970,975,978,999,1004,1005,1006
2,8,28,32,42,47,49,53,54,55,63,64,65,70,71,73,82,87,88,109,110,111,113,114,115,117,119,120,123,124,125,126,127,128,147,149,150,152,156,157,161,162,163,164,170,175,176,185
15,20,22,55,83,86,87,116,119,129,134,354,641,761,813,877,954,973
1,3,6,9,21,23,24,29,30,41,771,783,784,786,792,794,799,804
3,4,15,19,22,28,30,31,46,47,48,49,57,62,63,65,66,69,72,75,80,82,85,86,87,88,90,91,92,94,96,100,105,106,111,114,115,117,122,126,128,130,132,134,174,197,203,205,233,262,268,269,277,282,307,310,312,326,350,358,409,469,509,510,517,531,559,580,610,627,631,646,675,683,691,747,748,749,756,759,762,766,768,770,777,778,779,780,782,783,793,806,811,818,828,846,848,867,871,874,878,887,898,899,900,904,908,918,923,928,935,941,945,950,953,982,987,988,995,996,1001,1004,1006,1007,1010,1012
1,4,8,14,15,16,17,20,85,96,104,105,106,109,110,123,124,136,137,138,160,164,174,179,204,221,224,225,228,230,232,234,235,263,274,303,305,307,308,327,341,344,348,350,351,354,355,356,357,360,361,364,365,366,369,370,372,373,374,375,381,382,384,385,386,389,391,393,396,397,398,411,412,413,414,415,418,420,422,423,425,426,429,430,432,433,434,441,442,443,445,449,450,451,454,457,458,459,462,474,475,479,482,485,490,495,498,503,506,507,515,518,527,529,530,537,543,544,546,548,549,568,593,597,599,600
9,15,25,27,53,57,60,70,71,78,80,98,99,101,103,110,112,113,116,122,123,130,131,133,134,136,138,186,222,316,327,355,430,587,614,698,756,789,875,886,906,930,940,950,956,973,978,990,1009,1011,1012
1,67,68,70,71,72,77,78,79,80,81,83,89,92,93,118,121,122,137,138,240,243,244,246,247,248,249,251,252,253,254,255,257,258,259,260,261,262,270,271,272,275,277,278,279,280,281,283,288,390,393
11,52,54,55,64,80,89,91,102,104,108,111,112,113,125,127,130,207,260,395,410,490,518,565,652,662,739,762,779,863,864,925,932,962,967,974,990,991,993,1002,1012
1,3,4,6,7,8,56,57,98,111,113,117,118,139,140,142,864,865,866,870,871,872,873,874,875,876,877,878,882,886,887,899,903,943,945,946,947,948,950,998,1720
19,49,51,62,66,76,80,85,87,88,100,105,117,120,132,134,171,212,305,353,409,584,691,759,768,802,849,871,900,918,923,941,948,950,959,981,984,1001,1010
1,2,3,4,5,9,10,14,18,22,40,41,71,72,90,91,97,98,99,100,101,103,104,109,110,111,112,113,114,115,116,118,122,123,125,155,156,159,177
0,8,21,30,40,44,56,59,67,79,92,101,102,108,119,121,127,129,253,257,390,405,467,625,627,630,661,713,718,724,730,734,781,794,808,861,868,933,954,977
2,3,5,6,19,20,27,36,39,109,111,112,113,114,119,121,123,124,131,133,136,141,143,149,150,153,154,156,158,224,249,250,252,256,259,262,263,264,265,270
61,985
14,28
1,2,5,11,18,27,33,38,43,44,55,60,67,78,84,93,94,95,98,99,101,102,108,112,119,121,124,125,127,129,131,133,134,280,376,382,391,410,496,498,574,579,593,618,659,665,710,743,752,775,786,789,794,817,819,823,827,841,852,853,858,861,906,913,921,931,949,950,956,968,977,978,991,996,997,1005
1,2,3,4,16,17,19,30,34,41,46,47,49,52,53,66,68,73,89,91,93,97,101,102,107,112,221,241,242,257,300,345,346,347,349,351,352,353,356,357,360,386,393,395,398,399,410,411,412,413,414,421,460,464,467,476,477,479,480,481,482,483,484,485,486,489,496,497,499,511,512,515,518,520,533,548
13,33,45,54,68,70,102,108,134,384,525,694,699,798,896,952,962,993
1,4,5,7,9,10,11,12,62,63,66,67,68,69,70,71,72,73
11,14,31,60,61,64,69,73,91,94,113,120,356,444,874,936,967,979,980,996
3,4,5,24,27,33,34,55,56,62,63,64,65,66,67,70,71,74,75,78
5,8,10,11,12,18,19,27,29,36,37,38,42,43,44,47,50,52,60,64,76,78,94,95,97,98,99,101,102,108,111,117,119,121,124,125,127,129,131,132,133,170,203,294,309,341,397,407,414,436,469,507,517,557,582,587,588,605,619,648,664,665,696,710,723,752,755,779,790,796,799,803,809,816,817,827,835,858,866,870,883,898,908,912,919,923,927,933,940,949,956,958,963,968,970,977,981,982,984,991,996,997,1000,1005
2,3,4,6,7,69,70,76,82,86,89,110,112,126,139,143,144,145,146,148,152,155,156,157,160,186,187,192,195,210,218,280,307,315,367,392,405,431,480,553,556,558,559,560,565,568,572,573,596,597,598,603,604,605,607,608,609,610,611,612,613,614,616,618,625,626,627,629,648,652,653,654,655,658,664,665,675,676,677,679,685,687,688,695,696,697,711,722,727,729,730,739,741,803,821,848,851,853,910,911,912,924,931,948
51,58,69,77,85,88,90,100,132,299,663,690,763,772,874,1001,1010
1,4,6,8,9,10,12,17,19,20,21,23,25,26,28,29,34
1,18,25,27,32,36,43,47,52,60,63,67,71,76,78,81,95,97,98,99,101,108,111,113,117,121,124,125,127,129,131,132,133,172,203,207,224,279,286,338,378,414,417,463,469,498,504,562,582,609,620,645,650,652,653,670,676,687,698,736,774,789,793,816,853,858,861,891,898,912,913,919,921,924,927,929,931,940,944,956,959,968,974,978,981,982,991,997,1005,1009
3,29,36,38,40,41,45,52,54,55,62,79,81,82,102,103,120,122,145,148,153,158,172,174,185,188,243,278,291,294,304,310,312,313,314,316,318,319,320,321,322,323,324,326,327,328,330,334,338,339,340,348,354,356,357,359,360,363,364,370,372,373,376,378,379,382,392,393,399,405,410,430,432,433,439,441,444,446,452,453,456,482,484,487,488,491,497,511,514,516
16,32,35,37,49,79,91,96,98,102,106,107,114,117,131,132,286,305,353,598,621,645,759,781,842,848,851,866,934,945,969,978,981,989,992,994,997,999,1004,1006
3,4,5,22,29,30,31,34,36,39,71,74,110,115,117,118,119,120,122,132,134,135,136,137,138,139,148,156,159,163,178,179,184,191,207,208,209,210,212,226
1,11,18,27,29,36,37,38,42,43,44,60,63,64,76,78,84,86,94,95,97,98,99,101,102,108,111,113,117,119,121,124,125,127,129,131,132,133,244,282,285,324,347,397,423,517,572,584,592,676,710,736,790,810,816,838,851,866,870,877,883,905,919,921,923,931,933,940,944,956,958,959,968,977,978,981,984,991,996,997,1000,1003,1005,1009
1,2,13,17,23,24,41,45,71,77,83,87,135,136,138,139,140,147,149,159,160,177,182,186,199,201,218,226,315,348,349,486,512,516,530,541,592,666,667,675,676,678,680,681,688,700,701,710,713,714,717,721,722,726,727,728,730,735,741,742,743,746,747,749,751,755,761,765,802,805,808,809,820,849,853,854,868,874,876,889,894,895,904,906
13,102,108,134,739,901,962,993
1,9,10,1697,1704,1705,1706,1707
20,28,37,38,49,54,57,76,90,91,96,105,111,114,115,117,122,126,181,275,341,479,612,686,708,710,779,787,790,796,800,842,848,866,878,888,937,942,948,953,969,970,981,1006
1,16,26,41,48,61,69,90,167,178,195,231,240,252,254,283,288,291,292,294,302,305,306,308,312,313,314,344,357,366,369,372,373,382,397,405,412,416,429,430,432,435,443,452
14,47,51,56,61,97,106,122,127,149,356,510,652,802,908,974,985,992
2,3,4,5,29,30,31,32,38,39,40,41,42,43,44,46,70,71"""
if __name__ == "__main__":
sys.exit(main())
|
# -*- coding: utf-8 -*-
INPUTSTREAM_PROTOCOLS = {
'mpd': 'inputstream.adaptive',
'ism': 'inputstream.adaptive',
'hls': 'inputstream.adaptive',
'rtmp': 'inputstream.rtmp'
}
DRM_SCHEMES = {
'widevine': 'widevine',
'com.widevine.alpha': 'widevine'
}
CDM_EXTENSIONS = (
'.so',
'.dll',
'.dylib'
)
ARCH_MAP = {
'x86_64': 'x86_64',
'AMD64': 'x86_64',
'x86': 'x86',
'i386': 'x86',
'i686': 'x86',
'armv7': 'arm',
'armv8': 'arm',
'aarch64': 'arm64',
'aarch64_be': 'arm64'
}
WIDEVINE_SUPPORTED_ARCHS = [
'x86_64',
'x86',
'arm',
'arm64'
]
WIDEVINE_ARCH_MAP_X86 = {
'x86_64': 'x64',
'x86': 'ia32'
}
WIDEVINE_OS_MAP = {
'Linux': 'linux',
'Windows': 'win',
'Darwin': 'mac'
}
WIDEVINE_SUPPORTED_OS = [
'Android',
'Linux',
'Windows',
'Darwin'
]
WIDEVINE_MINIMUM_KODI_VERSION = {
'Android': '18.0',
'Windows': '17.4',
'Linux': '17.4',
'Darwin': '17.4'
}
WIDEVINE_CURRENT_VERSION_URL = 'https://dl.google.com/widevine-cdm/current.txt'
WIDEVINE_VERSIONS_URL = 'https://dl.google.com/widevine-cdm/versions.txt'
WIDEVINE_DOWNLOAD_URL = 'https://dl.google.com/widevine-cdm/{0}-{1}-{2}.zip'
WIDEVINE_LICENSE_FILE = 'LICENSE.txt'
WIDEVINE_MANIFEST_FILE = 'manifest.json'
WIDEVINE_CONFIG_NAME = 'widevine_config.json'
WIDEVINE_UPDATE_INTERVAL_DAYS = 30
WIDEVINE_LEGACY_VERSION = '1.4.8.903'
CHROMEOS_RECOVERY_URL = 'https://dl.google.com/dl/edgedl/chromeos/recovery/recovery.conf'
CHROMEOS_RECOVERY_URL_LEGACY = 'https://gist.githubusercontent.com/emilsvennesson/5e74181c9a833129ad0bb03ccb41d81f/raw/8d162568277caaa31b54f4773e75a20514856825/recovery.conf'
CHROMEOS_ARM_HWID = 'SKATE'  # SPRING
CHROMEOS_BLOCK_SIZE = 512
HLS_MINIMUM_IA_VERSION = '2.0.10'
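# Illustrative note (not part of the original add-on): the download URL template
# above is filled with a CDM version, an OS tag from WIDEVINE_OS_MAP and an
# architecture tag (x86 platforms are mapped through WIDEVINE_ARCH_MAP_X86).
# Assuming a hypothetical CDM version '4.10.2209.0' on 64-bit Linux:
#   WIDEVINE_DOWNLOAD_URL.format('4.10.2209.0', WIDEVINE_OS_MAP['Linux'],
#                                WIDEVINE_ARCH_MAP_X86['x86_64'])
#   -> 'https://dl.google.com/widevine-cdm/4.10.2209.0-linux-x64.zip'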
|
import os
from urllib.parse import urlsplit
import click
import requests
import config as cfg
def read_and_save(res):
"""Reads content of the accessed file and saves it locally."""
fname = os.path.split(urlsplit(res.url).path)[-1]
fpath = os.path.join(cfg.OUTPUT_DIR, fname)
with open(fpath, 'wb') as f:
for chunk in res.iter_content(cfg.CHUNK):
f.write(chunk)
@click.command()
@click.argument('urls', nargs=-1)
def run(urls):
"""Handles program flow."""
click.echo('Starting...')
if not urls:
click.echo('No URLs found. Please provide at least 1 URL.')
else:
n_files = len(urls)
skipped = 0
downloaded = 0
errors = 0
click.echo('Total files to be downloaded: {}'.format(n_files))
for url in urls:
click.echo('Processing file {}'.format(url))
try:
click.echo(' - Accessing file')
res = requests.get(url, stream=True)
s_code = res.status_code
if s_code == 200:
click.echo(' - Reading file...')
read_and_save(res)
click.echo(' - File saved.')
downloaded += 1
else:
click.echo(' - Unable to access: {}'.format(s_code))
skipped += 1
except requests.exceptions.RequestException as e:
click.echo(e)
errors += 1
click.echo('Finished.')
click.echo('Files successfully downloaded: {}'.format(downloaded))
click.echo('Files skipped: {}'.format(skipped))
click.echo('Errors: {}'.format(errors))
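# Conventional entry point (assumed; not shown in the original snippet) so the
# downloader can be invoked directly, e.g. `python downloader.py URL [URL ...]`.
if __name__ == '__main__':
    run()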
|
"""A rule that copies source files to the output tree.
This rule uses a Bash command (cp) on Linux/macOS/non-Windows, and a cmd.exe
command (copy) on Windows (no Bash is required).
Originally authored in rules_nodejs
https://github.com/bazelbuild/rules_nodejs/blob/8b5d27400db51e7027fe95ae413eeabea4856f8e/internal/common/copy_to_bin.bzl
"""
load(
"//lib/private:copy_to_bin.bzl",
_copy_file_to_bin_action = "copy_file_to_bin_action",
_copy_files_to_bin_actions = "copy_files_to_bin_actions",
_copy_to_bin = "copy_to_bin",
)
copy_file_to_bin_action = _copy_file_to_bin_action
copy_files_to_bin_actions = _copy_files_to_bin_actions
copy_to_bin = _copy_to_bin
|
import asyncio, logging, unittest
from rx_scheduler import Scheduler
from functools import partial
class RxSchedulerTest(unittest.IsolatedAsyncioTestCase):
def setUp(self):
logging.basicConfig(level=logging.DEBUG)
async def test_sync(self):
loop = asyncio.get_event_loop()
scheduler = Scheduler(loop)
self.assertRaisesRegex(
AttributeError,
'There is no "syncTask" task defined',
lambda: scheduler.runTask('syncTask')
)
self.assertRaisesRegex(
AttributeError,
'There is no "syncTask" task defined',
lambda: scheduler.stopTask('syncTask')
)
self.assertRaisesRegex(
AttributeError,
'There is no "syncTask" task defined',
lambda: scheduler.delTask('syncTask')
)
scheduler.addTask(
lambda i: logging.debug('sync task output'),
name = 'syncTask',
interval = 1
)
self.assertTrue('syncTask' in scheduler.taskList())
self.assertRaisesRegex(
AttributeError,
'Task "syncTask" is defined already',
lambda: scheduler.addTask(
lambda i: logging.debug('sync task output'),
name = 'syncTask',
interval = 1
)
)
self.assertRaisesRegex(
AttributeError,
'Task "syncTask" is not running',
lambda: scheduler.stopTask('syncTask')
)
scheduler.runTask('syncTask')
self.assertRaisesRegex(
AttributeError,
'Task "syncTask" is running',
lambda: scheduler.runTask('syncTask')
)
await asyncio.sleep(5)
scheduler.stopTask('syncTask')
scheduler.delTask('syncTask')
self.assertFalse('syncTask' in scheduler.taskList())
async def test_async(self):
async def async_fn(interval):
await asyncio.sleep(interval)
logging.debug('async task output')
loop = asyncio.get_event_loop()
scheduler = Scheduler(loop)
self.assertRaisesRegex(
AttributeError,
'There is no "asyncTask" task defined',
lambda: scheduler.runTask('asyncTask')
)
self.assertRaisesRegex(
AttributeError,
'There is no "asyncTask" task defined',
lambda: scheduler.stopTask('asyncTask')
)
self.assertRaisesRegex(
AttributeError,
'There is no "asyncTask" task defined',
lambda: scheduler.delTask('asyncTask')
)
scheduler.addTask(
partial(async_fn, 0.5),
name = 'asyncTask',
interval = 1
)
self.assertTrue('asyncTask' in scheduler.taskList())
self.assertRaisesRegex(
AttributeError,
'Task "asyncTask" is defined already',
lambda: scheduler.addTask(
partial(async_fn, 0.5),
name = 'asyncTask',
interval = 1
)
)
self.assertRaisesRegex(
AttributeError,
'Task "asyncTask" is not running',
lambda: scheduler.stopTask('asyncTask')
)
scheduler.runTask('asyncTask')
self.assertRaisesRegex(
AttributeError,
'Task "asyncTask" is running',
lambda: scheduler.runTask('asyncTask')
)
await asyncio.sleep(5)
scheduler.stopTask('asyncTask')
scheduler.delTask('asyncTask')
self.assertFalse('asyncTask' in scheduler.taskList())
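# Standard unittest entry point (assumed; not in the original snippet) so the
# test suite can also be run directly with `python <this file>`.
if __name__ == '__main__':
    unittest.main()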
|
from abc import ABC, abstractmethod
from .initializers import Constant
from .optimizers import MomentumFeedForward
from .constants import EPSILON
import torch
class Layer(ABC):
@abstractmethod
def init_weights(self, num_input, optimizer, initializer):
pass
@abstractmethod
def feed_forward(self, input, is_predict):
pass
@abstractmethod
def back_prop(self, learning_rate):
pass
class Dense(Layer):
def __init__(self, num_output):
self._num_output = num_output
def init_weights(self, num_input, optimizer, initializer):
# init weights
self._weights = initializer(num_input, self._num_output)
self._bias = initializer(1, self._num_output)
# init optimizer
self._optimizer_w = optimizer.generate_optimizer(self._weights.shape)
self._optimizer_b = optimizer.generate_optimizer(self._bias.shape)
def feed_forward(self, input, is_predict):
return input.mm(self._weights) + self._bias
def back_prop(self, learning_rate):
with torch.no_grad():
self._weights -= learning_rate * \
self._optimizer_w.get_velocity(self._weights.grad)
self._bias -= learning_rate * \
self._optimizer_b.get_velocity(self._bias.grad)
self._weights.grad.zero_()
self._bias.grad.zero_()
@property
def num_output(self):
return self._num_output
class BatchNorm(Layer):
"""
    Note: in experiments this batch-norm layer performed worse than a plain
    Dense layer; the reason is not yet understood.
    Reference for the derivation of the batch-norm backward pass:
https://kratzert.github.io/2016/02/12/understanding-the-gradient-flow-through-the-batch-normalization-layer.html
"""
def init_weights(self, num_output, optimizer, initializer):
self._batch_norm_G = initializer(1, num_output)
self._batch_norm_B = initializer(1, num_output)
# init optimizer
self._optimizer_G = optimizer.generate_optimizer((1, num_output))
self._optimizer_B = optimizer.generate_optimizer((1, num_output))
self._optimizer_mean = MomentumFeedForward().generate_optimizer(
(1, num_output))
self._optimizer_variance = MomentumFeedForward().generate_optimizer(
(1, num_output))
def feed_forward(self, z, is_predict):
current_mean = z.mean(dim=0)
mean = self._optimizer_mean._velocity if is_predict else self._optimizer_mean.get_velocity(
current_mean)
diff_mean = z-mean
current_variance = torch.pow(diff_mean, 2).mean(dim=0)
variance = self._optimizer_variance._velocity if is_predict else self._optimizer_variance.get_velocity(
current_variance)
z_norm = diff_mean / torch.sqrt(variance + EPSILON)
output = self._batch_norm_G * z_norm + self._batch_norm_B
return output
def back_prop(self, learning_rate):
with torch.no_grad():
self._batch_norm_G -= learning_rate * \
self._optimizer_G.get_velocity(self._batch_norm_G.grad)
            # the shift (beta) parameter is updated with its own momentum state
            self._batch_norm_B -= learning_rate * \
                self._optimizer_B.get_velocity(self._batch_norm_B.grad)
self._batch_norm_G.grad.zero_()
self._batch_norm_B.grad.zero_()
class Conv2D(Layer):
    """Placeholder for a 2D convolution layer; not yet implemented."""
def __init__(self, filter_size, padding, stride, channel):
pass
def init_weights(self, num_input, optimizer, initializer):
pass
def feed_forward(self, input, is_predict):
pass
def back_prop(self, learning_rate):
pass
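# Illustrative usage sketch only. It assumes the companion modules follow the
# interfaces used above: an initializer is a callable (rows, cols) -> tensor
# that requires grad, and an optimizer exposes generate_optimizer(shape)
# returning an object with get_velocity(grad).
#
#   layer = Dense(num_output=8)
#   layer.init_weights(num_input=4, optimizer=MomentumFeedForward(),
#                      initializer=lambda r, c: torch.randn(r, c, requires_grad=True))
#   out = layer.feed_forward(torch.randn(16, 4), is_predict=False)  # shape (16, 8)
#   out.sum().backward()
#   layer.back_prop(learning_rate=0.01)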
|
import common
import exporter
import importer
import manager
import antler
import os
import sys
import time
from flask_api import FlaskAPI
from flask import request
from flask_api import status
import json
from datetime import datetime
import utils
import threading
free_data = {}
insert_queue = []
app = FlaskAPI(__name__)
def _do_insert():
global insert_queue
global free_data
print('Thread starting')
while True:
while len(insert_queue) > 0:
obj = insert_queue.pop(0)
free_data, ident = importer.get_writable(obj['insert_string'], free_data, obj['meta'])
with open('{}/data/free_data.json'.format(common.CERES_HOME), 'w') as f:
json.dump(free_data, f)
print('Inserted record with id {}'.format(ident))
time.sleep(0.5)
def _init_free():
global free_data
if os.path.exists('{}/data/free_data.json'.format(common.CERES_HOME)):
with open('{}/data/free_data.json'.format(common.CERES_HOME)) as f:
free_data = json.load(f)
else:
for i in range(0, common.MAX_GROUPS):
free_data[i] = {j:[{"start": 0, "end": 65536}] for j in range(0, common.MAX_BLOCKS)}
@app.route('/insert', methods=['POST'])
def post_message():
global free_data
data = request.get_json()
start = time.time()
idents = []
for message in data['messages']:
insert_string = ''
meta = []
for k in common.SCHEMA['order']:
if common.SCHEMA['fields'][k] == 'str':
insert_string += '{},'.format(message[k].replace(',', '<COMMA>'))
else:
insert_string += '{},'.format(message[k])
for k in common.SCHEMA['meta']:
meta.append('{}/{}'.format(k, message[k]))
insert_string = insert_string[:-1]
# print(message)
# print(insert_string)
insert_queue.append({'insert_string': insert_string, 'meta': meta})
# free_data, ident = importer.get_writable(insert_string, free_data, meta)
# print(ident)
# idents.append(ident)
# with open('{}/data/free_data.json'.format(common.CERES_HOME), 'w') as f:
# json.dump(free_data, f)
end = time.time()
return {'status': status.HTTP_200_OK, "ids": []}
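# Illustrative request shape only (the actual field names come from common.SCHEMA,
# which is loaded from the config file and not shown here): /insert expects a JSON
# body such as {"messages": [{"<field>": "<value>", ...}, ...]}. The returned
# "ids" list is always empty because the write itself is performed later by the
# background _do_insert thread.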
@app.route('/query', methods=['POST'])
def get_results():
global free_data
data = request.get_json()
out = []
length = 0
query = data['query']
start = time.time()
idents, mode, error = antler.parse(query, common.SCHEMA['meta'])
if error == '':
if mode == 'select':
length = len(idents)
for i in idents:
data = exporter.get_data(i)
out.append(utils.map_dict(data, i))
elif mode == 'delete':
length = len(idents)
for i in idents:
free_data = manager.delete_data(i, free_data)
free_data = manager.merge_free(free_data)
with open('{}/data/free_data.json'.format(common.CERES_HOME), 'w') as f:
json.dump(free_data, f)
end = time.time()
print('{}'.format(end - start))
return {'status': status.HTTP_200_OK, "data": out, "error": error, "length": len(out)}
@app.route('/index', methods=['POST'])
def get_index_values():
print('INDEX')
data = request.get_json()
out = []
key = data['key']
out = [x for x in os.listdir('{}/indices/{}'.format(common.CERES_HOME, key))]
return {'status': status.HTTP_200_OK, "data": out}
def _do_run():
global app
app.run(host='0.0.0.0', port=common.SERVER_PORT, debug=common.SERVER_DEBUG)
def _do_test():
global free_data
out = []
counter = 0
with open('test/logs.txt') as f:
logs = f.read().split('\n')
for l in logs:
start = time.time()
free_data, ident = importer.get_writable(l, free_data)
end = time.time()
print('{} : {}'.format(end - start, ident))
out.append('{},{}'.format(counter, end - start))
with open('timing.csv', 'w') as f:
f.write('\n'.join(out))
_do_run()
if __name__ == '__main__':
config_path = os.getenv('CERES_CONFIG_PATH')
if not config_path:
config_path = 'ceres_home/config/config.ini'
common.read_config(config_path)
common.init_schema()
_init_free()
insert_thread = threading.Thread(target=_do_insert)
insert_thread.start()
if sys.argv[1] == 'test':
print('testing')
_do_test()
elif sys.argv[1] == 'run':
print('running')
_do_run()
|
from utils import *
def matrix_subtraction(a, b):
"""matrix_subtraction
    Element-wise matrix subtraction: c[i][j] = a[i][j] - b[i][j] for all i, j
    :param a: first matrix
    :param b: second matrix
    :return: the element-wise difference a - b
"""
c = [[0 for y in range(len(a[0]))] for x in range(len(a))]
for i in range(len(a)):
for j in range(len(a[0])):
c[i][j] = a[i][j] - b[i][j]
return c
def multiply_scalar(x, scalar):
"""multiply_scalar
    Multiply every element of a matrix by a scalar (note: x is modified in place)
    :param x: matrix
    :param scalar: scalar to multiply each element by
    :return: the same matrix x after scaling
"""
for i in range(len(x)):
for j in range(len(x[0])):
x[i][j] = scalar * x[i][j]
return x
def newton(A, b, n):
"""newton
    Solve the least-squares line-fitting problem with a single Newton step;
    because the objective is quadratic, one step from the zero vector reaches the optimum.
    :param A: design matrix of powers of x (one row per data point)
    :param b: column vector of observed y values
    :param n: number of polynomial basis terms
    :return: estimated coefficients of the fitted polynomial
"""
x = [[0.0] for y in range(n)]
AT = transpose(A)
ATb = matrix_multiplication(AT, b)
ATA = matrix_multiplication(AT, A)
# gradient: 2A.TAx-2A.Tb
ATAx = matrix_multiplication(ATA, x)
gradient = matrix_subtraction(multiply_scalar(ATAx, 2), multiply_scalar(ATb, 2))
# Hessian: (2A.TA)^-1
hessian = multiply_scalar(ATA, 2)
upper, lower = LU_decomposition(hessian)
hessian_inv = inverse(upper, lower)
x = matrix_subtraction(x, matrix_multiplication(hessian_inv, gradient))
return x
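# Minimal usage sketch (assumes `utils` provides the helpers referenced above:
# transpose, matrix_multiplication, LU_decomposition and inverse). Fits a
# degree-1 polynomial y = c0 + c1*x to three sample points.
if __name__ == '__main__':
    xs = [1.0, 2.0, 3.0]
    ys = [[2.1], [3.9], [6.2]]
    n_bases = 2  # constant term and x term
    A = [[x ** j for j in range(n_bases)] for x in xs]
    print(newton(A, ys, n_bases))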
|
# Copyright (c) 2015, Riverbank Computing Limited
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# This is v1.4 of this boilerplate.
from distutils import sysconfig
import glob
import os
import optparse
import sys
###############################################################################
# You shouldn't need to modify anything above this line.
###############################################################################
# This must be kept in sync with Python/configure-old.py, qscintilla.pro,
# example-Qt4Qt5/application.pro and designer-Qt4Qt5/designer.pro.
QSCI_API_MAJOR = 12
class ModuleConfiguration(object):
""" This class encapsulates all the module specific information needed by
the rest of this script to implement a configure.py script for modules that
build on top of PyQt. Functions implemented by the rest of this script
that begin with an underscore are considered internal and shouldn't be
called from here.
"""
# The name of the module as it would be used in an import statement.
name = 'Qsci'
# The descriptive name of the module. This is used in help text and error
# messages.
descriptive_name = "QScintilla"
# The version of the module as a string. Set it to None if you don't
# provide version information.
version = '2.9.1'
# Set if a configuration script is provided that handles versions of PyQt4
# prior to v4.10 (i.e. versions where the pyqtconfig.py module is
# available). If provided the script must be called configure-old.py and
# be in the same directory as this script.
legacy_configuration_script = True
# The minimum version of SIP that is required. This should be a
# dot-separated string of two or three integers (e.g. '1.0', '4.10.3'). If
# it is None or an empty string then the version is not checked.
minimum_sip_version = '4.16'
# Set if support for C++ exceptions can be disabled.
no_exceptions = True
# Set if the module supports redefining 'protected' as 'public'.
protected_is_public_is_supported = True
# Set if the module supports PyQt4.
pyqt4_is_supported = True
# Set if the module supports PyQt5.
pyqt5_is_supported = True
# Set if the PyQt5 support is the default. It is ignored unless both
# 'pyqt4_is_supported' and 'pyqt5_is_supported' are set.
pyqt5_is_default = False
# The name (without the .api extension) of the name of the QScintilla API
# file to be generated. If it is None or an empty string then an API file
# is not generated.
qscintilla_api_file = 'QScintilla2'
# The email address that will be included when an error in the script is
# detected. Leave it blank if you don't want to include an address.
support_email_address = 'support@riverbankcomputing.com'
# Set if the user can provide a configuration file. It is normally only
# used if cross-compilation is supported.
user_configuration_file_is_supported = True
# Set if the user is allowed to pass PyQt sip flags on the command line.
# It is normally only used if cross-compilation is supported. It is
# ignored unless at least one of 'pyqt4_is_supported' or
# 'pyqt5_is_supported' is set.
user_pyqt_sip_flags_is_supported = True
def init_target_configuration(self, target_configuration):
""" Perform any module specific initialisation of the target
        configuration. Typically this is the initialisation of module
specific attributes. To avoid name clashes attributes should be given
a module specific prefix. target_configuration is the target
configuration.
"""
target_configuration.qsci_version = None
target_configuration.qsci_inc_dir = None
target_configuration.qsci_lib_dir = None
target_configuration.qsci_is_dll = (target_configuration.py_platform == 'win32')
target_configuration.qsci_sip_dir = None
def init_optparser(self, optparser, target_configuration):
""" Perform any module specific initialisation of the command line
option parser. To avoid name clashes destination attributes should be
given a module specific prefix. optparser is the option parser.
target_configuration is the target configuration.
"""
optparser.add_option('--qsci-incdir', '-n', dest='qsci_inc_dir',
type='string', default=None, action='callback',
callback=optparser_store_abspath_dir, metavar="DIR",
help="the directory containing the QScintilla Qsci header "
"file directory is DIR [default: QT_INSTALL_HEADERS]")
optparser.add_option('--qsci-libdir', '-o', dest='qsci_lib_dir',
type='string', default=None, action='callback',
callback=optparser_store_abspath_dir, metavar="DIR",
help="the directory containing the QScintilla library is DIR "
"[default: QT_INSTALL_LIBS]")
optparser.add_option('--no-dll', '-s', dest='qsci_is_dll',
default=None, action='store_false',
help="QScintilla is a static library and not a Windows DLL")
optparser.add_option('--qsci-sipdir', '-v', dest='qsci_sip_dir',
type='string', default=None, action='callback',
callback=optparser_store_abspath_dir, metavar="DIR",
help="the QScintilla .sip files will be installed in DIR "
"[default: %s]" % target_configuration.pyqt_sip_dir)
optparser.add_option("--no-sip-files", action="store_true",
default=False, dest="qsci_no_sip_files",
help="disable the installation of the .sip files "
"[default: enabled]")
def apply_options(self, target_configuration, options):
""" Apply the module specific command line options to the target
configuration. target_configuration is the target configuration.
options are the parsed options.
"""
if options.qsci_inc_dir is not None:
target_configuration.qsci_inc_dir = options.qsci_inc_dir
if options.qsci_lib_dir is not None:
target_configuration.qsci_lib_dir = options.qsci_lib_dir
if options.qsci_is_dll is not None:
target_configuration.qsci_is_dll = options.qsci_is_dll
if options.qsci_sip_dir is not None:
target_configuration.qsci_sip_dir = options.qsci_sip_dir
else:
target_configuration.qsci_sip_dir = target_configuration.pyqt_sip_dir
if options.qsci_no_sip_files:
target_configuration.qsci_sip_dir = ''
def check_module(self, target_configuration):
""" Perform any module specific checks now that the target
configuration is complete. target_configuration is the target
configuration.
"""
# Find the QScintilla header files.
inc_dir = target_configuration.qsci_inc_dir
if inc_dir is None:
inc_dir = target_configuration.qt_inc_dir
sciglobal = os.path.join(inc_dir, 'Qsci', 'qsciglobal.h')
if not os.access(sciglobal, os.F_OK):
error(
"Qsci/qsciglobal.h could not be found in %s. If "
"QScintilla is installed then use the --qsci-incdir "
"argument to explicitly specify the correct "
"directory." % inc_dir)
# Get the QScintilla version string.
qsci_version = read_define(sciglobal, 'QSCINTILLA_VERSION_STR')
if qsci_version is None:
error(
"The QScintilla version number could not be determined by "
"reading %s." % sciglobal)
lib_dir = target_configuration.qsci_lib_dir
if lib_dir is None:
lib_dir = target_configuration.qt_lib_dir
if not glob.glob(os.path.join(lib_dir, '*qscintilla2*')):
error(
"The QScintilla library could not be found in %s. If "
"QScintilla is installed then use the --qsci-libdir "
"argument to explicitly specify the correct "
"directory." % lib_dir)
# Because we include the Python bindings with the C++ code we can
# reasonably force the same version to be used and not bother about
# versioning in the .sip files.
if qsci_version != self.version:
error(
"QScintilla %s is being used but the Python bindings %s "
"are being built. Please use matching "
"versions." % (qsci_version, self.version))
target_configuration.qsci_version = qsci_version
def inform_user(self, target_configuration):
""" Inform the user about module specific configuration information.
target_configuration is the target configuration.
"""
inform("QScintilla %s is being used." %
target_configuration.qsci_version)
if target_configuration.qsci_sip_dir != '':
inform("The QScintilla .sip files will be installed in %s." %
target_configuration.qsci_sip_dir)
def pre_code_generation(self, target_config):
""" Perform any module specific initialisation prior to generating the
code. target_config is the target configuration.
"""
# Nothing to do.
def get_sip_flags(self, target_configuration):
""" Return the list of module-specific flags to pass to SIP.
target_configuration is the target configuration.
"""
# Nothing to do.
return []
def get_sip_file(self, target_configuration):
""" Return the name of the module's .sip file. target_configuration is
the target configuration.
"""
return 'sip/qscimod5.sip' if target_configuration.pyqt_package == 'PyQt5' else 'sip/qscimod4.sip'
def get_sip_installs(self, target_configuration):
""" Return a tuple of the installation directory of the module's .sip
files and a sequence of the names of each of the .sip files relative to
the directory containing this configuration script. None is returned
if the module's .sip files are not to be installed.
target_configuration is the target configuration.
"""
if target_configuration.qsci_sip_dir == '':
return None
path = os.path.join(target_configuration.qsci_sip_dir, 'Qsci')
files = glob.glob('sip/*.sip')
return path, files
def get_qmake_configuration(self, target_configuration):
""" Return a dict of qmake configuration values for CONFIG, DEFINES,
INCLUDEPATH, LIBS and QT. If value names (i.e. dict keys) have either
'Qt4' or 'Qt5' prefixes then they are specific to the corresponding
version of Qt. target_configuration is the target configuration.
"""
qmake = {'CONFIG': 'qscintilla2'}
if target_configuration.qsci_inc_dir is not None:
qmake['INCLUDEPATH'] = quote(target_configuration.qsci_inc_dir)
if target_configuration.qsci_lib_dir is not None:
qmake['LIBS'] = '-L%s' % quote(target_configuration.qsci_lib_dir)
if target_configuration.qsci_is_dll:
qmake['DEFINES'] = 'QSCINTILLA_DLL'
return qmake
def get_mac_wrapped_library_file(self, target_configuration):
""" Return the full pathname of the file that implements the library
being wrapped by the module as it would be called on OS/X so that the
module will reference it explicitly without DYLD_LIBRARY_PATH being
set. If it is None or an empty string then the default is used.
target_configuration is the target configuration.
"""
lib_dir = target_configuration.qsci_lib_dir
if lib_dir is None:
lib_dir = target_configuration.qt_lib_dir
return os.path.join(lib_dir,
'libqscintilla2.%s.dylib' % QSCI_API_MAJOR)
###############################################################################
# You shouldn't need to modify anything below this line.
###############################################################################
def error(msg):
""" Display an error message and terminate. msg is the text of the error
message.
"""
sys.stderr.write(_format("Error: " + msg) + "\n")
sys.exit(1)
def inform(msg):
""" Display an information message. msg is the text of the error message.
"""
sys.stdout.write(_format(msg) + "\n")
def quote(path):
""" Return a path with quotes added if it contains spaces. path is the
path.
"""
if ' ' in path:
path = '"%s"' % path
return path
def optparser_store_abspath(option, opt_str, value, parser):
""" An optparser callback that saves an option as an absolute pathname. """
setattr(parser.values, option.dest, os.path.abspath(value))
def optparser_store_abspath_dir(option, opt_str, value, parser):
""" An optparser callback that saves an option as the absolute pathname
of an existing directory.
"""
if not os.path.isdir(value):
raise optparse.OptionValueError("'%s' is not a directory" % value)
setattr(parser.values, option.dest, os.path.abspath(value))
def optparser_store_abspath_exe(option, opt_str, value, parser):
""" An optparser callback that saves an option as the absolute pathname
of an existing executable.
"""
if not os.access(value, os.X_OK):
raise optparse.OptionValueError("'%s' is not an executable" % value)
setattr(parser.values, option.dest, os.path.abspath(value))
def read_define(filename, define):
""" Read the value of a #define from a file. filename is the name of the
file. define is the name of the #define. None is returned if there was no
such #define.
"""
f = open(filename)
for l in f:
wl = l.split()
if len(wl) >= 3 and wl[0] == "#define" and wl[1] == define:
# Take account of embedded spaces.
value = ' '.join(wl[2:])[1:-1]
break
else:
value = None
f.close()
return value
def version_from_string(version_str):
""" Convert a version string of the form m, m.n or m.n.o to an encoded
version number (or None if it was an invalid format). version_str is the
version string.
"""
parts = version_str.split('.')
if not isinstance(parts, list):
return None
if len(parts) == 1:
parts.append('0')
if len(parts) == 2:
parts.append('0')
if len(parts) != 3:
return None
version = 0
for part in parts:
try:
v = int(part)
except ValueError:
return None
version = (version << 8) + v
return version
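# For example (illustrative): version_from_string('4.10.3') == 0x040a03,
# version_from_string('5.6') == 0x050600, and a malformed string returns None.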
def _format(msg, left_margin=0, right_margin=78):
""" Format a message by inserting line breaks at appropriate places. msg
is the text of the message. left_margin is the position of the left
margin. right_margin is the position of the right margin. Returns the
formatted message.
"""
curs = left_margin
fmsg = " " * left_margin
for w in msg.split():
l = len(w)
if curs != left_margin and curs + l > right_margin:
fmsg = fmsg + "\n" + (" " * left_margin)
curs = left_margin
if curs > left_margin:
fmsg = fmsg + " "
curs = curs + 1
fmsg = fmsg + w
curs = curs + l
return fmsg
class _ConfigurationFileParser:
""" A parser for configuration files. """
def __init__(self, config_file):
""" Read and parse a configuration file. """
self._config = {}
self._extrapolating = []
cfg = open(config_file)
line_nr = 0
last_name = None
section = ''
section_config = {}
self._config[section] = section_config
for l in cfg:
line_nr += 1
# Strip comments.
l = l.split('#')[0]
# See if this might be part of a multi-line.
multiline = (last_name is not None and len(l) != 0 and l[0] == ' ')
l = l.strip()
if l == '':
last_name = None
continue
# See if this is a new section.
if l[0] == '[' and l[-1] == ']':
section = l[1:-1].strip()
if section == '':
error(
"%s:%d: Empty section name." % (
config_file, line_nr))
if section in self._config:
error(
"%s:%d: Section '%s' defined more than once." % (
config_file, line_nr, section))
section_config = {}
self._config[section] = section_config
last_name = None
continue
parts = l.split('=', 1)
if len(parts) == 2:
name = parts[0].strip()
value = parts[1].strip()
elif multiline:
name = last_name
value = section_config[last_name]
value += ' ' + l
else:
name = value = ''
if name == '' or value == '':
error("%s:%d: Invalid line." % (config_file, line_nr))
section_config[name] = value
last_name = name
cfg.close()
def sections(self):
""" Return the list of sections, excluding the default one. """
return [s for s in self._config.keys() if s != '']
def preset(self, name, value):
""" Add a preset value to the configuration. """
self._config[''][name] = value
def get(self, section, name, default=None):
""" Get a configuration value while extrapolating. """
# Get the name from the section, or the default section.
value = self._config[section].get(name)
if value is None:
value = self._config[''].get(name)
if value is None:
if default is None:
error(
"Configuration file references non-existent name "
"'%s'." % name)
return default
# Handle any extrapolations.
parts = value.split('%(', 1)
while len(parts) == 2:
prefix, tail = parts
parts = tail.split(')', 1)
if len(parts) != 2:
error(
"Configuration file contains unterminated "
"extrapolated name '%s'." % tail)
xtra_name, suffix = parts
if xtra_name in self._extrapolating:
error(
"Configuration file contains a recursive reference to "
"'%s'." % xtra_name)
self._extrapolating.append(xtra_name)
xtra_value = self.get(section, xtra_name)
self._extrapolating.pop()
value = prefix + xtra_value + suffix
parts = value.split('%(', 1)
return value
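    # For example (illustrative), a configuration file entry such as
    #   py_inc_dir = %(sysroot)/usr/include/python%(py_major).%(py_minor)
    # would be expanded here using values registered through preset()
    # (e.g. py_major, py_minor and sysroot, as done later in this script).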
def getboolean(self, section, name, default):
""" Get a boolean configuration value while extrapolating. """
value = self.get(section, name, default)
# In case the default was returned.
if isinstance(value, bool):
return value
if value in ('True', 'true', '1'):
return True
if value in ('False', 'false', '0'):
return False
error(
"Configuration file contains invalid boolean value for "
"'%s'." % name)
def getlist(self, section, name, default):
""" Get a list configuration value while extrapolating. """
value = self.get(section, name, default)
# In case the default was returned.
if isinstance(value, list):
return value
return value.split()
class _HostPythonConfiguration:
""" A container for the host Python configuration. """
def __init__(self):
""" Initialise the configuration. """
self.platform = sys.platform
self.version = sys.hexversion >> 8
self.inc_dir = sysconfig.get_python_inc()
self.venv_inc_dir = sysconfig.get_python_inc(prefix=sys.prefix)
self.module_dir = sysconfig.get_python_lib(plat_specific=1)
if sys.platform == 'win32':
self.data_dir = sys.prefix
self.lib_dir = sys.prefix + '\\libs'
else:
self.data_dir = sys.prefix + '/share'
self.lib_dir = sys.prefix + '/lib'
class _TargetQtConfiguration:
""" A container for the target Qt configuration. """
def __init__(self, qmake):
""" Initialise the configuration. qmake is the full pathname of the
qmake executable that will provide the configuration.
"""
pipe = os.popen(' '.join([qmake, '-query']))
for l in pipe:
l = l.strip()
tokens = l.split(':', 1)
if isinstance(tokens, list):
if len(tokens) != 2:
error("Unexpected output from qmake: '%s'\n" % l)
name, value = tokens
else:
name = tokens
value = None
name = name.replace('/', '_')
setattr(self, name, value)
pipe.close()
class _TargetConfiguration:
""" A container for the target configuration. """
def __init__(self, module_config):
""" Initialise the configuration with default values. module_config is
the module configuration.
"""
# Values based on the host Python configuration.
py_config = _HostPythonConfiguration()
self.py_platform = py_config.platform
self.py_version = py_config.version
self.py_module_dir = py_config.module_dir
self.py_inc_dir = py_config.inc_dir
self.py_venv_inc_dir = py_config.venv_inc_dir
self.py_pylib_dir = py_config.lib_dir
self.py_sip_dir = os.path.join(py_config.data_dir, 'sip')
self.sip_inc_dir = py_config.venv_inc_dir
# The default qmake spec.
if self.py_platform == 'win32':
if self.py_version >= 0x030500:
self.qmake_spec = 'win32-msvc2015'
elif self.py_version >= 0x030300:
self.qmake_spec = 'win32-msvc2010'
elif self.py_version >= 0x020600:
self.qmake_spec = 'win32-msvc2008'
elif self.py_version >= 0x020400:
self.qmake_spec = 'win32-msvc.net'
else:
self.qmake_spec = 'win32-msvc'
else:
# Use the Qt default. (We may update it for MacOS/X later.)
self.qmake_spec = ''
# Remaining values.
self.pyqt_sip_flags = None
self.pyqt_version_str = ''
self.qmake = self._find_exe('qmake')
self.qt_version_str = ''
self.sip = self._find_exe('sip5', 'sip')
self.sip_version = None
self.sysroot = ''
self.prot_is_public = (self.py_platform.startswith('linux') or self.py_platform == 'darwin')
if module_config.pyqt5_is_supported and module_config.pyqt4_is_supported:
pyqt = 'PyQt5' if module_config.pyqt5_is_default else 'PyQt4'
elif module_config.pyqt5_is_supported and not module_config.pyqt4_is_supported:
pyqt = 'PyQt5'
elif not module_config.pyqt5_is_supported and module_config.pyqt4_is_supported:
pyqt = 'PyQt4'
else:
pyqt = None
if pyqt is not None:
self.module_dir = os.path.join(py_config.module_dir, pyqt)
self.pyqt_sip_dir = os.path.join(self.py_sip_dir, pyqt)
else:
self.module_dir = py_config.module_dir
self.pyqt_sip_dir = None
self.pyqt_package = pyqt
module_config.init_target_configuration(self)
def update_from_configuration_file(self, config_file):
""" Update the configuration with values from a file. config_file
is the name of the configuration file.
"""
inform("Reading configuration from %s..." % config_file)
parser = _ConfigurationFileParser(config_file)
# Populate some presets from the command line.
parser.preset('py_major', str(self.py_version >> 16))
parser.preset('py_minor', str((self.py_version >> 8) & 0xff))
parser.preset('sysroot', self.sysroot)
if self.pyqt_package is None:
section = ''
else:
# At the moment we only need to distinguish between PyQt4 and
# PyQt5. If that changes we may need a --target-pyqt-version
# command line option.
pyqt_version = 0x050000 if self.pyqt_package == 'PyQt5' else 0x040000
# Find the section corresponding to the version of PyQt.
section = None
latest_section = -1
for name in parser.sections():
parts = name.split()
if len(parts) != 2 or parts[0] != 'PyQt':
continue
section_pyqt_version = version_from_string(parts[1])
if section_pyqt_version is None:
continue
# Major versions must match.
if section_pyqt_version >> 16 != pyqt_version >> 16:
continue
                # It must be no later than the version of PyQt.
if section_pyqt_version > pyqt_version:
continue
# Save it if it is the latest so far.
if section_pyqt_version > latest_section:
section = name
latest_section = section_pyqt_version
if section is None:
error(
"%s does not define a section that covers PyQt "
"v%s." % (config_file, self.pyqt_version_str))
self.py_platform = parser.get(section, 'py_platform', self.py_platform)
self.py_inc_dir = parser.get(section, 'py_inc_dir', self.py_inc_dir)
self.py_venv_inc_dir = self.py_inc_dir
self.py_pylib_dir = parser.get(section, 'py_pylib_dir',
self.py_pylib_dir)
self.sip_inc_dir = self.py_venv_inc_dir
self.module_dir = parser.get(section, 'module_dir', self.module_dir)
if self.pyqt_package is not None:
self.py_sip_dir = parser.get(section, 'py_sip_dir',
self.py_sip_dir)
# Construct the SIP flags.
flags = []
flags.append('-t')
flags.append(self._get_platform_tag())
qt_version = version_from_string(self.qt_version_str)
if qt_version is None:
error("Unable to determine the version of Qt.")
if self.pyqt_package == 'PyQt5':
if qt_version < 0x050000:
error("PyQt5 requires Qt v5.0 or later.")
if qt_version > 0x060000:
qt_version = 0x060000
else:
if qt_version > 0x050000:
qt_version = 0x050000
major = (qt_version >> 16) & 0xff
minor = (qt_version >> 8) & 0xff
patch = qt_version & 0xff
flags.append('-t')
flags.append('Qt_%d_%d_%d' % (major, minor, patch))
for feat in parser.getlist(section, 'pyqt_disabled_features', []):
flags.append('-x')
flags.append(feat)
self.pyqt_sip_flags = ' '.join(flags)
def _get_platform_tag(self):
""" Return the tag for the target platform. """
# This replicates the logic in PyQt's configure scripts.
if self.py_platform == 'win32':
plattag = 'WS_WIN'
elif self.py_platform == 'darwin':
plattag = 'WS_MACX'
else:
plattag = 'WS_X11'
return plattag
def introspect_pyqt(self, module_config):
""" Introspect PyQt to determine the sip flags required. module_config
is the module configuration.
"""
if self.pyqt_package == 'PyQt5':
try:
from PyQt5 import QtCore
except ImportError:
error(
"Unable to import PyQt5.QtCore. Make sure PyQt5 is "
"installed.")
else:
try:
from PyQt4 import QtCore
except ImportError:
error(
"Unable to import PyQt4.QtCore. Make sure PyQt4 is "
"installed.")
self.pyqt_version_str = QtCore.PYQT_VERSION_STR
self.qt_version_str = QtCore.qVersion()
# See if we have a PyQt that embeds its configuration.
try:
pyqt_config = QtCore.PYQT_CONFIGURATION
except AttributeError:
pyqt_config = None
if pyqt_config is None:
if module_config.legacy_configuration_script:
# Fallback to the old configuration script.
config_script = sys.argv[0].replace('configure', 'configure-old')
args = [sys.executable, config_script] + sys.argv[1:]
try:
os.execv(sys.executable, args)
except OSError:
pass
error("Unable to execute '%s'" % config_script)
error("PyQt v4.10 or later is required.")
self.pyqt_sip_flags = pyqt_config['sip_flags']
def apply_sysroot(self):
""" Apply sysroot where necessary. """
if self.sysroot != '':
self.py_inc_dir = self._apply_sysroot(self.py_inc_dir)
self.py_venv_inc_dir = self._apply_sysroot(self.py_venv_inc_dir)
self.py_pylib_dir = self._apply_sysroot(self.py_pylib_dir)
self.py_sip_dir = self._apply_sysroot(self.py_sip_dir)
self.module_dir = self._apply_sysroot(self.module_dir)
self.sip_inc_dir = self._apply_sysroot(self.sip_inc_dir)
def _apply_sysroot(self, dir_name):
""" Replace any leading sys.prefix of a directory name with sysroot.
"""
if dir_name.startswith(sys.prefix):
dir_name = self.sysroot + dir_name[len(sys.prefix):]
return dir_name
def get_qt_configuration(self, opts):
""" Get the Qt configuration that can be extracted from qmake. opts
are the command line options.
"""
try:
qmake = opts.qmake
except AttributeError:
# Windows.
qmake = None
if qmake is not None:
self.qmake = qmake
elif self.qmake is None:
# Under Windows qmake and the Qt DLLs must be on the system PATH
# otherwise the dynamic linker won't be able to resolve the
# symbols. On other systems we assume we can just run qmake by
# using its full pathname.
if sys.platform == 'win32':
error("Make sure you have a working Qt qmake on your PATH.")
else:
error(
"Make sure you have a working Qt qmake on your PATH "
"or use the --qmake argument to explicitly specify a "
"working Qt qmake.")
# Query qmake.
qt_config = _TargetQtConfiguration(self.qmake)
# The binary MacOS/X Qt installer defaults to XCode. If this is what
# we might have then use macx-clang (Qt v5) or macx-g++ (Qt v4).
if sys.platform == 'darwin':
try:
# Qt v5.
if qt_config.QMAKE_SPEC == 'macx-xcode':
# This will exist (and we can't check anyway).
self.qmake_spec = 'macx-clang'
else:
# No need to explicitly name the default.
self.qmake_spec = ''
except AttributeError:
# Qt v4.
self.qmake_spec = 'macx-g++'
self.qt_version_str = getattr(qt_config, 'QT_VERSION', '')
self.api_dir = os.path.join(qt_config.QT_INSTALL_DATA, 'qsci')
self.qt_inc_dir = qt_config.QT_INSTALL_HEADERS
self.qt_lib_dir = qt_config.QT_INSTALL_LIBS
if self.sysroot == '':
self.sysroot = getattr(qt_config, 'QT_SYSROOT', '')
def apply_pre_options(self, opts):
""" Apply options from the command line that influence subsequent
configuration. opts are the command line options.
"""
# Get the system root.
if opts.sysroot is not None:
self.sysroot = opts.sysroot
if self.pyqt_package is not None:
try:
self.pyqt_package = opts.pyqt_package
except AttributeError:
# Multiple PyQt versions are not supported.
pass
self.module_dir = os.path.join(self.py_module_dir,
self.pyqt_package)
def apply_post_options(self, opts, module_config):
""" Apply options from the command line that override the previous
configuration. opts are the command line options. module_config is
the module configuration.
"""
if self.pyqt_package is not None:
if module_config.user_pyqt_sip_flags_is_supported:
if opts.pyqt_sip_flags is not None:
self.pyqt_sip_flags = opts.pyqt_sip_flags
if opts.pyqt_sip_dir is not None:
self.pyqt_sip_dir = opts.pyqt_sip_dir
else:
self.pyqt_sip_dir = os.path.join(self.py_sip_dir,
self.pyqt_package)
if module_config.qscintilla_api_file:
if opts.apidir is not None:
self.api_dir = opts.apidir
if opts.no_qsci_api:
self.api_dir = ''
if opts.destdir is not None:
self.module_dir = opts.destdir
if opts.qmakespec is not None:
self.qmake_spec = opts.qmakespec
if module_config.protected_is_public_is_supported:
if opts.prot_is_public is not None:
self.prot_is_public = opts.prot_is_public
else:
self.prot_is_public = False
if opts.sip_inc_dir is not None:
self.sip_inc_dir = opts.sip_inc_dir
if opts.sip is not None:
self.sip = opts.sip
module_config.apply_options(self, opts)
@staticmethod
def _find_exe(*exes):
""" Find an executable, ie. the first on the path. """
path_dirs = os.environ.get('PATH', '').split(os.pathsep)
for exe in exes:
if sys.platform == 'win32':
exe = exe + '.exe'
for d in path_dirs:
exe_path = os.path.join(d, exe)
if os.access(exe_path, os.X_OK):
return exe_path
return None
def _create_optparser(target_config, module_config):
""" Create the parser for the command line. target_config is the target
configuration containing default values. module_config is the module
configuration.
"""
module_name = module_config.descriptive_name
p = optparse.OptionParser(usage="python %prog [options]",
version=module_config.version)
p.add_option('--spec', dest='qmakespec', default=None, action='store',
metavar="SPEC",
help="pass -spec SPEC to qmake [default: %s]" % "don't pass -spec" if target_config.qmake_spec == '' else target_config.qmake_spec)
if module_config.qscintilla_api_file:
p.add_option('--apidir', '-a', dest='apidir', type='string',
default=None, action='callback',
callback=optparser_store_abspath, metavar="DIR",
help="the QScintilla API file will be installed in DIR "
"[default: QT_INSTALL_DATA/qsci]")
p.add_option('--no-qsci-api', dest='no_qsci_api', default=False,
action='store_true',
help="disable the installation of the QScintilla API file "
"[default: enabled]")
if module_config.user_configuration_file_is_supported:
p.add_option('--configuration', dest='config_file', type='string',
default=None, action='callback',
callback=optparser_store_abspath, metavar="FILE",
help="FILE defines the target configuration")
p.add_option('--destdir', '-d', dest='destdir', type='string',
default=None, action='callback', callback=optparser_store_abspath,
metavar="DIR",
help="install the %s module in DIR [default: %s]" %
(module_name, target_config.module_dir))
if module_config.protected_is_public_is_supported:
p.add_option('--protected-is-public', dest='prot_is_public',
default=None, action='store_true',
help="enable building with 'protected' redefined as 'public' "
"[default: %s]" % target_config.prot_is_public)
p.add_option('--protected-not-public', dest='prot_is_public',
action='store_false',
help="disable building with 'protected' redefined as 'public'")
if target_config.pyqt_package is not None:
pyqt = target_config.pyqt_package
if module_config.pyqt5_is_supported and module_config.pyqt4_is_supported:
p.add_option('--pyqt', dest='pyqt_package', type='choice',
choices=['PyQt4', 'PyQt5'], default=pyqt,
action='store', metavar="PyQtn",
help="configure for PyQt4 or PyQt5 [default: %s]" % pyqt)
if module_config.user_pyqt_sip_flags_is_supported:
p.add_option('--pyqt-sip-flags', dest='pyqt_sip_flags',
default=None, action='store', metavar="FLAGS",
help="the sip flags used to build PyQt [default: query PyQt]")
if sys.platform != 'win32':
p.add_option('--qmake', '-q', dest='qmake', type='string',
default=None, action='callback',
callback=optparser_store_abspath_exe, metavar="FILE",
help="the pathname of qmake is FILE [default: "
"%s]" % (target_config.qmake or "None"))
p.add_option('--sip', dest='sip', type='string', default=None,
action='callback', callback=optparser_store_abspath_exe,
metavar="FILE",
help="the pathname of sip is FILE [default: "
"%s]" % (target_config.sip or "None"))
p.add_option('--sip-incdir', dest='sip_inc_dir', type='string',
default=None, action='callback',
callback=optparser_store_abspath_dir, metavar="DIR",
help="the directory containing the sip.h header file file is DIR "
"[default: %s]" % target_config.sip_inc_dir)
if target_config.pyqt_package is not None:
p.add_option('--pyqt-sipdir', dest='pyqt_sip_dir', type='string',
default=None, action='callback',
callback=optparser_store_abspath_dir, metavar="DIR",
help="the directory containing the PyQt .sip files is DIR "
"[default: %s]" % target_config.pyqt_sip_dir)
p.add_option('--concatenate', '-c', dest='concat', default=False,
action='store_true',
help="concatenate the C++ source files")
p.add_option('--concatenate-split', '-j', dest='split', type='int',
default=1, metavar="N",
help="split the concatenated C++ source files into N pieces "
"[default: 1]")
p.add_option('--static', '-k', dest='static', default=False,
action='store_true',
help="build the %s module as a static library" % module_name)
p.add_option("--sysroot", dest='sysroot', type='string', action='callback',
callback=optparser_store_abspath_dir, metavar="DIR",
help="DIR is the target system root directory")
p.add_option('--no-docstrings', dest='no_docstrings', default=False,
action='store_true',
help="disable the generation of docstrings")
p.add_option('--trace', '-r', dest='tracing', default=False,
action='store_true',
help="build the %s module with tracing enabled" % module_name)
p.add_option('--debug', '-u', default=False, action='store_true',
help="build the %s module with debugging symbols" % module_name)
p.add_option('--verbose', '-w', dest='verbose', default=False,
action='store_true',
help="enable verbose output during configuration")
p.add_option('--no-timestamp', '-T', dest='no_timestamp', default=False,
action='store_true',
help="suppress timestamps in the header comments of generated "
"code [default: include timestamps]")
module_config.init_optparser(p, target_config)
return p
def _inform_user(target_config, module_config):
""" Tell the user the values that are going to be used. target_config is
the target configuration. module_config is the module configuration.
"""
module_name = module_config.descriptive_name
inform("Configuring %s %s..." % (module_name, module_config.version))
module_config.inform_user(target_config)
inform("The %s module will be installed in %s." %
(module_name, target_config.module_dir))
if target_config.pyqt_version_str != '':
inform("PyQt %s is being used." % target_config.pyqt_version_str)
else:
inform("%s is being used." % target_config.pyqt_package)
if target_config.qt_version_str != '':
inform("Qt %s is being used." % target_config.qt_version_str)
if target_config.sysroot != '':
inform("The system root directory is %s." % target_config.sysroot)
inform("sip %s is being used." % target_config.sip_version)
inform("The sip executable is %s." % target_config.sip)
if target_config.prot_is_public:
inform(
"The %s module is being built with 'protected' redefined as "
"'public'." % module_name)
if module_config.qscintilla_api_file and target_config.api_dir != '':
inform("The QScintilla API file will be installed in %s." %
os.path.join(target_config.api_dir, 'api', 'python'))
def _generate_code(target_config, opts, module_config):
""" Generate the code for the module. target_config is the target
configuration. opts are the command line options. module_config is the
module configuration.
"""
inform(
"Generating the C++ source for the %s module..." %
module_config.name)
# Build the SIP command line.
argv = [quote(target_config.sip)]
# Add the module-specific flags.
argv.extend(module_config.get_sip_flags(target_config))
if target_config.pyqt_package is not None:
# Get the flags used for the main PyQt module.
argv.extend(target_config.pyqt_sip_flags.split())
# Add the backstop version.
argv.append('-B')
argv.append('Qt_6_0_0' if target_config.pyqt_package == 'PyQt5'
else 'Qt_5_0_0')
# Add PyQt's .sip files to the search path.
argv.append('-I')
argv.append(target_config.pyqt_sip_dir)
if module_config.qscintilla_api_file and target_config.api_dir != '':
# Generate the API file.
argv.append('-a')
argv.append(module_config.qscintilla_api_file + '.api')
if target_config.prot_is_public:
argv.append('-P');
if opts.no_timestamp:
argv.append('-T')
if not opts.no_docstrings:
argv.append('-o');
if opts.concat:
argv.append('-j')
argv.append(str(opts.split))
if opts.tracing:
argv.append('-r')
argv.append('-c')
argv.append('.')
argv.append(module_config.get_sip_file(target_config))
check_file = 'sipAPI%s.h' % module_config.name
_remove_file(check_file)
_run_command(' '.join(argv), opts.verbose)
if not os.access(check_file, os.F_OK):
error("Unable to create the C++ code.")
# Generate the .pro file.
pro = _generate_pro(target_config, opts, module_config)
# Generate the Makefile.
_run_qmake(target_config, opts.verbose, pro, module_config)
def _get_qt_qmake_config(qmake_config, qt_version):
""" Return a dict of qmake configuration values for a specific Qt version.
"""
qt_qmake_config = {}
for name, value in qmake_config.items():
name_parts = name.split(':')
if len(name_parts) == 2 and name_parts[0] == qt_version:
qt_qmake_config[name_parts[1]] = value
return qt_qmake_config
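# Illustrative example (hypothetical qmake_config values): keys prefixed with the
# requested Qt version are kept with the prefix stripped, everything else is ignored.
# >>> _get_qt_qmake_config({'Qt5:QT': 'widgets printsupport', 'CONFIG': 'qscintilla2'}, 'Qt5')
# {'QT': 'widgets printsupport'}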
def _write_qt_qmake_config(qt_qmake_config, pro):
""" Write the qmake configuration values to a .pro file. """
for name in ('QT', 'CONFIG', 'DEFINES', 'INCLUDEPATH', 'LIBS'):
value = qt_qmake_config.get(name)
if value:
pro.write(' %s += %s\n' % (name, value))
def _generate_pro(target_config, opts, module_config):
""" Generate the .pro file for the module and return its name.
target_config is the target configuration. opts are the command line
options. module_config is the module configuration.
"""
inform("Generating the .pro file for the %s module..." % module_config.name)
# Without the 'no_check_exist' magic the target.files must exist when qmake
# is run, otherwise the install and uninstall targets are not generated.
qmake_config = module_config.get_qmake_configuration(target_config)
pro_name = module_config.name + '.pro'
pro = open(pro_name, 'w')
pro.write('TEMPLATE = lib\n')
qt = qmake_config.get('QT')
if qt:
pro.write('QT += %s\n' % qt)
pro.write('CONFIG += %s\n' % ('debug' if opts.debug else 'release'))
pro.write('CONFIG += %s\n' % ('staticlib' if opts.static else 'plugin'))
config = qmake_config.get('CONFIG')
if config:
pro.write('CONFIG += %s\n' % config)
# Work around QTBUG-39300.
pro.write('CONFIG -= android_install\n')
qt5_qmake_config = _get_qt_qmake_config(qmake_config, 'Qt5')
qt4_qmake_config = _get_qt_qmake_config(qmake_config, 'Qt4')
if qt5_qmake_config or qt4_qmake_config:
pro.write('''
greaterThan(QT_MAJOR_VERSION, 4) {
''')
if qt5_qmake_config:
_write_qt_qmake_config(qt5_qmake_config, pro)
if qt4_qmake_config:
pro.write('} else {\n')
_write_qt_qmake_config(qt4_qmake_config, pro)
pro.write('}\n')
mname = module_config.name
if not opts.static:
pro.write('''
win32 {
PY_MODULE = %s.pyd
target.files = %s.pyd
LIBS += -L%s
} else {
PY_MODULE = %s.so
target.files = %s.so
}
target.CONFIG = no_check_exist
''' % (mname, mname, quote(target_config.py_pylib_dir), mname, mname))
pro.write('''
target.path = %s
INSTALLS += target
''' % quote(target_config.module_dir))
if module_config.qscintilla_api_file and target_config.api_dir != '':
pro.write('''
api.path = %s/api/python
api.files = %s.api
INSTALLS += api
''' % (target_config.api_dir, module_config.qscintilla_api_file))
sip_installs = module_config.get_sip_installs(target_config)
if sip_installs is not None:
path, files = sip_installs
pro.write('''
sip.path = %s
sip.files =''' % quote(path))
for f in files:
pro.write(' \\\n %s' % f)
pro.write('''
INSTALLS += sip
''')
pro.write('\n')
# These optimisations could apply to other platforms.
if module_config.no_exceptions:
if target_config.py_platform.startswith('linux') or target_config.py_platform == 'darwin':
pro.write('QMAKE_CXXFLAGS += -fno-exceptions\n')
if target_config.py_platform.startswith('linux') and not opts.static:
if target_config.py_version >= 0x030000:
entry_point = 'PyInit_%s' % mname
else:
entry_point = 'init%s' % mname
exp = open('%s.exp' % mname, 'wt')
exp.write('{ global: %s; local: *; };' % entry_point)
exp.close()
pro.write('QMAKE_LFLAGS += -Wl,--version-script=%s.exp\n' % mname)
if target_config.prot_is_public:
pro.write('DEFINES += SIP_PROTECTED_IS_PUBLIC protected=public\n')
defines = qmake_config.get('DEFINES')
if defines:
pro.write('DEFINES += %s\n' % defines)
includepath = qmake_config.get('INCLUDEPATH')
if includepath:
pro.write('INCLUDEPATH += %s\n' % includepath)
# Make sure the SIP include directory is searched before the Python include
# directory if they are different.
pro.write('INCLUDEPATH += %s\n' % quote(target_config.sip_inc_dir))
if target_config.py_inc_dir != target_config.sip_inc_dir:
pro.write('INCLUDEPATH += %s\n' % quote(target_config.py_inc_dir))
libs = qmake_config.get('LIBS')
if libs:
pro.write('LIBS += %s\n' % libs)
if not opts.static:
pro.write('''
win32 {
QMAKE_POST_LINK = $(COPY_FILE) $(DESTDIR_TARGET) $$PY_MODULE
} else {
QMAKE_POST_LINK = $(COPY_FILE) $(TARGET) $$PY_MODULE
}
macx {
QMAKE_LFLAGS += "-undefined dynamic_lookup"
greaterThan(QT_MAJOR_VERSION, 4) {
QMAKE_LFLAGS += "-install_name $$absolute_path($$PY_MODULE, $$target.path)"
greaterThan(QT_MINOR_VERSION, 4) {
QMAKE_RPATHDIR += $$[QT_INSTALL_LIBS]
}
}
''')
dylib = module_config.get_mac_wrapped_library_file(target_config)
if dylib:
pro.write('''
QMAKE_POST_LINK = $$QMAKE_POST_LINK$$escape_expand(\\\\n\\\\t)$$quote(install_name_tool -change %s %s $$PY_MODULE)
''' % (os.path.basename(dylib), dylib))
pro.write('}\n')
pro.write('\n')
pro.write('TARGET = %s\n' % mname)
pro.write('HEADERS = sipAPI%s.h\n' % mname)
pro.write('SOURCES =')
for s in glob.glob('*.cpp'):
pro.write(' \\\n %s' % s)
pro.write('\n')
pro.close()
return pro_name
def _run_qmake(target_config, verbose, pro_name, module_config):
""" Run qmake against a .pro file. target_config is the target
configuration. verbose is set if the output is to be displayed. pro_name
is the name of the .pro file. module_config is the module configuration.
"""
inform("Creating the Makefile for the %s module..." % module_config.name)
# qmake doesn't behave consistently if it is not run from the directory
# containing the .pro file - so make sure it is.
pro_dir, pro_file = os.path.split(pro_name)
if pro_dir != '':
cwd = os.getcwd()
os.chdir(pro_dir)
else:
cwd = None
mf = 'Makefile'
_remove_file(mf)
args = [quote(target_config.qmake)]
if target_config.qmake_spec != '':
args.append('-spec')
args.append(target_config.qmake_spec)
args.append(pro_file)
_run_command(' '.join(args), verbose)
if not os.access(mf, os.F_OK):
error(
"%s failed to create a Makefile from %s." %
(target_config.qmake, pro_name))
# Restore the current directory.
if cwd is not None:
os.chdir(cwd)
def _run_command(cmd, verbose):
""" Run a command and display the output if requested. cmd is the command
to run. verbose is set if the output is to be displayed.
"""
if verbose:
sys.stdout.write(cmd + "\n")
fout = _get_command_output(cmd)
# Read stdout and stderr until there is no more output.
lout = fout.readline()
while lout:
if verbose:
if sys.hexversion >= 0x03000000:
sys.stdout.write(str(lout, encoding=sys.stdout.encoding))
else:
sys.stdout.write(lout)
lout = fout.readline()
fout.close()
try:
os.wait()
except:
pass
def _get_command_output(cmd):
""" Return a pipe from which a command's output can be read. cmd is the
command.
"""
try:
import subprocess
except ImportError:
_, sout = os.popen4(cmd)
return sout
p = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
return p.stdout
def _remove_file(fname):
""" Remove a file which may or may not exist. fname is the name of the
file.
"""
try:
os.remove(fname)
except OSError:
pass
def _check_sip(target_config, module_config):
""" Check that the version of sip is good enough. target_config is the
target configuration. module_config is the module configuration.
"""
if target_config.sip is None:
error(
"Make sure you have a working sip on your PATH or use the "
"--sip argument to explicitly specify a working sip.")
pipe = os.popen(' '.join([target_config.sip, '-V']))
for l in pipe:
version_str = l.strip()
break
else:
error("'%s -V' did not generate any output." % target_config.sip)
pipe.close()
if 'snapshot' not in version_str and 'preview' not in version_str:
version = version_from_string(version_str)
if version is None:
error(
"'%s -V' generated unexpected output: '%s'." % (
target_config.sip, version_str))
min_sip_version = module_config.minimum_sip_version
if min_sip_version:
min_version = version_from_string(min_sip_version)
if version < min_version:
error(
"This version of %s requires sip %s or later." %
(module_config.descriptive_name,
min_sip_version))
target_config.sip_version = version_str
def _main(argv, module_config):
""" Create the configuration module module. argv is the list of command
line arguments. module_config is the module configuration.
"""
# Create the default target configuration.
target_config = _TargetConfiguration(module_config)
# Parse the command line.
p = _create_optparser(target_config, module_config)
opts, args = p.parse_args()
if args:
p.print_help()
sys.exit(2)
target_config.apply_pre_options(opts)
# Query qmake for the basic configuration information.
target_config.get_qt_configuration(opts)
# Update the target configuration.
if module_config.user_configuration_file_is_supported:
config_file = opts.config_file
else:
config_file = None
if config_file is not None:
target_config.update_from_configuration_file(config_file)
else:
target_config.apply_sysroot()
target_config.apply_post_options(opts, module_config)
if target_config.pyqt_package is not None:
if target_config.pyqt_sip_flags is None:
target_config.introspect_pyqt(module_config)
# Check SIP is new enough.
_check_sip(target_config, module_config)
# Perform any module specific checks now that all other information has
# been captured.
module_config.check_module(target_config)
# Tell the user what's been found.
_inform_user(target_config, module_config)
# Allow for module specific hacks.
module_config.pre_code_generation(target_config)
# Generate the code.
_generate_code(target_config, opts, module_config)
###############################################################################
# The script starts here.
###############################################################################
if __name__ == '__main__':
module_config = ModuleConfiguration()
try:
_main(sys.argv, module_config)
except SystemExit:
raise
except:
if module_config.support_email_address:
sys.stderr.write(
"""An internal error occured. Please report all the output from the program,
including the following traceback, to %s.
""" % module_config.support_email_address)
raise
|
import asyncio
import logging
import os
from azure.eventhub.aio import EventHubClient
from azure.eventhub.aio.eventprocessor import EventProcessor, PartitionProcessor
from azure.eventhub.extensions.checkpointstoreblobaio import BlobPartitionManager
from azure.storage.blob.aio import ContainerClient
RECEIVE_TIMEOUT = 5 # timeout in seconds for a receiving operation. 0 or None means no timeout
RETRY_TOTAL = 3 # max number of retries for receive operations within the receive timeout. The actual number of retries could be less if RECEIVE_TIMEOUT is too small
CONNECTION_STR = os.environ["EVENT_HUB_CONN_STR"]
STORAGE_CONNECTION_STR = os.environ["AZURE_STORAGE_CONN_STR"]
logging.basicConfig(level=logging.INFO)
async def do_operation(event):
# Do some sync or async operations. If the operation is I/O-intensive, async will give better performance.
print(event)
class MyPartitionProcessor(PartitionProcessor):
async def process_events(self, events, partition_context):
if events:
await asyncio.gather(*[do_operation(event) for event in events])
await partition_context.update_checkpoint(events[-1].offset, events[-1].sequence_number)
else:
print("empty events received", "partition:", partition_context.partition_id)
if __name__ == '__main__':
loop = asyncio.get_event_loop()
client = EventHubClient.from_connection_string(CONNECTION_STR, receive_timeout=RECEIVE_TIMEOUT, retry_total=RETRY_TOTAL)
container_client = ContainerClient.from_connection_string(STORAGE_CONNECTION_STR, container="eventprocessor")
partition_manager = BlobPartitionManager(container_client=container_client)
event_processor = EventProcessor(client, "$default", MyPartitionProcessor, partition_manager, polling_interval=10)
try:
loop.run_until_complete(event_processor.start())
except KeyboardInterrupt:
loop.run_until_complete(event_processor.stop())
finally:
loop.stop()
|
from datetime import datetime as date
input_date = input()
d1 = date.strptime(input_date, "%Y-%m-%d").date()
d2 = date.strptime('2018-08-26', "%Y-%m-%d").date()
if d1 < d2:
print("Passed")
elif d1 > d2:
print(f"{(d1 - d2).days + 1} days left")
else:
print("Today date")
|
import flask, json, os, re, sys, arrow
def get_state_by_coords(lat, lng):
include_geometry = flask.request.args.get('geometry', False)
columns = "aclu_id, geoid, ocd_id, name, state, area_land, area_water"
if include_geometry == '1':
columns += ', boundary_simple'
cur = flask.g.db.cursor()
cur.execute('''
SELECT {columns}
FROM states
WHERE ST_within(ST_GeomFromText('POINT({lng} {lat})', 4326), boundary_geom)
'''.format(columns=columns, lng=lng, lat=lat))
rs = cur.fetchall()
state = None
if rs:
for row in rs:
state = {
'aclu_id': row[0],
'geoid': row[1],
'ocd_id': row[2],
'name': row[3],
'state': row[4],
'area_land': row[5],
'area_water': row[6]
}
if include_geometry == '1':
state['geometry'] = row[7]
cur.close()
return state
def get_state_by_abbrev(abbrev):
include_geometry = flask.request.args.get('geometry', False)
columns = "aclu_id, geoid, ocd_id, name, state, area_land, area_water"
if include_geometry == '1':
columns += ', boundary_simple'
cur = flask.g.db.cursor()
cur.execute('''
SELECT {columns}
FROM states
WHERE state = %s
'''.format(columns=columns), (abbrev,))
rs = cur.fetchall()
state = None
if rs:
for row in rs:
state = {
'aclu_id': row[0],
'geoid': row[1],
'ocd_id': row[2],
'name': row[3],
'state': row[4],
'area_land': row[5],
'area_water': row[6]
}
if include_geometry == '1':
state['geometry'] = row[7]
cur.close()
return state
def get_state_by_id(id):
include_geometry = flask.request.args.get('geometry', False)
columns = "aclu_id, geoid, ocd_id, name, state, area_land, area_water"
if include_geometry == '1':
columns += ', boundary_simple'
cur = flask.g.db.cursor()
cur.execute('''
SELECT {columns}
FROM states
WHERE aclu_id = %s
'''.format(columns=columns), (id,))
rs = cur.fetchall()
state = None
if rs:
for row in rs:
state = {
'aclu_id': row[0],
'geoid': row[1],
'ocd_id': row[2],
'name': row[3],
'state': row[4],
'area_land': row[5],
'area_water': row[6]
}
if include_geometry == '1':
state['geometry'] = row[7]
cur.close()
return state
|
from .base import InteractiveAgent
from .scpr import SCPRAgent
from .ear import EARAgent
|
import multiprocessing
import os
from typing import Optional
from inboard.start import configure_logging
def calculate_workers(
max_workers_str: Optional[str],
web_concurrency_str: Optional[str],
workers_per_core_str: str,
cores: int = multiprocessing.cpu_count(),
) -> int:
"""Calculate the number of Gunicorn worker processes."""
use_default_workers = max(int(float(workers_per_core_str) * cores), 2)
if max_workers_str and int(max_workers_str) > 0:
use_max_workers = int(max_workers_str)
if web_concurrency_str and int(web_concurrency_str) > 0:
use_web_concurrency = int(web_concurrency_str)
return (
min(use_max_workers, use_web_concurrency)
if max_workers_str and web_concurrency_str
else use_web_concurrency
if web_concurrency_str
else use_default_workers
)
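# Illustrative values (hypothetical; they mirror the MAX_WORKERS / WEB_CONCURRENCY /
# WORKERS_PER_CORE environment variables read below):
# >>> calculate_workers("10", "4", "2", cores=8)
# 4
# >>> calculate_workers(None, None, "1", cores=2)
# 2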
# Gunicorn setup
max_workers_str = os.getenv("MAX_WORKERS")
web_concurrency_str = os.getenv("WEB_CONCURRENCY")
workers_per_core_str = os.getenv("WORKERS_PER_CORE", "1")
workers = calculate_workers(max_workers_str, web_concurrency_str, workers_per_core_str)
worker_tmp_dir = "/dev/shm"
host = os.getenv("HOST", "0.0.0.0")
port = os.getenv("PORT", "80")
bind_env = os.getenv("BIND")
use_bind = bind_env or f"{host}:{port}"
use_loglevel = os.getenv("LOG_LEVEL", "info")
accesslog_var = os.getenv("ACCESS_LOG", "-")
use_accesslog = accesslog_var or None
errorlog_var = os.getenv("ERROR_LOG", "-")
use_errorlog = errorlog_var or None
graceful_timeout_str = os.getenv("GRACEFUL_TIMEOUT", "120")
timeout_str = os.getenv("TIMEOUT", "120")
keepalive_str = os.getenv("KEEP_ALIVE", "5")
# Gunicorn config variables
logconfig_dict = configure_logging(
logging_conf=os.getenv("LOGGING_CONF", "inboard.logging_conf")
)
loglevel = use_loglevel
bind = use_bind
errorlog = use_errorlog
accesslog = use_accesslog
graceful_timeout = int(graceful_timeout_str)
timeout = int(timeout_str)
keepalive = int(keepalive_str)
|
"""
The DateRange class and a set of common date ranges.
"""
class DateRange(object):
"""
A range of dates, used to control sampling periods.
Args:
start_year (int): The lower bound of the date range, years BC are
negative numbers.
end_year (int): The upper bound of the date range, years BC are
negative numbers.
Attributes:
start_year (int): The lower bound of the date range.
end_year (int): The upper bound of the date range.
label (str): The date range formatted as a string, _e.g._ "50BC-250AD".
Raises:
(InvalidDateRange): Raised if the date arguments do not create a valid
range.
"""
def __init__(self, start_year, end_year):
if start_year > end_year:
raise InvalidDateRange('The start year must not be greater than the end year')
self.start_year = start_year
self.end_year = end_year
self.label = self._create_label()
@classmethod
def from_string(cls, string):
"""
Create a DateRange object from a string.
Args:
string (str): The date string.
Returns:
(DateRange): The DateRange object corresponding to the date string.
"""
# Get dates in AD/BC format from string
dates = string.split('-')
# Change into integer representation
for i, date in enumerate(dates):
if date == '0':
dates[i] = 0
elif date[-2:] == 'BC':
dates[i] = int(date[:-2])*-1
else:
dates[i] = int(date[:-2])
return cls(*dates)
def _create_label(self):
if self.start_year < 0:
label = '{:d}BC-'.format(abs(self.start_year))
elif self.start_year == 0:
label = '{:d}-'.format(abs(self.start_year))
else:
label = '{:d}AD-'.format(abs(self.start_year))
if self.end_year < 0:
label += '{:d}BC'.format(abs(self.end_year))
elif self.end_year == 0:
label += '{:d}'.format(abs(self.end_year))
else:
label += '{:d}AD'.format(abs(self.end_year))
return label
def __str__(self):
return self.label
def __hash__(self):
return hash(self.label)
def __eq__(self, other):
if isinstance(other, DateRange):
return (self.start_year == other.start_year
and self.end_year == other.end_year)
elif isinstance(other, str):
return self.label == other
else:
return False
def is_within(self, year):
"""
Determine whether a year is within the date range.
Args:
year (int): The year to consider, years BC are negative numbers.
Returns:
(bool): True if year is within the date range, False otherwise.
"""
if year >= self.start_year and year < self.end_year:
return True
else:
return False
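# Illustrative usage (values chosen to match the label format described above):
# >>> dr = DateRange.from_string('50BC-250AD')
# >>> (dr.start_year, dr.end_year, dr.label)
# (-50, 250, '50BC-250AD')
# >>> dr.is_within(-50), dr.is_within(250)
# (True, False)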
"""
Date ranges of cities data.
"""
cities_date_ranges = [
DateRange(-2500, -1000),
DateRange(-1000, 0),
DateRange(0, 500),
DateRange(500, 1000),
DateRange(700, 850),
DateRange(850, 1000),
DateRange(1000, 1100),
DateRange(1100, 1200),
DateRange(1200, 1300),
DateRange(1300, 1400),
DateRange(1400, 1500)]
"""
Default date ranges for imperial density eras.
"""
imperial_density_date_ranges = [
DateRange(-1500, -500),
DateRange(-500, 500),
DateRange(500, 1500)]
class InvalidDateRange(Exception):
"""
Exception raised when a user attempts to create an invalid date range.
"""
pass
|
from django.contrib import admin
from geolocations.models import Geolocation, GeolocationRequest
@admin.register(GeolocationRequest)
class GeolocationRequestAdmin(admin.ModelAdmin):
list_display = ('id', 'lookup', 'user', 'created_at')
search_fields = ('id', 'lookup')
list_filter = ('created_at',)
raw_id_fields = ('user',)
@admin.register(Geolocation)
class GeolocationAdmin(admin.ModelAdmin):
list_display = ('id', 'ip', 'type', 'city', 'zip', 'latitude', 'longitude')
search_fields = ('id', 'ip', 'city', 'zip')
list_filter = ('type',)
raw_id_fields = ('request',)
|
""" flametree/__init__.py """
# __all__ = []
from .version import __version__
from .Directory import Directory, File
from .DiskFileManager import DiskFileManager
from .ZipFileManager import ZipFileManager
from .utils import file_tree
|
import sys
def getArea(width, height) -> int:
# O(1)
return width * height
def main():
width, height = map(int, sys.stdin.readline().strip().split(' '))
print(getArea(width, height))
# O(1)
main()
|
# Burgers test evolution: just one
from models import burgers
from bcs import periodic
from simulation import simulation
from methods import weno3_lf
from rk import rk3
from grid import grid
from matplotlib import pyplot
Ngz = 3
Npoints = 400
interval = grid([-1, 1], Npoints, Ngz)
model = burgers.burgers(initial_data = burgers.initial_square())
sim = simulation(model, interval, weno3_lf, rk3, periodic)
sim.evolve(0.5)
sim.plot_scalar_vs_initial()
pyplot.show()
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
js_to_json,
determine_ext,
)
class BpbIE(InfoExtractor):
IE_DESC = 'Bundeszentrale für politische Bildung'
_VALID_URL = r'https?://www\.bpb\.de/mediathek/(?P<id>[0-9]+)/'
_TEST = {
'url': 'http://www.bpb.de/mediathek/297/joachim-gauck-zu-1989-und-die-erinnerung-an-die-ddr',
# md5 fails in Python 2.6 due to buggy server response and wrong handling of urllib2
'md5': 'c4f84c8a8044ca9ff68bb8441d300b3f',
'info_dict': {
'id': '297',
'ext': 'mp4',
'title': 'Joachim Gauck zu 1989 und die Erinnerung an die DDR',
'description': 'Joachim Gauck, erster Beauftragter für die Stasi-Unterlagen, spricht auf dem Geschichtsforum über die friedliche Revolution 1989 und eine "gewisse Traurigkeit" im Umgang mit der DDR-Vergangenheit.'
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
title = self._html_search_regex(
r'<h2 class="white">(.*?)</h2>', webpage, 'title')
video_info_dicts = re.findall(
r"({\s*src:\s*'http://film\.bpb\.de/[^}]+})", webpage)
formats = []
for video_info in video_info_dicts:
video_info = self._parse_json(video_info, video_id, transform_source=js_to_json)
quality = video_info['quality']
video_url = video_info['src']
formats.append({
'url': video_url,
'preference': 10 if quality == 'high' else 0,
'format_note': quality,
'format_id': '%s-%s' % (quality, determine_ext(video_url)),
})
self._sort_formats(formats)
return {
'id': video_id,
'formats': formats,
'title': title,
'description': self._og_search_description(webpage),
}
|
import collections
import os
import re
from shift_oelint_parser.cls_item import Comment
from shift_oelint_parser.cls_item import Export
from shift_oelint_parser.cls_item import Function
from shift_oelint_parser.cls_item import FunctionExports
from shift_oelint_parser.cls_item import Include
from shift_oelint_parser.cls_item import Item
from shift_oelint_parser.cls_item import PythonBlock
from shift_oelint_parser.cls_item import TaskAdd
from shift_oelint_parser.cls_item import TaskAssignment
from shift_oelint_parser.cls_item import Variable
from shift_oelint_parser.helper_files import expand_term
from shift_oelint_parser.helper_files import find_local_or_in_layer
from shift_oelint_parser.inlinerep import inlinerep
INLINE_BLOCK = "!!!inlineblock!!!"
def get_full_scope(_string, offset, _sstart, _send):
"""get full block of an inline statement
Args:
_string (str): input string
offset (int): offset in string
_sstart (str): opening delimiter character (e.g. "{")
_send (str): closing delimiter character (e.g. "}")
Returns:
str: full block of the inline statement
"""
scopelevel = 0
pos = 0
for c in _string[offset:]:
if c == _sstart:
scopelevel += 1
elif c == _send:
scopelevel -= 1
pos += 1
if scopelevel < 0:
break
return _string[:pos+offset]
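# Illustrative example (hypothetical BitBake-style input), called the same way as in
# prepare_lines_subparser below:
# >>> get_full_scope("${@d.getVar('PN')} extra", len("${@"), "{", "}")
# "${@d.getVar('PN')}"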
def prepare_lines_subparser(_iter, lineOffset, num, line, raw_line=None):
"""preprocess raw input
Args:
_iter (iterator): line iterator object
lineOffset (int): current line index
num (int): internal line counter
line (str): input string
raw_line (string, optional): internal line representation. Defaults to None.
Returns:
list: list of preprocessed chunks
"""
__func_start_regexp__ = r".*(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*\{"
res = []
raw_line = raw_line or line
if re.search(r"\\\s*\n", raw_line):
_, line = next(_iter)
while re.search(r"\\\s*\n", line):
raw_line += line
_, line = next(_iter)
raw_line += line
elif re.match(__func_start_regexp__, raw_line):
_, line = next(_iter)
stopiter = False
scope_level = 0
while not stopiter:
raw_line += line
if "{" in line:
scope_level += 1
if "}" in line:
scope_level -= 1
try:
_, line = next(_iter)
except StopIteration:
stopiter = True
if line.strip() == "}" and not scope_level:
stopiter = True
if line.strip() == "}":
raw_line += line
elif raw_line.strip().startswith("def "):
stopiter = False
while not stopiter:
try:
_, line = next(_iter)
except StopIteration:
stopiter = True
if re.match("^[A-Za-z0-9#]+", line) or stopiter:
if not stopiter:
res += prepare_lines_subparser(_iter,
lineOffset, num, line)
break
if line.startswith("def "):
raw_line = line
res += prepare_lines_subparser(_iter,
lineOffset, num, line, raw_line=raw_line)
break
raw_line += line
real_raw = raw_line
while raw_line.find("${@") != -1:
_inline_block = raw_line.find("${@")
repl = get_full_scope(raw_line[_inline_block:], len("${@"), "{", "}")
_repl = inlinerep(repl) or INLINE_BLOCK
raw_line = raw_line.replace(repl, _repl)
res.append({"line": num + 1 + lineOffset, "raw": raw_line,
"realraw": real_raw,
"cnt": raw_line.replace("\n", "").replace("\\", chr(0x1b))})
return res
def prepare_lines(_file, lineOffset=0):
"""break raw file input into preprocessed chunks
Args:
_file (string): Full path to file
lineOffset (int, optional): line offset counter. Defaults to 0.
Returns:
list: preprocessed list of chunks
"""
try:
prep_lines = []
with open(_file) as i:
_iter = enumerate(i.readlines())
for num, line in _iter:
prep_lines += prepare_lines_subparser(
_iter, lineOffset, num, line)
except (IOError, OSError):
pass
return prep_lines
def get_items(stash, _file, lineOffset=0, addInheritedFile=False, addIncludedFile=True):
"""parses file
Args:
stash (shift_oelint_parser.cls_stash.Stash): Stash object
_file (string): Full path to file
lineOffset (int, optional): line offset counter. Defaults to 0.
Returns:
list: List of shift_oelint_parser.cls_item.* representations
"""
res = []
__regex_var = r"^(?P<varname>([A-Z0-9a-z_-]|\$|\{|\}|:)+?)(\[(?P<ident>(\w|-|\.)+)\])*(?P<varop>(\s|\t)*(\+|\?|\:|\.)*=(\+|\.)*(\s|\t)*)(?P<varval>.*)"
__regex_func = r"^((?P<py>python)\s*|(?P<fr>fakeroot\s*))*(?P<func>[\w\.\-\+\{\}:\$]+)?\s*\(\s*\)\s*\{(?P<funcbody>.*)\s*\}"
__regex_inherit = r"^.*?inherit(\s+|\t+)(?P<inhname>.+)"
__regex_export_wval = r"^.*?export(\s+|\t+)(?P<name>.+)\s*=\s*\"(?P<value>.*)\""
__regex_export_woval = r"^.*?export(\s+|\t+)(?P<name>.+)\s*$"
__regex_comments = r"^(\s|\t)*#+\s*(?P<body>.*)"
__regex_python = r"^(\s*|\t*)def(\s+|\t+)(?P<funcname>[a-z0-9_]+)(\s*|\t*)\(.*\)\:"
__regex_include = r"^(\s*|\t*)(?P<statement>include|require)(\s+|\t+)(?P<incname>[A-za-z0-9\-\./\$\{\}]+)"
__regex_addtask = r"^(\s*|\t*)addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*"
__regex_taskass = r"^(\s*|\t*)(?P<func>[a-z0-9_-]+)\[(?P<ident>\w+)\](\s+|\t+)=(\s+|\t+)(?P<varval>.*)"
__regex_export_func = r"^EXPORT_FUNCTIONS\s+(?P<func>.*)"
_order = collections.OrderedDict([
("comment", __regex_comments),
("func", __regex_func),
("inherit", __regex_inherit),
("export", __regex_export_wval),
("export_noval", __regex_export_woval),
("python", __regex_python),
("include", __regex_include),
("addtask", __regex_addtask),
("taskassign", __regex_taskass),
("exportfunc", __regex_export_func),
("vars", __regex_var)
])
includeOffset = 0
for line in prepare_lines(_file, lineOffset):
good = False
for k, v in _order.items():
m = re.match(v, line["cnt"], re.MULTILINE)
if m:
if k == "python":
res.append(PythonBlock(
_file, line["line"] + includeOffset, line["line"] - lineOffset, line["raw"], m.group("funcname"), line["realraw"]))
good = True
break
elif k == "exportfunc":
res.append(FunctionExports(
_file, line["line"] + includeOffset, line["line"] - lineOffset, line["raw"], m.group("func"), line["realraw"]))
good = True
break
elif k == "vars":
res.append(Variable(
_file, line["line"] + includeOffset, line["line"] -
lineOffset, line["raw"], m.group(
"varname"), m.group("varval"),
m.group("varop"), m.group("ident"), line["realraw"]))
good = True
break
elif k == "func":
res.append(Function(
_file, line["line"] + includeOffset, line["line"] -
lineOffset, line["raw"],
m.group("func"), m.group("funcbody"), line["realraw"],
m.group("py"), m.group("fr")))
good = True
break
elif k == "comment":
res.append(
Comment(_file, line["line"] + includeOffset, line["line"] - lineOffset, line["raw"], line["realraw"]))
good = True
break
elif k == "inherit":
if addInheritedFile:
inhname = expand_term(stash, _file, m.group("inhname"))
if not inhname.endswith(".bbclass"):
inhname += ".bbclass"
_path = find_local_or_in_layer(
os.path.join("classes", inhname),
os.path.dirname(_file))
if _path:
tmp = stash.AddFile(_path, lineOffset=line["line"], forcedLink=_file)
if any(tmp):
includeOffset += max([x.InFileLine for x in tmp])
res.append(Variable(
_file, line["line"] + includeOffset, line["line"] -
lineOffset, line["raw"], "inherit", m.group("inhname"), line["realraw"],
"", ""))
good = True
break
elif k == "export":
res.append(Export(
_file, line["line"] + includeOffset, line["line"] -
lineOffset, line["raw"], m.group("name").strip() , m.group("value"), line["realraw"]))
good = True
break
elif k == "export_noval":
res.append(Export(
_file, line["line"] + includeOffset, line["line"] -
lineOffset, line["raw"], m.group("name").strip(), "", line["realraw"]))
good = True
break
elif k == "taskassign":
res.append(TaskAssignment(_file, line["line"] + includeOffset, line["line"] - lineOffset, line["raw"], m.group(
"func"), m.group("ident"), m.group("varval"), line["realraw"]))
good = True
break
elif k == "addtask":
# treat the following as variables
if any([m.group("func").startswith(x) for x in ['pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm']]):
continue
_g = m.groupdict()
if "before" in _g.keys():
_b = _g["before"]
else:
_b = ""
if "after" in _g.keys():
_a = _g["after"]
else:
_a = ""
res.append(TaskAdd(
_file, line["line"] + includeOffset, line["line"] - lineOffset, line["raw"], m.group("func"), line["realraw"], _b, _a))
break
elif k == "include":
if addIncludedFile:
_path = find_local_or_in_layer(
expand_term(stash, _file, m.group("incname")), os.path.dirname(_file))
if _path:
tmp = stash.AddFile(
_path, lineOffset=line["line"], forcedLink=_file)
if any(tmp):
includeOffset += max([x.InFileLine for x in tmp])
res.append(Include(
_file, line["line"], line["line"] - lineOffset, line["raw"], m.group("incname"), m.group("statement"), line["realraw"]))
good = True
break
if not good:
res.append(
Item(_file, line["line"], line["line"] - lineOffset, line["raw"], line["realraw"]))
return res
|
import icepyx as ipx
import pytest
import warnings
def test_CMRparams():
reg_a = ipx.Query("ATL06", [-64, 66, -55, 72], ["2019-02-22", "2019-02-28"])
reg_a.build_CMR_params()
obs_keys = reg_a.CMRparams.keys()
exp_keys_all = ["short_name", "version", "temporal"]
exp_keys_any = ["bounding_box", "polygon"]
assert all(keys in obs_keys for keys in exp_keys_all)
assert any(key in obs_keys for key in exp_keys_any)
def test_reqconfig_params():
reg_a = ipx.Query("ATL06", [-64, 66, -55, 72], ["2019-02-22", "2019-02-28"])
# test for search params
reg_a.build_reqconfig_params("search")
obs_keys = reg_a.reqparams.keys()
exp_keys_all = ["page_size"]
assert all(keys in obs_keys for keys in exp_keys_all)
# test for download params
reg_a.reqparams = None
reg_a.build_reqconfig_params("download")
reg_a.reqparams.update({"token": "", "email": ""})
obs_keys = reg_a.reqparams.keys()
exp_keys_all = [
"page_size",
"page_num",
"request_mode",
"token",
"email",
"include_meta",
]
assert all(keys in obs_keys for keys in exp_keys_all)
# BestPractices: should do additional properties tests for each potential property type (e.g. spatial extent can have type bounding_box or polygon)
# check that search results are correct (spatially, temporally, match actually available data)
|
# 2 - create a program that computes the annual return of an investment based on its monthly return (compound interest);
# the return must be presented both in % and in R$, using methods
from Aula11 import *
investimento = float(input('Enter the investment amount: R$'))
taxa = float(input('Enter the monthly rate you want to invest at: '))
taxa = taxa/100
mes = int(input('Enter how many months you want to invest for: '))
vinvest = invest(investimento,mes,taxa)
print(f'After {mes} months your investment will be worth R${vinvest} (profit of R${vinvest-investimento}, i.e. {(vinvest-investimento)/investimento*100}%)')
|
# -*- coding: utf-8 -*-
import pika
import os
import argparse
parser = argparse.ArgumentParser(description='parse rabbit')
parser.add_argument("-r", "--receive", action = 'store_true', help ='set this option to switch to receive mode' )
parser.add_argument("-s", "--sendmany",type=int,default=1)
args = parser.parse_args()
import amqpkey
amqp_url = amqpkey.key
url = os.environ.get('CLOUDAMQP_URL',amqp_url)
params = pika.URLParameters(url)
params.socket_timeout = 20
connection = pika.BlockingConnection(params)
channel= connection.channel()
channel.queue_declare(queue='hello2')
if not args.receive:
for i in range(args.sendmany):
msg=channel.basic_publish(exchange='', routing_key='hello2', body='Hello')
print("Send")
connection.close()
else:
cpt = 0
def callback(ch,method,properties,body):
print("Reçu");
global cpt
cpt += 1;
print(cpt)
channel.basic_consume(callback,queue='hello2',no_ack=True)
print("Waiting for message. To exit press CTRL+C")
channel.start_consuming()
|
#!/usr/bin/env python
# encoding: utf-8
import unittest
from nose.tools import assert_equal
from os.path import join, dirname, abspath
from main import process
SAMPLE_DIR = join(dirname(abspath(__file__)), 'sample_data')
class ScraperTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
"""Run once before all tests in this test class."""
with open(join(SAMPLE_DIR, 'sample_data.html'), 'r') as f:
cls.rows = list(process(f))
def test_correct_number_of_rows(self):
assert_equal(10, len(self.rows))
|
from abc import ABC
class Intermediate_Representation(ABC):
pass
|
"""
This module defines some plotting functions that are used by the
BALTO GUI app. It should be included in the same directory as
"balto_gui.py" and the corresponding Jupyter notebook.
"""
#------------------------------------------------------------------------
#
# Copyright (C) 2020. Scott D. Peckham
#
# Added use of cartopy in 2020-11 to 2020-12.
#
#------------------------------------------------------------------------
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
import cartopy.feature as cfeature
import numpy as np
#------------------------------------------------------------------------
#
# plot_data() # for x vs. y plots
#-----------------
# histogram_equalize()
# power_stretch1()
# power_stretch2()
# power_stretch3()
# log_stretch()
# stretch_grid()
# show_grid_as_image() (used for balto_gui.show_grid() method)
#
#------------------------------------------------------------------------
def plot_data( x, y, xmin=None, xmax=None, ymin=None, ymax=None,
x_name='x', x_units='', marker=',',
y_name='y', y_units='',
x_size=8, y_size=4):
figure = plt.figure(1, figsize=(x_size, y_size))
# fig, ax = plt.subplots( figsize=(x_size, y_size))
# Set the plot point marker
# https://matplotlib.org/3.1.1/api/markers_api.html
# marker = ',' # pixel
# marker = '.' # point (small circle)
# marker = 'o' # circle
# marker = '+'
# marker = 'x'
#if (ymin is None):
# ymin = y.min()
#if (ymax is None):
# ymax = y.max()
#if (ymax - ymin < 0.1):
# ymin = ymin - 0.5
# ymax = ymin + 0.5
# x_name2 = x_name.replace('_', ' ').title()
# y_name2 = y_name.replace('_', ' ').title()
plt.plot( x, y, marker=marker)
plt.xlabel( x_name + ' [' + x_units + ']' )
plt.ylabel( y_name + ' [' + y_units + ']' )
plt.ylim( ymin, ymax )
plt.xlim( xmin, xmax )
#-------------------------------------
# This may be necessary depending on
# the data type of ymin, ymax
#-------------------------------------
## plt.ylim( np.array([ymin, ymax]) )
## plt.xlim( np.array([xmin, xmax]) )
plt.show()
# plot_data()
#------------------------------------------------------------------------
def histogram_equalize( grid, PLOT_NCS=False):
# https://docs.scipy.org/doc/numpy/reference/generated/numpy.histogram.html
(hist, bin_edges) = np.histogram( grid, bins=256)
# hmin = hist.min()
# hmax = hist.max()
cs = hist.cumsum()
ncs = (cs - cs.min()) / (cs.max() - cs.min())
# Note: calling ncs.astype('uint8') here would be a no-op (astype returns a new array),
# so ncs is left as a float array in [0, 1] and used directly as a lookup table below.
if (PLOT_NCS):
plt.plot( ncs )
flat = grid.flatten()
flat2 = np.uint8( 255 * (flat - flat.min()) / (flat.max() - flat.min()) )
grid2 = ncs[ flat2 ].reshape( grid.shape )
return grid2
# histogram_equalize()
#------------------------------------------------------------------------
def power_stretch1( grid, p ):
return grid**p
# power_stretch1()
#------------------------------------------------------------------------
def power_stretch2( grid, a=1000, b=0.5):
# Note: Try a=1000 and b=0.5
gmin = grid.min()
gmax = grid.max()
norm = (grid - gmin) / (gmax - gmin)
return (1 - (1 + a * norm)**(-b))
# power_stretch2()
#------------------------------------------------------------------------
def power_stretch3( grid, a=1, b=2):
# Note: Try a=1, b=2 (shape of a quarter circle)
gmin = grid.min()
gmax = grid.max()
norm = (grid - gmin) / (gmax - gmin)
return (1 - (1 - norm**a)**b)**(1/b)
# power_stretch3()
#------------------------------------------------------------------------
def log_stretch( grid, a=1 ):
return np.log( (a * grid) + 1 )
# log_stretch()
#------------------------------------------------------------------------
def stretch_grid( grid, stretch, a=1, b=2, p=0.5 ):
if (stretch == 'power_stretch1'):
# Try: p = 0.3
grid2 = power_stretch1( grid, p)
elif (stretch == 'power_stretch2'):
# Try: a=1000, b=0.5.
grid2 = power_stretch2( grid, a=a, b=b )
elif (stretch == 'power_stretch3'):
# Try: a=1, b=2.
grid2 = power_stretch3( grid, a=a, b=b)
elif (stretch == 'log_stretch'):
grid2 = log_stretch( grid, a=a )
elif (stretch == 'hist_equal'):
grid2 = histogram_equalize( grid, PLOT_NCS=False)
else:
print('SORRY, Unknown stretch =', stretch)
return None
return grid2
# stretch_grid()
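# Illustrative example (hypothetical 2x2 grid): 'power_stretch1' simply raises the
# grid to the power p; power_stretch2/3 normalise to [0, 1] first.
# >>> stretch_grid(np.array([[1.0, 4.0], [9.0, 16.0]]), 'power_stretch1', p=0.5)
# array([[1., 2.],
#        [3., 4.]])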
#------------------------------------------------------------------------
def get_map_projection( proj_name,
central_longitude=0.0, central_latitude=0.0,
false_easting=0.0, false_northing=0.0, globe=None,
standard_parallels=(20.0, 50.0),
scale_factor=None,
min_latitude=-80.0, max_latitude=84.0,
true_scale_latitude=None, latitude_true_scale=None, ### BOTH
secant_latitudes=None,
pole_longitude=0.0, pole_latitude=90.0,
central_rotated_longitude=0.0, sweep_axis='y',
satellite_height=35785831, cutoff=-30, approx=None,
southern_hemisphere=False, zone=15): #### numeric UTM zone
proj_name = proj_name.lower()
if (proj_name == 'albersequalarea'):
proj = ccrs.AlbersEqualArea(central_longitude=central_longitude,
central_latitude=central_latitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe,
standard_parallels=standard_parallels)
elif (proj_name == 'azimuthalequidistant'):
proj = ccrs.AzimuthalEquidistant(central_longitude=central_longitude,
central_latitude=central_latitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe)
elif (proj_name == 'equidistantconic'):
proj = ccrs.EquidistantConic(central_longitude=central_longitude,
central_latitude=central_latitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe,
standard_parallels=standard_parallels)
elif (proj_name == 'lambertconformal'):
proj = ccrs.LambertConformal(central_longitude=-96.0, ##########
central_latitude=39.0, ##########
false_easting=false_easting,
false_northing=false_northing, globe=globe,
secant_latitudes=None,
standard_parallels=None, ## default: (33,45)
cutoff=cutoff)
elif (proj_name == 'lambertcylindrical'):
proj = ccrs.LambertCylindrical(central_longitude=central_longitude)
elif (proj_name == 'mercator'):
proj = ccrs.Mercator(central_longitude=central_longitude,
min_latitude=min_latitude, max_latitude=max_latitude,
latitude_true_scale=latitude_true_scale,
false_easting=false_easting,
false_northing=false_northing, globe=globe,
scale_factor=None) #########
elif (proj_name == 'miller'):
proj = ccrs.Miller(central_longitude=central_longitude, globe=globe)
elif (proj_name == 'mollweide'):
proj = ccrs.Mollweide(central_longitude=central_longitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe)
elif (proj_name == 'orthographic'):
proj = ccrs.Orthographic(central_longitude=central_longitude,
central_latitude=central_latitude, globe=globe)
elif (proj_name == 'robinson'):
proj = ccrs.Robinson(central_longitude=central_longitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe)
elif (proj_name == 'sinusoidal'):
proj = ccrs.Sinusoidal(central_longitude=central_longitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe)
elif (proj_name == 'stereographic'):
proj = ccrs.Stereographic(central_latitude=central_latitude,
central_longitude=central_longitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe,
true_scale_latitude=true_scale_latitude,
scale_factor=scale_factor)
elif (proj_name == 'transversemercator'):
proj = ccrs.TransverseMercator(central_longitude=central_longitude,
central_latitude=central_latitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe,
scale_factor=1.0, ##########
approx=approx)
elif (proj_name == 'utm'):
proj = ccrs.UTM(zone, southern_hemisphere=southern_hemisphere, globe=globe)
elif (proj_name == 'interruptedgoodehomolosine'):
proj = ccrs.InterruptedGoodeHomolosine(central_longitude=central_longitude,
globe=globe)
elif (proj_name == 'rotatedpole'):
proj = ccrs.RotatedPole(pole_longitude=pole_longitude,
pole_latitude=pole_latitude, globe=globe,
central_rotated_longitude=central_rotated_longitude)
elif (proj_name == 'osgb'):
proj = ccrs.OSGB(approx=approx)
elif (proj_name == 'europp'):
proj = ccrs.EuroPP()
elif (proj_name == 'geostationary'):
proj = ccrs.Geostationary(central_longitude=central_longitude,
satellite_height=satellite_height,
false_easting=false_easting,
false_northing=false_northing, globe=globe,
sweep_axis=sweep_axis)
elif (proj_name == 'nearsideperspective'):
proj = ccrs.NearsidePerspective(central_longitude=central_longitude,
central_latitude=central_latitude,
satellite_height=satellite_height,
false_easting=false_easting,
false_northing=false_northing, globe=globe)
elif (proj_name == 'eckerti'):
proj = ccrs.EckertI(central_longitude=central_longitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe)
elif (proj_name == 'eckertii'):
proj = ccrs.EckertII(central_longitude=central_longitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe)
elif (proj_name == 'eckertiii'):
proj = ccrs.EckertIII(central_longitude=central_longitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe)
elif (proj_name == 'eckertiv'):
proj = ccrs.EckertIV(central_longitude=central_longitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe)
elif (proj_name == 'eckertv'):
proj = ccrs.EckertV(central_longitude=central_longitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe)
elif (proj_name == 'eckertvi'):
proj = ccrs.EckertVI(central_longitude=central_longitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe)
elif (proj_name == 'equalearth'):
proj = ccrs.EqualEarth(central_longitude=central_longitude,
false_easting=false_easting,
false_northing=false_northing, globe=globe)
elif (proj_name == 'gnomonic'):
proj = ccrs.Gnomonic(central_latitude=central_latitude,
central_longitude=central_longitude, globe=globe)
elif (proj_name == 'lambertazimuthalequalarea'):
proj = ccrs.LambertAzimuthalEqualArea(central_longitude=central_longitude,
central_latitude=central_latitude, globe=globe,
false_easting=false_easting,
false_northing=false_northing)
elif (proj_name == 'northpolarstereo'):
proj = ccrs.NorthPolarStereo(central_longitude=central_longitude,
true_scale_latitude=true_scale_latitude, globe=globe)
elif (proj_name == 'osni'):
proj = ccrs.OSNI(approx=approx)
elif (proj_name == 'southpolarstereo'):
proj = ccrs.SouthPolarStereo(central_longitude=central_longitude,
true_scale_latitude=true_scale_latitude, globe=globe)
else:
# This is same as "Geographic coordinates"
proj = ccrs.PlateCarree(central_longitude=central_longitude,
globe=globe)
return proj
# get_map_projection()
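# Illustrative usage (hypothetical centre longitude; unknown projection names fall
# back to PlateCarree, i.e. geographic lon/lat coordinates):
# >>> proj = get_map_projection('robinson', central_longitude=-100.0)
# >>> isinstance(proj, ccrs.Robinson)
# True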
#------------------------------------------------------------------------
def show_grid_as_image( grid, long_name, extent=None,
cmap='rainbow',
stretch_name='hist_equal',
stretch_a = 1.0,
stretch_b = 1.0,
stretch_p = 1.0,
nodata_value=None,
interp_method='nearest',
## crs='platecarree', # for Geographic coordinates
projection='mercator',
NO_SHOW=False, im_file=None,
## stretch='power_stretch3',
xsize=8, ysize=8, dpi=None):
# Note: extent = [minlon, maxlon, minlat, maxlat]
# See get_map_bounds() in balto_gui.py.
#-------------------------
# Other color map names
#--------------------------------------------
# hsv, jet, gist_rainbow (reverse rainbow),
# gist_ncar, gist_stern
#--------------------------------------------
#------------------------------------------
# Replace nodata value before the stretch
#------------------------------------------
grid2 = grid.copy()
if (nodata_value is not None):
w1 = (grid2 == nodata_value)
w2 = np.invert( w1 )
gmin = min(grid2[w2])
grid2[ w1 ] = gmin
#---------------------------------------------
# Apply stretch function to enhance contrast
#---------------------------------------------
# grid2 = stretch_grid( grid, stretch='power_stretch1', p=0.4)
# grid2 = stretch_grid( grid, stretch='power_stretch2', a=1000, b=0.5)
# grid2 = stretch_grid( grid, stretch='power_stretch3', a=1, b=2)
# grid2 = stretch_grid( grid, stretch='log_stretch', a=1)
grid2 = stretch_grid( grid2, stretch=stretch_name, a=stretch_a,
b=stretch_b, p=stretch_p)
#-----------------------------------------------
# Get new min and max, before replacing nodata
#-----------------------------------------------
gmin = grid2.min()
gmax = grid2.max()
#------------------------------------------
# Replace the nodata values after stretch
#------------------------------------------
if (nodata_value is not None):
grid2[ w1 ] = np.nan # 2020-12-11
### grid2[ w1 ] = nodata_value
#-------------------------------------------
# Create figure and axes (without cartopy)
#--------------------------------------------
# balto_crs = None
# fig, ax = plt.subplots( figsize=(xsize, ysize), dpi=dpi )
#----------------------------------------------------------
# balto_crs = None
# balto_proj = None
# fig = plt.figure( figsize=(xsize, ysize), dpi=dpi )
# ax = fig.add_subplot(1,1,1, projection=balto_proj)
## ax.set_extent( extent, crs=balto_crs ) ##### NOT WORKING
#-------------------------
# Set the map projection
#--------------------------------------------
# extent = [minlon, maxlon, minlat, maxlat]
#--------------------------------------------
center_lon = (extent[0] + extent[1]) / 2.0
center_lat = (extent[2] + extent[3]) / 2.0
min_lat = extent[2]
max_lat = extent[3]
print('Extent =', extent )
#-------------------------------------------------------
# See: https://scitools.org.uk/cartopy/docs/latest/
# tutorials/understanding_transform.html
#-------------------------------------------------------
balto_crs = ccrs.PlateCarree() # For Geographic lon/lat coordinates.
projection = projection.lower()
balto_proj = get_map_projection(projection, globe=None,
central_longitude=center_lon,
central_latitude=center_lat,
false_easting=0.0, false_northing=0.0,
standard_parallels=(20.0, 50.0), scale_factor=None,
min_latitude=-80.0, max_latitude=84.0,
true_scale_latitude=None, latitude_true_scale=None, ### BOTH
secant_latitudes=None,
pole_longitude=0.0, pole_latitude=90.0,
central_rotated_longitude=0.0, sweep_axis='y',
satellite_height=35785831, cutoff=-30, approx=None,
southern_hemisphere=False, zone=15) ######
#------------------------------------------------
# Create figure and axes (matplotlib + cartopy)
#------------------------------------------------
# First 3 args to subplot: ncols, nrows, index for the subplots
fig = plt.figure( figsize=(xsize, ysize), dpi=dpi )
ax = fig.add_subplot(1,1,1, projection=balto_proj)
ax.set_extent( extent, crs=balto_crs )
## ax.set_xlim([extent[0], extent[1]])
## ax.set_ylim([extent[2], extent[3]])
GRIDLINES = True
if (GRIDLINES):
## DRAW_LABELS = (projection == 'platecarree') # (unsupported otherwise)
DRAW_LABELS = False # Now done by other means
gl = ax.gridlines(crs=balto_crs, draw_labels=DRAW_LABELS, linewidth=2,
color='gray', alpha=0.5, linestyle='--')
# ax.add_feature(cfeature.COASTLINE) # placed wrong ###########
# ax.add_feature(cfeature.COASTLINE, extent=extent) # no extent keyword
# ax.add_feature(cfeature.BORDERS)
# ax.add_feature(cfeature.RIVERS)
# ax.add_feature(cfeature.LAND) # (raster?)
# ax.add_feature(cfeature.OCEAN) # (raster?)
# ax.add_feature(cfeature.LAKES) # (raster?)
#----------------------------
# Set title and axis labels
#----------------------------
im_title = long_name.replace('_', ' ').title()
ax.set_title( im_title )
ax.set_xlabel('Longitude [deg]')
ax.set_ylabel('Latitude [deg]')
#----------------------------------------------------
# Need next 2 lines as work-around for cartopy bug
# that prevents axis labels from showing.
# https://stackoverflow.com/questions/35479508/
# cartopy-set-xlabel-set-ylabel-not-ticklabels
#----------------------------------------------------
ax.set_xticks(ax.get_xticks())
ax.set_yticks(ax.get_yticks())
## ax.set_xticks(ax.get_xticks()[abs(ax.get_xticks())<=360])
## ax.set_yticks(ax.get_yticks()[abs(ax.get_yticks())<=90])
#------------------------------------
# New code to use cartopy (2020-11)
#------------------------------------
## balto_crs = ccrs.PlateCarree()
## balto_crs = ccrs.PlateCarree(central_longitude=center_lon)
# im = ax.imshow(grid2, interpolation=interp_method, cmap=cmap,
# vmin=gmin, vmax=gmax, extent=extent)
im = ax.imshow(grid2, interpolation=interp_method, cmap=cmap,
vmin=gmin, vmax=gmax, extent=extent, transform=balto_crs)
# im = ax.imshow(grid2, interpolation=interp_method, cmap=cmap,
# vmin=gmin, vmax=gmax, transform=balto_crs)
#----------------------------------
# Old code, before use of cartopy
#----------------------------------
# im = ax.imshow(grid2, interpolation=interp_method, cmap=cmap,
# vmin=gmin, vmax=gmax)
# im = ax.imshow(grid2, interpolation=interp_method, cmap=cmap,
# vmin=gmin, vmax=gmax, extent=extent)
#--------------------------------------------------------
# NOTE! Must save before "showing" or get blank image.
# File format is inferred from extension.
# e.g. TMP_Image.png, TMP_Image.jpg.
#--------------------------------------------------------
if (im_file is not None):
plt.savefig( im_file )
if not(NO_SHOW):
plt.show()
plt.close()
# show_grid_as_image()
#------------------------------------------------------------------------
|
from __future__ import unicode_literals
from django.apps import AppConfig
class HelpmeConfig(AppConfig):
name = 'helpme'
|
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.
from typing import Optional
from .base import TemporaryError, HTTPError
class Proxy:
_obj = None
def set_obj(self, obj):
super().__setattr__('_obj', obj)
def __getattr__(self, name):
return getattr(self._obj, name)
def __setattr__(self, name, value):
return setattr(self._obj, name, value)
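# Illustrative sketch of the Proxy indirection (the _Fake class is hypothetical and not
# part of this module): attribute access is forwarded to whatever set_obj() installed.
# >>> class _Fake: timeout = 20
# >>> p = Proxy(); p.set_obj(_Fake())
# >>> p.timeout
# 20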
session = Proxy()
def setup(
which: Optional[str] = None,
  concurrency: int = 20,
timeout: int = 20,
) -> None:
if which is None:
which = find_best_httplib()
m = __import__(
'%s_httpclient' % which, globals(), locals(), level=1)
session.set_obj(m.session)
  session.setup(concurrency, timeout)
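# Pick the best available HTTP backend: tornado with pycurl (connection reuse,
# HTTP/2), then aiohttp, then httpx, finally plain tornado as a last resort.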
def find_best_httplib() -> str:
try:
import tornado, pycurl
# connection reuse, http/2
which = 'tornado'
except ImportError:
try:
import aiohttp
which = 'aiohttp'
# connection reuse
except ImportError:
try:
import httpx
which = 'httpx'
except ImportError:
import tornado
which = 'tornado'
# fallback
return which
|
import numpy as np
from det3d import torchie
from det3d.datasets.kitti import kitti_common as kitti
from det3d.core.evaluation.bbox_overlaps import bbox_overlaps
from det3d.core.bbox import box_np_ops
from det3d.core.sampler import preprocess as prep
from det3d.builder import (
build_dbsampler,
build_anchor_generator,
build_similarity_metric,
build_box_coder,
)
from det3d.core.input.voxel_generator import VoxelGenerator
from det3d.core.anchor.target_assigner import TargetAssigner
from ..registry import PIPELINES
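# Recursively index every array stored in a (possibly nested) dict in place.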
def _dict_select(dict_, inds):
for k, v in dict_.items():
if isinstance(v, dict):
_dict_select(v, inds)
else:
dict_[k] = v[inds]
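# Point-cloud preprocessing stage: reads annotations, optionally samples extra
# ground-truth objects, applies per-object and global augmentation, and
# selects/shuffles points for training.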
@PIPELINES.register_module
class Preprocess(object):
def __init__(self, cfg=None, **kwargs):
self.remove_environment = cfg.remove_environment
self.shuffle_points = cfg.shuffle_points
self.remove_unknown = cfg.remove_unknown_examples
self.min_points_in_gt = cfg.get("min_points_in_gt", -1)
self.add_rgb_to_points = cfg.get("add_rgb_to_points", False)
self.reference_detections = cfg.get("reference_detections", None)
self.remove_outside_points = cfg.get("remove_outside_points", False)
self.random_crop = cfg.get("random_crop", False)
self.mode = cfg.mode
if self.mode == "train":
self.gt_rotation_noise = cfg.gt_rot_noise
self.gt_loc_noise_std = cfg.gt_loc_noise
self.global_rotation_noise = cfg.global_rot_noise
self.global_scaling_noise = cfg.global_scale_noise
self.global_random_rot_range = cfg.global_rot_per_obj_range
self.global_translate_noise_std = cfg.global_trans_noise
self.gt_points_drop = (cfg.gt_drop_percentage,)
self.remove_points_after_sample = cfg.remove_points_after_sample
self.class_names = cfg.class_names
self.db_sampler = build_dbsampler(cfg.db_sampler)
self.npoints = cfg.get("npoints", -1)
self.random_select = cfg.get("random_select", False)
self.symmetry_intensity = cfg.get("symmetry_intensity", False)
def __call__(self, res, info):
res["mode"] = self.mode
if res["type"] in ["KittiDataset", "LyftDataset"]:
points = res["lidar"]["points"]
elif res["type"] == "NuScenesDataset":
points = res["lidar"]["combined"]
if self.mode == "train":
anno_dict = res["lidar"]["annotations"]
gt_dict = {
"gt_boxes": anno_dict["boxes"],
"gt_names": np.array(anno_dict["names"]).reshape(-1),
}
if "difficulty" not in anno_dict:
difficulty = np.zeros([anno_dict["boxes"].shape[0]], dtype=np.int32)
gt_dict["difficulty"] = difficulty
else:
gt_dict["difficulty"] = anno_dict["difficulty"]
if "calib" in res:
calib = res["calib"]
else:
calib = None
        if self.add_rgb_to_points:
            assert calib is not None and "image" in res
            # NOTE: this branch assumes an imageio-style reader, takes the image
            # root from the dataset metadata and records the extra RGB channels
            # there as well; adjust if your data layout differs.
            import pathlib
            import imageio
            image_path = res["image"]["image_path"]
            root_path = res["metadata"]["image_prefix"]
            image = (
                imageio.imread(str(pathlib.Path(root_path) / image_path)).astype(
                    np.float32
                )
                / 255
            )
            points_rgb = box_np_ops.add_rgb_to_points(
                points, image, calib["rect"], calib["Trv2c"], calib["P2"]
            )
            points = np.concatenate([points, points_rgb], axis=1)
            res["metadata"]["num_point_features"] += 3
        if self.reference_detections is not None:
            assert calib is not None and "image" in res
            C, R, T = box_np_ops.projection_matrix_to_CRT_kitti(calib["P2"])
            frustums = box_np_ops.get_frustum_v2(self.reference_detections, C)
            frustums -= T
            frustums = np.einsum("ij, akj->aki", np.linalg.inv(R), frustums)
            frustums = box_np_ops.camera_to_lidar(frustums, calib["rect"], calib["Trv2c"])
            surfaces = box_np_ops.corner_to_surfaces_3d_jit(frustums)
            # NOTE: points_in_convex_polygon_3d_jit is expected to come from
            # det3d's geometry utilities.
            masks = points_in_convex_polygon_3d_jit(points, surfaces)
            points = points[masks.any(-1)]
        if self.remove_outside_points:
            assert calib is not None
            image_shape = res["image"]["image_shape"]
            points = box_np_ops.remove_outside_points(
                points, calib["rect"], calib["Trv2c"], calib["P2"], image_shape
            )
if self.remove_environment is True and self.mode == "train":
            selected = kitti.keep_arrays_by_name(gt_dict["gt_names"], self.class_names)
_dict_select(gt_dict, selected)
masks = box_np_ops.points_in_rbbox(points, gt_dict["gt_boxes"])
points = points[masks.any(-1)]
if self.mode == "train":
selected = kitti.drop_arrays_by_name(
gt_dict["gt_names"], ["DontCare", "ignore"]
)
_dict_select(gt_dict, selected)
if self.remove_unknown:
remove_mask = gt_dict["difficulty"] == -1
"""
gt_boxes_remove = gt_boxes[remove_mask]
gt_boxes_remove[:, 3:6] += 0.25
points = prep.remove_points_in_boxes(points, gt_boxes_remove)
"""
keep_mask = np.logical_not(remove_mask)
_dict_select(gt_dict, keep_mask)
gt_dict.pop("difficulty")
if self.min_points_in_gt > 0:
# points_count_rbbox takes 10ms with 10 sweeps nuscenes data
point_counts = box_np_ops.points_count_rbbox(
points, gt_dict["gt_boxes"]
)
                mask = point_counts >= self.min_points_in_gt
_dict_select(gt_dict, mask)
gt_boxes_mask = np.array(
[n in self.class_names for n in gt_dict["gt_names"]], dtype=np.bool_
)
if self.db_sampler:
sampled_dict = self.db_sampler.sample_all(
res["metadata"]["image_prefix"],
gt_dict["gt_boxes"],
gt_dict["gt_names"],
res["metadata"]["num_point_features"],
self.random_crop,
gt_group_ids=None,
calib=calib,
)
if sampled_dict is not None:
sampled_gt_names = sampled_dict["gt_names"]
sampled_gt_boxes = sampled_dict["gt_boxes"]
sampled_points = sampled_dict["points"]
sampled_gt_masks = sampled_dict["gt_masks"]
gt_dict["gt_names"] = np.concatenate(
[gt_dict["gt_names"], sampled_gt_names], axis=0
)
gt_dict["gt_boxes"] = np.concatenate(
[gt_dict["gt_boxes"], sampled_gt_boxes]
)
gt_boxes_mask = np.concatenate(
[gt_boxes_mask, sampled_gt_masks], axis=0
)
if self.remove_points_after_sample:
masks = box_np_ops.points_in_rbbox(points, sampled_gt_boxes)
points = points[np.logical_not(masks.any(-1))]
points = np.concatenate([sampled_points, points], axis=0)
prep.noise_per_object_v3_(
gt_dict["gt_boxes"],
points,
gt_boxes_mask,
rotation_perturb=self.gt_rotation_noise,
center_noise_std=self.gt_loc_noise_std,
global_random_rot_range=self.global_random_rot_range,
group_ids=None,
num_try=100,
)
_dict_select(gt_dict, gt_boxes_mask)
gt_classes = np.array(
[self.class_names.index(n) + 1 for n in gt_dict["gt_names"]],
dtype=np.int32,
)
gt_dict["gt_classes"] = gt_classes
gt_dict["gt_boxes"], points = prep.random_flip(gt_dict["gt_boxes"], points)
gt_dict["gt_boxes"], points = prep.global_rotation(
gt_dict["gt_boxes"], points, rotation=self.global_rotation_noise
)
gt_dict["gt_boxes"], points = prep.global_scaling_v2(
gt_dict["gt_boxes"], points, *self.global_scaling_noise
)
if self.shuffle_points:
# shuffle is a little slow.
np.random.shuffle(points)
if self.mode == "train" and self.random_select:
if self.npoints < points.shape[0]:
pts_depth = points[:, 2]
pts_near_flag = pts_depth < 40.0
far_idxs_choice = np.where(pts_near_flag == 0)[0]
near_idxs = np.where(pts_near_flag == 1)[0]
near_idxs_choice = np.random.choice(
near_idxs, self.npoints - len(far_idxs_choice), replace=False
)
choice = (
np.concatenate((near_idxs_choice, far_idxs_choice), axis=0)
if len(far_idxs_choice) > 0
else near_idxs_choice
)
np.random.shuffle(choice)
else:
choice = np.arange(0, len(points), dtype=np.int32)
if self.npoints > len(points):
extra_choice = np.random.choice(
choice, self.npoints - len(points), replace=False
)
choice = np.concatenate((choice, extra_choice), axis=0)
np.random.shuffle(choice)
points = points[choice]
if self.symmetry_intensity:
points[:, -1] -= 0.5 # translate intensity to [-0.5, 0.5]
# points[:, -1] *= 2
res["lidar"]["points"] = points
if self.mode == "train":
res["lidar"]["annotations"] = gt_dict
return res, info
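# Voxelization stage: converts the (augmented) point cloud into fixed-size
# voxels with the configured VoxelGenerator.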
@PIPELINES.register_module
class Voxelization(object):
def __init__(self, **kwargs):
cfg = kwargs.get("cfg", None)
self.range = cfg.range
self.voxel_size = cfg.voxel_size
self.max_points_in_voxel = cfg.max_points_in_voxel
self.max_voxel_num = cfg.max_voxel_num
self.voxel_generator = VoxelGenerator(
voxel_size=self.voxel_size,
point_cloud_range=self.range,
max_num_points=self.max_points_in_voxel,
max_voxels=self.max_voxel_num,
)
def __call__(self, res, info):
# [0, -40, -3, 70.4, 40, 1]
voxel_size = self.voxel_generator.voxel_size
pc_range = self.voxel_generator.point_cloud_range
grid_size = self.voxel_generator.grid_size
# [352, 400]
if res["mode"] == "train":
gt_dict = res["lidar"]["annotations"]
bv_range = pc_range[[0, 1, 3, 4]]
mask = prep.filter_gt_box_outside_range(gt_dict["gt_boxes"], bv_range)
_dict_select(gt_dict, mask)
res["lidar"]["annotations"] = gt_dict
# points = points[:int(points.shape[0] * 0.1), :]
voxels, coordinates, num_points = self.voxel_generator.generate(
res["lidar"]["points"]
)
num_voxels = np.array([voxels.shape[0]], dtype=np.int64)
res["lidar"]["voxels"] = dict(
voxels=voxels,
coordinates=coordinates,
num_points=num_points,
num_voxels=num_voxels,
shape=grid_size,
)
return res, info
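# Target assignment stage: generates per-task anchors and assigns
# classification / regression targets to them during training.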
@PIPELINES.register_module
class AssignTarget(object):
def __init__(self, **kwargs):
assigner_cfg = kwargs["cfg"]
target_assigner_config = assigner_cfg.target_assigner
tasks = target_assigner_config.tasks
box_coder_cfg = assigner_cfg.box_coder
anchor_cfg = target_assigner_config.anchor_generators
anchor_generators = []
for a_cfg in anchor_cfg:
anchor_generator = build_anchor_generator(a_cfg)
anchor_generators.append(anchor_generator)
similarity_calc = build_similarity_metric(
target_assigner_config.region_similarity_calculator
)
positive_fraction = target_assigner_config.sample_positive_fraction
if positive_fraction < 0:
positive_fraction = None
target_assigners = []
flag = 0
box_coder = build_box_coder(box_coder_cfg)
for task in tasks:
target_assigner = TargetAssigner(
box_coder=box_coder,
anchor_generators=anchor_generators[flag : flag + task.num_class],
region_similarity_calculator=similarity_calc,
positive_fraction=positive_fraction,
sample_size=target_assigner_config.sample_size,
)
flag += task.num_class
target_assigners.append(target_assigner)
self.target_assigners = target_assigners
self.out_size_factor = assigner_cfg.out_size_factor
self.anchor_area_threshold = target_assigner_config.pos_area_threshold
def __call__(self, res, info):
class_names_by_task = [t.classes for t in self.target_assigners]
# Calculate output featuremap size
grid_size = res["lidar"]["voxels"]["shape"]
feature_map_size = grid_size[:2] // self.out_size_factor
feature_map_size = [*feature_map_size, 1][::-1]
anchors_by_task = [
t.generate_anchors(feature_map_size) for t in self.target_assigners
]
anchor_dicts_by_task = [
t.generate_anchors_dict(feature_map_size) for t in self.target_assigners
]
reshaped_anchors_by_task = [
t["anchors"].reshape([-1, t["anchors"].shape[-1]]) for t in anchors_by_task
]
matched_by_task = [t["matched_thresholds"] for t in anchors_by_task]
unmatched_by_task = [t["unmatched_thresholds"] for t in anchors_by_task]
bv_anchors_by_task = [
box_np_ops.rbbox2d_to_near_bbox(anchors[:, [0, 1, 3, 4, -1]])
for anchors in reshaped_anchors_by_task
]
anchor_caches_by_task = dict(
anchors=reshaped_anchors_by_task,
anchors_bv=bv_anchors_by_task,
matched_thresholds=matched_by_task,
unmatched_thresholds=unmatched_by_task,
anchors_dict=anchor_dicts_by_task,
)
if res["mode"] == "train":
gt_dict = res["lidar"]["annotations"]
task_masks = []
flag = 0
for class_name in class_names_by_task:
task_masks.append(
[
np.where(
gt_dict["gt_classes"] == class_name.index(i) + 1 + flag
)
for i in class_name
]
)
flag += len(class_name)
task_boxes = []
task_classes = []
task_names = []
flag2 = 0
for idx, mask in enumerate(task_masks):
task_box = []
task_class = []
task_name = []
for m in mask:
task_box.append(gt_dict["gt_boxes"][m])
task_class.append(gt_dict["gt_classes"][m] - flag2)
task_name.append(gt_dict["gt_names"][m])
task_boxes.append(np.concatenate(task_box, axis=0))
task_classes.append(np.concatenate(task_class))
task_names.append(np.concatenate(task_name))
flag2 += len(mask)
for task_box in task_boxes:
# limit rad to [-pi, pi]
task_box[:, -1] = box_np_ops.limit_period(
task_box[:, -1], offset=0.5, period=np.pi * 2
)
# print(gt_dict.keys())
gt_dict["gt_classes"] = task_classes
gt_dict["gt_names"] = task_names
gt_dict["gt_boxes"] = task_boxes
res["lidar"]["annotations"] = gt_dict
anchorss = anchor_caches_by_task["anchors"]
anchors_bvs = anchor_caches_by_task["anchors_bv"]
anchors_dicts = anchor_caches_by_task["anchors_dict"]
example = {}
example["anchors"] = anchorss
if self.anchor_area_threshold >= 0:
example["anchors_mask"] = []
for idx, anchors_bv in enumerate(anchors_bvs):
anchors_mask = None
# slow with high resolution. recommend disable this forever.
                coors = res["lidar"]["voxels"]["coordinates"]
dense_voxel_map = box_np_ops.sparse_sum_for_anchors_mask(
coors, tuple(grid_size[::-1][1:])
)
dense_voxel_map = dense_voxel_map.cumsum(0)
dense_voxel_map = dense_voxel_map.cumsum(1)
                anchors_area = box_np_ops.fused_get_anchors_area(
                    dense_voxel_map,
                    anchors_bv,
                    res["lidar"]["voxels"]["voxel_size"],
                    res["lidar"]["voxels"]["range"],
                    grid_size,
                )
                anchors_mask = anchors_area > self.anchor_area_threshold
example["anchors_mask"].append(anchors_mask)
if res["mode"] == "train":
targets_dicts = []
for idx, target_assigner in enumerate(self.target_assigners):
if "anchors_mask" in example:
anchors_mask = example["anchors_mask"][idx]
else:
anchors_mask = None
targets_dict = target_assigner.assign_v2(
anchors_dicts[idx],
gt_dict["gt_boxes"][idx],
anchors_mask,
gt_classes=gt_dict["gt_classes"][idx],
gt_names=gt_dict["gt_names"][idx],
)
targets_dicts.append(targets_dict)
example.update(
{
"labels": [
targets_dict["labels"] for targets_dict in targets_dicts
],
"reg_targets": [
targets_dict["bbox_targets"] for targets_dict in targets_dicts
],
"reg_weights": [
targets_dict["bbox_outside_weights"]
for targets_dict in targets_dicts
],
}
)
res["lidar"]["targets"] = example
return res, info
|
import numpy as np
import simtk.unit as unit
from .utils.targets import to_singular as singular_target
from .utils.indices import intersection_indices
# Classes
from .forms.classes import dict_is_form as dict_classes_is_form, \
dict_new as dict_classes_new, \
dict_add_microstate as dict_classes_add_microstate, \
dict_add_transition as dict_classes_add_transition, \
dict_remove_microstate as dict_classes_remove_microstate, \
dict_remove_transition as dict_classes_remove_transition, \
dict_microstate_is_in as dict_classes_microstate_is_in, \
dict_transition_is_in as dict_classes_transition_is_in, \
dict_update_weights as dict_classes_update_weights, \
dict_update_probabilities as dict_classes_update_probabilities, \
dict_symmetrize as dict_classes_symmetrize, \
dict_select as dict_classes_select, \
dict_get as dict_classes_get, \
dict_microstate_name_to_index as dict_classes_microstate_name_to_index, \
dict_transition_index as dict_classes_transition_index, \
dict_transitions_in as dict_classes_transitions_in, \
dict_transitions_out as dict_classes_transitions_out
# Files
from .forms.files import dict_is_form as dict_files_is_form, \
dict_new as dict_files_new, \
dict_add_microstate as dict_files_add_microstate, \
dict_add_transition as dict_files_add_transition, \
dict_remove_microstate as dict_files_remove_microstate, \
dict_remove_transition as dict_files_remove_transition, \
dict_microstate_is_in as dict_files_microstate_is_in, \
dict_transition_is_in as dict_files_transition_is_in, \
dict_update_weights as dict_files_update_weights, \
dict_update_probabilities as dict_files_update_probabilities, \
dict_symmetrize as dict_files_symmetrize, \
dict_select as dict_files_select, \
dict_get as dict_files_get, \
dict_microstate_name_to_index as dict_files_microstate_name_to_index, \
dict_transition_index as dict_files_transition_index, \
dict_transitions_in as dict_files_transitions_in, \
dict_transitions_out as dict_files_transitions_out
dict_is_form = {**dict_classes_is_form, **dict_files_is_form}
dict_new = {**dict_classes_new, **dict_files_new}
dict_add_microstate = {**dict_classes_add_microstate, **dict_files_add_microstate}
dict_add_transition = {**dict_classes_add_transition, **dict_files_add_transition}
dict_remove_microstate = {**dict_classes_remove_microstate, **dict_files_remove_microstate}
dict_remove_transition = {**dict_classes_remove_transition, **dict_files_remove_transition}
dict_microstate_is_in = {**dict_classes_microstate_is_in, **dict_files_microstate_is_in}
dict_transition_is_in = {**dict_classes_transition_is_in, **dict_files_transition_is_in}
dict_update_weights = {**dict_classes_update_weights, **dict_files_update_weights}
dict_update_probabilities = {**dict_classes_update_probabilities, **dict_files_update_probabilities}
dict_symmetrize = {**dict_classes_symmetrize, **dict_files_symmetrize}
dict_select = {**dict_classes_select, **dict_files_select}
dict_get = {**dict_classes_get, **dict_files_get}
dict_microstate_name_to_index = {**dict_classes_microstate_name_to_index, **dict_files_microstate_name_to_index}
dict_transition_index = {**dict_classes_transition_index, **dict_files_transition_index}
dict_transitions_out = {**dict_classes_transitions_out, **dict_files_transitions_out}
dict_transitions_in = {**dict_classes_transitions_in, **dict_files_transitions_in}
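# Resolve the form key for a KTN, accepting either an instance (looked up by
# type) or a form string, so the dispatch tables above can be used.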
def get_form(ktn):
    try:
        return dict_is_form[type(ktn)]
    except (KeyError, TypeError):
        try:
            return dict_is_form[ktn]
        except (KeyError, TypeError):
            raise NotImplementedError("This KTN's form has not been implemented yet")
def kinetic_transition_network(form='openktn.KineticTransitionNetwork', temperature=0.0*unit.kelvin, time_step=0.0*unit.nanoseconds):
tmp_ktn = dict_new[form](temperature=temperature, time_step=time_step)
return tmp_ktn
def add_microstate(ktn, name=None):
form = get_form(ktn)
return dict_add_microstate[form](ktn, name=name)
def remove_microstate(ktn, selection=None):
form = get_form(ktn)
    return dict_remove_microstate[form](ktn, selection=selection)
def add_transition(ktn, origin, end, weight=1.0, origin_index=False):
form = get_form(ktn)
return dict_add_transition[form](ktn, origin, end, weight=weight)
def microstate_is_in(ktn, name):
form = get_form(ktn)
return dict_microstate_is_in[form](ktn, name)
def transition_is_in(ktn, origin, end):
form = get_form(ktn)
return dict_transition_is_in[form](ktn, origin, end)
def transitions_out(ktn, name):
form = get_form(ktn)
return dict_transitions_out[form](ktn, name=name)
def transitions_in(ktn, name):
form = get_form(ktn)
    return dict_transitions_in[form](ktn, name=name)
def update_weights(ktn):
form = get_form(ktn)
return dict_update_weights[form](ktn)
def update_probabilities(ktn):
form = get_form(ktn)
return dict_update_probabilities[form](ktn)
def select(ktn, selection='all', target='microstate', mask=None):
form = get_form(ktn)
if type(selection)==str:
if selection in ['all', 'All', 'ALL']:
n_microstates = dict_get[form]['network']['n_microstates'](ktn)
microstate_indices = np.arange(n_microstates, dtype='int64')
else:
microstate_indices = dict_selector[form](ktn, selection)
    elif isinstance(selection, (int, np.integer)):
microstate_indices = np.array([selection], dtype='int64')
elif hasattr(selection, '__iter__'):
microstate_indices = np.array(selection, dtype='int64')
else :
microstate_indices = None
output_indices = None
if target=='microstate':
output_indices = microstate_indices
elif target=='component':
        output_indices = get(ktn, target='microstate', indices=microstate_indices, component_index=True)
        output_indices = np.unique(output_indices)
    elif target=='basin':
        output_indices = get(ktn, target='microstate', indices=microstate_indices, basin_index=True)
        output_indices = np.unique(output_indices)
    elif target=='transition':
        output_indices = get(ktn, target='microstate', indices=microstate_indices, inner_transition_index=True)
if mask is not None:
output_indices = intersection_indices(output_indices,mask)
return output_indices
def get(ktn, target='microstate', selection='all', indices=None, **kwargs):
form = get_form(ktn)
target = singular_target(target)
attributes = [ key for key in kwargs.keys() if kwargs[key] ]
if type(indices)==str:
if indices in ['all', 'All', 'ALL']:
indices = 'all'
else:
raise ValueError()
    elif isinstance(indices, (int, np.integer)):
indices = np.array([indices], dtype='int64')
elif hasattr(indices, '__iter__'):
indices = np.array(indices, dtype='int64')
if indices is None:
        if selection != 'all':
indices = select(ktn, target=target, selection=selection)
else:
indices = 'all'
results = []
for attribute in attributes:
result = dict_get[form][target][attribute](ktn, indices=indices)
results.append(result)
if len(results)==1:
return results[0]
else:
return results
def symmetrize(ktn):
form = get_form(ktn)
return dict_symmetrize[form](ktn)
def info(ktn, target='network', selection='all', output='dataframe'):
if output=='dataframe':
from pandas import DataFrame as df
form = get_form(ktn)
target = singular_target(target)
if target=='microstate':
if get(ktn, target='network', symmetrized=True):
index, name, weight, probability, degree, component_index, basin_index = get(ktn, target=target,
microstate_index=True, microstate_name=True, weight=True, probability=True, degree=True,
component_index=True, basin_index=True)
tmp_df = df({'index':index, 'name':name, 'weight':weight, 'probability':probability,
'degree':degree, 'component_index':component_index, 'basin_index':basin_index})
else:
index, name, weight, probability, out_degree, in_degree, component_index, basin_index = get(ktn, target=target,
microstate_index=True, microstate_name=True, weight=True, probability=True, out_degree=True, in_degree=True,
component_index=True, basin_index=True)
tmp_df = df({'index':index, 'name':name, 'weight':weight, 'probability':probability,
'out_degree':out_degree, 'in_degree':in_degree, 'component_index':component_index,
'basin_index':basin_index})
n_components, n_basins = get(ktn, target='network', n_components=True, n_basins=True)
if n_components==0: tmp_df.drop(columns=['component_index'], inplace=True)
if n_basins==0: tmp_df.drop(columns=['basin_index'], inplace=True)
return tmp_df.style.hide_index()
elif target=='transition':
index, origin_index, end_index, weight, probability, symmetrized = get(ktn, target=target,
transition_index=True, origin_index=True, end_index=True,
transition_weight=True, transition_probability=True, symmetrized=True)
tmp_df = df({'index':index, 'origin_index':origin_index, 'end_index':end_index, 'weight':weight,
'probability':probability, 'symmetrized':symmetrized})
return tmp_df.style.hide_index()
elif target=='component':
raise NotImplementedError
elif target=='basin':
raise NotImplementedError
elif target=='network':
form, n_microstates, n_transitions, n_components, n_basins, weight, symmetrized, temperature, time_step = get(ktn, target=target,
form=True, n_microstates=True, n_transitions=True, n_components=True, n_basins=True,
weight=True, symmetrized=True, temperature=True, time_step=True)
tmp_df = df({'form':form, 'n_microstates':n_microstates, 'n_transitions':n_transitions, 'n_components':n_components,
'n_basins':n_basins, 'weight':weight, 'symmetrized':symmetrized,
'temperature':temperature, 'time_step':time_step}, index=[0])
if n_components==0: tmp_df.drop(columns=['n_components'], inplace=True)
if n_basins==0: tmp_df.drop(columns=['n_basins'], inplace=True)
return tmp_df.style.hide_index()
else:
raise ValueError('"target" needs one of the following strings: "network", "microstate",\
"transition", "component", "basin"')
|
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import os.path
from tornado.options import define, options
define("port", default=8888, help="run on the given port", type=int)
SCHEMA_PATH = os.path.split(os.path.abspath(__file__))[0]
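# Serves /<ns>/<file>.json from the local schema directory with a long
# client-side cache lifetime.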
class SchemaHandler(tornado.web.RequestHandler):
cache_max_age = 604800 # 7days
def get(self, ns, file_name):
file_path = os.path.join(SCHEMA_PATH, ns, file_name)
# if no file, raise 404 error
file_path += '.json'
self.set_header("Content-Type", "application/json; charset=UTF-8")
self.set_header("Cache-Control", "max-age={}, public".format(self.cache_max_age))
if os.path.exists(file_path):
with open(file_path, 'r') as json_context:
self.write(json_context.read())
else:
raise tornado.web.HTTPError(404)
APP_LIST = [
(r"/(.+)/(.+)", SchemaHandler)
]
def main():
tornado.options.parse_command_line()
application = tornado.web.Application(APP_LIST)
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(options.port)
tornado.ioloop.IOLoop.current().start()
if __name__ == "__main__":
main()
|
import pandas as pd
import sys
sys.path.append('../')
from src.models.svd import mySVD
import surprise
# set testing cases
targetSongidList = ['SOAKIMP12A8C130995','SOBBMDR12A8C13253B','SOBXHDL12A81C204C0','SOBYHAJ12A6701BF1D','SODACBL12A8C13C273']
# create svd object
svd = mySVD()
# create testdata and transform into the required format from the recommender package
newObs = svd.createNewObs(targetSongidList)
testset = svd.testGenerator(newObs)
# transform into the required format from the recommender package
trainset = svd.trainGenerator(svd.song_df, newObs)
# fit model
algo_svd = svd.fitModel(trainset)
# make final recommendation
user_recommend = svd.predictTopSong(algo_svd, testset, targetSongidList)
def test_readSongData():
"""Test method readSongData from class mySVD"""
# make sure the number of columns pull out from the database is correct
assert svd.song_df.shape[1] == 8
def test_createNewObs():
"""Test method createNewObs from class mySVD"""
# check type
assert isinstance(newObs, pd.DataFrame)
# check if there are exactly 5 songs has listen count and 3 columns in the generated df
assert newObs.query("listen_count!=0").shape == (5,3)
# check if the number of songs in the song_df is the same as the generated new df
assert len(svd.song_df.song_id.unique()) == len(newObs.song_id.unique())
def test_testGenerator():
"""Test method testGenerator from class mySVD"""
# check type
assert isinstance(testset, list)
# check the shape
assert len(testset)==newObs.shape[0]
def test_trainGenerator():
"""Test method trainGenerator from class mySVD"""
# check type
assert isinstance(trainset, surprise.trainset.Trainset)
# the number of users in trainset should be equal to the user from database plus 1
assert len(trainset.all_users()) == len(svd.song_df.user_id.unique())+1
def test_fitModel():
"""Test method fitModel from class mySVD"""
# check type
assert isinstance(algo_svd, surprise.prediction_algorithms.matrix_factorization.SVD)
def test_predictTopSong():
"""Test method predictTopSong from class mySVD"""
user_recommend = svd.predictTopSong(algo_svd, testset, targetSongidList)
# check type
assert isinstance(user_recommend, pd.DataFrame)
# check shape
assert user_recommend.shape == (10, 6)
# check sorted
assert user_recommend.loc[0]['score'] == max(user_recommend.score)
|
# -*- coding: utf-8 -*-
import logging
from .version import __version__
from .core import TTrackerSession, how_much_hours, report_activities
from .main import main
from totra.output_format import format_activities, save_output
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
"""
Input format
The first line contains the number N, the number of fans;
the second line contains the number M, the number of
acquaintances between fans.
Then follow M pairs of numbers from 0 to N-1, separated by
spaces: each pair denotes a mutual acquaintance between fans.
Then follows the number K, the number of car types, followed by
K pairs of the form (car capacity, number of such cars). The
pairs are guaranteed to be unique.
1 <= N <= 1000
0 <= K <= 1000
Output format
The output must be the number 1 if the taxi company can
transport the fans, and 0 if it cannot.
"""
|
#!/usr/bin/python3
#
# Copyright 2019 LiveSite authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import http.server as http_server
import json
import os
import re
import sys
import threading
import time
import requests
SCALE = 60
STRIDE = 30
URL_PREFIX = 'http://localhost:5002'
STANDINGS_JSON_RE = re.compile(r'^standings\.(\d+)\.json$')
class Handler(http_server.SimpleHTTPRequestHandler):
def send_response(self, code, message=None):
super().send_response(code, message)
self.send_header('Access-Control-Allow-Origin', '*')
def run_server():
server = http_server.HTTPServer(('localhost', 5002), Handler)
server.serve_forever()
def main(argv):
basedir, = argv[1:]
os.chdir(basedir)
threading.Thread(target=run_server, daemon=True).start()
time.sleep(1)
session = requests.Session()
with open('contest.json', 'r') as f:
contest = json.load(f)
times = contest['times']
start_time = times['start']
init_time = int(time.time())
new_start_time = init_time + 3
new_end_time = (times['end'] - times['start']) // SCALE + new_start_time
new_freeze_time = (times['freeze'] - times['start']) // SCALE + new_start_time
new_times = {
'start': new_start_time,
'end': new_end_time,
'freeze': new_freeze_time,
'scale': SCALE,
}
contest['times'] = new_times
contest_json_name = 'contest.%d.json' % init_time
with open(contest_json_name, 'w') as f:
json.dump(contest, f, indent=2, sort_keys=True)
print(contest_json_name, file=sys.stderr)
session.put('http://localhost:9000/feeds/contest.json?ns=fake-server',
json=('%s/%s' % (URL_PREFIX, contest_json_name)))
print('teams.json', file=sys.stderr)
session.put('http://localhost:9000/feeds/teams.json?ns=fake-server',
json=('%s/%s' % (URL_PREFIX, 'teams.json')))
standings_series = []
for name in os.listdir('.'):
m = STANDINGS_JSON_RE.search(name)
if not m:
continue
ts = int(m.group(1))
if ts < start_time:
continue
standings_series.append((ts, name))
standings_series.sort()
last_standings = standings_series[-1]
standings_series = standings_series[0::STRIDE]
if standings_series[-1] != last_standings:
standings_series.append(last_standings)
print(standings_series[0][1], file=sys.stderr)
session.put('http://localhost:9000/feeds/standings.json?ns=fake-server',
json=('%s/%s' % (URL_PREFIX, standings_series[0][1])))
for record_time, name in standings_series[1:]:
new_play_time = (record_time - start_time) / SCALE + new_start_time
delta_time = new_play_time - time.time()
if delta_time > 0:
time.sleep(delta_time)
else:
print('warning: behind', file=sys.stderr)
print(name, file=sys.stderr)
done = False
while not done:
try:
session.put('http://localhost:9000/feeds/standings.json?ns=fake-server',
json=('%s/%s' % (URL_PREFIX, name)))
except Exception:
print('warning: retry', file=sys.stderr)
else:
done = True
print('finished')
time.sleep(10000000)
if __name__ == '__main__':
main(sys.argv)
|
def number_of_occurrences(element: int, sample: list) -> int:
return sample.count(element)
|
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 25 13:07:56 2016
@author: likui
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import roc_curve, auc
from sklearn.metrics import precision_recall_curve
from sklearn.metrics import average_precision_score
from roc import *
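# Parse the two-line-per-record sequence file, mark the hard-coded amyloid
# regions per sequence, and turn each sequence into overlapping 6-residue
# windows with window-level labels (positives are spread to nearby windows).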
def seg(filename):
file = open(filename,"r")
lines1 = file.readlines()
file.close()
amino1=[]
target=[]
for i in range(1,69,2):
amino1.append(lines1[i].strip('\n'))
for i in range(34):
target.append(np.zeros((1,len(amino1[i]))))
target[0][0,16:32]=1
target[0][0,87:99]=1
target[1][0,12:28]=1
target[2][0,7:20]=1
target[2][0,13:21]=1
target[2][0,19:29]=1
target[2][0,20:30]=1
target[2][0,29:37]=1
target[3][0,0:93]=1
target[4][0,59:70]=1
target[5][0,10:25]=1
target[5][0,24:35]=1
target[5][0,29:40]=1
target[5][0,36:42]=1
target[6][0,20:31]=1
target[6][0,32:41]=1
target[6][0,58:71]=1
target[6][0,82:89]=1
target[6][0,90:96]=1
target[7][0,10:20]=1
target[7][0,100:110]=1
target[7][0,115:126]=1
target[7][0,145:152]=1
target[8][0,14:20]=1
target[9][0,80:125]=1
target[10][0,0:35]=1
target[10][0,35:67]=1
target[11][0,97:103]=1
target[12][0,0:29]=1
target[12][0,100:118]=1
target[13][0,172:230]=1
target[14][0,217:289]=1
target[15][0,12:18]=1
target[16][0,10:17]=1
target[17][0,491:509]=1
target[18][0,537:545]=1
target[19][0,8:34]=1
target[20][0,4:14]=1
target[20][0,24:34]=1
target[20][0,55:61]=1
target[21][0,83:105]=1
target[21][0,104:125]=1
target[21][0,147:153]=1
target[21][0,153:163]=1
target[21][0,155:171]=1
target[21][0,179:196]=1
target[21][0,208:231]=1
target[22][0,31:41]=1
target[22][0,41:50]=1
target[23][0,65:72]=1
target[24][0,111:157]=1
target[25][0,6:21]=1
target[25][0,19:34]=1
target[25][0,42:57]=1
target[26][0,23:32]=1
target[27][0,0:142]=1
    target[28][0,0:12]=1
target[29][0,5:12]=1
target[30][0,34:44]=1
target[30][0,48:59]=1
target[30][0,59:68]=1
target[30][0,68:82]=1
target[30][0,85:95]=1
target[31][0,588:600]=1
target[32][0,9:20]=1
target[32][0,104:115]=1
target[33][0,0:88]=1
labels=[]
proteins=[]
for i in range(len(amino1)):
qw=[]
w1=[]
for jj in range(len(amino1[i])-6):
aa=amino1[i][jj:jj+6] #+kmer(seq[i][yy:yy+7])
qw.append(aa)
bb=sum(target[i][0,jj:jj+6])
w1.append(bb)
indexes = [ii for ii,x in enumerate(w1) if x == 5]
# w1=np.array(w1)
for ik in indexes:
            for ij in range(-8, 0):
                left = ik + ij   # neighbouring window to the left of ik
                right = ik - ij  # neighbouring window to the right of ik
                if left >= 0:
                    w1[left] = 1.0
                if right < len(w1):
                    w1[right] = 1.0
for iii in range(len(w1)):
if w1[iii]>1.0:
w1[iii]=1.0
proteins.append(qw)
labels.append(w1)
# import pdb;pdb.set_trace()
return labels, proteins
def read(filename):
file=open(filename,'r')
lines=file.readlines()
file.close()
s=[]
for i in range(len(lines)):
s.append(float(lines[i].strip('\n')))
return s
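# Align the flat score list with each protein's window labels, propagate the
# maximum score across each labelled region, and flatten scores/labels for
# the ROC computation.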
def compute(score,labels):
sc=[]
for i in range(33):
aq=[]
aw=[]
ww=[]
for jj in range(len(labels[i])):
aa=score[i+jj]
ww.append(aa)
if labels[i][jj]==1:
aq.append(jj)
aw.append(aa)
else:
pass
for ie in (range(len(aq))):
ww[aq[ie]]=max(aw)
sc.append(ww)
sco=[]
lab=[]
for i in range(33):
for k in range(len(sc[i])):
sco.append(sc[i][k])
lab.append(labels[i][k])
return sco,lab
if __name__ == '__main__':
labels,pro=seg('S333.txt')
aa=[]
for i in [('linerasvm.txt',':','o'),('linearmil24.txt','--','2'),('ampmil.txt','-.','*'),('me.txt','-','>'),('aggre.txt',':','v'),('mett2.txt','--','4')]:
s=read(i[0])
sco,lab=compute(s,labels)
fpr, tpr, thresholds = roc_curve(lab,sco) # plotting ROC
a=auc(fpr,tpr)
aa.append(a)
# print a
plt.plot(fpr,tpr, marker=i[2],linestyle=i[1])
# plt.figure()
plt.xlabel('Fpr')
plt.ylabel('Tpr')
plt.xlim([-0.05,0.4])
plt.grid()
plt.legend(['Linear SVM:'+str(round(aa[0],3)*100),'MIL:'+str(round(aa[1],3)*100),'MIL-Rank:'+str(round(aa[2],3)*100),'MetAmyl:'+str(round(aa[3],3)*100),'Aggrescan:'+str(round(aa[4],3)*100),'APPNN:'+str(round(aa[5],3)*100)],loc=4)
#plt.legend([', auc='+str(round(a[0],3)),'APPNN, auc='+str(round(a[1],3)),'Aggrescan, auc='+str(round(a[2],3)),'MetAmyl, auc='+str(round(a[3],3)),'Linear MIL, train ds1+ds2, auc='+str(round(a[4],3)),'LLCMIL, train ds1+ds2, auc='+str(round(a[5],3)),'LLCMIL5cv, auc='+str(round(a[6],3))],loc=4)
#
##
#with open('S33w.fasta','w')as f:
# for i in range(len(pro)-1):
# for j in range(len(pro[i])):
# f.write('>tro|'+str(i)+"|"+str(labels[i][j]))
# f.write('\n')
# f.write(str(pro[i][j]))
#
# f.write("\n")
#
#f.close()
##
##
#
#
|