| max_stars_repo_path (string, 3-269 chars) | max_stars_repo_name (string, 4-119 chars) | max_stars_count (int64, 0-191k) | id (string, 1-7 chars) | content (string, 6-1.05M chars) | score (float64, 0.23-5.13) | int_score (int64, 0-5) |
|---|---|---|---|---|---|---|
saefportal/users/migrations/0024_auto_20210924_1403.py
|
harry-consulting/SAEF1
| 0
|
12779351
|
# Generated by Django 3.1.6 on 2021-09-24 14:03
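# Swaps the single permission_level field for four explicit boolean flags.
# preserve_default=False means the default=True below is applied once to
# existing rows during the migration and not kept as the model-level default.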
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0023_auto_20210924_1400'),
]
operations = [
migrations.RemoveField(
model_name='objectpermission',
name='permission_level',
),
migrations.AddField(
model_name='objectpermission',
name='can_delete',
field=models.BooleanField(default=True),
preserve_default=False,
),
migrations.AddField(
model_name='objectpermission',
name='can_execute',
field=models.BooleanField(default=True),
preserve_default=False,
),
migrations.AddField(
model_name='objectpermission',
name='can_update',
field=models.BooleanField(default=True),
preserve_default=False,
),
migrations.AddField(
model_name='objectpermission',
name='can_view',
field=models.BooleanField(default=True),
preserve_default=False,
),
]
| 1.648438
| 2
|
source/code/testing/elbv2.py
|
mobri2a/aws-ops-automator
| 94
|
12779352
|
######################################################################################################################
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except in compliance #
# with the License. A copy of the License is located at #
# #
# http://www.apache.org/licenses/ #
# #
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES #
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions #
# and limitations under the License. #
######################################################################################################################
import boto3
import services.elbv2_service
class ElbV2(object):
def __init__(self, region=None, session=None):
self.region = region if region is not None else boto3.Session().region_name
self.session = session if session is not None else boto3.Session(region_name=self.region)
self.elbv2_client = self.session.client("elbv2", region_name=self.region)
self.elbv2_service = services.elbv2_service.Elbv2Service(session=self.session)
def register_instance(self, target_group_arn, instance_id, port=None, availability_zone=None):
target = {
"Id": instance_id
}
if port is not None:
target["Port"] = port
if availability_zone is not None:
target["AvailabilityZone"] = availability_zone
self.elbv2_client.register_targets(TargetGroupArn=target_group_arn, Targets=[target])
def get_instance_target_groups(self, instance_id):
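        # Walk every target group in the region and collect the ARNs of those
        # whose registered targets include this instance id.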
result = []
args = {
"service_resource": services.elbv2_service.TARGET_GROUPS,
"region": self.region,
}
target_groups = list(self.elbv2_service.describe(**args))
for target_group in target_groups:
target_group_healths = list(self.elbv2_service.describe(services.elbv2_service.TARGET_HEALTH,
TargetGroupArn=target_group["TargetGroupArn"]))
for target_group_health in target_group_healths:
target = target_group_health["Target"]
if target["Id"] != instance_id:
continue
result.append(target_group.get("TargetGroupArn"))
return result
| 1.882813
| 2
|
mnist_cnn_gpu.py
|
cannin/mnist-cnn-gpu
| 1
|
12779353
|
from __future__ import print_function
import tensorflow
import tensorflow.keras as keras
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Flatten
from tensorflow.keras.layers import Conv2D, MaxPooling2D
from tensorflow.keras.callbacks import ModelCheckpoint, TensorBoard
from tensorflow.keras import backend as K
import os
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
gpu_devices = tensorflow.config.experimental.list_physical_devices('GPU')
if gpu_devices:
    # Allocate GPU memory on demand rather than reserving it all up front.
    tensorflow.config.experimental.set_memory_growth(gpu_devices[0], True)
gpus = tensorflow.test.gpu_device_name()
print("GPUs: " + gpus)
batch_size = 128
num_classes = 10
epochs = 12
# input image dimensions
img_rows, img_cols = 28, 28
# the data, split between train and test sets
(x_train, y_train), (x_test, y_test) = mnist.load_data()
if K.image_data_format() == 'channels_first':
x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)
x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)
input_shape = (1, img_rows, img_cols)
else:
x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
input_shape = (img_rows, img_cols, 1)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3),
activation='relu',
input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss=keras.losses.categorical_crossentropy,
optimizer=keras.optimizers.Adadelta(),
metrics=['accuracy'])
best_check = ModelCheckpoint(filepath="model-best.h5", verbose=1, save_weights_only=True, save_best_only=True)
model.fit(x_train, y_train,
batch_size=batch_size,
epochs=epochs,
verbose=1,
validation_data=(x_test, y_test),
callbacks=[best_check])
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
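# To reuse the best epoch later, rebuild the same architecture and load the
# checkpointed weights (sketch; save_weights_only=True stored weights only):
#   model.load_weights("model-best.h5")
#   model.evaluate(x_test, y_test, verbose=0)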
| 2.890625
| 3
|
subs2srs/core/preview_item.py
|
TFarla/subs2srs-cross-platform
| 3
|
12779354
|
class PreviewItem:
def __init__(self, from_time, end_time, target_sub, native_sub):
super().__init__()
self.from_time = from_time
self.end_time = end_time
self.target_sub = target_sub
self.native_sub = native_sub
def from_time_seconds(self):
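        # from_time is stored in milliseconds; -1 signals a missing or
        # invalid start time.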
        if self.from_time is None or self.from_time <= 0:
return -1
return self.from_time / 1000
| 2.296875
| 2
|
poc/backtest_in_memory.py
|
alexcwyu/python-trading
| 17
|
12779355
|
import math
from datetime import datetime
from datetime import timedelta
import numpy as np
import pandas as pd
from algotrader.config.app import BacktestingConfig
from algotrader.event.market_data import BarSize, BarType
from algotrader.chart.plotter import StrategyPlotter
from algotrader.provider.broker.sim.simulator import Simulator
from algotrader.provider.feed.pandas_memory import PandasMemoryDataFeed
from algotrader.strategy.sma_strategy import SMAStrategy
from algotrader.trading import clock, inst_data_mgr, order_mgr  # inst_data_mgr and order_mgr are used in start(); assumed importable alongside clock
from algotrader.trading.portfolio import Portfolio
from algotrader.trading.subscription import BarSubscriptionType
from tests.mock_ref_data import MockRefDataManager, build_inst_dataframe_from_list
class BacktestRunner(object):
def __init__(self, stg):
self.__stg = stg
def start(self):
clock.default_clock = clock.simluation_clock
clock.simluation_clock.start()
inst_data_mgr.start()
order_mgr.start()
self.__stg.start()
def main():
symbols = ['SPY', 'VXX', 'XLV', 'XIV']
inst_df = build_inst_dataframe_from_list(symbols)
ccy_df = pd.DataFrame({"ccy_id": ["USD", "HKD"],
"name": ["US Dollar", "HK Dollar"]})
exchange_df = pd.DataFrame({"exch_id": ["NYSE"],
"name": ["New York Stock Exchange"]})
mgr = MockRefDataManager(inst_df=inst_df, ccy_df=ccy_df, exch_df=exchange_df)
portfolio = Portfolio(portf_id='test', cash=100000)
start_date = datetime(2000, 1, 1)
num_days = 3000
dates = [start_date + timedelta(days=i) for i in range(num_days)]
sigma = 0.3
x0 = 100
dt = 1. / 252
dW = np.random.normal(0, math.sqrt(dt), num_days)
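    # Simulate a daily price path with an Euler step of geometric Brownian
    # motion: 2% annual drift, volatility sigma, dt = one trading day.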
asset = []
asset.append(x0)
    for i in range(1, num_days):
xprev = asset[-1]
x = xprev + xprev * 0.02 * dt + sigma * xprev * dW[i]
asset.append(x)
df = pd.DataFrame({"dates": dates,
"Open": asset,
"High": asset,
"Low": asset,
"Close": asset,
"Volume": 10000 * np.ones(num_days)})
df = df.set_index(keys="dates")
dict_df = {'SPY': df,
'VXX': df,
'XLV': df,
'XIV': df}
feed = PandasMemoryDataFeed(dict_df, ref_data_mgr=mgr)
broker = Simulator()
instrument = 0
config = BacktestingConfig(stg_id="sma", portfolio_id='test',
instrument_ids=[instrument],
subscription_types=[BarSubscriptionType(bar_type=BarType.Time, bar_size=BarSize.D1)],
from_date=dates[0], to_date=dates[-1],
broker_id=Simulator.ID,
feed_id=PandasMemoryDataFeed.ID)
close = inst_data_mgr.get_series("Bar.%s.Time.86400" % instrument)
mgr.get_insts([instrument])
mgr.get_inst(instrument)
# strategy = Down2PctStrategy("down2%", portfolio,
# instrument=0, qty=1000, trading_config=config, ref_data_mgr=mgr)
strategy = SMAStrategy("sma", stg_configs={'qty':1})
runner = BacktestRunner(strategy)
runner.start()
    print(portfolio.get_result())
# pyfolio
rets = strategy.get_portfolio().get_return()
# import pyfolio as pf
# pf.create_returns_tear_sheet(rets)
# pf.create_full_tear_sheet(rets)
# build in plot
plotter = StrategyPlotter(strategy)
plotter.plot(instrument=0)
# import matplotlib.pyplot as plt
# plt.show()
import talib
sma10 = talib.SMA(df.Close.values, 10)
sma25 = talib.SMA(df.Close.values, 25)
# signal = pd.Series(1*(sma10 > sma25),index=df.index.tz_localize("UTC"))
signal = pd.Series(1 * (sma10 > sma25), index=df.index)
target_rets = df["Close"].pct_change() * signal.shift(1)
target_rets.index = target_rets.index.tz_localize("UTC")
    print(target_rets.values[1:] - rets.values)
if __name__ == "__main__":
main()
| 2.546875
| 3
|
models/block_configs_trianglecnn.py
|
xuyongzhi/SparseVoxelNet
| 0
|
12779356
|
import numpy as np
def block_configs(net_flag='default'):
block_configs = {}
block_configs['use_face_global_scale0'] = False
block_configs['e2fl_pool'] = ['max']
block_configs['f2v_pool'] = ['max']
#*****************************************************************************
block_sizes = {}
filters = {}
if net_flag == '3A':
block_sizes['edge'] = [ [1], [1], [1] ]
filters['edge'] = [ [16], [32], [64]]
block_sizes['centroid']=[ [1], [1], [1] ]
filters['centroid'] = [ [16], [32], [64]]
block_sizes['face'] = [ [1, 1], [1, 1], [1, 1 ]]
filters['face'] = [ [32, 32], [32, 32], [64, 64]]
block_sizes['vertex']=[ [1], [1], [1] ]
filters['vertex'] = [ [64], [64], [128]]
elif net_flag == '3B':
block_sizes['edge'] = [ [1], [1], [1] ]
filters['edge'] = [ [32], [64], [128]]
block_sizes['centroid']=[ [1], [1], [1] ]
filters['centroid'] = [ [32], [64], [64]]
block_sizes['face'] = [ [2], [2], [2]]
filters['face'] = [ [64], [128], [128]]
block_sizes['vertex']=[ [2], [2], [2] ]
filters['vertex'] = [ [64], [128], [256]]
else:
raise NotImplementedError
tmp = [i for fs in filters.values() for f in fs for i in f]
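  # Encode the architecture as '<number of conv layers>_<mean layer width>',
  # e.g. '15_49' for net_flag '3A' (15 filter widths with mean ~49).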
block_flag = '%d_%d'%(len(tmp), np.mean(tmp))
block_configs['block_sizes'] = block_sizes
block_configs['filters'] = filters
block_configs['block_flag'] = block_flag
return block_configs
| 2.0625
| 2
|
geokey/contributions/tests/media/test_managers.py
|
chabies/config_socialauth_geokey
| 0
|
12779357
|
"""Tests for managers of contributions (media files)."""
import os
import glob
from PIL import Image
from StringIO import StringIO
from django.core.files.base import ContentFile
from django.test import TestCase
from django.conf import settings
from nose.tools import raises
from geokey.core.exceptions import FileTypeError
from geokey.core.tests.helpers.image_helpers import get_image
from geokey.contributions.models import MediaFile
from geokey.contributions.tests.model_factories import ObservationFactory
from geokey.users.tests.model_factories import UserFactory
from .model_factories import ImageFileFactory
class ModelManagerTest(TestCase):
def tearDown(self):
files = glob.glob(os.path.join(
settings.MEDIA_ROOT,
'user-uploads/images/*'
))
for f in files:
os.remove(f)
def test_get_queryset(self):
ImageFileFactory.create_batch(3)
files = MediaFile.objects.all()
self.assertEqual(len(files), 3)
for f in files:
self.assertEqual('ImageFile', f.type_name)
def test_create_image(self):
image_file = MediaFile.objects.create(
name='<NAME>',
description='Test Description',
contribution=ObservationFactory.create(),
creator=UserFactory.create(),
the_file=get_image()
)
self.assertIsNotNone(image_file.image)
self.assertEqual(image_file.type_name, 'ImageFile')
@raises(FileTypeError)
def test_create_not_supported(self):
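        # Build a genuine PNG but give it a .xyz extension and a non-image
        # content type so the manager rejects it with FileTypeError.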
xyz_file = StringIO()
        xyz = Image.new('RGBA', size=(50, 50), color=(255, 0, 0))
xyz.save(xyz_file, 'png')
xyz_file.seek(0)
the_file = ContentFile(xyz_file.read(), 'test.xyz')
the_file.content_type = 'chemical/x-xyz'
MediaFile.objects.create(
name='<NAME>',
description='Test Description',
contribution=ObservationFactory.create(),
creator=UserFactory.create(),
the_file=the_file
)
| 2.25
| 2
|
apps/projects/models.py
|
gannetson/sportschooldeopenlucht
| 1
|
12779358
|
import datetime
from apps.tasks.models import Task
from django.db import models
from django.db.models.aggregates import Count, Sum
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils.translation import ugettext as _
from django.conf import settings
from django_extensions.db.fields import ModificationDateTimeField, CreationDateTimeField
from djchoices import DjangoChoices, ChoiceItem
from sorl.thumbnail import ImageField
from taggit_autocomplete_modified.managers import TaggableManagerAutocomplete as TaggableManager
from apps.fund.models import Donation, DonationStatuses
from django.template.defaultfilters import slugify
from django.utils import timezone
class ProjectTheme(models.Model):
""" Themes for Projects. """
# The name is marked as unique so that users can't create duplicate theme names.
name = models.CharField(_("name"), max_length=100, unique=True)
name_nl = models.CharField(_("name"), max_length=100, unique=True)
slug = models.SlugField(_("slug"), max_length=100, unique=True)
description = models.TextField(_("description"), blank=True)
def __unicode__(self):
return self.name
class Meta:
ordering = ['name']
verbose_name = _("project theme")
verbose_name_plural = _("project themes")
class ProjectPhases(DjangoChoices):
pitch = ChoiceItem('pitch', label=_("Pitch"))
plan = ChoiceItem('plan', label=_("Plan"))
campaign = ChoiceItem('campaign', label=_("Campaign"))
act = ChoiceItem('act', label=_("Act"))
results = ChoiceItem('results', label=_("Results"))
realized = ChoiceItem('realized', label=_("Realised"))
failed = ChoiceItem('failed', label=_("Failed"))
class ProjectManager(models.Manager):
def order_by(self, field):
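        # Custom sort keys only make sense for projects that are (or were)
        # fundraising, so each branch also narrows the queryset by phase.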
if field == 'money_asked':
qs = self.get_query_set()
qs = qs.filter(phase__in=[ProjectPhases.campaign, ProjectPhases.act, ProjectPhases.results, ProjectPhases.realized])
qs = qs.order_by('projectcampaign__money_asked')
return qs
        if field == 'deadline':
            qs = self.get_query_set()
            qs = qs.filter(phase=ProjectPhases.campaign)
            qs = qs.order_by('projectcampaign__deadline')
            return qs
if field == 'money_needed':
qs = self.get_query_set()
qs = qs.order_by('projectcampaign__money_asked')
qs = qs.filter(phase='campaign')
return qs
if field == 'donations':
qs = self.get_query_set()
qs = qs.order_by('popularity')
return qs
qs = super(ProjectManager, self).order_by(field)
return qs
class Project(models.Model):
""" The base Project model. """
owner = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_("initiator"), help_text=_("Project owner"), related_name="owner")
    coach = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_("coach"), help_text=_("Assistant at 1%OFFICE"), related_name="team_member", null=True, blank=True)
title = models.CharField(_("title"), max_length=255, unique=True)
slug = models.SlugField(_("slug"), max_length=100, unique=True)
phase = models.CharField(_("phase"), max_length=20, choices=ProjectPhases.choices, help_text=_("Phase this project is in right now."))
partner_organization = models.ForeignKey('projects.PartnerOrganization', null=True, blank=True)
created = CreationDateTimeField(_("created"), help_text=_("When this project was created."))
updated = ModificationDateTimeField()
popularity = models.FloatField(null=False, default=0)
objects = ProjectManager()
def __unicode__(self):
if self.title:
return self.title
return self.slug
def update_popularity(self):
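        # Popularity is a 0-100 score: half weighted by this project's share
        # of all recent donors, half by its share of recent donation volume.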
last_month = timezone.now() - timezone.timedelta(days=30)
donations = Donation.objects.filter(status__in=[DonationStatuses.paid, DonationStatuses.pending])
donations = donations.exclude(donation_type='recurring')
donations = donations.filter(created__gte=last_month)
# For all projects.
total_recent_donors = len(donations)
total_recent_donations = donations.aggregate(sum=Sum('amount'))['sum']
# For this project
donations = donations.filter(project=self)
recent_donors = len(donations)
recent_donations = donations.aggregate(sum=Sum('amount'))['sum']
if recent_donors and recent_donations:
self.popularity = 50 * (float(recent_donors) / float(total_recent_donors)) + 50 * (float(recent_donations) / float(total_recent_donations))
else:
self.popularity = 0
self.save()
@property
def supporters_count(self, with_guests=True):
# TODO: Replace this with a proper Supporters API
# something like /projects/<slug>/donations
donations = Donation.objects.filter(project=self)
donations = donations.filter(status__in=[DonationStatuses.paid, DonationStatuses.in_progress])
donations = donations.filter(user__isnull=False)
donations = donations.annotate(Count('user'))
count = len(donations.all())
if with_guests:
donations = Donation.objects.filter(project=self)
donations = donations.filter(status__in=[DonationStatuses.paid, DonationStatuses.in_progress])
donations = donations.filter(user__isnull=True)
count = count + len(donations.all())
return count
@property
def task_count(self):
return len(self.task_set.filter(status=Task.TaskStatuses.open).all())
@property
def get_open_tasks(self):
return self.task_set.filter(status=Task.TaskStatuses.open).all()
@models.permalink
def get_absolute_url(self):
""" Get the URL for the current project. """
return 'project-detail', (), {'slug': self.slug}
class Meta:
ordering = ['title']
verbose_name = _("project")
verbose_name_plural = _("projects")
def save(self, *args, **kwargs):
        if not self.slug:
            original_slug = slugify(self.title)
            slug = original_slug
            counter = 2
            qs = Project.objects
            # Append -2, -3, ... to the base slug until it is unique.
            while qs.filter(slug=slug).exists():
                slug = '%s-%d' % (original_slug, counter)
                counter += 1
            self.slug = slug
if not self.phase:
self.phase = ProjectPhases.pitch
super(Project, self).save(*args, **kwargs)
class ProjectNeedChoices(DjangoChoices):
skills = ChoiceItem('skills', label=_("Skills and expertise"))
finance = ChoiceItem('finance', label=_("Crowdfunding campaign"))
both = ChoiceItem('both', label=_("Both"))
class ProjectPitch(models.Model):
class PitchStatuses(DjangoChoices):
new = ChoiceItem('new', label=_("New"))
submitted = ChoiceItem('submitted', label=_("Submitted"))
rejected = ChoiceItem('rejected', label=_("Rejected"))
approved = ChoiceItem('approved', label=_("Approved"))
project = models.OneToOneField("projects.Project", verbose_name=_("project"))
status = models.CharField(_("status"), max_length=20, choices=PitchStatuses.choices)
created = CreationDateTimeField(_("created"), help_text=_("When this project was created."))
updated = ModificationDateTimeField(_('updated'))
# Basics
title = models.CharField(_("title"), max_length=100, help_text=_("Be short, creative, simple and memorable"))
pitch = models.TextField(_("pitch"), blank=True, help_text=_("Pitch your smart idea in one sentence"))
description = models.TextField(_("why, what and how"), help_text=_("Blow us away with the details!"), blank=True)
need = models.CharField(_("Project need"), null=True, max_length=20, choices=ProjectNeedChoices.choices, default=ProjectNeedChoices.both)
theme = models.ForeignKey(ProjectTheme, blank=True, null=True, verbose_name=_("theme"), help_text=_("Select one of the themes "))
tags = TaggableManager(blank=True, verbose_name=_("tags"), help_text=_("Add tags"))
# Location
latitude = models.DecimalField(_("latitude"), max_digits=21, decimal_places=18, null=True, blank=True)
longitude = models.DecimalField(_("longitude"), max_digits=21, decimal_places=18, null=True, blank=True)
country = models.ForeignKey('geo.Country', blank=True, null=True)
# Media
image = ImageField(_("picture"), max_length=255, blank=True, null=True, upload_to='project_images/', help_text=_("Upload the picture that best describes your smart idea!"))
video_url = models.URLField(_("video"), max_length=100, blank=True, default='', help_text=_("Do you have a video pitch or a short movie that explains your project. Cool! We can't wait to see it. You can paste the link to the YouTube or Vimeo video here"))
def __unicode__(self):
return self.title
class Meta:
verbose_name = _('pitch')
verbose_name_plural = _('pitches')
class ProjectPlan(models.Model):
class PlanStatuses(DjangoChoices):
new = ChoiceItem('new', label=_("New"))
submitted = ChoiceItem('submitted', label=_("Submitted"))
rejected = ChoiceItem('rejected', label=_("Rejected"))
needs_work = ChoiceItem('needs_work', label=_("Needs work"))
approved = ChoiceItem('approved', label=_("Approved"))
project = models.OneToOneField("projects.Project", verbose_name=_("project"))
status = models.CharField(_("status"), max_length=20, choices=PlanStatuses.choices)
created = CreationDateTimeField(_("created"), help_text=_("When this project was created."))
updated = ModificationDateTimeField(_('updated'))
# Basics
title = models.CharField(_("title"), max_length=100, help_text=_("Be short, creative, simple and memorable"))
pitch = models.TextField(_("pitch"), blank=True, help_text=_("Pitch your smart idea in one sentence"))
need = models.CharField(_("Project need"), null=True, max_length=20, choices=ProjectNeedChoices.choices, default=ProjectNeedChoices.both)
theme = models.ForeignKey(ProjectTheme, blank=True, null=True, verbose_name=_("theme"), help_text=_("Select one of the themes "))
tags = TaggableManager(blank=True, verbose_name=_("tags"), help_text=_("Add tags"))
# Extended Description
description = models.TextField(_("why, what and how"), help_text=_("Blow us away with the details!"), blank=True)
effects = models.TextField(_("effects"), help_text=_("What will be the Impact? How will your Smart Idea change the lives of people?"), blank=True)
for_who = models.TextField(_("for who"), help_text=_("Describe your target group"), blank=True)
future = models.TextField(_("future"), help_text=_("How will this project be self-sufficient and sustainable in the long term?"), blank=True)
reach = models.PositiveIntegerField(_("Reach"), help_text=_("How many people do you expect to reach?"), blank=True, null=True)
# Location
latitude = models.DecimalField(_("latitude"), max_digits=21, decimal_places=18, null=True, blank=True)
longitude = models.DecimalField(_("longitude"), max_digits=21, decimal_places=18, null=True, blank=True)
country = models.ForeignKey('geo.Country', blank=True, null=True)
# Media
image = ImageField(_("image"), max_length=255, blank=True, upload_to='project_images/', help_text=_("Main project picture"))
video_url = models.URLField(_("video"), max_length=100, blank=True, null=True, default='', help_text=_("Do you have a video pitch or a short movie that explains your project. Cool! We can't wait to see it. You can paste the link to the YouTube or Vimeo video here"))
organization = models.ForeignKey('organizations.Organization', verbose_name=_("organisation"), blank=True, null=True)
# Crowd funding
money_needed = models.TextField(blank=True, help_text=_("Describe in one sentence what you need the money for."))
campaign = models.TextField(_("Campaign strategy"), blank=True)
def __unicode__(self):
return self.title
class Meta:
verbose_name = _('plan')
verbose_name_plural = _('plans')
class ProjectCampaign(models.Model):
class CampaignStatuses(DjangoChoices):
running = ChoiceItem('running', label=_("Running"))
realized = ChoiceItem('realized', label=_("Realized"))
closed = ChoiceItem('closed', label=_("Closed"))
project = models.OneToOneField("projects.Project", verbose_name=_("project"))
status = models.CharField(_("status"), max_length=20, choices=CampaignStatuses.choices)
deadline = models.DateTimeField(null=True)
payout_date = models.DateTimeField(null=True)
created = CreationDateTimeField(_("created"), help_text=_("When this project was created."))
updated = ModificationDateTimeField(_('updated'))
currency = models.CharField(max_length="10", default='EUR')
# For convenience and performance we also store money donated and needed here.
money_asked = models.PositiveIntegerField(default=0)
money_donated = models.PositiveIntegerField(default=0)
money_needed = models.PositiveIntegerField(default=0)
@property
def nr_days_remaining(self):
""" Return the number of days that remain before the deadline passes """
if not self.deadline:
return 0
days = (self.deadline.date() - datetime.date.today()).days
if days < 0:
return 0
return days
@property
def percentage_funded(self):
""" Return a float containing the percentage of funds still required for this campaign """
if not self.money_donated or not self.money_asked:
return 0.0
if self.status != 'running' or self.money_donated > self.money_asked:
return 100.0
return self.money_donated / (self.money_asked / 100.0)
@property
def local_money_asked(self, currency='EUR'):
# TODO: Make this currency aware and move it to a more sensible place like view.
return self.money_asked / 100
@property
def local_money_donated(self, currency='EUR'):
# TODO: Make this currency aware and move it to a more sensible place like view.
return self.money_donated / 100
@property
def local_money_needed(self, currency='EUR'):
# TODO: Make this currency aware and move it to a more sensible place like view.
return self.money_needed / 100
@property
def supporters_count(self, with_guests=True):
# TODO: Replace this with a proper Supporters API
# something like /projects/<slug>/donations
donations = Donation.objects.filter(project=self.project)
donations = donations.filter(status__in=[DonationStatuses.paid, DonationStatuses.pending])
donations = donations.filter(user__isnull=False)
donations = donations.annotate(Count('user'))
count = len(donations.all())
if with_guests:
donations = Donation.objects.filter(project=self.project)
donations = donations.filter(status__in=[DonationStatuses.paid, DonationStatuses.pending])
donations = donations.filter(user__isnull=True)
count += len(donations.all())
return count
# The amount donated that is secure.
@property
def money_safe(self):
if self.money_asked == 0:
return 0
donations = Donation.objects.filter(project=self.project)
donations = donations.filter(status__in=[DonationStatuses.paid])
total = donations.aggregate(sum=Sum('amount'))
if not total['sum']:
return 0
return total['sum']
def update_money_donated(self):
donations = Donation.objects.filter(project=self.project)
donations = donations.filter(status__in=[DonationStatuses.paid, DonationStatuses.pending])
total = donations.aggregate(sum=Sum('amount'))
if not total['sum']:
self.money_donated = 0
else:
self.money_donated = total['sum']
self.money_needed = self.money_asked - self.money_donated
if self.money_needed < 0:
self.money_needed = 0
self.save()
class ProjectResult(models.Model):
class ResultStatuses(DjangoChoices):
running = ChoiceItem('running', label=_("Running"))
realized = ChoiceItem('realized', label=_("Realized"))
closed = ChoiceItem('closed', label=_("Closed"))
project = models.OneToOneField("projects.Project", verbose_name=_("project"))
status = models.CharField(_("status"), max_length=20, choices=ResultStatuses.choices)
created = CreationDateTimeField(_("created"), help_text=_("When this project was created."))
updated = ModificationDateTimeField(_('updated'))
class PartnerOrganization(models.Model):
"""
Some projects are run in cooperation with a partner
organization like EarthCharter & MacroMicro
"""
name = models.CharField(_("name"), max_length=255, unique=True)
slug = models.SlugField(_("slug"), max_length=100, unique=True)
description = models.TextField(_("description"))
image = ImageField(_("image"), max_length=255, blank=True, null=True, upload_to='partner_images/', help_text=_("Main partner picture"))
@property
def projects(self):
return self.project_set.exclude(phase__in=['pitch', 'failed']).all()
class Meta:
verbose_name = _("partner organization")
verbose_name_plural = _("partner organizations")
def __unicode__(self):
if self.name:
return self.name
return self.slug
class ProjectAmbassador(models.Model):
"""
People that are named as an ambassador.
"""
project_plan = models.ForeignKey(ProjectPlan)
name = models.CharField(_("name"), max_length=255)
email = models.EmailField(_("email"))
description = models.TextField(_("description"))
class ProjectBudgetLine(models.Model):
"""
BudgetLine: Entries to the Project Budget sheet.
This is the budget for the amount asked from this
website.
"""
project_plan = models.ForeignKey(ProjectPlan)
description = models.CharField(_("description"), max_length=255, blank=True)
currency = models.CharField(max_length=10, default='EUR')
amount = models.PositiveIntegerField(_("Amount (in cents)"))
created = CreationDateTimeField()
updated = ModificationDateTimeField()
class Meta:
verbose_name = _("budget line")
verbose_name_plural = _("budget lines")
def __unicode__(self):
return self.description + " : " + str(self.amount)
@receiver(post_save, weak=False, sender=Project)
def progress_project_phase(sender, instance, created, **kwargs):
# Skip all post save logic during fixture loading.
if kwargs.get('raw', False):
return
# If a new project is created it should have a pitch
try:
instance.projectpitch
except ProjectPitch.DoesNotExist:
instance.projectpitch = ProjectPitch(project=instance)
instance.projectpitch.title = instance.title
instance.projectpitch.status = ProjectPitch.PitchStatuses.new
instance.projectpitch.save()
if instance.phase == ProjectPhases.pitch:
#If project is rolled back to Pitch (e.g. from Plan) then adjust Pitch status.
if instance.projectpitch.status == ProjectPitch.PitchStatuses.approved:
instance.projectpitch.status = ProjectPitch.PitchStatuses.new
instance.projectpitch.save()
# If phase progresses to 'plan' we should create and populate a ProjectPlan.
if instance.phase == ProjectPhases.plan:
try:
instance.projectplan
except ProjectPlan.DoesNotExist:
# Create a ProjectPlan if it's not there yet
instance.projectplan = ProjectPlan.objects.create(project=instance)
instance.projectplan.status = ProjectPlan.PlanStatuses.new
# Get the Pitch and copy over all fields to the new Plan
try:
for field in ['country', 'title', 'description', 'image', 'latitude', 'longitude', 'need', 'pitch',
'image', 'video_url', 'theme']:
setattr(instance.projectplan, field, getattr(instance.projectpitch, field))
instance.projectplan.save()
# After the plan is saved we can add tags
for tag in instance.projectpitch.tags.all():
instance.projectplan.tags.add(tag.name)
if instance.projectpitch.status != ProjectPitch.PitchStatuses.approved:
instance.projectpitch.status = ProjectPitch.PitchStatuses.approved
instance.projectpitch.save()
except ProjectPitch.DoesNotExist:
# This would normally only happen during migrations, so please ignore.
pass
# If phase progresses to 'campaign' we should change status on ProjectPlan.
if instance.phase == ProjectPhases.campaign:
try:
# Set the correct statuses and save pitch and plan
if instance.projectplan.status != ProjectPlan.PlanStatuses.approved:
instance.projectplan.status = ProjectPlan.PlanStatuses.approved
instance.projectplan.save()
if instance.projectpitch.status != ProjectPitch.PitchStatuses.approved:
instance.projectpitch.status = ProjectPitch.PitchStatuses.approved
instance.projectpitch.save()
except ProjectPlan.DoesNotExist:
# This would normally only happen during migrations, so please ignore.
pass
# If we don't have a Campaign then create one and set the deadline and money_asked (based on ProjectBudgetLines).
try:
instance.projectcampaign
except ProjectCampaign.DoesNotExist:
# Set Campaign to running and set the Deadline and MoneyAsked (based on ProjectBudgetLines).
instance.projectcampaign = ProjectCampaign.objects.create(project=instance)
instance.projectcampaign.status = ProjectCampaign.CampaignStatuses.running
instance.projectcampaign.deadline = timezone.now() + timezone.timedelta(days=180)
budget = instance.projectplan.projectbudgetline_set
if len(budget.all()):
budget = budget.aggregate(sum=Sum('amount'))['sum']
else:
budget = 0
instance.projectcampaign.money_asked = budget
instance.projectcampaign.currency = 'EUR'
instance.projectcampaign.save()
@receiver(post_save, weak=False, sender=ProjectPitch)
def pitch_status_changed(sender, instance, created, **kwargs):
# Skip all post save logic during fixture loading.
if kwargs.get('raw', False):
return
    project_saved = False
    # If the pitch is approved, move the project on to the Plan phase.
    if instance.status == ProjectPitch.PitchStatuses.approved:
        if instance.project.phase == ProjectPhases.pitch:
            instance.project.phase = ProjectPhases.plan
            instance.project.save()
            project_saved = True
    # Ensure the project 'updated' field is updated for the Salesforce sync script.
if not project_saved:
instance.project.save()
@receiver(post_save, weak=False, sender=ProjectPlan)
def plan_status_changed(sender, instance, created, **kwargs):
project_saved = False
    # If the plan is approved, move the project on to the Campaign phase.
    if instance.status == ProjectPlan.PlanStatuses.approved:
        if instance.project.phase == ProjectPhases.plan:
            instance.project.phase = ProjectPhases.campaign
            instance.project.save()
            project_saved = True
    # Ensure the project 'updated' field is updated for the Salesforce sync script.
if not project_saved:
instance.project.save()
@receiver(post_save, weak=False, sender=ProjectCampaign)
def campaign_status_changed(sender, instance, created, **kwargs):
instance.project.save()
# Change project phase according to donated amount
@receiver(post_save, weak=False, sender=Donation)
def update_project_after_donation(sender, instance, created, **kwargs):
# Skip all post save logic during fixture loading.
if kwargs.get('raw', False):
return
project = instance.project
campaign = project.projectcampaign
# Don't look at donations that are just created.
if instance.status not in [DonationStatuses.in_progress, DonationStatuses.new]:
campaign.update_money_donated()
project.update_popularity()
if campaign.money_asked <= campaign.money_donated:
project.phase = ProjectPhases.act
project.save()
else:
project.phase = ProjectPhases.campaign
project.save()
| 2.015625
| 2
|
RLBotPack/Kamael/States.py
|
robbai/RLBotPack
| 0
|
12779359
|
from Utilities import *
import math
from rlbot.agents.base_agent import BaseAgent, SimpleControllerState
from rlbot.utils.structures.game_data_struct import GameTickPacket
from rlbot.utils.game_state_util import GameState, BallState, CarState, Physics, Vector3, Rotator
import random
"""
Right corner loc: (-2048, -2560), yaw: 0.25 pi loc: (2048, 2560), yaw: -0.75 pi
Left corner loc: (2048, -2560), yaw: 0.75 pi loc: (-2048, 2560), yaw: -0.25 pi
Back right loc: (-256.0, -3840), yaw: 0.5 pi loc: (256.0, 3840), yaw: -0.5 pi
Back left loc: (256.0, -3840), yaw: 0.5 pi loc: (-256.0, 3840), yaw: -0.5 pi
Far back center loc: (0.0, -4608), yaw: 0.5 pi loc: (0.0, 4608), yaw: -0.5 pi
"""
def getKickoffPosition(vec):
kickoff_locations = [[2048, 2560], [256, 3848], [0, 4608]]
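    # Spawn x-offsets: corner kickoffs have |x| = 2048, back-left/right
    # |x| = 256, far-back center x = 0; classify by |x| alone.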
if abs(vec[0]) >= 350:
return 0
elif abs(vec[0]) > 5:
return 1
else:
return 2
class baseState:
def __init__(self, agent):
self.agent = agent
self.active = True
def __repr__(self):
return f"{type(self).__name__}"
class State:
RESET = 0
WAIT = 1
INITIALIZE = 2
RUNNING = 3
class GetBoost(baseState):
def update(self):
return saferBoostGrabber(self.agent)
class airLaunch(baseState):
def __init__(self,agent):
baseState.__init__(self,agent)
self.initiated = agent.time
self.jumpTimer = agent.time
self.firstJump = False
self.secondJump = False
self.firstJumpHold = 0.5
self.secondJumpHold = 0.4
self.active = True
def update(self):
stateController = SimpleControllerState()
if not self.firstJump:
self.firstJump = True
stateController.jump = True
self.jumpTimer = self.agent.time
elif self.firstJump and not self.secondJump:
if self.agent.time - self.jumpTimer < self.firstJumpHold:
stateController.jump = True
elif self.agent.time - self.jumpTimer > self.firstJumpHold and self.agent.time - self.jumpTimer < self.firstJumpHold +.05:
stateController.boost = True
stateController.jump = False
else:
self.secondJump = True
stateController.boost = True
self.jumpTimer = self.agent.time
else:
if self.agent.time - self.jumpTimer < self.secondJumpHold:
stateController.jump = True
stateController.boost = True
else:
self.active = False
                stateController.jump = False
self.agent.activeState = DivineGrace(self.agent)
if self.agent.time - self.jumpTimer > 0.15 and self.agent.time - self.jumpTimer < 0.35:
stateController.pitch = 1
return stateController
class Aerial():
def __init__(self,agent,target,time):
self.active = False
self.agent = agent
self.target = target
self.time = clamp(10,0.00001,time)
self.jumping = False
self.jumpTimer = 0
self.airborne = False
self.launcher = None
self.setup()
def setup(self):
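        # backsolve gives the delta-v vector needed to reach the target in the
        # allotted time; only commit to the aerial if the car can supply it.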
dv_target = backsolve(self.target, self.agent, self.time)
if self.agent.deltaV >= dv_target.magnitude():
self.dv_target = dv_target
self.active = True
self.launcher = airLaunch(self.agent)
def update(self):
# takes the agent, an intercept point, and an intercept time.Adjusts the agent's controller
# (agent.c) to perform an aerial
        self.time = clamp(10, 0.00001, self.time - self.agent.deltaTime)
before = self.jumping
dv_target = backsolve(self.target, self.agent, self.time)
dv_total = dv_target.magnitude()
dv_local = matrixDot(self.agent.me.matrix, dv_target)
# dv_local = agent.me.matrix.dot(dv_target)
angles,self.controller = defaultPD(self.agent, dv_local)
print(self.controller.yaw,self.controller.pitch,self.controller.roll)
precision = clamp(0.6, 0.05, dv_total / 1500)
# precision = cap((dv_total/1500),0.05, 0.60)
# if dv_local[2] > 100 or not self.airborne and self.agent.onSurface: #agent.me.airborne == False:
# #if agent.sinceJump < 0.3:
# if self.jumpTimer < 0.3:
# self.jumping = True
# if before != True:
# self.controller.pitch = self.controller.yaw = self.controller.roll = 0
#
# elif self.jumpTimer >= 0.32:
# self.jumping = True
# self.airborne = True
# if before != True:
# self.controller.pitch = self.controller.yaw = self.controller.roll = 0
# #agent.c.pitch = agent.c.yaw = agent.c.roll = 0
# else:
# self.jumping = False
# #agent.c.jump = False
# else:
# self.jumping = False
# #agent.c.jump = False
if self.launcher.active:
return self.launcher.update()
else:
if dv_total > 50:
if abs(angles[1]) + abs(angles[2]) < precision:
self.controller.boost = True
#agent.c.boost = True
else:
self.controller.boost = False
#print(dv_total)
#agent.c.boost = False
else:
fly_target = self.agent.me.matrix.dot(self.target - self.agent.me.location)
                angles, self.controller = defaultPD(self.agent, fly_target)
self.controller.boost = False
#self.controller.jump = self.jumping
if self.time <= 0.0001:
self.active = False
print("timed out?")
return self.controller
class Celestial_Arrest(baseState):
def __init__(self,agent):
self.active = True
self.agent = agent
def update(self):
pass
class LeapOfFaith(baseState):
def __init__(self,agent, targetCode,target = None):
self.agent = agent
self.active = True
        self.targetCode = targetCode  # 0: flip at ball, 1: flip forward, 2: double jump, 3: flip backwards, 4: flip left, 5: flip right, 6: flip at target, 7: left-forward diagonal flip, 8: right-forward diagonal flip, 9/10: diagonal flip cancels
self.flip_obj = FlipStatus(agent.time)
self.target = target
self.cancelTimerCap = .3
self.cancelStartTime = None
self.jumped = False
def update(self):
controller_state = SimpleControllerState()
jump = flipHandler(self.agent, self.flip_obj)
if jump:
if self.targetCode == 1:
controller_state.pitch = -1
controller_state.steer = 0
controller_state.throttle = 1
elif self.targetCode == 0:
ball_local = toLocal(self.agent.ball.location, self.agent.me).normalize()
ball_angle = math.atan2(ball_local.data[1], ball_local.data[0])
controller_state.jump = True
controller_state.yaw = math.sin(ball_angle)
pitch = -math.cos(ball_angle)
controller_state.pitch = pitch
if pitch > 0:
controller_state.throttle = -1
else:
controller_state.throttle = 1
elif self.targetCode == 2:
controller_state.pitch = 0
controller_state.steer = 0
controller_state.yaw = 0
elif self.targetCode == 3:
controller_state.pitch = 1
controller_state.steer = 0
controller_state.throttle = -1
elif self.targetCode == -1:
controller_state.pitch = 0
controller_state.steer = 0
controller_state.throttle = 0
elif self.targetCode == 4:
controller_state.pitch = 0
controller_state.yaw = -1
controller_state.steer = -1
controller_state.throttle = -0
elif self.targetCode == 5:
controller_state.pitch = 0
controller_state.yaw = 1
controller_state.steer = 1
controller_state.throttle = -0
elif self.targetCode == 6:
target_local = toLocal(self.target, self.agent.me).normalize()
target_angle = math.atan2(target_local.data[1], target_local.data[0])
controller_state.jump = True
controller_state.yaw = math.sin(target_angle)
pitch = -math.cos(target_angle)
controller_state.pitch = pitch
if pitch > 0:
controller_state.throttle = -1
else:
controller_state.throttle = 1
elif self.targetCode == 7:
controller_state.pitch = -1
controller_state.yaw = -1
controller_state.steer = -1
controller_state.throttle = 1
elif self.targetCode == 8:
controller_state.pitch = -1
controller_state.yaw = 1
controller_state.steer = 1
controller_state.throttle = 1
elif self.targetCode == 9:
            # diagonal flip cancel
controller_state.pitch = -1
controller_state.roll = -1
#controller_state.steer = -1
controller_state.throttle = 1
elif self.targetCode == 10:
            # diagonal flip cancel
controller_state.pitch = -1
controller_state.roll = 1
#controller_state.steer = -1
controller_state.throttle = 1
controller_state.jump = jump
controller_state.boost = False
if self.targetCode == 7 or self.targetCode == 8:
controller_state.boost = True
if self.flip_obj.flipDone:
            if self.targetCode not in (9, 10):
self.active = False
else:
if not self.cancelStartTime:
self.cancelStartTime = self.agent.time
return controller_state
if self.targetCode == 9:
controller_state.pitch = 1
controller_state.roll = 1
controller_state.throttle = 1
else:
controller_state.pitch = 1
controller_state.roll = -1
controller_state.throttle = 1
if self.agent.time - self.cancelStartTime >= self.cancelTimerCap:
self.active = False
# if self.agent.forward:
# controller_state.throttle = 1
# else:
# controller_state.throttle = -1
return controller_state
class Action_chain():
#class for performing consecutive actions over a period of time. Example: Flipping forward
def __init__(self, agent,controls_list: list, durations_list : list):
self.controls = controls_list
self.durations = durations_list
self.complete = False
self.index = 0
self.current_duration = 0
self.agent = agent
        # There should be a duration for every controller in the list; pad with zeros for any that are missing.
        if len(durations_list) < len(controls_list):
            self.durations += [0] * (len(controls_list) - len(durations_list))
self.active = True
def create_custom_controls(self,actionCode):
        # Perform specialized actions when building the controllers at creation time wasn't feasible.
controller_state = SimpleControllerState()
if actionCode == 0:
ball_local = toLocal(self.agent.ball.location, self.agent.me).normalize()
ball_angle = math.atan2(ball_local.data[1], ball_local.data[0])
controller_state.jump = True
controller_state.yaw = clamp(1,-1,math.sin(ball_angle))
controller_state.pitch = clamp(1,-1,-math.cos(ball_angle))
print(self.agent.me.location[2])
return controller_state
    def update(self):  # call once per frame; advances by agent.deltaTime and returns updated controls
self.current_duration += self.agent.deltaTime
if self.current_duration > self.durations[self.index]:
self.index+=1
self.current_duration = 0
if self.index == len(self.controls):
self.active = False
return SimpleControllerState()
if type(self.controls[self.index]) == SimpleControllerState:
return self.controls[self.index]
else:
return self.create_custom_controls(self.controls[self.index])
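# Usage sketch for Action_chain (assumed, not from the original source): flip
# forward by chaining a jump, a short neutral gap, then custom action code 0:
#   jump = SimpleControllerState(); jump.jump = True
#   chain = Action_chain(agent, [jump, SimpleControllerState(), 0], [0.1, 0.05, 0.3])
#   controls = chain.update()  # call each frame while chain.active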
class RighteousVolley(baseState):
def __init__(self,agent,delay,target):
baseState.__init__(self,agent)
self.smartAngle = False
self.target = target
height = target[2]
boomerDelay = 0.05
# if len(agent.allies) < 1:
# boomerDelay = 0
delay = clamp(1.25,.3,delay+boomerDelay)
if delay >= .3:
if height <= 200:
#print("tiny powershot")
self.jumpTimerMax = .1
self.angleTimer = clamp(.15,.05,self.jumpTimerMax/2)
else:
#print("normal powershot")
self.jumpTimerMax = delay-.2
self.angleTimer = clamp(.15, .1, self.jumpTimerMax / 2)
self.delay = delay
if self.delay >= .5:
self.smartAngle = True
self.jumped = False
self.jumpTimer = 0
#print("setting action to powershot")
def update(self):
controller_state = SimpleControllerState()
controller_state.throttle = 0
controller_state.boost = False
ball_local = toLocal(self.agent.ball.location, self.agent.me).normalize()
#ball_local = toLocal(self.target, self.agent.me)
ball_angle = math.atan2(ball_local.data[1], ball_local.data[0])
angle_degrees = correctAngle(math.degrees(ball_angle))
if not self.jumped:
self.jumped = True
controller_state.jump = True
return controller_state
else:
self.jumpTimer += self.agent.deltaTime
if self.jumpTimer < self.angleTimer:
controller_state.pitch = 1
if self.jumpTimer < self.jumpTimerMax:
controller_state.jump = True
else:
controller_state.jump = False
if self.jumpTimer > self.jumpTimerMax:
if self.jumpTimer >= self.delay-.2 and self.jumpTimer < self.delay-.15:
controller_state.jump = False
elif self.jumpTimer >= self.delay-.15 and self.jumpTimer < self.delay:
controller_state.yaw = math.sin(ball_angle)
controller_state.pitch = -math.cos(ball_angle)
controller_state.jump = True
elif self.jumpTimer < self.delay+.1:
controller_state.jump = False
else:
self.active = False
controller_state.jump = False
return controller_state
class DivineRetribution():
def __init__(self,agent,targetCar):
self.agent = agent
self.targetCar = targetCar
self.active = True
def update(self,):
action = demoTarget(self.agent,self.targetCar)
return action
class DemolitionBot():
def __init__(self,agent):
self.agent = agent
self.active = True
def update(self):
target = self.agent.closestEnemyToBall
valid = False
if target.location[2] <= 90:
if ((target.location[1] > self.agent.ball.location[1] and target.location[1] < self.agent.me.location[1]) or
(target.location[1] < self.agent.ball.location[1] and target.location[1] > self.agent.me.location[1])):
valid = True
if valid:
return demoEnemyCar(self.agent,target)
else:
self.active = False
return ShellTime(self.agent)
class GroundShot(baseState):
def __init__(self, agent):
self.agent = agent
self.active = True
def update(self):
return lineupShot(self.agent,3)
class GroundAssault(baseState):
def __init__(self, agent):
self.agent = agent
self.active = True
def update(self):
return lineupShot(self.agent,1)
class HolyGrenade(baseState):
def __init__(self, agent):
self.agent = agent
self.active = True
def update(self):
return handleBounceShot(self.agent)
class HolyProtector(baseState):
def update(self):
return ShellTime(self.agent)
class AerialDefend(baseState):
pass
class TurnTowardsPosition(baseState):
def __init__(self,agent,target,targetCode): #0 = ball.location
baseState.__init__(self,agent)
self.target = target
self.threshold = 1
self.targetCode = targetCode
def update(self):
if self.targetCode == 0:
self.target = self.agent.ball.location
localTarg = toLocal(self.target,self.agent.me)
localAngle = correctAngle(math.degrees(math.atan2(localTarg[1],localTarg[0])))
controls = SimpleControllerState()
if abs(localAngle) > self.threshold:
if self.agent.forward:
if localAngle > 0:
controls.steer = 1
else:
controls.steer = -1
controls.handbrake = True
if self.agent.currentSpd <300:
controls.throttle = .5
else:
if localAngle > 0:
controls.steer = -.5
else:
controls.steer = 1
controls.handbrake = True
if self.agent.currentSpd <300:
controls.throttle = -.5
else:
self.active = False
return controls
class Obstruct(baseState):
def update(self):
if not kickOffTest(self.agent):
return turtleTime(self.agent)
else:
self.active = False
self.agent.activeState = PreemptiveStrike(self.agent)
return self.agent.activeState.update()
"""
def getKickoffPosition(vec):
kickoff_locations = [[2048, 2560], [256, 3848], [0, 4608]]
for i in range(len(kickoff_locations)):
if kickoff_locations[i] == [abs(vec[0]),abs(vec[1])]:
return i
return -1
"""
class Kickoff(baseState):
def __init__(self,agent):
self.agent = agent
self.started = False
self.firstFlip = False
self.secondFlip = False
self.finalFlipDistance = 750
self.active = True
self.startTime = agent.time
self.flipState = None
def fakeKickOffChecker(self):
closestToBall, bDist = findEnemyClosestToLocation(self.agent, self.agent.ball.location)
myDist = findDistance(self.agent.me.location,self.agent.ball.location)
if bDist:
if bDist <= myDist*.75:
return True
else:
return False
return False
def retire(self):
self.active = False
self.agent.activeState = None
self.flipState = None
def update(self):
spd = self.agent.currentSpd
if self.flipState != None:
if self.flipState.active:
controller = self.flipState.update()
if self.agent.time - self.flipState.flip_obj.flipStartedTimer <= 0.15:
if spd < maxPossibleSpeed:
controller.boost = True
return controller
if self.secondFlip:
self.retire()
jumping = False
ballDistance = distance2D(self.agent.me.location, self.agent.ball.location)
if not self.started:
if not kickOffTest(self.agent):
self.started = True
self.startTime = self.agent.time
if self.started and self.agent.time - self.startTime > 2.5:
self.retire()
if not self.firstFlip:
if spd > 1100:
self.flipState = LeapOfFaith(self.agent,0,target = self.agent.ball.location)
self.firstFlip = True
return self.flipState.update()
if ballDistance > self.finalFlipDistance:
destination = self.agent.ball.location
if not self.firstFlip:
if self.agent.me.location[0] > self.agent.ball.location[0]:
destination.data[0] -= 200
else:
destination.data[0] += 200
else:
if self.agent.me.location[0] > self.agent.ball.location[0]:
destination.data[0] -= 5
else:
destination.data[0] += 5
return greedyMover(self.agent, destination)
else:
self.flipState = LeapOfFaith(self.agent,0,self.agent.ball.location)
self.secondFlip = True
return self.flipState.update()
class HeavenylyReprieve(baseState):
def __init__(self,agent,boostloc):
self.agent = agent
self.boostLoc = boostloc
self.active = True
def update(self):
result = inCornerWithBoost(self.agent)
if result != False:
return refuel(self.agent, result[0])
else:
self.active = False
return ShellTime(self.agent)
class PreemptiveStrike(baseState):
def __init__(self,agent):
self.agent = agent
self.started = False
self.firstFlip = False
self.secondFlip = False
self.finalFlipDistance = 850
#self.finalFlipDistance = 1400
self.active = True
self.startTime = agent.time
self.flipState = None
self.kickoff_type = getKickoffPosition(agent.me.location)
self.method = 0
self.setup()
agent.stubbornessTimer = 5
        agent.stubborness = agent.stubbornessMax
def setup(self):
if abs(self.agent.me.location[0]) < 257:
self.method = 1
self.replacement = Kickoff(self.agent)
def rightSelf(self):
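        # Orient the car while airborne: roll flat, yaw toward the direction
        # of travel, and level the pitch before landing.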
controller_state = SimpleControllerState()
if self.agent.me.rotation[2] > 0:
controller_state.roll = -1
elif self.agent.me.rotation[2] < 0:
controller_state.roll = 1
if self.agent.me.rotation[0] > self.agent.velAngle:
controller_state.yaw = -1
elif self.agent.me.rotation[0] < self.agent.velAngle:
controller_state.yaw = 1
if self.agent.me.rotation[0] > 0:
controller_state.pitch = -1
elif self.agent.me.rotation[0] < 0:
controller_state.pitch = 1
controller_state.throttle = 1
return controller_state
def fakeKickOffChecker(self):
closestToBall, bDist = findEnemyClosestToLocation(self.agent, self.agent.ball.location)
myDist = findDistance(self.agent.me.location,self.agent.ball.location)
if bDist:
if bDist <= myDist*.75:
return True
else:
return False
return False
def retire(self):
self.active = False
self.agent.activeState = None
self.flipState = None
def update(self):
if self.method == 1:
action = self.replacement.update()
if not self.replacement.active:
self.retire()
return action
else:
spd = self.agent.currentSpd
if self.flipState != None:
if self.flipState.active:
controller = self.flipState.update()
controller.boost = True
return controller
if self.secondFlip:
self.retire()
jumping = False
ballDistance = distance2D(self.agent.me.location, self.agent.ball.location)
if ballDistance < 200:
self.retire()
if not self.started:
if not kickOffTest(self.agent):
self.started = True
self.startTime = self.agent.time
if self.started and self.agent.time - self.startTime > 2.5:
self.retire()
if not self.firstFlip:
if spd > 1050:
localBall = self.agent.ball.local_location
angle = correctAngle(math.degrees(math.atan2(localBall[1],localBall[0])))
#if self.agent.team == 0:
if angle < 0:
self.flipState = LeapOfFaith(self.agent, 9)
else:
self.flipState = LeapOfFaith(self.agent, 10)
# else:
# if angle > 0:
# self.flipState = LeapOfFaith(self.agent, 9)
# else:
# self.flipState = LeapOfFaith(self.agent, 10)
self.firstFlip = True
controller = self.flipState.update()
controller.boost = True
return controller
destination = self.agent.ball.location
if ballDistance > self.finalFlipDistance:
#destination.data[1] += -sign(self.agent.team)*100
if not self.firstFlip:
#print(self.kickoff_type)
if self.agent.team == 1:
if self.kickoff_type == 0:
if destination[0] > self.agent.me.location[0]:
#print("greater than 0")
destination.data[0] += 1100#1000
else:
destination.data[0] -= 1100#1000
#print("less than 0")
elif self.kickoff_type == 1:
if destination[0] > self.agent.me.location[0]:
#print("greater than 0")
destination.data[0] += 900
else:
destination.data[0] -= 900
#print("less than 0")
elif self.kickoff_type == 2:
destination.data[0] -= 750
else:
if destination[0] > self.agent.me.location[0] or self.kickoff_type == -1:
destination.data[0] += 1100
else:
destination.data[0] -= 1100
else:
if self.kickoff_type == 0:
if destination[0] > self.agent.me.location[0]:
#print("greater than 0")
destination.data[0] += 1100#1000
else:
destination.data[0] -= 1100#1000
#print("less than 0")
elif self.kickoff_type == 1:
if destination[0] > self.agent.me.location[0]:
#print("greater than 0")
destination.data[0] += 900
else:
destination.data[0] -= 900
#print("less than 0")
elif self.kickoff_type == 2:
destination.data[0] += 750
else:
if destination[0] > self.agent.me.location[0] or self.kickoff_type == -1:
destination.data[0] -= 1100
else:
destination.data[0] += 1100
else:
if destination[0] > self.agent.me.location[0]:
destination.data[0] -=25
else:
destination.data[0] += 25
controls = greedyMover(self.agent, destination)
if self.firstFlip and not self.secondFlip:
if self.flipState:
if not self.flipState.active:
if not self.agent.onSurface:
controls = self.rightSelf()
if spd < 2195:
controls.boost = True
else:
controls.boost = False
return controls
else:
if self.agent.onSurface:
self.flipState = LeapOfFaith(self.agent, 0)
self.secondFlip = True
return self.flipState.update()
else:
controls = self.rightSelf()
if spd < maxPossibleSpeed:
controls.boost = True
if ballDistance < 150:
self.retire()
return controls
class DivineGrace(baseState):
def update(self):
controller_state = SimpleControllerState()
controller_state.throttle = 1
if self.agent.onSurface or self.agent.me.location[2] < 120:
self.active = False
# vel = self.agent.me.avelocity.normalize().scale(2500)
# fpos = self.agent.me.location - vel
# fpos.data[2] = self.agent.me.location[2]
#
# controller_state.steer, controller_state.yaw, controller_state.pitch, roll = orientTowardsVector(self.agent,
# fpos)
if self.agent.me.rotation[2] > 0:
controller_state.roll = -1
elif self.agent.me.rotation[2] < 0:
controller_state.roll = 1
if self.agent.me.rotation[0] > self.agent.velAngle:
controller_state.yaw = -1
elif self.agent.me.rotation[0] < self.agent.velAngle:
controller_state.yaw = 1
# if self.agent.me.rotation[1] > 0:
# controller_state.pitch = -1
#
# elif self.agent.me.rotation[1] < 0:
# controller_state.pitch = 1
return controller_state
class WardAgainstEvil(baseState):
def __init__(self,agent):
self.agent = agent
self.active = True
self.timeCreated = self.agent.time
def update(self):
#print(f"We're too scared! {self.agent.time}")
return scaredyCat(self.agent)
class BlessingOfDexterity(baseState):
def __init__(self,agent):
self.agent = agent
self.active = True
self.firstJump= False
self.secondJump = False
self.jumpStart = 0
self.timeCreated = self.agent.time
def update(self):
controller_state = SimpleControllerState()
controller_state.throttle = -1
if not self.firstJump:
controller_state.jump = True
controller_state.pitch = 1
self.firstJump = True
self.jumpStart = self.agent.time
return controller_state
elif self.firstJump and not self.secondJump:
jumpTimer = self.agent.time - self.jumpStart
controller_state.pitch = 1
controller_state.jump = False
if jumpTimer < 0.12:
controller_state.jump = True
if jumpTimer > 0.15:
controller_state.jump = True
self.jumpStart = self.agent.time
self.secondJump = True
return controller_state
elif self.firstJump and self.secondJump:
timer = self.agent.time - self.jumpStart
if timer < 0.15:
controller_state.pitch = 1
else:
controller_state.pitch = -1
controller_state.roll = 1
if timer > .8:
controller_state.roll = 0
if timer > 1.15:
self.active = False
return controller_state
        else:
            print("halfFlip else conditional called in update. This should not be happening")
            # defensive return so update() never hands back None
            return controller_state
class Chase(baseState):
def __init__(self, agent):
self.agent = agent
self.active = True
def update(self):
if not kickOffTest(self.agent):
return efficientMover(self.agent,self.agent.ball,self.agent.maxSpd)
else:
self.active = False
self.agent.activeState = PreemptiveStrike(self.agent)
return self.agent.activeState.update()
class BlessingOfSafety(baseState):
def update(self):
distMin = 2000
if distance2D(Vector([0, 5200 * sign(self.agent.team), 200]),
self.agent.currentHit.pred_vector) < distMin:
return ShellTime(self.agent)
else:
if self.agent.rotationNumber == 2:
if len(self.agent.allies) >=2:
return playBack(self.agent,buffer = 2500)
else:
return playBack(self.agent)
if self.agent.rotationNumber >=3:
return playBack(self.agent,buffer = 5500)
#print("returning default value")
return playBack(self.agent)
class DivineAssistance(baseState):
def update(self):
return secondManSupport(self.agent)
def halfFlipStateManager(agent):
if agent.activeState.active == False:
agent.activeState = BlessingOfDexterity(agent)
else:
if type(agent.activeState) != BlessingOfDexterity:
agent.activeState = BlessingOfDexterity(agent)
class soloDefense(baseState):
def update(self):
if distance2D(Vector([0, 5200 * sign(self.agent.team), 200]),convertStructLocationToVector(self.agent.selectedBallPred))<1500:
return ShellTime(self.agent)
else:
return playBack(self.agent)
class ScaleTheWalls(baseState):
def update(self):
return handleWallShot(self.agent)
class AngelicEmbrace(baseState):
def update(self):
return carry_flick(self.agent,cradled = True)
#return newCarry(self.agent)
class emergencyDefend(baseState):
def update(self):
penetrationPosition = convertStructLocationToVector(self.agent.goalPred)
penetrationPosition.data[1] = 5350 * sign(self.agent.team)
        if self.agent.goalPred.game_seconds - self.agent.gameInfo.seconds_elapsed > .1:
            if distance2D(self.agent.me.location,penetrationPosition) > 100:
                return testMover(self.agent,penetrationPosition,2300)
            else:
                if penetrationPosition[2] > 300:
                    self.activeState = LeapOfFaith(self.agent, -1)
                    return self.activeState.update()
                else:
                    self.activeState = LeapOfFaith(self.agent, 0)
                    return self.activeState.update()
        # fallback added so update() always returns a controller; the original
        # fell through and returned None once the prediction time ran out
        return testMover(self.agent, penetrationPosition, 2300)
def parseCarInfo(carList, index, _max = False):
    # bug fix: seed the comparison with -inf/+inf so the first element always
    # wins the first comparison; starting at 0 made minimum searches over
    # positive values (e.g. distances) return None
    val = -math.inf if _max else math.inf
    best = None
    for each in carList:
        if _max:
            if each[index] > val:
                best = each
                val = each[index]
        else:
            if each[index] < val:
                best = each
                val = each[index]
    return best
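# Minimal usage sketch (the [distToGoal, distToBall, car] row shape is an
# assumption taken from the commented-out carInfo block in teamStateManager):
# closest_to_goal = parseCarInfo(carInfo, 0)                # smallest value at index 0
# furthest_from_ball = parseCarInfo(carInfo, 1, _max=True)  # largest value at index 1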
def teamStateManager(agent):
if len(agent.allies) < 1:
soloStateManager(agent)
return
agentType = type(agent.activeState)
groundHeighCutOff = 120
if agentType != PreemptiveStrike:
if not kickOffTest(agent):
            myGoalLoc = Vector([0, 5200 * sign(agent.team), 200])
            # bug fix: the enemy goal sits on the opposite side of the pitch, so
            # it needs the negated team sign (the original duplicated myGoalLoc)
            enemyGoalLoc = Vector([0, -5200 * sign(agent.team), 200])
ballDistanceFromGoal = distance2D(myGoalLoc, agent.ball)
carDistanceFromGoal = distance2D(myGoalLoc, agent.me)
carDistanceFromEnemyGoal = distance2D(enemyGoalLoc, agent.me)
if ballDistanceFromGoal <= 2000:
agent.contested = True
timeTillBallReady = 6
if agent.contested:
ballStruct = agent.selectedBallPred
timeTillBallReady = agent.ballDelay
else:
if is_in_strike_zone(agent, convertStructLocationToVector(agent.selectedBallPred)):
agent.contested = True
ballStruct = agent.selectedBallPred
timeTillBallReady = agent.ballDelay
else:
agent.selectedBallPred = findSuitableBallPosition(agent, 110, agent.getCurrentSpd(), agent.me.location)
            ballStruct = agent.selectedBallPred
            structLocation = convertStructLocationToVector(ballStruct)
            # assumed fix: structHeight is read further down but was never
            # assigned in this function, so derive it from the selected prediction
            structHeight = structLocation[2]
            goalward = ballHeadedTowardsMyGoal(agent)
            agent.openGoal = openGoalOpportunity(agent)
            aerialStructs = findAerialTargets(agent)
            # assumed fix: no `hit` object exists in this manager, so draw the
            # box at the selected prediction instead of hit.pred_vector
            createBox(agent, structLocation)
            # print(groundHeighCutOff,structHeight)
            # consolidated: these uninterruptible states all share the same
            # early-return guard (the original also checked DivineGrace twice)
            if agentType in (LeapOfFaith, airLaunch, BlessingOfDexterity, DivineGrace, RighteousVolley, Aerial):
                if agent.activeState.active != False:
                    return
if not agent.onSurface:
if agent.me.location[2] > 165:
if agentType != DivineGrace:
agent.activeState = DivineGrace(agent)
return
# carDistancesFromGoal = []
# cardistancesFromBall = []
# carInfo = []
# for c in agent.allies:
# cdfg = distance2D(myGoalLoc, c.location)
# cdfb = distance2D(agent.ball.location, c.location)
# carDistancesFromGoal.append(cdfg)
# cardistancesFromBall.append(cdfb)
# carInfo.append([cdfg, cdfb, c])
carDistanceFromGoal = distance2D(myGoalLoc, agent.me)
carDistanceFromBall = distance2D(agent.me.location, agent.ball.location)
predLocation = convertStructLocationToVector(agent.selectedBallPred)
if len(agent.allies) == 1: #print 2vX
if agent.me.location[1] * sign(agent.team) < agent.ball.location[1] *sign(agent.team): #bp = -3000 ball = -4000/ 3000,4000 // op = 3000 ball = 4000 /3000,4000
#beyond the ball - demo and retreat if there's a last man, otherwise evac asap
if agent.allies[0].location[1] * sign(agent.team) < agent.ball.location[1] *sign(agent.team):
#get back asap!
if agentType != BlessingOfSafety:
agent.activeState = BlessingOfSafety(agent)
return
else:
                    #there's a back man, cause some havoc
#print("it's clobbering time!")
if agentType != DemolitionBot:
agent.activeState = DemolitionBot(agent)
return
else:
#bot not over extended, check to see if teammate is
if agent.allies[0].location[1] * sign(agent.team) > agent.ball.location[1] * sign(agent.team):
#both bots are in defensive positions
if distance2D(agent.me.location,agent.ball.location) <= distance2D(agent.allies[0].location,agent.ball.location):
#print("this bot is closest to ball, go on offensive")
if goalward:
if agentType != HolyProtector:
agent.activeState = HolyProtector(agent)
return
if structHeight <= groundHeighCutOff:
if agentType != GroundAssault:
agent.activeState = GroundAssault(agent)
return
else:
if agentType != HolyGrenade:
agent.activeState = HolyGrenade(agent)
return
else:
if agentType != BlessingOfSafety:
agent.activeState = BlessingOfSafety(agent)
return
else:
#teammate is closer, play the back man
if agentType != BlessingOfSafety:
agent.activeState = BlessingOfSafety(agent)
return
else: #3vX+
print("why am I in 3v3?")
if goalward:
                if agentType != HolyProtector:  # bug fix: compare type, not instance
agent.activeState = HolyProtector(agent)
return
else:
if predLocation[2] > groundHeighCutOff:
if agentType != HolyGrenade:
agent.activeState = HolyGrenade(agent)
return
else:
if agentType != GroundAssault:
agent.activeState = GroundAssault(agent)
return
# pass
#
# if carDistanceFromGoal > ballDistanceFromGoal + 100:
# if agentType != GroundDefend:
# agent.activeState = GroundDefend(agent)
# return
#
# elif goalward:
# if agentType != GroundDefend:
# agent.activeState = GroundDefend(agent)
# return
#
#
# else:
#
# if structHeight <= groundHeighCutOff:
# if agentType != Dribble:
# agent.activeState = Dribble(agent)
# return
# else:
# if agentType != bounceShot:
# agent.activeState = bounceShot(agent)
# return
else:
            # bug fix: compare the state's type, not the instance, so the
            # kickoff state isn't rebuilt every tick
            if agentType != PreemptiveStrike:
agent.activeState = PreemptiveStrike(agent)
return
def orientationStateManager(agent):
if agent.me.location[2] < 30 or agent.onSurface:
print("resetting orientations")
car_state = CarState(physics=Physics(velocity=Vector3(z=1550,x = random.randrange(-1500,1500),y =random.randrange(-1500,1500 )),location=Vector3(0, 0, 20)))
game_state = GameState(cars={agent.index: car_state})
agent.set_game_state(game_state)
    if type(agent.activeState) != DivineGrace:  # bug fix: compare type, not instance
agent.activeState = DivineGrace(agent)
#return agent.activeState
def launchStateManager(agent):
if agent.activeState:
if agent.activeState.active:
return
else:
if type(agent.activeState) == airLaunch:
agent.activeState = DivineGrace(agent)
else:
if agent.onSurface:
if agent.getCurrentSpd() < 50:
agent.activeState = airLaunch(agent)
else:
agent.activeState = airLaunch(agent)
def facePositionManager(agent):
agentType = type(agent.activeState)
if agentType != TurnTowardsPosition or not agent.activeState.active:
agent.activeState = TurnTowardsPosition(agent,agent.ball.location,0)
def demoTest(agent):
targ = findEnemyClosestToLocation(agent,agent.ball.location)[0]
return demoEnemyCar(agent,targ)
def newTeamStateManager(agent):
agentType = type(agent.activeState)
if agentType != PreemptiveStrike:
if not kickOffTest(agent):
myGoalLoc = Vector([0, 5200 * sign(agent.team), 200])
ballDistanceFromGoal = distance2D(myGoalLoc, agent.ball)
carDistanceFromGoal = distance2D(myGoalLoc, agent.me)
            # consolidated: these uninterruptible states share the same early-return guard
            if agentType in (LeapOfFaith, Action_chain, airLaunch, BlessingOfDexterity, DivineGrace, RighteousVolley):
                if agent.activeState.active != False:
                    return
fastesthit = find_soonest_hit(agent)
hit = fastesthit
openNet = openGoalOpportunity(agent)
agent.openGoal = openNet
agent.timid = False
scared = False
tempDelay = hit.prediction_time - agent.gameInfo.seconds_elapsed
if tempDelay >= agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
if agent.enemyAttacking:
agent.contested = True
if tempDelay >= agent.enemyBallInterceptDelay + agent.contestedTimeLimit:
if not butterZone(hit.pred_vector):
if ballDistanceFromGoal <= 5000:
agent.timid = True
else:
scared = True
#print(tempDelay,agent.enemyBallInterceptDelay)
#pass
if distance2D(hit.pred_vector,myGoalLoc) <= 2000 or distance2D(agent.enemyTargetVec,myGoalLoc) <= 2000 or ballDistanceFromGoal <= 2000:
agent.contested = True
agent.enemyAttacking = True
agent.timid = False
scared = False
# if not agent.contested:
# if hit.hit_type == 4:
# if agent.hits[1] != None:
# temptime = agent.hits[1].prediction_time - agent.time
# if temptime < agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
# hit = agent.hits[1]
#
# if hit.hit_type == 1:
# if agent.hits[0] != None:
# temptime = agent.hits[0].prediction_time - agent.time
# if temptime < agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
# # if not ballHeadedTowardsMyGoal_testing(agent, agent.hits[0]):
# hit = agent.hits[0]
#
# goalward = ballHeadedTowardsMyGoal_testing(agent, hit)
# agent.goalward = goalward
# agent.currentHit = hit
# agent.ballDelay = hit.prediction_time - agent.gameInfo.seconds_elapsed
# agent.ballGrounded = False
#
# #print(agent.ballDelay, agent.enemyBallInterceptDelay,agent.contested,agent.timid)
#
# if hit.hit_type == 2:
# agent.wallShot = True
# agent.ballGrounded = False
# else:
# agent.wallShot = False
# if hit.hit_type == 1:
# if hit.pred_vector[2] <=agent.groundCutOff:
# agent.ballGrounded = True
# else:
# agent.ballGrounded = False
#
#
#
# createBox(agent, hit.pred_vector)
if agentType == Aerial:
if agent.activeState.active != False:
return
if not agent.onSurface:
if agent.me.location[2] > 170:
if agentType != DivineGrace:
agent.activeState = DivineGrace(agent)
return
if agent.dribbling:
if agentType != AngelicEmbrace:
agent.activeState = AngelicEmbrace(agent)
return
lastManY = 0
if agent.team == 0:
lastManY = math.inf
for ally in agent.allies:
if ally.location[1] < lastManY:
lastManY = ally.location[1]
if agent.me.location[1] < lastManY:
lastManY = agent.me.location[1]
else:
lastManY = -math.inf
for ally in agent.allies:
if ally.location[1] > lastManY:
lastManY = ally.location[1]
if agent.me.location[1] > lastManY:
lastManY = agent.me.location[1]
#determine which man in rotation I am #1, #2, #3, forward
man = 1
if agent.me.location[1] * sign(agent.team) < agent.ball.location[1] *sign(agent.team):
if agent.me.location[1] * sign(agent.team) < hit.pred_vector[1] * sign(agent.team):
if agent.me.location[1] != lastManY:
# if agent.team == 0:
# if agent.me.location[1] > -3500:
# man = 4
# elif agent.team == 1:
# if agent.me.location[1] < 3500:
# man = 4
man = 4
if player_retreat_status(agent.me,agent.team):
if agent.me.location[1] != lastManY:
if distance2D(hit.pred_vector, myGoalLoc) >2000:
man = 4
# elif player_retreat_status(agent.me,agent.team):
# if agent.me.location[1] != lastManY:
# # if agent.team == 0:
# # if agent.me.location[1] > -3500:
# # man = 4
# # elif agent.team == 1:
# # if agent.me.location[1] < 3500:
# # man = 4
# man = 4
if man != 4:
myDist = distance2D(agent.me.location, agent.ball.location)
for ally in agent.allies:
if not ally.demolished:
if ally.location[1] * sign(agent.team) > agent.ball.location[1] * sign(agent.team):
allyDist = distance2D(ally.location, agent.ball.location)
if allyDist < myDist:
if not player_retreat_status(ally,agent.team) or allyDist < 250:
man += 1
man = clamp(3, 0, man)
agent.rotationNumber = man
if man != 1 or openNet:
if not agent.contested:
if hit.hit_type == 4:
if agent.hits[1] != None:
temptime = agent.hits[1].prediction_time - agent.time
if temptime < agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
hit = agent.hits[1]
if hit.hit_type == 1:
if agent.hits[0] != None:
temptime = agent.hits[0].prediction_time - agent.time
if temptime < agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
# if not ballHeadedTowardsMyGoal_testing(agent, agent.hits[0]):
hit = agent.hits[0]
goalward = ballHeadedTowardsMyGoal_testing(agent, hit)
agent.goalward = goalward
agent.currentHit = hit
agent.ballDelay = hit.prediction_time - agent.gameInfo.seconds_elapsed
agent.ballGrounded = False
#print(agent.ballDelay, agent.enemyBallInterceptDelay,agent.contested,agent.timid)
if hit.hit_type == 2:
agent.wallShot = True
agent.ballGrounded = False
else:
agent.wallShot = False
if hit.hit_type == 1:
if hit.pred_vector[2] <=agent.groundCutOff:
agent.ballGrounded = True
else:
agent.ballGrounded = False
createBox(agent, hit.pred_vector)
boostOpportunity = inCornerWithBoost(agent)
if boostOpportunity != False:
if agent.me.boostLevel <=50:
getBoost = False
if agent.team == 0:
if boostOpportunity[1] == 0 or boostOpportunity[1] == 1:
getBoost = True
else:
if boostOpportunity[1] == 2 or boostOpportunity[1] == 3:
getBoost = True
if getBoost:
if agentType != HeavenylyReprieve:
agent.activeState = HeavenylyReprieve(agent,boostOpportunity[0])
return
if man == 1:
if agent.me.boostLevel <=0:
if len(agent.allies) >1:
if distance2D(agent.me.location,hit.pred_vector) > 7000:
if not is_in_strike_zone(agent,hit.pred_vector):
if agentType != BlessingOfSafety:
agent.activeState = BlessingOfSafety(agent)
return
if carDistanceFromGoal > ballDistanceFromGoal:
if agentType != HolyProtector:
agent.activeState = HolyProtector(agent)
return
if goalward:
if hit.hit_type != 2:
if agentType != HolyProtector:
agent.activeState = HolyProtector(agent)
return
else:
if agentType != ScaleTheWalls:
agent.activeState = ScaleTheWalls(agent)
return
else:
if hit.hit_type == 0: # hit.pred_vector[2] <= agent.groundCutOff:
if agentType != GroundAssault:
agent.activeState = GroundAssault(agent)
return
elif hit.hit_type == 1:
if agentType != HolyGrenade:
agent.activeState = HolyGrenade(agent)
return
else:
if agentType != ScaleTheWalls:
agent.activeState = ScaleTheWalls(agent)
return
else:
if agentType != BlessingOfSafety:
agent.activeState = BlessingOfSafety(agent)
return
# elif man == 2:
# if agentType != BlessingOfSafety:
# agent.activeState = BlessingOfSafety(agent)
# return
#
# elif man == 3:
# if agentType != BlessingOfSafety:
# agent.activeState = BlessingOfSafety(agent)
# return
#
# elif man == 4:
# if agentType != BlessingOfSafety:
# agent.activeState = BlessingOfSafety(agent)
# return
else:
agent.activeState = PreemptiveStrike(agent)
def soloStateManager(agent):
agentType = type(agent.activeState)
if agentType != PreemptiveStrike:
if not kickOffTest(agent):
myGoalLoc = Vector([0, 5200 * sign(agent.team), 200])
ballDistanceFromGoal = distance2D(myGoalLoc, agent.ball)
carDistanceFromGoal = distance2D(myGoalLoc, agent.me)
#agent.resetTimer += agent.deltaTime
            # consolidated: these uninterruptible states share the same early-return guard
            if agentType in (LeapOfFaith, airLaunch, BlessingOfDexterity, DivineGrace, RighteousVolley):
                if agent.activeState.active != False:
                    return
hit = find_soonest_hit(agent)
openNet = openGoalOpportunity(agent)
agent.openGoal = openNet
agent.timid = False
scared = False
tempDelay = hit.prediction_time - agent.time
#print(tempDelay)
if tempDelay >= agent.enemyBallInterceptDelay - .5:
if agent.enemyAttacking:
agent.contested = True
if tempDelay >= agent.enemyBallInterceptDelay + 1:
if not butterZone(hit.pred_vector):
if ballDistanceFromGoal <= 5000:
agent.timid = True
else:
scared = True
#print(tempDelay,agent.enemyBallInterceptDelay)
#pass
if distance2D(hit.pred_vector,myGoalLoc) <= 2000 or distance2D(agent.enemyTargetVec,myGoalLoc) <= 2000:
agent.contested = True
agent.timid = False
scared = False
if not agent.contested or not agent.enemyAttacking:
if agent.hits[0] != None:
temptime = agent.hits[0].prediction_time - agent.gameInfo.seconds_elapsed
#if temptime >=1:
if hit.hit_type != 2:
#if temptime < agent.enemyBallInterceptDelay - .5:
hit = agent.hits[0]
goalward = ballHeadedTowardsMyGoal_testing(agent, hit)
agent.goalward = goalward
agent.currentHit = hit
agent.ballDelay = hit.prediction_time - agent.time
agent.ballGrounded = False
#print(agent.ballDelay, agent.enemyBallInterceptDelay,agent.contested,agent.timid)
if hit.hit_type == 2:
agent.wallShot = True
agent.ballGrounded = False
else:
agent.wallShot = False
if hit.hit_type == 1:
if hit.pred_vector[2] <=agent.groundCutOff:
agent.ballGrounded = True
else:
agent.ballGrounded = False
createBox(agent, hit.pred_vector)
if agentType == Aerial:
if agent.activeState.active != False:
return
if not agent.onSurface:
if agent.me.location[2] > 170:
if agentType != DivineGrace:
agent.activeState = DivineGrace(agent)
return
if agent.dribbling:
if not goalward:
if agentType != AngelicEmbrace:
agent.activeState = AngelicEmbrace(agent)
return
#else:
# agent.resetTimer += agent.deltaTime
# if agent.resetTimer >= 5:
# agent.resetTimer = 0
# print("setting up dribble training")
# #game_state = GameState()
# #self.set_game_state(game_state)
# ball_state = BallState(Physics(location=Vector3(agent.me.location[0], agent.me.location[1], agent.me.location[2]+160),velocity=Vector3(agent.me.velocity[0],agent.me.velocity[1],agent.me.velocity[2])))
# game_state = GameState(ball=ball_state)
# agent.set_game_state(game_state)
# if agentType != AngelicEmbrace:
# agent.activeState = AngelicEmbrace(agent)
# return
# if agent.timid or scared:
# #print(f"being timid {agent.time}")
# if agentType != WardAgainstEvil:
# agent.activeState = WardAgainstEvil(agent)
# return
# if scared or agent.timid:
# if agentType != BlessingOfSafety:
# agent.activeState = BlessingOfSafety(agent)
# return
if carDistanceFromGoal > ballDistanceFromGoal:
if agentType != HolyProtector:
agent.activeState = HolyProtector(agent)
return
elif goalward:
if hit.hit_type !=2:
if agentType != HolyProtector:
agent.activeState = HolyProtector(agent)
return
else:
if agentType != ScaleTheWalls:
agent.activeState = ScaleTheWalls(agent)
#print("scaling walls")
#print(f"scale the walls defensive {agent.time}")
return
else:
if hit.hit_type == 0:
if agentType != GroundAssault:
agent.activeState = GroundAssault(agent)
return
elif hit.hit_type == 1:
if agentType != HolyGrenade:
agent.activeState = HolyGrenade(agent)
return
elif hit.hit_type == 2:
if agentType != ScaleTheWalls:
agent.activeState = ScaleTheWalls(agent)
return
else:
print("we got an eroneous hit_type somehow")
print("rawr")
else:
agent.activeState = PreemptiveStrike(agent)
def soloStateManager_testing(agent):
agentType = type(agent.activeState)
if agentType != PreemptiveStrike:
if not kickOffTest(agent):
myGoalLoc = Vector([0, 5200 * sign(agent.team), 200])
ballDistanceFromGoal = distance2D(myGoalLoc, agent.ball)
carDistanceFromGoal = distance2D(myGoalLoc, agent.me)
#agent.resetTimer += agent.deltaTime
            # consolidated: these uninterruptible states share the same early-return guard
            if agentType in (LeapOfFaith, Action_chain, airLaunch, BlessingOfDexterity, DivineGrace, RighteousVolley):
                if agent.activeState.active != False:
                    return
hit = find_soonest_hit(agent)
if agent.goalPred != None:
agent.enemyAttacking = True
openNet = openGoalOpportunity(agent)
agent.openGoal = openNet
agent.timid = False
scared = False
tempDelay = hit.time_difference()
#print(tempDelay)
#print(agent.enemyBallInterceptDelay)
if tempDelay >= agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
if agent.enemyAttacking:
#agent.enemyAttacking = True
agent.contested = True
# else:
# print(f"{tempDelay} {agent.enemyBallInterceptDelay}")
if distance2D(hit.pred_vector, myGoalLoc) <= 2000 or distance2D(agent.enemyTargetVec,
myGoalLoc) <= 2000 or ballDistanceFromGoal <= 2000:
if agent.enemyAttacking:
agent.contested = True
agent.timid = False
scared = False
#agent.enemyAttacking = True
# agent.contested = True
# agent.enemyAttacking = True
#if agent.team == 0:
if not agent.contested:
if hit.hit_type == 4:
if agent.hits[1] != None:
temptime = agent.hits[1].prediction_time - agent.time
if temptime < agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
hit = agent.hits[1]
if hit.hit_type == 1:
if agent.hits[0] != None:
temptime = agent.hits[0].prediction_time - agent.time
if temptime < agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
#if not ballHeadedTowardsMyGoal_testing(agent, agent.hits[0]):
hit = agent.hits[0]
# if agent.hits[0] != None:
# if hit.hit_type != 2:
# temptime = agent.hits[0].prediction_time - agent.time
# # if temptime >=1:
#
# if temptime < agent.enemyBallInterceptDelay - agent.contestedTimeLimit:
# if not ballHeadedTowardsMyGoal_testing(agent, agent.hits[0]):
# hit = agent.hits[0]
goalward = ballHeadedTowardsMyGoal_testing(agent, hit)
agent.goalward = goalward
agent.currentHit = hit
agent.ballDelay = hit.prediction_time - agent.time
agent.ballGrounded = False
if hit.hit_type == 2:
agent.wallShot = True
else:
agent.wallShot = False
createBox(agent, hit.pred_vector)
if agentType == Aerial:
if agent.activeState.active != False:
return
if not agent.onSurface:
if agent.me.location[2] > 120:
if agentType != DivineGrace:
agent.activeState = DivineGrace(agent)
return
if agent.dribbling:
#if not goalward:
if agentType != AngelicEmbrace:
agent.activeState = AngelicEmbrace(agent)
return
boostOpportunity = inCornerWithBoost(agent)
if boostOpportunity != False:
if agent.me.boostLevel <= 50:
getBoost = False
if agent.team == 0:
if boostOpportunity[1] == 0 or boostOpportunity[1] == 1:
getBoost = True
else:
if boostOpportunity[1] == 2 or boostOpportunity[1] == 3:
getBoost = True
if getBoost:
if agentType != HeavenylyReprieve:
agent.activeState = HeavenylyReprieve(agent, boostOpportunity[0])
return
# if scared or agent.timid:
# if agentType != BlessingOfSafety:
# agent.activeState = BlessingOfSafety(agent)
# return
if carDistanceFromGoal > ballDistanceFromGoal:
if agentType != HolyProtector:
agent.activeState = HolyProtector(agent)
return
if goalward:
if hit.hit_type !=2:
if agentType != HolyProtector:
agent.activeState = HolyProtector(agent)
return
else:
if agentType != ScaleTheWalls:
agent.activeState = ScaleTheWalls(agent)
return
else:
if hit.hit_type == 0: #hit.pred_vector[2] <= agent.groundCutOff:
if agentType != GroundAssault:
agent.activeState = GroundAssault(agent)
return
elif hit.hit_type == 1 or hit.hit_type == 4:
if agentType != HolyGrenade:
agent.activeState = HolyGrenade(agent)
return
else:
if agentType != ScaleTheWalls:
agent.activeState = ScaleTheWalls(agent)
return
else:
agent.activeState = PreemptiveStrike(agent)
| 2.609375
| 3
|
datamining-toolbox/src/gpn_hack/luigi_tasks/__init__.py
|
SlamJam/gpn-hack
| 0
|
12779360
|
import logging
import luigi
import luigi.contrib.s3
from . import hh, index
# Disable all child loggers
for name in ["botocore", "boto3", "elasticsearch"]:
logging.getLogger(name).propagate = False
class MainTask(luigi.Task):
    # 113 - Russia, 1 - Moscow, 83 - Smolensk
# areas_ids = luigi.ListParameter([113])
areas_ids = luigi.ListParameter([113])
def requires(self):
return (
[hh.HHClearCompaniesDescriptionsAtArea(area_id) for area_id in self.areas_ids]
+ [hh.HHGetContries()]
+ [index.IndexHH()]
)
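# A minimal sketch of a local run (illustrative; assumes a working luigi
# setup — luigi.build with local_scheduler is standard luigi API):
# if __name__ == "__main__":
#     luigi.build([MainTask(areas_ids=[113])], local_scheduler=True)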
| 2.09375
| 2
|
src/kapidox/models.py
|
KDE/kapidox
| 7
|
12779361
|
<reponame>KDE/kapidox
# -*- coding: utf-8 -*-
#
# SPDX-FileCopyrightText: 2016 <NAME> <<EMAIL>>
#
# SPDX-License-Identifier: BSD-2-Clause
import logging
import os.path
import string
from kapidox import utils
## @package kapidox.models
#
# Contains the classes representing the objects used by kapidox
#
class Library(object):
""" Library
"""
def __init__(self, metainfo, products, platforms, all_maintainers):
"""
Constructor of the Library object
Args:
metainfo: (dict) dictionary describing a library
products: (list of Products) list of all already created products
platforms: (dict) dictionary of all platforms for which the library
is available, where the key is a platform and the value
is a restriction. For instance:
{
'Linux': '',
'Windows': 'Tested with Windows 10 only'
}
would work.
            all_maintainers: (dict of dict) all possible maintainers, where the main key
                is a username/unique pseudo, and the value is a dictionary with the
                maintainer's name and email address. For example:
{
'username01': { 'name': '<NAME>', 'email': '<EMAIL>' },
'username02': { 'name': '<NAME>', 'email': '<EMAIL>' }
}
would work.
"""
self.product = None
self.subproduct = None
if 'group' in metainfo:
productname = metainfo['group']
self.part_of_group = True
else:
productname = metainfo['name']
self.part_of_group = False
if utils.serialize_name(productname) not in products:
productname = metainfo['name']
            # pop() instead of del: 'group' may be absent here, and del would
            # raise a KeyError in that case
            metainfo.pop('group', None)
products[utils.serialize_name(metainfo['name'])] = Product(metainfo, all_maintainers)
self.part_of_group = False
logging.warning("Group of {} not found: dropped.".format(metainfo['fancyname']))
self.product = products[utils.serialize_name(productname)]
if self.product is None:
raise ValueError("'{}' does not belong to a product."
.format(metainfo['name']))
if 'subgroup' in metainfo and self.part_of_group:
for sp in self.product.subproducts:
if sp.name == utils.serialize_name(metainfo['subgroup']):
self.subproduct = sp
if self.subproduct is None:
logging.warning("Subgroup {} of library {} not documented, subgroup will be None"
.format(metainfo['subgroup'], metainfo['name']))
if self.subproduct is not None:
self.parent = self.subproduct
self.subproduct.libraries.append(self)
else:
self.parent = self.product
self.product.libraries.append(self)
self.metainfo = metainfo
self.name = metainfo['name']
self.fancyname = metainfo['fancyname']
self.description = metainfo.get('description')
self.maintainers = utils.set_maintainers(metainfo.get('maintainer'), all_maintainers)
self.platforms = platforms
self.outputdir = self._set_outputdir(self.part_of_group)
self.href = '../' + self.outputdir.lower() + '/html/index.html'
self.path = metainfo['path']
self.srcdirs = utils.tolist(metainfo.get('public_source_dirs', ['src']))
self.docdir = utils.tolist(metainfo.get('public_doc_dir', ['docs']))
if 'public_example_dirs' in metainfo:
self.exampledirs = utils.tolist(metainfo.get('public_example_dirs', ['examples']))
else:
# backward compat
self.exampledirs = utils.tolist(metainfo.get('public_example_dir', ['examples']))
self.dependency_diagram = None
self.type = metainfo.get('type', '')
self.portingAid = metainfo.get('portingAid', False)
self.deprecated = metainfo.get('deprecated', False)
self.libraries = metainfo.get('libraries', [])
self.cmakename = metainfo.get('cmakename', '')
self.irc = metainfo.get('irc', self.product.irc)
self.mailinglist = metainfo.get('mailinglist', self.product.mailinglist)
self.repopath = utils.set_repopath(metainfo['repo_id'])
def _extend_parent(self, metainfo, key, key_obj, default):
if key in metainfo:
return metainfo[key]
elif getattr(self.product, key_obj) is not None:
return getattr(self.product, key_obj)
else:
return default
def _set_outputdir(self, grouped):
outputdir = self.name
if grouped:
outputdir = self.product.outputdir + '/' + outputdir
return outputdir.lower()
class Product(object):
""" Product
"""
# TODO: If no name and no group, it will fail !
def __init__(self, metainfo, all_maintainers):
"""
Constructor of the Product object
Args:
metainfo: (dict) dictionary describing a product
            all_maintainers: (dict of dict) all possible maintainers, where the main key
                is a username/unique pseudo, and the value is a dictionary with the
                maintainer's name and email address. For example:
{
'username01': { 'name': '<NAME>', 'email': '<EMAIL>' },
'username02': { 'name': 'Marc Developer2', 'email': '<EMAIL>' }
}
would work.
"""
self.metainfo = metainfo
self.parent = None
# if there is a group, the product is the group
# else the product is directly the library
if 'group_info' in metainfo:
self.name = utils.serialize_name(metainfo['group_info'].get('name', metainfo.get('group')))
self.fancyname = metainfo['group_info'].get('fancyname', string.capwords(self.name))
self.description = metainfo['group_info'].get('description')
self.long_description = metainfo['group_info'].get('long_description', [])
self.maintainers = utils.set_maintainers(metainfo['group_info'].get('maintainer'),
all_maintainers)
self.platforms = metainfo['group_info'].get('platforms')
self.outputdir = self.name
self.href = self.outputdir + '/index.html'
self.logo_url_src = self._set_logo_src(metainfo['path'],
metainfo['group_info'])
self.logo_url = self._set_logo()
self.libraries = [] # We'll set this later
self.subgroups = [] # We'll set this later
self.irc = metainfo['group_info'].get('irc', 'kde-devel')
self.mailinglist = metainfo['group_info'].get('mailinglist', 'kde-devel')
self.subproducts = self._extract_subproducts(metainfo['group_info'])
self.part_of_group = True
elif 'group' not in metainfo:
self.name = utils.serialize_name(metainfo['name'])
self.fancyname = metainfo['fancyname']
self.description = metainfo.get('description')
self.maintainers = utils.set_maintainers(metainfo.get('maintainer'), all_maintainers)
self.platforms = [x['name'] for x in metainfo.get('platforms', [{'name': None}])]
self.outputdir = self.name
self.href = self.outputdir + '/html/index.html'
self.logo_url_src = self._set_logo_src(metainfo['path'], metainfo)
self.logo_url = self._set_logo()
self.libraries = []
self.irc = None
self.mailinglist = None
self.part_of_group = False
else:
raise ValueError("I do not recognize a product in {}."
.format(metainfo['name']))
def _extract_subproducts(self, groupinfo):
subproducts = []
if 'subgroups' in groupinfo:
for sg in groupinfo['subgroups']:
if 'name' in sg:
subproducts.append(Subproduct(sg, self))
return subproducts
def _set_logo(self):
if self.logo_url_src is not None:
filename, ext = os.path.splitext(self.logo_url_src)
return self.outputdir + '/' + self.name + ext
else:
return None
def _set_logo_src(self, path, dct):
defined_not_found = False
if 'logo' in dct:
logo_url = os.path.join(path, dct['logo'])
if os.path.isfile(logo_url):
return logo_url
else:
defined_not_found = True
logo_url = os.path.join(path, 'logo.png')
if os.path.isfile(logo_url):
if defined_not_found:
logging.warning("Defined {} logo file doesn't exist, set back to found logo.png"
.format(self.fancyname))
return logo_url
if defined_not_found:
logging.warning("Defined {} logo file doesn't exist, set back to None"
.format(self.fancyname))
return None
class Subproduct(object):
""" Subproduct
"""
def __init__(self, spinfo, product):
"""
Constructor of the Subproduct object
Args:
spinfo: (dict) description of the subproduct. It is not more than:
{
'name': 'Subproduct Name',
'description': 'This subproduct does this and that',
'order': 3, # this is optional
}
for example.
product: (Product) the product it is part of.
"""
self.fancyname = spinfo['name']
self.name = utils.serialize_name(spinfo['name'])
self.description = spinfo.get('description')
self.order = spinfo.get('order', 99) # If no order, go to end
self.libraries = []
self.product = product
self.parent = product
| 2.453125
| 2
|
setup.py
|
authomatic/foundation-sphinx-theme
| 0
|
12779362
|
<filename>setup.py<gh_stars>0
from setuptools import setup, find_packages
setup(
name='foundation-sphinx-theme',
version='0.0.3',
packages=find_packages(),
package_data={'': ['*.txt', '*.rst', '*.html', '*.css', '*.js', '*.conf']},
author='<NAME>',
author_email='<EMAIL>',
description='',
long_description=open('README.rst').read(),
keywords=['sphinx', 'reStructuredText', 'theme', 'foundation'],
url='https://github.com/peterhudec/foundation-sphinx-theme',
license='MIT',
install_requires=['setuptools'],
classifiers=[
'Development Status :: 3 - Alpha',
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Internet",
"Topic :: Software Development :: Documentation",
"Topic :: Text Processing :: Markup",
],
entry_points={
'sphinx.html_themes': [
'foundation = foundation_sphinx_theme',
]
},
)
| 1.171875
| 1
|
annotation_backend/src/server/migrations/0012_auto_20200813_2200.py
|
INK-USC/LEAN-LIFE
| 21
|
12779363
|
# Generated by Django 2.1.7 on 2020-08-13 22:00
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('server', '0011_auto_20200721_1516'),
]
operations = [
migrations.AddField(
model_name='namedentityannotationhistory',
name='annotation',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ner_history', to='server.Annotation'),
),
migrations.AddField(
model_name='relationextractionannotationhistory',
name='annotation',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='re_history', to='server.Annotation'),
),
migrations.AlterField(
model_name='annotation',
name='task',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='annotations', to='server.Task'),
),
]
| 1.578125
| 2
|
coda/coda_mdstore/resourcesync.py
|
unt-libraries/coda
| 2
|
12779364
|
from urllib.parse import urlparse
import warnings
from django.contrib.sitemaps import Sitemap, views
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.paginator import EmptyPage, PageNotAnInteger
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.template.response import TemplateResponse
from codalib.bagatom import TIME_FORMAT_STRING
from coda_mdstore.models import Bag
try:
MOST_RECENT_BAGGING_DATE = Bag.objects.latest(
'bagging_date'
).bagging_date.strftime(TIME_FORMAT_STRING)
except Exception:
MOST_RECENT_BAGGING_DATE = '2012-12-12T00:00:00Z'
def index(
request,
sitemaps,
template_name='sitemap_index.xml',
content_type='application/xml',
sitemap_url_name='resourcelist',
mimetype=None
):
"""
This method is overloaded from django.contrib.sitemaps.views.
we need this overload so that we can change the default method of
pagination display in the sitemaps index. it's a bit hacky - but it works.
"""
if mimetype:
warnings.warn(
"The mimetype keyword argument is deprecated, use "
"content_type instead", DeprecationWarning, stacklevel=2
)
content_type = mimetype
req_protocol = 'https' if request.is_secure() else 'http'
req_site = get_current_site(request)
sites = []
for section, site in sitemaps.items():
if callable(site):
site = site()
protocol = req_protocol if site.protocol is None else site.protocol
sitemap_url = reverse(
sitemap_url_name, kwargs={'section': section})
absolute_url = '%s://%s%s' % (protocol, req_site.domain, sitemap_url)
sites.append(absolute_url)
for page in range(2, site.paginator.num_pages + 1):
# we want to change how the pagination is displayed
sites.append(
'%s-%03d.xml' % (absolute_url.replace('-001.xml', ''), page)
)
return TemplateResponse(
request,
template_name,
{
'sitemaps': sites,
'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE,
},
content_type=content_type
)
def sitemap(request, sitemaps, section=None,
template_name='sitemap.xml', content_type='application/xml'):
"""
This method is overloaded from django.contrib.sitemaps.views.
we need this overload so that we can handle the urls served up by the other
overloaded method above "index".
"""
req_site = get_current_site(request)
# since we no longer give ?p arguments,
# we want the page to be the 'section'
page = section
# now, the 'section' is really the key of the sitemaps dict seen below
section = '001'
maps = [sitemaps[section]]
urls = []
for site in maps:
try:
if callable(site):
site = site()
u = site.get_urls(page=page, site=req_site)
urls.extend(u)
except EmptyPage:
raise Http404("Page %s empty" % page)
except PageNotAnInteger:
raise Http404("No page \'%s\'" % page)
for u in urls:
bag_name = urlparse(u['location']).path.replace('/bag/', '')
bag = get_object_or_404(Bag, name=bag_name)
u.setdefault('oxum', '%s.%s' % (bag.size, bag.files))
return TemplateResponse(
request,
template_name,
{
'urlset': urls,
'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE,
},
content_type=content_type
)
def changelist(request, sitemaps, section=None,
template_name='changelist.xml', content_type='application/xml'):
most_recent_bags = Bag.objects.order_by('-bagging_date', '-name').values(
'name',
'size',
'files',
'bagging_date'
)[:10000]
for b in most_recent_bags:
b['bagging_date'] = b['bagging_date'].strftime(TIME_FORMAT_STRING)
return TemplateResponse(
request,
template_name,
{
'urlset': reversed(most_recent_bags),
'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE,
},
content_type=content_type
)
def capabilitylist(
request,
template_name='mdstore/capabilitylist.xml',
content_type='application/xml'
):
return TemplateResponse(
request,
template_name,
{
'MOST_RECENT_BAGGING_DATE': MOST_RECENT_BAGGING_DATE,
},
content_type=content_type
)
# overload the stock sitemap pagination stuff with our own methods
setattr(views, 'index', index)
setattr(views, 'sitemap', sitemap)
setattr(Sitemap, 'limit', 5000)
class BaseSitemap(Sitemap):
lastmod = None
protocol = 'http'
def items(self):
# return the list of all the bags sorted by bagging_date
return Bag.objects.order_by('bagging_date', 'name').values('name')
def location(self, obj):
# if we just return the object it will give a unicode value tuple
return "/bag/%s" % obj['name']
sitemaps = {
'001': BaseSitemap,
}
| 2.125
| 2
|
nesi/devices/keymile/keymile_resources/keymile_interface.py
|
inexio/NESi
| 30
|
12779365
|
# This file is part of the NESi software.
#
# Copyright (c) 2020
# Original Software Design by <NAME> <https://github.com/etingof>.
#
# Software adapted by inexio <https://github.com/inexio>.
# - <NAME> <https://github.com/unkn0wn-user>
# - <NAME> <https://github.com/Connyko65>
# - <NAME> <https://github.com/Dinker1996>
#
# License: https://github.com/inexio/NESi/LICENSE.rst
from nesi.devices.softbox.base_resources.interface import Interface, InterfaceCollection, logging, base
LOG = logging.getLogger(__name__)
class KeyMileInterface(Interface):
"""Represent logical interface resource."""
port_id = base.Field('port_id')
chan_id = base.Field('chan_id')
logport_id = base.Field('logport_id')
# vcc
vcc_profile = base.Field('vcc_profile')
vlan_profile = base.Field('vlan_profile')
number_of_conn_services = base.Field('number_of_conn_services')
reconfiguration_allowed = base.Field('reconfiguration_allowed')
services_connected = base.Field('services_connected')
class KeyMileInterfaceCollection(InterfaceCollection):
"""Represent a collection of interfaces."""
@property
def _resource_type(self):
return KeyMileInterface
| 1.914063
| 2
|
IPL matches data/stadium_details.py
|
jv640/Web-Scraping
| 0
|
12779366
|
import requests
from bs4 import BeautifulSoup
import html5lib as h5l
import json
import pandas as pd
import os
import time
r = requests.get("https://en.wikipedia.org/wiki/List_of_Indian_Premier_League_venues")
htmlContent = r.content
soup = BeautifulSoup(htmlContent, 'html.parser')
stadium_det = [['Stadium', 'Home Teams']]
table = soup.find("tbody").find_all("tr")
table = table[1:] # Removing table Heading
for rows in table:
row = rows.find_all("td")
stadium_name = row[0].getText().strip('\n')
home_team = []
home_teams = row[5].find_all("a")
for team in home_teams:
home_team.append(team.getText())
temp = [stadium_name, home_team]
stadium_det.append(temp)
# use the first row as the column headers and drop the numeric index so the
# CSV comes out with 'Stadium' / 'Home Teams' headings instead of 0/1
df = pd.DataFrame(stadium_det[1:], columns=stadium_det[0])
df.to_csv('Stadium_and_Home_Teams.csv', index=False)
print("Done")
| 3.3125
| 3
|
ingestion/Blocktrace/Schemas/Action.py
|
mharrisb1/blocktrace
| 0
|
12779367
|
<gh_stars>0
from dataclasses import dataclass
@dataclass(frozen=True)
class Action:
account: str
name: str
jsonData: dict
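# Illustrative construction (field values are hypothetical, not from this repo):
# a = Action(account="eosio.token", name="transfer",
#            jsonData={"from": "alice", "to": "bob", "quantity": "1.0000 EOS"})
# frozen=True makes instances immutable after creation (and hashable only if
# jsonData were itself hashable).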
| 2.09375
| 2
|
token_service/main/client_credentials_handler.py
|
oslokommune/okdata-token-service
| 0
|
12779368
|
<reponame>oslokommune/okdata-token-service
import json
from aws_xray_sdk.core import patch_all, xray_recorder
from okdata.aws.logging import (
logging_wrapper,
log_add,
log_duration,
hide_suffix,
)
from token_service.main.keycloak_client import ClientTokenClient
from token_service.main.request_utils import (
read_schema,
validate_request_body,
lambda_http_proxy_response,
)
create_token_request_schema = read_schema(
"serverless/documentation/schemas/createClientTokenRequest.json"
)
refresh_token_request_schema = read_schema(
"serverless/documentation/schemas/refreshClientTokenRequest.json"
)
patch_all()
@logging_wrapper
@xray_recorder.capture("create_token")
def create_token(event, context):
body = json.loads(event["body"])
validate_error_response = validate_request_body(body, create_token_request_schema)
if validate_error_response:
return validate_error_response
log_add(client_id=hide_suffix(body["client_id"]))
res, status = log_duration(
lambda: ClientTokenClient(
client_id=body["client_id"], client_secret=body["client_secret"]
).request_token(),
"keycloak_request_token_duration",
)
return lambda_http_proxy_response(status_code=status, response_body=res)
@logging_wrapper
@xray_recorder.capture("refresh_token")
def refresh_token(event, context):
body = json.loads(event["body"])
validate_error_response = validate_request_body(body, refresh_token_request_schema)
if validate_error_response:
return validate_error_response
log_add(client_id=hide_suffix(body["client_id"]))
res, status = log_duration(
lambda: ClientTokenClient(
client_id=body["client_id"], client_secret=body["client_secret"]
).refresh_token(body["refresh_token"]),
"keycloak_refresh_token_duration",
)
return lambda_http_proxy_response(status_code=status, response_body=res)
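# Illustrative request body shapes (inferred from the handlers above; the
# authoritative definitions live in the JSON schemas referenced at the top):
# create_token:  {"client_id": "...", "client_secret": "..."}
# refresh_token: {"client_id": "...", "client_secret": "...", "refresh_token": "..."}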
| 1.9375
| 2
|
catalog/__init__.py
|
stonescar/item-catalog-deploy
| 0
|
12779369
|
<reponame>stonescar/item-catalog-deploy
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from catalog.modules.setup.app import app
if __name__ == '__main__':
app.secret_key = 'items'
app.debug = True
app.run(host='0.0.0.0', port=5000)
| 1.195313
| 1
|
src/streamlink/plugins/drdk.py
|
hymer-up/streamlink
| 5
|
12779370
|
import logging
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import validate
from streamlink.stream import HLSStream
log = logging.getLogger(__name__)
class DRDK(Plugin):
live_api_url = 'https://www.dr-massive.com/api/page'
url_re = re.compile(r'''
https?://(?:www\.)?dr\.dk/drtv
(/kanal/[\w-]+)
''', re.VERBOSE)
_live_data_schema = validate.Schema(
{'item': {'customFields': {
validate.optional('hlsURL'): validate.url(),
validate.optional('hlsWithSubtitlesURL'): validate.url(),
}}},
validate.get('item'),
validate.get('customFields'),
)
@classmethod
def can_handle_url(cls, url):
return cls.url_re.match(url) is not None
def _get_live(self, path):
params = dict(
ff='idp',
path=path,
)
res = self.session.http.get(self.live_api_url, params=params)
playlists = self.session.http.json(res, schema=self._live_data_schema)
streams = {}
for name, url in playlists.items():
name_prefix = ''
if name == 'hlsWithSubtitlesURL':
name_prefix = 'subtitled_'
streams.update(HLSStream.parse_variant_playlist(
self.session,
url,
name_prefix=name_prefix,
))
return streams
def _get_streams(self):
m = self.url_re.match(self.url)
path = m and m.group(1)
log.debug("Path={0}".format(path))
return self._get_live(path)
__plugin__ = DRDK
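# Illustrative match (derived from url_re above): a URL such as
# https://www.dr.dk/drtv/kanal/dr1 is accepted, and group(1) == '/kanal/dr1'
# becomes the `path` query parameter passed to the live API.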
| 2.1875
| 2
|
7_NN_L2_Regularizer_to_Reduce_Overfitting.py
|
Yazooliu/Ai_Lab_LinuxNN
| 0
|
12779371
|
<gh_stars>0
#coding:utf-8
# ----------------------------------------
# The following demonstrates how regularization mitigates overfitting
# -----------------------------------------
#
# Regularization adds a model-complexity term to the loss, weighting w to damp noise in the training data (the bias b is generally not regularized)
# loss = loss(y vs y_) + REGULARIZER * loss(w)
# loss(y vs y_) is the data loss over the model outputs, e.g. MSE or cross entropy; the hyperparameter REGULARIZER sets the share of the total loss taken by the w term, i.e. the regularization weight
# loss(w) is the loss over the parameters being regularized
# loss(w) = tf.contrib.layers.l1_regularizer(REGULARIZER)(w) --- loss(w1) = sum(|wi|)
# loss(w) = tf.contrib.layers.l2_regularizer(REGULARIZER)(w) --- loss(w2) = sum(|wi|^2)
# Add the regularized weight terms to the 'losses' collection:
# tf.add_to_collection('losses', tf.contrib.layers.l2_regularizer(regularizer)(w))
# Then sum everything into the total loss:
# loss = cem (cross entropy) + tf.add_n(tf.get_collection('losses'))
# -----------------------------------------
# Setup:
# Randomly generate 300 normally distributed points X[x0,x1]
# Label Y_: when x0^2 + x1^2 < 2, y_ = 1 (red); otherwise y_ = 0 (blue)
# import matplotlib.pyplot as plt - sudo pip install <module>
# plt.scatter(x coords, y coords, c='color')
# plt.show()
#
# Build a grid over the x and y axes from start to end with the given step
# xx, yy = np.mgrid[start:end:steps, start:end:steps]
# Flatten the x and y axes into 1-D arrays, pair them into grid coordinates, and feed those points to the network
# grid = np.c_[xx.ravel(), yy.ravel()]
# Feeding the grid points to the network yields probs, quantifying how red vs. blue every point in the region is
# probs = sess.run(y,feed_dict = {x: grid})
# Reshape probs to the same dimensions as xx
# probs = probs.reshape(xx.shape)
# plt.contour(x coords, y coords, height at each point, levels = [contour height]) # use levels to color the points at the given height
# plt.show() # draw all the points
# Plot the decision boundary of the data - x1**2 + x2**2 <= r**2
#-------------------------------------------
# Import modules and generate the synthetic dataset
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
BATCH_SIZE = 300
seed = 2
# Seed-based random data generation
rdm = np.random.RandomState(seed)
# Return a (300,2) array of random numbers as the input data (x0,x1)
X = rdm.randn(300,2)
# Mark the ground truth: rows with x0^2 + x1^2 < 2 are red and labeled Y_ = 1; points outside the circle get Y_ = 0
Y_ = [int(x0*x0 + x1*x1 < 2) for (x0,x1) in X]
print('Y_ = \n ', Y_)
# From Y_, build Y_c by mapping 1 to 'red' and 0 to 'blue' so the visualization is easy to read at a glance
Y_c = [['red' if y else 'blue'] for y in Y_]
# Reshape X to n rows x 2 columns and Y_ to n rows x 1 column
X = np.vstack(X).reshape(-1,2) # N*2
Y_ = np.vstack(Y_).reshape(-1,1) # N*1
Y_c = np.vstack(Y_c).reshape(-1,1) # N*1
#----------------
print('Random (300,2) input data (x0,x1), X =\n', X)
print('Ground truth (Y_ = 1 inside the circle x0^2 + x1^2 < 2, else 0), Y_ =\n', Y_)
print("Y_c derived from Y_ (1 -> 'red', 0 -> 'blue'), Y_c =\n", Y_c)
# ----------------
# Scatter-plot columns 0 and 1 of the dataset, i.e. each row's (x0,x1), colored by the matching Y_c value
plt.scatter(X[:,0], X[:,1], c=np.squeeze(Y_c)) # x0 on the horizontal axis, x1 on the vertical; c = color; 1 in Y_ means red
plt.show()
# Define the network inputs, parameters, and outputs, and the forward pass
def get_weight(shape,regularizer):
    w = tf.Variable(tf.random_normal(shape), dtype = tf.float32)
    tf.add_to_collection('losses', tf.contrib.layers.l2_regularizer(regularizer)(w)) # regularize parameter w
    return w
def get_bias(shape):
    b = tf.Variable(tf.constant(0.01,shape = shape,dtype = tf.float32 ))
    return b
# Placeholders for the input data x and the labels y_
x = tf.placeholder(tf.float32, shape = (None,2 ))
y_ = tf.placeholder(tf.float32, shape = (None,1 ))
# Create weights w1 = [2,11] with regularization weight 0.01
w1 = get_weight([2,11], 0.01) # 2 network inputs x1, x2; the hidden layer has 11 units
b1 = get_bias([11]) # the bias is a constant, one per hidden unit, so 11 of them
y1 = tf.nn.relu(tf.matmul(x,w1) + b1 ) # element-wise add across the 11 units
# Second layer: 11 hidden units feed one output y_out, so w2 is an 11x1 matrix
w2 = get_weight([11,1], 0.01)
b2 = get_bias([1]) # the second layer has a single bias unit (tf.constant, dtype = float32)
# Direct output; the output layer does not pass through the ReLU activation
y = tf.matmul(y1,w2) + b2
# -------------
# Define the cost function
loss_mse = tf.reduce_mean(tf.square(y - y_)) # MSE cost function
#
# loss_total is the full loss once the regularization terms on w are added in
loss_total = loss_mse + tf.add_n(tf.get_collection('losses'))
# Define backpropagation without regularization; use minimize(loss_total) for the regularized run
train_step = tf.train.AdamOptimizer(0.0001).minimize(loss_mse)
# Training loop
with tf.Session() as sess:
    init_op = tf.global_variables_initializer()
    sess.run(init_op)
    STEPS = 40000
    for i in range(STEPS):
        # bug fix: cycle through the dataset with modulo, matching the second
        # training loop below (the original multiplied by 300, which produced
        # out-of-range, empty slices after the first step)
        start = (i * BATCH_SIZE) % 300
        end = start + BATCH_SIZE
        sess.run(train_step, feed_dict = {x:X[start:end], y_:Y_[start:end] })
        if i % 2000 == 0:
            loss_mse_value = sess.run(loss_mse, feed_dict = {x:X, y_:Y_} )
            print("After %d train steps , current loss is %f" %(i, loss_mse_value))
    ## Build 2-D grid coordinates: xx from -3 to +3 in steps of 0.01, yy likewise
    xx,yy = np.mgrid[-3:3:0.01, -3:3:0.01]
    # Flatten xx and yy and join them into a 2-column matrix: the set of grid coordinates
    grid = np.c_[xx.ravel(), yy.ravel()]
    # Feed the grid points to the network; probs is the forward-pass output y
    probs = sess.run(y,feed_dict = {x:grid})
    # Reshape probs to match xx
    probs = probs.reshape(xx.shape)
    # print -----
    print('Parameters without regularization:')
    print("w1: \n", sess.run(w1))
    print("b1: \n", sess.run(b1))
    print("w2: \n", sess.run(w2))
    print("b2: \n", sess.run(b2))
    # ----------
    plt.scatter(X[:,0], X[:,1],c=np.squeeze(Y_c) )
    plt.contour(xx,yy,probs,levels = [0.5])
    plt.show()
# Define backpropagation including regularization - loss_total
train_step_1 = tf.train.AdamOptimizer(0.0001).minimize(loss_total)
with tf.Session() as sess:
    init_op = tf.global_variables_initializer()
    sess.run(init_op)
    STEPS = 40000
    for i in range(STEPS):
        start = (i*BATCH_SIZE)%300
        end = start + BATCH_SIZE
        sess.run(train_step_1, feed_dict = {x:X[start:end], y_:Y_[start:end]} )
        if i%2000 == 0:
            loss_mse_regularizer_value = sess.run(loss_total,feed_dict= {x:X,y_:Y_})
            print("After %d train steps, loss with regularizer is %f " %(i, loss_mse_regularizer_value))
    xx,yy = np.mgrid[-3:3:0.01, -3:3:0.01]
    grid = np.c_[xx.ravel(), yy.ravel()]
    probs = sess.run(y,feed_dict = {x:grid})
    probs = probs.reshape(xx.shape)
    #--------------------
    #
    print('Training results with regularization:')
    print("w1: \n", sess.run(w1))
    print("b1: \n", sess.run(b1))
    print("w2: \n", sess.run(w2))
    print("b2: \n", sess.run(b2))
# end of session
plt.scatter(X[:,0], X[:,1], c=np.squeeze(Y_c)) #
plt.contour(xx,yy,probs,levels = [0.5]) # color all the points where probs = 0.5
plt.show()
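# Hedged side note: in TensorFlow 2.x the same L2 penalty is usually attached
# through Keras rather than collections, e.g.
#   layer = tf.keras.layers.Dense(11, activation="relu",
#                                 kernel_regularizer=tf.keras.regularizers.l2(0.01))
# with the per-layer penalties then summed from model.losses.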
| 2.015625
| 2
|
1_multiply.py
|
benanne/theano-tutorial
| 42
|
12779372
|
import theano
import theano.tensor as T
a = T.scalar()
b = T.scalar()
y = a * b
f = theano.function([a, b], y)
print(f(1, 2))  # 2
print(f(3, 3))  # 9
| 2.828125
| 3
|
algorithms/sorting-algorithms/bubble_sort/bubble_sort.py
|
olaoluwa-98/ds-and-algorithms
| 2
|
12779373
|
def bubble_sort(arr):
n = len(arr)
# Traverse through all array elements
for i in range(n):
swapped = False
# Last i elements are already
# in place
        for j in range(n - i - 1):
# traverse the array from 0 to
# n-i-1. Swap if the element
# found is greater than the
# next element
if arr[j] > arr[j+1]:
arr[j], arr[j+1] = arr[j+1], arr[j]
swapped = True
# IF no two elements were swapped
# by inner loop, then break
        if not swapped:
break
return arr
print("\t\t**BUBBLE SORT**")
for x in range(5):
print("*"*x)
# worst case scenario. array is in descending order
arr = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] # or generate array using range(10, 0, -1)
print(f"Worst case scenario with input arr = {arr}\nResult:\t{bubble_sort(arr)}\n")
# average case scenario.
arr = [2, 1, 4, 3, 6, 5, 8, 7, 10, 9]
print(f"Average case scenario with input arr = {arr}\nResult:\t{bubble_sort(arr)}\n")
# best case scenario. array is already sorted
arr = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] # or generate array using range(1, 11)
print(f"Best case scenario with input arr = {arr}\nResult:\t{bubble_sort(arr)}\n")
| 4.46875
| 4
|
reshape_dataset.py
|
wjsutton/play_drawful_with_us
| 0
|
12779374
|
<reponame>wjsutton/play_drawful_with_us<gh_stars>0
# Drawful Submissions, matched with answers
# load python libraries
import pandas as pd
import numpy as np
# load source data sets
responses = pd.read_csv('data\\IronViz Art - Drawful Quiz (Responses) - Form Responses 1.csv')
answers = pd.read_csv('data\\answers.csv')
responders = pd.read_csv('data\\responder_lookup.csv')
sankey_model = pd.read_csv('data\\sankey_model.csv')
# rename columns for responses data set
responses.columns = ['timestamp','img-1','img-2','img-3','img-4','img-5','img-6']
responses['response_id'] = responses.index
# remove response at timestamp 1/20/2022 7:10:59
# Even Tom who originally knew all the answers still got one wrong!
responses = responses.loc[responses['timestamp'] != '1/20/2022 7:10:59']
# unpivot (wide to long) the responses data sets and lookup answers to responders
response_df = pd.melt(responses, id_vars=['response_id','timestamp'], var_name='image', value_name='response')
response_df = pd.merge(answers, response_df,how='left',left_on=['image','answer'],right_on=['image','response'])
response_df = pd.merge(response_df,responders,how='inner',on='Responder')
# create sankey data model
combo_df = response_df[['response_id','image','responder_id']]
combo_df = combo_df.loc[combo_df['response_id'] >= 0]
combo_df = combo_df.pivot(index='response_id',columns='image')['responder_id'].reset_index()
combo_df['Link'] = 'link'
combo_df['Size'] = 1
combo_df['type'] = 'response'
# rename columns
cols = ['ID', 'Step 1', 'Step 2', 'Step 3','Step 4', 'Step 5', 'Step 6','Link', 'Size', 'Type']
combo_df.columns = cols
# aggregate to find answer combination size
combo_df = combo_df.groupby(['Step 1', 'Step 2', 'Step 3','Step 4', 'Step 5', 'Step 6','Link', 'Type']).agg(Size=('Size','sum')).reset_index()
combo_df['ID'] = combo_df.index
combo_df = combo_df[cols]
# create parameter entry in data set as a single point set to 0
# in tableau this entry will be updated by parameter actions
s1 = [0]
s2 = [0]
s3 = [0]
s4 = [0]
s5 = [0]
s6 = [0]
para_df = pd.DataFrame()
para_df['Step 1'] = s1
para_df['Step 2'] = s2
para_df['Step 3'] = s3
para_df['Step 4'] = s4
para_df['Step 5'] = s5
para_df['Step 6'] = s6
para_df['ID'] = -1
para_df['Link'] = 'link'
para_df['Size'] = 1
para_df['Type'] = 'parameter'
para_df = para_df[cols]
# combine parameter data with responses data for sankey
sankey_df = pd.concat([combo_df,para_df])
# create a general dataset that counts the responses for each image
ironviz_df = response_df.groupby(['image', 'responder_id', 'response']).agg(responses=('response_id','count')).reset_index()
# identify where answers have been correctly interpreted
response_df['correct_answer'] = np.where(response_df['responder_id']==1,1,0)
correct_answers_df = response_df.groupby(['response_id']).agg(correct_answers=('correct_answer','sum')).reset_index()
correct_answers_df = correct_answers_df.groupby(['correct_answers']).agg(player_frequency=('response_id','count')).reset_index()
# add in data for missed answers and concat
missed_options = pd.DataFrame()
missed_options['correct_answers']=[5,6]
missed_options['player_frequency']=[0,0]
correct_answers_df = pd.concat([correct_answers_df,missed_options])
# not sure this is needed
#fooled_by_df = response_df.groupby(['responder_id','response_id']).agg(responses=('response','count')).reset_index()
#fooled_by_df.pivot(index='response_id', columns='responder_id', values='responses')
# create a data set for the radar charts
radar_df = response_df.groupby(['responder_id','image']).agg(responses=('response','count')).reset_index()
radar_df['total_responses'] = len(response_df['response_id'].drop_duplicates())
radar_df['percentage_response'] = radar_df['responses'] / radar_df['total_responses']
radar_df['outer_radius'] = 0.5
# write output data sets to csv
sankey_df.to_csv('output\\sankey_df.csv', encoding='utf-8-sig', index=False)
ironviz_df.to_csv('output\\ironviz_df.csv', encoding='utf-8-sig', index=False)
radar_df.to_csv('output\\radar_df.csv', encoding='utf-8-sig', index=False)
correct_answers_df.to_csv('output\\correct_answers_df.csv', encoding='utf-8-sig', index=False)
# write sankey data to Excel
with pd.ExcelWriter('output\\Sankey Template Multi Level - Drawful.xlsx') as writer:
sankey_df.to_excel(writer, sheet_name='Data', index=False)
sankey_model.to_excel(writer, sheet_name='Model', index=False)
| 2.78125
| 3
|
tests/riscv/APIs/QueryResourceEntropyTest_force.py
|
jeremybennett/force-riscv
| 0
|
12779375
|
#
# Copyright (C) [2020] Futurewei Technologies, Inc.
#
# FORCE-RISCV is licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR
# FIT FOR A PARTICULAR PURPOSE.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from riscv.EnvRISCV import EnvRISCV
from riscv.GenThreadRISCV import GenThreadRISCV
from DV.riscv.counter.depdenceSequence import depSequence
from DV.riscv.trees.instruction_tree import LDST_All_instructions
import sys
class MainSequence(depSequence):
def generate(self, **kargs):
self.choiceMod()
self.notice('Warm up resource entropy')
for i in range(100):
#instr = instMap.pick(self)
instr = self.pickWeighted(LDST_All_instructions)
self.genMetaInstruction(instr)
#self.genInstruction(instr)
self.notice('Querying GPR entropy')
entropy_dict0 = self.queryResourceEntropy("GPR")
self.show_notice(entropy_dict0)
self.notice('Querying FPR entropy')
entropy_dict1 = self.queryResourceEntropy("FPR")
self.show_notice(entropy_dict1)
def show_notice(self, entropy_dict):
source_entropy = entropy_dict["Source"]
self.notice("Source entropy state:%s" % source_entropy["State"])
self.notice("Source entropy value:%d" % source_entropy["Entropy"])
self.notice("Source onThreshold:%d" % source_entropy["OnThreshold"])
self.notice("Source offThreshold:%d" % source_entropy["OffThreshold"])
dest_entropy = entropy_dict["Dest"]
self.notice("Dest entropy state:%s" % dest_entropy["State"])
self.notice("Dest entropy value:%d" % dest_entropy["Entropy"])
self.notice("Dest onThreshold:%d" % dest_entropy["OnThreshold"])
self.notice("Dest offThreshold:%d" % dest_entropy["OffThreshold"])
MainSequenceClass = MainSequence
GenThreadClass = GenThreadRISCV
EnvClass = EnvRISCV
| 1.78125
| 2
|
torch3d/nn/utils.py
|
zhangmozhe/torch3d
| 0
|
12779376
|
from collections.abc import Iterable
from itertools import repeat
def _ntuple(n):
def parse(x):
if isinstance(x, Iterable):
return x
return tuple(repeat(x, n))
return parse
_single = _ntuple(1)
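# Quick sanity check of the helper (comments only, so importing stays side-effect free):
#   _single(3)    -> (3,)   scalars are repeated into an n-tuple
#   _single((3,)) -> (3,)   iterables are passed through unchanged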
| 3.1875
| 3
|
odoo/addons/test_lint/tests/test_onchange_domains.py
|
SHIVJITH/Odoo_Machine_Test
| 0
|
12779377
|
<reponame>SHIVJITH/Odoo_Machine_Test
import ast
import itertools
import os
from . import lint_case
class OnchangeChecker(ast.NodeVisitor):
def visit(self, node):
method = 'visit_' + node.__class__.__name__
visitor = getattr(self, method, self.generic_visit)
return visitor(node)
def generic_visit(self, node):
for field, value in ast.iter_fields(node):
if isinstance(value, list):
for item in value:
if isinstance(item, ast.AST):
yield from self.visit(item)
elif isinstance(value, ast.AST):
yield from self.visit(value)
def matches_onchange(self, node):
if isinstance(node, ast.Call):
if isinstance(node.func, ast.Attribute):
return node.func.attr == 'onchange'
if isinstance(node.func, ast.Name):
return node.func.id == 'onchange'
return False
def visit_FunctionDef(self, node):
walker = ast.walk(node) if any(map(self.matches_onchange, node.decorator_list)) else []
# can stop at the first match: an @onchange function either mentions
# domains or does not
return itertools.islice((
n for n in walker
if isinstance(n, getattr(ast, 'Str', type(None))) and n.s == 'domain'
or isinstance(n, getattr(ast, 'Constant', type(None))) and n.value == 'domain'
), 1)
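    # For illustration (hypothetical snippet), the walker above would flag a method like
    #
    #     @api.onchange('partner_id')
    #     def _onchange_partner(self):
    #         return {'domain': {'invoice_id': [('partner_id', '=', self.partner_id.id)]}}
    #
    # because the string literal 'domain' appears inside an @onchange-decorated function.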
class TestOnchangeDomains(lint_case.LintCase):
""" Would ideally have been a pylint module but that's slow as molasses
(takes minutes to run, and can blow up entirely depending on the pylint
version)
"""
def test_forbid_domains_in_onchanges(self):
""" Dynamic domains (returning a domain from an onchange) are deprecated
and should not be used in "standard" Odoo anymore
"""
checker = OnchangeChecker()
rs = []
for path in self.iter_module_files('*.py'):
with open(path, 'rb') as f:
t = ast.parse(f.read(), path)
rs.extend(zip(itertools.repeat(os.path.relpath(path)), checker.visit(t)))
rs.sort(key=lambda t: t[0])
assert not rs, "probable domains in onchanges at\n" + '\n'.join(
"- %s:%d" % (path, node.lineno)
for path, node in rs
)
| 2.265625
| 2
|
semantic_aware_models/models/recommendation/bert_recommender.py
|
ITAINNOVA/SAME
| 0
|
12779378
|
<reponame>ITAINNOVA/SAME
from semantic_aware_models.models.recommendation.abstract_recommender import AbstractRecommender
from semantic_aware_models.models.classification.bert_classifier import BertClassifier
from semantic_aware_models.dataset.movielens.movielens_data_model import *
from semantic_aware_models.utils.gpu import GPU
from semantic_aware_models.models.language.bert.inputs import InputExample
import os
import logging
import time
from semantic_aware_models.models.language.bert.data_processors import processors
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO, filename='bert_classifier.log', filemode='w')
gpu = GPU()
class BertRecommender(AbstractRecommender):
""" Bert Recommender Class """
def __init__(self, ratings_file_path, items_file_path, config_model):
super(BertRecommender, self).__init__()
self.results = {}
self.bert_cbrs = BertClassifier()
# Load data
self.descriptions = ItemUnstructuredDataModel(items_file_path, separator='::')
self.ratings_file_path = ratings_file_path
self.ratings_data_model = RatingDataModel(ratings_file_path=ratings_file_path, separator='::')
self.items_unstructured_columns = ['item_id', 'title', 'description']
self.items_all = self.descriptions.get_item_ids()
# Load Configuration
self.config_model = config_model
self.processor = processors[self.config_model['task_name']]()
self.device = gpu.get_default_device()
def recommend(self, user_id, how_many):
"""
Recommends the best items for a specific user.
:param user_id: Id of the user to recommend.
:param how_many: Number of items that we recommend to the specific user.
:return: Id of the items that the recommender returns.
"""
item_ids_not_seen_from_user = self.ratings_data_model.get_item_ids_not_seen_from_user(user_id, self.items_all)
# print('item_ids_not_seen_from_user:', item_ids_not_seen_from_user)
list_recommend = []
for item_id in item_ids_not_seen_from_user:
preference = self.estimate_preference(user_id, item_id)
list_recommend.append([item_id, preference])
list_recommend.sort(key=lambda x: x[1], reverse=True)
return list_recommend[:how_many]
def estimate_preference(self, user_id, item_id):
"""
Estimate the preference value by a specific user.
:param user_id: Id of the user to recommend.
:param item_id: Id of the item to recommend.
        :return: The estimated preference by the specific recommender.
"""
if not os.path.isfile(self.config_model['output_dir']):
data = self.processor.get_train_examples(descriptions=self.descriptions, ratings=self.ratings_data_model,
user_id=user_id)
self.bert_cbrs.train_model(config=self.config_model, train_data=data)
# TEST
examples = []
description = self.descriptions.get_description_from_id(item_id=item_id)
rating = self.ratings_data_model.get_preference_value(user_id=user_id, item_id=item_id)
examples.append(InputExample(guid=item_id, text_a=description, text_b=None, label=rating))
result = self.bert_cbrs.test_model(config=self.config_model, test_data=examples)
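        # shift the 0-indexed predicted label back to the rating scale (the +1.0
        # assumes the classifier's labels are zero-based while ratings start at 1)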
return (float(result[0]) + 1.0)
def __estimate_preference_rival(self, train_data, test_data):
# TRAIN
timestart = time.time()
self.bert_cbrs.train_model(config=self.config_model, train_data=train_data)
timeend = time.time()
train_time = timeend - timestart
# TEST
timestart = time.time()
preds = self.bert_cbrs.test_model(config=self.config_model, test_data=test_data)
timeend = time.time()
test_time = timeend - timestart
return preds, train_time, test_time
def recommend_rival(self, n_folds, train_test_file_path, recommendation_file_path):
"""
Prepare the predictions to take them to RiVaL Toolkit.
:param n_folds: Number of folds.
:param train_test_file_path: Path with train and input_test files.
:param recommendation_file_path: Path where the suitable files to run RiVaL Toolkit are saved.
:return: The suitable files to run RiVaL Toolkit are saved.
"""
for i in range(n_folds):
test_file_name = train_test_file_path + 'test_bin_verified_sep_' + str(i) + '.csv'
train_file_name = train_test_file_path + 'train_bin_verified_sep_' + str(i) + '.csv'
ratings_train_data_model = RatingDataModel(ratings_file_path=train_file_name, separator=" ")
ratings_test_data_model = RatingDataModel(ratings_file_path=test_file_name, separator=" ")
file_name = open(recommendation_file_path + 'recs_' + str(i) + '.csv', 'w')
user_ids = ratings_test_data_model.get_user_ids()
for user_id in user_ids:
train_data = self.processor.get_train_examples(descriptions=self.descriptions,
ratings=ratings_train_data_model,
user_id=user_id)
test_data = self.processor.get_train_examples(descriptions=self.descriptions,
ratings=ratings_test_data_model,
user_id=user_id)
rating_estimated_list, time_train, time_test = self.__estimate_preference_rival(train_data=train_data,
test_data=test_data)
print(i,';', user_id, ';', time_train, ";", time_test)
            items_ids = ratings_test_data_model.get_item_ids_from_user(user_id)
            # enumerate with a dedicated name; reusing `i` here would shadow the fold index printed above
            for j, item_id in enumerate(items_ids):
                file_name.write(str(user_id) + "\t" + str(item_id) + "\t" + str(float(rating_estimated_list[j]) + 1.0) + '\n')
| 2.359375
| 2
|
LM-5551 intermediate-aci/dne-dci-intermediate-aci-mission1_health-dashboard/create_snv_apps.py
|
russellpope/devnet-express-dc
| 0
|
12779379
|
#!usr/bin/env python
import cobra.mit.access
import cobra.mit.request
import cobra.mit.session
import cobra.model.fv
import cobra.model.vz
import cobra.model.pol
from credentials import *
def main():
auth = cobra.mit.session.LoginSession(URL, LOGIN, PASSWORD)
session = cobra.mit.access.MoDirectory(auth)
session.login()
root = cobra.model.pol.Uni('')
tenant_snv = cobra.model.fv.Tenant(root, 'SnV')
vrf_snv = cobra.model.fv.Ctx(tenant_snv, name='Superverse')
bd_snv = cobra.model.fv.BD(tenant_snv, name='antigravity')
bd_snv_vrf = cobra.model.fv.RsCtx(bd_snv, tnFvCtxName='Superverse')
bd_snv_subnet = cobra.model.fv.Subnet(bd_snv, ip='10.2.10.1/23')
contracts = (('web', 'http', 'tcp', '80', 'context'), ('database', 'sql', 'tcp', '1433', 'application-profile'))
    for contract_name, filter_name, protocol, port, scope in contracts:
        create_contract(tenant_snv, filter_name, protocol, port, contract_name, scope)
app_names = (('Evolution_X', 'vlan-121', 'vlan-122'), ('Rescue', 'vlan-123', 'vlan-124'),
('Chaos', 'vlan-125', 'vlan-126'), ('Power_Up', 'vlan-127', 'vlan-128'))
    for app_name, vlan_web, vlan_db in app_names:
        create_app(tenant_snv, app_name, bd_snv, vlan_web, vlan_db)
config_request = cobra.mit.request.ConfigRequest()
config_request.addMo(tenant_snv)
session.commit(config_request)
def create_app(tenant_obj, app_name, bd_object, vlan_web, vlan_db):
app = cobra.model.fv.Ap(tenant_obj, app_name)
epg_web = cobra.model.fv.AEPg(app, 'Web')
epg_web_bd = cobra.model.fv.RsBd(epg_web, tnFvBDName='antigravity')
epg_web_phys_domain = cobra.model.fv.RsDomAtt(epg_web, tDn='uni/phys-SnV_phys')
epg_web_path_a = cobra.model.fv.RsPathAtt(epg_web, tDn='topology/pod-1/protpaths-101-102/pathep-[SnV_FI-1B]', encap=vlan_web)
epg_web_path_b = cobra.model.fv.RsPathAtt(epg_web, tDn='topology/pod-1/protpaths-101-102/pathep-[SnV_FI-1A]', encap=vlan_web)
epg_web_path_c = cobra.model.fv.RsPathAtt(epg_web, tDn='topology/pod-1/paths-101/pathep-[eth1/10]', encap="vlan-10")
epg_web_provided = cobra.model.fv.RsProv(epg_web, tnVzBrCPName='web')
epg_web_consumed = cobra.model.fv.RsCons(epg_web, tnVzBrCPName='database')
epg_db = cobra.model.fv.AEPg(app, 'Database')
epg_db_bd = cobra.model.fv.RsBd(epg_db, tnFvBDName='antigravity')
epg_db_phys_domain = cobra.model.fv.RsDomAtt(epg_db, tDn='uni/phys-SnV_phys')
epg_db_path_a = cobra.model.fv.RsPathAtt(epg_db, tDn='topology/pod-1/protpaths-101-102/pathep-[SnV_FI-1B]', encap=vlan_db)
epg_db_path_b = cobra.model.fv.RsPathAtt(epg_db, tDn='topology/pod-1/protpaths-101-102/pathep-[SnV_FI-1A]', encap=vlan_db)
epg_db_provided = cobra.model.fv.RsProv(epg_db, tnVzBrCPName='database')
def create_contract(tenant_obj, filter_name, protocol, port, contract_name, contract_scope):
filter_obj = cobra.model.vz.Filter(tenant_obj, name=filter_name)
filter_entry = cobra.model.vz.Entry(filter_obj, name='{}-{}'.format(protocol, port), etherT='ip', prot=protocol, dFromPort=port, dToPort=port)
contract = cobra.model.vz.BrCP(tenant_obj, name=contract_name, scope=contract_scope)
contract_subject = cobra.model.vz.Subj(contract, name=filter_name)
subject_filter = cobra.model.vz.RsSubjFiltAtt(contract_subject, tnVzFilterName=filter_name)
if __name__ == '__main__':
main()
| 1.773438
| 2
|
vial/plugins/misc/__init__.py
|
solarnz/vial
| 0
|
12779380
|
import vial
def init():
vial.register_command('VialEscape', '.plugin.escape')
vial.register_command('VialSearchOutline', '.plugin.search_outline')
vial.register_command('VialChangedProjects', '.plugin.changed_projects')
vial.register_command('VialNew', '.plugin.new', complete='file', nargs=1)
vial.register_command('VialFilterqf', '.plugin.filter_qf', nargs=1)
vial.register_command('VialAddProjects', '.plugin.add_projects',
complete='dir', bang=True, nargs='*')
vial.register_command('VialAddIgnoreExtension',
'.plugin.add_ignore_extensions', bang=True, nargs='*')
vial.register_command('VialAddIgnoreDirs', '.plugin.add_ignore_dirs',
complete='dir', bang=True, nargs='*')
vial.register_function('VialIndent()', '.plugin.indent')
| 1.820313
| 2
|
iv/Leetcode/easy/690_employee_importance.py
|
iamsuman/iv
| 2
|
12779381
|
<gh_stars>1-10
# Definition for Employee.
class Employee:
def __init__(self, id: int, importance: int, subordinates: list):
self.id = id
self.importance = importance
self.subordinates = subordinates
class Solution:
def getImportance(self, employees: list, id: int) -> int:
emap = {e.id: e for e in employees}
# print(emap)
def dfs(eid):
employee = emap[eid]
return (employee.importance +
sum(dfs(sid) for sid in employee.subordinates))
return dfs(id)
def getImportance2(self, employees: list, id: int) -> int:
res = 0
imp = {}
sub = {}
empids = []
for emp in employees:
imp[emp.id] = emp.importance
sub[emp.id] = emp.subordinates
if emp.id == id:
empids.append(emp.id)
empids.extend(emp.subordinates)
for empid in empids:
for subid in sub[empid]:
if subid not in empids:
empids.append(subid)
# print(empids)
# print(imp)
for empid in empids:
res += imp[empid]
return res
# earlier test cases, each superseded by the reassignment below:
# employees = [[1, 5, [2, 3]], [2, 3, []], [3, 3, []]]; id = 1
# employees = [Employee(1,5,[2,3]), Employee(2, 3, []), Employee(3, 3, [])]; id = 1
# employees = [[1,5,[2,3]],[2,3,[4]],[3,4,[]],[4,1,[]]]; id = 1
# employees = [Employee(1,5,[2,3]), Employee(2,3,[4]), Employee(3,4,[]), Employee(4,1,[])]; id = 1
employees = [Employee(1,5,[2,3]), Employee(2,3,[4]), Employee(3,4,[]), Employee(4,1,[5]), Employee(5,1,[])]; id = 1
s = Solution()
print(s.getImportance(employees, id))
print(s.getImportance2(employees, id))
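# Both calls should print 14: the importance of employee 1 (5) plus the
# importances of transitive subordinates 2 (3), 3 (4), 4 (1) and 5 (1).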
| 3.28125
| 3
|
DropStudy/VideoScripts/test_data2.py
|
RossDynamics/team-samara
| 0
|
12779382
|
<reponame>RossDynamics/team-samara
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 8 15:17:51 2019
@author: <NAME>
"""
import os
import cv2
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy.interpolate as interp
import scipy.signal as spsig
from scipy import fftpack
from scipy.optimize import curve_fit
os.chdir('Silver Trial Data')
file_name = 's-g21-t02-data'
drop = pd.read_csv(file_name+'.csv')
#plt.plot(drop['Row'], drop['Column'])
os.chdir('..')
os.chdir('2017July07')
#os.chdir('White Background')
for file in os.listdir('.'):
# if file[:3] == 'n-g':
if file[:9] == file_name[:9]:
base = file
os.chdir(base)
for file in os.listdir('.'):
if file[-4:] == '.avi':
vidname = file
video = cv2.VideoCapture(vidname)
ok, frame = video.read()
##Choose box that covers inches, and the width of the tape
#r = cv2.selectROI(frame, False, False)
### Pixels per inch in x-direction
#pixels_to_inch = r[2]/.5
#Pixels per inch in y-direction
#pixels_to_inch = r[3]/6
pixels_to_inch = 23.165
frame_sect_beg = 0
W = 1000
dt = 1/2000
N = 10000
plt.figure(1)
plt.plot(drop['Column']/pixels_to_inch, drop['Row']/pixels_to_inch)
plt.gca().invert_yaxis()
plt.axis('equal')
frame_end = np.size(drop['Column'])
#freq = np.zeros(frame_end-W)
avg_vel_m_s = np.zeros(N-W)
#x = drop['Column'][816:1654]/pixels_to_inch
#y = drop['Row'][816:1654]/pixels_to_inch
#t = drop['FrameNo'][816:1654]
#x = drop['Column'][0:1770]/pixels_to_inch
#y = drop['Row'][0:1770]/pixels_to_inch
#t = drop['FrameNo'][0:1770]
x = drop['Column']/pixels_to_inch
y = drop['Row']/pixels_to_inch
t = drop['FrameNo']
x = x-np.mean(x)
#dx = np.diff(x)
#dy = np.diff(y)
#ds = np.sqrt(dx**2+dy**2)
#threshold = 5*np.mean(ds)
#X = x[:-1]
#Y = y[:-1]
#X = X[ds<threshold]
#Y = Y[ds<threshold]
#dx = np.diff(X)
#dy = np.diff(Y)
#ds = np.sqrt(dx**2+dy**2)
#threshold = 5*np.mean(ds)
#X1 = X[:-1]
#Y1 = Y[:-1]
#X1 = X1[ds<threshold]
#Y1 = Y1[ds<threshold]
#
#plt.plot(X1,Y1)
def removeJumps(X, Y):
    """Clean tracking glitches: steps shorter than 1.2x the mean step length are
    treated as good; the track is trimmed to the first/last good step and the
    coordinates are linearly interpolated across the bad (jump) samples.
    Returns (done, x, y), where done is True once every step is already good."""
    ds = np.sqrt(np.diff(X)**2+np.diff(Y)**2)
    jumps = ds < 1.2*np.mean(ds)  # True where the step is small enough to trust
    if jumps.all():
        return True, X, Y
    else:
        indexlist = np.where(jumps)
        start = indexlist[0][0]
        end = indexlist[0][-1]
        x = X[start:end+1]; y = Y[start:end+1]
        jumps = jumps[start:end+1]
        t = np.linspace(0, 1, len(x))
        splx = interp.interp1d(t[jumps], x[jumps])
        sply = interp.interp1d(t[jumps], y[jumps])
        return False, splx(t), sply(t)
good = False
while not good:
good, x, y = removeJumps(x,y)
plt.plot(x,y)
t = t[:len(x)]
dt_new = t.values[-1]*dt/N
spl = interp.UnivariateSpline(t, x, k = 1, s=0)
ts = np.linspace(np.min(t), np.max(t), N)
yinterp = np.interp(ts, t, y)
interped = spl(ts)
b, a = spsig.butter(3, 0.003)
xs = spsig.filtfilt(b, a, interped)
d, c = spsig.butter(3, 0.003)
ys = spsig.filtfilt(d, c, yinterp)
plt.figure(2)
plt.plot(xs, ys)
plt.gca().invert_yaxis()
plt.axis('equal')
while frame_sect_beg+W < N:
frame_sect_end = frame_sect_beg+W
frame_mid = (frame_sect_beg+frame_sect_end)/2
# omega = np.linspace(0, 1/(2*dt), N//2)
# xf = fftpack.fft(xs)
# xwf = fftpack.fft(xs*spsig.blackman(N))
# mag = 2/N*np.abs(xf[0:N//2])
# magw = 2/N*np.abs(xwf[0:N//2])
#
# test = np.zeros(xwf.shape)
# ind = np.argmax(np.abs(xwf[3:100]))
# test[ind+3] = np.abs(xwf[ind+3])
# testsig = fftpack.ifft(test)
# freq[frame_sect_beg] = np.max(test) #in units?
avg_vel_in_s = (ys[frame_sect_end]-ys[frame_sect_beg])/(W*dt_new) # in inches per second
avg_vel_m_s[frame_sect_beg] = avg_vel_in_s/39.37 #in meters per second
frame_sect_beg = frame_sect_beg+1
## Fit Curve (exponential) to velocity data
#def func(x, a):
# return (a*x**a)/x**(a+1)
#
#popt, pcov = curve_fit(func, xs[:np.size(avg_vel_m_s)], avg_vel_m_s)
x_vals = range(0,np.size(avg_vel_m_s))
Z = np.poly1d(np.polyfit(x_vals, avg_vel_m_s, 5))
def findCutoff(T, v):
    """Return the first index whose value lies within 0.1 of the mean of the
    remaining tail v[cutoff:] (i.e. where the initial transient has settled);
    returns False if no such index exists within T[:-1000]."""
    for cutoff in range(len(T[:-1000])):
        ave = np.mean(v[cutoff:])
#        std = np.std(v[cutoff:])
        if np.abs(v[cutoff]-ave) < .1:
            return cutoff
    return False
cutoff = findCutoff(ts,avg_vel_m_s)
print(cutoff)
AVG = np.mean(avg_vel_m_s[cutoff:])
peaks1, _ = spsig.find_peaks(xs[cutoff:])
peaks2, _ = spsig.find_peaks(xs[cutoff:], distance=np.mean(np.diff(peaks1))/4)
time_peaks = [ts[cutoff+pk]*dt for pk in peaks2]
plt.figure(3)
plt.plot(ts[cutoff:]*dt, xs[cutoff:])
plt.scatter(time_peaks, [xs[cutoff+pk] for pk in peaks2])
print(time_peaks)
period = np.mean(np.diff(time_peaks))
frequency = 2*np.pi/period
print(frequency)
print(AVG)
plt.figure(4)
plt.plot(avg_vel_m_s)
plt.plot([cutoff,cutoff],[np.min(avg_vel_m_s),np.max(avg_vel_m_s)])
#plt.plot(xs[:np.size(avg_vel_m_s)], func(xs[:np.size(avg_vel_m_s)], *popt), 'r-', label="Fitted Curve")
plt.plot(x_vals,Z(x_vals),'r-')
plt.title('Average velocity of samara')
plt.ylabel('v, m/s')
#plt.figure(2)
#plt.plot(freq)
#plt.title('Frequency of Autorotation')
#plt.figure(3)
#plt.plot(omega, mag)
#plt.plot(omega, magw)
#plt.xlim([0,10])
#
#plt.figure(4)
#plt.plot(ts, xs)
#plt.plot(ts, 10*testsig)
plt.show()
| 1.914063
| 2
|
networking-calico/networking_calico/timestamp.py
|
mikestephen/calico
| 3,973
|
12779383
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018 Tigera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
ZERO = datetime.timedelta(0)
class UTC(datetime.tzinfo):
def utcoffset(self, dt):
return ZERO
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return ZERO
utc = UTC()
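# Note: Python 3.2+ ships datetime.timezone.utc, which serves the same purpose;
# the explicit tzinfo subclass above also works on Python 2.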
def timestamp_now():
utc_now = datetime.datetime.now(utc)
return utc_now.strftime('%Y-%m-%dT%H:%M:%SZ')
# e.g. 2015-05-19T20:32:12Z
| 2.859375
| 3
|
game.py
|
osmith93/str8ts
| 0
|
12779384
|
<reponame>osmith93/str8ts
from board import Cell, Board
vertical = "vertical"
horizontal = "horizontal"
class Game:
EMPTY = 0
BLOCKED = True
def __init__(self, size=9):
self.size = size
self.board = Board(size)
self.selected_cell = None
def load(self, filename):
"""
Load game state from `filename'.
:param filename: str
"""
self.board.load(filename)
    def save(self, filename):
        """
        Save the game state to `filename'.
        :param filename: str
        """
        pass  # not implemented yet
| 3.140625
| 3
|
src/enums/__init__.py
|
Freonius/tranquillity
| 0
|
12779385
|
<filename>src/enums/__init__.py
from enum import Enum, IntFlag, auto
class ConnType(Enum):
COUCHDB = auto()
ELASTICSEARCH = auto()
MONGO = auto()
SQLITE = auto()
MYSQL = auto()
PGSQL = auto()
DB2 = auto()
MSSQL = auto()
HAZELCAST = auto()
SPRING_CONFIG = auto()
KAFKA = auto()
    MQTT = auto()
RABBIT = auto()
REDIS = auto()
EUREKA = auto()
ORACLE = auto()
class SettingsType(Enum):
ENV = auto()
SQLITE = auto()
API = auto()
YAML = auto()
INI = auto()
PROPERTIES = auto()
DICT = auto()
CSV = auto()
JSON = auto()
BSON = auto()
PICKLE = auto()
SPRING = auto()
| 2.25
| 2
|
noval/toolbar.py
|
bopopescu/NovalIDE
| 0
|
12779386
|
<filename>noval/toolbar.py
# -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# Name: toolbar.py
# Purpose:
#
# Author: wukan
#
# Created: 2019-01-16
# Copyright: (c) wukan 2019
# Licence: GPL-3.0
#-------------------------------------------------------------------------------
import tkinter as tk
from tkinter import ttk
import noval.misc as misc
from noval.menu import MenubarMixin
import noval.ui_base as ui_base
import noval.consts as consts
import noval.util.utils as utils
class ToolBar(ui_base.DockFrame):
def __init__(self,parent,orient = tk.HORIZONTAL):
ui_base.DockFrame.__init__(self,consts.DEFAULT_TOOL_BAR_ROW, parent,show=self.IsDefaultShown())
        # padx sets the left margin of the toolbar
self._orient = orient
self._commands = []
self.pad_y = 5
def CreateNewSlave(self):
group_frame = ttk.Frame(self)
padx = (0, 10)
group_frame.pack(fill="x",side=tk.RIGHT)
return group_frame
def CreateSlave(self):
slaves = self.pack_slaves()
if len(slaves) == 0:
group_frame = ttk.Frame(self)
if self._orient == tk.HORIZONTAL:
group_frame.pack(fill="x",side=tk.LEFT)
elif self._orient == tk.VERTICAL:
group_frame.pack(fill="y",side=tk.TOP)
else:
group_frame = slaves[0]
return group_frame
def AddButton(self,command_id,image,command_label,handler,accelerator=None,tester=None,pos=-1,style="Toolbutton"):
group_frame = self.CreateSlave()
button = ttk.Button(
group_frame,
command=handler,
image=image,
            style=style,  # use the Toolbutton style (toolbar button); if this parameter is None, the widget falls back to a plain button with a raised border instead of a flat toolbar button
state=tk.NORMAL,
compound=None,
pad=None,
)
if style is None:
button.configure(text=command_label)
self.SetControlPos(command_id,button,pos)
button.tester = tester
tooltip_text = MenubarMixin.FormatMenuName(command_label)
if accelerator:
tooltip_text += " (" + accelerator + ")"
misc.create_tooltip(button, tooltip_text)
return button
def IsDefaultShown(self):
toolbar_key = self.GetToolbarKey()
return utils.profile_get_int(toolbar_key,True)
def GetToolbarKey(self):
return consts.FRAME_VIEW_VISIBLE_KEY % "toolbar"
def Update(self):
if not self.winfo_ismapped():
return
for group_frame in self.pack_slaves():
for button in group_frame.grid_slaves(0):
if isinstance(button,ttk.Button):
if button.tester and not button.tester():
button["state"] = tk.DISABLED
else:
button["state"] = tk.NORMAL
def AddCombox(self,pos=-1,state='readonly'):
group_frame = self.CreateSlave()
combo = ttk.Combobox(group_frame)
self.SetControlPos(-1,combo,pos)
if state is not None:
combo.state([state])
return combo
def AddLabel(self,text,pos=-1):
group_frame = self.CreateSlave()
label = ttk.Label(group_frame,text=text)
self.SetControlPos(-1,label,pos)
def SetControlPos(self,command_id,ctrl,pos):
'''
        pos == -1 appends the control at the end; any other value inserts the control at that position
'''
update_layout = False
if pos == -1:
pos = len(self._commands)
self._commands.append([command_id,ctrl])
        # here we need to insert the control; after insertion the controls must be re-arranged
else:
update_layout = True
self._commands.insert(pos,[command_id,ctrl])
if self._orient == tk.HORIZONTAL:
ctrl.grid(row=0,column=pos)
elif self._orient == tk.VERTICAL:
ctrl.grid(row=pos,column=0)
if update_layout:
            # re-adjust the positions of the controls
self.UpdateLayout(pos)
def UpdateLayout(self,pos):
for i,data in enumerate(self._commands):
ctrl = data[1]
            # every control at a position greater than pos needs to be re-laid out
if i>pos:
if self._orient == tk.HORIZONTAL:
ctrl.grid(row=0,column=i)
elif self._orient == tk.VERTICAL:
ctrl.grid(row=i,column=0)
def AddSeparator(self):
slaves = self.pack_slaves()
group_frame = slaves[0]
separator = ttk.Separator(group_frame, orient = tk.VERTICAL)
pos = len(self._commands)
separator.grid(row=0,column=pos,sticky=tk.NSEW, padx=0, pady=3)
self._commands.append([None,separator])
return separator
def EnableTool(self,button_id,enable=True):
for command_button in self._commands:
if command_button[0] == button_id:
button = command_button[1]
if enable:
button["state"] = tk.NORMAL
else:
button["state"] = tk.DISABLED
| 2.609375
| 3
|
02-array-seq/bisect_demo.py
|
oxfordyang2016/learnfluentpython
| 1
|
12779387
|
# BEGIN BISECT_DEMO
# list comprehension
# this is a list comprehension; you can use it for things such as the Cartesian product
# about the Cartesian product: https://en.wikipedia.org/wiki/Cartesian_product
'''
>>> colors = ['black', 'white']
>>> sizes = ['S', 'M', 'L']
>>> tshirts = [(color, size) for color in colors for size in sizes]
>>> tshirts
[('black', 'S'), ('black', 'M'), ('black', 'L'), ('white', 'S'), ('white', 'M'), ('white', 'L')]
'''
'''
fp 25
in my opinion, the key point about generator expressions is that they produce a generator object:
for example, ((a, b) for a in 'srting' for b in 'ming') evaluates to a generator object
such as <generator object <genexpr> at 0x00000000050107E0>,
but wrapping it as [k for k in ((a, b) for a in 'srting' for b in 'ming')] produces a list
'''
# sorting a list of tuples (fp 27)
'''
>>> traveler_ids = [('USA', '31195855'), ('BRA', 'CE342567'),('ESP', 'XDA205856')]
>>> sorted(traveler_ids)
[('BRA', 'CE342567'), ('ESP', 'XDA205856'), ('USA', '31195855')]
'''
import bisect
import sys
HAYSTACK = [1, 4, 5, 6, 8, 12, 15, 20, 21, 23, 23, 26, 29, 30]
NEEDLES = [0, 1, 2, 5, 8, 10, 22, 23, 29, 30, 31]
ROW_FMT = '{0:2d} @ {1:2d} {2}{0:<2d}'
def demo(bisect_fn):
for needle in reversed(NEEDLES):
position = bisect_fn(HAYSTACK, needle) # <1>
offset = position * ' |' # <2>
print(ROW_FMT.format(needle, position, offset)) # <3>
if __name__ == '__main__':
if sys.argv[-1] == 'left': # <4>
bisect_fn = bisect.bisect_left
else:
bisect_fn = bisect.bisect
print('DEMO:', bisect_fn.__name__) # <5>
print('haystack ->', ' '.join('%2d' % n for n in HAYSTACK))
demo(bisect_fn)
# END BISECT_DEMO
| 4
| 4
|
src/main.py
|
Mysigyeong/ADOAG
| 0
|
12779388
|
<reponame>Mysigyeong/ADOAG
import angr
import claripy
import subprocess
import avatar2
import sys
import os
import networkx
import signal
from angr_targets import AvatarGDBConcreteTarget
if __name__ == "__main__" and __package__ is None:
from sys import path
from os.path import dirname as dir
path.append(dir(path[0]))
__package__ = "iCFG"
from ..iCFG.find_indirects import Indirects
from ..iCFG.jump_resolver import IFCCReslover
def _handler(signum, frame):
raise Exception("end of time")
STDIN_FD = 0
GDB_SERVER_IP = "localhost"
GDB_SERVER_PORT = 12345
binary_path = os.path.dirname(os.path.abspath(__file__)) + "/../examples/main.o"
TARGET_BINARY = binary_path
base_addr = 0x400000 # To match addresses to Ghidra
subprocess.Popen("gdbserver %s:%s %s" % (GDB_SERVER_IP,GDB_SERVER_PORT,TARGET_BINARY),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=True)
avatar_gdb = AvatarGDBConcreteTarget(avatar2.archs.x86.X86_64, GDB_SERVER_IP, GDB_SERVER_PORT)
proj = angr.Project(TARGET_BINARY, main_opts={'base_addr': base_addr}, concrete_target=avatar_gdb, use_sim_procedures=True)
if len(sys.argv) > 1:
binary_path = sys.argv[1]
bin_file = open(binary_path, 'rb')  # renamed from `bin` to avoid shadowing the built-in
indirects = Indirects(bin_file)
indirects.indirect_list()
indirect_reslover = IFCCReslover(proj, indirects.indirects)
main = proj.loader.main_object.get_symbol("main")
vuln = proj.loader.main_object.get_symbol("vuln")
target = proj.loader.main_object.get_symbol("target")
proj_cfg = angr.Project(binary_path, load_options={'auto_load_libs':False})
cfg = proj_cfg.analyses.CFGFast(
function_starts=[main.rebased_addr],
indirect_jump_resolvers = tuple(
angr.analyses.cfg.indirect_jump_resolvers.default_resolvers.default_indirect_jump_resolvers(
proj_cfg.loader.main_object,
proj_cfg
)) + (indirect_reslover,)
)
src_node = cfg.model.get_any_node(vuln.rebased_addr)
dst_node = cfg.model.get_any_node(target.rebased_addr)
# entry_node = cfg.get_any_node(proj.entry)
print("Now we got CFG!")
# For print CFG as png
# plot_cfg(cfg, "ais3_cfg", asminst=True, remove_imports=True, remove_path_terminator=True)
# ddg = proj.analyses.DDG(cfg=cfg)
# plot_ddg_stmt(ddg.graph, "ddg_stmt", project=proj)
# This is our goal!
paths = networkx.all_simple_paths(cfg.graph, src_node, dst_node)
# paths = networkx.all_simple_paths(cfg.graph, entry_node, vuln_node)
path = []
for _path in paths:
for node in _path:
if (node.addr < indirects.init_base) or (node.addr > indirects.end):
pass
else:
path.append(node.addr)
break
# iCFG will give the path to target
# concrete execution
entry_state = proj.factory.entry_state()
entry_state.options.add(angr.options.SYMBION_SYNC_CLE)
entry_state.options.add(angr.options.SYMBION_KEEP_STUBS_ON_SYNC)
un_init_func_table_addr = 0x404050
simgr = proj.factory.simulation_manager(entry_state)
simgr.use_technique(angr.exploration_techniques.Symbion(find=[vuln.rebased_addr]))
exploration = simgr.run()
vuln_state = None
if len(exploration.stashes['found']) > 0:
vuln_state = exploration.stashes['found'][0]
if vuln_state == None:
print("Something's wrong, I can feel it")
sys.exit(0)
# un_init_func_table_val = int.from_bytes(avatar_gdb.read_memory(un_init_func_table_addr, 8), "little")
# un_init_func_table = claripy.BVV(un_init_func_table_val, 64).reversed
# vuln_state.memory.store(un_init_func_table_addr, un_init_func_table)
#symbolic execution
signal.signal(signal.SIGALRM, _handler)
simgr = proj.factory.simulation_manager(vuln_state)
MEMORY_LOAD_CHUNK = 10
for checkpoint in path:
attempt = 0
while True:
simgr_bak = simgr.copy(deep=True)
signal.alarm(10)
# print(hex(checkpoint))
try:
simgr.explore(find=checkpoint)
except Exception:
print("Timeout!!!")
state = simgr_bak.active[-1]
node = cfg.model.get_any_node(state.addr)
from capstone import *
from capstone.x86 import *
disassembler = Cs(CS_ARCH_X86, CS_MODE_64)
disassembler.detail = True
block_offset = node.addr - indirects.base
assembly = disassembler.disasm(indirects.section.data()[block_offset:block_offset+node.size], indirects.base + block_offset)
unresolved_addr = []
for insn in assembly:
print()
if "mov" in insn.mnemonic:
mov_sim = simgr_bak.copy(deep=True)
print(mov_sim.active)
signal.alarm(10)
try:
mov_sim.explore(find=insn.address)
state = mov_sim.found[0]
print("Current addr:",hex(state.addr), "Mov addr:", hex(insn.address))
except Exception:
print("Can't reach MOV ins!")
exit()
signal.alarm(0)
print("0x%x:\t%s\t%s" %(insn.address, insn.mnemonic, insn.op_str))
if insn.op_count(X86_OP_IMM) != 0:
# print(insn.op_find(X86_OP_IMM, 1).imm)
pass
elif insn.op_count(X86_OP_MEM):
# e.g [rax*8 + 0x404050]
# print(insn.op_str.split(",")[1])
scale = 0
i = insn.op_find(X86_OP_MEM, 1)
c = 0
base = 0
index = 0
disp = 0
size = 8
if "qword" in insn.op_str:
size = 8
elif "dword" in insn.op_str:
size = 4
elif "word" in insn.op_str:
size = 2
elif "byte" in insn.op_str:
size = 1
if i.value.mem.base != 0:
base = state.solver.eval(state.regs.__getattr__(insn.reg_name(i.value.mem.base)))
if i.value.mem.index != 0:
index = state.solver.eval(state.regs.__getattr__(insn.reg_name(i.value.mem.index)))
disp = i.value.mem.disp
scale = i.value.mem.scale
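                        # effective address = disp + base + index * scale (x86 SIB addressing)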
res = disp + base + (index * scale)
# print(hex(res))
# TODO
# maybe we have to check whether res is dereferencable address.
if state.solver.eval(state.memory.load(res, size)) == 0:
unresolved_addr.append(res)
#unresolved_addr = 0x404050 #TODO
for addr in unresolved_addr:
for idx in range(MEMORY_LOAD_CHUNK):
try:
unresolved_variable = claripy.BVV(avatar_gdb.read_memory(addr + (attempt * MEMORY_LOAD_CHUNK + idx) * 8, 8), 64)
                        simgr_bak.active[-1].memory.store(addr + (attempt * MEMORY_LOAD_CHUNK + idx) * 8, unresolved_variable)
except:
pass
simgr = simgr_bak
# print("Alright, let's try again with", hex(unresolved_addr), simgr.active[-1].memory.load(addr + (attempt * 10 + idx) * 8, 8))
attempt += 1
continue
else:
if len(simgr.found) > 0 and checkpoint != path[-1]:
# just checking whether the address of third gate is in un_init_func_table
print(simgr.found[0].memory.load(un_init_func_table_addr, 8))
simgr = proj.factory.simulation_manager(simgr.found[0])
print(hex(checkpoint), "Found! move to next checkpoint.")
break
if len(simgr.found) > 0:
print(simgr.found[0].posix.dumps(STDIN_FD))
else:
print("Not found")
# b'00000000000004199496000000000000041994720000000000'
| 2.140625
| 2
|
ml_enabler/exceptions.py
|
gaoxm/ml-enabler-cli
| 5
|
12779389
|
class InvalidData(Exception):
pass
class InvalidModelResponse(Exception):
pass
class ImageFetchError(Exception):
pass
| 1.476563
| 1
|
make_pizzas.py
|
yiyidhuang/PythonCrashCrouse2nd
| 0
|
12779390
|
# import pizza
# pizza.make_pizza(16, 'pepperoni')
# pizza.make_pizza(12, 'mushrooms', 'green peppers', 'extra cheese')
# from pizza import make_pizza
# make_pizza(16, 'pepperoni')
# make_pizza(12, 'mushrooms', 'green peppers', 'extra cheese')
# from pizza import make_pizza as mp
# mp(16, 'pepperoni')
# mp(12, 'mushrooms', 'green peppers', 'extra cheese')
# import pizza as p
# p.make_pizza(16, 'pepperoni')
# p.make_pizza(12, 'mushrooms', 'green peppers', 'extra cheese')
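# Note: star imports like the one below are generally discouraged (PEP 8) because
# they hide where names come from; the explicit forms above are usually preferable.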
from pizza import *
make_pizza(16, 'pepperoni')
make_pizza(12, 'mushrooms', 'green peppers', 'extra cheese')
| 2.59375
| 3
|
level2/124_country_numbers.py
|
hyo-jae-jung/programmers
| 0
|
12779391
|
<reponame>hyo-jae-jung/programmers<gh_stars>0
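# "Country 124" numbering: effectively base 3, but with digits 1, 2 and 4 instead
# of 0, 1 and 2. A remainder of 0 maps to the digit 4 and borrows 1 from the
# quotient (hence n//3 - 1), e.g. 1 -> '1', 2 -> '2', 3 -> '4', 4 -> '11', 10 -> '41'.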
def solution(n):
temp = []
    while n > 0:
if n%3 == 0:
temp.append(4)
n = n//3 - 1
else:
temp.append(n%3)
n = n//3
return ''.join(str(i) for i in reversed(temp))
print(solution(1))
print(solution(2))
print(solution(3))
print(solution(4))
print(solution(5))
print(solution(6))
print(solution(7))
print(solution(8))
print(solution(9))
print(solution(20))
print(solution(30))
print(solution(32))
| 3.484375
| 3
|
fedlab_benchmarks/fedavg_v1.1.2/scale/mnist-cnn/client.py
|
KarhouTam/FedLab-benchmarks
| 46
|
12779392
|
import torch
import argparse
import sys
import os
import torchvision
import torchvision.transforms as transforms
from fedlab.core.client.scale.trainer import SubsetSerialTrainer
from fedlab.core.client.scale.manager import ScaleClientPassiveManager
from fedlab.core.network import DistNetwork
from fedlab.utils.logger import Logger
from fedlab.utils.aggregator import Aggregators
from fedlab.utils.functional import load_dict
sys.path.append("../../../")
from models.cnn import CNN_MNIST
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Distbelief training example")
parser.add_argument("--ip", type=str, default="127.0.0.1")
parser.add_argument("--port", type=str, default="3002")
parser.add_argument("--world_size", type=int)
parser.add_argument("--rank", type=int)
parser.add_argument("--partition", type=str, default="noniid")
parser.add_argument("--gpu", type=str, default="0,1,2,3")
parser.add_argument("--ethernet", type=str, default=None)
args = parser.parse_args()
if args.gpu != "-1":
args.cuda = True
os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
else:
args.cuda = False
trainset = torchvision.datasets.MNIST(
root='../../../datasets/mnist/',
train=True,
download=True,
transform=transforms.ToTensor())
if args.partition == "noniid":
data_indices = load_dict("mnist_noniid.pkl")
elif args.partition == "iid":
data_indices = load_dict("mnist_iid.pkl")
else:
raise ValueError("invalid partition type ", args.partition)
    # Process rank x represents client ids from (x-1)*10 up to (but not including) (x-1)*10 + 10
    # e.g. rank 5 <--> clients 40-49
client_id_list = [
i for i in range((args.rank - 1) * 10, (args.rank - 1) * 10 + 10)
]
# get corresponding data partition indices
sub_data_indices = {
idx: data_indices[cid]
for idx, cid in enumerate(client_id_list)
}
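    # e.g. for rank 1 this maps local indices 0-9 to the data partitions of clients 0-9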
model = CNN_MNIST()
aggregator = Aggregators.fedavg_aggregate
network = DistNetwork(address=(args.ip, args.port),
world_size=args.world_size,
rank=args.rank,
ethernet=args.ethernet)
trainer = SubsetSerialTrainer(model=model,
dataset=trainset,
data_slices=sub_data_indices,
aggregator=aggregator,
args={
"batch_size": 100,
"lr": 0.02,
"epochs": 5
})
manager_ = ScaleClientPassiveManager(trainer=trainer, network=network)
manager_.run()
| 2.03125
| 2
|
alternateController/supervised_policy.py
|
ranok92/deepirl
| 2
|
12779393
|
<reponame>ranok92/deepirl<filename>alternateController/supervised_policy.py
import sys
import pdb
from tqdm import tqdm
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn import MSELoss
from torch.utils.data import DataLoader
import torch.optim as optim
import numpy as np
import math
from tensorboardX import SummaryWriter
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from imblearn.over_sampling import RandomOverSampler
from collections import Counter
sys.path.insert(0, '..')
from neural_nets.base_network import BasePolicy
from envs.drone_data_utils import read_training_data
from envs.drone_env_utils import angle_between
from envs.gridworld_drone import GridWorldDrone
from featureExtractor.drone_feature_extractor import DroneFeatureRisk_speedv2
def remove_samples(dataset, label_to_remove, no_of_samples):
'''
    Given a dataset for categorical classification, reduces the number of tuples with a
    particular label down to the number of samples requested.
input:
dataset - a dataset in numpy
label_to_remove - the value of the label to adjust
no_of_samples - number of samples of that label to retain.
output:
truncated_dataset - dataset with number of tuples adjusted as required
excess_data - the tuples that were removed from the original dataset
'''
label_counter = Counter(dataset[:, -1])
total_tuples_to_retain = 0
for val in label_counter:
if val != label_to_remove:
total_tuples_to_retain += label_counter[val]
total_tuples_to_retain += no_of_samples
#print('Total data :', total_tuples_to_retain)
truncated_dataset_shape = np.asarray(dataset.shape)
truncated_dataset_shape[0] = total_tuples_to_retain
truncated_dataset_array = np.zeros(truncated_dataset_shape)
    excess_data_shape = truncated_dataset_shape.copy()  # copy so the mutation below does not alias truncated_dataset_shape
    excess_data_shape[0] = dataset.shape[0] - total_tuples_to_retain
excess_data_array = np.zeros(excess_data_shape)
label_counter = 0
excess_data_counter = 0
truncated_array_counter = 0
for i in range(dataset.shape[0]):
if dataset[i, -1] == label_to_remove:
if label_counter < no_of_samples:
truncated_dataset_array[truncated_array_counter, :] = dataset[i, :]
truncated_array_counter += 1
label_counter += 1
else:
excess_data_array[excess_data_counter, :] = dataset[i, :]
excess_data_counter += 1
else:
truncated_dataset_array[truncated_array_counter, :] = dataset[i, :]
truncated_array_counter += 1
return truncated_dataset_array, excess_data_array
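# Worked example (comments only): with labels Counter({0: 100, 1: 10}) and
# remove_samples(dataset, label_to_remove=0, no_of_samples=20), the truncated
# dataset keeps 20 label-0 rows plus all 10 label-1 rows (30 rows total), and
# the remaining 80 label-0 rows are returned as excess_data.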
def get_quantization_division(raw_value, quantization_value, num_of_divisions):
'''
print('raw value :',raw_value)
print('quantization_value :', quantization_value)
print('num_of_divisions :', num_of_divisions)
'''
base_division = int(num_of_divisions/2)
if raw_value > 0:
raw_value_to_div = int(raw_value/quantization_value) + base_division
else:
raw_value_to_div = math.ceil(raw_value/quantization_value) + base_division
clipped_value = min(max(0, raw_value_to_div), num_of_divisions-1)
#print ('clipped value :', clipped_value)
return clipped_value
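# Worked examples (comments only): with quantization_value=10 and num_of_divisions=7
# the base division is 3, so raw_value=5 lands in bin int(5/10) + 3 = 3 (the centre
# bin) and raw_value=-25 lands in bin ceil(-25/10) + 3 = 1; results are clipped to [0, 6].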
def rescale_value(value, current_limits, new_limits):
"""
Given a value and the limits, rescales the value to the new limits
input:
value : float variable containing the value
current_limits : a tuple containing the lower and upper limits
of the value
new_limits : a tuple containing the desired lower and upper
limits.
"""
old_range = current_limits[1] - current_limits[0]
new_range = new_limits[1] - new_limits[0]
return (value-current_limits[0]) / old_range * new_range \
+ new_limits[0]
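# e.g. rescale_value(0.5, (0, 1), (-30, 30)) -> 0.0 and
#      rescale_value(0.25, (0, 1), (0, 2)) -> 0.5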
class SupervisedNetworkRegression(BasePolicy):
def __init__(self, input_size, output_size, hidden_dims=[256]):
super(SupervisedNetworkRegression, self).__init__()
self.hidden = []
self.input_layer = nn.Sequential(
nn.Linear(input_size, hidden_dims[0]),
nn.ELU()
)
for i in range(1, len(hidden_dims)):
self.hidden.append(nn.Sequential(
nn.Linear(hidden_dims[i-1], hidden_dims[i]),
nn.ELU()
))
self.hidden_layer = nn.ModuleList(self.hidden)
self.orientation_layer = nn.Sequential(
nn.Linear(hidden_dims[-1], hidden_dims[-1]),
nn.Sigmoid(),
nn.Linear(hidden_dims[-1], 1)
)
self.speed_layer = nn.Sequential(
nn.Linear(hidden_dims[-1], hidden_dims[-1]),
nn.Sigmoid(),
nn.Linear(hidden_dims[-1], 1)
)
def forward(self, x):
x = self.input_layer(x)
for i in range(len(self.hidden)):
x = self.hidden_layer[i](x)
x_orient = self.orientation_layer(x)
x_speed = self.speed_layer(x)
return x_orient, x_speed
def sample_action(self, state):
x = self.forward(state)
return x
def eval_action_continuous(self, state, state_raw, env):
goal_to_agent_vector = state_raw['goal_state'] - state_raw['agent_state']['position']
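        # signed angle in degrees between the agent's heading and the agent-to-goal
        # vector, wrapped into the [-180, 180] range by the two corrections below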
signed_angle_between = (np.arctan2(state_raw['agent_state']['orientation'][0],
state_raw['agent_state']['orientation'][1]) -
np.arctan2(goal_to_agent_vector[0],
goal_to_agent_vector[1]))*180/np.pi
if signed_angle_between > 180:
signed_angle_between = signed_angle_between - 360
elif signed_angle_between < -180:
signed_angle_between = 360 + signed_angle_between
output_orient, output_speed = self.forward(state)
#pdb.set_trace()
output_orient = output_orient.detach().cpu().numpy()
output_speed = output_speed.detach().cpu().numpy()
change_in_angle = output_orient - signed_angle_between
orient_action = min(max(-env.max_orient_change, change_in_angle),
env.max_orient_change)
change_in_speed = output_speed - state_raw['agent_state']['speed']
speed_action = min(max(-.8, change_in_speed), .8)
return np.asarray([speed_action, int(orient_action)])
def eval_action(self, state, state_raw, env):
orient_div = env.orient_quantization
num_orient_divs = len(env.orientation_array)
speed_div = env.speed_quantization
num_speed_divs = len(env.speed_array)
ref_vector = np.asarray([-1, 0])
orient_limits = (-30.0, +30.0)
old_limit = (-1, +1)
goal_to_agent_vector = state_raw['goal_state'] - state_raw['agent_state']['position']
signed_angle_between = (np.arctan2(state_raw['agent_state']['orientation'][0],
state_raw['agent_state']['orientation'][1]) -
np.arctan2(goal_to_agent_vector[0],
goal_to_agent_vector[1]))*180/np.pi
if signed_angle_between > 180:
signed_angle_between = signed_angle_between - 360
elif signed_angle_between < -180:
signed_angle_between = 360 + signed_angle_between
orient, speed = self.forward(state)
orient = orient.detach().cpu().numpy()
orient_rescale = rescale_value(orient, (0.0, 1.0), (-30, 30))
speed = speed.detach().cpu().numpy()
speed_rescale = rescale_value(speed, (0.0, 1.0), (0.0, 2.0))
change_in_angle = orient_rescale - signed_angle_between
orient_action = get_quantization_division(change_in_angle, orient_div, num_orient_divs)
change_in_speed = speed_rescale - state_raw['agent_state']['speed']
speed_action = get_quantization_division(change_in_speed, speed_div, num_speed_divs)
'''
print('The change needed in orientation :{}, change in speed :{}'.format(change_in_angle,
change_in_speed))
        print('Current heading direction :{}, current speed :{}'.format(env.state['agent_head_dir'], env.state['agent_state']['speed']))
print('The output :', output)
print('The speed action {}, the orient action {}'.format(speed_action,
orient_action))
pdb.set_trace()
'''
return (speed_action * num_orient_divs) + orient_action, \
np.asarray([orient, speed]), \
np.asarray([orient_rescale, speed_rescale])
class SupervisedNetworkClassification(BasePolicy):
def __init__(self, input_size, output_size, hidden_dims=[256]):
super(SupervisedNetworkClassification, self).__init__()
self.hidden = []
self.input_layer = nn.Sequential(
nn.Linear(input_size, hidden_dims[0]),
nn.ELU()
)
for i in range(1, len(hidden_dims)):
self.hidden.append(nn.Sequential(
nn.Linear(hidden_dims[i-1], hidden_dims[i]),
nn.ELU()
))
self.hidden_layer = nn.ModuleList(self.hidden)
self.output_layer = nn.Sequential(
nn.Linear(hidden_dims[-1], output_size),
)
def forward(self, x):
x = self.input_layer(x)
for i in range(len(self.hidden)):
x = self.hidden_layer[i](x)
x = self.output_layer(x)
return x
def sample_action(self, state):
x = self.forward(state)
return x
def eval_action(self, state_vector):
output = self.forward(state_vector)
_, index = torch.max(output, 1)
return index.unsqueeze(1)
class SupervisedPolicyController:
'''
Class to train supervised policies. There are two types of supervised policies, classification based
and regression based.
    Training classification based policies:
    1. set categorical = True
    2. output dims = Number of classes
'''
def __init__(self, input_dims, output_dims,
hidden_dims=[256],
learning_rate=0.001,
categorical=True,
mini_batch_size=200,
policy_path=None,
save_folder=None):
'''
Initialize the class
'''
#parameters for the policy network
self.input_dims = input_dims
self.hidden_dims = hidden_dims
self.output_layer = output_dims
if not categorical:
self.policy = SupervisedNetworkRegression(input_dims, output_dims, hidden_dims=self.hidden_dims)
else:
self.policy = SupervisedNetworkClassification(input_dims, output_dims, hidden_dims=self.hidden_dims)
if policy_path is not None:
self.policy.load(policy_path)
self.device = torch.device(
"cuda" if torch.cuda.is_available() else 'cpu')
self.policy = self.policy.to(self.device)
self.categorical = categorical
#parameters for the optimizer
self.lr = learning_rate
self.optimizer = optim.Adam(self.policy.parameters(), lr=self.lr)
if self.categorical:
self.loss = torch.nn.CrossEntropyLoss()
else:
self.loss = torch.nn.MSELoss()
#parameters for the training
self.mini_batch_size = mini_batch_size
#saving the data
self.test_interval = 1
self.save_folder = None
if save_folder:
self.save_folder = save_folder
self.tensorboard_writer = SummaryWriter(self.save_folder)
def remove_imbalances_from_data(self, training_data_tensor, majority_ratio):
"""
Takes in a dataset with imbalances in the labels and returns a dataset with relative balance
in the labels
input:
training_data_tensor : a tensor of shape (no.of samples x size of each sample(including output))
            majority_ratio : a float between 0 and 1 that denotes how much the non-majority labels need to be upsampled
                             with respect to the majority label.
output:
x_data : x values of the dataset after balancing as per specifications provided.
y_data : y values of the dataset after balancing as per specifications provided.
"""
majority_label = None
majority_counts = 0
training_data_numpy = training_data_tensor.cpu().numpy()
print('Statistics of labels in the original dataset :', Counter(training_data_numpy[:, -1]))
original_label_counter = Counter(training_data_numpy[:, -1])
for val in original_label_counter:
majority_label = val
majority_counts = original_label_counter[val]
break
samples_to_retain = int(majority_counts*majority_ratio)
truncated_training_data, truncated_majority_samples = remove_samples(training_data_numpy, majority_label, samples_to_retain)
x_data = truncated_training_data[:, 0:-1]
y_data = truncated_training_data[:, -1:]
print('Statistics of labels after removing extra :', Counter(y_data.squeeze()))
#remove imbalances from the data in case of categorical data
ros = RandomOverSampler(random_state=100)
x_data, y_data = ros.fit_resample(x_data, y_data)
x_data = np.concatenate((x_data, truncated_majority_samples[:, 0:-1]), axis=0)
y_data = np.concatenate((y_data, truncated_majority_samples[:, -1]), axis=0)
print('The class distribution after upsampling :', Counter(y_data.squeeze()))
#pdb.set_trace()
return x_data, np.expand_dims(y_data, axis=1)
def scale_regression_output(self, training_dataset, output_limits):
'''
        Given the training data, this method min-max scales the output columns
        into the range [0, 1].
        input:
            training_dataset: a tensor of shape nxm, where n is the number of tuples in the
                              dataset and m is the size of a single tuple including the input and
                              output
            output_limits : a list of length equal to the number of output columns, containing
                            tuples denoting the range of values in each column
        output:
            two numpy arrays: the (unchanged) input columns and the min-max scaled output columns
'''
no_of_output_columns = len(output_limits)
output_tensor = training_dataset[:, -no_of_output_columns:]
input_tensor = training_dataset[:, 0:-no_of_output_columns]
for i in range(1, no_of_output_columns+1):
mean_val = output_tensor[:, -i].mean()
std_val = output_tensor[:, -i].std()
print("For column: {} \nMean :{}, Std deviation:{}".format(i,
mean_val,
std_val))
min_val = output_limits[-i][0]
max_val = output_limits[-i][1]
range_val = max_val - min_val
output_tensor[:, -i] = (output_tensor[:, -i] - min_val)/range_val
mean_val = output_tensor[:, -i].mean()
std_val = output_tensor[:, -i].std()
print("After normalization:\n For column: {} \nMean :{}, Std deviation:{}".format(i,
mean_val,
std_val))
training_dataset[:, -no_of_output_columns:] = output_tensor
scaled_training_dataset = training_dataset
return input_tensor.cpu().numpy(), output_tensor.cpu().numpy()
def arrange_data(self, parent_folder, test_data_percent=0.2):
'''
        loads the data and arranges it in train/test format;
        for the classification network it handles imbalances in the label data,
        for the regression network it scales the output values
'''
training_data_tensor = read_training_data(parent_folder)
if self.categorical:
y_label_size = 1
else:
y_label_size = self.output_layer
if self.categorical:
majority_ratio = .2
x_data, y_data = self.remove_imbalances_from_data(training_data_tensor,
majority_ratio)
else:
scale_info = [(-180, 180), (0, 2)]
x_data, y_data = self.scale_regression_output(training_data_tensor,
scale_info)
x_data = torch.from_numpy(x_data).to(self.device)
y_data = torch.from_numpy(y_data).to(self.device)
'''
if self.categorical:
y_data_onehot = torch.zeros((y_data.shape[0], self.output_layer)).to(self.device)
pdb.set_trace()
y_data_onehot.scatter_(1, y_data, 1)
y_data = y_data_onehot.type(torch.double)
'''
x_train, x_test, y_train, y_test = train_test_split(x_data,
y_data,
test_size=test_data_percent)
return x_train, x_test, y_train, y_test
def train(self, num_epochs, data_folder):
'''
trains a policy network
'''
x_train, x_test, y_train, y_test = self.arrange_data(data_folder)
data_loader_train = DataLoader(torch.cat((x_train, y_train), 1),
shuffle=True,
batch_size=self.mini_batch_size)
data_loader_test = DataLoader(torch.cat((x_test, y_test), 1),
shuffle=True,
batch_size=self.mini_batch_size)
action_counter = 0
'''
for i in y_train:
if i[0]!=17:
action_counter += 1
'''
counter = 0
if self.categorical:
label_size = 1
else:
label_size = self.output_layer
for i in tqdm(range(num_epochs)):
epoch_loss = []
y_train_pred = torch.zeros(y_train.shape)
for batch, sample in enumerate(data_loader_train):
x_mini_batch = sample[:, 0:-label_size]
if self.categorical:
y_mini_batch = sample[:, -label_size:].type(torch.long)
else:
y_mini_batch = sample[:, -label_size:].type(torch.float)
y_pred_mini_batch = self.policy(x_mini_batch.type(torch.float))
if (i+1)%self.test_interval == 0:
                    # if this epoch is an evaluation epoch, store the prediction values
#pdb.set_trace()
y_pred_classes = self.policy.eval_action(x_mini_batch.type(torch.float))
y_train_pred[batch*self.mini_batch_size:
batch*self.mini_batch_size+sample.shape[0], :] = y_pred_classes.clone().detach()
loss = self.loss(y_pred_mini_batch, y_mini_batch.squeeze())
counter += 1
loss.backward()
self.optimizer.step()
self.optimizer.zero_grad()
epoch_loss.append(loss.detach())
if self.save_folder:
self.tensorboard_writer.add_scalar('Log_info/loss', loss, i)
if (i+1)%self.test_interval == 0:
y_test_pred = torch.zeros(y_test.shape)
#collecting prediction for test tuples
for batch, sample in enumerate(data_loader_test):
x_mini_batch = sample[:, 0:-label_size]
if self.categorical:
y_mini_batch = sample[:, -label_size:].type(torch.long)
else:
y_mini_batch = sample[:, -label_size:].type(torch.float)
y_pred_mini_batch = self.policy.eval_action(x_mini_batch.type(torch.float))
y_test_pred[batch*self.mini_batch_size:
batch*self.mini_batch_size+sample.shape[0], :] = y_pred_mini_batch.clone().detach()
#pdb.set_trace()
train_accuracy = accuracy_score(y_train, y_train_pred, normalize=True)
test_accuracy = accuracy_score(y_test, y_test_pred, normalize=True)
print("For epoch: {} \n Train accuracy :{} | Test accuracy :{}\n=========".format(i,
train_accuracy,
test_accuracy))
if self.save_folder:
self.tensorboard_writer.add_scalar('Log_info/training_accuracy',
train_accuracy, i)
self.tensorboard_writer.add_scalar('Log_info/testing_accuracy',
test_accuracy, i)
if self.save_folder:
self.tensorboard_writer.close()
self.policy.save(self.save_folder)
def train_regression(self, num_epochs, data_folder):
'''
trains a policy network
'''
x_train, x_test, y_train, y_test = self.arrange_data(data_folder)
data_loader = DataLoader(torch.cat((x_train, y_train), 1),
shuffle=True,
batch_size=self.mini_batch_size)
if self.categorical:
y_train = y_train.type(torch.long)
y_test = y_test.type(torch.long)
else:
y_train = y_train.type(torch.float)
y_test = y_test.type(torch.float)
action_counter = 0
'''
for i in y_train:
if i[0]!=17:
action_counter += 1
'''
counter = 0
if self.categorical:
label_size = 1
else:
label_size = self.output_layer
for i in tqdm(range(num_epochs)):
for batch, sample in enumerate(data_loader):
x_mini_batch = sample[:, 0:-label_size]
if self.categorical:
y_mini_batch = sample[:, -label_size:].type(torch.long)
else:
y_mini_batch = sample[:, -label_size:].type(torch.float)
orient_pred, speed_pred = self.policy(x_mini_batch.type(torch.float))
#
loss_orient = self.loss(orient_pred, y_mini_batch.squeeze()[:, 0])
loss_speed = self.loss(speed_pred, y_mini_batch.squeeze()[:, 1])
#pdb.set_trace()
loss = loss_orient + loss_speed
counter += 1
#print(loss)
loss.backward()
self.optimizer.step()
self.optimizer.zero_grad()
if self.save_folder:
self.tensorboard_writer.add_scalar('Log_info/loss', loss, i)
self.tensorboard_writer.add_scalar('Log_info/speed_loss', loss_speed, i)
self.tensorboard_writer.add_scalar('Log_info/orient_loss', loss_orient, i)
if (i+1)%self.test_interval == 0:
orient_train, speed_train = self.policy(x_train.type(torch.float))
orient_test, speed_test = self.policy(x_test.type(torch.float))
train_loss = self.loss(orient_train.detach(), y_train.squeeze()[:, 0]) + \
self.loss(speed_train.detach(), y_train.squeeze()[:, 1])
test_loss = self.loss(orient_test.detach(), y_test.squeeze()[:, 0]) + \
self.loss(speed_test.detach(), y_test.squeeze()[:, 1])
print("For epoch: {} \n Training loss :{} | Testing loss :{}\n=========".format(i,
train_loss,
test_loss))
if self.save_folder:
self.tensorboard_writer.add_scalar('Log_info/training_loss',
train_loss, i)
self.tensorboard_writer.add_scalar('Log_info/testing_loss',
test_loss.type(torch.float), i)
#print('Loss from speed :{} , loss from orientation :{} ,batch_loss :{}'.format(batch_loss_speed,
# batch_loss_orient,
# batch_loss))
if self.save_folder:
self.tensorboard_writer.close()
self.policy.save(self.save_folder)
def play_policy(self,
num_runs,
env,
max_episode_length,
feat_ext):
'''
Loads up an environment and checks the performance of the agent.
'''
#initialize variables needed for the run
agent_width = 10
obs_width = 10
step_size = 2
grid_size = 10
#load up the environment
#initialize the feature extractor
#container to store the actions for analysis
action_raw_list = []
action_scaled_list = []
#play the environment
for i in range(num_runs):
state = env.reset()
print("Replacing pedestrian :", env.cur_ped)
state_features = feat_ext.extract_features(state)
state_features = torch.from_numpy(state_features).type(torch.FloatTensor).to(self.device)
done = False
t = 0
while t < max_episode_length:
if self.categorical:
action = self.policy.eval_action(state_features)
else:
action, raw_action, scaled_action = self.policy.eval_action(state_features, state, env)
action_raw_list.append(raw_action)
action_scaled_list.append(scaled_action)
#pdb.set_trace()
state, _, done, _ = env.step(action)
state_features = feat_ext.extract_features(state)
state_features = torch.from_numpy(state_features).type(torch.FloatTensor).to(self.device)
t+=1
if done:
break
#pdb.set_trace()
def play_regression_policy(self,
num_runs,
max_episode_length,
feat_extractor):
'''
Loads up an environment and checks the performance of the agent.
'''
#initialize variables needed for the run
agent_width = 10
obs_width = 10
step_size = 2
grid_size = 10
#load up the environment
annotation_file = "../envs/expert_datasets/university_students\
/annotation/processed/frame_skip_1/students003_processed_corrected.txt"
env = GridWorldDrone(
display=True,
is_onehot=False,
seed=0,
obstacles=None,
show_trail=False,
is_random=False,
annotation_file=annotation_file,
subject=None,
tick_speed=60,
obs_width=10,
step_size=step_size,
agent_width=agent_width,
replace_subject=True,
segment_size=None,
external_control=True,
step_reward=0.001,
show_comparison=True,
consider_heading=True,
show_orientation=True,
continuous_action=False,
# rows=200, cols=200, width=grid_size)
rows=576,
cols=720,
width=grid_size,
)
#initialize the feature extractor
feat_ext = None
if feat_extractor == "DroneFeatureRisk_speedv2":
feat_ext = DroneFeatureRisk_speedv2(
agent_width=agent_width,
obs_width=obs_width,
step_size=step_size,
grid_size=grid_size,
show_agent_persp=False,
return_tensor=False,
thresh1=18,
thresh2=30,
)
#play the environment
for i in range(num_runs):
state = env.reset()
state_features = feat_ext.extract_features(state)
state_features = torch.from_numpy(state_features).type(torch.FloatTensor).to(self.device)
done = False
t = 0
while t < max_episode_length:
action = self.policy.eval_action(state_features)
state, _, done, _ = env.step(action)
state_features = feat_ext.extract_features(state)
state_features = torch.from_numpy(state_features).type(torch.FloatTensor).to(self.device)
t+=1
if done:
break
if __name__=='__main__':
s_policy = SupervisedPolicyController(80, 35,
categorical=True,
hidden_dims=[1024, 4096, 1024],
mini_batch_size=2000,
#policy_path='./test_balanced_data_categorical/0.pt',
save_folder='./delete_this')
data_folder = ('../envs/expert_datasets/university_students/annotation/traj_info/frame_skip_1/'
'students003/DroneFeatureRisk_speedv2_with_actions_lag8')
s_policy.train(10, data_folder)
'''
data_folder = '../envs/expert_datasets/university_students/annotation/traj_info/frame_skip_1/\
students003/DroneFeatureRisk_speedv2_with_raw_actions'
s_policy.train_regression(20, data_folder)
s_policy.play_categorical_policy(100, 200, 'DroneFeatureRisk_speedv2')
'''
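# --- Usage sketch (illustrative; not part of the original script) ---
# Assuming a checkpoint written by train() and that the constructor accepts
# the policy_path keyword shown commented out above, an evaluation-only
# controller could be rebuilt like this; the path is a placeholder.
# s_eval = SupervisedPolicyController(80, 35,
#                                     categorical=True,
#                                     hidden_dims=[1024, 4096, 1024],
#                                     policy_path='./delete_this/0.pt')
# s_eval.play_regression_policy(10, 200, 'DroneFeatureRisk_speedv2')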
| 2.34375
| 2
|
nimbus/fabnet/network.py
|
fabregas/nimbusfs-node
| 0
|
12779394
|
"""
Package for interacting with the network at a high level.
"""
import asyncio
import random
import pickle
from .protocol import ManagementProtocol
from .utils import digest, logger, future_list, future_dict
from .storage import ForgetfulStorage
from .node import DHTNode
from .crawling import ValueSpiderCrawl
from .crawling import NodeSpiderCrawl
from .ext_api import ExternalAPI
from .routing import RoutingTable
class Server(object):
"""
High level view of a node instance.
This is the object that should be created
to start listening as an active node on the network.
"""
def __init__(self, ksize=10, alpha=3, node_id=None, storage=None):
"""
Create a server instance. Call listen() to start serving on a port.
Args:
ksize (int): The k parameter from the paper
alpha (int): The alpha parameter from the paper
node_id: The id for this node on the network.
storage: An instance that implements `storage.IStorage`
"""
self.ksize = ksize
self.alpha = alpha
self.storage = storage or ForgetfulStorage()
self.node = DHTNode(node_id or digest(random.getrandbits(255)))
self.protocol = None
self.ext_api = None
self.refresh_loop = asyncio.ensure_future(self.refresh_table())
self.loop = asyncio.get_event_loop()
self.__transport = None
self.port = None
def listen(self, port, ext_port):
"""
Start listening on the given port.
"""
self.port = port
self.node.host = '127.0.0.1'
self.node.ext_host = '0.0.0.0'
self.node.port = port
self.node.ext_port = ext_port
router = RoutingTable(self.ksize, self.node)
self.protocol = ManagementProtocol(router, self.node, self.storage,
self.ksize, self.new_node_signal)
self.ext_api = ExternalAPI(self.protocol, self.storage)
bind_addr = ('0.0.0.0', port)
listen = self.loop.create_datagram_endpoint(lambda: self.protocol,
local_addr=bind_addr)
self.__transport, _ = self.loop.run_until_complete(listen)
self.ext_api.listen(self.loop, '0.0.0.0', ext_port)
@asyncio.coroutine
def stop(self, say_bye=True):
if self.__transport is None:
return
logger.info('stopping {} ...'.format(self.node))
if say_bye:
for node in self.protocol.router.iterate():
ret = yield from self.protocol.call_bye(node)
self.__transport.close()
if self.ext_api:
self.ext_api.close()
self.__transport = None
self.ext_api = None
def new_node_signal(self, new_node):
asyncio.ensure_future(self.transfer_key_values(new_node))
@asyncio.coroutine
def transfer_key_values(self, node):
"""
Given a new node, send it all the keys/values it should be storing.
@param node: A new node that just joined (or that we just found out
about).
Process:
For each key in storage, get the k closest nodes. If the new node is
closer than the furthest in that list, and this server's node is
closer than the closest in that list, then store the key/value
on the new node (per section 2.5 of the paper).
"""
ds = []
for key, value in self.storage.iteritems():
keynode = DHTNode(digest(key))
neighbors = self.protocol.router.find_neighbors(keynode)
new_node_close = this_node_closest = True
if len(neighbors) > 0:
new_node_close = node.distance(keynode) < \
neighbors[-1].distance(keynode)
this_node_closest = self.node.distance(keynode) < \
neighbors[0].distance(keynode)
if new_node_close and this_node_closest:
res = yield from self.protocol.call_store(node, key, value) # FIXME
ds.append(res)
return ds
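# Worked example (illustrative comment, not original code): with the k
# closest neighbors for a key sorted by XOR distance as [n_1, ..., n_k],
# the new node receives the key/value only when dist(new, key) <
# dist(n_k, key) (it belongs among the k closest) and dist(self, key) <
# dist(n_1, key) (this node is currently the closest holder, so the
# transfer is its responsibility).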
def bootstrap(self, addrs):
"""
Bootstrap the server by connecting to other known nodes in the network.
Args:
addrs: A `list` of (ip, port) `tuple` pairs.
Note that only IP addresses
are acceptable - hostnames will cause an error.
"""
# if the transport hasn't been initialized yet, wait a second
if self.protocol.transport is None:
return self.loop.call_later(1, self.bootstrap, addrs)
def init_table(results):
nodes = []
for addr, result in results.items():
if result is None:
continue
if result:
nodes.append(DHTNode(*result))
spider = NodeSpiderCrawl(self.protocol, self.node, nodes,
self.ksize, self.alpha)
return spider.find()
ds = {}
for addr in addrs:
ds[addr] = self.protocol.ping(addr, self.node)
if not ds:
ds[None] = asyncio.Future()
ds[None].set_result(None)
return future_dict(ds, init_table)
@asyncio.coroutine
def get_data_block(self, key):
key = digest(key)
stream = self.storage.get(key, None)
return stream
@asyncio.coroutine
def put_data_block(self, key, stream):
key = digest(key)
yield from self.storage.save(key, stream)
# FIXME REMOVE ME
@asyncio.coroutine
def refresh_table(self):
"""
Refresh buckets that haven't had any lookups in the last hour
(per section 2.3 of the paper).
"""
while True:
yield from asyncio.sleep(3600)
ds = []
for node_id in self.protocol.get_refresh_ids():
node = DHTNode(node_id)
nearest = self.protocol.router.find_neighbors(node, self.alpha)
spider = NodeSpiderCrawl(self.protocol, node, nearest)
ds.append(spider.find())
for future in ds:
res = yield from future
ds = []
# Republish keys older than one hour
for key, value in self.storage.iteritems_older_than(3600):
ds.append(self.set(key, value))
for future in ds:
res = yield from future
def bootstrappable_neighbors(self):
"""
Get a :class:`list` of (ip, port) :class:`tuple` pairs suitable
for use as an argument to the bootstrap method.
The server should have been bootstrapped
already - this is just a utility for getting some neighbors and then
storing them if this server is going down for a while. When it comes
back up, the list of nodes can be used to bootstrap.
"""
neighbors = self.protocol.router.find_neighbors(self.node)
return [tuple(n)[-2:] for n in neighbors]
def inet_visible_ip(self):
"""
Get the internet-visible IPs of this node as other nodes see it.
Returns:
A `list` of IPs.
If no one can be contacted, then the `list` will be empty.
"""
def handle(results):
ips = [result[1][0] for result in results if result[0]]
logger.debug("other nodes think our ip is %s", ips)
return ips
ds = []
for neighbor in self.bootstrappable_neighbors():
ds.append(self.protocol.stun(neighbor))
return future_list(ds, handle)
def get(self, key):
"""
Get a key if the network has it.
Returns:
:class:`None` if not found, the value otherwise.
"""
node = DHTNode(digest(key))
nearest = self.protocol.router.find_neighbors(node)
if len(nearest) == 0:
logger.warning("There are no known neighbors to get key %s", key)
future = asyncio.Future()
future.set_result(None)
return future
spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize,
self.alpha)
return spider.find()
def find_node(self, key):
"""
Find the nodes closest to the given key.
Returns:
:class:`None` if there are no known neighbors, the found nodes otherwise.
"""
node = DHTNode(digest(key))
logger.info('finding node for key: %s', node.hex_id())
nearest = self.protocol.router.find_neighbors(node)
if len(nearest) == 0:
logger.warning("There are no known neighbors to find node %s", key)
future = asyncio.Future()
future.set_result(None)
return future
spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
self.alpha)
return spider.find()
@asyncio.coroutine
def set(self, key, value):
"""
Set the given key to the given value in the network.
"""
logger.debug("setting '%s' = '%s' on network", key, value)
dkey = digest(key)
def store(nodes):
logger.debug("setting '%s' on %s", key, nodes)
ds = [self.protocol.call_store(node, dkey, value)
for node in nodes]
return future_list(ds, self._any_respond_success)
node = DHTNode(dkey)
nearest = self.protocol.router.find_neighbors(node)
if len(nearest) == 0:
logger.warning("There are no known neighbors to set key %s", key)
future = asyncio.Future()
future.set_result(False)
return future
spider = NodeSpiderCrawl(self.protocol, node, nearest,
self.ksize, self.alpha)
nodes = spider.find()
while not isinstance(nodes, list):
nodes = yield from nodes
return store(nodes)
def _any_respond_success(self, responses):
"""
Given the result of a DeferredList of calls to peers,
ensure that at least one of them was contacted
and responded with a Truthy result.
"""
if True in responses:
return True
return False
def save_state(self, fname):
"""
Save the state of this node (the alpha/ksize/id/immediate neighbors)
to a cache file with the given fname.
"""
data = {'ksize': self.ksize,
'alpha': self.alpha,
'id': self.node.node_id,
'neighbors': self.bootstrappable_neighbors()}
if len(data['neighbors']) == 0:
logger.warning("No known neighbors, so not writing to cache.")
return
with open(fname, 'wb') as f:
pickle.dump(data, f)
@classmethod
def load_state(cls, fname):
"""
Load the state of this node (the alpha/ksize/id/immediate neighbors)
from a cache file with the given fname.
"""
with open(fname, 'rb') as f:
data = pickle.load(f)
s = cls(data['ksize'], data['alpha'], data['id'])
if len(data['neighbors']) > 0:
s.bootstrap(data['neighbors'])
return s
def save_state_regularly(self, fname, frequency=600):
"""
Save the state of node with a given regularity to the given
filename.
Args:
fname: File name to save regularly to
frequency: Frequency in seconds at which the state should be saved.
By default, 10 minutes.
"""
def _save_cycle(fname, freq):
while True:
yield from asyncio.sleep(freq)
self.save_state(fname)
return asyncio.ensure_future(_save_cycle(fname, frequency))
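# --- Usage sketch (illustrative; not part of the original module) ---
# Hypothetical bring-up of a node; ports and the peer address are
# placeholders, and bootstrap() accepts IP addresses only.
# server = Server(ksize=10, alpha=3)
# server.listen(8468, ext_port=8469)  # DHT port and external API port
# server.bootstrap([('192.0.2.10', 8468)])
# server.loop.run_forever()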
| 3
| 3
|
hloc/match_features.py
|
oldshuren/Hierarchical-Localization
| 0
|
12779395
|
import argparse
import json
import torch
from pathlib import Path
import h5py
import logging
from tqdm import tqdm
import pprint
import numpy as np
from . import matchers
from .utils.base_model import dynamic_load
from .utils.parsers import names_to_pair
'''
A set of standard configurations that can be directly selected from the command
line using their name. Each is a dictionary with the following entries:
- output: the name of the match file that will be generated.
- model: the model configuration, as passed to a feature matcher.
'''
confs = {
'superglue': {
'output': 'matches-superglue',
'model': {
'name': 'superglue',
'weights': 'outdoor',
'sinkhorn_iterations': 50,
},
},
'NN': {
'output': 'matches-NN-mutual-dist.7',
'model': {
'name': 'nearest_neighbor',
'mutual_check': True,
'distance_threshold': 0.7,
},
}
}
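# Illustrative selection sketch (comment only; not in the original): a
# configuration is picked by name and handed to get_model() below, e.g.
#   model = get_model(confs['superglue'])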
def get_model(conf):
device = 'cuda' if torch.cuda.is_available() else 'cpu'
Model = dynamic_load(matchers, conf['model']['name'])
model = Model(conf['model']).eval().to(device)
return model
@torch.no_grad()
def do_match(name0, name1, pairs, matched, num_matches_found, model, match_file, feature_file, query_feature_file, min_match_score, min_valid_ratio):
device = 'cuda' if torch.cuda.is_available() else 'cpu'
pair = names_to_pair(name0, name1)
# Avoid recomputing duplicate pairs to save time
if len({(name0, name1), (name1, name0)} & matched) or pair in match_file:
return num_matches_found
data = {}
feats0, feats1 = query_feature_file[name0], feature_file[name1]
for k in feats1.keys():
data[k+'0'] = feats0[k].__array__()
for k in feats1.keys():
data[k+'1'] = feats1[k].__array__()
data = {k: torch.from_numpy(v)[None].float().to(device)
for k, v in data.items()}
# some matchers might expect an image but only use its size
data['image0'] = torch.empty((1, 1,)+tuple(feats0['image_size'])[::-1])
data['image1'] = torch.empty((1, 1,)+tuple(feats1['image_size'])[::-1])
pred = model(data)
matches = pred['matches0'][0].cpu().short().numpy()
scores = pred['matching_scores0'][0].cpu().half().numpy()
# if score < min_match_score, set the match to invalid
matches[scores < min_match_score] = -1
num_valid = np.count_nonzero(matches > -1)
if float(num_valid)/len(matches) > min_valid_ratio:
v = pairs.get(name0)
if v is None:
v = set()
v.add(name1)
pairs[name0] = v
grp = match_file.create_group(pair)
grp.create_dataset('matches0', data=matches)
grp.create_dataset('matching_scores0', data=scores)
matched |= {(name0, name1), (name1, name0)}
num_matches_found += 1
return num_matches_found
@torch.no_grad()
def best_match(conf, global_feature_path, feature_path, match_output_path, query_global_feature_path=None, query_feature_path=None, num_match_required=10,
max_try=None, min_matched=None, pair_file_path=None, num_seq=None, sample_list=None, sample_list_path=None, min_match_score=0.85, min_valid_ratio=0.09):
logging.info('Dynamic matching of local features with configuration:'
f'\n{pprint.pformat(conf)}')
assert global_feature_path.exists(), global_feature_path
global_feature_file = h5py.File(str(global_feature_path), 'r')
if query_global_feature_path is not None:
logging.info(f'Using query_global_feature_path: {query_global_feature_path}')
query_global_feature_file = h5py.File(str(query_global_feature_path), 'r')
else:
query_global_feature_file = global_feature_file
assert feature_path.exists(), feature_path
feature_file = h5py.File(str(feature_path), 'r')
if query_feature_path is not None:
logging.info(f'Using query_feature_path: {query_feature_path}')
query_feature_file = h5py.File(str(query_feature_path), 'r')
else:
query_feature_file = feature_file
match_file = h5py.File(str(match_output_path), 'a')
if sample_list_path is not None:
sample_list = json.load(open(str(sample_list_path), 'r'))
# get all sample names
if sample_list is not None:
names = sample_list
q_names = names
else:
names = []
global_feature_file.visititems(
lambda _, obj: names.append(obj.parent.name.strip('/'))
if isinstance(obj, h5py.Dataset) else None)
names = list(set(names))
names.sort()
q_names = []
query_global_feature_file.visititems(
lambda _, obj: q_names.append(obj.parent.name.strip('/'))
if isinstance(obj, h5py.Dataset) else None)
q_names = list(set(q_names))
q_names.sort()
device = 'cuda' if torch.cuda.is_available() else 'cpu'
def tensor_from_names(names, hfile):
desc = [hfile[i]['global_descriptor'].__array__() for i in names]
desc = torch.from_numpy(np.stack(desc, 0)).to(device).float()
return desc
desc = tensor_from_names(names, global_feature_file)
if query_global_feature_path is not None:
q_desc = tensor_from_names(q_names, query_global_feature_file)
else:
q_desc = desc
# descriptors are normalized, dot product indicates how close they are
sim = torch.einsum('id,jd->ij', q_desc, desc)
if max_try is None:
max_try = len(names)
topk = torch.topk(sim, max_try, dim=1).indices.cpu().numpy()
Model = dynamic_load(matchers, conf['model']['name'])
model = Model(conf['model']).eval().to(device)
pairs = {}
matched = set()
for name0, indices in tqdm(zip(q_names, topk)):
num_matches_found = 0
# try sequential neighbor first
if num_seq is not None:
name0_at = names.index(name0)
begin_from = name0_at - num_seq
if begin_from < 0:
begin_from = 0
for i in range(begin_from, name0_at+num_seq):
if i >= len(names):
break
name1 = names[i]
if name0 != name1:
num_matches_found = do_match(name0, name1, pairs, matched, num_matches_found, model, match_file, feature_file, query_feature_file, min_match_score, min_valid_ratio)
# then the global retrieval
for i in indices:
name1 = names[i]
if query_global_feature_path is not None or name0 != name1:
num_matches_found = do_match(name0, name1, pairs, matched, num_matches_found, model, match_file, feature_file, query_feature_file, min_match_score, min_valid_ratio)
if num_matches_found >= num_match_required:
break
if num_matches_found < num_match_required:
logging.warning(f'num match for {name0} found {num_matches_found} less than num_match_required:{num_match_required}')
match_file.close()
if pair_file_path is not None:
if min_matched is not None:
pairs = {k:v for k,v in pairs.items() if len(v) >= min_matched }
pairs_list = []
for n0 in pairs.keys():
for n1 in pairs.get(n0):
pairs_list.append((n0,n1))
with open(str(pair_file_path), 'w') as f:
f.write('\n'.join(' '.join([i, j]) for i, j in pairs_list))
logging.info('Finished exporting matches.')
@torch.no_grad()
def main(conf, pairs, features, export_dir, db_features=None, query_features=None, output_dir=None, exhaustive=False):
logging.info('Matching local features with configuration:'
f'\n{pprint.pformat(conf)}')
if db_features:
feature_path = db_features
else:
feature_path = Path(export_dir, features+'.h5')
assert feature_path.exists(), feature_path
feature_file = h5py.File(str(feature_path), 'r')
if query_features is not None:
logging.info(f'Using query_features {query_features}')
else:
logging.info('No query_features')
query_features = feature_path
assert query_features.exists(), query_features
query_feature_file = h5py.File(str(query_features), 'r')
pairs_name = pairs.stem
if not exhaustive:
assert pairs.exists(), pairs
with open(pairs, 'r') as f:
pair_list = f.read().rstrip('\n').split('\n')
else:
logging.info(f'Writing exhaustive match pairs to {pairs}.')
assert not pairs.exists(), pairs
# get the list of images from the feature file
images = []
feature_file.visititems(
lambda name, obj: images.append(obj.parent.name.strip('/'))
if isinstance(obj, h5py.Dataset) else None)
images = list(set(images))
pair_list = [' '.join((images[i], images[j]))
for i in range(len(images)) for j in range(i)]
with open(str(pairs), 'w') as f:
f.write('\n'.join(pair_list))
device = 'cuda' if torch.cuda.is_available() else 'cpu'
Model = dynamic_load(matchers, conf['model']['name'])
model = Model(conf['model']).eval().to(device)
match_name = f'{features}_{conf["output"]}_{pairs_name}'
if output_dir is None:
output_dir = export_dir
match_path = Path(output_dir, match_name+'.h5')
match_path.parent.mkdir(exist_ok=True, parents=True)
match_file = h5py.File(str(match_path), 'a')
matched = set()
for pair in tqdm(pair_list, smoothing=.1):
name0, name1 = pair.split(' ')
pair = names_to_pair(name0, name1)
# Avoid recomputing duplicate pairs to save time
if len({(name0, name1), (name1, name0)} & matched) \
or pair in match_file:
continue
data = {}
feats0, feats1 = query_feature_file[name0], feature_file[name1]
for k in feats1.keys():
data[k+'0'] = feats0[k].__array__()
for k in feats1.keys():
data[k+'1'] = feats1[k].__array__()
data = {k: torch.from_numpy(v)[None].float().to(device)
for k, v in data.items()}
# some matchers might expect an image but only use its size
data['image0'] = torch.empty((1, 1,)+tuple(feats0['image_size'])[::-1])
data['image1'] = torch.empty((1, 1,)+tuple(feats1['image_size'])[::-1])
pred = model(data)
grp = match_file.create_group(pair)
matches = pred['matches0'][0].cpu().short().numpy()
grp.create_dataset('matches0', data=matches)
if 'matching_scores0' in pred:
scores = pred['matching_scores0'][0].cpu().half().numpy()
grp.create_dataset('matching_scores0', data=scores)
matched |= {(name0, name1), (name1, name0)}
match_file.close()
logging.info('Finished exporting matches.')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--export_dir', type=Path)
parser.add_argument('--output_dir', type=Path, required=False)
parser.add_argument('--features', type=str,
default='feats-superpoint-n4096-r1024')
parser.add_argument('--db_features', type=Path)
parser.add_argument('--query_features', type=Path, required=False)
parser.add_argument('--pairs', type=Path)
parser.add_argument('--conf', type=str, default='superglue',
choices=list(confs.keys()))
parser.add_argument('--exhaustive', action='store_true')
# best_match
parser.add_argument('--best_match', action='store_true')
parser.add_argument('--global_feature_path', type=Path)
parser.add_argument('--feature_path', type=Path)
parser.add_argument('--query_global_feature_path', type=Path)
parser.add_argument('--query_feature_path', type=Path)
parser.add_argument('--match_output_path', type=Path)
parser.add_argument('--num_match_required', type=int, default=10)
parser.add_argument('--min_matched', type=int, default=1)
parser.add_argument('--max_try', type=int)
parser.add_argument('--num_seq', type=int)
parser.add_argument('--min_match_score', type=float, default=0.85)
parser.add_argument('--min_valid_ratio', type=float, default=0.09)
parser.add_argument('--sample_list_path', type=Path)
parser.add_argument('--pair_file_path', type=Path)
args = parser.parse_args()
if args.best_match:
best_match(confs[args.conf], args.global_feature_path, args.feature_path, args.match_output_path,
query_global_feature_path=args.query_global_feature_path, query_feature_path=args.query_feature_path,
num_match_required=args.num_match_required, min_matched=args.min_matched, min_match_score=args.min_match_score, min_valid_ratio=args.min_valid_ratio,
max_try=args.max_try, num_seq=args.num_seq, sample_list_path=args.sample_list_path, pair_file_path=args.pair_file_path)
else:
main(
confs[args.conf], args.pairs, args.features, args.export_dir,
db_features=args.db_features, query_features=args.query_features, output_dir=args.output_dir, exhaustive=args.exhaustive)
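# Example invocation (hypothetical module path and file names, shown for
# illustration only):
#   python -m hloc.match_features --export_dir outputs/ \
#       --features feats-superpoint-n4096-r1024 --pairs pairs.txt --conf superglue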
| 2.203125
| 2
|
HW/hw07/tests/interleave.py
|
IZUMI-Zu/CS61A
| 3
|
12779396
|
<reponame>IZUMI-Zu/CS61A
test = {
'name': 'interleave',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
scm> (interleave (list 1 3 5) (list 2 4 6))
(1 2 3 4 5 6)
""",
'hidden': False,
'locked': False
},
{
'code': r"""
scm> (interleave (list 1 3 5) nil)
(1 3 5)
scm> (interleave nil (list 1 3 5))
(1 3 5)
scm> (interleave nil nil)
()
""",
'hidden': False,
'locked': False
},
{
'code': r"""
scm> (interleave (list 1 3 5) (list 2 4))
(1 2 3 4 5)
scm> (interleave (list 2 4) (list 1 3 5))
(2 1 4 3 5)
scm> (interleave (list 1 2) (list 1 2))
(1 1 2 2)
scm> (interleave '(1 2 3 4 5 6) '(7 8))
(1 7 2 8 3 4 5 6)
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
scm> (load-all ".")
""",
'teardown': '',
'type': 'scheme'
}
]
}
| 1.929688
| 2
|
Python3/0734-Sentence-Similarity/soln.py
|
wyaadarsh/LeetCode-Solutions
| 5
|
12779397
|
import collections
class Solution:
def areSentencesSimilar(self, words1, words2, pairs):
"""
:type words1: List[str]
:type words2: List[str]
:type pairs: List[List[str]]
:rtype: bool
"""
if len(words1) != len(words2):
return False
sims = collections.defaultdict(set)
for a, b in pairs:
sims[a].add(b)
return all(w1 == w2 or w2 in sims[w1] or w1 in sims[w2] for w1, w2 in zip(words1, words2))
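# Quick check (illustrative): with pairs [["great","fine"], ["acting","drama"],
# ["skills","talent"]], words1 ["great","acting","skills"] and words2
# ["fine","drama","talent"] are similar, so the call returns True.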
| 3.453125
| 3
|
coop/models/base.py
|
lionls/coop
| 15
|
12779398
|
<reponame>lionls/coop
import torch
import torch.nn as nn
class Model(nn.Module):
def __init__(self,
hidden_size: int,
latent_dim: int):
super().__init__()
self.hidden_size = hidden_size
self.latent_dim = latent_dim
def forward(self,
src: torch.Tensor,
tgt: torch.Tensor = None,
do_generate: bool = False,
**kwargs):
raise NotImplementedError()
@torch.no_grad()
def generate(self,
z: torch.Tensor,
num_beams: int = 4,
max_tokens: int = 256):
raise NotImplementedError()
@staticmethod
def klw(step: int,
interval: int,
r: float = 0.8,
t: float = 0.0,
s: int = 10000):
raise NotImplementedError()
| 2.546875
| 3
|
allhub/activity/starring.py
|
srinivasreddy/allhub
| 2
|
12779399
|
<gh_stars>1-10
from allhub.response import Response
from allhub.util import ErrorAPICode, config
from enum import Enum
class StarringDirection(Enum):
ASC = "asc"
DESC = "desc"
class StarringSort(Enum):
CREATED = "created"
UPDATED = "updated"
_mime_option = "application/vnd.github.v{version}.star+{mime}".format(
version=config.api_version, mime=config.api_mime_type
)
class StarringMixin:
def stargazers(self, owner, repo, starred_at=False, **kwargs):
url = "/repos/{owner}/{repo}/stargazers".format(owner=owner, repo=repo)
self.response = Response(
self.get(
url,
params={"starred_at": starred_at},
**{"Accept": _mime_option},
**kwargs,
),
"StarGazers",
)
return self.response.transform()
def starred(
self,
sort=StarringSort.CREATED,
direction=StarringDirection.DESC,
starred_at=False,
**kwargs,
):
if sort not in StarringSort:
raise ValueError("'sort' must be of type StarringSort")
if direction not in StarringDirection:
raise ValueError("'direction' must be of type StarringDirection")
url = "/user/starred"
params = [("sort", sort.value), ("direction", direction.value)]
if starred_at:
kwargs.update({"Accept": _mime_option})
self.response = Response(self.get(url, params, **kwargs), "StarRepos")
return self.response.transform()
def starred_by(
self,
username,
sort=StarringSort.CREATED,
direction=StarringDirection.DESC,
starred_at=False,
**kwargs,
):
if sort not in StarringSort:
raise ValueError("'sort' must be of type StarringSort")
if direction not in StarringDirection:
raise ValueError("'direction' must be of type StarringDirection")
url = "/users/{username}/starred".format(username=username)
params = [("sort", sort.value), ("direction", direction.value)]
if starred_at:
kwargs.update({"Accept": _mime_option})
self.response = Response(self.get(url, params, **kwargs), "StarRepos")
return self.response.transform()
def is_starred(self, owner, repo, **kwargs):
url = "/user/starred/{owner}/{repo}".format(owner=owner, repo=repo)
self.response = Response(self.get(url, **kwargs), "")
status_code = self.response.status_code
if status_code == 204:
is_starred = True
elif status_code == 404:
is_starred = False
else:
raise ErrorAPICode(
"url: {url} supposed to return 204 or 404 but returned {status_code}."
"Maybe try after sometime?".format(url=url, status_code=status_code)
)
return is_starred
def star_repo(self, owner, repo, **kwargs):
url = "/user/starred/{owner}/{repo}".format(owner=owner, repo=repo)
self.response = Response(self.put(url, **{"Content-Length": "0"}, **kwargs), "")
return self.response.status_code == 204
def unstar_repo(self, owner, repo, **kwargs):
url = "/user/starred/{owner}/{repo}".format(owner=owner, repo=repo)
self.response = Response(self.delete(url, **kwargs), "")
return self.response.status_code == 204
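# Illustrative use (assumes a client class that supplies get/put/delete and
# mixes in StarringMixin; the repo below is a placeholder):
#   client.star_repo('octocat', 'hello-world')   # True on HTTP 204
#   client.is_starred('octocat', 'hello-world')  # True / False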
| 2.59375
| 3
|
load_data.py
|
sufianj/bert4bsv-tf1.13
| 0
|
12779400
|
<reponame>sufianj/bert4bsv-tf1.13<filename>load_data.py
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
le = LabelEncoder()
###original
###df = pd.read_csv("data/train.csv", sep=';', encoding='utf-8') #In case of an error, try other encodings
#mine
#only type 1 pests & type 2 diseases
#df = pd.read_csv("data/stcs_type_50_50.csv", sep=',', encoding='utf-8')
# type 1, 2 + 0: skos:definitions from FrenchCropUsage
df = pd.read_csv("data/stcs_type_50_50_2701-2900.csv", sep=',', encoding='utf-8')
# Create the train and dev DataFrames that BERT will need, setting aside 1% of the data for test
###df_bert = pd.DataFrame({'user_id':df['ID'], 'label':le.fit_transform(df['Label']), 'alpha':['a']*df.shape[0], 'text':df['texte'].replace(r'\n',' ',regex=True)})
#mine
df_bert = pd.DataFrame({'user_id':0,'label':le.fit_transform(df['label']), 'alpha':'a', 'text':df['report_text'].replace(r'\n',' ',regex=True)})
df_bert.index = [x for x in range(0, len(df_bert))]
df_bert['user_id'] = df_bert.index
df_bert_train, df_bert_dev = train_test_split(df_bert, test_size=0.01)
# Create the test DataFrame that BERT will need
###df_test = pd.read_csv("data/test.csv", sep=';', encoding='utf-8') #In case of an error, try other encodings
#mine
#only type 1 pests & type 2 diseases
#df_test = pd.read_csv("data/stcs_type_50_50_test.csv", sep=',', encoding='utf-8')
# type 1, 2 + 0 : skos:definitions de FrenchCropUsage
df_test = pd.read_csv("data/stcs_type_50_50_2901-3000.csv", sep=',', encoding='utf-8')
###df_bert_test = pd.DataFrame({'user_id':df_test['ID'], 'text':df_test['texte'].replace(r'\n',' ',regex=True)})
#mine
df_bert_test = pd.DataFrame({'user_id':0, 'text':df_test['report_text'].replace(r'\n',' ',regex=True)})
df_bert_test.index = [x for x in range(0, len(df_bert_test))]
df_bert_test['user_id'] = df_bert_test.index
# Save the DataFrames in .tsv (tab-separated values) format, as BERT requires
df_bert_train.to_csv('data/train.tsv', sep='\t', index=False, header=False)
df_bert_dev.to_csv('data/dev.tsv', sep='\t', index=False, header=False)
df_bert_test.to_csv('data/test.tsv', sep='\t', index=False, header=True)
| 2.8125
| 3
|
bot.py
|
iryabuhin/proictis_dialogflow_webhook
| 0
|
12779401
|
from app import app, db
from app.models import ProjectInfo
from app import route
| 1.25
| 1
|
Project/fourier.py
|
ART-Students/E-media
| 2
|
12779402
|
<filename>Project/fourier.py
import numpy as np
from matplotlib import pyplot as plt
from PIL import Image
class Fourier:
@staticmethod
def show_plots(path):
plt.figure(figsize=(10, 8))
image = Image.open(path)
image_1 = np.array(image.convert('L'))
image_2 = np.fft.fft2(image_1)
image_3 = np.fft.fftshift(image_2)
plt.subplot(221), plt.imshow(image_1, "gray"), plt.title("Image")
plt.subplot(222), plt.imshow(np.log(np.abs(image_2)),
"gray"), plt.title("Spectrum")
plt.subplot(223), plt.imshow(np.log(np.abs(image_3)),
"gray"), plt.title("Centered")
plt.subplot(224), plt.imshow(np.log(np.abs(0.001+np.angle(image_3))),
"gray"), plt.title("Phase")
plt.show()
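# Example call (illustrative; the path is a placeholder):
#   Fourier.show_plots('sample.png')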
| 3.359375
| 3
|
news/config.py
|
ruslan-ok/ruslan
| 0
|
12779403
|
<filename>news/config.py
from task.const import *
app_config = {
'name': APP_NEWS,
'app_title': 'news',
'icon': 'newspaper',
'role': ROLE_NEWS,
'main_view': 'all',
'use_groups': True,
'sort': [
('event', 'event date'),
('name', 'name'),
],
'views': {
'all': {
'icon': 'infinity',
'title': 'all',
},
}
}
| 1.5625
| 2
|
dailyQuestion/2020/2020-10/10-07/python/solution_huosu.py
|
russellgao/algorithm
| 3
|
12779404
|
<filename>dailyQuestion/2020/2020-10/10-07/python/solution_huosu.py
from typing import List
def generateParenthesis(n: int) -> List[str]:
result = []
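# Backtracking sketch (comment added for clarity): S always holds a valid
# prefix; '(' may be appended while fewer than n are open, ')' while the
# prefix stays balanced (right < left), and each choice is undone (S.pop())
# after the recursive call returns.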
def dfs(S, left, right):
if len(S) == 2 * n:
result.append("".join(S))
return
if left < n:
S.append("(")
dfs(S, left + 1, right)
S.pop()
if right < left:
S.append(")")
dfs(S, left, right + 1)
S.pop()
dfs([], 0, 0)
return result
if __name__ == "__main__":
n = 3
result = generateParenthesis(n)
print(result)
| 3.9375
| 4
|
tests/test_main.py
|
OriHoch/kvfile
| 0
|
12779405
|
import datetime
import decimal
def test_sanity():
from kvfile import KVFile
kv = KVFile()
data = dict(
s='value',
i=123,
d=datetime.datetime.fromtimestamp(12325),
n=decimal.Decimal('1234.56'),
ss=set(range(10)),
o=dict(d=decimal.Decimal('1234.58'), n=datetime.datetime.fromtimestamp(12325))
)
for k, v in data.items():
kv.set(k, v)
for k, v in data.items():
assert kv.get(k) == v
assert sorted(kv.keys()) == sorted(data.keys())
assert sorted(kv.items()) == sorted(data.items())
| 2.5
| 2
|
tacred/lifelong/model/module/lstm_layer.py
|
qcw9714/FewShotContinualRE
| 21
|
12779406
|
<filename>tacred/lifelong/model/module/lstm_layer.py
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
import numpy as np
from ..base_model import base_model
class lstm_layer(base_model):
def __init__(self, max_length = 128, input_size = 50, hidden_size = 256, dropout = 0, bidirectional = True, num_layers = 1, config = None):
"""
Args:
input_size: dimension of the input embedding
hidden_size: hidden size
dropout: dropout applied to the outputs of each RNN layer except the last layer
bidirectional: if it is a bidirectional RNN
num_layers: number of recurrent layers
"""
super(lstm_layer, self).__init__()
self.device = config['device']
self.max_length = max_length
self.hidden_size = hidden_size
self.input_size = input_size
if bidirectional:
self.output_size = hidden_size * 2
else:
self.output_size = hidden_size
self.lstm = nn.LSTM(input_size, hidden_size, bidirectional = bidirectional, num_layers = num_layers, dropout = dropout)
def init_hidden(self, batch_size = 1, device='cpu'):
self.hidden = (torch.zeros(2, batch_size, self.hidden_size).to(device),
torch.zeros(2, batch_size, self.hidden_size).to(device))
def forward(self, inputs, lengths, inputs_indexs):
packed_embeds = torch.nn.utils.rnn.pack_padded_sequence(inputs, lengths)
lstm_out, hidden = self.lstm(packed_embeds, self.hidden)
permuted_hidden = hidden[0].permute([1,0,2]).contiguous()
permuted_hidden = permuted_hidden.view(-1, self.hidden_size * 2)
output_embedding = permuted_hidden[inputs_indexs]
return output_embedding
def ranking_sequence(self, sequence):
word_lengths = torch.tensor([len(sentence) for sentence in sequence])
_, indexs = word_lengths.sort(descending=True)
_, inverse_indexs = indexs.sort()
sequence = [sequence[i] for i in indexs]
return sequence, inverse_indexs
def pad_sequence(self, inputs, padding_value = 0):
self.init_hidden(len(inputs), self.device)
inputs, inputs_indexs = self.ranking_sequence(inputs)
lengths = [len(data) for data in inputs]
pad_inputs = torch.nn.utils.rnn.pad_sequence(inputs, padding_value = padding_value)
return pad_inputs, lengths, inputs_indexs
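# Usage sketch (illustrative; shapes and config keys are assumptions):
#   layer = lstm_layer(input_size=50, hidden_size=256, config={'device': 'cpu'})
#   padded, lengths, order = layer.pad_sequence(list_of_embedding_tensors)
#   sentence_embeddings = layer(padded, lengths, order)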
| 2.765625
| 3
|
tests/editor/i18n_test.py
|
gariel/lazyetl
| 0
|
12779407
|
from unittest import TestCase
from i18n import translate, LangText
langs = {
"lang1": "asd.text=aaa",
"lang2": "asd.text=bbb"
}
class TestLangText(LangText):
def load(self, langname):
super(TestLangText, self)._loadText(langs[langname])
@translate(TestLangText)
class TestTranslation:
def __init__(self):
self.text = "notset"
self.lang("asd.text", self.set_text)
def set_text(self, text):
self.text = text
class StepsTest(TestCase):
def test_should_translate(self):
testt = TestTranslation()
self.assertEqual(testt.text, "notset")
testt.lang_set("lang1")
self.assertEqual(testt.text, "aaa")
testt.lang_set("lang2")
self.assertEqual(testt.text, "bbb")
| 3.0625
| 3
|
src/main/python/missing_item.py
|
mohnoor94/ProblemsSolving
| 8
|
12779408
|
"""
*** 'Airbnb' interview question ***
Problem statement: https://youtu.be/cdCeU8DJvPM
"""
from numbers import Number
from typing import List
def find_missing1(first: list, second: list) -> Number:
"""
- Time Complexity: O(len(first))
- Space Complexity: O(len(first))
:param first: a list of items
:param second: a copy of the first list but with one missing item
:return: the missed item
"""
set1 = set(first)
set2 = set(second)
for item in set1:
if item not in set2:
return item
return None
def find_missing2(first: list, second: list) -> Number:
"""
- Time Complexity: O(len(first))
- Space Complexity: O(len(first))
:param first: a list of items
:param second: a copy of the first list but with one missing item
:return: the missed item
"""
return list(set(first) - set(second))[0]
def find_missing3(first: List[Number], second: List[Number]) -> Number:
"""
- Time Complexity: O(len(first))
- Space Complexity: O(1) <-- can be more for bigger numbers
:param first: a list of items
:param second: a copy of the first list but with one missing item
:return: the missed item
"""
return sum(first) - sum(second)
def find_missing4(first: List[Number], second: List[Number]) -> Number:
"""
- Time Complexity: O(len(first))
- Space Complexity: O(1)
:param first: a list of items
:param second: a copy of the first list but with one missing item
:return: the missed item
"""
xor_sum = 0
for n in first:
xor_sum ^= n
for n in second:
xor_sum ^= n
return xor_sum
if __name__ == '__main__':
print(find_missing1([4, 12, 9, 5, 6], [4, 9, 12, 6]))
print(find_missing2([4, 12, 9, 5, 6], [4, 9, 12, 6]))
print(find_missing3([4, 12, 9, 5, 6], [4, 9, 12, 6]))
print(find_missing4([4, 12, 9, 5, 6], [4, 9, 12, 6]))
| 4.09375
| 4
|
esp_flasher.py
|
pratikmokashi/ESP-FLASH-TOOL
| 0
|
12779409
|
<gh_stars>0
import os
import time
from tkinter import *
print(os.getcwd())
window = Tk()
window.title("esp flash tool")
window.geometry('500x210')
lbl = Label(window, text="Hello this is esp flashing tool",font=("Arial Bold",10))
lb_make = Label(window,text="click to make the project")
lb_path = Label(window,text="insert your project path")
lb_flash = Label(window,text="click to flash")
lb_make_flash = Label(window,text="click to make and flash")
lb_monitor = Label(window,text="click to monitor")
lb_erase = Label(window,text="click to erase flash")
make_command ='make'
flash_command ='make flash'
make_flash_command ='make && make flash'
monitor_command = 'make monitor'
erase_flash_command = 'make erase_flash'
def make_clicked():
lb_make.configure(text="making")
print(os.getcwd())
os.system(make_command)
def path_clicked():
lb_path.configure(text="path inserted")
res = txt.get()
time.sleep(0.2)
os.chdir(res)
print(os.getcwd())
def flash_clicked():
lb_flash.configure(text="flashing your esp32")
time.sleep(0.2)
os.system(flash_command)
lb_flash.configure(text="esp32 flashed")
def flash_make_clicked():
lb_make_flash.configure(text="making and flashing the esp32")
time.sleep(0.2)
os.system(make_flash_command)
lb_make_flash.configure(text="compiled")
def monitor_clicked():
lb_monitor.configure(text="monitor opened")
time.sleep(0.2)
os.system(monitor_command)
def erase_clicked():
lb_erase.configure(text="erased")
time.sleep(0.2)
os.system(erase_flash_command)
txt = Entry(window,width=10)
txt.grid(column=0, row=2)
btn_path = Button(window, text="PATH",command=path_clicked)
btn_path.grid(column=1, row=2)
btn_make = Button(window, text="MAKE",command=make_clicked)
btn_make.grid(column=1, row=3)
btn_flash= Button(window, text="FLASH",command=flash_clicked)
btn_flash.grid(column=1, row=4)
btn_flash_make= Button(window, text="MAKE+FLASH",command=flash_make_clicked)
btn_flash_make.grid(column=1, row=5)
btn_monitor= Button(window, text="MONITOR",command=monitor_clicked)
btn_monitor.grid(column=1, row=6)
btn_erase= Button(window, text="ERASE",command=erase_clicked)
btn_erase.grid(column=1, row=7)
lbl.grid(column=1, row=0)
lb_path.grid(column=3,row=2)
lb_make.grid(column=3,row=3)
lb_flash.grid(column=3,row=4)
lb_make_flash.grid(column=3,row=5)
lb_monitor.grid(column=3,row=6)
lb_erase.grid(column=3,row=7)
window.mainloop()
| 2.90625
| 3
|
src/simulator/cache/algorithms/landlord.py
|
pskopnik/htc-cache-simulator
| 1
|
12779410
|
<filename>src/simulator/cache/algorithms/landlord.py
from apq import KeyedItem, KeyedPQ
from dataclasses import dataclass, field
from enum import auto, Enum
from typing import Iterable, Optional
from ..state import AccessInfo, FileID, StateDrivenProcessor, StateDrivenOnlineProcessor, Storage
from ...params import parse_user_args, SimpleField
class Mode(Enum):
TOTAL_SIZE = auto()
ACCESS_SIZE = auto()
FETCH_SIZE = auto()
ADD_FETCH_SIZE = auto()
NO_COST = auto()
CONSTANT = auto()
@classmethod
def from_str(cls, val: str) -> 'Mode':
if val == 'total_size':
return cls.TOTAL_SIZE
elif val == 'access_size':
return cls.ACCESS_SIZE
elif val == 'fetch_size':
return cls.FETCH_SIZE
elif val == 'add_fetch_size':
return cls.ADD_FETCH_SIZE
elif val == 'no_cost':
return cls.NO_COST
elif val == 'constant':
return cls.CONSTANT
else:
raise ValueError(f'Unknown {cls.__name__} str value {val!r}')
class Landlord(StateDrivenOnlineProcessor):
"""Processor evicting the file with the lowest "credit" per volume.
Volume refers to the space of the cache's storage medium taken up, i.e.
the size of the cached fraction of the file (referred to as the "total
cached size"). The credit per volume is considered for eviction decisions.
Landlord evicts the file with the lowest credit per volume. This value is
deducted from the credit per volume of all files remaining in the cache.
The Landlord processor can run in several modes. The mode determines how a
file's credit is updated on re-access. Initially the credit is set to the
cost of fetching the file, i.e. the total cached size of the file. Note
this means the initial credit per volume is always 1. During its lifetime
in the cache, the credit per volume decreases when other files are evicted
(see above). When a file in the cache is accessed again its credit is
increased up to its total cached size. A file's credit is never reduced on
re-access. If a mode would reduce a file's credit, the credit is left
unchanged instead.
TOTAL_SIZE - The credit is set to the total cached size of the file. This
emulates LRU.
ACCESS_SIZE - The credit is set to the size of the accessed fraction of
the file.
FETCH_SIZE - The credit is set to the size of the newly fetched fraction
of the file caused by the access.
ADD_FETCH_SIZE - The fetched size is added onto the current credit.
NO_COST - A file's credit is never increased on re-access. This almost
emulates FIFO, but not quite, as the volume credit decreases whenever an
additional fraction of the file is fetched.
CONSTANT - The credit is set to 1.0 on every access. This corresponds to
the GD-SIZE(1) policy.
Landlord is a generalisation of many strategies, including FIFO, LRU,
GreedyDual and GreedyDual-Size.
"""
@dataclass
class Configuration(object):
mode: Mode = field(init=True, default=Mode.TOTAL_SIZE)
@classmethod
def from_user_args(cls, user_args: str) -> 'Landlord.Configuration':
inst = cls()
parse_user_args(user_args, inst, [
SimpleField('mode', Mode.from_str),
])
return inst
class State(StateDrivenProcessor.State):
@dataclass
class _FileInfo(object):
size: int = field(init=True)
access_rent_threshold: float = field(init=True)
class Item(StateDrivenProcessor.State.Item):
def __init__(self, file: FileID):
self._file: FileID = file
@property
def file(self) -> FileID:
return self._file
def __init__(self, configuration: 'Landlord.Configuration') -> None:
self._mode: Mode = configuration.mode
self._pq: KeyedPQ[Landlord.State._FileInfo] = KeyedPQ()
self._rent_threshold: float = 0.0
def pop_eviction_candidates(
self,
file: FileID = '',
ts: int = 0,
ind: int = 0,
requested_bytes: int = 0,
contained_bytes: int = 0,
missing_bytes: int = 0,
in_cache_bytes: int = 0,
free_bytes: int = 0,
required_free_bytes: int = 0,
) -> Iterable[FileID]:
file, running_volume_credit, _ = self._pq.pop() # Raises IndexError if empty
self._rent_threshold = running_volume_credit
return (file,)
def find(self, file: FileID) -> Optional[Item]:
if file in self._pq:
return Landlord.State.Item(file)
else:
return None
def remove(self, item: StateDrivenProcessor.State.Item) -> None:
if not isinstance(item, Landlord.State.Item):
raise TypeError('unsupported item type passed')
self.remove_file(item._file)
def remove_file(self, file: FileID) -> None:
del self._pq[file]
def process_access(self, file: FileID, ind: int, ensure: bool, info: AccessInfo) -> None:
it: Optional[KeyedItem[Landlord.State._FileInfo]]
current_credit: float
try:
it = self._pq[file]
current_credit = (it.value - self._rent_threshold) * it.data.size
except KeyError:
it = None
current_credit = 0.0
total_bytes = info.total_bytes
credit = self._credit(
requested_bytes = info.bytes_requested,
placed_bytes = info.bytes_added,
total_bytes = total_bytes,
current_credit = current_credit,
)
running_volume_credit = credit / total_bytes + self._rent_threshold
if it is None:
it = self._pq.add(file, running_volume_credit, Landlord.State._FileInfo(0, 0.0))
else:
self._pq.change_value(it, running_volume_credit)
it.data.size = total_bytes
it.data.access_rent_threshold = self._rent_threshold
def _credit(
self,
requested_bytes: int = 0,
placed_bytes: int = 0,
total_bytes: int = 0,
current_credit: float = 0.0,
) -> float:
mode = self._mode
if mode is Mode.TOTAL_SIZE:
return float(total_bytes)
elif mode is Mode.ACCESS_SIZE:
return max(current_credit, float(requested_bytes))
elif mode is Mode.FETCH_SIZE:
return max(current_credit, float(placed_bytes))
elif mode is Mode.ADD_FETCH_SIZE:
return current_credit + float(placed_bytes)
elif mode is Mode.NO_COST:
if current_credit == 0.0:
return float(total_bytes)
else:
return current_credit
elif mode is Mode.CONSTANT:
return 1.0
raise NotImplementedError
def _init_state(self) -> 'Landlord.State':
return Landlord.State(self._configuration)
def __init__(
self,
configuration: 'Landlord.Configuration',
storage: Storage,
state: Optional[State] = None,
):
self._configuration: Landlord.Configuration = configuration
super(Landlord, self).__init__(storage, state=state)
| 2.71875
| 3
|
2017/day10.py
|
iKevinY/advent
| 11
|
12779411
|
<gh_stars>10-100
import fileinput
def knot_hash(elems, lengths, pos=0, skip=0):
for l in lengths:
for i in range(l // 2):
x = (pos + i) % len(elems)
y = (pos + l - i - 1) % len(elems)
elems[x], elems[y] = elems[y], elems[x]
pos = (pos + l + skip) % len(elems)
skip += 1
return elems, pos, skip
# Read puzzle input
line = fileinput.input()[0].strip()
# Part 1
try:
lengths = [int(x) for x in line.split(',')]
elems = knot_hash(range(0, 256), lengths)[0]
print "Product of first two items in list:", elems[0] * elems[1]
except ValueError:
print "Skipping part 1 (can't parse puzzle input into ints)"
# Part 2
lengths = [ord(x) for x in line] + [17, 31, 73, 47, 23]
elems = range(0, 256)
pos = 0
skip = 0
# Perform 64 rounds of Knot Hash
for _ in range(64):
elems, pos, skip = knot_hash(elems, lengths, pos, skip)
# Convert from sparse hash to dense hash
sparse = elems
dense = []
for i in range(16):
res = 0
for j in range(0, 16):
res ^= sparse[(i * 16) + j]
dense.append(res)
print "Knot Hash of puzzle input:", ''.join('%02x' % x for x in dense)
| 3.25
| 3
|
lnkr/import_section.py
|
yjpark/lnkr
| 0
|
12779412
|
<reponame>yjpark/lnkr
import os
import sys
import lnkr
import term
KEY_LOCAL = 'local'
KEY_REMOTE = 'remote'
KEY_MODE = 'mode'
KEY_MODE_WIN = 'mode_win'
MODE_COPY = 'copy'
MODE_LINK = 'link'
MODE_SYMLINK = 'symlink'
windows_mode = sys.platform.startswith('win')
class ImportSection:
def __init__(self, path, key, values):
self.path = path
self.key = key
self.values = values
self.valid = self.parse()
self.loaded = False
self.package_config = None
self.wrapper_config = None
def __str__(self):
if self.valid:
return '[%s] -> {local = "%s", remote = "%s", mode = "%s"}' % (self.key, self.local, self.remote, self.mode)
else:
return 'Invalid: [%s] -> %s' % (self.key, self.values)
def get_section_value(self, key, optional=False):
return lnkr.get_section_value('ImportSection', self.values, key, optional)
def get_mode(self):
if windows_mode:
mode_win = self.get_section_value(KEY_MODE_WIN, True)
if mode_win:
return mode_win
return self.get_section_value(KEY_MODE, True)
def parse(self):
self.local = self.get_section_value(KEY_LOCAL, True)
self.remote = self.get_section_value(KEY_REMOTE, True)
self.mode = self.get_mode()
if self.local is None and self.remote is None:
term.error('Need to provide either "local" or "remote": %s' % term.format_param(self.key))
return False
return True
def do_load(self, package_path):
self.package_config = lnkr.load_package_config(package_path)
self.wrapper_config = lnkr.load_wrapper_config(package_path)
if self.wrapper_config is not None:
self.wrapper_config.set_mode(self.mode)
return self.package_config is not None
def load_local(self):
return self.do_load(os.path.join(self.path, self.local))
def load_remote(self):
term.info('Not Implemented: Import Remote Package')
return False
def check_mode(self):
if self.mode == MODE_COPY:
return True
elif self.mode == MODE_LINK:
return True
elif self.mode == MODE_SYMLINK:
return True
return False
def load(self):
if not self.check_mode():
return False
if self.local is not None:
self.loaded = self.load_local()
elif self.remote is not None:
self.loaded = self.load_remote()
if not self.loaded:
term.error('Load Import Section Failed: %s' % term.format_param(self.key))
def get_component(self, key):
if self.package_config is None:
return None
export_section = self.package_config.get_export_section(key)
if export_section is not None:
return export_section
elif self.wrapper_config is not None:
wrapper_section = self.wrapper_config.get_wrapper_section(key)
if wrapper_section is not None:
return wrapper_section
return None
def new_import_section(path, key, values):
section = ImportSection(path, key, values)
if section.valid:
return section
else:
term.error('Invalid Import Section: %s -> %s' % (key, values))
| 2.1875
| 2
|
venv/lib/python3.6/site-packages/ansible/module_utils/facts/virtual/sunos.py
|
usegalaxy-no/usegalaxy
| 1
|
12779413
|
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.module_utils.facts.virtual.base import Virtual, VirtualCollector
class SunOSVirtual(Virtual):
"""
This is a SunOS-specific subclass of Virtual. It defines
- virtualization_type
- virtualization_role
- container
"""
platform = 'SunOS'
def get_virtual_facts(self):
virtual_facts = {}
host_tech = set()
guest_tech = set()
# Check if it's a zone
zonename = self.module.get_bin_path('zonename')
if zonename:
rc, out, err = self.module.run_command(zonename)
if rc == 0:
if out.rstrip() == "global":
host_tech.add('zone')
else:
guest_tech.add('zone')
virtual_facts['container'] = 'zone'
# Check if it's a branded zone (i.e. Solaris 8/9 zone)
if os.path.isdir('/.SUNWnative'):
guest_tech.add('zone')
virtual_facts['container'] = 'zone'
# If it's a zone check if we can detect if our global zone is itself virtualized.
# Relies on the "guest tools" (e.g. vmware tools) to be installed
if 'container' in virtual_facts and virtual_facts['container'] == 'zone':
modinfo = self.module.get_bin_path('modinfo')
if modinfo:
rc, out, err = self.module.run_command(modinfo)
if rc == 0:
for line in out.splitlines():
if 'VMware' in line:
guest_tech.add('vmware')
virtual_facts['virtualization_type'] = 'vmware'
virtual_facts['virtualization_role'] = 'guest'
if 'VirtualBox' in line:
guest_tech.add('virtualbox')
virtual_facts['virtualization_type'] = 'virtualbox'
virtual_facts['virtualization_role'] = 'guest'
if os.path.exists('/proc/vz'):
guest_tech.add('virtuozzo')
virtual_facts['virtualization_type'] = 'virtuozzo'
virtual_facts['virtualization_role'] = 'guest'
# Detect domaining on Sparc hardware
virtinfo = self.module.get_bin_path('virtinfo')
if virtinfo:
# The output of virtinfo is different whether we are on a machine with logical
# domains ('LDoms') on a T-series or domains ('Domains') on a M-series. Try LDoms first.
rc, out, err = self.module.run_command("/usr/sbin/virtinfo -p")
# The output contains multiple lines with different keys like this:
# DOMAINROLE|impl=LDoms|control=false|io=false|service=false|root=false
# The output may also be not formatted and the returncode is set to 0 regardless of the error condition:
# virtinfo can only be run from the global zone
if rc == 0:
try:
for line in out.splitlines():
fields = line.split('|')
if fields[0] == 'DOMAINROLE' and fields[1] == 'impl=LDoms':
guest_tech.add('ldom')
virtual_facts['virtualization_type'] = 'ldom'
virtual_facts['virtualization_role'] = 'guest'
hostfeatures = []
for field in fields[2:]:
arg = field.split('=')
if arg[1] == 'true':
hostfeatures.append(arg[0])
if len(hostfeatures) > 0:
virtual_facts['virtualization_role'] = 'host (' + ','.join(hostfeatures) + ')'
except ValueError:
pass
else:
smbios = self.module.get_bin_path('smbios')
if not smbios:
return
rc, out, err = self.module.run_command(smbios)
if rc == 0:
for line in out.splitlines():
if 'VMware' in line:
guest_tech.add('vmware')
virtual_facts['virtualization_type'] = 'vmware'
virtual_facts['virtualization_role'] = 'guest'
elif 'Parallels' in line:
guest_tech.add('parallels')
virtual_facts['virtualization_type'] = 'parallels'
virtual_facts['virtualization_role'] = 'guest'
elif 'VirtualBox' in line:
guest_tech.add('virtualbox')
virtual_facts['virtualization_type'] = 'virtualbox'
virtual_facts['virtualization_role'] = 'guest'
elif 'HVM domU' in line:
guest_tech.add('xen')
virtual_facts['virtualization_type'] = 'xen'
virtual_facts['virtualization_role'] = 'guest'
elif 'KVM' in line:
guest_tech.add('kvm')
virtual_facts['virtualization_type'] = 'kvm'
virtual_facts['virtualization_role'] = 'guest'
virtual_facts['virtualization_tech_guest'] = guest_tech
virtual_facts['virtualization_tech_host'] = host_tech
return virtual_facts
class SunOSVirtualCollector(VirtualCollector):
_fact_class = SunOSVirtual
_platform = 'SunOS'
| 2.03125
| 2
|
docs/snippets/ov_auto_batching.py
|
ryanloney/openvino-1
| 1
|
12779414
|
#include <openvino/runtime/core.hpp>
int main() {
ov::Core core;
auto model = core.read_model("sample.xml");
//! [compile_model]
{
auto compiled_model = core.compile_model(model, "GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT));
}
//! [compile_model]
//! [compile_model_no_auto_batching]
{
// disabling the automatic batching
// leaving intact other configurations options that the device selects for the 'throughput' hint
auto compiled_model = core.compile_model(model, "GPU", {ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT),
ov::hint::allow_auto_batching(false)});
}
//! [compile_model_no_auto_batching]
//! [query_optimal_num_requests]
{
// when the batch size is automatically selected by the implementation
// it is important to query/create and run the sufficient #requests
auto compiled_model = core.compile_model(model, "GPU", ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT));
auto num_requests = compiled_model.get_property(ov::optimal_number_of_infer_requests);
}
//! [query_optimal_num_requests]
//! [hint_num_requests]
{
// limiting the available parallel slack for the 'throughput' hint via the ov::hint::num_requests
// so that certain parameters (like selected batch size) are automatically accommodated accordingly
auto compiled_model = core.compile_model(model, "GPU", {ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT),
ov::hint::num_requests(4)});
}
//! [hint_num_requests]
return 0;
}
| 1.859375
| 2
|
django_doc/apps.py
|
bgreatfit/locallibrary
| 0
|
12779415
|
from django.apps import AppConfig
class DjangoDocConfig(AppConfig):
name = 'django_doc'
| 1.15625
| 1
|
bin/start_worker.py
|
zhiyue/cola
| 1
|
12779416
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Copyright (c) 2013 <NAME> <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Created on 2013-6-7
@author: Chine
'''
import subprocess
import os
from cola.core.utils import root_dir, get_ip
from cola.core.config import main_conf
def start_worker(master, data_path=None, force=False):
path = os.path.join(root_dir(), 'cola', 'worker', 'watcher.py')
    print('Start worker at %s:%s' % (get_ip(), main_conf.worker.port))
    print('Worker will run in background. Please do not shut down the terminal.')
cmds = ['python', path, '-m', master]
if data_path is not None:
cmds.extend(['-d', data_path])
if force is True:
cmds.append('-f')
subprocess.Popen(cmds)
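    # the watcher process is left running in the background (Popen without wait/communicate)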
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser('Cola worker')
parser.add_argument('-m', '--master', metavar='master watcher', nargs='?',
default=None, const=None,
help='master connected to(in the former of `ip:port` or `ip`)')
parser.add_argument('-d', '--data', metavar='data root directory', nargs='?',
default=None, const=None,
help='root directory to put data')
parser.add_argument('-f', '--force', metavar='force start', nargs='?',
default=False, const=True, type=bool)
args = parser.parse_args()
master = args.master
if master is None:
        connect_to_localhost = input("Connect to localhost? (yes or no) ")
conn = connect_to_localhost.lower().strip()
if conn == 'yes' or conn == 'y':
master = '%s:%s' % (get_ip(), main_conf.master.port)
elif conn == 'no' or conn == 'n':
            master = input("Please input the master (format: \"ip:port\" or \"ip\") ")
if ':' not in master:
master += ':%s' % main_conf.master.port
else:
            print('Invalid input!')
else:
if ':' not in master:
master += ':%s' % main_conf.master.port
if master is not None:
start_worker(master, data_path=args.data, force=args.force)
| 2.03125
| 2
|
blog/app/forms.py
|
shazlycode/testsite-blog
| 1
|
12779417
|
from django import forms
from app.models import Post, Comment, Profile
from django.contrib.auth.models import User
class CommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ('name','email','comment_body' )
class RegisterForm(forms.ModelForm):
    username = forms.CharField(max_length=100, label='اسم المستخدم')
first_name = forms.CharField(max_length=100, label='الاسم الاول')
last_name = forms.CharField(max_length=100, label='الاسم الاخير')
email = forms.EmailField(label='البريد الالكتروني')
    password1 = forms.CharField(widget=forms.PasswordInput(), label='كلمة المرور', min_length=8)
password2 = forms.CharField(widget=forms.PasswordInput(), label='تأكيد كلمة المرور', min_length=8)
class Meta:
model = User
        fields = ('username', 'first_name', 'last_name', 'email', 'password1', 'password2')
def clean_username(self):
cd = self.cleaned_data
if User.objects.filter(username=cd['username']).exists():
raise forms.ValidationError('اسم المستخدم موجود مسبقا')
return cd['username']
def clean_password2(self):
cd = self.cleaned_data
if cd['password1'] != cd['password2']:
raise forms.ValidationError('كلمة المرور غير متطابقة')
return cd['password2']
class LoginForm(forms.ModelForm):
    username = forms.CharField(max_length=100, label='اسم المستخدم')
    password = forms.CharField(widget=forms.PasswordInput(), label='كلمة المرور')
class Meta:
model= User
fields= ('username', 'password')
class UserUpdateForm(forms.ModelForm):
first_name = forms.CharField(label='الاسم الأول')
last_name = forms.CharField(label='الاسم الأخير')
email = forms.EmailField(label='البريد الإلكتروني')
class Meta:
model = User
fields = ('first_name', 'last_name', 'email')
class ProfileUpdateForm(forms.ModelForm):
class Meta:
model = Profile
fields = ('image',)
#class ProfileUpdateForm(forms.ModelForm):
# first_name = forms.CharField(max_length=100, label='الاسم الاول')
# last_name = forms.CharField(max_length=100, label='الاسم الاخير')
# email = forms.EmailField(label='البريد الالكتروني')
# class Meta:
# model = User
# fields = ('first_name', 'last_name', 'email')
#
#class ImageUpdateForm(forms.ModelForm):
# class Meta:
# model = Profile
# fields = ('image', )
class NewPost(forms.ModelForm):
    post_name = forms.CharField(max_length=500, label='عنوان التدوينة')
    post_body = forms.CharField(widget=forms.TextInput())  # a bare widget is not a form field; wrap it in CharField
class Meta:
model= Post
fields=('post_name', 'post_body',)
| 2.5625
| 3
|
InayatBashir/Places/urls.py
|
muminfarooq190/InayatBashir.in
| 0
|
12779418
|
from django.urls import path
from . import views
app_name = 'places'
urlpatterns = [
path('', views.AllPlacesList.as_view(), name='all_places_list'),
path('places/list/<int:pk>/', views.DetailPlace.as_view(), name='detail_place')
]
| 1.9375
| 2
|
newpipe_crash_report_importer/lmtp_server.py
|
TeamNewPipe/CrashReportImporter
| 9
|
12779419
|
import traceback
from email.parser import Parser
import sentry_sdk
from aiosmtpd.controller import Controller
from aiosmtpd.lmtp import LMTP
from aiosmtpd.smtp import Envelope
from . import make_logger
class CustomLMTP(LMTP):
"""
A relatively simple wrapper around the LMTP/SMTP classes that implements some less obtrusive logging around
connections.
Required until https://github.com/aio-libs/aiosmtpd/issues/239 has been resolved.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.custom_logger = make_logger("lmtp")
def _get_peer_name(self):
return f"{self.session.peer[0]}:{self.session.peer[1]}"
def connection_made(self, *args, **kwargs):
# we have to run the superclass method in advance, as it'll set up self.session, which we'll need in
# _get_peer_name
rv = super().connection_made(*args, **kwargs)
self.custom_logger.info("Client connected: %s", self._get_peer_name())
return rv
def connection_lost(self, *args, **kwargs):
self.custom_logger.info("Client connection lost: %s", self._get_peer_name())
return super().connection_lost(*args, **kwargs)
class LmtpController(Controller):
"""
    A custom controller implementation that returns LMTP instances instead of SMTP ones.
    Inspired by GNU Mailman 3's LMTPController.
"""
def factory(self):
return CustomLMTP(self.handler, ident="NewPipe crash report importer")
class CrashReportHandler:
"""
Very simple handler which only accepts mail for allowed addresses and stores them into the Sentry database.
"""
def __init__(self, callback: callable):
self.callback = callback
self.logger = make_logger("lmtp_handler")
async def handle_RCPT(
self, server, session, envelope: Envelope, address: str, rcpt_options
):
if address not in ["<EMAIL>", "<EMAIL>"]:
return f"550 not handling mail for address {address}"
envelope.rcpt_tos.append(address)
return "250 OK"
@staticmethod
def convert_to_rfc822_message(envelope: Envelope):
return Parser().parsestr(envelope.content.decode())
async def handle_DATA(self, server, session, envelope: Envelope):
try:
message = self.convert_to_rfc822_message(envelope)
# as the volume of incoming mails is relatively low (< 3 per minute usually) and reporting doesn't take
# very long, we can just do it here and don't require some message queue/worker setup
# the callback is defined as async, but can, due to the low volume, be implemented synchronously, too
await self.callback(message)
        except Exception:
# in case an exception happens in the callback (e.g., the message can't be parsed correctly), we don't
# want to notify the sending MTA, but have them report success of delivery
# it's after all not their problem: if they got so far, the message was indeed delivered to our LMTP server
# however, we want the exception to show up in the log
traceback.print_exc()
# also, we want to report all kinds of issues to GlitchTip
sentry_sdk.capture_exception()
# make sure all control flow paths return a string reply!
return "250 Message accepted for delivery"
| 2.234375
| 2
|
Day18/Extract_Unique_Dictionary_Values.py
|
tushartrip1010/100_days_code_py
| 0
|
12779420
|
def Extract_Dictionary_Values(Test_dict):
return [sorted({numbers for ele in Test_dict.values() for numbers in ele})]
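# The set comprehension flattens every value list and drops duplicates;
# sorted() then yields the unique values in ascending order (wrapped in a list).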
Test_dict = {'Challenges': [5, 6, 7, 8],
'are': [10, 11, 7, 5],
'best': [6, 12, 10, 8],
'for': [1, 2, 5]}
print(*(Extract_Dictionary_Values(Test_dict)))
| 3.515625
| 4
|
keyboards/menu_keyboards.py
|
gdetam/aiogram_telegram_bot
| 1
|
12779421
|
"""this is menu's keyboards creator."""
from aiogram.types import InlineKeyboardButton, InlineKeyboardMarkup
def create_start_menu():
"""Start keyboard."""
menu_keyboard = InlineKeyboardMarkup(row_width=1)
select_book = InlineKeyboardButton(
text='Выбрать аудиокнигу',
callback_data='select_book')
random_book = InlineKeyboardButton(
text='Случайная аудиокнига',
callback_data='random_book')
menu_keyboard.add(select_book, random_book)
return menu_keyboard
def create_authors_and_genres_menu():
"""Keyboard in response to the 'Select Audiobook' button."""
select_book_keyboard = InlineKeyboardMarkup(row_width=3)
authors = InlineKeyboardButton(
text='авторы',
callback_data='author_page_0')
genres = InlineKeyboardButton(
text='жанры',
callback_data='genre_page_0')
readers = InlineKeyboardButton(
text='исполнители',
callback_data='reader_page_0')
select_book_keyboard.add(authors, genres, readers)
add_menu_button(select_book_keyboard)
return select_book_keyboard
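# callback_data values such as 'author_page_0' encode an entity type plus a page
# index, presumably consumed by pagination handlers elsewhere in the bot.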
def add_menu_button(keyboard: InlineKeyboardMarkup):
"""Keyboard 'menu'."""
button = InlineKeyboardButton(
text='в меню',
callback_data='menu')
keyboard.add(button)
| 2.65625
| 3
|
src/test/dataset/reader/csv_reader_test.py
|
KlemenGrebovsek/Cargo-stowage-optimization
| 2
|
12779422
|
import unittest
from src.dataset.reader.csv_reader import CSVDatasetReader
from src.dataset.reader.dataset_reader_errors import InvalidFileContentError
class CSVReaderTest(unittest.TestCase):
def test_empty_path(self):
try:
reader = CSVDatasetReader()
_ = reader.read('')
self.fail('Empty path should not be accepted')
except ValueError:
pass
def test_invalid_path(self):
try:
reader = CSVDatasetReader()
_ = reader.read('./random/dataset.csv')
self.fail('Invalid path should not be accepted')
except ValueError:
pass
def test_valid_path(self):
try:
reader = CSVDatasetReader()
_ = reader.read('../../resource/testSet.csv')
except ValueError:
self.fail('Valid path should be accepted')
def test_invalid_file_extension(self):
try:
reader = CSVDatasetReader()
_ = reader.read('./random/dataset.txt')
self.fail('Invalid file extension should not be accepted')
except ValueError:
pass
def test_valid_file_extension(self):
try:
reader = CSVDatasetReader()
_ = reader.read('../../resource/testSet.csv')
except ValueError:
self.fail('Valid file extension should be accepted')
def test_invalid_content(self):
try:
reader = CSVDatasetReader()
_ = reader.read('../../resource/invalidTestSet.csv')
self.fail('Invalid dataset should not be accepted')
except InvalidFileContentError:
pass
def test_empty_content(self):
try:
reader = CSVDatasetReader()
_ = reader.read('../../resource/emptyFile.csv')
self.fail('Invalid dataset should not be accepted')
except InvalidFileContentError:
pass
def test_valid_content(self):
try:
reader = CSVDatasetReader()
dataset = reader.read('../../resource/testSet.csv')
self.assertEqual('TestSet1', dataset.title)
self.assertEqual(120, dataset.total_packages)
self.assertEqual(6, dataset.total_stations)
self.assertEqual(15, dataset.width)
self.assertEqual(15, dataset.height)
except Exception as e:
self.fail(e)
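# Note: the fixture paths are relative ('../../resource/...'), so these tests
# assume they are run from this file's directory.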
| 3.078125
| 3
|
Theories/DataStructures/QueueAndStack/StackDFS/BSTInorderTraversal/bst_inorder_traversal.py
|
dolong2110/Algorithm-By-Problems-Python
| 1
|
12779423
|
from typing import List
# Definition for a binary tree node.
class TreeNode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
# Recursively
# def inorderTraversal(root: TreeNode) -> List[int]:
# traversal_list = []
#
# def traversal(cur_root: TreeNode):
# if cur_root:
# traversal(cur_root.left)
# traversal_list.append(cur_root.val)
# traversal(cur_root.right)
#
# traversal(root)
# return traversal_list
# Iterative
def inorderTraversal(root: TreeNode) -> List[int]:
traversal_list, stack = [], []
cur_root = root
while cur_root or stack:
while cur_root:
stack.append(cur_root)
cur_root = cur_root.left
cur_root = stack.pop()
traversal_list.append(cur_root.val)
cur_root = cur_root.right
return traversal_list
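# Example: the tree 2 -> (1, 3) is traversed left-root-right:
# assert inorderTraversal(TreeNode(2, TreeNode(1), TreeNode(3))) == [1, 2, 3]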
| 4
| 4
|
dimsdk/plugins/ecc.py
|
dimchat/sdk-py
| 0
|
12779424
|
# -*- coding: utf-8 -*-
# ==============================================================================
# MIT License
#
# Copyright (c) 2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ==============================================================================
import hashlib
from typing import Optional
import ecdsa
from dimp import Dictionary
from dimp import AsymmetricKey, PublicKey, PrivateKey
class ECCPublicKey(Dictionary, PublicKey):
""" ECC Public Key """
def __init__(self, key: dict):
super().__init__(key)
# data in 'PEM' format
data = key['data']
data_len = len(data)
if data_len == 130 or data_len == 128:
data = bytes.fromhex(data)
key = ecdsa.VerifyingKey.from_string(data, curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256)
else:
key = ecdsa.VerifyingKey.from_pem(data, hashfunc=hashlib.sha256)
self.__key = key
self.__data = key.to_string(encoding='uncompressed')
@property
def data(self) -> bytes:
return self.__data
@property
def size(self) -> int:
return self.bits >> 3
@property
def bits(self) -> int:
bits = self.get('sizeInBits')
if bits is None:
return 256 # ECC-256
else:
return int(bits)
def verify(self, data: bytes, signature: bytes) -> bool:
try:
return self.__key.verify(signature=signature, data=data,
hashfunc=hashlib.sha256, sigdecode=ecdsa.util.sigdecode_der)
except ecdsa.BadSignatureError:
return False
class ECCPrivateKey(Dictionary, PrivateKey):
""" ECC Private Key """
def __init__(self, key: Optional[dict] = None):
if key is None:
key = {'algorithm': AsymmetricKey.ECC}
super().__init__(key)
# data in 'PEM' format
data = key.get('data')
if data is None or len(data) == 0:
# generate private key data
key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256)
data = key.to_string()
# store private key in PKCS#8 format
pem = key.to_pem(format='pkcs8').decode('utf-8')
# pem = data.hex()
self.__key = key
self.__data = data
self['data'] = pem
self['curve'] = 'SECP256k1'
self['digest'] = 'SHA256'
else:
if len(data) == 64:
data = bytes.fromhex(data)
key = ecdsa.SigningKey.from_string(data, curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256)
else:
key = ecdsa.SigningKey.from_pem(data, hashfunc=hashlib.sha256)
self.__key = key
self.__data = key.to_string()
@property
def data(self) -> bytes:
return self.__data
@property
def size(self) -> int:
return self.bits >> 3
@property
def bits(self) -> int:
bits = self.get('sizeInBits')
if bits is None:
return 256 # ECC-256
else:
return int(bits)
@property
    def public_key(self) -> PublicKey:
key = self.__key.get_verifying_key()
# store public key in X.509 format
pem = key.to_pem().decode('utf-8')
# pem = key.to_string(encoding='uncompressed').hex()
info = {
'algorithm': PublicKey.ECC,
'data': pem,
'curve': 'SECP256k1',
'digest': 'SHA256'
}
return ECCPublicKey(info)
def sign(self, data: bytes) -> bytes:
return self.__key.sign(data=data, hashfunc=hashlib.sha256, sigencode=ecdsa.util.sigencode_der)
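# sign() emits DER-encoded signatures (sigencode_der), matching the
# sigdecode_der used by ECCPublicKey.verify() above.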
| 1.75
| 2
|
scripts/dates.py
|
clayrisser/node-git-filter-repo
| 0
|
12779425
|
# Copyright 2021 Silicon Hills LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime, timezone, timedelta
from typing import Union
SEC = 1
MIN = SEC * 60
HR = MIN * 60
DAY = HR * 24
def create_date(unix_timestamp: int, timezone=0):
return (unix_timestamp, timezone)
def date_from_gitdate(gitdate: str) -> (int, int):
if not isinstance(gitdate, str):
gitdate = gitdate.decode('utf-8')
date_arr = gitdate.split(' ')
gitz = date_arr[1] if len(date_arr) > 1 else '+0000'
tz = tz_from_gitz(gitz)
return (int(date_arr[0]), tz)
def tz_from_gitz(gitz: str) -> int:
gitz_arr = gitz.split(':')
tz_str = gitz_arr[0] + gitz_arr[1] if len(gitz_arr) > 1 else gitz_arr[0]
tz_str = '+' + tz_str if len(tz_str) < 5 else tz_str
if (tz_str[0] != '+' and tz_str[0] != '-') or len(tz_str) < 5:
raise Exception(gitz + ' is an invalid git tz')
tz_arr = [tz_str[0], int(tz_str[1:3]), int(tz_str[3:5])]
tz_minutes = tz_arr[2]
tz_hours = tz_arr[1]
return (((tz_hours * 60) + tz_minutes) * MIN) * (-1 if tz_arr[0] == '-' else 1)
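# e.g. tz_from_gitz('+0530') == ((5 * 60) + 30) * MIN == 19800 seconds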
def gitz_from_tz(tz: int) -> str:
tz_hours = int(abs(tz) / HR)
tz_minutes = int(abs(tz) / MIN % 60)
return ('-' if tz < 0 else '+') + str(tz_hours).zfill(2) + str(tz_minutes).zfill(2)
def gitdate_from_date(date: (int, int)) -> str:
gitz = gitz_from_tz(date[1])
return bytes(str(date[0]) + ' ' + gitz, 'utf-8')
def change_tz(date: (int, int), tz: Union[str, int], adjust_time=True) -> (int, int):
if isinstance(tz, str):
tz = tz_from_gitz(tz)
if adjust_time:
return (date[0], tz)
return (date[0] + tz, tz)
def pydate_from_date(date: (int, int)):
pytz = pytz_from_tz(date[1])
return datetime.fromtimestamp(date[0], tz=pytz)
def pytz_from_tz(tz: int):
factor = (-1 if tz < 0 else 1)
hours = int(abs(tz) / HR) * factor
minutes = int(abs(tz) / MIN % 60) * factor
return timezone(timedelta(hours=hours, minutes=minutes))
def pydate_from_gitdate(gitdate: str):
date = date_from_gitdate(gitdate)
return pydate_from_date(date)
def gitz_from_pydate(pydate) -> str:
tz_name = pydate.tzname() or 'UTC+00:00'
if tz_name == 'UTC':
tz_name = 'UTC+00:00'
return tz_name[3:6] + tz_name[7:9]
def tz_from_pydate(pydate) -> int:
gitz = gitz_from_pydate(pydate)
return tz_from_gitz(gitz)
def date_from_pydate(pydate) -> (int, int):
return (
int(pydate.strftime("%s")),
tz_from_gitz(gitz_from_pydate(pydate))
)
def gitdate_from_pydate(pydate) -> str:
date = date_from_pydate(pydate)
return gitdate_from_date(date)
def match(a_date: (int, int), operator, b_date: (int, int), granularity=SEC) -> bool:
    a_granular = int(a_date[0] / granularity)
    b_granular = int(b_date[0] / granularity)
if operator == '=':
return a_granular == b_granular
elif operator == '!=':
return a_granular != b_granular
elif operator == '>':
return a_granular > b_granular
elif operator == '<':
return a_granular < b_granular
elif operator == '<=':
return a_granular <= b_granular
elif operator == '>=':
return a_granular >= b_granular
return False
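# Example: with granularity=MIN, timestamps 90 and 110 both truncate to 1,
# so match((90, 0), '=', (110, 0), granularity=MIN) is True.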
| 2.640625
| 3
|
peregrine/urls.py
|
FlipperPA/peregrine
| 52
|
12779426
|
from django.urls import include, path, re_path
from wagtail.core import urls as wagtail_urls
from .views import PostsListView, AuthorPostsListView, CategoryPostsListView, PostsFeed
urlpatterns = [
path("rss/", PostsFeed(), name="peregrine-rss"),
path("author/<str:name>/", AuthorPostsListView.as_view(), name="posts-author"),
path(
"category/<str:name>/", CategoryPostsListView.as_view(), name="posts-category"
),
path("", PostsListView.as_view(), name="posts"),
re_path(r"", include(wagtail_urls)),
]
| 1.9375
| 2
|
PseudoGenerator.py
|
yxn-coder/Inf-Net
| 273
|
12779427
|
# -*- coding: utf-8 -*-
"""Preview
Code for 'Inf-Net: Automatic COVID-19 Lung Infection Segmentation from CT Scans'
submit to Transactions on Medical Imaging, 2020.
First Version: Created on 2020-05-13 (@author: <NAME>)
"""
# ---- base lib -----
import os
import argparse
from datetime import datetime
import cv2
import numpy as np
import random
import shutil
from scipy import misc
# ---- torch lib ----
import torch
from torch.autograd import Variable
import torch.nn.functional as F
# ---- custom lib ----
# NOTES: Here we only provide Res2Net; you can also replace it with other backbones
from Code.model_lung_infection.InfNet_Res2Net import Inf_Net as Network
from Code.utils.dataloader_LungInf import get_loader, test_dataset
from Code.utils.utils import clip_gradient, adjust_lr, AvgMeter
from Code.utils.format_conversion import binary2edge
def joint_loss(pred, mask):
weit = 1 + 5*torch.abs(F.avg_pool2d(mask, kernel_size=31, stride=1, padding=15) - mask)
    wbce = F.binary_cross_entropy_with_logits(pred, mask, reduction='none')
wbce = (weit*wbce).sum(dim=(2, 3)) / weit.sum(dim=(2, 3))
pred = torch.sigmoid(pred)
inter = ((pred * mask)*weit).sum(dim=(2, 3))
union = ((pred + mask)*weit).sum(dim=(2, 3))
wiou = 1 - (inter + 1)/(union - inter+1)
return (wbce + wiou).mean()
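# joint_loss combines weighted BCE with a weighted IoU term; the weight map
# emphasizes pixels near mask boundaries, where the local average differs from the mask.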
def trainer(train_loader, model, optimizer, epoch, opt, total_step):
model.train()
# ---- multi-scale training ----
    size_rates = [0.75, 1, 1.25]  # adjust to your desired scales
loss_record1, loss_record2, loss_record3, loss_record4, loss_record5 = AvgMeter(), AvgMeter(), AvgMeter(), AvgMeter(), AvgMeter()
for i, pack in enumerate(train_loader, start=1):
for rate in size_rates:
optimizer.zero_grad()
# ---- data prepare ----
images, gts, edges = pack
images = Variable(images).cuda()
gts = Variable(gts).cuda()
edges = Variable(edges).cuda()
# ---- rescale ----
trainsize = int(round(opt.trainsize*rate/32)*32)
if rate != 1:
                images = F.interpolate(images, size=(trainsize, trainsize), mode='bilinear', align_corners=True)
                gts = F.interpolate(gts, size=(trainsize, trainsize), mode='bilinear', align_corners=True)
                edges = F.interpolate(edges, size=(trainsize, trainsize), mode='bilinear', align_corners=True)
# ---- forward ----
lateral_map_5, lateral_map_4, lateral_map_3, lateral_map_2, lateral_edge = model(images)
# ---- loss function ----
loss5 = joint_loss(lateral_map_5, gts)
loss4 = joint_loss(lateral_map_4, gts)
loss3 = joint_loss(lateral_map_3, gts)
loss2 = joint_loss(lateral_map_2, gts)
loss1 = torch.nn.BCEWithLogitsLoss()(lateral_edge, edges)
loss = loss1 + loss2 + loss3 + loss4 + loss5
# ---- backward ----
loss.backward()
clip_gradient(optimizer, opt.clip)
optimizer.step()
# ---- recording loss ----
if rate == 1:
loss_record1.update(loss1.data, opt.batchsize)
loss_record2.update(loss2.data, opt.batchsize)
loss_record3.update(loss3.data, opt.batchsize)
loss_record4.update(loss4.data, opt.batchsize)
loss_record5.update(loss5.data, opt.batchsize)
# ---- train visualization ----
if i % 5 == 0 or i == total_step:
print('{} Epoch [{:03d}/{:03d}], Step [{:04d}/{:04d}], [lateral-edge: {:.4f}, '
'lateral-2: {:.4f}, lateral-3: {:0.4f}, lateral-4: {:0.4f}, lateral-5: {:0.4f}]'.
format(datetime.now(), epoch, opt.epoch, i, total_step, loss_record1.show(),
loss_record2.show(), loss_record3.show(), loss_record4.show(), loss_record5.show()))
# ---- save model_lung_infection ----
save_path = 'Snapshots/{}/'.format(opt.train_save)
os.makedirs(save_path, exist_ok=True)
if (epoch+1) % 10 == 0:
torch.save(model.state_dict(), save_path + 'Semi-Inf-Net-%d.pth' % (epoch+1))
print('[Saving Snapshot:]', save_path + 'Semi-Inf-Net-%d.pth' % (epoch+1))
def train_module(_train_path, _train_save, _resume_snapshot):
parser = argparse.ArgumentParser()
parser.add_argument('--epoch', type=int, default=10, help='epoch number')
parser.add_argument('--lr', type=float, default=3e-4, help='learning rate')
parser.add_argument('--batchsize', type=int, default=16, help='training batch size')
parser.add_argument('--trainsize', type=int, default=352, help='training dataset size')
parser.add_argument('--clip', type=float, default=0.5, help='gradient clipping margin')
parser.add_argument('--decay_rate', type=float, default=0.1, help='decay rate of learning rate')
parser.add_argument('--decay_epoch', type=int, default=50, help='every n epochs decay learning rate')
parser.add_argument('--train_path', type=str, default=_train_path)
parser.add_argument('--train_save', type=str, default=_train_save)
parser.add_argument('--resume_snapshot', type=str, default=_resume_snapshot)
opt = parser.parse_args()
# ---- build models ----
torch.cuda.set_device(0)
model = Network(channel=32, n_class=1).cuda()
model.load_state_dict(torch.load(opt.resume_snapshot))
params = model.parameters()
optimizer = torch.optim.Adam(params, opt.lr)
image_root = '{}/Imgs/'.format(opt.train_path)
gt_root = '{}/GT/'.format(opt.train_path)
edge_root = '{}/Edge/'.format(opt.train_path)
train_loader = get_loader(image_root, gt_root, edge_root, batchsize=opt.batchsize, trainsize=opt.trainsize)
total_step = len(train_loader)
print("#"*20, "Start Training", "#"*20)
for epoch in range(1, opt.epoch):
adjust_lr(optimizer, opt.lr, epoch, opt.decay_rate, opt.decay_epoch)
trainer(train_loader=train_loader, model=model, optimizer=optimizer,
epoch=epoch, opt=opt, total_step=total_step)
def inference_module(_data_path, _save_path, _pth_path):
model = Network(channel=32, n_class=1)
model.load_state_dict(torch.load(_pth_path))
model.cuda()
model.eval()
os.makedirs(_save_path, exist_ok=True)
# FIXME
image_root = '{}/'.format(_data_path)
# gt_root = '{}/mask/'.format(data_path)
test_loader = test_dataset(image_root, image_root, 352)
for i in range(test_loader.size):
image, name = test_loader.load_data()
#gt = np.asarray(gt, np.float32)
#gt /= (gt.max() + 1e-8)
image = image.cuda()
lateral_map_5, lateral_map_4, lateral_map_3, lateral_map_2, lateral_edge = model(image)
res = lateral_map_2 # final segmentation
#res = F.upsample(res, size=gt.shape, mode='bilinear', align_corners=False)
res = res.sigmoid().data.cpu().numpy().squeeze()
res = (res - res.min()) / (res.max() - res.min() + 1e-8)
        misc.imsave(_save_path + '/' + name, res)  # note: imsave was removed from newer SciPy releases (imageio is the usual replacement)
def movefiles(_src_dir, _dst_dir):
os.makedirs(_dst_dir, exist_ok=True)
for file_name in os.listdir(_src_dir):
shutil.copyfile(os.path.join(_src_dir, file_name),
os.path.join(_dst_dir, file_name))
if __name__ == '__main__':
slices = './Dataset/TrainingSet/LungInfection-Train/Pseudo-label/DataPrepare'
slices_dir = slices + '/Imgs_split'
slices_pred_seg_dir = slices + '/pred_seg_split'
slices_pred_edge_dir = slices + '/pred_edge_split'
# NOTES: Hybrid-label = Doctor-label + Pseudo-label
semi = './Dataset/TrainingSet/LungInfection-Train/Pseudo-label/DataPrepare/Hybrid-label'
semi_img = semi + '/Imgs'
semi_mask = semi + '/GT'
semi_edge = semi + '/Edge'
if (not os.path.exists(semi_img)) or (len(os.listdir(semi_img)) != 50):
shutil.copytree('Dataset/TrainingSet/LungInfection-Train/Doctor-label/Imgs',
semi_img)
shutil.copytree('Dataset/TrainingSet/LungInfection-Train/Doctor-label/GT',
semi_mask)
shutil.copytree('Dataset/TrainingSet/LungInfection-Train/Doctor-label/Edge',
semi_edge)
print('Copy done')
else:
print('Check done')
slices_lst = os.listdir(slices_dir)
random.shuffle(slices_lst)
print("#" * 20, "\nStart Training (Inf-Net)\nThis code is written for 'Inf-Net: Automatic COVID-19 Lung "
"Infection Segmentation from CT Scans', 2020, arXiv.\n"
"----\nPlease cite the paper if you use this code and dataset. "
"And any questions feel free to contact me "
"via E-mail (<EMAIL>)\n----\n", "#" * 20)
for i, split_name in enumerate(slices_lst):
print('\n[INFO] {} ({}/320)'.format(split_name, i))
# ---- inference ----
test_aux_dir = os.path.join(slices_dir, split_name)
test_aux_save_dir = os.path.join(slices_pred_seg_dir, split_name)
if i == 0:
snapshot_dir = './Snapshots/save_weights/Inf-Net/Inf-Net-100.pth'
else:
snapshot_dir = './Snapshots/semi_training/Semi-Inf-Net_{}/Semi-Inf-Net-10.pth'.format(i-1)
inference_module(_data_path=test_aux_dir, _save_path=test_aux_save_dir, _pth_path=snapshot_dir)
os.makedirs(os.path.join(slices_pred_edge_dir, split_name), exist_ok=True)
for pred_name in os.listdir(test_aux_save_dir):
edge_tmp = binary2edge(os.path.join(test_aux_save_dir, pred_name))
cv2.imwrite(os.path.join(slices_pred_edge_dir, split_name, pred_name), edge_tmp)
# ---- move generation ----
movefiles(test_aux_dir, semi_img)
movefiles(test_aux_save_dir, semi_mask)
movefiles(os.path.join(slices_pred_edge_dir, split_name), semi_edge)
# ---- training ----
train_module(_train_path=semi,
_train_save='semi_training/Semi-Inf-Net_{}'.format(i),
_resume_snapshot=snapshot_dir)
# move img/pseudo-label into `./Dataset/TrainingSet/LungInfection-Train/Pseudo-label`
shutil.copytree(semi_img, './Dataset/TrainingSet/LungInfection-Train/Pseudo-label/Imgs')
shutil.copytree(semi_mask, './Dataset/TrainingSet/LungInfection-Train/Pseudo-label/GT')
shutil.copytree(semi_edge, 'Dataset/TrainingSet/LungInfection-Train/Pseudo-label/Edge')
print('Pseudo Label Generated!')
| 1.976563
| 2
|
source_enhancement_evaluator.py
|
jhuiac/cocktail-party-Visually-derived-Speech-
| 0
|
12779428
|
import argparse
import os
import subprocess
import tempfile
import re
import uuid
import numpy as np
from mediaio import ffmpeg
def pesq(pesq_bin_path, source_file_path, enhanced_file_path):
temp_dir = tempfile.gettempdir()
temp_source_path = os.path.join(temp_dir, str(uuid.uuid4()) + ".wav")
temp_estimated_path = os.path.join(temp_dir, str(uuid.uuid4()) + ".wav")
ffmpeg.downsample(source_file_path, temp_source_path, sample_rate=16000)
ffmpeg.downsample(enhanced_file_path, temp_estimated_path, sample_rate=16000)
    output = subprocess.check_output(
        [pesq_bin_path, "+16000", temp_source_path, temp_estimated_path]
    ).decode("utf-8")  # decode: check_output returns bytes, and the regex below is a str pattern
    match = re.search(r"\(Raw MOS, MOS-LQO\):\s+= ([0-9.]+?)\t([0-9.]+?)$", output, re.MULTILINE)
mos = float(match.group(1))
moslqo = float(match.group(2))
os.remove(temp_source_path)
os.remove(temp_estimated_path)
return mos, moslqo
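# Example (hypothetical paths): pesq("./pesq", "ref.wav", "enhanced.wav")
# returns a (raw MOS, MOS-LQO) pair such as (4.12, 4.05).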
def evaluate(enhancement_dir_path, pesq_bin_path):
pesqs = []
sample_dir_names = os.listdir(enhancement_dir_path)
for sample_dir_name in sample_dir_names:
sample_dir_path = os.path.join(enhancement_dir_path, sample_dir_name)
source_file_path = os.path.join(sample_dir_path, "source.wav")
enhanced_file_path = os.path.join(sample_dir_path, "enhanced.wav")
mos, _ = pesq(pesq_bin_path, source_file_path, enhanced_file_path)
print("pesq: %f" % mos)
pesqs.append(mos)
print("mean pesq: %f" % np.mean(pesqs))
def main():
parser = argparse.ArgumentParser()
parser.add_argument("enhancement_dir", type=str)
parser.add_argument("pesq_bin_path", type=str)
args = parser.parse_args()
evaluate(args.enhancement_dir, args.pesq_bin_path)
if __name__ == "__main__":
main()
| 2.5
| 2
|
prerequisites/matplotlib.py
|
julien-amar/date-a-scientist
| 4
|
12779429
|
import codecademylib
from matplotlib import pyplot as plt
x = [0, 1, 2, 3, 4, 5]
y1 = [0, 1, 4, 9, 16, 25]
y2 = [0, 1, 8, 27, 64, 125]
# Plot y1 & y2 vs x axis
plt.plot(x, y1, color='pink', marker='o',label='square')
plt.plot(x, y2, color='gray', marker='o',label='cubic')
# Define titles for graph & axis
plt.title('Two Lines on One Graph')
plt.xlabel('Amazing X-axis')
plt.ylabel('Incredible Y-axis')
# Display legend (see: https://matplotlib.org/api/_as_gen/matplotlib.pyplot.legend.html)
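# (loc=4 places the legend in the lower-right corner)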
plt.legend(loc=4)
plt.show()
| 4.15625
| 4
|
mirumon/api/devices/http_endpoints/registration_controller.py
|
mirumon/mirumon-backend
| 19
|
12779430
|
from fastapi import APIRouter, Depends, HTTPException
from starlette import status
from mirumon.api.dependencies.services import get_service
from mirumon.api.dependencies.users.permissions import check_user_scopes
from mirumon.api.devices.http_endpoints.models.create_device_by_shared_key_request import ( # noqa: E501
CreateDeviceBySharedKeyRequest,
)
from mirumon.api.devices.http_endpoints.models.create_device_by_shared_key_response import ( # noqa: E501
CreateDeviceBySharedKeyResponse,
)
from mirumon.api.devices.http_endpoints.models.create_device_request import (
CreateDeviceRequest,
)
from mirumon.api.devices.http_endpoints.models.create_device_response import (
CreateDeviceResponse,
)
from mirumon.application.devices.auth_service import DevicesAuthService
from mirumon.application.devices.device_service import DevicesService
from mirumon.domain.users.scopes import DevicesScopes
from mirumon.resources import strings
router = APIRouter()
@router.post(
"/devices",
status_code=status.HTTP_201_CREATED,
name="devices:create",
summary="Create Device",
description=strings.DEVICES_CREATE_DESCRIPTION,
response_model=CreateDeviceResponse,
dependencies=[Depends(check_user_scopes([DevicesScopes.write]))],
)
async def create_device(
device_params: CreateDeviceRequest,
auth_service: DevicesAuthService = Depends(get_service(DevicesAuthService)),
devices_service: DevicesService = Depends(get_service(DevicesService)),
) -> CreateDeviceResponse:
device = await devices_service.register_new_device(name=device_params.name)
token = auth_service.create_device_token(device)
return CreateDeviceResponse(token=token, name=device.name)
@router.post(
"/devices/by/shared",
status_code=status.HTTP_201_CREATED,
name="devices:create-by-shared",
summary="Create Device by Shared Key",
response_model=CreateDeviceBySharedKeyResponse,
)
async def create_device_by_shared_key(
credentials: CreateDeviceBySharedKeyRequest,
auth_service: DevicesAuthService = Depends(get_service(DevicesAuthService)),
devices_service: DevicesService = Depends(get_service(DevicesService)),
) -> CreateDeviceBySharedKeyResponse:
is_shared_token_valid = auth_service.is_valid_shared_key(credentials.shared_key)
if not is_shared_token_valid:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=strings.INVALID_SHARED_KEY,
)
device = await devices_service.register_new_device(name=credentials.name)
token = auth_service.create_device_token(device)
return CreateDeviceBySharedKeyResponse(token=token, name=device.name)
| 2.109375
| 2
|
innexia/innexiaBot/modules/tagall.py
|
MikeOwino/curly-garbanzo
| 0
|
12779431
|
# Copyright (C) 2020-2021 by <EMAIL>, < https://github.com/DevsExpo >.
#
# This file is part of < https://github.com/DevsExpo/FridayUserBot > project,
# and is released under the "GNU v3.0 License Agreement".
# Please see < https://github.com/DevsExpo/blob/master/LICENSE >
#
# All rights reserved.
from pyrogram import filters
from innexiaBot.pyrogramee.pluginshelper import admins_only, get_text
from innexiaBot import pbot
@pbot.on_message(filters.command("tagall") & ~filters.edited & ~filters.bot)
@admins_only
async def tagall(client, message):
await message.reply("`Processing.....`")
sh = get_text(message)
if not sh:
sh = "Hi!"
mentions = ""
async for member in client.iter_chat_members(message.chat.id):
mentions += member.user.mention + " "
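    # Telegram caps a single message at 4096 characters, so send the mentions in chunks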
n = 4096
kk = [mentions[i : i + n] for i in range(0, len(mentions), n)]
for i in kk:
j = f"<b>{sh}</b> \n{i}"
await client.send_message(message.chat.id, j, parse_mode="html")
__mod_name__ = "Tagall"
__help__ = """
- /tagall : Tag everyone in a chat
"""
| 1.976563
| 2
|
src/nodeList/node/Node.py
|
mnm-team/pydht
| 1
|
12779432
|
class Node:
    def __init__(self, id: int, virtId: int = 0, capacity: int = 100, master: bool = True, additional=None) -> None:
assert(type(id) == int)
self.id = id
self.virtId = virtId
self.capacity = capacity
self.master = master
self.keyCount = 0
        self.additional = additional if additional is not None else {}  # avoid a shared mutable default argument
def __lt__(self,other):
return (self.id<other)
def __le__(self,other):
return(self.id<=other)
def __gt__(self,other):
return(self.id>other)
def __ge__(self,other):
return(self.id>=other)
def __eq__(self,other):
return (self.id==other)
    def __ne__(self, other):
        return not self.__eq__(other)
def __repr__(self) -> str:
return str(self.id)
def __str__(self) -> str:
return self.__repr__()
def sort(self) -> int:
return self.id
def addKey(self,id: int) -> int:
self.keyCount += 1
return self.keyCount
def removeKey(self,id: int) -> None:
self.keyCount -= 1
def getKeyCount(self) -> int:
return self.keyCount
def getCapacity(self) -> int:
return self.capacity
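# Nodes compare by id against plain ints or other nodes, e.g.:
#   assert Node(5) == 5 and Node(5) < 7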
| 3.40625
| 3
|
train.py
|
vitormeriat/nlp-based-text-gcn
| 0
|
12779433
|
from modules.trainer.configs import TrainingConfigs
from modules.trainer.train_model import train_model
from tsne import tsne_visualizer
import matplotlib.pyplot as plt
from sys import argv
def create_training_cfg() -> TrainingConfigs:
conf = TrainingConfigs()
# conf.data_sets = ['20ng', 'R8', 'R52', 'ohsumed', 'mr', 'cora', 'citeseer', 'pubmed']
conf.data_sets = ['R8']
conf.corpus_split_index_dir = 'data/corpus.shuffled/split_index/'
conf.corpus_node_features_dir = 'data/corpus.shuffled/node_features/'
conf.corpus_adjacency_dir = ''
conf.corpus_vocab_dir = 'data/corpus.shuffled/vocabulary/'
conf.adjacency_sets = ['frequency', 'syntactic_dependency',
'linguistic_inquiry', 'semantic', 'graph']
conf.model = 'gcn'
conf.learning_rate = 0.02
conf.epochs = 200
conf.hidden1 = 200
conf.dropout = 0.5
conf.weight_decay = 0.
conf.early_stopping = 10
conf.chebyshev_max_degree = 3
conf.build()
return conf
def train(ds: str, training_cfg: TrainingConfigs):
# Start training
return train_model(ds_name=ds, is_featureless=True, cfg=training_cfg)
def save_history(hist, representation, dataset):
file_name = f'logs/experiments/{representation}_dataset_{dataset}.txt'
with open(file_name, 'w') as my_file:
my_file.writelines(hist)
def create_training_plot(training_history, name="training_history"):
fig, axes = plt.subplots(2, 1)
axes[0].plot(training_history.epoch, training_history.accuracy, c="blue")
axes[0].set_ylabel("Accuracy", size=20)
axes[0].grid(which="both")
axes[1].plot(training_history.epoch, training_history.val_loss,
c="green", label='Validation')
axes[1].plot(training_history.epoch,
training_history.train_loss, c="red", label='Train')
axes[1].set_ylabel("Loss", size=20)
axes[1].set_xlabel("Epoch", size=20)
axes[1].grid(which="both")
axes[1].legend(fontsize=15)
fig = plt.gcf()
fig.set_size_inches(15, 8)
plt.tight_layout()
plt.savefig(f"{name}.jpg", dpi=200)
def batch_train(rp: str, trn_cfg):
'''
Experiments > Graph Representation > Model Hyperparameter Tuning > Run Step
'''
path = 'data/corpus.shuffled/adjacency/'
if rp == 'frequency':
# Default adjacency
trn_cfg.corpus_adjacency_dir = f'{path}/frequency/'
elif rp == 'semantic':
# Semantic adjacency
trn_cfg.corpus_adjacency_dir = f'{path}/semantic/'
elif rp == 'syntactic_dependency':
# Syntactic adjacency
trn_cfg.corpus_adjacency_dir = f'{path}/syntactic_dependency/'
elif rp == 'linguistic_inquiry':
        # Linguistic inquiry adjacency
trn_cfg.corpus_adjacency_dir = f'{path}/linguistic_inquiry/'
elif rp == 'graph':
# Graph adjacency
trn_cfg.corpus_adjacency_dir = f'{path}/graph/'
for ds in trn_cfg.data_sets:
print('\n\n▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄ ' + ds)
hist = train(ds=ds, training_cfg=trn_cfg)
save_history(hist, rp, ds)
tsne_visualizer(ds, rp)
create_training_plot(hist)
if __name__ == '__main__':
trn_cfg = create_training_cfg()
if len(argv) < 2:
        raise Exception(
            "Adjacency representation name cannot be left blank. Must be one of: %r." % trn_cfg.adjacency_sets)
rp_name = argv[1]
#print("------ Working with dataset", ds_name, "------\n")
# ORIGINAL_PAPER = {
# "mr": {"avg": 0.7674, "std": 0.0020},
# "Ohsumed": {"avg": 0.6836, "std": 0.0056},
# "R8": {"avg": 0.9707, "std": 0.0010},
# "R52": {"avg": 0.9356, "std": 0.0018}
# }
# print(ORIGINAL_PAPER[ds_name])
batch_train(rp_name, trn_cfg)
print('\nDone!!!')
| 2.390625
| 2
|
python/excel_reader.py
|
extrabacon/pyspreadsheet
| 24
|
12779434
|
import sys, json, traceback, xlrd
from xlrd import open_workbook, cellname, xldate_as_tuple, error_text_from_code
def dump_record(record_type, values):
    print(json.dumps([record_type, values]))
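# Each record is emitted as one line of JSON, ["w"|"s"|"c"|"error", payload],
# so a consumer can parse the stream line by line.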
def parse_cell_value(sheet, cell):
if cell.ctype == xlrd.XL_CELL_DATE:
year, month, day, hour, minute, second = xldate_as_tuple(cell.value, sheet.book.datemode)
return ['date', year, month, day, hour, minute, second]
elif cell.ctype == xlrd.XL_CELL_ERROR:
return ['error', error_text_from_code[cell.value]]
elif cell.ctype == xlrd.XL_CELL_BOOLEAN:
return False if cell.value == 0 else True
elif cell.ctype == xlrd.XL_CELL_EMPTY:
return None
return cell.value
def dump_sheet(sheet, sheet_index, max_rows):
dump_record("s", {
"index": sheet_index,
"name": sheet.name,
"rows": sheet.nrows,
"columns": sheet.ncols,
"visibility": sheet.visibility
})
for rowx in range(max_rows or sheet.nrows):
for colx in range(sheet.ncols):
cell = sheet.cell(rowx, colx)
dump_record("c", [rowx, colx, cellname(rowx, colx), parse_cell_value(sheet, cell)])
def main(cmd_args):
import optparse
usage = "\n%prog [options] [file1] [file2] ..."
oparser = optparse.OptionParser(usage)
oparser.add_option(
"-m", "--meta",
dest = "iterate_sheets",
action = "store_false",
default = True,
help = "dumps only the workbook record, does not load any worksheet")
oparser.add_option(
"-s", "--sheet",
dest = "sheets",
action = "append",
help = "names of the sheets to load - if omitted, all sheets are loaded")
oparser.add_option(
"-r", "--rows",
dest = "max_rows",
default = None,
action = "store",
type = "int",
help = "maximum number of rows to load")
options, args = oparser.parse_args(cmd_args)
# loop on all input files
for file in args:
try:
wb = open_workbook(filename=file, on_demand=True)
sheet_names = wb.sheet_names()
dump_record("w", {
"file": file,
"sheets": sheet_names,
"user": wb.user_name
})
if options.iterate_sheets:
if options.sheets:
for sheet_to_load in options.sheets:
try:
sheet_name = sheet_to_load
if sheet_to_load.isdigit():
sheet = wb.sheet_by_index(int(sheet_to_load))
sheet_name = sheet.name
else:
sheet = wb.sheet_by_name(sheet_to_load)
dump_sheet(sheet, sheet_names.index(sheet_name), options.max_rows)
wb.unload_sheet(sheet_name)
except:
dump_record("error", {
"id": "load_sheet_failed",
"file": file,
"sheet": sheet_name,
"traceback": traceback.format_exc()
})
else:
for sheet_index in range(len(sheet_names)):
try:
sheet = wb.sheet_by_index(sheet_index)
dump_sheet(sheet, sheet_index, options.max_rows)
wb.unload_sheet(sheet_index)
except:
dump_record("error", {
"id": "load_sheet_failed",
"file": file,
"sheet": sheet_index,
"traceback": traceback.format_exc()
})
except:
dump_record("error", {
"id": "open_workbook_failed",
"file": file,
"traceback": traceback.format_exc()
})
sys.exit()
main(sys.argv[1:])
| 2.859375
| 3
|
EC3/Thu.py
|
CSUpengyuyan/ECExperiment
| 0
|
12779435
|
import thulac
with open('paper', 'r', encoding='UTF-8') as f:
    string = f.read()
t = thulac.thulac()
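# cut() returns a list of [word, POS-tag] pairs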
result = t.cut(string)
print(len(result),result)
| 2.71875
| 3
|
Code/lucid_ml/run.py
|
beatobongco/Quadflor
| 0
|
12779436
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import csv, json, random, sys, os, argparse, logging, datetime, traceback
from collections import defaultdict
from pprint import pprint
from timeit import default_timer
from classifying.neural_net import MLP, ThresholdingPredictor
from classifying.stack_lin_reg import LinRegStack
from rdflib.plugins.parsers.ntriples import validate
os.environ['OMP_NUM_THREADS'] = '1' # For parallelization use n_jobs, this gives more control.
import numpy as np
from scipy.stats import entropy
import networkx as nx
import warnings
from utils.processify import processify
from itertools import product
warnings.filterwarnings("ignore", category=UserWarning)
from sklearn.model_selection import KFold, ShuffleSplit
# from sklearn.decomposition import TruncatedSVD
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.linear_model import SGDClassifier, LogisticRegression
from sklearn.metrics import f1_score, recall_score, precision_score
from sklearn.multiclass import OneVsRestClassifier
from sklearn.naive_bayes import BernoulliNB, MultinomialNB
from sklearn.pipeline import FeatureUnion, make_pipeline, Pipeline
from sklearn.preprocessing import MultiLabelBinarizer
from sklearn.svm import LinearSVC
import scipy.sparse as sps
# imports for hyperparameter optimization
from bayes_opt import BayesianOptimization
from hyperopt import fmin, rand, hp
from sklearn.gaussian_process.kernels import Matern
from classifying.br_kneighbor_classifier import BRKNeighborsClassifier
from classifying.kneighbour_l2r_classifier import KNeighborsL2RClassifier
from classifying.meancut_kneighbor_classifier import MeanCutKNeighborsClassifier
from classifying.nearest_neighbor import NearestNeighbor
from classifying.rocchioclassifier import RocchioClassifier
from classifying.stacked_classifier import ClassifierStack
from classifying.tensorflow_models import MultiLabelSKFlow, mlp_base, mlp_soph, cnn, lstm
from utils.Extractor import load_dataset
from utils.metrics import hierarchical_f_measure, f1_per_sample
from utils.nltk_normalization import NltkNormalizer, word_regexp, character_regexp
from utils.persister import Persister
from weighting.SpreadingActivation import SpreadingActivation, BinarySA, OneHopActivation
from weighting.synset_analysis import SynsetAnalyzer
from weighting.bm25transformer import BM25Transformer
from weighting.concept_analysis import ConceptAnalyzer
from weighting.graph_score_vectorizer import GraphVectorizer
from utils.text_encoding import TextEncoder
### SET LOGGING
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s')
def _build_features(options):
DATA_PATHS = json.load(options.key_file)
VERBOSE = options.verbose
persister = Persister(DATA_PATHS, options)
if options.persist and persister.is_saved():
X, Y, tr = persister.read()
else:
# --- LOAD DATA ---
fold_list = None
if options.fixed_folds:
X_raw, Y_raw, tr, fold_list = load_dataset(DATA_PATHS, options.data_key, options.fulltext, fixed_folds=True)
else:
X_raw, Y_raw, tr, _ = load_dataset(DATA_PATHS, options.data_key, options.fulltext, fixed_folds=False)
if options.toy_size < 1:
if VERBOSE: print("Just toying with %d%% of the data." % (options.toy_size * 100))
zipped = list(zip(X_raw, Y_raw))
random.shuffle(zipped)
X_raw, Y_raw = zip(*zipped)
toy_slice = int(options.toy_size * len(X_raw))
X_raw, Y_raw = X_raw[:toy_slice], Y_raw[:toy_slice]
if options.verbose: print("Binarizing labels...")
mlb = MultiLabelBinarizer(sparse_output=True, classes=[i[1] for i in sorted(
tr.index_nodename.items())] if options.hierarch_f1 else None)
Y = mlb.fit_transform(Y_raw)
# --- EXTRACT FEATURES ---
input_format = 'filename' if options.fulltext else 'content'
if options.concepts:
if tr is None:
raise ValueError("Unable to extract concepts, since no thesaurus is given!")
concept_analyzer = SynsetAnalyzer().analyze if options.synsets \
else ConceptAnalyzer(tr.thesaurus, input=input_format, persist=options.persist and options.concepts,
persist_dir=options.persist_to, repersist=options.repersist,
file_path=DATA_PATHS[options.data_key]['X']).analyze
concepts = CountVectorizer(input=input_format, analyzer=concept_analyzer, binary=options.binary,
vocabulary=tr.nodename_index if not options.synsets else None,
ngram_range=(1, options.ngram_limit))
# pick max_features from each ngram-group
if options.max_features is not None and options.ngram_limit > 1:
ngram_vectorizers = []
for i in range(1, options.ngram_limit + 1):
i_grams = CountVectorizer(input=input_format, stop_words='english', binary=options.binary,
token_pattern=word_regexp, max_features=options.max_features,
ngram_range=(i, i))
ngram_vectorizers.append(i_grams)
terms = FeatureUnion([(str(i) + "_gram", t) for i, t in enumerate(ngram_vectorizers)])
else:
terms = CountVectorizer(input=input_format, stop_words='english', binary=options.binary,
token_pattern=word_regexp, max_features=options.max_features,
ngram_range=(1, options.ngram_limit))
if options.charngrams:
character_ngrams = CountVectorizer(input=input_format, binary=options.binary,
token_pattern=character_regexp, max_features=options.max_features,
ngram_range=(1, options.char_ngram_limit),
analyzer = 'char_wb')
if options.hierarchical:
hierarchy = tr.nx_graph
if options.prune_tree:
if VERBOSE: print("[Pruning] Asserting tree hierarchy...")
old_edge_count = hierarchy.number_of_edges()
hierarchy = nx.bfs_tree(hierarchy, tr.nx_root)
pruned = old_edge_count - hierarchy.number_of_edges()
if VERBOSE: print("[Pruning] Pruned %d of %d edges (%.2f) to assert a tree hierarchy" % (pruned, old_edge_count, pruned/old_edge_count))
if options.hierarchical == "bell":
activation = SpreadingActivation(hierarchy, decay=1, weighting="bell", root=tr.nx_root)
elif options.hierarchical == "belllog":
activation = SpreadingActivation(hierarchy, decay=1, weighting="belllog", root=tr.nx_root)
elif options.hierarchical == "children":
# weights are already initialized with 1/out_degree, so use basic SA with decay 1
activation = SpreadingActivation(hierarchy, decay=1, weighting="children")
elif options.hierarchical == "binary":
activation = BinarySA(hierarchy)
elif options.hierarchical == "onehop":
activation = OneHopActivation(hierarchy, verbose=VERBOSE)
else:
# basic
activation = SpreadingActivation(tr.nx_graph, firing_threshold=1.0, decay=0.25, weighting=None)
concepts = make_pipeline(concepts, activation)
features = []
if options.graph_scoring_method:
features.append(("graph_vectorizer", GraphVectorizer(method=options.graph_scoring_method, analyzer=concept_analyzer
if options.concepts else NltkNormalizer().split_and_normalize)))
if options.terms:
features.append(("terms", terms))
if options.concepts:
features.append(("concets", concepts))
if options.charngrams:
features.append(("char_ngrams", character_ngrams))
if options.onehot:
features.append(("onehot", TextEncoder(input_format = "filename" if options.fulltext else "content",
max_words=options.max_features, pretrained = options.pretrained_embeddings,
pad_special_symbol=options.pad_special_symbol)))
if len(features) == 0:
raise ValueError("No feature representation specified!")
extractor = FeatureUnion(features)
if VERBOSE: print("Extracting features...")
if VERBOSE > 1: start_ef = default_timer()
X = extractor.fit_transform(X_raw)
if VERBOSE > 1: print(default_timer() - start_ef)
if options.persist:
persister.persist(X, Y, tr)
return X, Y, extractor, mlb, fold_list, X_raw, Y_raw, tr
def _print_feature_info(X, options):
VERBOSE = options.verbose
if VERBOSE:
print("Feature size: {}".format(X.shape[1]))
print("Number of documents: {}".format(X.shape[0]))
# these printouts only make sense if we have BoW representation
if sps.issparse(X):
print("Mean distinct words per document: {}".format(X.count_nonzero() /
X.shape[0]))
words = X.sum(axis=1)
print("Mean word count per document: {} ({})".format(words.mean(), words.std()))
if VERBOSE > 1:
X_tmp = X.todense()
# drop samples without any features...
X_tmp = X_tmp[np.unique(np.nonzero(X_tmp)[0])]
print("[entropy] Dropped {} samples with all zeroes?!".format(X.shape[0] - X_tmp.shape[0]))
X_tmp = X_tmp.T # transpose to compute entropy per sample
h = entropy(X_tmp)
print("[entropy] shape:", h.shape)
print("[entropy] mean entropy per sample {} ({})".format(h.mean(), h.std()))
# print("Mean entropy (base {}): {}".format(X_dense.shape[0], entropy(X_dense, base=X_dense.shape[0]).mean()))
# print("Mean entropy (base e): {}".format(entropy(X_dense).mean()))
# _, _, values = sp.find(X)
# print("Mean value: %.2f (+/- %.2f) " % (values.mean(), 2 * values.std()))
# n_iter = np.ceil(10**6 / (X.shape[0] * 0.9))
# print("Dynamic n_iter = %d" % n_iter)
def _print_label_info(Y, VERBOSE):
if VERBOSE:
print("Y = " + str(Y.shape))
y_sum = Y.sum(axis = 0)
for i in range(1, 5):
print("Number of labels assigned more than", i, "times:" , np.sum(y_sum > i))
# compute avg number of labels per document
sum_of_labels_per_document = Y.sum(axis = 1)
print("Average number of labels per document:" , np.mean(sum_of_labels_per_document))
# compute avg number of documents per label
print("Average number of documents per label:" , np.mean(y_sum))
def _check_interactive(options, X, Y, extractor, mlb):
if options.interactive:
print("Please wait...")
clf = create_classifier(options, Y.shape[1]) # --- INTERACTIVE MODE ---
clf.fit(X, Y)
thesaurus = tr.thesaurus
print("Ready.")
try:
for line in sys.stdin:
x = extractor.transform([line])
y = clf.predict(x)
desc_ids = mlb.inverse_transform(y)[0]
labels = [thesaurus[desc_id]['prefLabel'] for desc_id in desc_ids]
print(*labels)
except KeyboardInterrupt:
exit(1)
exit(0)
def _build_folds(options, fold_list):
validation_set_indices = None
if options.cross_validation:
kf = KFold(n_splits=options.folds, shuffle=True)
elif options.fixed_folds:
fixed_folds = []
        # TODO: we assume 10 normal folds and 1 fold with extra samples. need to generalize
basic_folds = range(10)
# we assume the extra data to be in the last fold
# TODO: currently we assume 10 folds (+1 extra)
extra_data = [index for index,x in enumerate(fold_list) if x == 10]
validation_set_indices = []
for i in range(options.folds):
training_fold = [index for index,x in enumerate(fold_list) if x in basic_folds and x != i]
if options.validation_size > 0:
# separate validation from training set here, and rejoin later if appropriate
num_validation_samples = int(len(training_fold) * options.validation_size)
validation_set_indices.append(training_fold[:num_validation_samples])
training_fold = training_fold[num_validation_samples:]
# add more training data from extra samples
if options.extra_samples_factor > 1:
num_extra_samples = int(min((options.extra_samples_factor - 1) * len(training_fold), len(extra_data)))
training_fold += extra_data[:num_extra_samples]
test_fold = [index for index,x in enumerate(fold_list) if x == i]
fixed_folds.append((training_fold, test_fold))
# helper class to conform sklearn's model_selection structure
class FixedFoldsGenerator():
def split(self, X):
return fixed_folds
kf = FixedFoldsGenerator()
else:
kf = ShuffleSplit(test_size=options.test_size, n_splits = 1)
return kf, validation_set_indices
def _run_experiment(X, Y, kf, validation_set_indices, mlb, X_raw, Y_raw, tr, options):
VERBOSE = options.verbose
scores = defaultdict(list)
if options.plot:
all_f1s = []
for iteration, (train, test) in enumerate(kf.split(X)):
if VERBOSE: print("=" * 80)
X_train, X_test, Y_train, Y_test = X[train], X[test], Y[train], Y[test]
        clf = create_classifier(options, Y_train.shape[1])
# extract a validation set and inform the classifier where to find it
if options.validation_size > 0:
# if we don't have fixed folds, we may pick the validation set randomly
if options.cross_validation or options.one_fold:
train, val = next(ShuffleSplit(test_size=options.validation_size, n_splits = 1).split(X_train))
X_train, X_val, Y_train, Y_val = X_train[train], X_train[val], Y_train[train], Y_train[val]
elif options.fixed_folds:
X_val = X[validation_set_indices[iteration]]
Y_val = Y[validation_set_indices[iteration]]
# put validation data at the end of training data and tell classifier the position where they start, if it is able
_, estimator = clf.steps[-1]
if hasattr(estimator, 'validation_data_position'):
estimator.validation_data_position = X_train.shape[0]
else:
raise ValueError("Validation size given although the estimator has no 'validation_data_position' property!")
if sps.issparse(X):
X_train = sps.vstack((X_train, X_val))
else:
X_train = np.vstack((X_train, X_val))
if sps.issparse(Y):
Y_train = sps.vstack((Y_train, Y_val))
else:
Y_train = np.vstack((Y_train, Y_val))
# mlp doesn't seem to like being stuck into a new process...
if options.debug or options.clf_key in {'mlp', 'mlpthr', 'mlpsoph', "cnn", "mlpbase", "lstm"}:
Y_pred, Y_train_pred = fit_predict(X_test, X_train, Y_train, options, tr, clf)
else:
Y_pred, Y_train_pred = fit_predict_new_process(X_test, X_train, Y_train, options, tr, clf)
if options.training_error:
scores['train_f1_samples'].append(f1_score(Y_train, Y_train_pred, average='samples'))
scores['avg_n_labels_pred'].append(np.mean(Y_pred.getnnz(1)))
scores['avg_n_labels_gold'].append(np.mean(Y_test.getnnz(1)))
scores['f1_samples'].append(f1_score(Y_test, Y_pred, average='samples'))
scores['p_samples'].append(precision_score(Y_test, Y_pred, average='samples'))
scores['r_samples'].append(recall_score(Y_test, Y_pred, average='samples'))
scores['f1_micro'].append(f1_score(Y_test, Y_pred, average='micro'))
scores['p_micro'].append(precision_score(Y_test, Y_pred, average='micro'))
scores['r_micro'].append(recall_score(Y_test, Y_pred, average='micro'))
scores['f1_macro'].append(f1_score(Y_test, Y_pred, average='macro'))
scores['p_macro'].append(precision_score(Y_test, Y_pred, average='macro'))
scores['r_macro'].append(recall_score(Y_test, Y_pred, average='macro'))
if options.plot:
all_f1s.append(f1_per_sample(Y_test, Y_pred))
if options.worst:
f1s = f1_per_sample(Y_test, Y_pred)
predicted_labels = [[tr.thesaurus[l]['prefLabel'] for l in y] for y in mlb.inverse_transform(Y_pred)]
f1s_ids = sorted(zip(f1s, [X_raw[i] for i in test],
[[tr.thesaurus[l]['prefLabel'] for l in Y_raw[i]] for i in test], predicted_labels))
pprint(f1s_ids[:options.worst])
if options.hierarch_f1:
scores['hierarchical_f_score'].append(
hierarchical_f_measure(tr, Y_test, Y_pred))
if options.cross_validation and VERBOSE:
print(' <> '.join(["%s : %0.3f" % (key, values[-1]) for key, values in sorted(scores.items())]))
# if options.lsa:
# if VERBOSE: print("Variance explained by SVD:", svd.explained_variance_ratio_.sum())
if VERBOSE: print("=" * 80)
results = {key: (np.array(values).mean(), np.array(values).std()) for key, values in scores.items()}
print(' <> '.join(["%s: %0.3f (+/- %0.3f)" % (key, mean, std) for key, (mean, std) in sorted(results.items())]))
if options.output_file:
write_to_csv(results, options)
if options.plot:
Y_f1 = np.hstack(all_f1s)
Y_f1.sort()
if VERBOSE:
print("Y_f1.shape:", Y_f1.shape, file=sys.stderr)
print("Saving f1 per document as txt numpy to", options.plot)
np.savetxt(options.plot, Y_f1)
return results
def _update_options(options, **parameters):
    """Set each optimized hyperparameter on the options namespace, skipping unknown names."""
    for param_name, param_value in parameters.items():
        print("In automatic optimization trying parameter:", param_name, "with value", param_value)
        # setattr on an argparse Namespace never raises AttributeError, so check explicitly
        if not hasattr(options, param_name):
            print("Can't find parameter", param_name, "so we'll not use it.")
            continue
        setattr(options, param_name, param_value)
    return options
def _make_space(options):
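    """Parse the search-space file into the structure the chosen optimizer needs.

    Each non-comment line is comma-separated: the parameter name first, then
    bounds and a hyperopt distribution (random), bounds and initial values
    (bayesian), or a type followed by all candidate values (grid).
    """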
space = {}
inits = {}
with open(options.optimization_spaces) as optimization_file:
for line in optimization_file:
# escape comments
if line.startswith("#"):
continue
line = line.strip()
info = line.split(",")
param_name = info[0]
if options.optimization == "random":
left_bound, right_bound = float(info[1]), float(info[2])
param_type = info[3]
try:
param_type = getattr(hp, param_type)
except AttributeError:
print("hyperopt has no attribute", param_type)
continue
space[param_name] = param_type(param_name, left_bound, right_bound)
elif options.optimization == "bayesian":
left_bound, right_bound = float(info[1]), float(info[2])
init_values = list(map(float, info[3:]))
num_init_vals = len(init_values)
inits[param_name] = init_values
space[param_name] = (left_bound, right_bound)
elif options.optimization == "grid":
param_type = info[1]
def get_cast_func(some_string_type):
cast_func = None
if some_string_type == "int":
cast_func = int
elif some_string_type == "float":
cast_func = float
elif some_string_type == "string":
cast_func = str
elif some_string_type == "bool":
cast_func = bool
return cast_func
cast_func = get_cast_func(param_type)
if cast_func is None:
if param_type.startswith("list"):
# determine type in list
list_type = get_cast_func(param_type.split("-")[1])
                        # assume they are separated by semicolon
def extract_items(list_string):
return [list_type(x) for x in list_string.split(";")]
cast_func = extract_items
# all possible values
space[param_name] = list(map(cast_func, info[2:]))
if options.optimization == "bayesian":
return space, inits, num_init_vals
else:
return space
def _all_option_combinations(space):
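    """Expand a grid-search space dict into the list of all parameter combinations."""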
names = [name for name, _ in space.items()]
values = [values for _, values in space.items()]
val_combinations = product(*values)
combinations = []
for combi in val_combinations:
new_param_dict = {}
for i, val in enumerate(combi):
new_param_dict[names[i]] = val
combinations.append(new_param_dict)
return combinations
def run(options):
VERBOSE = options.verbose
    ### SET SEEDS FOR REPRODUCIBILITY
np.random.seed(1337)
random.seed(1337)
###
# load dataset and build feature representation
X, Y, extractor, mlb, fold_list, X_raw, Y_raw, tr = _build_features(options)
_print_feature_info(X, options)
_print_label_info(Y, options)
# go to interactive mode if on
_check_interactive(options, X, Y, extractor, mlb)
if options.predict:
        clf = create_classifier(options, Y.shape[1])
# thesaurus = tr.thesaurus
with open(options.input_file, 'r') as f:
for line in f:
x = extractor.transform([line])
y = clf.predict(x)
desc_ids = mlb.inverse_transform(y)
# labels = [thesaurus[desc_id]['prefLabel'] for desc_id in desc_ids]
print(desc_ids)
exit(0)
if VERBOSE: print("Performing %d-fold cross-validation..." % (options.folds if options.cross_validation else 1))
# prepare validation over folds
kf, validation_set_indices = _build_folds(options, fold_list)
if options.optimization:
def optimized_experiment(**parameters):
current_options = _update_options(options, **parameters)
results = _run_experiment(X, Y, kf, validation_set_indices, mlb, X_raw, Y_raw, tr, current_options)
            # return the mean sample-averaged F1 of the experiment as the optimization target
            return results["f1_samples"][0]
if options.optimization == "bayesian":
gp_params = {"alpha": 1e-5, "kernel" : Matern(nu = 5 / 2)}
space, init_vals, num_init_vals = _make_space(options)
bayesian_optimizer = BayesianOptimization(optimized_experiment, space)
bayesian_optimizer.explore(init_vals)
bayesian_optimizer.maximize(n_iter=options.optimization_iterations - num_init_vals,
acq = 'ei',
**gp_params)
elif options.optimization == "random":
fmin(lambda parameters : optimized_experiment(**parameters),
_make_space(options),
algo=rand.suggest,
max_evals=options.optimization_iterations,
rstate = np.random.RandomState(1337))
elif options.optimization == "grid":
# perform grid-search by running every possible parameter combination
combinations = _all_option_combinations(_make_space(options))
for combi in combinations:
optimized_experiment(**combi)
else:
results = _run_experiment(X, Y, kf, validation_set_indices, mlb, X_raw, Y_raw, tr, options)
def fit_predict(X_test, X_train, Y_train, options, tr, clf):
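    """Fit the classifier on the training set and predict the test set (and,
    if requested, the training set for computing the training error)."""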
if options.verbose: print("Fitting", X_train.shape[0], "samples...")
clf.fit(X_train, Y_train)
if options.training_error:
if options.verbose: print("Predicting", X_train.shape[0], "training samples...")
Y_pred_train = clf.predict(X_train)
else:
Y_pred_train = None
if options.verbose: print("Predicting", X_test.shape[0], "samples...")
Y_pred = clf.predict(X_test)
return Y_pred, Y_pred_train
@processify
def fit_predict_new_process(X_test, X_train, Y_train, options, tr, clf):
return fit_predict(X_test, X_train, Y_train, options, tr, clf)
def create_classifier(options, num_concepts):
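    """Build the classifier selected by options.clf_key and, unless a
    graph-scoring method or a Bayes classifier is used, wrap it in a pipeline
    with a TF-IDF or BM25 feature transformation."""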
# Learning 2 Rank algorithm name to ranklib identifier mapping
l2r_algorithm = {'listnet' : "7",
'adarank' : "3",
'ca' : "4",
'lambdamart' : "6"}
# --- BUILD CLASSIFIER ---
sgd = OneVsRestClassifier(SGDClassifier(loss='log', max_iter=options.max_iterations, verbose=max(0,options.verbose-2), penalty=options.penalty, alpha=options.alpha, average=True),
n_jobs=options.jobs)
logregress = OneVsRestClassifier(LogisticRegression(C=64, penalty='l2', dual=False, verbose=max(0,options.verbose-2)),
n_jobs=options.jobs)
l2r_classifier = KNeighborsL2RClassifier(n_neighbors=options.l2r_neighbors, max_iterations=options.max_iterations,
count_concepts=True if options.concepts else False,
number_of_concepts=num_concepts,
count_terms=True if options.terms else False,
algorithm='brute', metric='cosine',
algorithm_id = l2r_algorithm[options.l2r],
l2r_metric = options.l2r_metric + "@20",
n_jobs = options.jobs,
translation_probability = options.translation_prob)
mlp = MLP(verbose=options.verbose, batch_size = options.batch_size, learning_rate = options.learning_rate, epochs = options.max_iterations)
classifiers = {
"nn": NearestNeighbor(use_lsh_forest=options.lshf),
"brknna": BRKNeighborsClassifier(mode='a', n_neighbors=options.k, use_lsh_forest=options.lshf,
algorithm='brute', metric='cosine', auto_optimize_k=options.grid_search),
"brknnb": BRKNeighborsClassifier(mode='b', n_neighbors=options.k, use_lsh_forest=options.lshf,
algorithm='brute', metric='cosine', auto_optimize_k=options.grid_search),
"listnet": l2r_classifier,
"l2rdt": ClassifierStack(base_classifier=l2r_classifier, n_jobs=options.jobs, n=options.k, dependencies=options.label_dependencies),
"mcknn": MeanCutKNeighborsClassifier(n_neighbors=options.k, algorithm='brute', metric='cosine', soft=False),
# alpha 10e-5
"bbayes": OneVsRestClassifier(BernoulliNB(alpha=options.alpha), n_jobs=options.jobs),
"mbayes": OneVsRestClassifier(MultinomialNB(alpha=options.alpha), n_jobs=options.jobs),
"lsvc": OneVsRestClassifier(LinearSVC(C=4, loss='squared_hinge', penalty='l2', dual=False, tol=1e-4),
n_jobs=options.jobs),
"logregress": logregress,
"sgd": sgd,
"rocchio": RocchioClassifier(metric = 'cosine', k = options.k),
"sgddt": ClassifierStack(base_classifier=sgd, n_jobs=options.jobs, n=options.k),
"rocchiodt": ClassifierStack(base_classifier=RocchioClassifier(metric = 'cosine'), n_jobs=options.jobs, n=options.k),
"logregressdt": ClassifierStack(base_classifier=logregress, n_jobs=options.jobs, n=options.k),
"mlp": mlp,
"mlpbase" : MultiLabelSKFlow(batch_size = options.batch_size,
num_epochs=options.max_iterations,
learning_rate = options.learning_rate,
tf_model_path = options.tf_model_path,
optimize_threshold = options.optimize_threshold,
get_model = mlp_base(hidden_activation_function = options.hidden_activation_function),
patience = options.patience,
num_steps_before_validation = options.num_steps_before_validation,
bottleneck_layers = options.bottleneck_layers,
hidden_keep_prob = options.dropout,
gpu_memory_fraction = options.memory),
"mlpsoph" : MultiLabelSKFlow(batch_size = options.batch_size,
num_epochs=options.max_iterations,
learning_rate = options.learning_rate,
tf_model_path = options.tf_model_path,
optimize_threshold = options.optimize_threshold,
get_model = mlp_soph(options.dropout, options.embedding_size,
hidden_layers = options.hidden_layers, self_normalizing = options.snn,
standard_normal = options.standard_normal,
batch_norm = options.batch_norm,
hidden_activation_function = options.hidden_activation_function
),
patience = options.patience,
num_steps_before_validation = options.num_steps_before_validation,
bottleneck_layers = options.bottleneck_layers,
hidden_keep_prob = options.dropout,
gpu_memory_fraction = options.memory,
meta_labeler_phi = options.meta_labeler_phi,
meta_labeler_alpha = options.meta_labeler_alpha,
meta_labeler_min_labels = options.meta_labeler_min_labels,
meta_labeler_max_labels = options.meta_labeler_max_labels),
"cnn": MultiLabelSKFlow(batch_size = options.batch_size,
num_epochs=options.max_iterations,
learning_rate = options.learning_rate,
tf_model_path = options.tf_model_path,
optimize_threshold = options.optimize_threshold,
patience = options.patience,
num_steps_before_validation = options.num_steps_before_validation,
get_model = cnn(options.dropout, options.embedding_size,
hidden_layers = options.hidden_layers,
pretrained_embeddings_path = options.pretrained_embeddings,
trainable_embeddings=options.trainable_embeddings,
dynamic_max_pooling_p=options.dynamic_max_pooling_p,
window_sizes = options.window_sizes,
num_filters = options.num_filters),
bottleneck_layers = options.bottleneck_layers,
hidden_keep_prob = options.dropout,
gpu_memory_fraction = options.memory,
meta_labeler_phi = options.meta_labeler_phi,
meta_labeler_alpha = options.meta_labeler_alpha,
meta_labeler_min_labels = options.meta_labeler_min_labels,
meta_labeler_max_labels = options.meta_labeler_max_labels),
"lstm": MultiLabelSKFlow(batch_size = options.batch_size,
num_epochs=options.max_iterations,
learning_rate = options.learning_rate,
tf_model_path = options.tf_model_path,
optimize_threshold = options.optimize_threshold,
patience = options.patience,
num_steps_before_validation = options.num_steps_before_validation,
get_model = lstm(options.dropout, options.embedding_size,
hidden_layers = options.hidden_layers,
pretrained_embeddings_path = options.pretrained_embeddings,
trainable_embeddings = options.trainable_embeddings,
variational_recurrent_dropout = options.variational_recurrent_dropout,
bidirectional = options.bidirectional,
aggregate_output = options.aggregate_output,
iterate_until_maxlength = options.iterate_until_maxlength,
num_last_outputs = options.pad_special_symbol),
bottleneck_layers = options.bottleneck_layers,
hidden_keep_prob = options.dropout,
gpu_memory_fraction = options.memory,
meta_labeler_phi = options.meta_labeler_phi,
meta_labeler_alpha = options.meta_labeler_alpha,
meta_labeler_min_labels = options.meta_labeler_min_labels,
meta_labeler_max_labels = options.meta_labeler_max_labels,
pretrained_model_path = options.pretrained_model_path),
"nam": ThresholdingPredictor(MLP(verbose=options.verbose, final_activation='sigmoid', batch_size = options.batch_size,
learning_rate = options.learning_rate,
epochs = options.max_iterations),
alpha=options.alpha, stepsize=0.01, verbose=options.verbose),
"mlpthr": LinRegStack(mlp, verbose=options.verbose),
"mlpdt" : ClassifierStack(base_classifier=mlp, n_jobs=options.jobs, n=options.k)
}
# Transformation: either bm25 or tfidf included in pipeline so that IDF of test data is not considered in training
norm = "l2" if options.norm else None
if options.bm25:
trf = BM25Transformer(sublinear_tf=True if options.lsa else False, use_idf=options.idf, norm=norm,
bm25_tf=True, use_bm25idf=True)
elif options.terms or options.concepts:
trf = TfidfTransformer(sublinear_tf=True if options.lsa else False, use_idf=options.idf, norm=norm)
# Pipeline with final estimator ##
if options.graph_scoring_method or options.clf_key in ["bbayes", "mbayes"]:
clf = classifiers[options.clf_key]
# elif options.lsa:
# svd = TruncatedSVD(n_components=options.lsa)
# lsa = make_pipeline(svd, Normalizer(copy=False))
# clf = Pipeline([("trf", trf), ("lsa", lsa), ("clf", classifiers[options.clf_key])])
elif options.terms or options.concepts:
clf = Pipeline([("trf", trf), ("clf", classifiers[options.clf_key])])
else:
clf = Pipeline([("clf", classifiers[options.clf_key])])
return clf
def _generate_parsers():
# meta parser to handle config files
meta_parser = argparse.ArgumentParser(add_help=False)
meta_parser.add_argument('-C', '--config-file', dest='config_file', type=argparse.FileType('r'), default=None, help= \
"Specify a config file containing lines of execution arguments")
meta_parser.add_argument('-d', '--dry', dest='dry', action='store_true', default=False, help= \
"Do nothing but validate command line and config file parameters")
### Parser for the usual command line arguments
parser = argparse.ArgumentParser(parents=[meta_parser])
parser.add_argument('-j', type=int, dest='jobs', default=1, help="Number of jobs (processes) to use when something can be parallelized. -1 means as many as possible.")
parser.add_argument('-o', '--output', dest="output_file", type=str, default='', help= \
"Specify the file name to save the result in. Default: [None]")
    parser.add_argument('--input', dest="input_file", type=str, default='', help="Specify the input file to read documents from when using --predict. Default: [None]")
parser.add_argument('-O',
'--plot',
type=str,
default=None,
help='Plot results to FNAME',
metavar='FNAME')
parser.add_argument('-v', '--verbose', default=0, action="count", help=\
"Specify verbosity level -v for 1, -vv for 2, ... [0]")
parser.add_argument('--debug', action="store_true", dest="debug", default=False, help=
"Enables debug mode. Makes fit_predict method debuggable by not starting a single fold in a new process.")
metric_options = parser.add_argument_group()
metric_options.add_argument('-r', action='store_true', dest='hierarch_f1', default=False, help=
                        'Calculate hierarchical f-measure (only usable when a thesaurus hierarchy is available)')
metric_options.add_argument('--worst', type=int, dest='worst', default=0, help=
'Output given number of top badly performing samples by f1_measure.')
# mutually exclusive group for executing
execution_options = parser.add_mutually_exclusive_group(required=True)
execution_options.add_argument('-x', action="store_true", dest="one_fold", default=False, help=
"Run on one fold [False]")
execution_options.add_argument('-X', action="store_true", dest="cross_validation", default=False, help=
"Perform cross validation [False]")
execution_options.add_argument('--fixed_folds', action="store_true", dest="fixed_folds", default=False, help=
"Perform cross validation with fixed folds.")
execution_options.add_argument('-i', '--interactive', action="store_true", dest="interactive", default=False, help= \
"Use whole supplied data as training set and classify new inputs from STDIN")
execution_options.add_argument('--predict', action="store_true", dest="predict", default=False, help="Run a saved model")
# be a little versatile
detailed_options = parser.add_argument_group("Detailed Execution Options")
detailed_options.add_argument('--test-size', type=float, dest='test_size', default=0.1, help=
"Desired relative size for the test set [0.1]")
detailed_options.add_argument('--optimization', type=str, dest='optimization', default=None, help=
"Whether to use Random Search or Bayesian Optimization for hyperparameter search. [None]", choices = ["grid", "random", "bayesian", None])
detailed_options.add_argument('--optimization_spaces', type=str, dest='optimization_spaces', default="default_searchspace", help=
"Path to a file that specifies the search spaces for hyperparameters [default_searchspace]")
detailed_options.add_argument('--optimization_iterations', type=int, dest='optimization_iterations', default=10, help=
"Number of iterations in hyperparameter search. [10]")
detailed_options.add_argument('--val-size', type=float, dest='validation_size', default=0., help=
"Desired relative size of the training set used as validation set [0.]")
detailed_options.add_argument('--folds', type=int, dest='folds', default=10, help=
"Number of folds used for cross validation [10]")
detailed_options.add_argument('--toy', type=float, dest='toy_size', default=1.0, help=
"Eventually use a smaller block of the data set from the very beginning. [1.0]")
detailed_options.add_argument('--extra_samples_factor', type=float, dest='extra_samples_factor', default=1.0, help=
"This option only has an effect when the '--fixed_folds' option is true. The value determines the factor 'x >= 1' by which\
the training set is enriched with samples from the 11th fold. Hence, the total number of training data will be \
                        x * size of training set. By default, the value is x = 1.")
detailed_options.add_argument('--training-error', action="store_true", dest="training_error", default=False, help=\
"Compute training error")
# options to specify the dataset
data_options = parser.add_argument_group("Dataset Options")
data_options.add_argument('-F', '--fulltext', action="store_true", dest="fulltext", default=False,
help="Fulltext instead of titles")
data_options.add_argument('-k', '--key-file', dest="key_file", type=argparse.FileType('r'), default="file_paths.json", help=\
"Specify the file to use as Key file for -K")
data_options.add_argument('-K', '--datakey', type=str, dest="data_key", default='example-titles', help="Prestored key of data.")
# group for feature_options
feature_options = parser.add_argument_group("Feature Options")
feature_options.add_argument('-c', '--concepts', action="store_true", dest="concepts", default=False, help= \
"use concepts [False]")
feature_options.add_argument('-t', '--terms', action="store_true", dest="terms", default=False, help= \
"use terms [True]")
feature_options.add_argument('--charngrams', action="store_true", dest="charngrams", default=False, help= \
"use character n-grams [True]")
feature_options.add_argument('--onehot', action="store_true", dest="onehot", default=False, help= \
"Encode the input words as one hot. [True]")
feature_options.add_argument('--max_features', type=int, dest="max_features", default=None, help= \
"Specify the maximal number of features to be considered for a BoW model [None, i.e., infinity]")
feature_options.add_argument('-s', '--synsets', action="store_true", dest="synsets", default=False, help= \
"use synsets [False]")
feature_options.add_argument('-g', '--graphscoring', dest="graph_scoring_method", type=str, default="", \
help="Use graphscoring method instead of concepts and/or terms", \
choices=["degree", "betweenness", "pagerank", "hits", "closeness", "katz"])
feature_options.add_argument('--prune', action="store_true", dest="prune_tree", default=False, help="Prune polyhierarchy to tree")
feature_options.add_argument('-H', type=str, dest="hierarchical", default="", \
help="Perform spreading activation.", \
choices=['basic', 'bell', 'belllog', 'children', 'binary', 'onehop'])
feature_options.add_argument('-B', '--bm25', dest="bm25", action="store_true", default=False, help=
"Use BM25 instead of TFIDF for final feature transformation")
feature_options.add_argument('-b', '--binary', action="store_true", dest="binary", default=False, help=
"do not count the words but only store their prevalence in a document")
feature_options.add_argument('--no-idf', action="store_false", dest="idf", default=True, help=
"Do not use IDF")
feature_options.add_argument('--no-norm', action="store_false", dest="norm",
default=True, help="Do not normalize values")
feature_options.add_argument('--ngram_limit', type=int, dest="ngram_limit", default=1, help= \
"Specify the n for n-grams to take into account for token-based BoW vectorization. [1]")
feature_options.add_argument('--char_ngram_limit', type=int, dest="char_ngram_limit", default=3, help= \
"Specify the n for character n-grams to take into account for character n-gram based BoW vectorization. [3]")
# group for classifiers
classifier_options = parser.add_argument_group("Classifier Options")
classifier_options.add_argument('-f', '--classifier', dest="clf_key", default="nn", help=
"Specify the final classifier.", choices=["nn", "brknna", "brknnb", "bbayes", "mbayes", "lsvc",
"sgd", "sgddt", "rocchio", "rocchiodt", "logregress", "logregressdt",
"mlp", "listnet", "l2rdt", 'mlpthr', 'mlpdt', 'nam', 'mlpbase', "mlpsoph", "cnn", "lstm"])
classifier_options.add_argument('-a', '--alpha', dest="alpha", type=float, default=1e-7, help= \
"Specify alpha parameter for stochastic gradient descent")
classifier_options.add_argument('-n', dest="k", type=int, default=1, help=
"Specify k for knn-based classifiers. Also used as the count of meta-classifiers considered for each sample in multi value stacking approaches [1]")
classifier_options.add_argument('-l', '--lshf', action="store_true", dest="lshf", default=False, help=
"Approximate nearest neighbors using locality sensitive hashing forests")
classifier_options.add_argument('-L', '--LSA', type=int, dest="lsa", default=None, help=
"Use Latent Semantic Analysis / Truncated Singular Value Decomposition\
with n_components output dimensions", metavar="n_components")
classifier_options.add_argument('-G', '--grid-search', action="store_true", dest="grid_search", default=False, help=
"Performs Grid search to find optimal K")
classifier_options.add_argument('-e', type=int, dest="max_iterations", default=5, help=
"Determine the number of epochs for the training of several classifiers [5]")
classifier_options.add_argument('--patience', type=int, dest="patience", default=5, help=
"Specify the number of steps of no improvement in validation score before training is stopped. [5]")
classifier_options.add_argument('--num_steps_before_validation', type=int, dest="num_steps_before_validation", default=None, help=
"Specify the number of steps before evaluating on the validation set. [None]")
classifier_options.add_argument('--learning_rate', type=float, dest="learning_rate", default=None, help=
"Determine the learning rate for training of several classifiers. If set to 'None', the learning rate is automatically based on an empirical good value and \
adapted to the batch size. [None]")
classifier_options.add_argument('-P', type=str, dest="penalty", default=None, choices=['l1','l2','elasticnet'], help=\
"Penalty term for SGD and other regularized linear models")
classifier_options.add_argument('--l2r-alg', type=str, dest="l2r", default="listnet", choices=['listnet','adarank','ca', 'lambdamart'], help=\
"L2R algorithm to use when classifier is 'listnet'")
classifier_options.add_argument('--l2r-metric', type=str, dest="l2r_metric", default="ERR@k", choices=['MAP', 'NDCG', 'DCG', 'P', 'RR', 'ERR'], help=\
"L2R metric to optimize for when using listnet classifier'")
classifier_options.add_argument('--l2r-translation-prob', action="store_true", dest="translation_prob", default=False, help=
"Whether to include the translation probability from concepts into titles. If set to true, number of jobs must be 1.")
classifier_options.add_argument('--label_dependencies', action="store_true", dest="label_dependencies", default=False, help=
"Whether the ClassifierStack should make use of all label information and thus take into account possible interdependencies.")
classifier_options.add_argument('--l2r-neighbors', dest="l2r_neighbors", type=int, default=45, help=
"Specify n_neighbors argument for KneighborsL2RClassifier.")
classifier_options.add_argument('--batch_size', dest="batch_size", type=int, default=256, help=
"Specify batch size for neural network training.")
# neural network specific options
neural_network_options = parser.add_argument_group("Neural Network Options")
neural_network_options.add_argument('--dropout', type=float, dest="dropout", default=0.5, help=
"Determine the keep probability for all dropout layers.")
neural_network_options.add_argument('--memory', type=float, dest="memory", default=1.0, help=
"Fraction of available GPU-memory to use for experiment.")
neural_network_options.add_argument('--embedding_size', type=int, dest="embedding_size", default=300, help=
"Determine the size of a word embedding vector (for MLP-Soph, CNN, and LSTM if embedding is learned jointly). \
Specify --embedding_size=0 to skip the embedding layer, if applicable. [300]")
neural_network_options.add_argument('--pretrained_embeddings', type=str, dest="pretrained_embeddings", default=None, help=
"Specify the path to a file contraining pretrained word embeddings. The file must have a format where each line consists of the word\
followed by the entries of its vectors, separated by blanks. If None is specified, the word embeddings are zero-initialized and trained\
jointly with the classification task. [None]")
neural_network_options.add_argument('--pretrained_model_path', type=str, dest="pretrained_model_path", default=None, help="Specify path to pretrained model.")
neural_network_options.add_argument('--hidden_activation_function', type=str, dest="hidden_activation_function", default="relu", help=
"Specify the activation function used on the hidden layers in MLP-Base and MLP-Soph. [relu]", choices = ["relu", "tanh", "identity", "swish"])
neural_network_options.add_argument('--trainable_embeddings', action="store_true", dest="trainable_embeddings", default=False, help=
"Whether to keep training the pretrained embeddings further with classification the task or not. [False]")
neural_network_options.add_argument('--hidden_layers', type=int, dest="hidden_layers", nargs='+', default=[1000], help=
"Specify the number of layers and the respective number of units as a list. The i-th element of the list \
specifies the number of units in layer i. [1000]")
neural_network_options.add_argument('--bottleneck_layers', type=int, dest="bottleneck_layers", nargs='+', default=None, help=
"Specify the number of bottleneck layers and the respective number of units as a list. The i-th element of the list \
                        specifies the number of units in layer i. In contrast to the --hidden_layers option, where the respective model decides\
                        how to interpret multiple hidden layers, the bottleneck layers are feed-forward layers which are plugged in between \
                        the last layer of a particular model (e.g. CNN, LSTM) and the output layer. (None)")
neural_network_options.add_argument('--standard_normal', action="store_true", dest="standard_normal", default=False, help=
"Whether to normalize the input features to mean = 0 and std = 1 for MLPSoph. [False]")
neural_network_options.add_argument('--batch_norm', action="store_true", dest="batch_norm", default=False, help=
"Whether to apply batch normalization after at a hidden layer in MLP. [False]")
neural_network_options.add_argument('--snn', action="store_true", dest="snn", default=False, help=
"Whether to use SELU activation and -dropout. If set to False, the activation specified in --hidden_activation_function is used. [False]")
neural_network_options.add_argument('--variational_recurrent_dropout', action="store_true", dest="variational_recurrent_dropout", default=False, help=
"Whether to perform dropout on the recurrent unit between states in addition to dropout on the aggregated output. [False]")
neural_network_options.add_argument('--bidirectional', action="store_true", dest="bidirectional", default=False, help=
"When activated, we create two instances of (potentially multi-layered) LSTMs, where one reads the input from left to right and \
the other reads it from right to left. [False]")
neural_network_options.add_argument('--iterate_until_maxlength', action="store_true", dest="iterate_until_maxlength", default=False, help=
"When activated, the LSTM always iterates max_features steps, even if the actual sequence is shorter. Instead, it consumes\
at each additional step the padding symbol. The outputs of steps beyond the actual sequence length are taken into account as well for output aggregation. [False]")
neural_network_options.add_argument('--aggregate_output', type=str, dest='aggregate_output', default="average", help=
"How to aggregate the outputs of an LSTM. 'last' uses the output at the last time step. 'average' takes the mean over all outputs. [average]",
choices = ["average", "last", "attention", "oe-attention", "sum"])
neural_network_options.add_argument('--pad_special_symbol', type=int, dest="pad_special_symbol", default=0, help=
"How many special tokens to pad after each sample for OE-LSTMs. [0]")
neural_network_options.add_argument('--optimize_threshold', action="store_true", dest="optimize_threshold", default=False, help=
"Optimize the prediction threshold on validation set during training. [False]")
neural_network_options.add_argument('--dynamic_max_pooling_p', type=int, dest="dynamic_max_pooling_p", default=1, help=
"Specify the number of chunks (p) to perform max-pooling over. [1]")
neural_network_options.add_argument('--num_filters', type=int, dest="num_filters", default=100, help=
"Specify the number of filters used in a CNN (per window size). [100]")
neural_network_options.add_argument('--window_sizes', type=int, dest="window_sizes", nargs='+', default=[3,4,5], help=
"Specify the window sizes used for extracting features in a CNN. [[3,4,5]]")
neural_network_options.add_argument('--meta_labeler_min_labels', type=int, dest="meta_labeler_min_labels", default=1, help=
"Specify the minimum number of labels to assign the meta labeler can predict. [1]")
neural_network_options.add_argument('--meta_labeler_max_labels', type=int, dest="meta_labeler_max_labels", default=None, help=
"Specify the maximum number of labels to assign the meta labeler can predict. When 'None' is specified, the maximum \
is computed from the data. [None]")
detailed_options.add_argument('--meta_labeler_phi', type=str, dest='meta_labeler_phi', default=None, help=
"Specify whether to predict number of labels from 'score' or from 'content', or whether to use meta labeler at all (None). [None]",
choices = ["content", "score"])
neural_network_options.add_argument('--meta_labeler_alpha', type=float, dest="meta_labeler_alpha", default=0.1, help=
"The alpha-weight of predicting the correct number of labels when doing meta-labeling. [0.1]")
# persistence_options
persistence_options = parser.add_argument_group("Feature Persistence Options")
persistence_options.add_argument('-p', dest="persist", action="store_true", default=False, help=
"Use persisted count vectors or persist if has changed.")
persistence_options.add_argument('--repersist', dest="repersist", action="store_true", default=False, help=
"Persisted features will be recalculated and overwritten.")
persistence_options.add_argument('--persist_to', dest="persist_to", default=os.curdir + os.sep + 'persistence', help=
"Path to persist files.")
persistence_options.add_argument("--tf-model-path", dest="tf_model_path", default=".tmp_best_models", help=
"Directory to store best models for early stopping.")
return meta_parser, parser
def write_to_csv(score, opt):
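    """Append one result row to opt.output_file: all option values followed by
    all scores, writing the combined header line first if the file is empty."""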
f = open(opt.output_file, 'a')
if os.stat(opt.output_file).st_size == 0:
for i, (key, _) in enumerate(opt.__dict__.items()):
f.write(key + ";")
for i, (key, _) in enumerate(score.items()):
if i < len(score.items()) - 1:
f.write(key + ";")
else:
f.write(key)
f.write('\n')
f.flush()
f.close()
f = open(opt.output_file, 'r')
reader = csv.reader(f, delimiter=";")
column_names = next(reader)
    f.close()
f = open(opt.output_file, 'a')
for i, key in enumerate(column_names):
if i < len(column_names) - 1:
if key in opt.__dict__:
f.write(str(opt.__dict__[key]) + ";")
else:
f.write(str(score[key]) + ";")
else:
if key in opt.__dict__:
f.write(str(opt.__dict__[key]))
else:
f.write(str(score[key]))
f.write('\n')
f.flush()
f.close()
if __name__ == '__main__':
meta_parser, parser = _generate_parsers()
meta_args, remaining = meta_parser.parse_known_args()
start_time = default_timer()
if meta_args.config_file:
lines = meta_args.config_file.readlines()
n_executions = len(lines)
for i, line in enumerate(lines):
if line.startswith('#'): continue
params = line.strip().split()
args = parser.parse_args(remaining + params)
if args.verbose: print("Line args:", args)
if meta_args.dry: continue
try:
run(args)
except Exception as e:
print("Error while executing configuration of line %d:" % (i + 1), file=sys.stderr)
exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
traceback.print_exception(exceptionType, exceptionValue, exceptionTraceback, file=sys.stderr)
progress = int(100 * (i + 1) / n_executions)
sys.stdout.write("\r[%d%%] " % progress)
sys.stdout.flush()
else:
args = parser.parse_args(remaining)
if args.verbose: print("Args:", args)
if not meta_args.dry: run(args)
print('Duration: ' + str(datetime.timedelta(seconds=default_timer() - start_time)))
| 1.773438
| 2
|
param_table.py
|
sharvadim07/DeletionsIslands
| 0
|
12779437
|
<reponame>sharvadim07/DeletionsIslands
import space
import multiDim
class ParTable:
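    """Tab-separated parameter table whose rows may be extended with extra
    feature columns; records the column index of each special feature."""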
def __init__(self, par_table_file_name, special_features_cols, add_features_name_val_list):
self.header = None
self.param_tuple_list = []
self.special_features_cols_idx_dict = {}
with open(par_table_file_name, 'r') as par_table_file:
for i, line in enumerate(par_table_file):
if i == 0:
self.header = tuple(line.strip().split('\t') + [add_feature_name_val[0]
for add_feature_name_val in add_features_name_val_list])
for special_feature in special_features_cols:
self.special_features_cols_idx_dict[special_feature] = \
self.header.index(special_feature)
else:
splitted_line_tuple = tuple(line.strip().split('\t') + [add_feature_name_val[1]
for add_feature_name_val in add_features_name_val_list])
if len(splitted_line_tuple) != len(self.header):
                        raise IOError(
                            'Number of elements in line does not match header size!')
self.param_tuple_list.append(splitted_line_tuple)
def append(self, par_table_file_name, add_features_name_val_list):
with open(par_table_file_name, 'r') as par_table_file:
for i, line in enumerate(par_table_file):
if i != 0:
splitted_line_tuple = tuple(line.strip().split('\t') + [add_feature_name_val[1]
for add_feature_name_val in add_features_name_val_list])
if len(splitted_line_tuple) != len(self.header):
                        raise IOError(
                            'Number of elements in line does not match header size!')
self.param_tuple_list.append(splitted_line_tuple)
def __len__(self):
return len(self.param_tuple_list)
def __getitem__(self, position):
return self.param_tuple_list[position]
def generate_points_tuple(par_table):
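    """Turn every parameter row of the table into a multiDim.MultiDimPoint."""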
points_tuple = tuple([multiDim.MultiDimPoint(param_tuple, par_table.special_features_cols_idx_dict)
for param_tuple in par_table.param_tuple_list])
return points_tuple
# Last run params "-K3", "-A500", "-Z30"
def param_table_processing(args, out_dir, type_of_table):
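    """Load one or more parameter tables and search them for dense parameter
    neighborhoods, then write the neighborhoods found to an output file whose
    name depends on the table type (phydyn, expdyn or single_cell)."""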
import re
import os
if type_of_table == 'phydyn':
spec_features = ['mcmc_step', 'glob_iter',
'residual', 'treeLikelihood']
elif type_of_table == 'expdyn':
spec_features = ['mcmc_step', 'glob_iter', 'residual']
elif type_of_table == 'single_cell':
spec_features = ['DataType', 'Cluster', 'BarCode', 'Cell_Id']
    if args.par_tables is not None:
par_table = None
for i, par_table_file_name in enumerate(args.par_tables.split(',')):
if par_table_file_name == '':
break
if type_of_table == 'phydyn' or type_of_table == 'expdyn':
global_iteration_num = int(
re.search(r'_([0-9]+)', os.path.basename(par_table_file_name)).groups()[0])
add_features_name_val_list = [
['glob_iter', str(global_iteration_num)]]
            elif type_of_table == 'single_cell':
#sample_id = int(re.search(r'_([0-9]+)', os.path.basename(par_table_file_name)).groups()[0])
#add_features_name_val_list = [['sample_id', str(sample_id)]]
add_features_name_val_list = []
if i == 0:
par_table = ParTable(par_table_file_name,
spec_features, add_features_name_val_list)
else:
par_table.append(par_table_file_name,
add_features_name_val_list)
# Determine centers of neighborhoods
points_tuple = generate_points_tuple(par_table)
filter_points_tuple = tuple(list(points_tuple)[::10])
#new_space = space.Space(points_tuple, args.num_K_points, args.A_value, args.zone_size_percent)
if args.do_two_step:
new_space = space.Space(points_tuple=filter_points_tuple,
num_K_points=args.num_K_points,
A=args.A_value,
sort_neighb_by_density=True,
neighb_post_process=False,
zone_size_percent=100,
num_of_random_centers_from_isalnd=1,
second_step=True,
min_neighb_part_from_all=0.0025)
# Find best pvalues for point located in found neighborhoods
new_space.find_pvalue_for_points_in_all_neighb()
import numpy
pvalue_points_in_neighb_med = \
numpy.median(
list(new_space.pvalue_points_in_neighb_dict.values()))
# Filter points by pvalue
new_space.filter_points_by_pvalue(
pvalue_threshold=pvalue_points_in_neighb_med)
os.chdir(out_dir)
# Debug
# print_neighborhoods_all_points('param_neighborhoods_1_single_cell.txt',
# new_space,
# [],
# spec_features,
# par_table.header)
points_tuple_in_cur_neighbs = space.get_points_tuple_in_cur_neghbs(
new_space)
points_tuple_not_in_cur_neighbs = tuple([point for point in points_tuple
if point not in points_tuple_in_cur_neighbs])
two_step_space = space.Space(points_tuple=points_tuple_in_cur_neighbs,
num_K_points=args.num_K_points,
A=args.A_value,
zone_size_percent=100,
neighb_post_process=False,
second_step=True,
sort_neighb_by_density=True,
num_of_random_centers_from_isalnd=1,
min_neighb_part_from_all=0.0025)
two_step_space.find_new_centers_of_neighborhoods()
space.add_all_points(two_step_space.points_tuple,
points_tuple_not_in_cur_neighbs, two_step_space)
two_step_space.get_neighborhoods_without_intersection_by_density()
new_space = two_step_space
elif args.do_mod_step:
new_space = space.Space(points_tuple=filter_points_tuple,
num_K_points=args.num_K_points,
A=args.A_value,
sort_neighb_by_density=True,
neighb_post_process=False,
zone_size_percent=100,
num_of_random_centers_from_isalnd=1,
second_step=True,
min_neighb_part_from_all=0.0025)
# Find down quartile of lambda values of neighborhoods
lambda_val_list = sorted(
[neighb.lambda_val for neighb in new_space.neighb_sort_by_feature_list])
quartile_idx = int((len(lambda_val_list)-1)/4)
down_quartile_lambda = lambda_val_list[quartile_idx]
# Reduce number of redundant neighborhoods
new_space.get_neighborhoods_without_intersection_by_density(only_full_intersection=True)
# Find best pvalues for point located in found neighborhoods
new_space.find_pvalue_for_points_in_all_neighb()
#import numpy
# pvalue_points_in_neighb_med = \
# numpy.median(list(new_space.pvalue_points_in_neighb_dict.values()))
p_val_list = list(new_space.pvalue_points_in_neighb_dict.values())
quartile_idx = int((len(p_val_list)-1)/4)
pvalue_points_up_quartile = sorted(p_val_list, reverse=True)[
quartile_idx]
# Filter points by pvalue
new_space.filter_points_by_pvalue(
pvalue_threshold=pvalue_points_up_quartile)
points_tuple_in_cur_neighbs = space.get_points_tuple_in_cur_neghbs(
new_space)
points_tuple_not_in_cur_neighbs = tuple([point for point in points_tuple
if point not in points_tuple_in_cur_neighbs])
# Find graphs of small circles
new_space.find_small_circles_graphs(points_tuple_not_in_cur_neighbs,
down_quartile_lambda)
else:
new_space = space.Space(points_tuple=points_tuple,
num_K_points=args.num_K_points,
A=args.A_value,
sort_neighb_by_density=True,
neighb_post_process=True,
zone_size_percent=100,
num_of_random_centers_from_isalnd=1,
second_step=True,
min_neighb_part_from_all=0.0025)
os.chdir(out_dir)
if type_of_table == 'phydyn':
print_neighborhoods_all_points('param_dyn_neighborhoods_phy.txt',
new_space,
['residual', 'treeLikelihood'],
spec_features,
par_table.header)
elif type_of_table == 'expdyn':
print_neighborhoods_all_points('param_dyn_neighborhoods_exp.txt',
new_space,
['residual'],
spec_features,
par_table.header)
elif type_of_table == 'single_cell':
print_neighborhoods_all_points('param_neighborhoods_single_cell.txt',
new_space,
[],
spec_features,
par_table.header)
###
# Printing results
def print_neighborhoods_all_points(out_file_name,
cur_space,
avg_spec_features,
spec_features,
table_header):
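    """Write every neighborhood and its member points to a tab-separated file,
    including each neighborhood's p-value, dimension, volume, size and any
    averaged special features."""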
import os
with open(os.path.basename(out_file_name), 'w') as out_file:
for i, neighb in enumerate(cur_space.neighb_sort_by_feature_list):
avg_info = ''
avg_spec_features_val_dict = {}
for avg_spec_feature in avg_spec_features:
avg_spec_feature_val = neighb.calc_neighborhood_avg_spec_feature(
cur_space.points_tuple, avg_spec_feature)
avg_info += ' avg_' + avg_spec_feature + \
'=' + str(avg_spec_feature_val)
avg_spec_features_val_dict[avg_spec_feature] = avg_spec_feature_val
out_file.write('#neighb ' + str(i) + ' info: pValue=' + str(neighb.pvalue) +
' dimension=' + str(neighb.dimension) +
' volume=' + str(neighb.volume) +
' size=' + str(neighb.size) +
str(avg_info) + '\n')
header = [col_head for col_head in table_header if col_head not in spec_features] + \
spec_features + \
['neighb_id', 'neighb_pValue', 'neighb_dimension', 'neighb_volume', 'neighb_size'] + \
['avg_' + avg_spec_feature for avg_spec_feature in avg_spec_features]
out_file.write('\t'.join(header) + '\n')
for point_ind_dist in neighb.closest_points[:neighb.size] + [[neighb.center_point_ind, 0]]:
point = cur_space.points_tuple[point_ind_dist[0]]
line = list(map(str, point.param_tuple)) + \
[str(point.special_features_values_dict[spec_feature]) for spec_feature in spec_features] + \
[str(i), str(neighb.pvalue), str(neighb.dimension), str(neighb.volume), str(neighb.size)] + \
[str(avg_spec_features_val_dict[avg_spec_feature])
for avg_spec_feature in avg_spec_features]
out_file.write('\t'.join(map(str, line)) + '\n')
#space.print_dist_mat(cur_space.dist_matrix, 'dist_matrix.txt')
# Transform dist matrix
#trans_dist_matrix = space.transform_dist_matrix(cur_space.dist_matrix, cur_space.neighb_sort_by_feature_list)
#space.print_dist_mat(trans_dist_matrix, 'transformed_dist_matrix.txt')
###
| 2.59375
| 3
|
h/migrations/versions/6f86796f64e0_add_user_profile_columns.py
|
ssin122/test-h
| 2
|
12779438
|
"""
Add user profile columns
Revision ID: 6f86796f64e0
Revises: <KEY>
Create Date: 2016-07-06 11:28:50.075057
"""
from __future__ import unicode_literals
from alembic import op
import sqlalchemy as sa
revision = '6f86796f64e0'
down_revision = '<KEY>'
def upgrade():
op.add_column('user', sa.Column('display_name', sa.UnicodeText()))
op.add_column('user', sa.Column('description', sa.UnicodeText()))
op.add_column('user', sa.Column('location', sa.UnicodeText()))
op.add_column('user', sa.Column('uri', sa.UnicodeText()))
op.add_column('user', sa.Column('orcid', sa.UnicodeText()))
def downgrade():
op.drop_column('user', 'display_name')
op.drop_column('user', 'description')
op.drop_column('user', 'location')
op.drop_column('user', 'uri')
op.drop_column('user', 'orcid')
| 1.75
| 2
|
pandaharvester/harvestertest/stageOutTest_go_bulk_stager.py
|
tsulaiav/harvester
| 11
|
12779439
|
<filename>pandaharvester/harvestertest/stageOutTest_go_bulk_stager.py
import sys
import os
import os.path
import hashlib
import datetime
import uuid
import random
import string
import time
import threading
import logging
from future.utils import iteritems
from pandaharvester.harvesterconfig import harvester_config
from pandaharvester.harvestercore.job_spec import JobSpec
from pandaharvester.harvestercore.file_spec import FileSpec
from pandaharvester.harvestercore.queue_config_mapper import QueueConfigMapper
from pandaharvester.harvestercore.plugin_factory import PluginFactory
from pandaharvester.harvesterbody.cacher import Cacher
from pandaharvester.harvestercore.db_proxy_pool import DBProxyPool as DBProxy
from pandaharvester.harvestercore.communicator_pool import CommunicatorPool
from pandaharvester.harvestercore import core_utils
#initial variables
fileTableName = 'file_table'
queueName = 'ALCF_Theta'
begin_job_id = 1111
end_job_id = 1113
# connection lock
conLock = threading.Lock()
def dump(obj):
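    """Print every attribute of obj, one per line, for debugging."""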
for attr in dir(obj):
if hasattr( obj, attr ):
print( "obj.%s = %s" % (attr, getattr(obj, attr)))
if len(sys.argv) > 1:
queueName = sys.argv[1]
if len(sys.argv) > 2:
begin_job_id = int(sys.argv[2])
if len(sys.argv) > 3:
end_job_id = int(sys.argv[3])
queueConfigMapper = QueueConfigMapper()
queueConfig = queueConfigMapper.get_queue(queueName)
initial_queueConfig_stager = queueConfig.stager
queueConfig.stager['module'] = 'pandaharvester.harvesterstager.go_bulk_stager'
queueConfig.stager['name'] = 'GlobusBulkStager'
modified_queueConfig_stager = queueConfig.stager
pluginFactory = PluginFactory()
# get stage-out plugin
stagerCore = pluginFactory.get_plugin(queueConfig.stager)
# logger
_logger = core_utils.setup_logger('stageOutTest_go_bulk_stager')
tmpLog = core_utils.make_logger(_logger, method_name='stageOutTest_go_bulk_stager')
tmpLog.debug('start')
# use the imported iteritems helper so this works under both Python 2 and 3
for loggerName, loggerObj in iteritems(logging.Logger.manager.loggerDict):
#print "loggerName - {}".format(loggerName)
if loggerName.startswith('panda.log'):
if len(loggerObj.handlers) == 0:
continue
if loggerName.split('.')[-1] in ['db_proxy']:
continue
stdoutHandler = logging.StreamHandler(sys.stdout)
stdoutHandler.setFormatter(loggerObj.handlers[0].formatter)
loggerObj.addHandler(stdoutHandler)
msgStr = "plugin={0}".format(stagerCore.__class__.__name__)
tmpLog.debug(msgStr)
msgStr = "Initial queueConfig.stager = {}".format(initial_queueConfig_stager)
tmpLog.debug(msgStr)
msgStr = "Modified queueConfig.stager = {}".format(modified_queueConfig_stager)
tmpLog.debug(msgStr)
scope = 'panda'
proxy = DBProxy()
communicator = CommunicatorPool()
cacher = Cacher(communicator, single_mode=True)
cacher.run()
# check if db lock exists
locked = stagerCore.dbInterface.get_object_lock('dummy_id_for_out_0',lock_interval=120)
if not locked:
tmpLog.debug('DB Already locked by another thread')
# now unlock db
unlocked = stagerCore.dbInterface.release_object_lock('dummy_id_for_out_0')
if unlocked :
tmpLog.debug('unlocked db')
else:
tmpLog.debug(' Could not unlock db')
# loop over the job id's creating various JobSpecs
jobSpec_list = []
for job_id in range(begin_job_id,end_job_id+1):
jobSpec = JobSpec()
jobSpec.jobParams = {
'scopeLog': 'panda',
'logFile': 'log',
}
jobSpec.computingSite = queueName
jobSpec.PandaID = job_id
jobSpec.modificationTime = datetime.datetime.now()
realDataset = 'panda.sgotest.' + uuid.uuid4().hex
ddmEndPointOut = 'BNL-OSG2_DATADISK'
outFiles_scope_str = ''
outFiles_str = ''
realDatasets_str = ''
ddmEndPointOut_str = ''
    # create up to 5 files for output
for index in range(random.randint(1, 5)):
fileSpec = FileSpec()
assFileSpec = FileSpec()
fileSpec.fileType = 'es_output'
assFileSpec.lfn = 'panda.sgotest.' + uuid.uuid4().hex
fileSpec.lfn = assFileSpec.lfn + '.gz'
fileSpec.scope = 'panda'
outFiles_scope_str += 'panda,'
outFiles_str += fileSpec.lfn + ','
realDatasets_str += realDataset + ","
ddmEndPointOut_str += ddmEndPointOut + ","
assFileSpec.fileType = 'es_output'
assFileSpec.fsize = random.randint(10, 100)
# create source file
hash = hashlib.md5()
hash.update('%s:%s' % (scope, fileSpec.lfn))
hash_hex = hash.hexdigest()
correctedscope = "/".join(scope.split('.'))
assFileSpec.path = "{endPoint}/{scope}/{hash1}/{hash2}/{lfn}".format(endPoint=queueConfig.stager['Globus_srcPath'],
scope=correctedscope,
hash1=hash_hex[0:2],
hash2=hash_hex[2:4],
lfn=assFileSpec.lfn)
if not os.path.exists(os.path.dirname(assFileSpec.path)):
tmpLog.debug("os.makedirs({})".format(os.path.dirname(assFileSpec.path)))
os.makedirs(os.path.dirname(assFileSpec.path))
oFile = open(assFileSpec.path, 'w')
oFile.write(''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(assFileSpec.fsize)))
oFile.close()
fileSpec.path = assFileSpec.path + '.gz'
fileSpec.add_associated_file(assFileSpec)
#print "dump(fileSpec)"
#dump(fileSpec)
# add output file to jobSpec
jobSpec.add_out_file(fileSpec)
#
tmpLog.debug("file to transfer - {}".format(fileSpec.path))
#print "dump(jobSpec)"
#dump(jobSpec)
# add log file info
outFiles_str += 'log'
realDatasets_str += 'log.'+ uuid.uuid4().hex
ddmEndPointOut_str += 'MWT2-UC_DATADISK'
# remove final ","
outFiles_scope_str = outFiles_scope_str[:-1]
jobSpec.jobParams['scopeOut'] = outFiles_scope_str
jobSpec.jobParams['outFiles'] = outFiles_str
jobSpec.jobParams['realDatasets'] = realDatasets_str
jobSpec.jobParams['ddmEndPointOut'] = ddmEndPointOut_str
msgStr = "jobSpec.jobParams ={}".format(jobSpec.jobParams)
tmpLog.debug(msgStr)
msgStr = "len(jobSpec.get_output_file_attributes()) = {0} type - {1}".format(len(jobSpec.get_output_file_attributes()),type(jobSpec.get_output_file_attributes()))
tmpLog.debug(msgStr)
    for key, value in iteritems(jobSpec.get_output_file_attributes()):
msgStr = "output file attributes - pre DB {0} {1}".format(key,value)
tmpLog.debug(msgStr)
jobSpec_list.append(jobSpec)
# now load into DB JobSpec's and output FileSpec's from jobSpec_list
tmpStat = proxy.insert_jobs(jobSpec_list)
if tmpStat:
msgStr = "OK Loaded jobs into DB"
tmpLog.debug(msgStr)
else:
msgStr = "NG Could not load jobs into DB"
tmpLog.debug(msgStr)
tmpStat = proxy.insert_files(jobSpec_list)
if tmpStat:
msgStr = "OK Loaded files into DB"
tmpLog.debug(msgStr)
else:
msgStr = "NG Could not load files into DB"
tmpLog.debug(msgStr)
# Now loop over the jobSpec's
for jobSpec in jobSpec_list:
    # print out jobSpec PandaID
    msgStr = "jobSpec PandaID - {}".format(jobSpec.PandaID)
    tmpLog.debug(msgStr)
    msgStr = "testing zip"
    tmpLog.debug(msgStr)
    tmpStat, tmpOut = stagerCore.zip_output(jobSpec)
if tmpStat:
msgStr = " OK"
tmpLog.debug(msgStr)
else:
msgStr = " NG {0}".format(tmpOut)
tmpLog.debug(msgStr)
msgStr = "testing trigger_stage_out"
tmpLog.debug(msgStr)
tmpStat, tmpOut = stagerCore.trigger_stage_out(jobSpec)
if tmpStat:
msgStr = " OK "
tmpLog.debug(msgStr)
    elif tmpStat is None:
msgStr = " Temporary failure NG {0}".format(tmpOut)
tmpLog.debug(msgStr)
elif not tmpStat:
msgStr = " NG {0}".format(tmpOut)
tmpLog.debug(msgStr)
sys.exit(1)
print('')
# get the files with the group_id and print out
msgStr = "dummy_transfer_id = {}".format(stagerCore.get_dummy_transfer_id())
files = proxy.get_files_with_group_id(stagerCore.get_dummy_transfer_id())
files = stagerCore.dbInterface.get_files_with_group_id(stagerCore.get_dummy_transfer_id())
tmpLog.debug("files - {}".format(files))
msgStr = "checking status for transfer and perhaps ultimately triggering the transfer"
tmpLog.debug(msgStr)
tmpStat, tmpOut = stagerCore.check_stage_out_status(jobSpec)
if tmpStat:
msgStr = " OK"
tmpLog.debug(msgStr)
elif tmpStat is None:
msgStr = " Temporary failure NG {0}".format(tmpOut)
tmpLog.debug(msgStr)
elif not tmpStat:
msgStr = " NG {0}".format(tmpOut)
tmpLog.debug(msgStr)
# sleep for 10 minutes 1 second
msgStr = "Sleep for 601 seconds"
#msgStr = "Sleep for 181 seconds"
tmpLog.debug(msgStr)
#time.sleep(181)
time.sleep(601)
msgStr = "now check the jobs"
tmpLog.debug(msgStr)
for jobSpec in jobSpec_list:
    # print out jobSpec PandaID
msgStr = "jobSpec PandaID - {}".format(jobSpec.PandaID)
tmpLog.debug(msgStr)
msgStr = "checking status for transfer and perhaps ultimately triggering the transfer"
tmpStat, tmpOut = stagerCore.check_stage_out_status(jobSpec)
if tmpStat:
msgStr = " OK"
tmpLog.debug(msgStr)
    elif tmpStat is None:
msgStr = " Temporary failure NG {0}".format(tmpOut)
tmpLog.debug(msgStr)
elif not tmpStat:
msgStr = " NG {0}".format(tmpOut)
tmpLog.debug(msgStr)
# sleep for 3 minutes
msgStr = "Sleep for 180 seconds"
tmpLog.debug(msgStr)
time.sleep(180)
msgStr = "now check the jobs"
tmpLog.debug(msgStr)
for jobSpec in jobSpec_list:
    # print out jobSpec PandaID
msgStr = "jobSpec PandaID - {}".format(jobSpec.PandaID)
tmpLog.debug(msgStr)
msgStr = "checking status for transfer and perhaps ultimately triggering the transfer"
tmpStat, tmpOut = stagerCore.check_stage_out_status(jobSpec)
if tmpStat:
msgStr = " OK"
tmpLog.debug(msgStr)
    elif tmpStat is None:
msgStr = " Temporary failure NG {0}".format(tmpOut)
tmpLog.debug(msgStr)
elif not tmpStat:
msgStr = " NG {0}".format(tmpOut)
tmpLog.debug(msgStr)
| 1.898438
| 2
|
generate/python/__init__.py
|
Luthaf/Chemharp-bindgen
| 0
|
12779440
|
<gh_stars>0
# -*- coding: utf-8 -*-
"""Generate FFI for Python ctypes module"""
from .ffi import write_ffi
| 1
| 1
|
tests/unit_tests/running_modes/reinforcement_learning/reaction_filters/__init__.py
|
marco-foscato/Lib-INVENT
| 26
|
12779441
|
from tests.unit_tests.running_modes.reinforcement_learning.reaction_filters.test_non_selective_reaction_filter import \
TestNonSelectiveReactionFilters, TestNonSelectiveReactionFiltersNoReaction
from tests.unit_tests.running_modes.reinforcement_learning.reaction_filters.test_selective_reaction_filter import \
TestSelectiveReactionFilter, TestSelectiveReactionFilterSingleReaction
| 1.109375
| 1
|
vertexPlus/apps.py
|
FelixTheC/onlineOrderForm
| 0
|
12779442
|
from django.apps import AppConfig
class VertexplusConfig(AppConfig):
name = 'vertexPlus'
| 1.0625
| 1
|
ECD_control/__init__.py
|
AndrewOriani/ECD_control
| 0
|
12779443
|
<reponame>AndrewOriani/ECD_control
from . import ECD_optimization
from . import ECD_pulse_construction
from .ECD_pulse_construction import FakeStorage, FakeQubit, FakePulse
from .ECD_optimization import BatchOptimizer, OptimizationSweepsAnalysis, OptimizationAnalysis, VisualizationMixin, tf_quantum, optimization_sweeps
__all__=['BatchOptimizer', 'OptimizationSweepsAnalysis', 'OptimizationAnalysis', 'VisualizationMixin', 'tf_quantum', 'optimization_sweeps', 'ECD_pulse_construction', 'FakeStorage', 'FakeQubit', 'FakePulse']
| 0.925781
| 1
|
src/notepad/forms.py
|
vijaykumarmcp/StockChartVisual
| 0
|
12779444
|
from django import forms
from .models import Note
class NoteModelForm(forms.ModelForm):
    class Meta:
        model = Note
        fields = ['title', 'url', 'image']
| 1.929688
| 2
|
main.py
|
hertai86/GIEAA
| 0
|
12779445
|
<filename>main.py<gh_stars>0
import csv
import os
import matplotlib.pyplot as plt
from mpl_toolkits.axisartist.parasite_axes import HostAxes, ParasiteAxes
gdp_list = {}
edu_list = {}
interval = 0
normalize_factor = 2e10
firstDSpath = os.path.join(os.getcwd(), "input", "GDP by Country.csv")
secondDSpath = os.path.join(os.getcwd(), "input", "BL2013_MF1599_v2.2.csv")
results_path = os.path.join(os.getcwd(), "output", "_results.csv")
with open(firstDSpath, 'r') as gdp_csv_file:
gdp_csv_reader = csv.DictReader(gdp_csv_file)
    for line in gdp_csv_reader:
        if line['\ufeff"Country Name"'] == "Austria":
            # Sum GDP over successive five-year intervals
            # (1961-1965 ... 2006-2010), keyed by the interval's final year.
            for endofinterval in range(1965, 2011, 5):
                interval = 0
                for count in range(endofinterval - 4, endofinterval + 1):
                    interval += float(line[str(count)])
                gdp_list[endofinterval] = interval / normalize_factor
with open(secondDSpath, 'r') as edu_csv_file:
edu_csv_reader = csv.DictReader(edu_csv_file)
    for line in edu_csv_reader:
        for c in range(1965, 2011):
            if line['country'] == "Austria" and str(c) == line['year']:
                edu_list[c] = float(line['lhc']) / float(line['lh']) * 100
names = list(edu_list.keys())
gdp_values = list(gdp_list.values())
edu_values = list(edu_list.values())
with open(results_path, 'w') as f:
    f.write("year_interval,gdp_amount,edu_att_rate\n")
    for name, gdp, edu in zip(names, gdp_values, edu_values):
        f.write("{0},{1},{2}\n".format(name, gdp, edu))
fig = plt.figure(1)
host = HostAxes(fig, [0.1, 0.1, 0.8, 0.8])
par1 = ParasiteAxes(host, sharex=host)
host.parasites.append(par1)
host.set_ylabel("Density")
host.set_xlabel("Distance")
host.axis["right"].set_visible(True)
par1.axis["right"].set_visible(True)
par1.set_ylabel("Temperature")
par1.axis["right"].major_ticklabels.set_visible(True)
par1.axis["right"].label.set_visible(True)
fig.add_axes(host)
host.set_xlim(1975, 2010)
host.set_ylim(0, 100)
host.set_xlabel("GDP and Success rate")
host.set_ylabel("GDP per five years (*2*10^10")
par1.set_ylabel("Success Rate (%)")
p1, = host.plot(names, gdp_values, label="GDP")
p2, = par1.plot(names, edu_values, label="Success Rate")
par1.set_ylim(0, 100)
host.legend()
host.axis["left"].label.set_color(p1.get_color())
par1.axis["right"].label.set_color(p2.get_color())
plt.show()
| 2.734375
| 3
|
test/basic/kafka_test/producer.py
|
KentWangYQ/mongo2es
| 5
|
12779446
|
<gh_stars>1-10
from datetime import datetime
from kafka.errors import KafkaError
from common.kafka.producer import Producer
producer = Producer()
future = producer.send('kent_topic', {'now': datetime.now().strftime('%Y-%m-%d %H:%M:%S')})
try:
    record_metadata = future.get(timeout=10)
except KafkaError as ex:
    # The produce request failed; report and stop.
    print(ex)
else:
    print(record_metadata.topic)
    print(record_metadata.partition)
    print(record_metadata.offset)
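# --- Illustrative sketch (not part of the original test) ---
# kafka-python futures also support non-blocking callbacks; this assumes
# Producer.send() returns a kafka-python Future, as the blocking
# future.get() call above suggests.
def on_send_success(metadata):
    print(metadata.topic, metadata.partition, metadata.offset)

def on_send_error(exc):
    print('send failed:', exc)

producer.send('kent_topic', {'now': datetime.now().strftime('%Y-%m-%d %H:%M:%S')}) \
    .add_callback(on_send_success) \
    .add_errback(on_send_error)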
| 2.546875
| 3
|
client.py
|
blockchainhelppro/Security-Flag-Installation
| 0
|
12779447
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009-2016 Red Hat, Inc.
#
# Authors:
# <NAME> <<EMAIL>>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from gi.repository import GLib, GObject
# force use of pygobject3 in python-slip
import sys
sys.modules['gobject'] = GObject
import dbus.mainloop.glib
import slip.dbus
from decorator import decorator
from firewall import config
from firewall.core.base import DEFAULT_ZONE_TARGET
from firewall.dbus_utils import dbus_to_python
from firewall.functions import b2u
from firewall.core.rich import Rich_Rule
from firewall import errors
from firewall.errors import FirewallError
import dbus
import traceback
exception_handler = None
not_authorized_loop = False
@decorator
def handle_exceptions(func, *args, **kwargs):
"""Decorator to handle exceptions
"""
authorized = False
while not authorized:
try:
return func(*args, **kwargs)
except dbus.exceptions.DBusException as e:
dbus_message = e.get_dbus_message() # returns unicode
dbus_name = e.get_dbus_name()
if not exception_handler:
raise
if "NotAuthorizedException" in dbus_name:
exception_handler("NotAuthorizedException")
elif "org.freedesktop.DBus.Error" in dbus_name:
# dbus error, try again
exception_handler(dbus_message)
else:
authorized = True
if dbus_message:
exception_handler(dbus_message)
else:
exception_handler(b2u(str(e)))
except FirewallError as e:
if not exception_handler:
raise
else:
exception_handler(b2u(str(e)))
except Exception:
if not exception_handler:
raise
else:
exception_handler(b2u(traceback.format_exc()))
if not not_authorized_loop:
break
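# --- Illustrative sketch (not part of the original module) ---
# handle_exceptions reports through the module-level exception_handler hook;
# a caller would typically install one along these lines (the reporter
# function below is hypothetical):
def _example_install_exception_handler():
    global exception_handler
    def _report(msg):
        # Print the error instead of letting handle_exceptions re-raise it.
        sys.stderr.write("firewall error: %s\n" % msg)
    exception_handler = _report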
# zone config settings
class FirewallClientZoneSettings(object):
@handle_exceptions
def __init__(self, settings = None):
if settings:
self.settings = settings
else:
self.settings = ["", "", "", False, DEFAULT_ZONE_TARGET, [], [],
[], False, [], [], [], [], [], [], False]
@handle_exceptions
def __repr__(self):
return '%s(%r)' % (self.__class__, self.settings)
@handle_exceptions
def getVersion(self):
return self.settings[0]
@handle_exceptions
def setVersion(self, version):
self.settings[0] = version
@handle_exceptions
def getShort(self):
return self.settings[1]
@handle_exceptions
def setShort(self, short):
self.settings[1] = short
@handle_exceptions
def getDescription(self):
return self.settings[2]
@handle_exceptions
def setDescription(self, description):
self.settings[2] = description
# self.settings[3] was used for 'immutable'
@handle_exceptions
def getTarget(self):
return self.settings[4] if self.settings[4] != DEFAULT_ZONE_TARGET else "default"
@handle_exceptions
def setTarget(self, target):
self.settings[4] = target if target != "default" else DEFAULT_ZONE_TARGET
@handle_exceptions
def getServices(self):
return self.settings[5]
@handle_exceptions
def setServices(self, services):
self.settings[5] = services
@handle_exceptions
def addService(self, service):
if service not in self.settings[5]:
self.settings[5].append(service)
else:
raise FirewallError(errors.ALREADY_ENABLED, service)
@handle_exceptions
def removeService(self, service):
if service in self.settings[5]:
self.settings[5].remove(service)
else:
raise FirewallError(errors.NOT_ENABLED, service)
@handle_exceptions
def queryService(self, service):
return service in self.settings[5]
@handle_exceptions
def getPorts(self):
return self.settings[6]
@handle_exceptions
def setPorts(self, ports):
self.settings[6] = ports
@handle_exceptions
def addPort(self, port, protocol):
if (port,protocol) not in self.settings[6]:
self.settings[6].append((port,protocol))
else:
raise FirewallError(errors.ALREADY_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def removePort(self, port, protocol):
if (port,protocol) in self.settings[6]:
self.settings[6].remove((port,protocol))
else:
raise FirewallError(errors.NOT_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def queryPort(self, port, protocol):
return (port,protocol) in self.settings[6]
@handle_exceptions
def getProtocols(self):
return self.settings[13]
@handle_exceptions
def setProtocols(self, protocols):
self.settings[13] = protocols
@handle_exceptions
def addProtocol(self, protocol):
if protocol not in self.settings[13]:
self.settings[13].append(protocol)
else:
raise FirewallError(errors.ALREADY_ENABLED, protocol)
@handle_exceptions
def removeProtocol(self, protocol):
if protocol in self.settings[13]:
self.settings[13].remove(protocol)
else:
raise FirewallError(errors.NOT_ENABLED, protocol)
@handle_exceptions
def queryProtocol(self, protocol):
return protocol in self.settings[13]
@handle_exceptions
def getSourcePorts(self):
return self.settings[14]
@handle_exceptions
def setSourcePorts(self, ports):
self.settings[14] = ports
@handle_exceptions
def addSourcePort(self, port, protocol):
if (port,protocol) not in self.settings[14]:
self.settings[14].append((port,protocol))
else:
raise FirewallError(errors.ALREADY_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def removeSourcePort(self, port, protocol):
if (port,protocol) in self.settings[14]:
self.settings[14].remove((port,protocol))
else:
raise FirewallError(errors.NOT_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def querySourcePort(self, port, protocol):
return (port,protocol) in self.settings[14]
@handle_exceptions
def getIcmpBlocks(self):
return self.settings[7]
@handle_exceptions
def setIcmpBlocks(self, icmpblocks):
self.settings[7] = icmpblocks
@handle_exceptions
def addIcmpBlock(self, icmptype):
if icmptype not in self.settings[7]:
self.settings[7].append(icmptype)
else:
raise FirewallError(errors.ALREADY_ENABLED, icmptype)
@handle_exceptions
def removeIcmpBlock(self, icmptype):
if icmptype in self.settings[7]:
self.settings[7].remove(icmptype)
else:
raise FirewallError(errors.NOT_ENABLED, icmptype)
@handle_exceptions
def queryIcmpBlock(self, icmptype):
return icmptype in self.settings[7]
@handle_exceptions
def getIcmpBlockInversion(self):
return self.settings[15]
@handle_exceptions
def setIcmpBlockInversion(self, flag):
self.settings[15] = flag
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addIcmpBlockInversion(self):
if not self.settings[15]:
self.settings[15] = True
else:
FirewallError(errors.ALREADY_ENABLED, "icmp-block-inversion")
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeIcmpBlockInversion(self):
if self.settings[15]:
self.settings[15] = False
else:
FirewallError(errors.NOT_ENABLED, "icmp-block-inversion")
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryIcmpBlockInversion(self):
return self.settings[15]
@handle_exceptions
def getMasquerade(self):
return self.settings[8]
@handle_exceptions
def setMasquerade(self, masquerade):
self.settings[8] = masquerade
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addMasquerade(self):
if not self.settings[8]:
self.settings[8] = True
else:
FirewallError(errors.ALREADY_ENABLED, "masquerade")
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeMasquerade(self):
if self.settings[8]:
self.settings[8] = False
else:
FirewallError(errors.NOT_ENABLED, "masquerade")
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryMasquerade(self):
return self.settings[8]
@handle_exceptions
def getForwardPorts(self):
return self.settings[9]
@handle_exceptions
def setForwardPorts(self, ports):
self.settings[9] = ports
@handle_exceptions
def addForwardPort(self, port, protocol, to_port, to_addr):
if to_port is None:
to_port = ''
if to_addr is None:
to_addr = ''
if (port,protocol,to_port,to_addr) not in self.settings[9]:
self.settings[9].append((port,protocol,to_port,to_addr))
else:
raise FirewallError(errors.ALREADY_ENABLED, "'%s:%s:%s:%s'" % \
(port, protocol, to_port, to_addr))
@handle_exceptions
def removeForwardPort(self, port, protocol, to_port, to_addr):
if to_port is None:
to_port = ''
if to_addr is None:
to_addr = ''
if (port,protocol,to_port,to_addr) in self.settings[9]:
self.settings[9].remove((port,protocol,to_port,to_addr))
else:
raise FirewallError(errors.NOT_ENABLED, "'%s:%s:%s:%s'" % \
(port, protocol, to_port, to_addr))
@handle_exceptions
def queryForwardPort(self, port, protocol, to_port, to_addr):
if to_port is None:
to_port = ''
if to_addr is None:
to_addr = ''
return (port,protocol,to_port,to_addr) in self.settings[9]
@handle_exceptions
def getInterfaces(self):
return self.settings[10]
@handle_exceptions
def setInterfaces(self, interfaces):
self.settings[10] = interfaces
@handle_exceptions
def addInterface(self, interface):
if interface not in self.settings[10]:
self.settings[10].append(interface)
else:
raise FirewallError(errors.ALREADY_ENABLED, interface)
@handle_exceptions
def removeInterface(self, interface):
if interface in self.settings[10]:
self.settings[10].remove(interface)
else:
raise FirewallError(errors.NOT_ENABLED, interface)
@handle_exceptions
def queryInterface(self, interface):
return interface in self.settings[10]
@handle_exceptions
def getSources(self):
return self.settings[11]
@handle_exceptions
def setSources(self, sources):
self.settings[11] = sources
@handle_exceptions
def addSource(self, source):
if source not in self.settings[11]:
self.settings[11].append(source)
else:
raise FirewallError(errors.ALREADY_ENABLED, source)
@handle_exceptions
def removeSource(self, source):
if source in self.settings[11]:
self.settings[11].remove(source)
else:
raise FirewallError(errors.NOT_ENABLED, source)
@handle_exceptions
def querySource(self, source):
return source in self.settings[11]
@handle_exceptions
def getRichRules(self):
return self.settings[12]
@handle_exceptions
def setRichRules(self, rules):
rules = [ str(Rich_Rule(rule_str=r)) for r in rules ]
self.settings[12] = rules
@handle_exceptions
def addRichRule(self, rule):
rule = str(Rich_Rule(rule_str=rule))
if rule not in self.settings[12]:
self.settings[12].append(rule)
else:
raise FirewallError(errors.ALREADY_ENABLED, rule)
@handle_exceptions
def removeRichRule(self, rule):
rule = str(Rich_Rule(rule_str=rule))
if rule in self.settings[12]:
self.settings[12].remove(rule)
else:
raise FirewallError(errors.NOT_ENABLED, rule)
@handle_exceptions
def queryRichRule(self, rule):
rule = str(Rich_Rule(rule_str=rule))
return rule in self.settings[12]
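# --- Illustrative sketch (not part of the original module) ---
# Settings objects are plain local containers; a typical flow is to build one
# and then push it to the daemon, e.g. via FirewallClientConfigZone.update()
# defined below. The values used here are examples only.
def _example_build_zone_settings():
    s = FirewallClientZoneSettings()
    s.setShort("lab")
    s.setDescription("example zone")
    s.addService("ssh")
    s.addPort("8080", "tcp")
    return s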
# zone config
class FirewallClientConfigZone(object):
def __init__(self, bus, path):
self.bus = bus
self.path = path
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE, path)
self.fw_zone = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
self.fw_properties = dbus.Interface(
self.dbus_obj, dbus_interface='org.freedesktop.DBus.Properties')
#TODO: check interface version and revision (need to match client
# version)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_property(self, prop):
return dbus_to_python(self.fw_properties.Get(
config.dbus.DBUS_INTERFACE_CONFIG_ZONE, prop))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_properties(self):
return dbus_to_python(self.fw_properties.GetAll(
config.dbus.DBUS_INTERFACE_CONFIG_ZONE))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def set_property(self, prop, value):
self.fw_properties.Set(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
prop, value)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSettings(self):
return FirewallClientZoneSettings(list(dbus_to_python(\
self.fw_zone.getSettings())))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def update(self, settings):
self.fw_zone.update(tuple(settings.settings))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def loadDefaults(self):
self.fw_zone.loadDefaults()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def remove(self):
self.fw_zone.remove()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def rename(self, name):
self.fw_zone.rename(name)
# version
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getVersion(self):
return self.fw_zone.getVersion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setVersion(self, version):
self.fw_zone.setVersion(version)
# short
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getShort(self):
return self.fw_zone.getShort()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setShort(self, short):
self.fw_zone.setShort(short)
# description
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDescription(self):
return self.fw_zone.getDescription()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDescription(self, description):
self.fw_zone.setDescription(description)
# target
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getTarget(self):
return self.fw_zone.getTarget()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setTarget(self, target):
self.fw_zone.setTarget(target)
# service
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getServices(self):
return self.fw_zone.getServices()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setServices(self, services):
self.fw_zone.setServices(services)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addService(self, service):
self.fw_zone.addService(service)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeService(self, service):
self.fw_zone.removeService(service)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryService(self, service):
return self.fw_zone.queryService(service)
# port
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getPorts(self):
return self.fw_zone.getPorts()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setPorts(self, ports):
self.fw_zone.setPorts(ports)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addPort(self, port, protocol):
self.fw_zone.addPort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removePort(self, port, protocol):
self.fw_zone.removePort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryPort(self, port, protocol):
return self.fw_zone.queryPort(port, protocol)
# protocol
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getProtocols(self):
return self.fw_zone.getProtocols()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setProtocols(self, protocols):
self.fw_zone.setProtocols(protocols)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addProtocol(self, protocol):
self.fw_zone.addProtocol(protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeProtocol(self, protocol):
self.fw_zone.removeProtocol(protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryProtocol(self, protocol):
return self.fw_zone.queryProtocol(protocol)
# source-port
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSourcePorts(self):
return self.fw_zone.getSourcePorts()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setSourcePorts(self, ports):
self.fw_zone.setSourcePorts(ports)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addSourcePort(self, port, protocol):
self.fw_zone.addSourcePort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeSourcePort(self, port, protocol):
self.fw_zone.removeSourcePort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def querySourcePort(self, port, protocol):
return self.fw_zone.querySourcePort(port, protocol)
# icmp block
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIcmpBlocks(self):
return self.fw_zone.getIcmpBlocks()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setIcmpBlocks(self, icmptypes):
self.fw_zone.setIcmpBlocks(icmptypes)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addIcmpBlock(self, icmptype):
self.fw_zone.addIcmpBlock(icmptype)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeIcmpBlock(self, icmptype):
self.fw_zone.removeIcmpBlock(icmptype)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryIcmpBlock(self, icmptype):
return self.fw_zone.queryIcmpBlock(icmptype)
# icmp-block-inversion
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIcmpBlockInversion(self):
return self.fw_zone.getIcmpBlockInversion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setIcmpBlockInversion(self, inversion):
self.fw_zone.setIcmpBlockInversion(inversion)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addIcmpBlockInversion(self):
self.fw_zone.addIcmpBlockInversion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeIcmpBlockInversion(self):
self.fw_zone.removeIcmpBlockInversion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryIcmpBlockInversion(self):
return self.fw_zone.queryIcmpBlockInversion()
# masquerade
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getMasquerade(self):
return self.fw_zone.getMasquerade()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setMasquerade(self, masquerade):
self.fw_zone.setMasquerade(masquerade)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addMasquerade(self):
self.fw_zone.addMasquerade()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeMasquerade(self):
self.fw_zone.removeMasquerade()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryMasquerade(self):
return self.fw_zone.queryMasquerade()
# forward port
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getForwardPorts(self):
return self.fw_zone.getForwardPorts()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setForwardPorts(self, ports):
self.fw_zone.setForwardPorts(ports)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addForwardPort(self, port, protocol, toport, toaddr):
if toport is None:
toport = ''
if toaddr is None:
toaddr = ''
self.fw_zone.addForwardPort(port, protocol, toport, toaddr)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeForwardPort(self, port, protocol, toport, toaddr):
if toport is None:
toport = ''
if toaddr is None:
toaddr = ''
self.fw_zone.removeForwardPort(port, protocol, toport, toaddr)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryForwardPort(self, port, protocol, toport, toaddr):
if toport is None:
toport = ''
if toaddr is None:
toaddr = ''
return self.fw_zone.queryForwardPort(port, protocol, toport, toaddr)
# interface
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getInterfaces(self):
return self.fw_zone.getInterfaces()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setInterfaces(self, interfaces):
self.fw_zone.setInterfaces(interfaces)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addInterface(self, interface):
self.fw_zone.addInterface(interface)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeInterface(self, interface):
self.fw_zone.removeInterface(interface)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryInterface(self, interface):
return self.fw_zone.queryInterface(interface)
# source
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSources(self):
return self.fw_zone.getSources()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setSources(self, sources):
self.fw_zone.setSources(sources)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addSource(self, source):
self.fw_zone.addSource(source)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeSource(self, source):
self.fw_zone.removeSource(source)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def querySource(self, source):
return self.fw_zone.querySource(source)
# rich rule
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getRichRules(self):
return self.fw_zone.getRichRules()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setRichRules(self, rules):
self.fw_zone.setRichRules(rules)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addRichRule(self, rule):
self.fw_zone.addRichRule(rule)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeRichRule(self, rule):
self.fw_zone.removeRichRule(rule)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryRichRule(self, rule):
return self.fw_zone.queryRichRule(rule)
# service config settings
class FirewallClientServiceSettings(object):
@handle_exceptions
def __init__(self, settings=None):
if settings:
self.settings = settings
else:
self.settings = ["", "", "", [], [], {}, [], []]
@handle_exceptions
def __repr__(self):
return '%s(%r)' % (self.__class__, self.settings)
@handle_exceptions
def getVersion(self):
return self.settings[0]
@handle_exceptions
def setVersion(self, version):
self.settings[0] = version
@handle_exceptions
def getShort(self):
return self.settings[1]
@handle_exceptions
def setShort(self, short):
self.settings[1] = short
@handle_exceptions
def getDescription(self):
return self.settings[2]
@handle_exceptions
def setDescription(self, description):
self.settings[2] = description
@handle_exceptions
def getPorts(self):
return self.settings[3]
@handle_exceptions
def setPorts(self, ports):
self.settings[3] = ports
@handle_exceptions
def addPort(self, port, protocol):
if (port,protocol) not in self.settings[3]:
self.settings[3].append((port,protocol))
else:
raise FirewallError(errors.ALREADY_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def removePort(self, port, protocol):
if (port,protocol) in self.settings[3]:
self.settings[3].remove((port,protocol))
else:
raise FirewallError(errors.NOT_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def queryPort(self, port, protocol):
return (port,protocol) in self.settings[3]
@handle_exceptions
def getProtocols(self):
return self.settings[6]
@handle_exceptions
def setProtocols(self, protocols):
self.settings[6] = protocols
@handle_exceptions
def addProtocol(self, protocol):
if protocol not in self.settings[6]:
self.settings[6].append(protocol)
else:
raise FirewallError(errors.ALREADY_ENABLED, protocol)
@handle_exceptions
def removeProtocol(self, protocol):
if protocol in self.settings[6]:
self.settings[6].remove(protocol)
else:
raise FirewallError(errors.NOT_ENABLED, protocol)
@handle_exceptions
def queryProtocol(self, protocol):
return protocol in self.settings[6]
@handle_exceptions
def getSourcePorts(self):
return self.settings[7]
@handle_exceptions
def setSourcePorts(self, ports):
self.settings[7] = ports
@handle_exceptions
def addSourcePort(self, port, protocol):
if (port,protocol) not in self.settings[7]:
self.settings[7].append((port,protocol))
else:
raise FirewallError(errors.ALREADY_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def removeSourcePort(self, port, protocol):
if (port,protocol) in self.settings[7]:
self.settings[7].remove((port,protocol))
else:
raise FirewallError(errors.NOT_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def querySourcePort(self, port, protocol):
return (port,protocol) in self.settings[7]
@handle_exceptions
def getModules(self):
return self.settings[4]
@handle_exceptions
def setModules(self, modules):
self.settings[4] = modules
@handle_exceptions
def addModule(self, module):
if module not in self.settings[4]:
self.settings[4].append(module)
else:
raise FirewallError(errors.ALREADY_ENABLED, module)
@handle_exceptions
def removeModule(self, module):
if module in self.settings[4]:
self.settings[4].remove(module)
else:
raise FirewallError(errors.NOT_ENABLED, module)
@handle_exceptions
def queryModule(self, module):
return module in self.settings[4]
@handle_exceptions
def getDestinations(self):
return self.settings[5]
@handle_exceptions
def setDestinations(self, destinations):
self.settings[5] = destinations
@handle_exceptions
def setDestination(self, dest_type, address):
if dest_type not in self.settings[5] or \
self.settings[5][dest_type] != address:
self.settings[5][dest_type] = address
else:
raise FirewallError(errors.ALREADY_ENABLED, "'%s:%s'" % \
(dest_type, address))
@handle_exceptions
def removeDestination(self, dest_type, address=None):
if dest_type in self.settings[5]:
if address is not None and self.settings[5][dest_type] != address:
raise FirewallError(errors.NOT_ENABLED, "'%s:%s'" % \
(dest_type, address))
del self.settings[5][dest_type]
else:
raise FirewallError(errors.NOT_ENABLED, "'%s'" % dest_type)
@handle_exceptions
def queryDestination(self, dest_type, address):
return (dest_type in self.settings[5] and \
address == self.settings[5][dest_type])
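# --- Illustrative sketch (not part of the original module) ---
# Composing service settings locally; the destination dict is keyed by
# address family. Values are examples only.
def _example_build_service_settings():
    s = FirewallClientServiceSettings()
    s.setShort("myapp")
    s.addPort("9000", "tcp")
    s.addProtocol("sctp")
    s.setDestination("ipv4", "192.0.2.0/24")
    return s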
# ipset config settings
class FirewallClientIPSetSettings(object):
@handle_exceptions
def __init__(self, settings=None):
if settings:
self.settings = settings
else:
self.settings = ["", "", "", "", {}, []]
@handle_exceptions
def __repr__(self):
return '%s(%r)' % (self.__class__, self.settings)
@handle_exceptions
def getVersion(self):
return self.settings[0]
@handle_exceptions
def setVersion(self, version):
self.settings[0] = version
@handle_exceptions
def getShort(self):
return self.settings[1]
@handle_exceptions
def setShort(self, short):
self.settings[1] = short
@handle_exceptions
def getDescription(self):
return self.settings[2]
@handle_exceptions
def setDescription(self, description):
self.settings[2] = description
@handle_exceptions
def getType(self):
return self.settings[3]
@handle_exceptions
def setType(self, ipset_type):
self.settings[3] = ipset_type
@handle_exceptions
def getOptions(self):
return self.settings[4]
@handle_exceptions
def setOptions(self, options):
self.settings[4] = options
@handle_exceptions
def addOption(self, key, value):
if key not in self.settings[4] or self.settings[4][key] != value:
self.settings[4][key] = value
else:
raise FirewallError(errors.ALREADY_ENABLED, "'%s=%s'" % (key,value)
if value else key)
@handle_exceptions
def removeOption(self, key):
if key in self.settings[4]:
del self.settings[4][key]
else:
raise FirewallError(errors.NOT_ENABLED, key)
@handle_exceptions
def queryOption(self, key, value):
return key in self.settings[4] and self.settings[4][key] == value
@handle_exceptions
def getEntries(self):
return self.settings[5]
@handle_exceptions
def setEntries(self, entries):
if "timeout" in self.settings[4] and \
self.settings[4]["timeout"] != "0":
raise FirewallError(errors.IPSET_WITH_TIMEOUT)
self.settings[5] = entries
@handle_exceptions
def addEntry(self, entry):
if "timeout" in self.settings[4] and \
self.settings[4]["timeout"] != "0":
raise FirewallError(errors.IPSET_WITH_TIMEOUT)
if entry not in self.settings[5]:
self.settings[5].append(entry)
else:
raise FirewallError(errors.ALREADY_ENABLED, entry)
@handle_exceptions
def removeEntry(self, entry):
if "timeout" in self.settings[4] and \
self.settings[4]["timeout"] != "0":
raise FirewallError(errors.IPSET_WITH_TIMEOUT)
if entry in self.settings[5]:
self.settings[5].remove(entry)
else:
raise FirewallError(errors.NOT_ENABLED, entry)
@handle_exceptions
def queryEntry(self, entry):
if "timeout" in self.settings[4] and \
self.settings[4]["timeout"] != "0":
raise FirewallError(errors.IPSET_WITH_TIMEOUT)
return entry in self.settings[5]
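# --- Illustrative sketch (not part of the original module) ---
# Note the timeout guard above: sets created with a "timeout" option refuse
# entry manipulation, so this example leaves it unset. Values are examples.
def _example_build_ipset_settings():
    s = FirewallClientIPSetSettings()
    s.setType("hash:ip")
    s.addOption("family", "inet")
    s.addEntry("192.0.2.10")
    return s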
# ipset config
class FirewallClientConfigIPSet(object):
@handle_exceptions
def __init__(self, bus, path):
self.bus = bus
self.path = path
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE, path)
self.fw_ipset = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG_IPSET)
self.fw_properties = dbus.Interface(
self.dbus_obj, dbus_interface='org.freedesktop.DBus.Properties')
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_property(self, prop):
return dbus_to_python(self.fw_properties.Get(
config.dbus.DBUS_INTERFACE_CONFIG_IPSET, prop))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_properties(self):
return dbus_to_python(self.fw_properties.GetAll(
config.dbus.DBUS_INTERFACE_CONFIG_IPSET))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def set_property(self, prop, value):
self.fw_properties.Set(config.dbus.DBUS_INTERFACE_CONFIG_IPSET,
prop, value)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSettings(self):
return FirewallClientIPSetSettings(list(dbus_to_python(\
self.fw_ipset.getSettings())))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def update(self, settings):
self.fw_ipset.update(tuple(settings.settings))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def loadDefaults(self):
self.fw_ipset.loadDefaults()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def remove(self):
self.fw_ipset.remove()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def rename(self, name):
self.fw_ipset.rename(name)
# version
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getVersion(self):
return self.fw_ipset.getVersion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setVersion(self, version):
self.fw_ipset.setVersion(version)
# short
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getShort(self):
return self.fw_ipset.getShort()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setShort(self, short):
self.fw_ipset.setShort(short)
# description
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDescription(self):
return self.fw_ipset.getDescription()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDescription(self, description):
self.fw_ipset.setDescription(description)
# entry
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getEntries(self):
return self.fw_ipset.getEntries()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setEntries(self, entries):
self.fw_ipset.setEntries(entries)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addEntry(self, entry):
self.fw_ipset.addEntry(entry)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeEntry(self, entry):
self.fw_ipset.removeEntry(entry)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryEntry(self, entry):
return self.fw_ipset.queryEntry(entry)
# helper config settings
class FirewallClientHelperSettings(object):
@handle_exceptions
def __init__(self, settings=None):
if settings:
self.settings = settings
else:
self.settings = ["", "", "", "", "", [ ]]
@handle_exceptions
def __repr__(self):
return '%s(%r)' % (self.__class__, self.settings)
@handle_exceptions
def getVersion(self):
return self.settings[0]
@handle_exceptions
def setVersion(self, version):
self.settings[0] = version
@handle_exceptions
def getShort(self):
return self.settings[1]
@handle_exceptions
def setShort(self, short):
self.settings[1] = short
@handle_exceptions
def getDescription(self):
return self.settings[2]
@handle_exceptions
def setDescription(self, description):
self.settings[2] = description
@handle_exceptions
def getFamily(self):
return self.settings[3]
@handle_exceptions
def setFamily(self, ipv):
if ipv is None:
    ipv = ""
self.settings[3] = ipv
@handle_exceptions
def getModule(self):
return self.settings[4]
@handle_exceptions
def setModule(self, module):
self.settings[4] = module
@handle_exceptions
def getPorts(self):
return self.settings[5]
@handle_exceptions
def setPorts(self, ports):
self.settings[5] = ports
@handle_exceptions
def addPort(self, port, protocol):
if (port,protocol) not in self.settings[5]:
self.settings[5].append((port,protocol))
else:
raise FirewallError(errors.ALREADY_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def removePort(self, port, protocol):
if (port,protocol) in self.settings[5]:
self.settings[5].remove((port,protocol))
else:
raise FirewallError(errors.NOT_ENABLED,
"'%s:%s'" % (port, protocol))
@handle_exceptions
def queryPort(self, port, protocol):
return (port,protocol) in self.settings[5]
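# --- Illustrative sketch (not part of the original module) ---
# A connection-tracking helper definition; module and port values are
# examples only.
def _example_build_helper_settings():
    s = FirewallClientHelperSettings()
    s.setModule("nf_conntrack_ftp")
    s.setFamily("ipv4")
    s.addPort("21", "tcp")
    return s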
# helper config
class FirewallClientConfigHelper(object):
@handle_exceptions
def __init__(self, bus, path):
self.bus = bus
self.path = path
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE, path)
self.fw_helper = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG_HELPER)
self.fw_properties = dbus.Interface(
self.dbus_obj, dbus_interface='org.freedesktop.DBus.Properties')
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_property(self, prop):
return dbus_to_python(self.fw_properties.Get(
config.dbus.DBUS_INTERFACE_CONFIG_HELPER, prop))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_properties(self):
return dbus_to_python(self.fw_properties.GetAll(
config.dbus.DBUS_INTERFACE_CONFIG_HELPER))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def set_property(self, prop, value):
self.fw_properties.Set(config.dbus.DBUS_INTERFACE_CONFIG_HELPER,
prop, value)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSettings(self):
return FirewallClientHelperSettings(list(dbus_to_python(\
self.fw_helper.getSettings())))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def update(self, settings):
self.fw_helper.update(tuple(settings.settings))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def loadDefaults(self):
self.fw_helper.loadDefaults()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def remove(self):
self.fw_helper.remove()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def rename(self, name):
self.fw_helper.rename(name)
# version
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getVersion(self):
return self.fw_helper.getVersion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setVersion(self, version):
self.fw_helper.setVersion(version)
# short
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getShort(self):
return self.fw_helper.getShort()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setShort(self, short):
self.fw_helper.setShort(short)
# description
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDescription(self):
return self.fw_helper.getDescription()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDescription(self, description):
self.fw_helper.setDescription(description)
# port
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getPorts(self):
return self.fw_helper.getPorts()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setPorts(self, ports):
self.fw_helper.setPorts(ports)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addPort(self, port, protocol):
self.fw_helper.addPort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removePort(self, port, protocol):
self.fw_helper.removePort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryPort(self, port, protocol):
return self.fw_helper.queryPort(port, protocol)
# family
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getFamily(self):
return self.fw_helper.getFamily()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setFamily(self, ipv):
if ipv is None:
    ipv = ""
self.fw_helper.setFamily(ipv)
# module
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getModule(self):
return self.fw_helper.getModule()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setModule(self, module):
self.fw_helper.setModule(module)
# service config
class FirewallClientConfigService(object):
@handle_exceptions
def __init__(self, bus, path):
self.bus = bus
self.path = path
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE, path)
self.fw_service = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG_SERVICE)
self.fw_properties = dbus.Interface(
self.dbus_obj, dbus_interface='org.freedesktop.DBus.Properties')
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_property(self, prop):
return dbus_to_python(self.fw_properties.Get(
config.dbus.DBUS_INTERFACE_CONFIG_SERVICE, prop))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_properties(self):
return dbus_to_python(self.fw_properties.GetAll(
config.dbus.DBUS_INTERFACE_CONFIG_SERVICE))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def set_property(self, prop, value):
self.fw_properties.Set(config.dbus.DBUS_INTERFACE_CONFIG_SERVICE,
prop, value)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSettings(self):
return FirewallClientServiceSettings(list(dbus_to_python(\
self.fw_service.getSettings())))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def update(self, settings):
self.fw_service.update(tuple(settings.settings))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def loadDefaults(self):
self.fw_service.loadDefaults()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def remove(self):
self.fw_service.remove()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def rename(self, name):
self.fw_service.rename(name)
# version
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getVersion(self):
return self.fw_service.getVersion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setVersion(self, version):
self.fw_service.setVersion(version)
# short
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getShort(self):
return self.fw_service.getShort()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setShort(self, short):
self.fw_service.setShort(short)
# description
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDescription(self):
return self.fw_service.getDescription()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDescription(self, description):
self.fw_service.setDescription(description)
# port
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getPorts(self):
return self.fw_service.getPorts()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setPorts(self, ports):
self.fw_service.setPorts(ports)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addPort(self, port, protocol):
self.fw_service.addPort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removePort(self, port, protocol):
self.fw_service.removePort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryPort(self, port, protocol):
return self.fw_service.queryPort(port, protocol)
# protocol
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getProtocols(self):
return self.fw_service.getProtocols()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setProtocols(self, protocols):
self.fw_service.setProtocols(protocols)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addProtocol(self, protocol):
self.fw_service.addProtocol(protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeProtocol(self, protocol):
self.fw_service.removeProtocol(protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryProtocol(self, protocol):
return self.fw_service.queryProtocol(protocol)
# source-port
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSourcePorts(self):
return self.fw_service.getSourcePorts()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setSourcePorts(self, ports):
self.fw_service.setSourcePorts(ports)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addSourcePort(self, port, protocol):
self.fw_service.addSourcePort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeSourcePort(self, port, protocol):
self.fw_service.removeSourcePort(port, protocol)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def querySourcePort(self, port, protocol):
return self.fw_service.querySourcePort(port, protocol)
# module
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getModules(self):
return self.fw_service.getModules()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setModules(self, modules):
self.fw_service.setModules(modules)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addModule(self, module):
self.fw_service.addModule(module)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeModule(self, module):
self.fw_service.removeModule(module)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryModule(self, module):
return self.fw_service.queryModule(module)
# destination
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDestinations(self):
return self.fw_service.getDestinations()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDestinations(self, destinations):
self.fw_service.setDestinations(destinations)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDestination(self, destination):
return self.fw_service.getDestination(destination)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDestination(self, destination, address):
self.fw_service.setDestination(destination, address)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeDestination(self, destination, address=None):
if address is not None and self.getDestination(destination) != address:
raise FirewallError(errors.NOT_ENABLED, "'%s:%s'" % \
(destination, address))
self.fw_service.removeDestination(destination)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryDestination(self, destination, address):
return self.fw_service.queryDestination(destination, address)
# icmptype config settings
class FirewallClientIcmpTypeSettings(object):
@handle_exceptions
def __init__(self, settings=None):
if settings:
self.settings = settings
else:
self.settings = ["", "", "", []]
@handle_exceptions
def __repr__(self):
return '%s(%r)' % (self.__class__, self.settings)
@handle_exceptions
def getVersion(self):
return self.settings[0]
@handle_exceptions
def setVersion(self, version):
self.settings[0] = version
@handle_exceptions
def getShort(self):
return self.settings[1]
@handle_exceptions
def setShort(self, short):
self.settings[1] = short
@handle_exceptions
def getDescription(self):
return self.settings[2]
@handle_exceptions
def setDescription(self, description):
self.settings[2] = description
@handle_exceptions
def getDestinations(self):
return self.settings[3]
@handle_exceptions
def setDestinations(self, destinations):
self.settings[3] = destinations
@handle_exceptions
def addDestination(self, destination):
# empty means all
if not self.settings[3]:
raise FirewallError(errors.ALREADY_ENABLED, destination)
elif destination not in self.settings[3]:
self.settings[3].append(destination)
else:
raise FirewallError(errors.ALREADY_ENABLED, destination)
@handle_exceptions
def removeDestination(self, destination):
if destination in self.settings[3]:
self.settings[3].remove(destination)
# empty means all
elif not self.settings[3]:
self.setDestinations(list(set(['ipv4','ipv6']) - \
set([destination])))
else:
raise FirewallError(errors.NOT_ENABLED, destination)
@handle_exceptions
def queryDestination(self, destination):
# empty means all
return not self.settings[3] or \
destination in self.settings[3]
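# --- Illustrative sketch (not part of the original module) ---
# An empty destination list means "all families", which is why addDestination
# above raises ALREADY_ENABLED on a fresh object; restrict explicitly instead.
def _example_build_icmptype_settings():
    s = FirewallClientIcmpTypeSettings()
    s.setDestinations(["ipv4"])
    return s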
# icmptype config
class FirewallClientConfigIcmpType(object):
@handle_exceptions
def __init__(self, bus, path):
self.bus = bus
self.path = path
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE, path)
self.fw_icmptype = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG_ICMPTYPE)
self.fw_properties = dbus.Interface(
self.dbus_obj, dbus_interface='org.freedesktop.DBus.Properties')
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_property(self, prop):
return dbus_to_python(self.fw_properties.Get(
config.dbus.DBUS_INTERFACE_CONFIG_ICMPTYPE, prop))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_properties(self):
return dbus_to_python(self.fw_properties.GetAll(
config.dbus.DBUS_INTERFACE_CONFIG_ICMPTYPE))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def set_property(self, prop, value):
self.fw_properties.Set(config.dbus.DBUS_INTERFACE_CONFIG_ICMPTYPE,
prop, value)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSettings(self):
return FirewallClientIcmpTypeSettings(list(dbus_to_python(\
self.fw_icmptype.getSettings())))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def update(self, settings):
self.fw_icmptype.update(tuple(settings.settings))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def loadDefaults(self):
self.fw_icmptype.loadDefaults()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def remove(self):
self.fw_icmptype.remove()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def rename(self, name):
self.fw_icmptype.rename(name)
# version
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getVersion(self):
return self.fw_icmptype.getVersion()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setVersion(self, version):
self.fw_icmptype.setVersion(version)
# short
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getShort(self):
return self.fw_icmptype.getShort()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setShort(self, short):
self.fw_icmptype.setShort(short)
# description
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDescription(self):
return self.fw_icmptype.getDescription()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDescription(self, description):
self.fw_icmptype.setDescription(description)
# destination
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDestinations(self):
return self.fw_icmptype.getDestinations()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDestinations(self, destinations):
self.fw_icmptype.setDestinations(destinations)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addDestination(self, destination):
self.fw_icmptype.addDestination(destination)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeDestination(self, destination):
self.fw_icmptype.removeDestination(destination)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryDestination(self, destination):
return self.fw_icmptype.queryDestination(destination)
# config.policies lockdown whitelist
class FirewallClientPoliciesLockdownWhitelist(object):
@handle_exceptions
def __init__(self, settings=None):
if settings:
self.settings = settings
else:
self.settings = [ [], [], [], [] ]
@handle_exceptions
def __repr__(self):
return '%s(%r)' % (self.__class__, self.settings)
@handle_exceptions
def getCommands(self):
return self.settings[0]
@handle_exceptions
def setCommands(self, commands):
self.settings[0] = commands
@handle_exceptions
def addCommand(self, command):
if command not in self.settings[0]:
self.settings[0].append(command)
@handle_exceptions
def removeCommand(self, command):
if command in self.settings[0]:
self.settings[0].remove(command)
@handle_exceptions
def queryCommand(self, command):
return command in self.settings[0]
@handle_exceptions
def getContexts(self):
return self.settings[1]
@handle_exceptions
def setContexts(self, contexts):
self.settings[1] = contexts
@handle_exceptions
def addContext(self, context):
if context not in self.settings[1]:
self.settings[1].append(context)
@handle_exceptions
def removeContext(self, context):
if context in self.settings[1]:
self.settings[1].remove(context)
@handle_exceptions
def queryContext(self, context):
return context in self.settings[1]
@handle_exceptions
def getUsers(self):
return self.settings[2]
@handle_exceptions
def setUsers(self, users):
self.settings[2] = users
@handle_exceptions
def addUser(self, user):
if user not in self.settings[2]:
self.settings[2].append(user)
@handle_exceptions
def removeUser(self, user):
if user in self.settings[2]:
self.settings[2].remove(user)
@handle_exceptions
def queryUser(self, user):
return user in self.settings[2]
@handle_exceptions
def getUids(self):
return self.settings[3]
@handle_exceptions
def setUids(self, uids):
self.settings[3] = uids
@handle_exceptions
def addUid(self, uid):
if uid not in self.settings[3]:
self.settings[3].append(uid)
@handle_exceptions
def removeUid(self, uid):
if uid in self.settings[3]:
self.settings[3].remove(uid)
@handle_exceptions
def queryUid(self, uid):
return uid in self.settings[3]
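# --- Illustrative sketch (not part of the original module) ---
# Whitelisting a command and a uid for lockdown mode; values are examples
# only.
def _example_build_lockdown_whitelist():
    w = FirewallClientPoliciesLockdownWhitelist()
    w.addCommand("/usr/bin/firewall-config")
    w.addUid(0)
    return w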
# config.policies
class FirewallClientConfigPolicies(object):
@handle_exceptions
def __init__(self, bus):
self.bus = bus
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE,
config.dbus.DBUS_PATH_CONFIG)
self.fw_policies = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG_POLICIES)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelist(self):
return FirewallClientPoliciesLockdownWhitelist( \
list(dbus_to_python(self.fw_policies.getLockdownWhitelist())))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setLockdownWhitelist(self, settings):
self.fw_policies.setLockdownWhitelist(tuple(settings.settings))
# command
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistCommand(self, command):
self.fw_policies.addLockdownWhitelistCommand(command)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistCommand(self, command):
self.fw_policies.removeLockdownWhitelistCommand(command)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistCommand(self, command):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistCommand(command))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistCommands(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistCommands())
# context
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistContext(self, context):
self.fw_policies.addLockdownWhitelistContext(context)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistContext(self, context):
self.fw_policies.removeLockdownWhitelistContext(context)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistContext(self, context):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistContext(context))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistContexts(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistContexts())
# user
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistUser(self, user):
self.fw_policies.addLockdownWhitelistUser(user)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistUser(self, user):
self.fw_policies.removeLockdownWhitelistUser(user)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistUser(self, user):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistUser(user))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistUsers(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistUsers())
# uid
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistUids(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistUids())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setLockdownWhitelistUids(self, uids):
self.fw_policies.setLockdownWhitelistUids(uids)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistUid(self, uid):
self.fw_policies.addLockdownWhitelistUid(uid)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistUid(self, uid):
self.fw_policies.removeLockdownWhitelistUid(uid)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistUid(self, uid):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistUid(uid))
# config.direct
class FirewallClientDirect(object):
@handle_exceptions
def __init__(self, settings=None):
if settings:
self.settings = settings
else:
self.settings = [ [], [], [], ]
@handle_exceptions
def __repr__(self):
return '%s(%r)' % (self.__class__, self.settings)
@handle_exceptions
def getAllChains(self):
return self.settings[0]
@handle_exceptions
def getChains(self, ipv, table):
return [ entry[2] for entry in self.settings[0] \
if entry[0] == ipv and entry[1] == table ]
@handle_exceptions
def setAllChains(self, chains):
self.settings[0] = chains
@handle_exceptions
def addChain(self, ipv, table, chain):
idx = (ipv, table, chain)
if idx not in self.settings[0]:
self.settings[0].append(idx)
@handle_exceptions
def removeChain(self, ipv, table, chain):
idx = (ipv, table, chain)
if idx in self.settings[0]:
self.settings[0].remove(idx)
@handle_exceptions
def queryChain(self, ipv, table, chain):
idx = (ipv, table, chain)
return idx in self.settings[0]
@handle_exceptions
def getAllRules(self):
return self.settings[1]
@handle_exceptions
def getRules(self, ipv, table, chain):
return [ entry[3:] for entry in self.settings[1] \
if entry[0] == ipv and entry[1] == table \
and entry[2] == chain ]
@handle_exceptions
def setAllRules(self, rules):
self.settings[1] = rules
@handle_exceptions
def addRule(self, ipv, table, chain, priority, args):
idx = (ipv, table, chain, priority, args)
if idx not in self.settings[1]:
self.settings[1].append(idx)
@handle_exceptions
def removeRule(self, ipv, table, chain, priority, args):
idx = (ipv, table, chain, priority, args)
if idx in self.settings[1]:
self.settings[1].remove(idx)
@handle_exceptions
def removeRules(self, ipv, table, chain):
for idx in list(self.settings[1]):
if idx[0] == ipv and idx[1] == table and idx[2] == chain:
self.settings[1].remove(idx)
@handle_exceptions
def queryRule(self, ipv, table, chain, priority, args):
idx = (ipv, table, chain, priority, args)
return idx in self.settings[1]
@handle_exceptions
def getAllPassthroughs(self):
return self.settings[2]
@handle_exceptions
def setAllPassthroughs(self, passthroughs):
self.settings[2] = passthroughs
@handle_exceptions
def removeAllPassthroughs(self):
self.settings[2] = []
@handle_exceptions
def getPassthroughs(self, ipv):
return [ entry[1] for entry in self.settings[2] \
if entry[0] == ipv ]
@handle_exceptions
def addPassthrough(self, ipv, args):
idx = (ipv, args)
if idx not in self.settings[2]:
self.settings[2].append(idx)
@handle_exceptions
def removePassthrough(self, ipv, args):
idx = (ipv, args)
if idx in self.settings[2]:
self.settings[2].remove(idx)
@handle_exceptions
def queryPassthrough(self, ipv, args):
idx = (ipv, args)
return idx in self.settings[2]
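# Illustrative sketch (not part of the original module; values are
# hypothetical): the settings object keeps chains, rules and passthroughs
# as plain lists, so a direct configuration can be assembled offline and
# then pushed in one call via FirewallClientConfigDirect.update():
#
#   settings = FirewallClientDirect()
#   settings.addChain("ipv4", "filter", "mychain")
#   settings.addRule("ipv4", "filter", "mychain", 0, ["-j", "ACCEPT"])
#   assert settings.queryChain("ipv4", "filter", "mychain")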
# config.direct
class FirewallClientConfigDirect(object):
@handle_exceptions
def __init__(self, bus):
self.bus = bus
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE,
config.dbus.DBUS_PATH_CONFIG)
self.fw_direct = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG_DIRECT)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSettings(self):
return FirewallClientDirect( \
list(dbus_to_python(self.fw_direct.getSettings())))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def update(self, settings):
self.fw_direct.update(tuple(settings.settings))
# direct chain
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addChain(self, ipv, table, chain):
self.fw_direct.addChain(ipv, table, chain)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeChain(self, ipv, table, chain):
self.fw_direct.removeChain(ipv, table, chain)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryChain(self, ipv, table, chain):
return dbus_to_python(self.fw_direct.queryChain(ipv, table, chain))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getChains(self, ipv, table):
return dbus_to_python(self.fw_direct.getChains(ipv, table))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getAllChains(self):
return dbus_to_python(self.fw_direct.getAllChains())
# direct rule
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addRule(self, ipv, table, chain, priority, args):
self.fw_direct.addRule(ipv, table, chain, priority, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeRule(self, ipv, table, chain, priority, args):
self.fw_direct.removeRule(ipv, table, chain, priority, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeRules(self, ipv, table, chain):
self.fw_direct.removeRules(ipv, table, chain)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryRule(self, ipv, table, chain, priority, args):
return dbus_to_python(self.fw_direct.queryRule(ipv, table, chain, priority, args))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getRules(self, ipv, table, chain):
return dbus_to_python(self.fw_direct.getRules(ipv, table, chain))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getAllRules(self):
return dbus_to_python(self.fw_direct.getAllRules())
# tracked passthrough
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addPassthrough(self, ipv, args):
self.fw_direct.addPassthrough(ipv, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removePassthrough(self, ipv, args):
self.fw_direct.removePassthrough(ipv, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryPassthrough(self, ipv, args):
return dbus_to_python(self.fw_direct.queryPassthrough(ipv, args))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getPassthroughs(self, ipv):
return dbus_to_python(self.fw_direct.getPassthroughs(ipv))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getAllPassthroughs(self):
return dbus_to_python(self.fw_direct.getAllPassthroughs())
# config
class FirewallClientConfig(object):
@handle_exceptions
def __init__(self, bus):
self.bus = bus
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE,
config.dbus.DBUS_PATH_CONFIG)
self.fw_config = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_CONFIG)
self.fw_properties = dbus.Interface(
self.dbus_obj, dbus_interface='org.freedesktop.DBus.Properties')
self._policies = FirewallClientConfigPolicies(self.bus)
self._direct = FirewallClientConfigDirect(self.bus)
# properties
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_property(self, prop):
return dbus_to_python(self.fw_properties.Get(
config.dbus.DBUS_INTERFACE_CONFIG, prop))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_properties(self):
return dbus_to_python(self.fw_properties.GetAll(
config.dbus.DBUS_INTERFACE_CONFIG))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def set_property(self, prop, value):
self.fw_properties.Set(config.dbus.DBUS_INTERFACE_CONFIG, prop, value)
# ipset
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIPSetNames(self):
return dbus_to_python(self.fw_config.getIPSetNames())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def listIPSets(self):
return dbus_to_python(self.fw_config.listIPSets())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIPSet(self, path):
return FirewallClientConfigIPSet(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIPSetByName(self, name):
path = dbus_to_python(self.fw_config.getIPSetByName(name))
return FirewallClientConfigIPSet(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addIPSet(self, name, settings):
if isinstance(settings, FirewallClientIPSetSettings):
path = self.fw_config.addIPSet(name, tuple(settings.settings))
else:
path = self.fw_config.addIPSet(name, tuple(settings))
return FirewallClientConfigIPSet(self.bus, path)
# zone
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZoneNames(self):
return dbus_to_python(self.fw_config.getZoneNames())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def listZones(self):
return dbus_to_python(self.fw_config.listZones())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZone(self, path):
return FirewallClientConfigZone(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZoneByName(self, name):
path = dbus_to_python(self.fw_config.getZoneByName(name))
return FirewallClientConfigZone(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZoneOfInterface(self, iface):
return dbus_to_python(self.fw_config.getZoneOfInterface(iface))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZoneOfSource(self, source):
return dbus_to_python(self.fw_config.getZoneOfSource(source))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addZone(self, name, settings):
if isinstance(settings, FirewallClientZoneSettings):
path = self.fw_config.addZone(name, tuple(settings.settings))
else:
path = self.fw_config.addZone(name, tuple(settings))
return FirewallClientConfigZone(self.bus, path)
# service
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getServiceNames(self):
return dbus_to_python(self.fw_config.getServiceNames())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def listServices(self):
return dbus_to_python(self.fw_config.listServices())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getService(self, path):
return FirewallClientConfigService(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getServiceByName(self, name):
path = dbus_to_python(self.fw_config.getServiceByName(name))
return FirewallClientConfigService(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addService(self, name, settings):
if isinstance(settings, FirewallClientServiceSettings):
path = self.fw_config.addService(name, tuple(settings.settings))
else:
path = self.fw_config.addService(name, tuple(settings))
return FirewallClientConfigService(self.bus, path)
# icmptype
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIcmpTypeNames(self):
return dbus_to_python(self.fw_config.getIcmpTypeNames())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def listIcmpTypes(self):
return dbus_to_python(self.fw_config.listIcmpTypes())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIcmpType(self, path):
return FirewallClientConfigIcmpType(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIcmpTypeByName(self, name):
path = dbus_to_python(self.fw_config.getIcmpTypeByName(name))
return FirewallClientConfigIcmpType(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addIcmpType(self, name, settings):
if isinstance(settings, FirewallClientIcmpTypeSettings):
path = self.fw_config.addIcmpType(name, tuple(settings.settings))
else:
path = self.fw_config.addIcmpType(name, tuple(settings))
return FirewallClientConfigIcmpType(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def policies(self):
return self._policies
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def direct(self):
return self._direct
# helper
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getHelperNames(self):
return dbus_to_python(self.fw_config.getHelperNames())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def listHelpers(self):
return dbus_to_python(self.fw_config.listHelpers())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getHelper(self, path):
return FirewallClientConfigHelper(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getHelperByName(self, name):
path = dbus_to_python(self.fw_config.getHelperByName(name))
return FirewallClientConfigHelper(self.bus, path)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addHelper(self, name, settings):
if isinstance(settings, FirewallClientHelperSettings):
path = self.fw_config.addHelper(name, tuple(settings.settings))
else:
path = self.fw_config.addHelper(name, tuple(settings))
return FirewallClientConfigHelper(self.bus, path)
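# Illustrative sketch (assumption, not original code): the permanent
# configuration is reached through FirewallClientConfig; the zone name
# below is hypothetical.
#
#   cfg = FirewallClient().config()
#   zone = cfg.getZoneByName("public")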
#
class FirewallClient(object):
@handle_exceptions
def __init__(self, bus=None, wait=0, quiet=True):
if not bus:
dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
try:
self.bus = slip.dbus.SystemBus()
self.bus.default_timeout = None
except Exception:
try:
self.bus = dbus.SystemBus()
except dbus.exceptions.DBusException as e:
raise FirewallError(errors.DBUS_ERROR,
e.get_dbus_message())
else:
print("Not using slip.dbus")
else:
self.bus = bus
self.bus.add_signal_receiver(
handler_function=self._dbus_connection_changed,
signal_name="NameOwnerChanged",
dbus_interface="org.freedesktop.DBus",
arg0=config.dbus.DBUS_INTERFACE)
for interface in [ config.dbus.DBUS_INTERFACE,
config.dbus.DBUS_INTERFACE_IPSET,
config.dbus.DBUS_INTERFACE_ZONE,
config.dbus.DBUS_INTERFACE_DIRECT,
config.dbus.DBUS_INTERFACE_POLICIES,
config.dbus.DBUS_INTERFACE_CONFIG,
config.dbus.DBUS_INTERFACE_CONFIG_IPSET,
config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
config.dbus.DBUS_INTERFACE_CONFIG_SERVICE,
config.dbus.DBUS_INTERFACE_CONFIG_HELPER,
config.dbus.DBUS_INTERFACE_CONFIG_DIRECT,
config.dbus.DBUS_INTERFACE_CONFIG_ICMPTYPE,
config.dbus.DBUS_INTERFACE_CONFIG_POLICIES ]:
self.bus.add_signal_receiver(self._signal_receiver,
dbus_interface=interface,
interface_keyword='interface',
member_keyword='member',
path_keyword='path')
# callbacks
self._callback = { }
self._callbacks = {
# client callbacks
"connection-changed": "connection-changed",
"connection-established": "connection-established",
"connection-lost": "connection-lost",
# firewalld callbacks
"log-denied-changed": "LogDeniedChanged",
"default-zone-changed": "DefaultZoneChanged",
"panic-mode-enabled": "PanicModeEnabled",
"panic-mode-disabled": "PanicModeDisabled",
"reloaded": "Reloaded",
"service-added": "ServiceAdded",
"service-removed": "ServiceRemoved",
"port-added": "PortAdded",
"port-removed": "PortRemoved",
"source-port-added": "SourcePortAdded",
"source-port-removed": "SourcePortRemoved",
"protocol-added": "ProtocolAdded",
"protocol-removed": "ProtocolRemoved",
"masquerade-added": "MasqueradeAdded",
"masquerade-removed": "MasqueradeRemoved",
"forward-port-added": "ForwardPortAdded",
"forward-port-removed": "ForwardPortRemoved",
"icmp-block-added": "IcmpBlockAdded",
"icmp-block-removed": "IcmpBlockRemoved",
"icmp-block-inversion-added": "IcmpBlockInversionAdded",
"icmp-block-inversion-removed": "IcmpBlockInversionRemoved",
"richrule-added": "RichRuleAdded",
"richrule-removed": "RichRuleRemoved",
"interface-added": "InterfaceAdded",
"interface-removed": "InterfaceRemoved",
"zone-changed": "ZoneOfInterfaceChanged", # DEPRECATED, use zone-of-interface-changed instead
"zone-of-interface-changed": "ZoneOfInterfaceChanged",
"source-added": "SourceAdded",
"source-removed": "SourceRemoved",
"zone-of-source-changed": "ZoneOfSourceChanged",
# ipset callbacks
"ipset-entry-added": "EntryAdded",
"ipset-entry-removed": "EntryRemoved",
# direct callbacks
"direct:chain-added": "ChainAdded",
"direct:chain-removed": "ChainRemoved",
"direct:rule-added": "RuleAdded",
"direct:rule-removed": "RuleRemoved",
"direct:passthrough-added": "PassthroughAdded",
"direct:passthrough-removed": "PassthroughRemoved",
"config:direct:updated": "config:direct:Updated",
# policy callbacks
"lockdown-enabled": "LockdownEnabled",
"lockdown-disabled": "LockdownDisabled",
"lockdown-whitelist-command-added": "LockdownWhitelistCommandAdded",
"lockdown-whitelist-command-removed": "LockdownWhitelistCommandRemoved",
"lockdown-whitelist-context-added": "LockdownWhitelistContextAdded",
"lockdown-whitelist-context-removed": "LockdownWhitelistContextRemoved",
"lockdown-whitelist-uid-added": "LockdownWhitelistUidAdded",
"lockdown-whitelist-uid-removed": "LockdownWhitelistUidRemoved",
"lockdown-whitelist-user-added": "LockdownWhitelistUserAdded",
"lockdown-whitelist-user-removed": "LockdownWhitelistUserRemoved",
# firewalld.config callbacks
"config:policies:lockdown-whitelist-updated": "config:policies:LockdownWhitelistUpdated",
"config:ipset-added": "config:IPSetAdded",
"config:ipset-updated": "config:IPSetUpdated",
"config:ipset-removed": "config:IPSetRemoved",
"config:ipset-renamed": "config:IPSetRenamed",
"config:zone-added": "config:ZoneAdded",
"config:zone-updated": "config:ZoneUpdated",
"config:zone-removed": "config:ZoneRemoved",
"config:zone-renamed": "config:ZoneRenamed",
"config:service-added": "config:ServiceAdded",
"config:service-updated": "config:ServiceUpdated",
"config:service-removed": "config:ServiceRemoved",
"config:service-renamed": "config:ServiceRenamed",
"config:icmptype-added": "config:IcmpTypeAdded",
"config:icmptype-updated": "config:IcmpTypeUpdated",
"config:icmptype-removed": "config:IcmpTypeRemoved",
"config:icmptype-renamed": "config:IcmpTypeRenamed",
"config:helper-added": "config:HelperAdded",
"config:helper-updated": "config:HelperUpdated",
"config:helper-removed": "config:HelperRemoved",
"config:helper-renamed": "config:HelperRenamed",
}
# initialize variables used for connection
self._init_vars()
self.quiet = quiet
if wait > 0:
            # schedule the connection attempt after `wait` seconds
GLib.timeout_add_seconds(wait, self._connection_established)
else:
self._connection_established()
@handle_exceptions
def _init_vars(self):
self.fw = None
self.fw_ipset = None
self.fw_zone = None
self.fw_helper = None
self.fw_direct = None
self.fw_properties = None
self._config = None
self.connected = False
@handle_exceptions
def getExceptionHandler(self):
return exception_handler
@handle_exceptions
def setExceptionHandler(self, handler):
global exception_handler
exception_handler = handler
@handle_exceptions
def getNotAuthorizedLoop(self):
return not_authorized_loop
@handle_exceptions
def setNotAuthorizedLoop(self, enable):
global not_authorized_loop
not_authorized_loop = enable
@handle_exceptions
def connect(self, name, callback, *args):
if name in self._callbacks:
self._callback[self._callbacks[name]] = (callback, args)
else:
raise ValueError("Unknown callback name '%s'" % name)
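    # Illustrative sketch (assumption): callbacks are registered under the
    # friendly names listed in self._callbacks, e.g.
    #
    #   fw = FirewallClient()
    #   fw.connect("service-added", on_service_added)
    #
    # where on_service_added is a callable; its exact signature mirrors the
    # arguments of the ServiceAdded D-Bus signal and is an assumption here.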
@handle_exceptions
def _dbus_connection_changed(self, name, old_owner, new_owner):
if name != config.dbus.DBUS_INTERFACE:
return
if new_owner:
# connection established
self._connection_established()
else:
# connection lost
self._connection_lost()
@handle_exceptions
def _connection_established(self):
try:
self.dbus_obj = self.bus.get_object(config.dbus.DBUS_INTERFACE,
config.dbus.DBUS_PATH)
self.fw = dbus.Interface(self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE)
self.fw_ipset = dbus.Interface(
self.dbus_obj, dbus_interface=config.dbus.DBUS_INTERFACE_IPSET)
self.fw_zone = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_ZONE)
self.fw_direct = dbus.Interface(
self.dbus_obj, dbus_interface=config.dbus.DBUS_INTERFACE_DIRECT)
self.fw_policies = dbus.Interface(
self.dbus_obj,
dbus_interface=config.dbus.DBUS_INTERFACE_POLICIES)
self.fw_properties = dbus.Interface(
self.dbus_obj, dbus_interface='org.freedesktop.DBus.Properties')
except dbus.exceptions.DBusException as e:
# ignore dbus errors
if not self.quiet:
print ("DBusException", e.get_dbus_message())
return
except Exception as e:
if not self.quiet:
print ("Exception", e)
return
self._config = FirewallClientConfig(self.bus)
self.connected = True
self._signal_receiver(member="connection-established",
interface=config.dbus.DBUS_INTERFACE)
self._signal_receiver(member="connection-changed",
interface=config.dbus.DBUS_INTERFACE)
@handle_exceptions
def _connection_lost(self):
self._init_vars()
self._signal_receiver(member="connection-lost",
interface=config.dbus.DBUS_INTERFACE)
self._signal_receiver(member="connection-changed",
interface=config.dbus.DBUS_INTERFACE)
@handle_exceptions
def _signal_receiver(self, *args, **kwargs):
if "member" not in kwargs or "interface" not in kwargs:
return
signal = kwargs["member"]
interface = kwargs["interface"]
# config signals need special treatment
# pimp signal name
if interface.startswith(config.dbus.DBUS_INTERFACE_CONFIG_ZONE):
signal = "config:Zone" + signal
elif interface.startswith(config.dbus.DBUS_INTERFACE_CONFIG_IPSET):
signal = "config:IPSet" + signal
elif interface.startswith(config.dbus.DBUS_INTERFACE_CONFIG_SERVICE):
signal = "config:Service" + signal
elif interface.startswith(config.dbus.DBUS_INTERFACE_CONFIG_ICMPTYPE):
signal = "config:IcmpType" + signal
elif interface.startswith(config.dbus.DBUS_INTERFACE_CONFIG_HELPER):
signal = "config:Helper" + signal
elif interface == config.dbus.DBUS_INTERFACE_CONFIG:
signal = "config:" + signal
elif interface == config.dbus.DBUS_INTERFACE_CONFIG_POLICIES:
signal = "config:policies:" + signal
elif interface == config.dbus.DBUS_INTERFACE_CONFIG_DIRECT:
signal = "config:direct:" + signal
cb = None
for callback in self._callbacks:
if self._callbacks[callback] == signal and \
self._callbacks[callback] in self._callback:
cb = self._callback[self._callbacks[callback]]
if cb is None:
return
# call back with args converted to python types ...
cb_args = [ dbus_to_python(arg) for arg in args ]
try:
if cb[1]:
# add call data
cb_args.extend(cb[1])
# call back
cb[0](*cb_args)
except Exception as msg:
print(msg)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def config(self):
return self._config
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def reload(self):
self.fw.reload()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def complete_reload(self):
self.fw.completeReload()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def runtimeToPermanent(self):
self.fw.runtimeToPermanent()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def checkPermanentConfig(self):
self.fw.checkPermanentConfig()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_property(self, prop):
return dbus_to_python(self.fw_properties.Get(
config.dbus.DBUS_INTERFACE, prop))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def get_properties(self):
return dbus_to_python(self.fw_properties.GetAll(
config.dbus.DBUS_INTERFACE))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def set_property(self, prop, value):
self.fw_properties.Set(config.dbus.DBUS_INTERFACE, prop, value)
# panic mode
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def enablePanicMode(self):
self.fw.enablePanicMode()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def disablePanicMode(self):
self.fw.disablePanicMode()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryPanicMode(self):
return dbus_to_python(self.fw.queryPanicMode())
# list functions
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZoneSettings(self, zone):
return FirewallClientZoneSettings(list(dbus_to_python(\
self.fw.getZoneSettings(zone))))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIPSets(self):
return dbus_to_python(self.fw_ipset.getIPSets())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIPSetSettings(self, ipset):
return FirewallClientIPSetSettings(list(dbus_to_python(\
self.fw_ipset.getIPSetSettings(ipset))))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addEntry(self, ipset, entry):
self.fw_ipset.addEntry(ipset, entry)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getEntries(self, ipset):
return self.fw_ipset.getEntries(ipset)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setEntries(self, ipset, entries):
return self.fw_ipset.setEntries(ipset, entries)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeEntry(self, ipset, entry):
self.fw_ipset.removeEntry(ipset, entry)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryEntry(self, ipset, entry):
return dbus_to_python(self.fw_ipset.queryEntry(ipset, entry))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def listServices(self):
return dbus_to_python(self.fw.listServices())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getServiceSettings(self, service):
return FirewallClientServiceSettings(list(dbus_to_python(\
self.fw.getServiceSettings(service))))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def listIcmpTypes(self):
return dbus_to_python(self.fw.listIcmpTypes())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIcmpTypeSettings(self, icmptype):
return FirewallClientIcmpTypeSettings(list(dbus_to_python(\
self.fw.getIcmpTypeSettings(icmptype))))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getHelpers(self):
return dbus_to_python(self.fw.getHelpers())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getHelperSettings(self, helper):
return FirewallClientHelperSettings(list(dbus_to_python(\
self.fw.getHelperSettings(helper))))
# automatic helper setting
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getAutomaticHelpers(self):
return dbus_to_python(self.fw.getAutomaticHelpers())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setAutomaticHelpers(self, value):
self.fw.setAutomaticHelpers(value)
# log denied
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLogDenied(self):
return dbus_to_python(self.fw.getLogDenied())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setLogDenied(self, value):
self.fw.setLogDenied(value)
# default zone
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getDefaultZone(self):
return dbus_to_python(self.fw.getDefaultZone())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def setDefaultZone(self, zone):
self.fw.setDefaultZone(zone)
# zone
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZones(self):
return dbus_to_python(self.fw_zone.getZones())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getActiveZones(self):
return dbus_to_python(self.fw_zone.getActiveZones())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZoneOfInterface(self, interface):
return dbus_to_python(self.fw_zone.getZoneOfInterface(interface))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getZoneOfSource(self, source):
return dbus_to_python(self.fw_zone.getZoneOfSource(source))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def isImmutable(self, zone):
return dbus_to_python(self.fw_zone.isImmutable(zone))
# interfaces
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addInterface(self, zone, interface):
return dbus_to_python(self.fw_zone.addInterface(zone, interface))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def changeZone(self, zone, interface): # DEPRECATED
return dbus_to_python(self.fw_zone.changeZone(zone, interface))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def changeZoneOfInterface(self, zone, interface):
return dbus_to_python(self.fw_zone.changeZoneOfInterface(zone,
interface))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getInterfaces(self, zone):
return dbus_to_python(self.fw_zone.getInterfaces(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryInterface(self, zone, interface):
return dbus_to_python(self.fw_zone.queryInterface(zone, interface))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeInterface(self, zone, interface):
return dbus_to_python(self.fw_zone.removeInterface(zone, interface))
# sources
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addSource(self, zone, source):
return dbus_to_python(self.fw_zone.addSource(zone, source))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def changeZoneOfSource(self, zone, source):
return dbus_to_python(self.fw_zone.changeZoneOfSource(zone, source))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSources(self, zone):
return dbus_to_python(self.fw_zone.getSources(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def querySource(self, zone, source):
return dbus_to_python(self.fw_zone.querySource(zone, source))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeSource(self, zone, source):
return dbus_to_python(self.fw_zone.removeSource(zone, source))
# rich rules
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addRichRule(self, zone, rule, timeout=0):
return dbus_to_python(self.fw_zone.addRichRule(zone, rule, timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getRichRules(self, zone):
return dbus_to_python(self.fw_zone.getRichRules(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryRichRule(self, zone, rule):
return dbus_to_python(self.fw_zone.queryRichRule(zone, rule))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeRichRule(self, zone, rule):
return dbus_to_python(self.fw_zone.removeRichRule(zone, rule))
# services
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addService(self, zone, service, timeout=0):
return dbus_to_python(self.fw_zone.addService(zone, service, timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getServices(self, zone):
return dbus_to_python(self.fw_zone.getServices(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryService(self, zone, service):
return dbus_to_python(self.fw_zone.queryService(zone, service))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeService(self, zone, service):
return dbus_to_python(self.fw_zone.removeService(zone, service))
# ports
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addPort(self, zone, port, protocol, timeout=0):
return dbus_to_python(self.fw_zone.addPort(zone, port, protocol, timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getPorts(self, zone):
return dbus_to_python(self.fw_zone.getPorts(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryPort(self, zone, port, protocol):
return dbus_to_python(self.fw_zone.queryPort(zone, port, protocol))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removePort(self, zone, port, protocol):
return dbus_to_python(self.fw_zone.removePort(zone, port, protocol))
# protocols
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addProtocol(self, zone, protocol, timeout=0):
return dbus_to_python(self.fw_zone.addProtocol(zone, protocol, timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getProtocols(self, zone):
return dbus_to_python(self.fw_zone.getProtocols(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryProtocol(self, zone, protocol):
return dbus_to_python(self.fw_zone.queryProtocol(zone, protocol))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeProtocol(self, zone, protocol):
return dbus_to_python(self.fw_zone.removeProtocol(zone, protocol))
# masquerade
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addMasquerade(self, zone, timeout=0):
return dbus_to_python(self.fw_zone.addMasquerade(zone, timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryMasquerade(self, zone):
return dbus_to_python(self.fw_zone.queryMasquerade(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeMasquerade(self, zone):
return dbus_to_python(self.fw_zone.removeMasquerade(zone))
# forward ports
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addForwardPort(self, zone, port, protocol, toport, toaddr,
timeout=0):
if toport is None:
toport = ""
if toaddr is None:
toaddr = ""
return dbus_to_python(self.fw_zone.addForwardPort(zone, port, protocol,
toport, toaddr,
timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getForwardPorts(self, zone):
return dbus_to_python(self.fw_zone.getForwardPorts(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryForwardPort(self, zone, port, protocol, toport, toaddr):
if toport is None:
toport = ""
if toaddr is None:
toaddr = ""
return dbus_to_python(self.fw_zone.queryForwardPort(zone,
port, protocol,
toport, toaddr))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeForwardPort(self, zone, port, protocol, toport, toaddr):
if toport is None:
toport = ""
if toaddr is None:
toaddr = ""
return dbus_to_python(self.fw_zone.removeForwardPort(zone,
port, protocol,
toport, toaddr))
# source ports
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addSourcePort(self, zone, port, protocol, timeout=0):
return dbus_to_python(self.fw_zone.addSourcePort(zone, port, protocol,
timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getSourcePorts(self, zone):
return dbus_to_python(self.fw_zone.getSourcePorts(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def querySourcePort(self, zone, port, protocol):
return dbus_to_python(self.fw_zone.querySourcePort(zone, port, protocol))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeSourcePort(self, zone, port, protocol):
return dbus_to_python(self.fw_zone.removeSourcePort(zone, port,
protocol))
# icmpblock
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addIcmpBlock(self, zone, icmp, timeout=0):
return dbus_to_python(self.fw_zone.addIcmpBlock(zone, icmp, timeout))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getIcmpBlocks(self, zone):
return dbus_to_python(self.fw_zone.getIcmpBlocks(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryIcmpBlock(self, zone, icmp):
return dbus_to_python(self.fw_zone.queryIcmpBlock(zone, icmp))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeIcmpBlock(self, zone, icmp):
return dbus_to_python(self.fw_zone.removeIcmpBlock(zone, icmp))
# icmp block inversion
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addIcmpBlockInversion(self, zone):
return dbus_to_python(self.fw_zone.addIcmpBlockInversion(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryIcmpBlockInversion(self, zone):
return dbus_to_python(self.fw_zone.queryIcmpBlockInversion(zone))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeIcmpBlockInversion(self, zone):
return dbus_to_python(self.fw_zone.removeIcmpBlockInversion(zone))
# direct chain
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addChain(self, ipv, table, chain):
self.fw_direct.addChain(ipv, table, chain)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeChain(self, ipv, table, chain):
self.fw_direct.removeChain(ipv, table, chain)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryChain(self, ipv, table, chain):
return dbus_to_python(self.fw_direct.queryChain(ipv, table, chain))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getChains(self, ipv, table):
return dbus_to_python(self.fw_direct.getChains(ipv, table))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getAllChains(self):
return dbus_to_python(self.fw_direct.getAllChains())
# direct rule
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addRule(self, ipv, table, chain, priority, args):
self.fw_direct.addRule(ipv, table, chain, priority, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeRule(self, ipv, table, chain, priority, args):
self.fw_direct.removeRule(ipv, table, chain, priority, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeRules(self, ipv, table, chain):
self.fw_direct.removeRules(ipv, table, chain)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryRule(self, ipv, table, chain, priority, args):
return dbus_to_python(self.fw_direct.queryRule(ipv, table, chain, priority, args))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getRules(self, ipv, table, chain):
return dbus_to_python(self.fw_direct.getRules(ipv, table, chain))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getAllRules(self):
return dbus_to_python(self.fw_direct.getAllRules())
# direct passthrough
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def passthrough(self, ipv, args):
return dbus_to_python(self.fw_direct.passthrough(ipv, args))
# tracked passthrough
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getAllPassthroughs(self):
return dbus_to_python(self.fw_direct.getAllPassthroughs())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeAllPassthroughs(self):
self.fw_direct.removeAllPassthroughs()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getPassthroughs(self, ipv):
return dbus_to_python(self.fw_direct.getPassthroughs(ipv))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addPassthrough(self, ipv, args):
self.fw_direct.addPassthrough(ipv, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removePassthrough(self, ipv, args):
self.fw_direct.removePassthrough(ipv, args)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryPassthrough(self, ipv, args):
return dbus_to_python(self.fw_direct.queryPassthrough(ipv, args))
# lockdown
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def enableLockdown(self):
self.fw_policies.enableLockdown()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def disableLockdown(self):
self.fw_policies.disableLockdown()
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdown(self):
return dbus_to_python(self.fw_policies.queryLockdown())
# policies
# lockdown white list commands
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistCommand(self, command):
self.fw_policies.addLockdownWhitelistCommand(command)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistCommands(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistCommands())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistCommand(self, command):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistCommand(command))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistCommand(self, command):
self.fw_policies.removeLockdownWhitelistCommand(command)
# lockdown white list contexts
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistContext(self, context):
self.fw_policies.addLockdownWhitelistContext(context)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistContexts(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistContexts())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistContext(self, context):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistContext(context))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistContext(self, context):
self.fw_policies.removeLockdownWhitelistContext(context)
# lockdown white list uids
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistUid(self, uid):
self.fw_policies.addLockdownWhitelistUid(uid)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistUids(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistUids())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistUid(self, uid):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistUid(uid))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistUid(self, uid):
self.fw_policies.removeLockdownWhitelistUid(uid)
# lockdown white list users
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def addLockdownWhitelistUser(self, user):
self.fw_policies.addLockdownWhitelistUser(user)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def getLockdownWhitelistUsers(self):
return dbus_to_python(self.fw_policies.getLockdownWhitelistUsers())
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def queryLockdownWhitelistUser(self, user):
return dbus_to_python(self.fw_policies.queryLockdownWhitelistUser(user))
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def removeLockdownWhitelistUser(self, user):
self.fw_policies.removeLockdownWhitelistUser(user)
@slip.dbus.polkit.enable_proxy
@handle_exceptions
def authorizeAll(self):
""" Authorize once for all polkit actions. """
self.fw.authorizeAll()
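# Illustrative usage sketch (assumption, not part of the original module;
# zone and service names are hypothetical):
#
#   fw = FirewallClient()
#   if fw.connected:
#       print(fw.getDefaultZone())
#       fw.addService("public", "http", timeout=300)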
| 1.804688
| 2
|
ska-tmc/ska-tmc-centralnode-low/src/ska_tmc_centralnode_low/central_node_low.py
|
ska-telescope/tmc-prototype
| 3
|
12779448
|
<reponame>ska-telescope/tmc-prototype<gh_stars>1-10
# -*- coding: utf-8 -*-
#
# This file is part of the CentralNode project
#
#
#
# Distributed under the terms of the BSD-3-Clause license.
# See LICENSE.txt for more info.
"""
Central Node is a coordinator of the complete M&C system. Central Node implements the standard set
of state and mode attributes defined by the SKA Control Model.
"""
# PROTECTED REGION ID(CentralNode.additionnal_import) ENABLED START #
import threading
# Tango imports
import tango
from tango import DebugIt, AttrWriteType, DevFailed
from tango.server import run, attribute, command, device_property
# Additional import
from ska.base import SKABaseDevice
from ska.base.commands import ResultCode
from ska.base.control_model import HealthState
from tmc.common.tango_server_helper import TangoServerHelper
from . import const, release
from .device_data import DeviceData
from .startup_telescope_command import StartUpTelescope
from .standby_telescope_command import StandByTelescope
from .assign_resources_command import AssignResources
from .release_resources_command import ReleaseResources
# PROTECTED REGION END # // CentralNode.additional_import
__all__ = [
"CentralNode",
"main",
"AssignResources",
"ReleaseResources",
"StandByTelescope",
"StartUpTelescope",
]
class CentralNode(SKABaseDevice):
"""
Central Node is a coordinator of the complete M&C system.
:Device Properties:
CentralAlarmHandler:
Device name of CentralAlarmHandler
TMAlarmHandler:
Device name of TMAlarmHandler
TMLowSubarrayNodes:
List of TM Low Subarray Node devices
MCCSMasterLeafNodeFQDN:
FQDN of Mccs Master Leaf Node.
:Device Attributes:
telescopeHealthState:
Health state of Telescope
subarray1HealthState:
Health state of SubarrayNode1
activityMessage:
String providing information about the current activity in Central Node.
"""
# -----------------
# Device Properties
# -----------------
CentralAlarmHandler = device_property(
dtype="str",
doc="Device name of CentralAlarmHandler ",
)
TMAlarmHandler = device_property(
dtype="str",
doc="Device name of TMAlarmHandler ",
)
TMLowSubarrayNodes = device_property(
dtype=("str",),
doc="List of TM Low Subarray Node devices",
)
MCCSMasterLeafNodeFQDN = device_property(dtype="str")
# ----------
# Attributes
# ----------
telescopeHealthState = attribute(
dtype=HealthState,
doc="Health state of Telescope",
)
subarray1HealthState = attribute(
dtype=HealthState,
doc="Health state of Subarray1",
)
activityMessage = attribute(
dtype="str",
access=AttrWriteType.READ_WRITE,
doc="Activity Message",
)
# ---------------
# General methods
# ---------------
class InitCommand(SKABaseDevice.InitCommand):
"""
A class for the TMC CentralNode's init_device() method.
"""
def do(self):
"""
Initializes the attributes and properties of the Central Node Low.
:return: A tuple containing a return code and a string message indicating status.
The message is for information purpose only.
        :rtype: (ResultCode, str)
:raises: DevFailed if error occurs while initializing the CentralNode device or if error occurs while
creating device proxy for any of the devices like SubarrayNodeLow or MccsMasterLeafNode.
"""
super().do()
device = self.target
try:
self.logger.info("Device initialisating...")
device_data = DeviceData.get_instance()
device.device_data = device_data
# Get Instance of TangoServerHelper class
this_server = TangoServerHelper.get_instance()
this_server.set_tango_class(device)
device.attr_map = {}
# Initialise Attributes
device.attr_map["telescopeHealthState"]=HealthState.UNKNOWN
device.attr_map["subarray1HealthState"]=HealthState.UNKNOWN
device.attr_map["activityMessage"]=""
device._health_state = HealthState.OK
device._build_state = "{},{},{}".format(
release.name, release.version, release.description
)
device._version_id = release.version
device_data.mccs_controller_fqdn = "low-mccs/control/control"
self.logger.debug(const.STR_INIT_SUCCESS)
except DevFailed as dev_failed:
log_msg = f"{const.ERR_INIT_PROP_ATTR_CN}{dev_failed}"
self.logger.exception(dev_failed)
this_server.write_attr("activityMessage", const.ERR_INIT_PROP_ATTR_CN, False)
tango.Except.throw_exception(
const.STR_CMD_FAILED,
log_msg,
"CentralNode.InitCommand.do()",
tango.ErrSeverity.ERR,
)
for subarray in range(0, len(device.TMLowSubarrayNodes)):
# populate subarray_id-subarray proxy map
tokens = device.TMLowSubarrayNodes[subarray].split("/")
subarray_id = int(tokens[2])
device_data.subarray_FQDN_dict[subarray_id] = device.TMLowSubarrayNodes[
subarray
]
this_server.write_attr("activityMessage", const.STR_CN_INIT_SUCCESS, False)
self.logger.info(device.attr_map["activityMessage"])
return (ResultCode.OK, device.attr_map["activityMessage"])
def always_executed_hook(self):
# PROTECTED REGION ID(CentralNode.always_executed_hook) ENABLED START #
""" Internal construct of TANGO. """
# PROTECTED REGION END # // CentralNode.always_executed_hook
def delete_device(self):
# PROTECTED REGION ID(CentralNode.delete_device) ENABLED START #
""" Internal construct of TANGO. """
# PROTECTED REGION END # // CentralNode.delete_device
# ------------------
# Attributes methods
# ------------------
def read_telescopeHealthState(self):
# PROTECTED REGION ID(CentralNode.telescope_healthstate_read) ENABLED START #
""" Internal construct of TANGO. Returns the Telescope health state."""
return self.attr_map["telescopeHealthState"]
# PROTECTED REGION END # // CentralNode.telescope_healthstate_read
def read_subarray1HealthState(self):
# PROTECTED REGION ID(CentralNode.subarray1_healthstate_read) ENABLED START #
""" Internal construct of TANGO. Returns Subarray1 health state. """
return self.attr_map["subarray1HealthState"]
# PROTECTED REGION END # // CentralNode.subarray1_healthstate_read
def read_activityMessage(self):
# PROTECTED REGION ID(CentralNode.activity_message_read) ENABLED START #
"""Internal construct of TANGO. Returns activity message. """
return self.attr_map["activityMessage"]
# PROTECTED REGION END # // CentralNode.activity_message_read
def write_activityMessage(self, value):
# PROTECTED REGION ID(CentralNode.activity_message_write) ENABLED START #
"""Internal construct of TANGO. Sets the activity message. """
self.update_attr_map("activityMessage", value)
# PROTECTED REGION END # // CentralNode.activity_message_write
    # single lock shared by all attribute-map updates
    _attr_map_lock = threading.Lock()

    def update_attr_map(self, attr, val):
        """
        This method updates an attribute value in the attribute map. Once a
        thread has acquired the lock, subsequent attempts to acquire it are
        blocked until it is released.
        """
        # a lock created inside the method would never be contended by other
        # threads, so the shared class-level lock is used instead
        with CentralNode._attr_map_lock:
            self.attr_map[attr] = val
# --------
# Commands
# --------
def is_StandByTelescope_allowed(self):
"""
Checks whether this command is allowed to be run in current device state.
:return: True if this command is allowed to be run in current device state.
:rtype: boolean
"""
handler = self.get_command_object("StandByTelescope")
return handler.check_allowed()
@command(
dtype_out="DevVarLongStringArray",
doc_out="[ResultCode, information-only string]",
)
def StandByTelescope(self):
"""
This command invokes Off() command on SubarrayNode, MCCSMasterLeafNode and sets CentralNode into OFF state.
"""
handler = self.get_command_object("StandByTelescope")
(result_code, message) = handler()
return [[result_code], [message]]
def is_StartUpTelescope_allowed(self):
"""
Checks whether this command is allowed to be run in current device state.
:return: True if this command is allowed to be run in current device state.
:rtype: boolean
"""
handler = self.get_command_object("StartUpTelescope")
return handler.check_allowed()
@command(
dtype_out="DevVarLongStringArray",
doc_out="[ResultCode, information-only string]",
)
@DebugIt()
def StartUpTelescope(self):
"""
This command invokes On() command on SubarrayNode, MCCSMasterLeafNode
and sets the Central Node into ON state.
"""
handler = self.get_command_object("StartUpTelescope")
(result_code, message) = handler()
return [[result_code], [message]]
def is_AssignResources_allowed(self):
"""
Checks whether this command is allowed to be run in current device state.
:return: True if this command is allowed to be run in current device state
:rtype: boolean
"""
handler = self.get_command_object("AssignResources")
return handler.check_allowed()
@command(
dtype_in="str",
doc_in="It accepts the subarray id, station ids, station beam id and channels in JSON string format",
)
@DebugIt()
def AssignResources(self, argin):
"""
AssignResources command invokes the AssignResources command on lower level devices.
"""
handler = self.get_command_object("AssignResources")
handler(argin)
def is_ReleaseResources_allowed(self):
"""
Checks whether this command is allowed to be run in current device state.
:return: True if this command is allowed to be run in current device state.
:rtype: boolean
:raises: DevFailed if this command is not allowed to be run in current device state
"""
handler = self.get_command_object("ReleaseResources")
return handler.check_allowed()
@command(
dtype_in="str",
doc_in="The string in JSON format. The JSON contains following values:\nsubarray_id: "
"and release_all boolean as true.",
)
@DebugIt()
def ReleaseResources(self, argin):
"""
Release all the resources assigned to the given Subarray.
"""
handler = self.get_command_object("ReleaseResources")
handler(argin)
def init_command_objects(self):
"""
Initialises the command handlers for commands supported by this device.
"""
super().init_command_objects()
args = (self.device_data, self.state_model, self.logger)
self.assign_resources = AssignResources(*args)
self.release_resources = ReleaseResources(*args)
self.startup_telescope = StartUpTelescope(*args)
self.standby_telescope = StandByTelescope(*args)
self.register_command_object("StartUpTelescope", self.startup_telescope)
self.register_command_object("StandByTelescope", self.standby_telescope)
self.register_command_object("AssignResources", self.assign_resources)
self.register_command_object("ReleaseResources", self.release_resources)
# ----------
# Run server
# ----------
def main(args=None, **kwargs):
# PROTECTED REGION ID(CentralNode.main) ENABLED START #
"""
Runs the CentralNode.
:param args: Arguments internal to TANGO
:param kwargs: Arguments internal to TANGO
:return: CentralNode TANGO object.
"""
return run((CentralNode,), args=args, **kwargs)
# PROTECTED REGION END # // CentralNode.main
if __name__ == "__main__":
main()
| 1.757813
| 2
|
LMRt-example/main.py
|
FossilizedContainers/fossilized-controller
| 1
|
12779449
|
import LMRt
import os
import numpy as np
import pandas as pd
import xarray as xr
# preprocessing
print("\n======== Preprocessing ========\n")
config = 'configs.yml'
recon_iterations = 1
figure_type = 'graph'  # valid values: 'map' or 'graph'
job = LMRt.ReconJob()
job.load_configs(config, verbose=True)
job.load_proxydb(verbose=True)
job.filter_proxydb(verbose=True)
job.seasonalize_proxydb(verbose=True)
job.load_prior(verbose=True)
job.load_obs(verbose=True)
job_dirpath = job.configs['job_dirpath']
seasonalized_prior_path = os.path.join(job_dirpath, 'seasonalized_prior.pkl')
seasonalized_obs_path = os.path.join(job_dirpath, 'seasonalized_obs.pkl')
prior_loc_path = os.path.join(job_dirpath, 'prior_loc.pkl')
obs_loc_path = os.path.join(job_dirpath, 'obs_loc.pkl')
calibed_psm_path = os.path.join(job_dirpath, 'calibed_psm.pkl')
job.calibrate_psm(
seasonalized_prior_path=seasonalized_prior_path,
seasonalized_obs_path=seasonalized_obs_path,
prior_loc_path=prior_loc_path,
obs_loc_path=obs_loc_path,
calibed_psm_path=calibed_psm_path,
verbose=True,
)
job.forward_psm(verbose=True)
job.seasonalize_prior(verbose=True)
job.regrid_prior(verbose=True)
job.save()
print("\n======== Data Assimilation ========\n")
# Data assimilation
job.run(recon_seeds=np.arange(recon_iterations), verbose=True)
print("\n======== Preview of results ========\n")
# Preview of Results
# create the res object for reconstruction results
res = LMRt.ReconRes(job.configs['job_dirpath'], verbose=True)
# get the variables from the recon_paths
res.get_vars(['tas', 'nino3.4'], verbose=True)
if figure_type == 'map':
# plot the tas field
fig, ax = res.vars['tas'].field_list[0].plot()
fig.savefig("./map.png")
elif figure_type == 'graph':
# plot and validate the NINO3.4
from scipy.io import loadmat
data = loadmat('./data/obs/NINO34_BC09.mat')
syr, eyr = 1873, 2000
nyr = eyr-syr+1
nino34 = np.zeros(nyr)
for i in range(nyr):
nino34[i] = np.mean(data['nino34'][i*12:12+i*12])
target_series = LMRt.Series(time=np.arange(syr, eyr+1), value=nino34, label='BC09')
fig, ax = res.vars['nino3.4'].validate(target_series, verbose=True).plot(xlim=[1880, 2000])
fig.savefig("./graph.png")
else:
print("not a valid figure parameter \n")
| 2.234375
| 2
|
src/geocurrency/units/models.py
|
OpenPrunus/geocurrency
| 5
|
12779450
|
"""
Units models
"""
import logging
from datetime import date
import pint.systems
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext as _
from geocurrency.converters.models import BaseConverter, ConverterResult, \
ConverterResultDetail, ConverterResultError, ConverterLoadError
from . import UNIT_EXTENDED_DEFINITION, DIMENSIONS, \
UNIT_SYSTEM_BASE_AND_DERIVED_UNITS, \
ADDITIONAL_BASE_UNITS, PREFIX_SYMBOL
from .exceptions import UnitConverterInitError, DimensionNotFound, \
UnitSystemNotFound, UnitNotFound, \
UnitDuplicateError, UnitDimensionError, \
UnitValueError
from .settings import ADDITIONAL_UNITS, PREFIXED_UNITS_DISPLAY
class Quantity:
"""
Quantity class
"""
system = None
unit = None
value = 0
date_obj = None
def __init__(self, system: str, unit: str,
value: float, date_obj: date = None):
"""
Initialize quantity on unit system
"""
self.system = system
self.unit = unit
self.value = value
self.date_obj = date_obj
def __repr__(self):
"""
Look beautiful
"""
return f'{self.value} {self.unit} ({self.system})'
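# Illustrative sketch (assumption): Quantity is a plain value object, e.g.
#
#   q = Quantity(system='SI', unit='m', value=3.0)
#   repr(q)  # -> '3.0 m (SI)'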
class Unit:
"""
Unit mock for hinting
"""
pass
class UnitSystem:
"""
Pint UnitRegistry wrapper
"""
ureg = None
system_name = None
system = None
_additional_units = set()
def __init__(self, system_name: str = 'SI',
fmt_locale: str = 'en', user: User = None,
key: str = None):
"""
Initialize UnitSystem from name and user / key
information for loading custom units
"""
found = False
for available_system in UnitSystem.available_systems():
if system_name.lower() == available_system.lower():
system_name = available_system
found = True
if not found:
raise UnitSystemNotFound("Invalid unit system")
self.system_name = system_name
try:
additional_units_settings = settings.GEOCURRENCY_ADDITIONAL_UNITS
except AttributeError:
additional_units_settings = ADDITIONAL_UNITS
try:
self.ureg = pint.UnitRegistry(
system=system_name,
fmt_locale=fmt_locale)
self.system = getattr(self.ureg.sys, system_name)
self._load_additional_units(units=ADDITIONAL_BASE_UNITS)
self._load_additional_units(units=additional_units_settings)
if user:
self._load_custom_units(user=user, key=key)
self._rebuild_cache()
except (FileNotFoundError, AttributeError):
raise UnitSystemNotFound("Invalid unit system")
def _rebuild_cache(self):
"""
Rebuild registry cache
It should be in the define method of the registry
"""
self.ureg._build_cache()
def _load_additional_units(
self, units: dict,
redefine: bool = False) -> bool:
"""
Load additional base units in registry
"""
available_units = self.available_unit_names()
if self.system_name not in units:
logging.warning(f"error loading additional units "
f"for {self.system_name}")
return False
added_units = []
for key, items in units[self.system_name].items():
if key not in available_units:
self.ureg.define(
f"{key} = {items['relation']} = {items['symbol']}")
added_units.append(key)
elif redefine:
self.ureg.redefine(
f"{key} = {items['relation']} = {items['symbol']}")
self._additional_units = self._additional_units | set(added_units)
return True
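    # Illustrative sketch (assumption): the `units` argument is keyed by
    # unit system name, each entry mapping a unit code to its definition,
    # e.g.
    #
    #   {'SI': {'my_unit': {'relation': '3 kg / m', 'symbol': 'myu'}}}
    #
    # which yields the registry definition "my_unit = 3 kg / m = myu".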
def _load_custom_units(
self,
user: User,
key: str = None,
redefine: bool = False) -> bool:
"""
Load custom units in registry
"""
if user and user.is_authenticated:
if user.is_superuser:
qs = CustomUnit.objects.all()
else:
qs = CustomUnit.objects.filter(user=user)
if key:
qs = qs.filter(key=key)
else:
qs = CustomUnit.objects.filter(pk=-1)
qs = qs.filter(unit_system=self.system_name)
available_units = self.available_unit_names()
added_units = []
for cu in qs:
props = [cu.code, cu.relation]
if cu.symbol:
props.append(cu.symbol)
if cu.alias:
props.append(cu.alias)
definition = " = ".join(props)
if cu.code not in available_units:
self.ureg.define(definition)
added_units.append(cu.code)
elif redefine:
self.ureg.redefine(definition)
else:
logging.error(f"{cu.code} already defined in registry")
self._additional_units = self._additional_units | set(added_units)
return True
def _test_additional_units(self, units: dict) -> bool:
"""
Load and check dimensionality of ADDITIONAL_BASE_UNITS values
"""
if self.system_name not in units:
return False
for key in units[self.system_name].keys():
try:
                # accessing .dimensionality is enough to validate the unit
                _ = self.unit(key).dimensionality
except pint.errors.UndefinedUnitError:
return False
return True
def add_definition(self, code, relation, symbol, alias):
"""
Add a new unit definition to a UnitSystem, and rebuild cache
:param code: code of the unit
:param relation: relation to other units (e.g.: 3 kg/m)
:param symbol: short unit representation
:param alias: other name for unit
"""
self.ureg.define(f"{code} = {relation} = {symbol} = {alias}")
self._rebuild_cache()
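    # Illustrative sketch (assumption, hypothetical values):
    #
    #   us = UnitSystem('SI')
    #   us.add_definition(code='my_unit', relation='3 kg / m',
    #                     symbol='myu', alias='my_alias')
    #   us.unit('my_unit')  # now resolvable in the rebuilt registry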
@classmethod
def available_systems(cls) -> [str]:
"""
List of available Unit Systems
:return: Array of string
"""
ureg = pint.UnitRegistry(system='SI')
return dir(ureg.sys)
@classmethod
def is_valid(cls, system: str) -> bool:
"""
Check validity of the UnitSystem
:param system: name of the unit system
"""
us = cls()
return system in us.available_systems()
def current_system(self) -> pint.UnitRegistry:
"""
Return current pint.UnitRegistry
"""
return self.ureg
def unit(self, unit_name):
"""
        Create a Unit object in the UnitSystem
:param unit_name: name of the unit in the unit system
"""
return Unit(unit_system=self, code=unit_name)
def available_unit_names(self) -> [str]:
"""
List of available units for a given Unit system
:return: Array of names of Unit systems
"""
try:
prefixed_units_display = \
settings.GEOCURRENCY_PREFIXED_UNITS_DISPLAY
except AttributeError:
prefixed_units_display = PREFIXED_UNITS_DISPLAY
prefixed_units = []
for key, prefixes in prefixed_units_display.items():
for prefix in prefixes:
prefixed_units.append(prefix + key)
return sorted(prefixed_units +
dir(getattr(self.ureg.sys, self.system_name))
+ list(self._additional_units))
def unit_dimensionality(self, unit: str) -> str:
"""
User friendly representation of the dimension
:param unit: name of the unit to display
:return: Human readable dimension
"""
return Unit.dimensionality_string(
unit_system=self.system,
unit_str=unit)
def available_dimensions(self, ordering: str = 'name') -> {}:
"""
Return available dimensions for the UnitSystem
:param ordering: sort result by attribute
"""
descending = False
if ordering and ordering[0] == '-':
ordering = ordering[1:]
descending = True
if ordering not in ['code', 'name', 'dimension']:
ordering = 'name'
return sorted([Dimension(unit_system=self, code=dim)
for dim in DIMENSIONS.keys()],
key=lambda x: getattr(x, ordering, ''),
reverse=descending)
@property
def _ureg_dimensions(self):
"""
return dimensions with units
"""
dimensions = []
for dim in self.ureg._dimensions:
try:
if not self.ureg.get_compatible_units(dim):
continue
dimensions.append(dim)
except KeyError:
continue
return dimensions
def _get_dimension_dimensionality(self, dimension: str) -> {}:
"""
Return the dimensionality of a dimension
based on the first compatible unit
"""
try:
for dim in self.ureg.get_compatible_units(dimension):
return self.ureg.get_base_units(dim)[1]
except KeyError:
return {}
def _generate_dimension_delta_dictionnary(self) -> {}:
"""
Generate the dict to put in DIMENSIONS
"""
output = {}
for dim in self._ureg_dimensions:
if dim not in DIMENSIONS:
output[dim] = {
'name': f'_({dim})',
'dimension': str(self._get_dimension_dimensionality(dim)),
'symbol': ''
}
return output
def units_per_dimension(self, dimensions: [str]) -> {}:
"""
Return units grouped by dimension
:param dimensions: restrict list of dimensions
"""
output = {}
registry_dimensions = dimensions or DIMENSIONS.keys()
for dim in registry_dimensions:
Dimension(unit_system=self, code=dim)
try:
units = self.ureg.get_compatible_units(dim)
if units:
output[dim] = units
except KeyError:
continue
return output
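    # Example (keys are restricted to known DIMENSIONS entries; pint
    # returns a frozenset of compatible units per dimension):
    #   us.units_per_dimension(['[length]'])
    #   # -> {'[length]': frozenset({<Unit('meter')>, ...})}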
def units_per_dimensionality(self) -> {}:
"""
List of units per dimension
:return: dict of dimensions, with lists of unit strings
"""
units_array = self.available_unit_names()
output = {}
for unit_str in units_array:
dimension = Unit.dimensionality_string(self, unit_str)
try:
output[dimension].append(unit_str)
except KeyError:
output[dimension] = [unit_str]
return output
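    # Output shape sketch (keys are human readable dimensionalities):
    #   us.units_per_dimensionality()
    #   # -> {'length': ['angstrom', 'meter', ...],
    #   #     'mass': ['gram', ...], ...}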
@property
def dimensionalities(self) -> [str]:
"""
List of dimensions available in the Unit system
:return: list of dimensions for Unit system
"""
        return {Unit.dimensionality_string(self, unit_str)
                for unit_str in dir(self.system)}
class Dimension:
"""
    Dimension of a Unit
"""
unit_system = None
code = None
name = None
dimension = None
def __init__(self, unit_system: UnitSystem, code: str):
"""
Initialize a Dimension in a UnitSystem
"""
try:
dimension = DIMENSIONS[code]
self.unit_system = unit_system
self.code = code
self.name = dimension['name']
self.dimension = dimension['dimension']
except (ValueError, KeyError) as e:
logging.warning(str(e))
self.code = None
if not self.code:
raise DimensionNotFound
def __repr__(self):
"""
Look beautiful
"""
return self.code
def _prefixed_units(self, unit_names):
"""
Add prefixed units to list of units
:param unit_names: list of unit names
"""
unit_list = []
try:
prefixed_units_display = \
settings.GEOCURRENCY_PREFIXED_UNITS_DISPLAY
except AttributeError:
prefixed_units_display = PREFIXED_UNITS_DISPLAY
for unit, prefixes in prefixed_units_display.items():
if unit in unit_names:
for prefix in prefixes:
unit_list.append(
self.unit_system.unit(unit_name=prefix + unit))
return unit_list
def units(self, user=None, key=None) -> [Unit]:
"""
List of units for this dimension
:param user: optional user for custom units
:param key: optional key for custom units
"""
if self.code == '[compounded]':
return self._compounded_units
if self.code == '[custom]':
return self._custom_units(user=user, key=key)
unit_list = []
try:
unit_list.append(
self.unit_system.unit(
UNIT_SYSTEM_BASE_AND_DERIVED_UNITS[
self.unit_system.system_name][self.code]
)
)
except (KeyError, UnitNotFound):
logging.warning(f"unable to find base unit for"
f"unit system {self.unit_system.system_name}"
f" and dimension {self.code}")
try:
unit_list.extend(
[
Unit(unit_system=self.unit_system, pint_unit=unit)
for unit in
self.unit_system.ureg.get_compatible_units(self.code)
])
except KeyError:
logging.warning(f"Cannot find compatible units "
f"for this dimension {self.code}")
        unit_names = [str(u) for u in unit_list]
        # extend the unit list itself (not the name list) with the
        # prefixed variants
        unit_list.extend(self._prefixed_units(unit_names))
        # deduplicate by code before sorting by name
        unique_units = {u.code: u for u in unit_list}
        return sorted(unique_units.values(), key=lambda x: x.name)
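    # Usage sketch (assumes '[length]' is a key of DIMENSIONS):
    #   dim = Dimension(unit_system=us, code='[length]')
    #   [u.code for u in dim.units()]  # e.g. ['kilometer', 'meter', ...]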
@property
def _compounded_units(self):
"""
List units that do not belong to a dimension
"""
available_units = self.unit_system.available_unit_names()
dimensioned_units = []
for dimension_code in [d for d in DIMENSIONS.keys() if
d != '[compounded]' and d != '[custom]']:
dimension = Dimension(
unit_system=self.unit_system,
code=dimension_code)
dimensioned_units.extend([u.code for u in dimension.units()])
return [self.unit_system.unit(au)
for au in set(available_units) - set(dimensioned_units)]
def _custom_units(self, user: User, key: str = None) -> [Unit]:
"""
Return list of custom units
:param user: User owning the units
:param key: optional unit key
"""
if user and user.is_authenticated:
if user.is_superuser:
custom_units = CustomUnit.objects.all()
else:
custom_units = CustomUnit.objects.filter(user=user)
if key:
custom_units = custom_units.filter(key=key)
return [self.unit_system.unit(cu.code) for cu in custom_units]
else:
return []
@property
def base_unit(self):
"""
Base unit for this dimension in this Unit System
"""
try:
return UNIT_SYSTEM_BASE_AND_DERIVED_UNITS[
self.unit_system.system_name][self.code]
except KeyError:
logging.warning(
f'dimension {self.dimension} is not part of '
f'unit system {self.unit_system.system_name}')
return None
class Unit:
"""
Pint Unit wrapper
"""
unit_system = None
code = None
unit = None
def __init__(
self,
unit_system: UnitSystem,
code: str = '',
pint_unit: pint.Unit = None):
"""
Initialize a Unit in a UnitSystem
:param unit_system: UnitSystem instance
:param code: code of the pint.Unit
"""
self.unit_system = unit_system
if pint_unit and isinstance(pint_unit, pint.Unit):
self.code = str(pint_unit)
self.unit = pint_unit
elif code:
self.code = code
try:
self.unit = getattr(unit_system.system, code)
except pint.errors.UndefinedUnitError:
raise UnitNotFound("invalid unit for system")
else:
raise UnitNotFound("invalid unit for system")
def __repr__(self):
return self.code
@classmethod
def is_valid(cls, name: str) -> bool:
"""
Check the validity of a unit in a UnitSystem
"""
try:
us_si = UnitSystem(system_name='SI')
except UnitSystemNotFound:
return False
try:
return us_si.unit(unit_name=name) and True
        except (UnitNotFound, pint.errors.UndefinedUnitError):
return False
@property
def name(self) -> str:
"""
Return name of the unit from table of units
"""
return self.unit_name(self.code)
@property
def symbol(self) -> str:
"""
Return symbol for Unit
"""
return self.unit_symbol(self.code)
@property
def dimensions(self) -> [Dimension]:
"""
Return Dimensions of Unit
"""
dimensions = [
Dimension(unit_system=self.unit_system, code=code) for code in
DIMENSIONS.keys()
if DIMENSIONS[code]['dimension'] == str(self.dimensionality)]
        return dimensions or [
            Dimension(unit_system=self.unit_system, code='[compounded]')]
@staticmethod
def base_unit(unit_str: str) -> (str, str):
"""
Get base unit in case the unit is a prefixed unit
:param unit_str: name of unit to check
:return: base unit name, prefix
"""
prefix = ''
base_str = unit_str
try:
prefixed_units_display = \
settings.GEOCURRENCY_PREFIXED_UNITS_DISPLAY
except AttributeError:
prefixed_units_display = PREFIXED_UNITS_DISPLAY
for base, prefixes in prefixed_units_display.items():
for _prefix in prefixes:
if unit_str == _prefix + base:
prefix = _prefix
base_str = base
return base_str, prefix
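    # Example (assumes 'kilo' is configured as a prefix of 'meter' in
    # PREFIXED_UNITS_DISPLAY):
    #   Unit.base_unit('kilometer')  # -> ('meter', 'kilo')
    #   Unit.base_unit('meter')      # -> ('meter', '')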
@staticmethod
def unit_name(unit_str: str) -> str:
"""
Get translated name from unit string
:param unit_str: Name of unit
"""
base_str, prefix = Unit.base_unit(unit_str=unit_str)
try:
ext_unit = UNIT_EXTENDED_DEFINITION.get(base_str)
return prefix + str(ext_unit['name'])
except (KeyError, TypeError):
logging.error(f'No UNIT_EXTENDED_DEFINITION for unit {base_str}')
return unit_str
@staticmethod
def unit_symbol(unit_str: str) -> str:
"""
Static function to get symbol from unit string
:param unit_str: Name of unit
"""
base_str, prefix = Unit.base_unit(unit_str=unit_str)
try:
prefix_symbol = PREFIX_SYMBOL[prefix]
ext_unit = UNIT_EXTENDED_DEFINITION.get(base_str)
return prefix_symbol + ext_unit['symbol']
except (KeyError, TypeError):
logging.error(f'No UNIT_EXTENDED_DEFINITION for unit {base_str}')
return ''
@staticmethod
def dimensionality_string(unit_system: UnitSystem, unit_str: str) -> str:
"""
Converts pint dimensionality string to human readable string
:param unit_system: UnitSystem
:param unit_str: Unit name
:return: str
"""
ds = str(getattr(
unit_system.ureg, unit_str
).dimensionality).replace('[', '').replace(']', '')
ds = ds.replace(' ** ', '^')
ds = ds.split()
return ' '.join([_(d) for d in ds])
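    # Example: pint reports newton as '[length] * [mass] / [time] ** 2',
    # so after stripping brackets and rewriting exponents:
    #   Unit.dimensionality_string(us, 'newton')
    #   # -> 'length * mass / time^2' (each token run through gettext)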
@property
def dimensionality(self):
"""
Return dimensionality of a unit in Pint universe
"""
try:
return self.unit_system.ureg.get_base_units(self.code)[1]
except KeyError:
return ''
@staticmethod
def translated_name(unit_system: UnitSystem, unit_str: str) -> str:
"""
Translated name of the unit
"""
try:
return '{}'.format(unit_system.ureg[unit_str])
except KeyError:
return unit_str
@property
def readable_dimension(self):
"""
Wrapper around Unit.dimensionality_string
"""
return Unit.dimensionality_string(
unit_system=self.unit_system,
unit_str=self.code)
class UnitConverter(BaseConverter):
"""
Conversion between units
"""
base_system = None
base_unit = None
user = None
key = None
def __init__(
self,
base_system: str,
base_unit: str,
user: User = None,
            key: str = None,
id: str = None):
"""
Initialize the converter. It converts a payload into a destination unit
"""
try:
super().__init__(id=id)
self.base_system = base_system
self.base_unit = base_unit
self.user = user
self.key = key
self.system = UnitSystem(
system_name=base_system,
user=user,
key=key)
self.unit = Unit(
unit_system=self.system,
code=base_unit)
except (UnitSystemNotFound, UnitNotFound):
raise UnitConverterInitError
def add_data(self, data: []) -> []:
"""
Check data and add it to the dataset
Return list of errors
"""
errors = super().add_data(data)
return errors
def check_data(self, data):
"""
Validates that the data contains
system = str
unit = str
value = float
date_obj ('YYYY-MM-DD')
"""
from .serializers import QuantitySerializer
errors = []
for line in data:
serializer = QuantitySerializer(data=line)
if serializer.is_valid():
self.data.append(serializer.create(serializer.validated_data))
else:
errors.append(serializer.errors)
return errors
@classmethod
def load(cls,
id: str,
user: User = None,
key: str = None) -> BaseConverter:
"""
Load converter from ID
"""
try:
uc = super().load(id)
uc.system = UnitSystem(
system_name=uc.base_system,
user=user,
key=key)
uc.unit = Unit(unit_system=uc.system, code=uc.base_unit)
return uc
except (UnitSystemNotFound, UnitNotFound, KeyError) as e:
raise ConverterLoadError from e
def save(self):
"""
Save the converter to cache
"""
system = self.system
unit = self.unit
self.system = None
self.unit = None
super().save()
self.system = system
self.unit = unit
def convert(self) -> ConverterResult:
"""
Converts data to base unit in base system
"""
result = ConverterResult(id=self.id, target=self.base_unit)
q_ = self.system.ureg.Quantity
for quantity in self.data:
try:
pint_quantity = q_(quantity.value, quantity.unit)
out = pint_quantity.to(self.base_unit)
result.increment_sum(out.magnitude)
detail = ConverterResultDetail(
unit=quantity.unit,
original_value=quantity.value,
date=quantity.date_obj,
conversion_rate=0,
converted_value=out.magnitude
)
result.detail.append(detail)
except pint.UndefinedUnitError:
error = ConverterResultError(
unit=quantity.unit,
original_value=quantity.value,
date=quantity.date_obj,
error=_('Undefined unit in the registry')
)
result.errors.append(error)
except pint.DimensionalityError:
error = ConverterResultError(
unit=quantity.unit,
original_value=quantity.value,
date=quantity.date_obj,
error=_('Dimensionality error, incompatible units')
)
result.errors.append(error)
self.end_batch(result.end_batch())
return result
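    # End-to-end sketch (payload keys follow check_data(); values are
    # illustrative, and the 'sum' attribute is assumed from increment_sum):
    #   converter = UnitConverter(base_system='SI', base_unit='meter')
    #   converter.add_data([
    #       {'system': 'SI', 'unit': 'kilometer', 'value': 1.2,
    #        'date_obj': '2021-01-01'},
    #   ])
    #   result = converter.convert()
    #   result.sum  # 1200.0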
class UnitConversionPayload:
"""
Unit conversion payload
"""
data = None
base_system = ''
base_unit = ''
key = ''
batch_id = ''
eob = False
def __init__(self,
base_system: UnitSystem,
base_unit: Unit,
data=None,
key: str = None,
batch_id: str = None,
eob: bool = False):
"""
Initialize conversion payload
"""
self.data = data
self.base_system = base_system
self.base_unit = base_unit
self.key = key
self.batch_id = batch_id
self.eob = eob
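    # Construction sketch (the annotations suggest UnitSystem and Unit
    # instances, though callers may pass plain names):
    #   payload = UnitConversionPayload(
    #       base_system=us, base_unit=us.unit('meter'),
    #       data=[...], batch_id=None, eob=True)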
class CustomUnit(models.Model):
"""
Additional unit for a user
"""
AVAILABLE_SYSTEMS = (
('Planck', 'Planck'),
('SI', 'SI'),
('US', 'US'),
('atomic', 'atomic'),
('cgs', 'CGS'),
('imperial', 'imperial'),
('mks', 'mks'),
)
user = models.ForeignKey(
User,
related_name='units',
on_delete=models.PROTECT)
key = models.CharField(
"Categorization field (e.g.: customer ID)",
max_length=255, default=None, db_index=True, null=True, blank=True)
unit_system = models.CharField(
"Unit system to register the unit in", max_length=20,
choices=AVAILABLE_SYSTEMS)
code = models.SlugField("technical name of the unit (e.g.: myUnit)")
name = models.CharField(
"Human readable name (e.g.: My Unit)",
max_length=255)
relation = models.CharField(
"Relation to an existing unit (e.g.: 12 kg*m/s)", max_length=255)
symbol = models.CharField(
"Symbol to use in a formula (e.g.: myu)",
max_length=20, blank=True, null=True)
alias = models.CharField(
"Other code for this unit (e.g.: mybu)",
max_length=20, null=True, blank=True)
class Meta:
"""
Meta
"""
unique_together = ('user', 'key', 'code')
ordering = ['name', 'code']
def save(self, *args, **kwargs):
"""
Save custom unit to database
"""
us = UnitSystem(system_name=self.unit_system)
        self.code = self.code.replace('-', '_')
        # symbol and alias are nullable: only normalize them when set
        if self.symbol:
            self.symbol = self.symbol.replace('-', '_')
        if self.alias:
            self.alias = self.alias.replace('-', '_')
if self.code in us.available_unit_names():
raise UnitDuplicateError
try:
us.add_definition(
code=self.code,
relation=self.relation,
symbol=self.symbol,
alias=self.alias)
except ValueError as e:
raise UnitValueError(str(e)) from e
try:
us.unit(self.code).unit.dimensionality
except pint.errors.UndefinedUnitError:
raise UnitDimensionError
return super(CustomUnit, self).save(*args, **kwargs)
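# Creation sketch (field values are illustrative; save() first registers
# the definition with the unit system, then persists the row):
#   CustomUnit.objects.create(
#       user=some_user, unit_system='SI', code='my_unit',
#       name='My Unit', relation='12 kg*m/s', symbol='myu', alias='mybu')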
| 2.390625
| 2
|