hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7dad1f1269de17e831dca23ea74171d92ef7258b | 34,626 | py | Python | ringallreduce_simulator.py | hgao10/horovod_simulation | 3678a7d1d424931f48af4b53ef3293073af71c2e | [
"Apache-2.0"
] | null | null | null | ringallreduce_simulator.py | hgao10/horovod_simulation | 3678a7d1d424931f48af4b53ef3293073af71c2e | [
"Apache-2.0"
] | null | null | null | ringallreduce_simulator.py | hgao10/horovod_simulation | 3678a7d1d424931f48af4b53ef3293073af71c2e | [
"Apache-2.0"
] | null | null | null | import collections
import time
import heapq
from horovod_simulator_config import SimulatorConfig, SchedulingDisc
from utils.logger import get_logger
import typing
from queue import PriorityQueue
# compute iteration time from records
def compute_iteration_time(record, simulator):
    """Return the duration in ms of the first full training iteration.

    The iteration time is measured between two "FP_computation_done" events
    of the model's last layer: the one from iteration 0 (iteration start)
    and the one from iteration 1 (start of the next iteration).

    Args:
        record: mapping of event-type name -> list of simulator events;
            each event exposes ``layer``, ``iteration`` and ``time``.
        simulator: the HorovodSimulator instance the record came from
            (used for ``config.num_layers``).
    """
    logger = get_logger("compute_iteration_time", "DEBUG")
    last_layer = simulator.config.num_layers - 1
    start_time = 0
    iteration_time_ms = 0
    for event in record["FP_computation_done"]:
        # Only the final layer's FP-done event marks an iteration boundary.
        if event.layer != last_layer:
            continue
        if event.iteration == 0:
            start_time = event.time
        elif event.iteration == 1:
            iteration_time_ms = event.time - start_time
            break
    logger.debug(f'iteration_time_ms: {iteration_time_ms}')
    return iteration_time_ms
def compute_slack_time_FIFO(record, simulator):
    """Compute per-layer slack time (ms) under FIFO scheduling.

    Slack for layer i is the gap between the moment its gradients become
    available and the moment they are consumed:

        gradients available @ i = BP computation done @ i (iteration 0)
        gradients consumed  @ i = FP computation start @ i (iteration 1)
                                = FP done @ i - FP compute duration @ i

    Returns:
        dict mapping layer index -> slack in ms.
    """
    logger = get_logger("compute_slack_time_FIFO", "DEBUG")
    layers = range(simulator.config.num_layers)
    slack_per_layer_in_ms = dict.fromkeys(layers, 0)
    bp_done_timestamp = dict.fromkeys(layers, 0)
    # Pass 1: record when each layer finished back-propagation in iteration 0.
    for event in record["BP_computation_done"]:
        if event.iteration == 0:
            bp_done_timestamp[event.layer] = event.time
    # Pass 2: for iteration 1, FP start = FP done minus the layer's FP duration.
    for event in record["FP_computation_done"]:
        if event.iteration == 1:
            fp_start = event.time - simulator.fp_layers[event.layer]
            slack_per_layer_in_ms[event.layer] = fp_start - bp_done_timestamp[event.layer]
    logger.debug(f'slack_per_layer_in_ms: {slack_per_layer_in_ms}')
    return slack_per_layer_in_ms
def compute_iteration_and_slack(record, simulator):
    """Compute both the iteration time and the per-layer FIFO slack.

    Previously both results were computed only for their debug-log side
    effects and then discarded; they are now also returned so callers can
    consume them (backward-compatible: existing callers ignore the return
    value).

    Returns:
        tuple: ``(iteration_time_ms, slack_per_layer_in_ms)`` where the
        second element is a dict mapping layer index -> slack in ms.
    """
    iteration_time_ms = compute_iteration_time(record, simulator)
    slack_per_layer_in_ms = compute_slack_time_FIFO(record, simulator)
    return iteration_time_ms, slack_per_layer_in_ms
def test_run(config):
    """Run a HorovodSimulator built from *config* and report its timings."""
    simulator = HorovodSimulator(config)
    simulator.run()
    compute_iteration_and_slack(simulator.record, simulator)
compute_iteration_and_slack(horovod_simulator.record, horovod_simulator)
if __name__ == "__main__":
    # test1()
    # NOTE(review): test_ring_allreduce_fifo is not defined in this chunk —
    # presumably provided elsewhere in the module; verify before running.
    test_ring_allreduce_fifo()
| 55.313099 | 210 | 0.6591 |
7daef8b7f43d19ad4b4a4241d53911344a3bad74 | 675 | py | Python | ABNOOrchestrator/ABNOParameters.py | HPNLAB/ABNO-FUTEBOL | 3a1dbee11abd9a808d337a6bbdccba052671d33c | [
"Apache-2.0"
] | null | null | null | ABNOOrchestrator/ABNOParameters.py | HPNLAB/ABNO-FUTEBOL | 3a1dbee11abd9a808d337a6bbdccba052671d33c | [
"Apache-2.0"
] | null | null | null | ABNOOrchestrator/ABNOParameters.py | HPNLAB/ABNO-FUTEBOL | 3a1dbee11abd9a808d337a6bbdccba052671d33c | [
"Apache-2.0"
] | null | null | null | __author__ = 'alejandroaguado'
from xml.etree import ElementTree
| 35.526316 | 51 | 0.638519 |
7daf7e347025b6adafd5d8ff1bdd20e4296b68c6 | 15,533 | py | Python | gcloud/bigtable/_generated/bigtable_table_service_messages_pb2.py | scrapinghub/gcloud-python | 1ec6d636ebf2c4d618aca6b2485fbbfa5f0fde29 | [
"Apache-2.0"
] | null | null | null | gcloud/bigtable/_generated/bigtable_table_service_messages_pb2.py | scrapinghub/gcloud-python | 1ec6d636ebf2c4d618aca6b2485fbbfa5f0fde29 | [
"Apache-2.0"
] | null | null | null | gcloud/bigtable/_generated/bigtable_table_service_messages_pb2.py | scrapinghub/gcloud-python | 1ec6d636ebf2c4d618aca6b2485fbbfa5f0fde29 | [
"Apache-2.0"
] | 2 | 2017-07-30T16:18:23.000Z | 2020-10-14T11:24:18.000Z | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/bigtable/admin/table/v1/bigtable_table_service_messages.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from gcloud.bigtable._generated import bigtable_table_data_pb2 as google_dot_bigtable_dot_admin_dot_table_dot_v1_dot_bigtable__table__data__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/bigtable/admin/table/v1/bigtable_table_service_messages.proto',
package='google.bigtable.admin.table.v1',
syntax='proto3',
serialized_pb=b'\nDgoogle/bigtable/admin/table/v1/bigtable_table_service_messages.proto\x12\x1egoogle.bigtable.admin.table.v1\x1a\x38google/bigtable/admin/table/v1/bigtable_table_data.proto\"\x86\x01\n\x12\x43reateTableRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x10\n\x08table_id\x18\x02 \x01(\t\x12\x34\n\x05table\x18\x03 \x01(\x0b\x32%.google.bigtable.admin.table.v1.Table\x12\x1a\n\x12initial_split_keys\x18\x04 \x03(\t\"!\n\x11ListTablesRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"K\n\x12ListTablesResponse\x12\x35\n\x06tables\x18\x01 \x03(\x0b\x32%.google.bigtable.admin.table.v1.Table\"\x1f\n\x0fGetTableRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\"\n\x12\x44\x65leteTableRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"2\n\x12RenameTableRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06new_id\x18\x02 \x01(\t\"\x88\x01\n\x19\x43reateColumnFamilyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10\x63olumn_family_id\x18\x02 \x01(\t\x12\x43\n\rcolumn_family\x18\x03 \x01(\x0b\x32,.google.bigtable.admin.table.v1.ColumnFamily\")\n\x19\x44\x65leteColumnFamilyRequest\x12\x0c\n\x04name\x18\x01 \x01(\tBI\n\"com.google.bigtable.admin.table.v1B!BigtableTableServiceMessagesProtoP\x01\x62\x06proto3'
,
dependencies=[google_dot_bigtable_dot_admin_dot_table_dot_v1_dot_bigtable__table__data__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_CREATETABLEREQUEST = _descriptor.Descriptor(
name='CreateTableRequest',
full_name='google.bigtable.admin.table.v1.CreateTableRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.bigtable.admin.table.v1.CreateTableRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='table_id', full_name='google.bigtable.admin.table.v1.CreateTableRequest.table_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='table', full_name='google.bigtable.admin.table.v1.CreateTableRequest.table', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='initial_split_keys', full_name='google.bigtable.admin.table.v1.CreateTableRequest.initial_split_keys', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=163,
serialized_end=297,
)
_LISTTABLESREQUEST = _descriptor.Descriptor(
name='ListTablesRequest',
full_name='google.bigtable.admin.table.v1.ListTablesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.bigtable.admin.table.v1.ListTablesRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=299,
serialized_end=332,
)
_LISTTABLESRESPONSE = _descriptor.Descriptor(
name='ListTablesResponse',
full_name='google.bigtable.admin.table.v1.ListTablesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='tables', full_name='google.bigtable.admin.table.v1.ListTablesResponse.tables', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=334,
serialized_end=409,
)
_GETTABLEREQUEST = _descriptor.Descriptor(
name='GetTableRequest',
full_name='google.bigtable.admin.table.v1.GetTableRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.bigtable.admin.table.v1.GetTableRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=411,
serialized_end=442,
)
_DELETETABLEREQUEST = _descriptor.Descriptor(
name='DeleteTableRequest',
full_name='google.bigtable.admin.table.v1.DeleteTableRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.bigtable.admin.table.v1.DeleteTableRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=444,
serialized_end=478,
)
_RENAMETABLEREQUEST = _descriptor.Descriptor(
name='RenameTableRequest',
full_name='google.bigtable.admin.table.v1.RenameTableRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.bigtable.admin.table.v1.RenameTableRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='new_id', full_name='google.bigtable.admin.table.v1.RenameTableRequest.new_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=480,
serialized_end=530,
)
_CREATECOLUMNFAMILYREQUEST = _descriptor.Descriptor(
name='CreateColumnFamilyRequest',
full_name='google.bigtable.admin.table.v1.CreateColumnFamilyRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.bigtable.admin.table.v1.CreateColumnFamilyRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='column_family_id', full_name='google.bigtable.admin.table.v1.CreateColumnFamilyRequest.column_family_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='column_family', full_name='google.bigtable.admin.table.v1.CreateColumnFamilyRequest.column_family', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=533,
serialized_end=669,
)
_DELETECOLUMNFAMILYREQUEST = _descriptor.Descriptor(
name='DeleteColumnFamilyRequest',
full_name='google.bigtable.admin.table.v1.DeleteColumnFamilyRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.bigtable.admin.table.v1.DeleteColumnFamilyRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=671,
serialized_end=712,
)
_CREATETABLEREQUEST.fields_by_name['table'].message_type = google_dot_bigtable_dot_admin_dot_table_dot_v1_dot_bigtable__table__data__pb2._TABLE
_LISTTABLESRESPONSE.fields_by_name['tables'].message_type = google_dot_bigtable_dot_admin_dot_table_dot_v1_dot_bigtable__table__data__pb2._TABLE
_CREATECOLUMNFAMILYREQUEST.fields_by_name['column_family'].message_type = google_dot_bigtable_dot_admin_dot_table_dot_v1_dot_bigtable__table__data__pb2._COLUMNFAMILY
DESCRIPTOR.message_types_by_name['CreateTableRequest'] = _CREATETABLEREQUEST
DESCRIPTOR.message_types_by_name['ListTablesRequest'] = _LISTTABLESREQUEST
DESCRIPTOR.message_types_by_name['ListTablesResponse'] = _LISTTABLESRESPONSE
DESCRIPTOR.message_types_by_name['GetTableRequest'] = _GETTABLEREQUEST
DESCRIPTOR.message_types_by_name['DeleteTableRequest'] = _DELETETABLEREQUEST
DESCRIPTOR.message_types_by_name['RenameTableRequest'] = _RENAMETABLEREQUEST
DESCRIPTOR.message_types_by_name['CreateColumnFamilyRequest'] = _CREATECOLUMNFAMILYREQUEST
DESCRIPTOR.message_types_by_name['DeleteColumnFamilyRequest'] = _DELETECOLUMNFAMILYREQUEST
CreateTableRequest = _reflection.GeneratedProtocolMessageType('CreateTableRequest', (_message.Message,), dict(
DESCRIPTOR = _CREATETABLEREQUEST,
__module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.CreateTableRequest)
))
_sym_db.RegisterMessage(CreateTableRequest)
ListTablesRequest = _reflection.GeneratedProtocolMessageType('ListTablesRequest', (_message.Message,), dict(
DESCRIPTOR = _LISTTABLESREQUEST,
__module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.ListTablesRequest)
))
_sym_db.RegisterMessage(ListTablesRequest)
ListTablesResponse = _reflection.GeneratedProtocolMessageType('ListTablesResponse', (_message.Message,), dict(
DESCRIPTOR = _LISTTABLESRESPONSE,
__module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.ListTablesResponse)
))
_sym_db.RegisterMessage(ListTablesResponse)
GetTableRequest = _reflection.GeneratedProtocolMessageType('GetTableRequest', (_message.Message,), dict(
DESCRIPTOR = _GETTABLEREQUEST,
__module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.GetTableRequest)
))
_sym_db.RegisterMessage(GetTableRequest)
DeleteTableRequest = _reflection.GeneratedProtocolMessageType('DeleteTableRequest', (_message.Message,), dict(
DESCRIPTOR = _DELETETABLEREQUEST,
__module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.DeleteTableRequest)
))
_sym_db.RegisterMessage(DeleteTableRequest)
RenameTableRequest = _reflection.GeneratedProtocolMessageType('RenameTableRequest', (_message.Message,), dict(
DESCRIPTOR = _RENAMETABLEREQUEST,
__module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.RenameTableRequest)
))
_sym_db.RegisterMessage(RenameTableRequest)
CreateColumnFamilyRequest = _reflection.GeneratedProtocolMessageType('CreateColumnFamilyRequest', (_message.Message,), dict(
DESCRIPTOR = _CREATECOLUMNFAMILYREQUEST,
__module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.CreateColumnFamilyRequest)
))
_sym_db.RegisterMessage(CreateColumnFamilyRequest)
DeleteColumnFamilyRequest = _reflection.GeneratedProtocolMessageType('DeleteColumnFamilyRequest', (_message.Message,), dict(
DESCRIPTOR = _DELETECOLUMNFAMILYREQUEST,
__module__ = 'google.bigtable.admin.table.v1.bigtable_table_service_messages_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.DeleteColumnFamilyRequest)
))
_sym_db.RegisterMessage(DeleteColumnFamilyRequest)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\"com.google.bigtable.admin.table.v1B!BigtableTableServiceMessagesProtoP\001')
import abc
from grpc.beta import implementations as beta_implementations
from grpc.early_adopter import implementations as early_adopter_implementations
from grpc.framework.alpha import utilities as alpha_utilities
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
# @@protoc_insertion_point(module_scope)
| 39.224747 | 1,208 | 0.771905 |
7dafc11fd8fb86ab44db99cb63fe8f3a5c118843 | 277 | py | Python | influencer-detection/src/api/influencers/api/v1.py | luisblazquezm/influencer-detection | bd8aec83cbd8e5fbb3231824b5e274c47f491501 | [
"Apache-2.0"
] | 4 | 2021-05-22T16:33:41.000Z | 2021-11-22T23:44:40.000Z | influencer-detection/src/api/influencers/api/v1.py | Alburrito/influencer-detection | bd8aec83cbd8e5fbb3231824b5e274c47f491501 | [
"Apache-2.0"
] | null | null | null | influencer-detection/src/api/influencers/api/v1.py | Alburrito/influencer-detection | bd8aec83cbd8e5fbb3231824b5e274c47f491501 | [
"Apache-2.0"
] | 2 | 2021-05-21T16:34:14.000Z | 2021-09-29T12:59:49.000Z | #!flask/bin/python
# Copyright 2021 Luis Blazquez Miambres (@luisblazquezm)
# See LICENSE for details.
from flask_restx import Api
api = Api(version='1.0',
title='Influencer Detection Project',
description="**PORBI Influencer Detection project's Flask RESTX API**") | 27.7 | 75 | 0.747292 |
7db08f8c76f09b843e5f713087e6a5fa445f6755 | 7,543 | py | Python | core/models/sparse_bp_cnn.py | JeremieMelo/L2ight | 67f93b66ddf8bb5a365834b84ed6acdbc4f48eaf | [
"MIT"
] | 7 | 2021-11-02T16:21:47.000Z | 2022-03-09T06:01:25.000Z | core/models/sparse_bp_cnn.py | JeremieMelo/L2ight | 67f93b66ddf8bb5a365834b84ed6acdbc4f48eaf | [
"MIT"
] | null | null | null | core/models/sparse_bp_cnn.py | JeremieMelo/L2ight | 67f93b66ddf8bb5a365834b84ed6acdbc4f48eaf | [
"MIT"
] | null | null | null | '''
Description:
Author: Jiaqi Gu (jqgu@utexas.edu)
Date: 2021-10-24 16:23:50
LastEditors: Jiaqi Gu (jqgu@utexas.edu)
LastEditTime: 2021-10-24 16:23:50
'''
from collections import OrderedDict
from typing import Callable, Dict, List, Optional, Tuple, Union
import numpy as np
import torch
import torch.nn.functional as F
from pyutils.general import logger
from torch import Tensor, nn
from torch.types import Device, _size
from .layers.activation import ReLUN
from .layers.custom_conv2d import MZIBlockConv2d
from .layers.custom_linear import MZIBlockLinear
from .sparse_bp_base import SparseBP_Base
__all__ = ["SparseBP_MZI_CNN"]
| 31.298755 | 113 | 0.56158 |
7db09573c7b7fdf192db6f472bcb81e9222dc388 | 9,017 | py | Python | lite/demo/python/mobilenetv1_full_api.py | 714627034/Paddle-Lite | 015ba88a4d639db0b73603e37f83e47be041a4eb | [
"Apache-2.0"
] | 3 | 2021-06-17T11:00:13.000Z | 2021-08-10T10:28:59.000Z | lite/demo/python/mobilenetv1_full_api.py | 714627034/Paddle-Lite | 015ba88a4d639db0b73603e37f83e47be041a4eb | [
"Apache-2.0"
] | null | null | null | lite/demo/python/mobilenetv1_full_api.py | 714627034/Paddle-Lite | 015ba88a4d639db0b73603e37f83e47be041a4eb | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Paddle-Lite full python api demo
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
from paddlelite.lite import *
import numpy as np
import platform
# Command arguments
parser = argparse.ArgumentParser()
parser.add_argument(
"--model_dir", default="", type=str, help="Non-combined Model dir path")
parser.add_argument("--model_file", default="", type=str, help="Model file")
parser.add_argument(
"--param_file", default="", type=str, help="Combined model param file")
parser.add_argument(
"--input_shape",
default=[1, 3, 224, 224],
nargs='+',
type=int,
required=False,
help="Model input shape, eg: 1 3 224 224. Defalut: 1 3 224 224")
parser.add_argument(
"--backend",
default="",
type=str,
help="To use a particular backend for execution. Should be one of: arm|opencl|x86|x86_opencl|metal|nnadapter"
)
parser.add_argument(
"--image_path", default="", type=str, help="The path of test image file")
parser.add_argument(
"--label_path", default="", type=str, help="The path of label file")
parser.add_argument(
"--print_results",
type=bool,
default=False,
help="Print results. Default: False")
parser.add_argument(
"--nnadapter_device_names",
default="",
type=str,
help="Set nnadapter device names")
parser.add_argument(
"--nnadapter_context_properties",
default="",
type=str,
help="Set nnadapter context properties")
parser.add_argument(
"--nnadapter_model_cache_dir",
default="",
type=str,
help="Set nnadapter model cache dir")
parser.add_argument(
"--nnadapter_subgraph_partition_config_path",
default="",
type=str,
help="Set nnadapter subgraph partition config path")
parser.add_argument(
"--nnadapter_mixed_precision_quantization_config_path",
default="",
type=str,
help="Set nnadapter mixed precision quantization config path")
if __name__ == '__main__':
args = parser.parse_args()
RunModel(args)
| 38.046414 | 113 | 0.663303 |
7db0ea31fec60827935b4f0a8b82bcb2927b7b8f | 2,218 | py | Python | discord_ui/errors.py | brotherelric/discord-ui | 6f8cd994aff66fb51f385d23907b3a612546cbe9 | [
"MIT"
] | 26 | 2021-08-21T15:34:13.000Z | 2022-03-26T13:50:48.000Z | discord_ui/errors.py | brotherelric/discord-ui | 6f8cd994aff66fb51f385d23907b3a612546cbe9 | [
"MIT"
] | 12 | 2021-08-21T15:24:00.000Z | 2022-02-25T02:49:01.000Z | discord_ui/errors.py | brotherelric/discord-ui | 6f8cd994aff66fb51f385d23907b3a612546cbe9 | [
"MIT"
] | 10 | 2021-09-03T12:31:13.000Z | 2022-03-29T06:24:06.000Z | from discord.ext.commands import BadArgument | 69.3125 | 171 | 0.656898 |
7db12d202da616ad47115b53db18fc8f8d4df1a8 | 178 | py | Python | Python/bot_2.py | maurovasconcelos/Ola-Mundo | 526c6c271fbe916c4f9f22153828e4d8c726a544 | [
"MIT"
] | 1 | 2021-02-16T17:36:53.000Z | 2021-02-16T17:36:53.000Z | Python/bot_2.py | maurovasconcelos/Ola-Mundo | 526c6c271fbe916c4f9f22153828e4d8c726a544 | [
"MIT"
] | null | null | null | Python/bot_2.py | maurovasconcelos/Ola-Mundo | 526c6c271fbe916c4f9f22153828e4d8c726a544 | [
"MIT"
] | null | null | null | from selenium import webdriver
navegador = webdriver.Chrome()
navegador.get("https://webstatic-sea.mihoyo.com/ys/event/signin-sea/index.html?act_id=e202102251931481&lang=pt-pt") | 44.5 | 115 | 0.808989 |
7db1db7e3a4320d312306a5a9f6265803704a70b | 18,177 | py | Python | vendor/istio.io/api/python/istio_api/envoy/config/filter/http/jwt_auth/v2alpha1/config_pb2.py | PinZhang/istio | dce455456d77ca5af34ba5848f9704577349c6bd | [
"Apache-2.0"
] | 794 | 2018-07-12T06:08:10.000Z | 2019-05-12T20:06:00.000Z | vendor/istio.io/api/python/istio_api/envoy/config/filter/http/jwt_auth/v2alpha1/config_pb2.py | PinZhang/istio | dce455456d77ca5af34ba5848f9704577349c6bd | [
"Apache-2.0"
] | 25 | 2018-07-20T08:53:42.000Z | 2019-05-05T06:23:11.000Z | vendor/istio.io/api/python/istio_api/envoy/config/filter/http/jwt_auth/v2alpha1/config_pb2.py | PinZhang/istio | dce455456d77ca5af34ba5848f9704577349c6bd | [
"Apache-2.0"
] | 116 | 2018-07-12T15:24:17.000Z | 2019-05-10T10:11:32.000Z | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: envoy/config/filter/http/jwt_auth/v2alpha1/config.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='envoy/config/filter/http/jwt_auth/v2alpha1/config.proto',
package='istio.envoy.config.filter.http.jwt_auth.v2alpha1',
syntax='proto3',
serialized_pb=_b('\n7envoy/config/filter/http/jwt_auth/v2alpha1/config.proto\x12\x30istio.envoy.config.filter.http.jwt_auth.v2alpha1\x1a\x1egoogle/protobuf/duration.proto\"k\n\x07HttpUri\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12\x11\n\x07\x63luster\x18\x02 \x01(\tH\x00\x12*\n\x07timeout\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x14\n\x12http_upstream_type\"^\n\nDataSource\x12\x12\n\x08\x66ilename\x18\x01 \x01(\tH\x00\x12\x16\n\x0cinline_bytes\x18\x02 \x01(\x0cH\x00\x12\x17\n\rinline_string\x18\x03 \x01(\tH\x00\x42\x0b\n\tspecifier\"\x87\x03\n\x07JwtRule\x12\x0e\n\x06issuer\x18\x01 \x01(\t\x12\x11\n\taudiences\x18\x02 \x03(\t\x12S\n\x0bremote_jwks\x18\x03 \x01(\x0b\x32<.istio.envoy.config.filter.http.jwt_auth.v2alpha1.RemoteJwksH\x00\x12R\n\nlocal_jwks\x18\x04 \x01(\x0b\x32<.istio.envoy.config.filter.http.jwt_auth.v2alpha1.DataSourceH\x00\x12\x0f\n\x07\x66orward\x18\x05 \x01(\x08\x12Q\n\x0c\x66rom_headers\x18\x06 \x03(\x0b\x32;.istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtHeader\x12\x13\n\x0b\x66rom_params\x18\x07 \x03(\t\x12\x1e\n\x16\x66orward_payload_header\x18\x08 \x01(\tB\x17\n\x15jwks_source_specifier\"\x8c\x01\n\nRemoteJwks\x12K\n\x08http_uri\x18\x01 \x01(\x0b\x32\x39.istio.envoy.config.filter.http.jwt_auth.v2alpha1.HttpUri\x12\x31\n\x0e\x63\x61\x63he_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\"/\n\tJwtHeader\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0cvalue_prefix\x18\x02 \x01(\t\"~\n\x11JwtAuthentication\x12H\n\x05rules\x18\x01 \x03(\x0b\x32\x39.istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtRule\x12\x1f\n\x17\x61llow_missing_or_failed\x18\x02 \x01(\x08\x42\x39Z7istio.io/api/envoy/config/filter/http/jwt_auth/v2alpha1b\x06proto3')
,
dependencies=[google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,])
_HTTPURI = _descriptor.Descriptor(
name='HttpUri',
full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.HttpUri',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='uri', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.HttpUri.uri', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cluster', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.HttpUri.cluster', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='timeout', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.HttpUri.timeout', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='http_upstream_type', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.HttpUri.http_upstream_type',
index=0, containing_type=None, fields=[]),
],
serialized_start=141,
serialized_end=248,
)
_DATASOURCE = _descriptor.Descriptor(
name='DataSource',
full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.DataSource',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='filename', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.DataSource.filename', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='inline_bytes', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.DataSource.inline_bytes', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='inline_string', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.DataSource.inline_string', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='specifier', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.DataSource.specifier',
index=0, containing_type=None, fields=[]),
],
serialized_start=250,
serialized_end=344,
)
_JWTRULE = _descriptor.Descriptor(
name='JwtRule',
full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtRule',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='issuer', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtRule.issuer', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='audiences', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtRule.audiences', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='remote_jwks', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtRule.remote_jwks', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='local_jwks', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtRule.local_jwks', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='forward', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtRule.forward', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='from_headers', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtRule.from_headers', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='from_params', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtRule.from_params', index=6,
number=7, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='forward_payload_header', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtRule.forward_payload_header', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='jwks_source_specifier', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtRule.jwks_source_specifier',
index=0, containing_type=None, fields=[]),
],
serialized_start=347,
serialized_end=738,
)
_REMOTEJWKS = _descriptor.Descriptor(
name='RemoteJwks',
full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.RemoteJwks',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='http_uri', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.RemoteJwks.http_uri', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cache_duration', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.RemoteJwks.cache_duration', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=741,
serialized_end=881,
)
_JWTHEADER = _descriptor.Descriptor(
name='JwtHeader',
full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtHeader',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtHeader.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value_prefix', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtHeader.value_prefix', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=883,
serialized_end=930,
)
_JWTAUTHENTICATION = _descriptor.Descriptor(
name='JwtAuthentication',
full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtAuthentication',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='rules', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtAuthentication.rules', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='allow_missing_or_failed', full_name='istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtAuthentication.allow_missing_or_failed', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=932,
serialized_end=1058,
)
_HTTPURI.fields_by_name['timeout'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
_HTTPURI.oneofs_by_name['http_upstream_type'].fields.append(
_HTTPURI.fields_by_name['cluster'])
_HTTPURI.fields_by_name['cluster'].containing_oneof = _HTTPURI.oneofs_by_name['http_upstream_type']
_DATASOURCE.oneofs_by_name['specifier'].fields.append(
_DATASOURCE.fields_by_name['filename'])
_DATASOURCE.fields_by_name['filename'].containing_oneof = _DATASOURCE.oneofs_by_name['specifier']
_DATASOURCE.oneofs_by_name['specifier'].fields.append(
_DATASOURCE.fields_by_name['inline_bytes'])
_DATASOURCE.fields_by_name['inline_bytes'].containing_oneof = _DATASOURCE.oneofs_by_name['specifier']
_DATASOURCE.oneofs_by_name['specifier'].fields.append(
_DATASOURCE.fields_by_name['inline_string'])
_DATASOURCE.fields_by_name['inline_string'].containing_oneof = _DATASOURCE.oneofs_by_name['specifier']
_JWTRULE.fields_by_name['remote_jwks'].message_type = _REMOTEJWKS
_JWTRULE.fields_by_name['local_jwks'].message_type = _DATASOURCE
_JWTRULE.fields_by_name['from_headers'].message_type = _JWTHEADER
_JWTRULE.oneofs_by_name['jwks_source_specifier'].fields.append(
_JWTRULE.fields_by_name['remote_jwks'])
_JWTRULE.fields_by_name['remote_jwks'].containing_oneof = _JWTRULE.oneofs_by_name['jwks_source_specifier']
_JWTRULE.oneofs_by_name['jwks_source_specifier'].fields.append(
_JWTRULE.fields_by_name['local_jwks'])
_JWTRULE.fields_by_name['local_jwks'].containing_oneof = _JWTRULE.oneofs_by_name['jwks_source_specifier']
_REMOTEJWKS.fields_by_name['http_uri'].message_type = _HTTPURI
_REMOTEJWKS.fields_by_name['cache_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
_JWTAUTHENTICATION.fields_by_name['rules'].message_type = _JWTRULE
DESCRIPTOR.message_types_by_name['HttpUri'] = _HTTPURI
DESCRIPTOR.message_types_by_name['DataSource'] = _DATASOURCE
DESCRIPTOR.message_types_by_name['JwtRule'] = _JWTRULE
DESCRIPTOR.message_types_by_name['RemoteJwks'] = _REMOTEJWKS
DESCRIPTOR.message_types_by_name['JwtHeader'] = _JWTHEADER
DESCRIPTOR.message_types_by_name['JwtAuthentication'] = _JWTAUTHENTICATION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
HttpUri = _reflection.GeneratedProtocolMessageType('HttpUri', (_message.Message,), dict(
DESCRIPTOR = _HTTPURI,
__module__ = 'envoy.config.filter.http.jwt_auth.v2alpha1.config_pb2'
# @@protoc_insertion_point(class_scope:istio.envoy.config.filter.http.jwt_auth.v2alpha1.HttpUri)
))
_sym_db.RegisterMessage(HttpUri)
DataSource = _reflection.GeneratedProtocolMessageType('DataSource', (_message.Message,), dict(
DESCRIPTOR = _DATASOURCE,
__module__ = 'envoy.config.filter.http.jwt_auth.v2alpha1.config_pb2'
# @@protoc_insertion_point(class_scope:istio.envoy.config.filter.http.jwt_auth.v2alpha1.DataSource)
))
_sym_db.RegisterMessage(DataSource)
JwtRule = _reflection.GeneratedProtocolMessageType('JwtRule', (_message.Message,), dict(
DESCRIPTOR = _JWTRULE,
__module__ = 'envoy.config.filter.http.jwt_auth.v2alpha1.config_pb2'
# @@protoc_insertion_point(class_scope:istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtRule)
))
_sym_db.RegisterMessage(JwtRule)
RemoteJwks = _reflection.GeneratedProtocolMessageType('RemoteJwks', (_message.Message,), dict(
DESCRIPTOR = _REMOTEJWKS,
__module__ = 'envoy.config.filter.http.jwt_auth.v2alpha1.config_pb2'
# @@protoc_insertion_point(class_scope:istio.envoy.config.filter.http.jwt_auth.v2alpha1.RemoteJwks)
))
_sym_db.RegisterMessage(RemoteJwks)
JwtHeader = _reflection.GeneratedProtocolMessageType('JwtHeader', (_message.Message,), dict(
DESCRIPTOR = _JWTHEADER,
__module__ = 'envoy.config.filter.http.jwt_auth.v2alpha1.config_pb2'
# @@protoc_insertion_point(class_scope:istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtHeader)
))
_sym_db.RegisterMessage(JwtHeader)
JwtAuthentication = _reflection.GeneratedProtocolMessageType('JwtAuthentication', (_message.Message,), dict(
DESCRIPTOR = _JWTAUTHENTICATION,
__module__ = 'envoy.config.filter.http.jwt_auth.v2alpha1.config_pb2'
# @@protoc_insertion_point(class_scope:istio.envoy.config.filter.http.jwt_auth.v2alpha1.JwtAuthentication)
))
_sym_db.RegisterMessage(JwtAuthentication)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('Z7istio.io/api/envoy/config/filter/http/jwt_auth/v2alpha1'))
# @@protoc_insertion_point(module_scope)
| 45.329177 | 1,704 | 0.760301 |
7db2d15a3db81041f88feba1273d33752a9d0183 | 1,730 | py | Python | filestream.py | ziyua/filestream | b79e9dc550d39c6bd5685eb0311f11d3a63537d9 | [
"Apache-2.0"
] | null | null | null | filestream.py | ziyua/filestream | b79e9dc550d39c6bd5685eb0311f11d3a63537d9 | [
"Apache-2.0"
] | null | null | null | filestream.py | ziyua/filestream | b79e9dc550d39c6bd5685eb0311f11d3a63537d9 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# -*- coding: gb2312 -*-
import fileinput
import os
if __name__ == '__main__':
fs = FileStream('1.txt', 1024)
print fs.cuttimes()
print fs.lastsize()
while 1:
fby = fs.getstream()
if fby is not None:
print '--------'
print fby, len(fby)
else:
break
| 25.441176 | 54 | 0.514451 |
7db33bbd439b5c7268b5e21a4ea3bb5bcb4b092b | 3,148 | py | Python | libvirt_vm_optimizer/util/arg_parser.py | atiratree/libvirt-vm-optimizer | a022391ea86e3609e3b9c01fc2b84279939a26ab | [
"MIT"
] | 1 | 2019-01-16T18:59:59.000Z | 2019-01-16T18:59:59.000Z | libvirt_vm_optimizer/util/arg_parser.py | suomiy/libvirt-vm-optimizer | a022391ea86e3609e3b9c01fc2b84279939a26ab | [
"MIT"
] | null | null | null | libvirt_vm_optimizer/util/arg_parser.py | suomiy/libvirt-vm-optimizer | a022391ea86e3609e3b9c01fc2b84279939a26ab | [
"MIT"
] | 1 | 2021-05-04T00:06:12.000Z | 2021-05-04T00:06:12.000Z | import argparse
from argparse import ArgumentError
from libvirt_vm_optimizer.util.utils import Profile
| 39.848101 | 126 | 0.542567 |
7db3b96495442c5054ba6d121c6a02f0d28e7612 | 8,323 | py | Python | lib_bgp_data/collectors/mrt/mrt_base/mrt_file.py | jfuruness/lib_bgp_data | 25f7d57b9e2101c7aefb325e8d728bd91f47d557 | [
"BSD-3-Clause"
] | 16 | 2018-09-24T05:10:03.000Z | 2021-11-29T19:18:59.000Z | lib_bgp_data/collectors/mrt/mrt_base/mrt_file.py | jfuruness/lib_bgp_data | 25f7d57b9e2101c7aefb325e8d728bd91f47d557 | [
"BSD-3-Clause"
] | 4 | 2019-10-09T18:54:17.000Z | 2021-03-05T14:02:50.000Z | lib_bgp_data/collectors/mrt/mrt_base/mrt_file.py | jfuruness/lib_bgp_data | 25f7d57b9e2101c7aefb325e8d728bd91f47d557 | [
"BSD-3-Clause"
] | 3 | 2018-09-17T17:35:18.000Z | 2020-03-24T16:03:31.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""This module contains class MRT_File.
The MRT_File class contains the functionality to load and parse
mrt files. This is done through a series of steps, detailed in README.
"""
__authors__ = ["Justin Furuness", "Matt Jaccino"]
__credits__ = ["Justin Furuness", "Matt Jaccino", "Cameron Morris"]
__Lisence__ = "BSD"
__maintainer__ = "Justin Furuness"
__email__ = "jfuruness@gmail.com"
__status__ = "Production"
import os
import logging
from .tables import MRT_Announcements_Table
from ....utils import utils
from ....utils.base_classes import File
| 42.464286 | 78 | 0.622252 |
7db66263d9d342b5a826306669c5a5214abeb3e7 | 1,611 | py | Python | demo.py | mhy12345/rcaudio | 90fcc3c2d2586905c7f35ea5c2ac6b2c2cf70029 | [
"MIT"
] | 31 | 2018-09-27T03:35:06.000Z | 2022-01-11T09:49:26.000Z | demo.py | yeashen/rcaudio | 90fcc3c2d2586905c7f35ea5c2ac6b2c2cf70029 | [
"MIT"
] | 3 | 2018-11-20T07:49:24.000Z | 2021-01-06T11:48:41.000Z | demo.py | yeashen/rcaudio | 90fcc3c2d2586905c7f35ea5c2ac6b2c2cf70029 | [
"MIT"
] | 7 | 2019-04-23T06:32:23.000Z | 2020-09-25T14:18:32.000Z | from rcaudio import *
import time
import logging
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s')
demo2()
| 22.690141 | 85 | 0.556797 |
7db6acccc13d73c452c9d80805e389c51f138158 | 346 | py | Python | Backend/linux.py | TheInvincibleLearner/simranquirky.github.io | 21a2524b321493b9ff82eb8b4fcc10af8f8face7 | [
"MIT"
] | null | null | null | Backend/linux.py | TheInvincibleLearner/simranquirky.github.io | 21a2524b321493b9ff82eb8b4fcc10af8f8face7 | [
"MIT"
] | 10 | 2021-09-29T13:25:21.000Z | 2021-10-05T13:51:36.000Z | Backend/linux.py | TheInvincibleLearner/simranquirky.github.io | 21a2524b321493b9ff82eb8b4fcc10af8f8face7 | [
"MIT"
] | 7 | 2021-09-22T13:26:35.000Z | 2021-10-05T03:07:43.000Z | #!/usr/bin/python3
print("content-type: text/html")
print()
import subprocess as sp
import cgi
fs = cgi.FieldStorage()
cmd = fs.getvalue("command")
output = sp.getoutput("sudo "+cmd)
print("<body style='padding: 40px;'>")
print('<h1 style="color:#df405a;" >Output</h1>')
print("<pre>{}</pre>".format(output))
print("</body>")
| 20.352941 | 49 | 0.635838 |
7db6de9a9058b5930c41c0b6f46d74899e0a554e | 1,334 | py | Python | tests/test_swagger_registry.py | niall-byrne/flask-restful-swagger | 4ce4284627f27e1e8d58ff922abbefe9e7fd8c21 | [
"MIT"
] | 667 | 2015-01-05T02:11:09.000Z | 2022-03-30T02:01:04.000Z | tests/test_swagger_registry.py | Deepstatsanalysis/flask-restful-swagger | 4ce4284627f27e1e8d58ff922abbefe9e7fd8c21 | [
"MIT"
] | 83 | 2015-01-05T19:39:23.000Z | 2021-11-22T16:39:52.000Z | tests/test_swagger_registry.py | Deepstatsanalysis/flask-restful-swagger | 4ce4284627f27e1e8d58ff922abbefe9e7fd8c21 | [
"MIT"
] | 184 | 2015-01-05T19:20:23.000Z | 2022-03-21T10:32:34.000Z | from flask import Flask
from flask_restful_swagger.swagger import SwaggerRegistry
try:
from unittest.mock import patch
except ImportError:
from mock import patch
| 26.68 | 67 | 0.676912 |
7db77614b73b30faa1f8658a19a8d335313caf9b | 1,921 | py | Python | gcp_census/bigquery/bigquery_handler.py | ocadotechnology/gcp-census | 6ce7c55a798efd83c07b9677081d26bb6113e2ed | [
"Apache-2.0"
] | 40 | 2017-05-18T12:39:11.000Z | 2021-12-02T11:24:18.000Z | gcp_census/bigquery/bigquery_handler.py | ocadotechnology/gcp-census | 6ce7c55a798efd83c07b9677081d26bb6113e2ed | [
"Apache-2.0"
] | 35 | 2017-05-18T12:41:36.000Z | 2019-11-15T10:06:19.000Z | gcp_census/bigquery/bigquery_handler.py | ocadotechnology/gcp-census | 6ce7c55a798efd83c07b9677081d26bb6113e2ed | [
"Apache-2.0"
] | 7 | 2018-01-18T12:39:51.000Z | 2018-08-16T09:17:00.000Z | import logging
import webapp2
from googleapiclient.errors import HttpError
from gcp_census.bigquery.bigquery_client import BigQuery
from gcp_census.bigquery.bigquery_task import BigQueryTask
| 36.942308 | 80 | 0.660073 |
7db8db74363fb05b1c46621fca683280e13e4190 | 67 | py | Python | Solutions/Python/Posix command(7 kyu).py | collenirwin/Codewars-Solutions | 14bad3878d3fc37c7e73cbaaaa24cd28f759ce3b | [
"MIT"
] | null | null | null | Solutions/Python/Posix command(7 kyu).py | collenirwin/Codewars-Solutions | 14bad3878d3fc37c7e73cbaaaa24cd28f759ce3b | [
"MIT"
] | null | null | null | Solutions/Python/Posix command(7 kyu).py | collenirwin/Codewars-Solutions | 14bad3878d3fc37c7e73cbaaaa24cd28f759ce3b | [
"MIT"
] | null | null | null | from os import popen | 16.75 | 26 | 0.701493 |
7db9356e6b7de8c8a7ac0cabb607897d76784d53 | 3,056 | py | Python | resources/src/gcp_iam_service_account.py | kfirz/deployster | b95fdb9cf150eee765f7ef3dbdee3666119e76f9 | [
"Apache-2.0"
] | null | null | null | resources/src/gcp_iam_service_account.py | kfirz/deployster | b95fdb9cf150eee765f7ef3dbdee3666119e76f9 | [
"Apache-2.0"
] | 19 | 2017-12-28T19:39:37.000Z | 2018-04-18T23:24:45.000Z | resources/src/gcp_iam_service_account.py | kfirz/deployster | b95fdb9cf150eee765f7ef3dbdee3666119e76f9 | [
"Apache-2.0"
] | 1 | 2018-04-06T16:50:49.000Z | 2018-04-06T16:50:49.000Z | #!/usr/bin/env python3.6
import argparse
import json
import sys
from typing import Sequence, MutableSequence
from dresources import DAction, action
from external_services import ExternalServices
from gcp import GcpResource
if __name__ == "__main__":
main()
| 39.688312 | 108 | 0.620419 |
7dbac9eb3255daf0f5135503edb3305af6da290c | 806 | py | Python | tests/consumtodb_test.py | thomas-for-aiven/monitor | bd712fed77c3b3cea3e1dd0f99318043ff3dc166 | [
"MIT"
] | null | null | null | tests/consumtodb_test.py | thomas-for-aiven/monitor | bd712fed77c3b3cea3e1dd0f99318043ff3dc166 | [
"MIT"
] | null | null | null | tests/consumtodb_test.py | thomas-for-aiven/monitor | bd712fed77c3b3cea3e1dd0f99318043ff3dc166 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import pytest
import monitor.monitorshared as m
import monitor.consumtodb as con
def test_db_connection(tmpdir):
"test postgres connection"
conf = m.Configuration('configx.ini', "test")
# in case the field is empty
if conf.db_host == '':
pytest.skip("no broker configured in config.ini")
db_handle = con.connect_db(conf)
# function will fail if cannot connect
assert db_handle
| 23.028571 | 57 | 0.691067 |
7dbc7331779b26c50f838cb805bfffb5e23cfa30 | 542 | py | Python | pytorch3dunet/unet3d/config.py | VolkerH/pytorch-3dunet | 01ee7d53ef1c8edb2bd45d76faf7df447144fb67 | [
"MIT"
] | null | null | null | pytorch3dunet/unet3d/config.py | VolkerH/pytorch-3dunet | 01ee7d53ef1c8edb2bd45d76faf7df447144fb67 | [
"MIT"
] | null | null | null | pytorch3dunet/unet3d/config.py | VolkerH/pytorch-3dunet | 01ee7d53ef1c8edb2bd45d76faf7df447144fb67 | [
"MIT"
] | null | null | null | import argparse
import torch
import yaml
| 27.1 | 97 | 0.714022 |
7dbdd82b44af43747c667e7503af84473c437bc0 | 4,630 | py | Python | Code/chatbot.py | pavithra-b-reddy/Chatbot-CS310 | 0cae72c974272d00ee5db3c980f48c0dbfa16e2b | [
"MIT"
] | null | null | null | Code/chatbot.py | pavithra-b-reddy/Chatbot-CS310 | 0cae72c974272d00ee5db3c980f48c0dbfa16e2b | [
"MIT"
] | null | null | null | Code/chatbot.py | pavithra-b-reddy/Chatbot-CS310 | 0cae72c974272d00ee5db3c980f48c0dbfa16e2b | [
"MIT"
] | null | null | null | # This codes are referenced from the Github repo (https://github.com/parulnith/Building-a-Simple-Chatbot-in-Python-using-NLTK/blob/master/chatbot.py)
# Loading the required packages
import nltk
import random
import string
import warnings
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from textblob import TextBlob
# Setup
warnings.filterwarnings('ignore') # Ignore warning messages
f = open('corpus_linguistics.txt', 'r') # opening the corpus
text = f.read() # reading the corpus
# Convert all text from corpus to lower case
text = text.lower()
# Perform tokenization
sent_tokens = nltk.sent_tokenize(text)
word_tokens = nltk.word_tokenize(text)
# Initialize set of greetings and responses
user_greetings = ["hi", "hello", "good morning", "hey", "what's up"]
bot_greetings = ["Hello, how may I be of assistance?"]
user_gratitude = ["thank you", "thanks", "that was helpful"]
bot_gratitude = ["You're welcome! Is there anything else you need?",
"Happy to help! Are there other questions that I could help "
"with?"]
bot_exit_text = ["Thank you for using my services. Have a great day!",
"Hope I was helpful. See you later :)", "Bye!"]
languages = {"en": "English", "fr": "French", "es": "Spanish",
"la": "Latin"}
# Text Preprocessing
lemmatizer = nltk.stem.WordNetLemmatizer() # Text Lemmatization
# Function to perform lemmatization
remove_punct_dict = dict((ord(punct), None) for punct in string.punctuation)
# Function to perform normalization
# Generating response
# Perform sentiment analysis
# Language detection
# Interact with chatbot framework based on input from user
| 35.615385 | 149 | 0.649028 |
7dbe53a8adce10ce3864cc27ec6021037f99abf0 | 2,714 | py | Python | relo/core/log.py | cwoebker/relo | db11dea794b4b241578f8de4f11d9dbbbbabf894 | [
"BSD-3-Clause"
] | null | null | null | relo/core/log.py | cwoebker/relo | db11dea794b4b241578f8de4f11d9dbbbbabf894 | [
"BSD-3-Clause"
] | null | null | null | relo/core/log.py | cwoebker/relo | db11dea794b4b241578f8de4f11d9dbbbbabf894 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# encoding: utf-8
import sys
LEVEL = {
'NORMAL': 0,
'INFO': 1,
'DEBUG': 2,
'CRITICAl': 0,
'ERROR': 0,
'EXCEPTION': 0,
}
logger = Logger() | 26.096154 | 71 | 0.562638 |
7dbeb142bc5611ae233fb17f68720f678cc9d5f9 | 2,031 | py | Python | client/src/proto3/socket_server.py | andrhahn/pi-spy | 04013565c83eb20db85688c0abb23d6f83d3fbaa | [
"MIT"
] | 1 | 2020-08-17T18:32:06.000Z | 2020-08-17T18:32:06.000Z | client/src/proto3/socket_server.py | andrhahn/pi-spy | 04013565c83eb20db85688c0abb23d6f83d3fbaa | [
"MIT"
] | null | null | null | client/src/proto3/socket_server.py | andrhahn/pi-spy | 04013565c83eb20db85688c0abb23d6f83d3fbaa | [
"MIT"
] | null | null | null | import SocketServer
import io
import logging
import struct
import threading
import PIL.Image
import pika
import config
logging.basicConfig(level=logging.INFO)
if __name__ == "__main__":
print 'Connecting to queue server'
queue_connection = pika.BlockingConnection(
pika.ConnectionParameters(host=config.get('queue_server_host'), port=int(config.get('queue_server_port'))))
socket_server_port = int(config.get('socket_server_port'))
print 'Starting socket server on port ', socket_server_port
socket_server = ThreadedTCPServer((config.get('socket_server_host'), socket_server_port), RequestHandler)
try:
socket_server.serve_forever()
except KeyboardInterrupt:
pass
print 'Closing queue connection'
queue_connection.close()
print 'Stopping socket server'
socket_server.shutdown()
socket_server.server_close()
| 24.46988 | 115 | 0.652388 |
7dbec2776e2389304afbc67d66e86856becf8f17 | 537 | py | Python | lib/showFaces.py | ZakDoesGaming/OregonTrail | 90cab35536ac5c6ba9e772ac5c29c914017c9c23 | [
"MIT"
] | 6 | 2018-05-07T04:04:58.000Z | 2021-05-15T17:44:16.000Z | lib/showFaces.py | ZakDoesGaming/OregonTrail | 90cab35536ac5c6ba9e772ac5c29c914017c9c23 | [
"MIT"
] | null | null | null | lib/showFaces.py | ZakDoesGaming/OregonTrail | 90cab35536ac5c6ba9e772ac5c29c914017c9c23 | [
"MIT"
] | 2 | 2017-05-27T17:06:23.000Z | 2020-08-26T17:57:10.000Z | from pygame import image | 35.8 | 91 | 0.690875 |
7dbf4c0c61fb56b588d550f32b9ba42ac0a71e93 | 3,506 | py | Python | Thirdparty/libpsd/build.py | stinvi/dava.engine | 2b396ca49cdf10cdc98ad8a9ffcf7768a05e285e | [
"BSD-3-Clause"
] | 26 | 2018-09-03T08:48:22.000Z | 2022-02-14T05:14:50.000Z | Thirdparty/libpsd/build.py | ANHELL-blitz/dava.engine | ed83624326f000866e29166c7f4cccfed1bb41d4 | [
"BSD-3-Clause"
] | null | null | null | Thirdparty/libpsd/build.py | ANHELL-blitz/dava.engine | ed83624326f000866e29166c7f4cccfed1bb41d4 | [
"BSD-3-Clause"
] | 45 | 2018-05-11T06:47:17.000Z | 2022-02-03T11:30:55.000Z | import os
import shutil
import build_utils
| 31.585586 | 104 | 0.72162 |
7dc01542f166fdf824058cb9a8b3de627c4cc58f | 53 | py | Python | molpal/__init__.py | mchaker/lab-molpal | f4db7ee2ca51515b4246604867a93a3aac08107d | [
"MIT"
] | 1 | 2022-03-27T10:17:25.000Z | 2022-03-27T10:17:25.000Z | molpal/__init__.py | mchaker/lab-molpal | f4db7ee2ca51515b4246604867a93a3aac08107d | [
"MIT"
] | 2 | 2022-03-27T20:08:23.000Z | 2022-03-28T11:47:11.000Z | molpal/__init__.py | mchaker/lab-molpal | f4db7ee2ca51515b4246604867a93a3aac08107d | [
"MIT"
] | 1 | 2022-03-27T20:20:01.000Z | 2022-03-27T20:20:01.000Z | from .explorer import Explorer
__version__ = "1.0.2" | 17.666667 | 30 | 0.754717 |
7dc06bfcfd35ab80fe1f5fe2ede0d1828b1336ca | 6,192 | py | Python | Data.py | praenubilus/lc-tool | 6af4c557d2780758a4d53cd70554e16b70809859 | [
"MIT"
] | null | null | null | Data.py | praenubilus/lc-tool | 6af4c557d2780758a4d53cd70554e16b70809859 | [
"MIT"
] | null | null | null | Data.py | praenubilus/lc-tool | 6af4c557d2780758a4d53cd70554e16b70809859 | [
"MIT"
] | null | null | null | import subprocess
import os.path
import json
import time
import urllib.parse
from typing import Any, Tuple
import config
from requests_html import HTMLSession
from markdownify import markdownify
| 34.786517 | 94 | 0.581234 |
7dc0ff0cd5dcfd9cb62fcfb00a3e84da41a487f5 | 1,407 | py | Python | examples/python/WeightedCentroidalVoronoi.py | mparno/sdot2d | f632824fc4f0285eab6de911cca8932f69ece705 | [
"BSD-3-Clause"
] | null | null | null | examples/python/WeightedCentroidalVoronoi.py | mparno/sdot2d | f632824fc4f0285eab6de911cca8932f69ece705 | [
"BSD-3-Clause"
] | null | null | null | examples/python/WeightedCentroidalVoronoi.py | mparno/sdot2d | f632824fc4f0285eab6de911cca8932f69ece705 | [
"BSD-3-Clause"
] | null | null | null | import pysdot as ot
import numpy as np
import matplotlib.pyplot as plt
numPts = 100
xbnds = [0.0,1.0] # minimum and maximum x values
ybnds = [0.0,1.0] # minimum and maximum y values
Ns = [50,50]
bbox = ot.BoundingBox(xbnds[0],xbnds[1],ybnds[0],ybnds[1])
grid = ot.RegularGrid(bbox, Ns[0], Ns[1])
dens = np.ones(Ns)
for i in range(Ns[0]):
for j in range(Ns[1]):
pt = grid.Center(i,j)
dens[i,j] = np.exp(-30.0*( (pt[0]-0.5)**2 + (pt[1]-0.5)**2))
dist = ot.DiscretizedDistribution(grid,dens)
# Construct the Centroidal Voronoi diagram. This function uses Lloyd's algorithm
# with latin hypercube samples as initial points (https://en.wikipedia.org/wiki/Lloyd%27s_algorithm)
# Arguments to BuildCentroidal are:
# - The bounding box
# - The number of seed points (same as number of cells) in the Voronoi diagram
# - The maximum number of allowed iterations in Lloyd's algorithm
# - A tolerance on the maximum distance between a cell centroid and seed point.
diag = ot.LaguerreDiagram.BuildCentroidal(bbox,numPts,1000,0.001,dist)
areas = diag.Areas(dist)
# Plot the resulting centroidal Voronoi diagram
fig, axs = plt.subplots(ncols=2,figsize=(14,6))
ot.PlotDiagram(diag, axs[0], distribution=dist, cell_colors=areas)
axs[0].set_title('Weighted CVD')
axs[1].imshow(dens.T,extent=[xbnds[0],xbnds[1],ybnds[0],ybnds[1]],origin='lower',alpha=0.8)
axs[1].set_title('Density')
plt.show()
| 33.5 | 100 | 0.713575 |
7dc1969b2d44d9ad370f7f09a3b9e9919cb4e854 | 589 | py | Python | Combinatorialifier.py | Theta291/Partial-Application-in-Python | db503fbf7a1c173c01fca86a858875e38c41997a | [
"MIT"
] | null | null | null | Combinatorialifier.py | Theta291/Partial-Application-in-Python | db503fbf7a1c173c01fca86a858875e38c41997a | [
"MIT"
] | null | null | null | Combinatorialifier.py | Theta291/Partial-Application-in-Python | db503fbf7a1c173c01fca86a858875e38c41997a | [
"MIT"
] | null | null | null | #Exercise: Try to make a function that accepts a function of only positional arguments and returns a function that takes the same number of positional arguments and, given they are all iterators, attempts every combination of one arguments from each iterator.
#Skills: Partial application, Iteration
papplycomboreverse = lambda fun, xiter : lambda *args : [fun(*args, x) for x in xiter]
| 45.307692 | 259 | 0.726655 |
7dc217926986aef9243e5b82602418597122bc4f | 6,780 | py | Python | api/api_funct.py | pjclock/haproxy-wi | 2ea59c892ae24d824d29dd0cee580c969f64cc87 | [
"Apache-2.0"
] | null | null | null | api/api_funct.py | pjclock/haproxy-wi | 2ea59c892ae24d824d29dd0cee580c969f64cc87 | [
"Apache-2.0"
] | null | null | null | api/api_funct.py | pjclock/haproxy-wi | 2ea59c892ae24d824d29dd0cee580c969f64cc87 | [
"Apache-2.0"
] | 1 | 2019-11-19T14:59:25.000Z | 2019-11-19T14:59:25.000Z | import os
import sys
os.chdir(os.path.dirname(__file__))
sys.path.append(os.path.dirname(__file__))
sys.path.append(os.path.join(sys.path[0], '/var/www/haproxy-wi/app/'))
from bottle import route, run, template, hook, response, request, post
import sql
import funct
| 22.450331 | 149 | 0.629499 |
7dc490740f712aa8ee9b1a1e793a10bb7cab5ed9 | 27,885 | py | Python | trove-11.0.0/trove/guestagent/datastore/experimental/vertica/service.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 1 | 2020-04-08T07:42:19.000Z | 2020-04-08T07:42:19.000Z | trove/guestagent/datastore/experimental/vertica/service.py | ttcong/trove | 1db2dc63fdd5409eafccebe79ff2900d0535ed13 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | trove/guestagent/datastore/experimental/vertica/service.py | ttcong/trove | 1db2dc63fdd5409eafccebe79ff2900d0535ed13 | [
"Apache-2.0"
] | 2 | 2020-03-15T01:24:15.000Z | 2020-07-22T20:34:26.000Z | # Copyright [2015] Hewlett-Packard Development Company, L.P.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import tempfile
from oslo_log import log as logging
from oslo_utils import netutils
from six.moves import configparser
from trove.common import cfg
from trove.common.db import models
from trove.common import exception
from trove.common.i18n import _
from trove.common import instance as rd_instance
from trove.common.stream_codecs import PropertiesCodec
from trove.common import utils
from trove.guestagent.common.configuration import ConfigurationManager
from trove.guestagent.common.configuration import ImportOverrideStrategy
from trove.guestagent.common import guestagent_utils
from trove.guestagent.common import operating_system
from trove.guestagent.common.operating_system import FileMode
from trove.guestagent.datastore.experimental.vertica import system
from trove.guestagent.datastore import service
from trove.guestagent import pkg
from trove.guestagent import volume
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
packager = pkg.Package()
DB_NAME = 'db_srvr'
MOUNT_POINT = CONF.vertica.mount_point
# We will use a fake configuration file for the options managed through
# configuration groups that we apply directly with ALTER DB ... SET ...
FAKE_CFG = os.path.join(MOUNT_POINT, "vertica.cfg.fake")
| 45.048465 | 79 | 0.58146 |
7dc4cee1dbb027b9999c91c7ea99faa307db2e19 | 15,179 | py | Python | waymo_open_dataset/waymo_detection_dataset.py | abahnasy/IDP | c131a597ad72105f67f0ff8850f4eb8275a9800b | [
"MIT"
] | null | null | null | waymo_open_dataset/waymo_detection_dataset.py | abahnasy/IDP | c131a597ad72105f67f0ff8850f4eb8275a9800b | [
"MIT"
] | null | null | null | waymo_open_dataset/waymo_detection_dataset.py | abahnasy/IDP | c131a597ad72105f67f0ff8850f4eb8275a9800b | [
"MIT"
] | null | null | null | """ Waymo dataset with votes.
Author: Ahmed Bahnasy
Date: 2020
"""
import os
import sys
import numpy as np
import pickle
from torch.utils.data import Dataset
import scipy.io as sio # to load .mat files for depth points
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# ROOT_DIR = os.path.dirname(BASE_DIR)
sys.path.append(BASE_DIR)
sys.path.append(os.path.join(BASE_DIR, '..', 'utils'))
from box_util import get_corners_from_labels_array
import pc_util
import waymo_utils
from model_util_waymo import WaymoDatasetConfig
DC = WaymoDatasetConfig() # dataset specific config
MAX_NUM_OBJ = 128 # maximum number of objects allowed per scene
# RAW_LABELS = {0: 'TYPE_UNKNOWN', 1: 'TYPE_VEHICLE' , 2: 'TYPE_PEDESTRIAN', 3: 'TYPE_SIGN', 4: 'TYPE_CYCLIST'}
def viz_votes(pc, point_votes, point_votes_mask):
""" Visualize point votes and point votes mask labels
pc: (N,3 or 6), point_votes: (N,9), point_votes_mask: (N,)
"""
inds = (point_votes_mask==1)
pc_obj = pc[inds,0:3]
pc_obj_voted1 = pc_obj + point_votes[inds,0:3]
pc_obj_voted2 = pc_obj + point_votes[inds,3:6]
pc_obj_voted3 = pc_obj + point_votes[inds,6:9]
pc_util.write_ply(pc_obj, 'pc_obj.ply')
pc_util.write_ply(pc_obj_voted1, 'pc_obj_voted1.ply')
pc_util.write_ply(pc_obj_voted2, 'pc_obj_voted2.ply')
pc_util.write_ply(pc_obj_voted3, 'pc_obj_voted3.ply')
def viz_obb(pc, label, mask, angle_classes, angle_residuals,
size_classes, size_residuals):
""" Visualize oriented bounding box ground truth
pc: (N,3)
label: (K,3) K == MAX_NUM_OBJ
mask: (K,)
angle_classes: (K,)
angle_residuals: (K,)
size_classes: (K,)
size_residuals: (K,3)
"""
oriented_boxes = []
K = label.shape[0]
for i in range(K):
if mask[i] == 0: continue
obb = np.zeros(7)
obb[0:3] = label[i,0:3]
heading_angle = DC.class2angle(angle_classes[i], angle_residuals[i])
box_size = DC.class2size(size_classes[i], size_residuals[i])
obb[3:6] = box_size
obb[6] = -1 * heading_angle
print(obb)
oriented_boxes.append(obb)
pc_util.write_oriented_bbox(oriented_boxes, 'gt_obbs.ply')
pc_util.write_ply(label[mask==1,:], 'gt_centroids.ply')
def get_sem_cls_statistics():
""" Compute number of objects for each semantic class """
d = WaymoDetectionVotesDataset(use_height=True, augment=False)
sem_cls_cnt = {}
for i in range(len(d)):
if i%10==0: print(i)
sample = d[i]
pc = sample['point_clouds']
sem_cls = sample['sem_cls_label']
mask = sample['box_label_mask']
for j in sem_cls:
if mask[j] == 0: continue
if sem_cls[j] not in sem_cls_cnt:
sem_cls_cnt[sem_cls[j]] = 0
sem_cls_cnt[sem_cls[j]] += 1
print(sem_cls_cnt)
if __name__=='__main__':
d = WaymoDetectionVotesDataset(use_height=True, augment=False)
# for i in range(len(d)):
sample = d[0]
print(sample['vote_label'].shape, sample['vote_label_mask'].shape)
pc_util.write_ply(sample['point_clouds'], 'pc.ply')
viz_votes(sample['point_clouds'], sample['vote_label'], sample['vote_label_mask'])
viz_obb(sample['point_clouds'], sample['center_label'], sample['box_label_mask'],
sample['heading_class_label'], sample['heading_residual_label'],
sample['size_class_label'], sample['size_residual_label'])
| 46.277439 | 180 | 0.620792 |
7dc5dc988616aaca00dd30fca002242eb44adc92 | 2,792 | py | Python | smoketests/tests/test_dir_test.py | erlware-deprecated/sinan | 5172974e75f30bd7050b99ff2e6849501008ec44 | [
"MIT"
] | 7 | 2015-06-03T19:18:39.000Z | 2022-01-26T10:39:03.000Z | smoketests/tests/test_dir_test.py | ericbmerritt/sinan | 36d89008a332e1283fc9f7081165e91fb0547885 | [
"MIT"
] | 3 | 2020-06-25T04:15:54.000Z | 2020-06-25T04:16:13.000Z | smoketests/tests/test_dir_test.py | ericbmerritt/sinan | 36d89008a332e1283fc9f7081165e91fb0547885 | [
"MIT"
] | 3 | 2015-11-27T10:33:31.000Z | 2018-07-31T22:56:32.000Z | import unittest
import sin_testing as st
import pexpect
import os
if __name__ == '__main__':
unittest.main()
| 31.370787 | 79 | 0.523997 |
7dc85646e762b266d883108a8fd66e58db5c4d2f | 7,362 | py | Python | budgetportal/tests/test_management_commands.py | fluenty/datamanager | 97ba9d58d4527b7d61b730ea4896f09a56e6ae60 | [
"MIT"
] | null | null | null | budgetportal/tests/test_management_commands.py | fluenty/datamanager | 97ba9d58d4527b7d61b730ea4896f09a56e6ae60 | [
"MIT"
] | null | null | null | budgetportal/tests/test_management_commands.py | fluenty/datamanager | 97ba9d58d4527b7d61b730ea4896f09a56e6ae60 | [
"MIT"
] | null | null | null | from budgetportal.models import (
FinancialYear,
Sphere,
Government,
Department,
Programme,
)
from django.core.management import call_command
from django.test import TestCase
from tempfile import NamedTemporaryFile
from StringIO import StringIO
import yaml
| 40.450549 | 108 | 0.664493 |
7dc8bc4931a3fbdfc68cead350a1a5f0c0c77747 | 898 | py | Python | src/fedservice/utils.py | rohe/fedservice | 1460d21217b804cac0f38fa26ffa24bee7cf6dad | [
"Apache-2.0"
] | 3 | 2018-11-28T12:01:31.000Z | 2020-12-16T21:43:29.000Z | src/fedservice/utils.py | peppelinux/fedservice | 0dc5fd0bd33e181b6a1a9bbef6835b2ce5d2f568 | [
"Apache-2.0"
] | 13 | 2020-02-10T15:33:37.000Z | 2022-02-01T16:43:36.000Z | src/fedservice/utils.py | peppelinux/fedservice | 0dc5fd0bd33e181b6a1a9bbef6835b2ce5d2f568 | [
"Apache-2.0"
] | 4 | 2019-05-29T10:04:48.000Z | 2020-10-14T09:52:53.000Z | import json
import logging
import ssl
import sys
from oidcrp.exception import ResponseError
logger = logging.getLogger(__name__)
| 22.45 | 71 | 0.609131 |
7dc8ee9b4d42b514db819f378b41dbecf850d307 | 3,884 | py | Python | plugins/rd_bot.py | deg4uss3r/rd_bot | f82a929b59331d2ee67067369cd6ffe92a4fa7e6 | [
"MIT"
] | null | null | null | plugins/rd_bot.py | deg4uss3r/rd_bot | f82a929b59331d2ee67067369cd6ffe92a4fa7e6 | [
"MIT"
] | 3 | 2016-09-17T09:51:49.000Z | 2019-11-13T20:58:37.000Z | plugins/rd_bot.py | deg4uss3r/rd_bot | f82a929b59331d2ee67067369cd6ffe92a4fa7e6 | [
"MIT"
] | null | null | null | from __future__ import unicode_literals
import requests
import json
import os
import sys
outputs = []
| 28.77037 | 205 | 0.583162 |
7dcb26b78425a819dd55aa11592a32323ece117a | 6,855 | py | Python | swss.py | andycranston/swss | 22db6b2e2eb5711d4fd06bd7a094342ad6be8b62 | [
"MIT"
] | null | null | null | swss.py | andycranston/swss | 22db6b2e2eb5711d4fd06bd7a094342ad6be8b62 | [
"MIT"
] | null | null | null | swss.py | andycranston/swss | 22db6b2e2eb5711d4fd06bd7a094342ad6be8b62 | [
"MIT"
] | null | null | null | #! /usr/bin/python3
#
# @(!--#) @(#) swss.py, version 002, 27-july-2018
#
# open a series of home pages and take a screen shot of each one
#
################################################################################################
#
# imports
#
import sys
import os
import argparse
import glob
import shutil
import tempfile
import time
import datetime
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import *
################################################################################################
#########################################################################
#########################################################################
#########################################################################
#########################################################################
#########################################################################
#########################################################################
################################################################################################
##########################################################################
progname = os.path.basename(sys.argv[0])
sys.exit(main())
# end of file
| 26.467181 | 156 | 0.484318 |
7dcb91ee413942a4aa1e8d201a6cf906a3130f7f | 1,041 | py | Python | api/migrations/versions/0be658f07ac6_state_consumed.py | eve-git/namex | 130f261500ce595b291d5428c32e1f6cc38ea505 | [
"Apache-2.0"
] | 4 | 2018-10-05T23:41:05.000Z | 2019-06-19T16:17:50.000Z | api/migrations/versions/0be658f07ac6_state_consumed.py | eve-git/namex | 130f261500ce595b291d5428c32e1f6cc38ea505 | [
"Apache-2.0"
] | 635 | 2018-05-31T04:12:46.000Z | 2022-03-31T18:45:42.000Z | api/migrations/versions/0be658f07ac6_state_consumed.py | thorwolpert/namex | b9d927774e4c0da0255ca5aaa7ed1890283956fd | [
"Apache-2.0"
] | 71 | 2018-05-14T20:47:55.000Z | 2022-03-31T23:08:30.000Z | """state consumed
Revision ID: 0be658f07ac6
Revises: bd1e892d0609
Create Date: 2021-07-18 21:26:04.588007
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
from sqlalchemy import String
# revision identifiers, used by Alembic.
revision = '0be658f07ac6'
down_revision = 'bd1e892d0609'
branch_labels = None
depends_on = None
| 24.785714 | 68 | 0.612872 |
7dcbb0c32530744ec259355b1498ba6ed0c58b39 | 2,878 | py | Python | tests/test_pydantic.py | hCaptcha/hmt-basemodels | 5108bf4ddf68d670607daf8d68302c01631c4be2 | [
"MIT"
] | 3 | 2020-09-08T15:03:31.000Z | 2021-06-30T19:00:45.000Z | tests/test_pydantic.py | humanprotocol/hmt-basemodels | 5108bf4ddf68d670607daf8d68302c01631c4be2 | [
"MIT"
] | 43 | 2019-02-28T17:43:42.000Z | 2022-02-13T11:37:08.000Z | tests/test_pydantic.py | hCaptcha/hmt-basemodels | 5108bf4ddf68d670607daf8d68302c01631c4be2 | [
"MIT"
] | 5 | 2019-05-09T15:58:07.000Z | 2020-12-09T23:24:24.000Z | from unittest import TestCase, mock
from copy import deepcopy
from pydantic.error_wrappers import ValidationError
from basemodels.pydantic import Manifest
from basemodels.pydantic.manifest.data.taskdata import TaskDataEntry
SIMPLE = {
"job_mode": "batch",
"request_type": "image_label_multiple_choice",
"requester_accuracy_target": 0.8,
"requester_description": "pyhcaptcha internal_id: 69efdbe1-e586-42f8-bf05-a5745f75402a",
"requester_max_repeats": 7,
"requester_min_repeats": 3,
"requester_question": {"en": "deploy to only certain sites"},
"requester_restricted_answer_set": {"one": {"en": "one"}},
"task_bid_price": -1,
"unsafe_content": False,
"oracle_stake": 0.05,
"recording_oracle_addr": "0x6a0E68eA5F706339dd6bd354F53EfcB5B9e53E49",
"reputation_oracle_addr": "0x6a0E68eA5F706339dd6bd354F53EfcB5B9e53E49",
"reputation_agent_addr": "0x6a0E68eA5F706339dd6bd354F53EfcB5B9e53E49",
"groundtruth_uri": "https://hmt-jovial-lamport.hcaptcha.com/pyhcaptcha-client/taskdata/sha1:bf21a9e8fbc5a3846fb05b4fa0859e0917b2202f.json",
"taskdata_uri": "https://hmt-jovial-lamport.hcaptcha.com/pyhcaptcha-client/taskdata/sha1:97d170e1550eee4afc0af065b78cda302a97674c.json",
"job_total_tasks": 0,
"job_api_key": "417714f0-7ce6-412b-b394-0d2ae58a8c6d",
"restricted_audience": {
"sitekey": [
{"dfe03e7c-f417-4726-8b14-ae033a3cc66e": {"score": 1}},
{"dfe03e7c-f417-4726-8b12-ae033a3cc66a": {"score": 1}},
]
},
}
TASK = {
"task_key": "407fdd93-687a-46bb-b578-89eb96b4109d",
"datapoint_uri": "https://domain.com/file1.jpg",
"datapoint_hash": "f4acbe8562907183a484498ba901bfe5c5503aaa",
"metadata": {
"key_1": "value_1",
"key_2": "value_2",
}
}
| 33.858824 | 143 | 0.673384 |
7dcc1a030ac1c718e3ae0328b8bf873af5f2d223 | 1,064 | py | Python | olutils/path.py | OctaveLauby/olutils | 9d0741fe2a3ce527be60be2bf1a6904c3340e488 | [
"Apache-2.0"
] | 1 | 2020-10-23T17:11:42.000Z | 2020-10-23T17:11:42.000Z | olutils/path.py | OctaveLauby/olutils | 9d0741fe2a3ce527be60be2bf1a6904c3340e488 | [
"Apache-2.0"
] | 4 | 2019-05-09T12:53:33.000Z | 2020-12-03T13:49:26.000Z | olutils/path.py | OctaveLauby/olutils | 9d0741fe2a3ce527be60be2bf1a6904c3340e488 | [
"Apache-2.0"
] | null | null | null | from os.path import exists
def get_next_path(path_frmt: str, start: int = 1) -> str:
"""Return next available path based on path_frmt (1 positional-placeholder)"""
return path_frmt.format(get_next_path_index(path_frmt, start=start))
def get_next_path_index(path_frmt: str, start: int = 1) -> int:
"""Get next index of given path format (1 positional-placeholder)
Raises:
ValueError: if path_frmt does not contain one and only one positional-placeholder
such as '{}' or '{:03d}'
"""
try:
# Try some random int to check path_frmt consistency
assert "3823243077" in path_frmt.format(3823243077)
except (IndexError, AssertionError):
# IndexError means more than one placeholder, AssertionError means none
raise ValueError("path_frmt must contain only one positional-placeholder") from None
except KeyError:
raise ValueError("path_frmt must contain no named-placeholder") from None
i = start
while exists(path_frmt.format(i)):
i += 1
return i
| 36.689655 | 92 | 0.68797 |
7dccf7de030e74e41a66762279d9a43fa3b28e62 | 63 | py | Python | env/lib/python2.7/site-packages/certifi/__init__.py | wagnermarkd/stationary-hud | 96eb0457e52a7e8a691e8ae101d43353db038f57 | [
"MIT"
] | 6 | 2021-09-18T07:19:54.000Z | 2021-09-18T07:20:07.000Z | venv/Lib/site-packages/certifi/__init__.py | Airren/mxonline-python | f16c7039b5a8ac7d2d743c83c9f44f77f02e1432 | [
"MIT"
] | 10 | 2020-01-20T13:52:07.000Z | 2022-03-12T00:12:31.000Z | venv/Lib/site-packages/certifi/__init__.py | Airren/mxonline-python | f16c7039b5a8ac7d2d743c83c9f44f77f02e1432 | [
"MIT"
] | 1 | 2016-08-24T01:08:34.000Z | 2016-08-24T01:08:34.000Z | from .core import where, old_where
__version__ = "2016.02.28"
| 15.75 | 34 | 0.746032 |
7dcd9cbc95d9ac46a0346d6a8f8325d12f3bf6be | 681 | py | Python | setup.py | jacobschaer/qt_compat | 8121500c1fb6f95d3cfff033410e055a187a39c9 | [
"MIT"
] | null | null | null | setup.py | jacobschaer/qt_compat | 8121500c1fb6f95d3cfff033410e055a187a39c9 | [
"MIT"
] | null | null | null | setup.py | jacobschaer/qt_compat | 8121500c1fb6f95d3cfff033410e055a187a39c9 | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
setup(
name="QtCompat",
version="0.1",
packages=find_packages(),
scripts=[],
# Project uses reStructuredText, so ensure that the docutils get
# installed or upgraded on the target machine
install_requires=[],
package_data={
},
# metadata for upload to PyPI
author="Jacob Schaer",
author_email="",
description="PyQt4, 5 and Pyside Compatibility Library",
license="MIT",
keywords="pyqt4 pyqt5 pyside compatibility",
url="https://github.com/jacobschaer/qt_compat/", # project home page, if any
# could also include long_description, download_url, classifiers, etc.
) | 28.375 | 82 | 0.690162 |
7dcde3e58f5df9d50ac28aa03a0ad0516f667fbc | 2,730 | py | Python | test/patterns/joined_validation/test_joined_validation.py | acheshkov/aibolit | eed2fafa9fbc5f3359510cd80fee2ae8311d7ed8 | [
"MIT"
] | null | null | null | test/patterns/joined_validation/test_joined_validation.py | acheshkov/aibolit | eed2fafa9fbc5f3359510cd80fee2ae8311d7ed8 | [
"MIT"
] | null | null | null | test/patterns/joined_validation/test_joined_validation.py | acheshkov/aibolit | eed2fafa9fbc5f3359510cd80fee2ae8311d7ed8 | [
"MIT"
] | null | null | null | import os
from unittest import TestCase
from aibolit.patterns.joined_validation.joined_validation import JoinedValidation
from pathlib import Path
| 33.292683 | 81 | 0.630037 |
7dce2ac1c38976edf285448acc824af893571906 | 971 | py | Python | python_structure/data_structures/lists_tuples_dictionaries/tuple_defs.py | bangyen/pascal-triangle | 0831348e93c274bdd38bba5c3aeeda7596ab97ee | [
"MIT"
] | 1 | 2020-03-11T10:20:53.000Z | 2020-03-11T10:20:53.000Z | python_structure/data_structures/lists_tuples_dictionaries/tuple_defs.py | bangyen/pascal-triangle | 0831348e93c274bdd38bba5c3aeeda7596ab97ee | [
"MIT"
] | 1 | 2020-07-06T15:45:01.000Z | 2020-07-06T15:50:32.000Z | python_structure/data_structures/lists_tuples_dictionaries/tuple_defs.py | bangyen/pascal-triangle | 0831348e93c274bdd38bba5c3aeeda7596ab97ee | [
"MIT"
] | 1 | 2020-07-02T05:21:58.000Z | 2020-07-02T05:21:58.000Z | """
Global tuple to avoid make a new one each time a method is called
"""
my_tuple = ("London", 123, 18.2)
if __name__ == '__main__':
main_tuple = city_tuple_declaration()
print(main_tuple)
print(my_tuple)
tuple_get_element(5)
print(bool_to_string_translator(tuple_has_element("London")))
print(bool_to_string_translator(tuple_has_not_element("London")))
| 22.068182 | 69 | 0.676622 |
7dcea3fbbfd1ee77dfca864ce3a07a6ca9ff127e | 389 | py | Python | annotations/filters.py | acdh-oeaw/ner-annotator | ee8f72248669b848eb273644d80ad52dc495a07c | [
"MIT"
] | 1 | 2019-01-02T15:05:30.000Z | 2019-01-02T15:05:30.000Z | annotations/filters.py | acdh-oeaw/ner-annotator | ee8f72248669b848eb273644d80ad52dc495a07c | [
"MIT"
] | 8 | 2020-02-11T23:02:04.000Z | 2021-06-10T20:39:58.000Z | annotations/filters.py | acdh-oeaw/ner-annotator | ee8f72248669b848eb273644d80ad52dc495a07c | [
"MIT"
] | 1 | 2019-01-02T15:05:31.000Z | 2019-01-02T15:05:31.000Z | import django_filters
from . models import NerSample
| 24.3125 | 62 | 0.678663 |
7dcf866c0422d8f7d07418dae857b071849168bc | 51 | py | Python | m3o_plugin/postcode.py | JustIceQAQ/play_m3o_in_python | 140b1f07cb574d1f0a2890503ae9e73ce3907f2b | [
"MIT"
] | null | null | null | m3o_plugin/postcode.py | JustIceQAQ/play_m3o_in_python | 140b1f07cb574d1f0a2890503ae9e73ce3907f2b | [
"MIT"
] | null | null | null | m3o_plugin/postcode.py | JustIceQAQ/play_m3o_in_python | 140b1f07cb574d1f0a2890503ae9e73ce3907f2b | [
"MIT"
] | null | null | null | # TODO Postcode: https://m3o.com/postcode/overview
| 25.5 | 50 | 0.764706 |
7dd0263f686636079c3320a5eff927b93bd01ba9 | 3,016 | py | Python | learning_algorithms/hysteretic_q_matrix.py | swj0418/Reinforcement_Learning_Framework | 1ac6bbe31cee5ea7f1e5f28d8b53aa3985b39db8 | [
"Apache-2.0"
] | 1 | 2019-07-01T11:47:33.000Z | 2019-07-01T11:47:33.000Z | learning_algorithms/hysteretic_q_matrix.py | swj0418/Reinforcement_Learning_Framework | 1ac6bbe31cee5ea7f1e5f28d8b53aa3985b39db8 | [
"Apache-2.0"
] | 1 | 2019-04-13T05:46:14.000Z | 2019-04-13T05:46:14.000Z | learning_algorithms/hysteretic_q_matrix.py | swj0418/Reinforcement_Learning_Framework | 1ac6bbe31cee5ea7f1e5f28d8b53aa3985b39db8 | [
"Apache-2.0"
] | null | null | null | import numpy as np | 33.511111 | 110 | 0.623342 |
7dd0a1a9133fdf0ceb0199e1c5e7bef38b12567d | 14,581 | py | Python | psiz/keras/layers/kernel.py | asuiconlab/psiz | 4f05348cf43d2d53ff9cc6dee633de385df883e3 | [
"Apache-2.0"
] | null | null | null | psiz/keras/layers/kernel.py | asuiconlab/psiz | 4f05348cf43d2d53ff9cc6dee633de385df883e3 | [
"Apache-2.0"
] | null | null | null | psiz/keras/layers/kernel.py | asuiconlab/psiz | 4f05348cf43d2d53ff9cc6dee633de385df883e3 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2020 The PsiZ Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Module of TensorFlow kernel layers.
Classes:
GroupAttention: A simple group-specific attention layer.
Kernel: A kernel that allows the user to separately specify a
distance and similarity function.
AttentionKernel: A kernel that uses group-specific attention
weights and allows the user to separately specify a distance
and similarity function.
GroupAttentionVariational: A variational group attention layer.
"""
import numpy as np
import tensorflow as tf
from tensorflow.python.keras import backend as K
import psiz.keras.constraints as pk_constraints
import psiz.keras.initializers as pk_initializers
from psiz.keras.layers.variational import Variational
from psiz.keras.layers.distances.minkowski import WeightedMinkowski
from psiz.models.base import GroupLevel
| 32.692825 | 79 | 0.613812 |
7dd13c6ad4dc8afcb18c82aeecd32fc176c29e34 | 1,261 | py | Python | apps/user/migrations/0005_auto_20190804_1443.py | tiger-fight-tonight/E-Server | 3939bc3f8c090441cc2af17f4e6cb777642fb792 | [
"Apache-2.0"
] | 6 | 2019-07-18T16:21:17.000Z | 2020-11-19T04:47:02.000Z | apps/user/migrations/0005_auto_20190804_1443.py | tiger-fight-tonight/E-Server | 3939bc3f8c090441cc2af17f4e6cb777642fb792 | [
"Apache-2.0"
] | null | null | null | apps/user/migrations/0005_auto_20190804_1443.py | tiger-fight-tonight/E-Server | 3939bc3f8c090441cc2af17f4e6cb777642fb792 | [
"Apache-2.0"
] | null | null | null | # Generated by Django 2.1.7 on 2019-08-04 06:43
import datetime
from django.db import migrations, models
import uuid
| 35.027778 | 181 | 0.634417 |
7dd3bf8d2c2f8bbd741f59dd7b443601e8b83316 | 282 | py | Python | scripts/get_plat_name.py | uuosio/gscdk | 995d99948d7090032f76b05656cad29c6cfbb647 | [
"BSD-3-Clause"
] | 6 | 2021-09-03T09:02:39.000Z | 2022-01-12T06:31:09.000Z | scripts/get_plat_name.py | learnforpractice/gscdk | dc17c43fa2be28500f38897a29bbbd9eb9c7ada7 | [
"BSD-3-Clause"
] | 1 | 2021-11-01T16:46:09.000Z | 2021-11-04T12:51:45.000Z | scripts/get_plat_name.py | learnforpractice/gscdk | dc17c43fa2be28500f38897a29bbbd9eb9c7ada7 | [
"BSD-3-Clause"
] | 2 | 2021-11-10T01:56:15.000Z | 2022-01-13T14:27:31.000Z | import platform
#check the platform for linux, macos, windows
if platform.system() == "Linux":
print("manylinux1_x86_64")
elif platform.system() == "Windows":
print("win-amd64")
elif platform.system() == "Darwin":
print("macosx_10_15_x86_64")
else:
print("Unknown")
| 25.636364 | 45 | 0.691489 |
7dd3f523efb7218a00299577b756498b0e6e336c | 508 | py | Python | submissions/mirror-reflection/solution.py | Wattyyy/LeetCode | 13a9be056d0a0c38c2f8c8222b11dc02cb25a935 | [
"MIT"
] | null | null | null | submissions/mirror-reflection/solution.py | Wattyyy/LeetCode | 13a9be056d0a0c38c2f8c8222b11dc02cb25a935 | [
"MIT"
] | 1 | 2022-03-04T20:24:32.000Z | 2022-03-04T20:31:58.000Z | submissions/mirror-reflection/solution.py | Wattyyy/LeetCode | 13a9be056d0a0c38c2f8c8222b11dc02cb25a935 | [
"MIT"
] | null | null | null | # https://leetcode.com/problems/mirror-reflection
| 24.190476 | 55 | 0.36811 |
7dd470fef059403a7425a058aa8ed792b44ec169 | 4,290 | py | Python | sdk/python/kulado_azure/batch/get_account.py | kulado/kulado-azure | f3a408fa0405fe6ae93e0049b2ae0f0e266f1cf6 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/kulado_azure/batch/get_account.py | kulado/kulado-azure | f3a408fa0405fe6ae93e0049b2ae0f0e266f1cf6 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/kulado_azure/batch/get_account.py | kulado/kulado-azure | f3a408fa0405fe6ae93e0049b2ae0f0e266f1cf6 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Kulado Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import kulado
import kulado.runtime
from .. import utilities, tables
| 44.226804 | 226 | 0.675991 |
7dd4c10b342878f52f717eef146ce0ddd5328f2c | 1,988 | py | Python | run/run_fd_tgv_conv.py | huppd/PINTimpact | 766b2ef4d2fa9e6727965e48a3fba7b752074850 | [
"MIT"
] | null | null | null | run/run_fd_tgv_conv.py | huppd/PINTimpact | 766b2ef4d2fa9e6727965e48a3fba7b752074850 | [
"MIT"
] | null | null | null | run/run_fd_tgv_conv.py | huppd/PINTimpact | 766b2ef4d2fa9e6727965e48a3fba7b752074850 | [
"MIT"
] | null | null | null | """ running converferce for finite differences and Taylor-Green vortex """
import os
from math import pi
import xml.etree.ElementTree as ET
import platform_paths as pp
import manipulator as ma
# load parameter file
ma.set_ids('../XML/parameterTGVTime.xml')
TREE = ET.parse('../XML/parameterTGVTime.xml')
ROOT = TREE.getroot()
ma.set_parameter(ROOT, 'withoutput', 1)
ma.set_parameter(ROOT, 'initial guess', 'zero')
# ma.set_parameter( ROOT, 'refinement level', 1 )
# make executable ready
EXE = 'peri_navier3DTime'
os.chdir(pp.EXE_PATH)
os.system('make '+EXE+' -j4')
CASE_PATH = ['']*4
RUNS = range(1)
RES = [10]
STS = [0.1, 10., 1.]
NFS = [72]
ma.set_parameter(ROOT, 'nx', 65)
ma.set_parameter(ROOT, 'ny', 65)
ma.set_parameter(ROOT, 'nz', 5)
CASE_PATH[0] = pp.DATA_PATH + '/FDTGV_conv2'
pp.mkdir(CASE_PATH, 0)
for re in RES:
CASE_PATH[1] = '/re_'+str(re)
pp.mkdir(CASE_PATH, 1)
for st in STS:
CASE_PATH[2] = '/a2_'+str(st)
pp.mkdir(CASE_PATH, 2)
for nf in NFS:
CASE_PATH[3] = '/nt_'+str(nf)
pp.mkdir(CASE_PATH, 3)
#
pp.chdir(CASE_PATH, 3)
#
ma.set_parameter(ROOT, 'Re', re)
ma.set_parameter(ROOT, 'alpha2', 2.*pi*st*re)
ma.set_parameter(ROOT, 'nf', nf)
ma.set_parameter(ROOT, 'npx', 1)
ma.set_parameter(ROOT, 'npy', 1)
ma.set_parameter(ROOT, 'npz', 1)
ma.set_parameter(ROOT, 'npf', 12)
TREE.write('parameter3D.xml')
# nptot = npx[i]*npy[i]*npf[i]
nptot = 12
mem = int(max(1024, 60*1024/nptot))
for run in RUNS:
print()
print(CASE_PATH)
exeString = \
pp.exe_pre(nptot, ' -N -R "rusage[mem=' +
str(mem) + ']" -W 6:00', run) + \
pp.EXE_PATH+'/'+EXE
print(exeString)
os.system(exeString)
| 27.611111 | 74 | 0.551308 |
7dd56ee0d12643635fe1de9999d5c50d9f66ca84 | 14,617 | py | Python | DCSCN.py | dattv/DCSCN-Tensorflow | eaed09c1d39236617f970b16f555ae88cfa49280 | [
"MIT"
] | 3 | 2019-06-18T13:04:26.000Z | 2019-06-25T07:59:10.000Z | DCSCN.py | dattv/DCSCN-Tensorflow | eaed09c1d39236617f970b16f555ae88cfa49280 | [
"MIT"
] | null | null | null | DCSCN.py | dattv/DCSCN-Tensorflow | eaed09c1d39236617f970b16f555ae88cfa49280 | [
"MIT"
] | null | null | null | """
"""
import logging
import os
import random
import time
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from helper import loader, utility as util
matplotlib.use("agg")
INPUT_IMAGE_DIR = "input"
INTERPOLATED_IMAGE_DIR = "interpolated"
TRUE_IMAGE_DIR = "true"
| 46.256329 | 126 | 0.59205 |
7dd643437e0865cafce1491b350b4e99be342f2c | 27 | py | Python | tests/tests.py | cjapp/tkinter_simpleEncodeDecode | 15520d73c51bb1a6a316414b2e8fb50b7be8f942 | [
"MIT"
] | null | null | null | tests/tests.py | cjapp/tkinter_simpleEncodeDecode | 15520d73c51bb1a6a316414b2e8fb50b7be8f942 | [
"MIT"
] | null | null | null | tests/tests.py | cjapp/tkinter_simpleEncodeDecode | 15520d73c51bb1a6a316414b2e8fb50b7be8f942 | [
"MIT"
] | null | null | null |
from .context import main
| 9 | 25 | 0.777778 |
7dd7abdb00a4ee3724c7dfc992569e2f8f38d9dd | 23,149 | py | Python | ofa/tutorial/imagenet_eval_helper.py | johsnows/once-for-all | fac2a6388e70873666b848a316aa58c7b2e17031 | [
"Apache-2.0"
] | null | null | null | ofa/tutorial/imagenet_eval_helper.py | johsnows/once-for-all | fac2a6388e70873666b848a316aa58c7b2e17031 | [
"Apache-2.0"
] | null | null | null | ofa/tutorial/imagenet_eval_helper.py | johsnows/once-for-all | fac2a6388e70873666b848a316aa58c7b2e17031 | [
"Apache-2.0"
] | null | null | null | import os.path as osp
import numpy as np
import math
from tqdm import tqdm
import torch.nn as nn
import torch.backends.cudnn as cudnn
import torch.utils.data
from torchvision import transforms, datasets
from ofa.utils import AverageMeter, accuracy
from ofa.model_zoo import ofa_specialized
from ofa.imagenet_classification.elastic_nn.utils import set_running_statistics
import copy
import random
# Identifiers of the pretrained specialized OFA (Once-for-All) subnets in the
# model zoo (presumably consumed by the `ofa_specialized` import above --
# confirm against the model zoo keys). Naming pattern appears to be:
#   {device-or-metric}@{latency-or-FLOPs}_top1@{ImageNet top-1 %}_finetune@{epochs}
net_id = ['pixel1_lat@143ms_top1@80.1_finetune@75', 'pixel1_lat@132ms_top1@79.8_finetune@75',
          'pixel1_lat@79ms_top1@78.7_finetune@75', 'pixel1_lat@58ms_top1@76.9_finetune@75',
          'pixel1_lat@40ms_top1@74.9_finetune@25', 'pixel1_lat@28ms_top1@73.3_finetune@25',
          'pixel1_lat@20ms_top1@71.4_finetune@25', 'pixel2_lat@62ms_top1@75.8_finetune@25',
          'pixel2_lat@50ms_top1@74.7_finetune@25', 'pixel2_lat@35ms_top1@73.4_finetune@25',
          'pixel2_lat@25ms_top1@71.5_finetune@25', 'note10_lat@64ms_top1@80.2_finetune@75',
          'note10_lat@50ms_top1@79.7_finetune@75', 'note10_lat@41ms_top1@79.3_finetune@75',
          'note10_lat@16ms_top1@75.5_finetune@25', 'note10_lat@11ms_top1@73.6_finetune@25',
          'note10_lat@8ms_top1@71.4_finetune@25', 'note8_lat@65ms_top1@76.1_finetune@25',
          'note8_lat@49ms_top1@74.9_finetune@25', 'note8_lat@31ms_top1@72.8_finetune@25',
          'note8_lat@22ms_top1@70.4_finetune@25', 's7edge_lat@88ms_top1@76.3_finetune@25',
          's7edge_lat@58ms_top1@74.7_finetune@25', 's7edge_lat@41ms_top1@73.1_finetune@25',
          's7edge_lat@29ms_top1@70.5_finetune@25', 'LG-G8_lat@24ms_top1@76.4_finetune@25',
          'LG-G8_lat@16ms_top1@74.7_finetune@25', 'LG-G8_lat@11ms_top1@73.0_finetune@25',
          'LG-G8_lat@8ms_top1@71.1_finetune@25', '1080ti_gpu64@27ms_top1@76.4_finetune@25',
          '1080ti_gpu64@22ms_top1@75.3_finetune@25', '1080ti_gpu64@15ms_top1@73.8_finetune@25',
          '1080ti_gpu64@12ms_top1@72.6_finetune@25', 'v100_gpu64@11ms_top1@76.1_finetune@25',
          'v100_gpu64@9ms_top1@75.3_finetune@25', 'v100_gpu64@6ms_top1@73.0_finetune@25',
          'v100_gpu64@5ms_top1@71.6_finetune@25', 'tx2_gpu16@96ms_top1@75.8_finetune@25',
          'tx2_gpu16@80ms_top1@75.4_finetune@25', 'tx2_gpu16@47ms_top1@72.9_finetune@25',
          'tx2_gpu16@35ms_top1@70.3_finetune@25', 'cpu_lat@17ms_top1@75.7_finetune@25',
          'cpu_lat@15ms_top1@74.6_finetune@25', 'cpu_lat@11ms_top1@72.0_finetune@25',
          'cpu_lat@10ms_top1@71.1_finetune@25', 'flops@595M_top1@80.0_finetune@75',
          'flops@482M_top1@79.6_finetune@75', 'flops@389M_top1@79.1_finetune@75', ]
# The same specialized subnet ids as `net_id`, ordered by ascending ImageNet
# top-1 accuracy (the number after 'top1@').
# BUG FIX: three entries contained stray embedded spaces ('72. 0', 'to p1',
# '1080ti_gpu 64') and therefore could never match the corresponding ids in
# `net_id`; they are corrected here to match `net_id` exactly.
sort_net_id = [
    'tx2_gpu16@35ms_top1@70.3_finetune@25',
    'note8_lat@22ms_top1@70.4_finetune@25',
    's7edge_lat@29ms_top1@70.5_finetune@25',
    'cpu_lat@10ms_top1@71.1_finetune@25',
    'LG-G8_lat@8ms_top1@71.1_finetune@25',
    'pixel1_lat@20ms_top1@71.4_finetune@25',
    'note10_lat@8ms_top1@71.4_finetune@25',
    'pixel2_lat@25ms_top1@71.5_finetune@25',
    'v100_gpu64@5ms_top1@71.6_finetune@25',
    'cpu_lat@11ms_top1@72.0_finetune@25',
    '1080ti_gpu64@12ms_top1@72.6_finetune@25',
    'note8_lat@31ms_top1@72.8_finetune@25',
    'tx2_gpu16@47ms_top1@72.9_finetune@25',
    'v100_gpu64@6ms_top1@73.0_finetune@25',
    'LG-G8_lat@11ms_top1@73.0_finetune@25',
    's7edge_lat@41ms_top1@73.1_finetune@25',
    'pixel1_lat@28ms_top1@73.3_finetune@25',
    'pixel2_lat@35ms_top1@73.4_finetune@25',
    'note10_lat@11ms_top1@73.6_finetune@25',
    '1080ti_gpu64@15ms_top1@73.8_finetune@25',
    'cpu_lat@15ms_top1@74.6_finetune@25',
    's7edge_lat@58ms_top1@74.7_finetune@25',
    'LG-G8_lat@16ms_top1@74.7_finetune@25',
    'pixel2_lat@50ms_top1@74.7_finetune@25',
    'note8_lat@49ms_top1@74.9_finetune@25',
    'pixel1_lat@40ms_top1@74.9_finetune@25',
    '1080ti_gpu64@22ms_top1@75.3_finetune@25',
    'v100_gpu64@9ms_top1@75.3_finetune@25',
    'tx2_gpu16@80ms_top1@75.4_finetune@25',
    'note10_lat@16ms_top1@75.5_finetune@25',
    'cpu_lat@17ms_top1@75.7_finetune@25',
    'tx2_gpu16@96ms_top1@75.8_finetune@25',
    'pixel2_lat@62ms_top1@75.8_finetune@25',
    'v100_gpu64@11ms_top1@76.1_finetune@25',
    'note8_lat@65ms_top1@76.1_finetune@25',
    's7edge_lat@88ms_top1@76.3_finetune@25',
    '1080ti_gpu64@27ms_top1@76.4_finetune@25',
    'LG-G8_lat@24ms_top1@76.4_finetune@25',
    'pixel1_lat@58ms_top1@76.9_finetune@75',
    'pixel1_lat@79ms_top1@78.7_finetune@75',
    'flops@389M_top1@79.1_finetune@75',
    'note10_lat@41ms_top1@79.3_finetune@75',
    'flops@482M_top1@79.6_finetune@75',
    'note10_lat@50ms_top1@79.7_finetune@75',
    'pixel1_lat@132ms_top1@79.8_finetune@75',
    'flops@595M_top1@80.0_finetune@75',
    'pixel1_lat@143ms_top1@80.1_finetune@75',
    'note10_lat@64ms_top1@80.2_finetune@75']
| 44.093333 | 177 | 0.607154 |
7dd999fb131d09d1bf5880249af5cf7d95c80d95 | 8,713 | py | Python | python/housinginsights/sources/cama.py | mrkem598/housing-insights | 05dffebad690bf727cbcbec53128d2fb69166e4c | [
"MIT"
] | null | null | null | python/housinginsights/sources/cama.py | mrkem598/housing-insights | 05dffebad690bf727cbcbec53128d2fb69166e4c | [
"MIT"
] | null | null | null | python/housinginsights/sources/cama.py | mrkem598/housing-insights | 05dffebad690bf727cbcbec53128d2fb69166e4c | [
"MIT"
] | null | null | null | # Script is deprecated, as of September 18, 2017.
# zoneUnitCount now calculated with LoadData's _get_residential_units()
#
from pprint import pprint
import os
import sys
import requests
from collections import OrderedDict
import csv
import datetime
PYTHON_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
sys.path.append(PYTHON_PATH)
from housinginsights.sources.base import BaseApiConn
from housinginsights.tools.logger import HILogger
logger = HILogger(name=__file__, logfile="sources.log")
if __name__ == '__main__':
# Pushes everything from the logger to the command line output as well.
my_api = CamaApiConn()
csvfile = my_api.get_csv()
| 39.247748 | 177 | 0.572019 |
7dd9c7a745a3b97ae9face412cad220abf628e7d | 402 | py | Python | certbot_dns_cfproxy/__init__.py | ProfFan/certbot-dns-cfproxy | 999038999642e5ba070e7089d62146b4f05caa46 | [
"Apache-1.1"
] | 2 | 2019-02-04T06:06:00.000Z | 2022-03-04T09:12:31.000Z | certbot_dns_cfproxy/__init__.py | ProfFan/certbot-dns-cfproxy | 999038999642e5ba070e7089d62146b4f05caa46 | [
"Apache-1.1"
] | null | null | null | certbot_dns_cfproxy/__init__.py | ProfFan/certbot-dns-cfproxy | 999038999642e5ba070e7089d62146b4f05caa46 | [
"Apache-1.1"
] | null | null | null | """
The `~certbot_dns_cfproxy.dns_cfproxy` plugin automates the process of
completing a ``dns-01`` challenge (`~acme.challenges.DNS01`) by creating, and
subsequently removing, TXT records using the CFProxy API.
Examples
--------
.. code-block:: bash
:caption: To acquire a certificate for ``example.com``
certbot certonly \\
-a certbot-dns-cfproxy:dns-cfproxy \\
-d example.com
"""
| 23.647059 | 77 | 0.699005 |
7ddaf2399cff82d2687308f91fcf9cc720123562 | 1,234 | py | Python | web-app/servers/card-recognize/app.py | woojae9488/HLF_chaincode | 6737f70c1daea766fa567b08454b8b63a7a97d4a | [
"Apache-2.0"
] | 2 | 2020-02-10T00:08:11.000Z | 2020-02-19T02:06:14.000Z | web-app/servers/card-recognize/app.py | woojae9488/HLF_chaincode | 6737f70c1daea766fa567b08454b8b63a7a97d4a | [
"Apache-2.0"
] | 14 | 2020-04-26T13:56:38.000Z | 2020-09-23T06:27:53.000Z | web-app/servers/card-recognize/app.py | woojae9488/HLF_chaincode | 6737f70c1daea766fa567b08454b8b63a7a97d4a | [
"Apache-2.0"
] | 3 | 2020-04-25T11:28:57.000Z | 2022-03-28T12:19:25.000Z | from flask import Flask, make_response, request
from flask_cors import CORS
import json
from config import *
from StudentCard import *
from ApiError import *
App = Flask(__name__)
cors = CORS(App,
resources={r'*': {'origins': ENV.ADDR_API_GATEWAY}},
headers='Content-Type: application/json')
if __name__ == '__main__':
App.run(host='0.0.0.0', port=ENV.PORT)
| 26.826087 | 64 | 0.63047 |
7ddb8e0adf2de6b7f5b4a9514a61cad048355467 | 2,604 | py | Python | smart_home/power_controller.py | achuchev/-SmartHome-AlexaLambda | 0e8bfe30c76688a209ee4bc8d40016478d537aba | [
"MIT"
] | null | null | null | smart_home/power_controller.py | achuchev/-SmartHome-AlexaLambda | 0e8bfe30c76688a209ee4bc8d40016478d537aba | [
"MIT"
] | null | null | null | smart_home/power_controller.py | achuchev/-SmartHome-AlexaLambda | 0e8bfe30c76688a209ee4bc8d40016478d537aba | [
"MIT"
] | 1 | 2020-06-27T15:20:29.000Z | 2020-06-27T15:20:29.000Z | import logging
from smart_home.mqtt_client import MQTTClient
from smart_home.utils_lambda import get_utc_timestamp, error_response, success_response, get_request_message_id, get_mqtt_topics_from_request, get_request_name, get_friendly_name_from_request
| 37.2 | 191 | 0.661674 |
7ddd6afc3df36a52da70783ec74e257d9596b945 | 4,082 | py | Python | components/mpas-seaice/testing_and_setup/testcases/advection/plot_testcase.py | Fa-Li/E3SM | a91995093ec6fc0dd6e50114f3c70b5fb64de0f0 | [
"zlib-acknowledgement",
"FTL",
"RSA-MD"
] | 235 | 2018-04-23T16:30:06.000Z | 2022-03-21T17:53:12.000Z | components/mpas-seaice/testing_and_setup/testcases/advection/plot_testcase.py | Fa-Li/E3SM | a91995093ec6fc0dd6e50114f3c70b5fb64de0f0 | [
"zlib-acknowledgement",
"FTL",
"RSA-MD"
] | 2,372 | 2018-04-20T18:12:34.000Z | 2022-03-31T23:43:17.000Z | components/mpas-seaice/testing_and_setup/testcases/advection/plot_testcase.py | Fa-Li/E3SM | a91995093ec6fc0dd6e50114f3c70b5fb64de0f0 | [
"zlib-acknowledgement",
"FTL",
"RSA-MD"
] | 254 | 2018-04-20T20:43:32.000Z | 2022-03-30T20:13:38.000Z | from netCDF4 import Dataset
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.patches import Polygon
from matplotlib.collections import PatchCollection
import matplotlib.cm as cm
import numpy as np
#-------------------------------------------------------------
#-------------------------------------------------------------
#-------------------------------------------------------------------------------
if __name__ == "__main__":
plot_testcase()
| 31.160305 | 184 | 0.533317 |
7dddf53059511b42c5f3c624d5d6af6d11789231 | 87 | py | Python | ABC_A/ABC063_A.py | ryosuke0825/atcoder_python | 185cdbe7db44ecca1aaf357858d16d31ce515ddb | [
"MIT"
] | null | null | null | ABC_A/ABC063_A.py | ryosuke0825/atcoder_python | 185cdbe7db44ecca1aaf357858d16d31ce515ddb | [
"MIT"
] | null | null | null | ABC_A/ABC063_A.py | ryosuke0825/atcoder_python | 185cdbe7db44ecca1aaf357858d16d31ce515ddb | [
"MIT"
] | null | null | null | a, b = map(int, input().split())
if a+b >= 10:
print("error")
else:
print(a+b)
| 14.5 | 32 | 0.517241 |
7ddf431c5c2dcc581f44d2c5411d8380ca8401f0 | 2,278 | py | Python | aeropy/filehandling/paraview.py | belac626/AeroPy | 4f045306427e08b742237b7393ce9602f1072d60 | [
"MIT"
] | null | null | null | aeropy/filehandling/paraview.py | belac626/AeroPy | 4f045306427e08b742237b7393ce9602f1072d60 | [
"MIT"
] | null | null | null | aeropy/filehandling/paraview.py | belac626/AeroPy | 4f045306427e08b742237b7393ce9602f1072d60 | [
"MIT"
] | null | null | null | #### import the simple module from the paraview
from paraview.simple import *
#### disable automatic camera reset on 'Show'
paraview.simple._DisableFirstRenderCameraReset()
network_number = 2
filename = 'test_network'
directory = 'C:\\Users\\leal26\\Documents\\GitHub\\AeroPy\\aeropy\\CST\\'
# get active view
renderView = GetActiveViewOrCreate('RenderView')
assembly = []
for i in range(1,network_number+1):
# create a new 'XML Structured Grid Reader'
test_network_vts = XMLStructuredGridReader(FileName=[directory + filename + str(i)+'.vts'])
# show data in view
test_network_vtsDisplay = Show(test_network_vts, renderView)
# trace defaults for the display properties.
test_network_vtsDisplay.Representation = 'Surface With Edges'
test_network_vtsDisplay.ColorArrayName = [None, '']
test_network_vtsDisplay.OSPRayScaleFunction = 'PiecewiseFunction'
test_network_vtsDisplay.SelectOrientationVectors = 'None'
test_network_vtsDisplay.ScaleFactor = 0.1
test_network_vtsDisplay.SelectScaleArray = 'None'
test_network_vtsDisplay.GlyphType = 'Arrow'
test_network_vtsDisplay.GlyphTableIndexArray = 'None'
test_network_vtsDisplay.DataAxesGrid = 'GridAxesRepresentation'
test_network_vtsDisplay.PolarAxes = 'PolarAxesRepresentation'
test_network_vtsDisplay.ScalarOpacityUnitDistance = 0.3272506722223079
# init the 'PiecewiseFunction' selected for 'OSPRayScaleFunction'
test_network_vtsDisplay.OSPRayScaleFunction.Points = [2.326428429822192, 0.0, 0.5, 0.0, 37.626781425423815, 1.0, 0.5, 0.0]
# reset view to fit data
renderView.ResetCamera()
# update the view to ensure updated data information
renderView.Update()
#### saving camera placements for all active views
# current camera placement for renderView1
renderView.CameraPosition = [0.12476075744808501, 3.1845058646858693, 0.3710215545807592]
renderView.CameraFocalPoint = [0.5, 0.5, 0.0037752263491506906]
renderView.CameraViewUp = [-0.30729811760225784, -0.17101732138568032, 0.9361201539888863]
renderView.CameraParallelScale = 0.7079657120931511
#### uncomment the following to render all views
# RenderAllViews()
# alternatively, if you want to write images, you can use SaveScreenshot(...). | 43.807692 | 127 | 0.763389 |
7de07a2c955d17b395e18d20843ee393cc3f7511 | 21,804 | py | Python | pyFIRS/utils.py | Ecotrust/pyFIRS | f4bd8e11b24f125c59b69b04a7c3d11eabc0e81b | [
"BSD-3-Clause"
] | 3 | 2019-05-01T01:38:05.000Z | 2020-02-06T01:42:00.000Z | pyFIRS/utils.py | Ecotrust/pyFIRS | f4bd8e11b24f125c59b69b04a7c3d11eabc0e81b | [
"BSD-3-Clause"
] | 1 | 2019-09-19T00:56:58.000Z | 2019-09-19T00:56:58.000Z | pyFIRS/utils.py | Ecotrust/pyFIRS | f4bd8e11b24f125c59b69b04a7c3d11eabc0e81b | [
"BSD-3-Clause"
] | null | null | null | import glob
import json
import os
import subprocess
import time
import xml.etree.ElementTree as ET
from xml.etree.ElementTree import ParseError
import geopandas as gpd
import rasterio
import numpy as np
from shapely.geometry import Polygon
def listlike(arg):
    '''Checks whether an argument is list-like, returns boolean'''
    # Strings (anything with .strip) are deliberately excluded; everything
    # else that is indexable or iterable counts as list-like.
    return not hasattr(arg, "strip") and (hasattr(arg, "__getitem__")
                                          or hasattr(arg, "__iter__"))


def clean_dir(dir_to_clean, file_extensions):
    '''Deletes files with specified extension(s) from a directory.

    This function is intended to help cleanup outputs from command line
    tools that we do not want to keep. Files to be deleted will be
    identified using a wildcard with that file extension in dir_to_clean.

    Parameters
    ----------
    dir_to_clean: string, path
        path to directory to delete files from
    file_extensions: string or list-like of strings
        file extension(s) that will be used for identifying files to remove,
        such as '.kml' or ['.tfw', '.kml'].

    Raises
    ------
    TypeError
        if file_extensions is neither a string nor list-like of strings
    '''
    def _remove_files_with_ext(ext):
        # delete every file in dir_to_clean matching *<ext>, then report
        to_rem = glob.glob(os.path.join(dir_to_clean, '*{}'.format(ext)))
        for file in to_rem:
            os.remove(file)
        print("Removed {:,d} files with extension {}.".format(
            len(to_rem), ext))

    if listlike(file_extensions):
        for ext in file_extensions:
            _remove_files_with_ext(ext)
    elif isinstance(file_extensions, str):
        # BUG FIX: this branch previously referenced the undefined names
        # 'file_extension' and 'ext', so passing a plain string raised
        # NameError instead of removing anything.
        _remove_files_with_ext(file_extensions)
    else:
        # BUG FIX: previously written as `raise (TypeError, msg)`, which
        # raises a tuple and is itself a TypeError in Python 3; raise a
        # proper exception with the intended message instead.
        raise TypeError(
            'file_extensions needs to be a string or list-like of strings.')
def clean_buffer_polys(poly_shp,
                       tile_shp,
                       odir,
                       simp_tol=None,
                       simp_topol=None):
    """Removes polygons within the buffer zone of a tile.

    This function removes polygons from a shapefile that fall in the buffered
    area of point cloud tile. When building footprints or tree crowns (for
    example) are delineated from a point cloud, a buffer around the tile is
    generally be used to avoid edge effects. This tool computes the centroid of
    each polygon and determines whether it falls within the bounds of the
    unbuffered tile. It outputs a new shapefile containing only those polygons
    whose centroids fall within the unbuffered tile.

    The polygons may be simplified using optional arguments simp_tol and
    simp_topol to reduce the number of points that define their boundaries.

    Parameters
    ----------
    poly_shp: string, path to shapefile (required)
        A shapefile containing the polygons delineated within a buffered tile.
    tile_shp: string, path to shapefile (required)
        A shapefile containing the bounds of the tile WITHOUT buffers
    odir: string, path to directory (required)
        Path to the output directory for the new shapefile
    simp_tol = numeric,
        Tolerance level for simplification. All points within a simplified
        geometry will be no more than simp_tol from the original.
    simp_topol = boolean (optional)
        Whether or not to preserve topology of polygons. If False, a quicker
        algorithm will be used, but may produce self-intersecting or otherwise
        invalid geometries.
    """
    # NOTE(review): this local name shadows the module-level fname() helper
    # defined further down in this file; harmless, but easy to misread.
    fname = os.path.basename(poly_shp)
    outfile = os.path.join(odir, fname)
    os.makedirs(odir, exist_ok=True)
    tile_boundary = gpd.read_file(tile_shp)
    polys = gpd.read_file(poly_shp)
    # boolean indicator of whether each polygon falls within tile boundary
    clean_polys_ix = polys.centroid.within(tile_boundary.loc[0].geometry)
    # retrieve the polygons within the boundary
    clean_polys = polys[clean_polys_ix]
    if simp_tol:
        # NOTE(review): GeoDataFrame.simplify returns a GeoSeries, so any
        # attribute columns are dropped before writing -- confirm that losing
        # attributes when simplification is requested is intentional.
        clean_polys = clean_polys.simplify(simp_tol, simp_topol)
    # only write an output shapefile if at least one polygon survived
    if len(clean_polys) > 0:
        clean_polys.to_file(outfile)
def clip_tile_from_shp(in_raster, in_shp, odir, buffer=0):
    '''Clips a raster image to the bounding box of a shapefile.

    The input raster will be clipped using a rasterio command line tool. The
    output raster will have the same name and file type as the input raster, and
    will be written to the output directory, odir. The process is executed using
    subprocess.run().

    Parameters
    ----------
    in_raster: string, path to file
        raster image to be clipped
    in_shp: string, path to file
        shapefile from which bounding box is calculated to clip the raster
    odir: string, path
        output directory where clipped raster will be stored
    buffer: numeric
        additional buffer to add to total bounding box of shapefile when
        clipping the raster

    Returns
    -------
    proc_clip: CompletedProcess
        The result of executing subprocess.run using the rio clip command.
        NOTE(review): a non-zero exit status is not raised here; callers
        should inspect proc_clip.returncode / proc_clip.stderr.
    '''
    basename = os.path.basename(in_raster)
    # read the shapefile using geopandas and calculate its bounds,
    # expanded by `buffer` on every side
    gdf = gpd.read_file(in_shp)
    tile_bnds = ' '.join(str(x) for x in gdf.buffer(buffer).total_bounds)
    # create the output directory if it doesn't already exist
    os.makedirs(odir, exist_ok=True)
    outfile = os.path.join(odir, basename)
    # clip the raster with the rasterio CLI, capturing its output
    proc_clip = subprocess.run(
        ['rio', 'clip', in_raster, outfile, '--bounds', tile_bnds],
        stderr=subprocess.PIPE,
        stdout=subprocess.PIPE)
    return proc_clip
def convert_project(infile, outfile, crs):
    '''Converts a raster to another format and specifies its projection.

    Both steps shell out to the rasterio command line interface ("rio") via
    subprocess.run, capturing stdout and stderr of each call. Failures are
    not raised; callers should inspect the returncode of each result.

    Parameters
    ----------
    infile: string, path to file
        input raster to be converted
    outfile: string, path to file
        output raster to be generated
    crs: string
        specification of coordinate reference system to use following rasterio
        command line tool (RIO) formatting (e.g., 'EPSG:3857')

    Returns
    -------
    proc_convert: CompletedProcess
        result of executing subprocess.run using rio convert
    proc_project: CompletedProcess
        result of executing subprocess.run using rio edit-info
    '''
    def _run_rio(subcommand_args):
        # run one rio subcommand with captured output
        return subprocess.run(['rio'] + subcommand_args,
                              stderr=subprocess.PIPE,
                              stdout=subprocess.PIPE)

    # translate the raster into the format implied by outfile's extension
    proc_convert = _run_rio(['convert', infile, outfile])
    # stamp the converted file with the requested projection
    proc_project = _run_rio(['edit-info', '--crs', crs, outfile])
    return proc_convert, proc_project
def validation_summary(xml_dir, verbose=False):
    '''
    Generates a summary of validation results for a directory of lidar files

    Parameters
    ----------
    xml_dir : string, path to directory
        directory containing xml files produced by LASvalidate
    verbose : boolean
        whether or not to include the messages describing why any files
        produced warning or failed validation.

    Returns
    -------
    summary_report : a printed report
    '''
    tallies = {'pass': 0, 'fail': 0, 'warning': 0}
    parse_errors = 0
    issue_messages = {'fail': [], 'warning': []}

    for report_path in glob.glob(os.path.join(xml_dir, '*.xml')):
        tile_id = os.path.basename(report_path).split('.')[0]
        # reports that are not well-formed XML are tallied separately; any
        # other error (e.g. a missing element) propagates to the caller
        try:
            report = ET.parse(report_path).getroot().find('report')
            outcome = report.find('summary').text.strip()
            if outcome == 'pass':
                tallies['pass'] += 1
            else:
                issue = report.find('details').find(outcome)
                variable = issue.find('variable').text
                note = issue.find('note').text
                if outcome in issue_messages:
                    tallies[outcome] += 1
                    issue_messages[outcome].append('{} -> {} | {} : {}'.format(
                        tile_id, outcome, variable, note))
        except ParseError:
            parse_errors += 1

    summary = '''LASvalidate Summary
    ====================
    Passed: {:,d}
    Failed: {:,d}
    Warnings: {:,d}
    ParseErrors: {:,d}
    '''.format(tallies['pass'], tallies['fail'], tallies['warning'],
               parse_errors)

    details = '''Details
    ========
    {}
    {}
    '''.format('\n'.join(issue_messages['fail']),
               '\n'.join(issue_messages['warning']))

    print(summary)
    if verbose:
        print(details)
def move_invalid_tiles(xml_dir, dest_dir):
    '''Moves lidar data that fail validation checks into a new directory

    For every LASvalidate report whose summary is 'fail', all files in
    xml_dir sharing that tile's name prefix (point cloud, report, etc.) are
    moved into dest_dir, which is created on demand.

    Parameters
    ----------
    xml_dir : str, path to directory
        where the xml reports produced by LASvalidate can be found
    dest_dir : str, path to directory
        where you would like the point cloud and associated files to be moved

    Returns
    -------
    A printed statement about how many tiles were moved.
    '''
    num_invalid = 0
    for validation_report in glob.glob(os.path.join(xml_dir, '*.xml')):
        tile_id = os.path.basename(validation_report).split('.')[0]
        outcome = ET.parse(validation_report).getroot().find('report').find(
            'summary').text.strip()
        if outcome != 'fail':
            continue
        # relocate every file belonging to this tile
        os.makedirs(dest_dir, exist_ok=True)
        for tile_file in glob.glob(os.path.join(xml_dir, tile_id + '*')):
            target = os.path.join(dest_dir, os.path.basename(tile_file))
            os.rename(tile_file, target)
        num_invalid += 1
    print('Moved files for {} invalid tiles to {}'.format(
        num_invalid, dest_dir))
def get_bbox_as_poly(infile, epsg=None):
    """Extract a file's bounding box as a shapely Polygon via PDAL's info tool.

    Runs `pdal info` on the input file, reads the native boundary coordinates
    out of the JSON it prints, and wraps them in a Polygon. If an EPSG code is
    supplied the Polygon is returned wrapped in a single-row GeoDataFrame with
    that coordinate reference system.

    Parameters
    ----------
    infile : str, path to file
        path to input file that PDAL can read
    epsg : int
        EPSG code defining the coordinate reference system. Optional.

    Returns
    -------
    bbox_poly : Polygon or GeoDataFrame
        shapely Polygon of the bounding box, or a GeoPandas GeoDataFrame
        containing it when an EPSG code was provided
    """
    info = subprocess.run(['pdal', 'info', infile],
                          stderr=subprocess.PIPE,
                          stdout=subprocess.PIPE)
    parsed = json.loads(info.stdout.decode())
    boundary = parsed['stats']['bbox']['native']['boundary']['coordinates']
    bbox_poly = Polygon(*boundary)
    if epsg:
        return gpd.GeoDataFrame(geometry=[bbox_poly],
                                crs={'init': 'epsg:{}'.format(epsg)})
    return bbox_poly
def fname(path):
    """Return the base name of *path* with everything from the first '.' on
    stripped off.

    Parameters
    -----------
    path : str, path to file
        filepath from which the name is extracted

    Returns
    --------
    str
        file name without directory components or extension
    """
    return os.path.basename(path).split('.')[0]
def annulus(inner_radius, outer_radius, dtype=np.uint8):
    """Build a flat, donut-shaped (annular) structuring element.

    A cell belongs to the neighborhood when its euclidean distance from the
    origin lies between inner_radius and outer_radius, inclusive.

    Parameters
    ----------
    inner_radius : int
        The inner radius of the annular structuring element
    outer_radius : int
        The outer radius of the annular structuring element
    dtype : data-type
        The data type of the structuring element

    Returns
    -------
    selem : ndarray
        Square array of side 2 * outer_radius + 1 where elements of the
        neighborhood are 1 and 0 otherwise
    """
    coords = np.arange(-outer_radius, outer_radius + 1)
    xx, yy = np.meshgrid(coords, coords)
    dist_sq = xx ** 2 + yy ** 2
    in_ring = (dist_sq <= outer_radius ** 2) & (dist_sq >= inner_radius ** 2)
    return np.array(in_ring, dtype=dtype)
def inspect_failures(failed_dir):
    """Prints error messages reported for tiles that failed in the lidar
    processing pipeline.

    Each failure report is a text file; its non-blank lines are printed as a
    list, followed by a dashed separator line.

    Parameters
    ----------
    failed_dir : string, path to directory
        path to directory containing text files indicating any tiles which
        failed processing
    """
    for report_path in glob.glob(os.path.join(failed_dir, '*.txt')):
        with open(report_path) as report:
            nonblank_lines = [ln for ln in report.readlines()
                              if ln.rstrip() != '']
        print(nonblank_lines)
        print('----------------------')
def processing_summary(all_tiles, already_finished, processing_tiles,
                       finished_dir, failed_dir, start_time):
    """Prints a summary indicating progress of a lidar processing pipeline.

    Progress is inferred from marker text files: one per finished tile in
    finished_dir and one per failed tile in failed_dir. Prints a count
    summary, two 70-character progress bars ('=' finished, ' ' unfinished,
    '!' failed) for this run and for the whole acquisition, and an estimated
    time to completion.

    NOTE(review): raises ZeroDivisionError if all_tiles or processing_tiles
    is empty -- confirm callers guarantee both are non-empty.

    Parameters
    ----------
    all_tiles : list-like
        all tiles within a lidar acquisition
    already_finished : list-like
        tiles which were successfully processed in a previous execution of the
        processing pipeline
    processing_tiles : list-like
        tiles which are being processed during the currently executing pipeline
    finished_dir : string, path to directory
        path to directory containing text files indicating any tiles which have
        finished processing
    failed_dir : string, path to directory
        path to directory containing text files indicating any tiles which
        failed processing
    start_time : float
        time the pipeline execution began, produced by time.time()
    """
    # one marker file per failed / finished tile (finished includes tiles
    # completed in previous runs)
    failed = glob.glob(os.path.join(failed_dir, '*.txt'))
    finished = glob.glob(os.path.join(finished_dir, '*.txt'))
    # "finished this run" below is len(finished) minus tiles that were
    # already done before this run, i.e. len(all_tiles) - len(processing_tiles)
    summary = '''
    Processing Summary
    -------------------
    {:>5,d} tiles in acquisition
    {:>5,d} tiles previously finished in acquisition
    {:>5,d} tiles being processed in this run
    {:>5,d} tiles from this run finished
    {:>5,d} tiles failed
    '''.format(
        len(all_tiles), len(already_finished), len(processing_tiles),
        len(finished) - (len(all_tiles) - len(processing_tiles)), len(failed))
    # widths (out of 70 characters) for each bar segment
    total_percent_unfinished = int(70 * (1 - len(finished) / len(all_tiles)))
    total_percent_finished = int(70 * len(finished) / len(all_tiles))
    total_percent_failed = int(70 * len(failed) / len(all_tiles))
    this_run_unfinished = int(70 - 70*(len(finished) - (len(all_tiles) - \
        len(processing_tiles))) / len(processing_tiles))
    this_run_finished = int(70*(len(finished) - (len(all_tiles) - \
        len(processing_tiles))) / len(processing_tiles))
    # first bar: this run's progress; second bar: whole-acquisition progress
    progress_bars = '|' + '=' * this_run_finished + ' '* this_run_unfinished +\
        '!' * total_percent_failed + '| {:.1%} this run\n'.format((len(finished)\
        - (len(all_tiles) - len(processing_tiles))) / len(processing_tiles)) + \
        '|' + '=' * total_percent_finished + ' ' * total_percent_unfinished + '!' \
        * total_percent_failed + '| {:.1%} total'.format(len(finished) / \
        len(all_tiles))
    print(summary)
    print(progress_bars)
    time_to_complete(start_time, len(processing_tiles),
                     len(finished) - (len(all_tiles) - len(processing_tiles)))
def print_dhms(s):
    """Prints number of days, hours, minutes, and seconds
    represented by number of seconds provided as input.

    Parameters
    ----------
    s : numeric
        seconds
    """
    days, s = divmod(s, 24 * 3600)
    hours, s = divmod(s, 3600)
    minutes, seconds = divmod(s, 60)
    # drop the larger units when they are zero, padding to keep alignment
    if days > 0:
        print(f'{days:2.0f}d {hours:2.0f}h {minutes:2.0f}m {seconds:2.0f}s')
    elif hours > 0:
        print(f'    {hours:2.0f}h {minutes:2.0f}m {seconds:2.0f}s')
    else:
        print(f'        {minutes:2.0f}m {seconds:2.0f}s')
def time_to_complete(start_time, num_jobs, jobs_completed):
    """Prints elapsed time and estimated time of completion.

    The remaining time is a linear extrapolation: if p is the fraction of
    jobs done, the total runtime is estimated as elapsed / p.

    Parameters
    ----------
    start_time : float
        time the pipeline execution began, produced by time.time()
    num_jobs : int
        total number of jobs to be completed
    jobs_completed : int
        number of jobs completed so far
    """
    if jobs_completed == 0:
        print('\nNo jobs completed yet.')
        return
    elapsed = time.time() - start_time
    frac_done = jobs_completed / num_jobs
    # estimated total runtime minus what has already elapsed
    time_left = elapsed / frac_done - elapsed
    print('\nelapsed: ', end='\t')
    print_dhms(elapsed)
    print('remaining: ', end='\t')
    print_dhms(time_left)
def make_buffered_fishnet(xmin, ymin, xmax, ymax, crs, spacing=1000,
                          buffer=50):
    """Makes a GeoDataFrame with a fishnet grid that has overlapping edges.

    Converts an existing lidar tiling scheme into one that has overlapping
    tiles and which is aligned with a grid based on the spacing parameter.

    Parameters
    ----------
    xmin, ymin, xmax, ymax : numeric
        Values indicating the extent of the existing lidar data.
    crs : Coordinate Reference System
        Must be readable by GeoPandas to create a GeoDataFrame.
    spacing : int
        Length and width of tiles in new tiling scheme prior to buffering
    buffer : int
        Amount of overlap between neighboring tiles.

    Returns
    -------
    GeoDataFrame
        One square (spacing + 2*buffer)-sided polygon per tile, indexed by
        tile_id strings of the form '{LLX}_{LLY}_{spacing}' where LLX/LLY is
        the unbuffered lower-left corner.
    """
    # snap the extent outward so it aligns to the spacing grid
    xmin, ymin = (
        np.floor(np.array([xmin, ymin]) // spacing) * spacing).astype(int)
    xmax, ymax = (
        np.ceil(np.array([xmax, ymax]) // spacing) * spacing).astype(int) + spacing
    # grid of candidate corner coordinates, one node per tile corner
    xx, yy = np.meshgrid(
        np.arange(xmin, xmax + spacing, spacing),
        np.arange(ymin, ymax + spacing, spacing))
    # shift grid-node x/y coordinates outward by the buffer to get the
    # buffered left/right/bottom/top edges of each tile
    xx_leftbuff = xx[:, :-1] - buffer
    xx_rightbuff = xx[:, 1:] + buffer
    yy_downbuff = yy[:-1, :] - buffer
    yy_upbuff = yy[1:, :] + buffer
    # assemble the four buffered corners (lower-left, upper-left,
    # upper-right, lower-right) of every tile as (N, 2) arrays
    ll = np.stack((
        xx_leftbuff[1:, :].ravel(),  # skip top row
        yy_downbuff[:, :-1].ravel())).T  # skip right-most column
    ul = np.stack((
        xx_leftbuff[:-1, :].ravel(),  # skip bottom row
        yy_upbuff[:, :-1].ravel())).T  # skip right-most column
    ur = np.stack((
        xx_rightbuff[:-1, :].ravel(),  # skip bottom row
        yy_upbuff[:, 1:].ravel())).T  # skip left-most column
    lr = np.stack((
        xx_rightbuff[1:, :].ravel(),  # skip top row
        yy_downbuff[:, 1:].ravel())).T  # skip left-most column
    # shape (4, N, 2): the four corners of each of the N tiles
    buff_fishnet = np.stack([ll, ul, ur, lr])
    polys = [
        Polygon(buff_fishnet[:, i, :]) for i in range(buff_fishnet.shape[1])
    ]
    # ll + buffer undoes the buffering, recovering the unbuffered
    # lower-left corner used to name each tile
    ll_names = [x for x in (ll + buffer).astype(int).astype(str)]
    tile_ids = [
        '_'.join(tile) + '_{}'.format(str(spacing)) for tile in ll_names
    ]
    buff_fishnet_gdf = gpd.GeoDataFrame(geometry=polys, crs=crs)
    buff_fishnet_gdf['tile_id'] = tile_ids
    return buff_fishnet_gdf.set_index('tile_id')
def get_intersecting_tiles(src_tiles, new_tiles):
    """Identifies tiles from src that intersect tiles in new_tiles.

    This function is intended to identify the files which should be read for
    retiling a lidar acquisition into the new_tiles layout.

    src_tiles is expected to have a 'file_name' field.

    Parameters
    ----------
    src_tiles : GeoDataFrame
        Original tiling scheme for lidar acquisition
    new_tiles : GeoDataFrame
        New tiling scheme for lidar acquisition, such as one created by the
        make_buffered_fishnet function

    Returns
    -------
    joined_tiles : GeoDataFrame
        Each row shows a tile from new_tiles that intersected with one or more
        tiles from src_tiles. The list of tiles from src_tiles that intersect
        each tile in new_tiles are formatted as a space-delimited string.
    """
    # spatial join: one row per (new tile, intersecting src tile) pair,
    # indexed by new_tiles' index
    joined = gpd.sjoin(new_tiles, src_tiles)
    # collapse back to one row per new tile, concatenating the intersecting
    # source file names into a single space-delimited string
    joined_tiles = joined.groupby(level=0)['file_name'].apply(list).apply(
        ' '.join).to_frame()
    joined_tiles.index.name = 'tile_id'
    joined_tiles = joined_tiles.rename({
        'file_name': 'intersecting_files'
    },
                                       axis=1)
    return joined_tiles
def parse_coords_from_tileid(tile_id):
    """Get the coordinates of the lower left corner of the tile, assuming the
    tile has been named in the pattern {XMIN}_{YMIN}_{LENGTH}.

    Parameters
    ----------
    tile_id : string
        assumed tile_id follows the naming convention of {LLX}_{LLY}_{LENGTH}
        where:
        LLX = x-coordinate of lower-left corner of tile (in projected units)
        LLY = y-coordinate of lower-left corner of tile (in projected units)
        LENGTH = length of the raster (in projected units), assumed to be a
        square tile shape; may be omitted, in which case 1000 is assumed

    Returns
    -------
    llx, lly, length : int
        x- and y- coordinates of lower-left corner and length of raster

    Raises
    ------
    ValueError
        if tile_id does not consist of two or three underscore-separated
        integer fields (previously a malformed id produced an opaque
        UnboundLocalError at the return statement)
    """
    tile_parts = tile_id.split('_')
    if len(tile_parts) == 2:
        llx, lly = (int(coord) for coord in tile_parts)
        length = 1000  # assumed tile width when not encoded in the id
    elif len(tile_parts) == 3:
        llx, lly, length = (int(coord) for coord in tile_parts)
    else:
        # BUG FIX: previously fell through with llx/lly/length unassigned
        raise ValueError(
            'tile_id {!r} does not match the expected '
            '{{LLX}}_{{LLY}} or {{LLX}}_{{LLY}}_{{LENGTH}} pattern'.format(
                tile_id))
    return llx, lly, length
| 34.175549 | 83 | 0.631948 |
7de18177bc8f9c705a1427b2d13f1d6f74890139 | 1,308 | py | Python | test/test_message.py | Smac01/Stego | 0bcf94642871e611b6731676591a571ff40ce4a0 | [
"MIT"
] | null | null | null | test/test_message.py | Smac01/Stego | 0bcf94642871e611b6731676591a571ff40ce4a0 | [
"MIT"
] | null | null | null | test/test_message.py | Smac01/Stego | 0bcf94642871e611b6731676591a571ff40ce4a0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import unittest
import sys
sys.path.insert(0, '.')
from random import choice
from PIL import Image
from stego.encoder import embed
from stego.decoder import extract, _decompress, IncorrectPassword
from stego.base import make_array, as_string, extract_metadata
images = ['test/rgba.png', 'test/cmyk.tiff', 'test/greyscale.bmp']
image = choice(images)
message = b'Pixels -> smallest unit(small colored square) that constitutes an images.'
key = b'my_secret_key'
if __name__ == '__main__':
unittest.main() | 25.647059 | 86 | 0.769113 |
7de18971fbc5070d8520ba99ee3a0260bc65c1f5 | 638 | py | Python | tests/test_simulation_utils.py | burgersmoke/epysurv | 606aaf1d50a8d39473dc245e78e5b2eb122c2ba3 | [
"MIT"
] | 8 | 2019-07-29T09:44:43.000Z | 2021-06-29T20:54:57.000Z | tests/test_simulation_utils.py | burgersmoke/epysurv | 606aaf1d50a8d39473dc245e78e5b2eb122c2ba3 | [
"MIT"
] | 31 | 2019-03-29T23:05:33.000Z | 2021-10-04T16:26:46.000Z | tests/test_simulation_utils.py | burgersmoke/epysurv | 606aaf1d50a8d39473dc245e78e5b2eb122c2ba3 | [
"MIT"
] | 4 | 2019-09-25T12:54:22.000Z | 2021-08-11T06:40:45.000Z | import pandas as pd
from rpy2 import robjects
from epysurv.simulation.utils import add_date_time_index_to_frame, r_list_to_frame
| 29 | 82 | 0.716301 |
7de40d6d686a2a728856437129c7cee7420fe46e | 95 | py | Python | assemble/tool/assemble_CodeBlockUnixMake.py | vbloodv/blood | 1c6f6c54e937e8d7064f72f32cbbcc8fdaa2677d | [
"MIT"
] | 2 | 2015-08-27T20:02:04.000Z | 2015-08-31T12:33:13.000Z | assemble/tool/assemble_CodeBlockUnixMake.py | vbloodv/blood | 1c6f6c54e937e8d7064f72f32cbbcc8fdaa2677d | [
"MIT"
] | null | null | null | assemble/tool/assemble_CodeBlockUnixMake.py | vbloodv/blood | 1c6f6c54e937e8d7064f72f32cbbcc8fdaa2677d | [
"MIT"
] | null | null | null | import cmake
cmake.buildCmake(
'CodeBlockUnixMake',
'../../',
'../../assemble/'
)
| 11.875 | 24 | 0.536842 |
7de5842a34fa630d341aa9d9c697b199184bb9ae | 655 | py | Python | examples/python/bunny_pieline.py | Willyzw/vdbfusion | ca9107a3f44e43629b149ea80c9cd21d9f274baa | [
"MIT"
] | 119 | 2022-02-08T15:25:25.000Z | 2022-03-29T12:16:35.000Z | examples/python/bunny_pieline.py | arenas7307979/vdbfusion | 7ed8d3142b4b6e164633516f0ed435e1065e5212 | [
"MIT"
] | 10 | 2022-02-09T07:54:23.000Z | 2022-03-25T03:12:47.000Z | examples/python/bunny_pieline.py | arenas7307979/vdbfusion | 7ed8d3142b4b6e164633516f0ed435e1065e5212 | [
"MIT"
] | 11 | 2022-02-08T15:33:44.000Z | 2022-02-22T02:47:27.000Z | #!/usr/bin/env python3
# @file cow_pipeline.py
# @author Ignacio Vizzo [ivizzo@uni-bonn.de]
#
# Copyright (c) 2021 Ignacio Vizzo, all rights reserved
import argh
from datasets import BunnyGeneratedDataset as Dataset
from vdbfusion_pipeline import VDBFusionPipeline as Pipeline
def main(
    data_source: str,
    config: str = "config/bunny.yaml",
    visualize: bool = False,
):
    """Fuse the bunny dataset into a VDB map.

    Loads frames from *data_source*, runs the fusion pipeline with the
    settings from *config* under the map name "bunny", and opens the
    visualizer afterwards when *visualize* is set.
    """
    bunny_frames = Dataset(data_source, apply_pose=True)
    fusion = Pipeline(bunny_frames, config, map_name="bunny")
    fusion.run()
    if visualize:
        fusion.visualize()
if __name__ == "__main__":
    argh.dispatch_command(main)
| 25.192308 | 60 | 0.708397 |
7de74902240dafd5d3ece0f149442d4593ed9d43 | 1,091 | py | Python | tests/test_dashboard_generator_generate_widget.py | phelewski/aws-codepipeline-dashboard | c32fbfb01b383be9b5f813fac4ed36074e3ddc7e | [
"MIT"
] | null | null | null | tests/test_dashboard_generator_generate_widget.py | phelewski/aws-codepipeline-dashboard | c32fbfb01b383be9b5f813fac4ed36074e3ddc7e | [
"MIT"
] | 5 | 2021-04-02T18:12:58.000Z | 2021-05-21T12:15:30.000Z | tests/test_dashboard_generator_generate_widget.py | phelewski/aws-codepipeline-dashboard | c32fbfb01b383be9b5f813fac4ed36074e3ddc7e | [
"MIT"
] | null | null | null | import os
import pytest
from dashboard_generator import DashboardGenerator
| 29.486486 | 84 | 0.6022 |
7de837001eba6d36074503fa3a70a1bcb083d08b | 795 | py | Python | opencadd/tests/structure/test_superposition_mda.py | pipaj97/opencadd | 4fcf090bd612a22df9d617473ae458316a4cb4b6 | [
"MIT"
] | 39 | 2020-08-14T07:33:21.000Z | 2022-03-30T02:05:19.000Z | opencadd/tests/structure/test_superposition_mda.py | Allend95/opencadd | 1fde238e3cf8e5e47e8266a504d9df0196505e97 | [
"MIT"
] | 94 | 2020-06-29T12:47:46.000Z | 2022-02-13T19:16:25.000Z | opencadd/tests/structure/test_superposition_mda.py | Allend95/opencadd | 1fde238e3cf8e5e47e8266a504d9df0196505e97 | [
"MIT"
] | 11 | 2020-11-11T17:12:38.000Z | 2022-03-21T09:23:39.000Z | """
Tests for opencadd.structure.superposition.engines.mda
"""
import pytest
from opencadd.structure.core import Structure
from opencadd.structure.superposition.engines.mda import MDAnalysisAligner
| 28.392857 | 98 | 0.733333 |
7de96af0dde6dfcdb1cd866b9ae2a4a947f6d2c2 | 274 | py | Python | examples/python/hello2.py | redcodestudios/legion_script | 565fb9bc6fe1dd9d9cf1a2111fd4e4cef4cd3565 | [
"MIT"
] | 13 | 2020-10-21T16:34:50.000Z | 2022-03-16T14:37:30.000Z | examples/python/hello2.py | redcodestudios/legion_script | 565fb9bc6fe1dd9d9cf1a2111fd4e4cef4cd3565 | [
"MIT"
] | null | null | null | examples/python/hello2.py | redcodestudios/legion_script | 565fb9bc6fe1dd9d9cf1a2111fd4e4cef4cd3565 | [
"MIT"
] | null | null | null | import engine
print("Python: Script 2")
# Ask the embedding engine for every entity carrying a Color component.
# NOTE(review): both `engine` and `Color` are injected by the host
# (imported/fused above) — confirm against the Rust-side bindings.
result = engine.query(Color)
print("Python: Query colors from Script 2")
for c in result:
    # c.string() is provided by the engine bindings; presumably it prints
    # the color's value — confirm.
    c.string()
    print("--------------------")
| 18.266667 | 47 | 0.638686 |
7de9ef0f997041de89ca689516f1669065c3dc15 | 4,389 | py | Python | io_scene_halo/file_tag/import_tag.py | AerialDave144/Halo-Asset-Blender-Development-Toolset | f1b0c0b22806ebabaf0126ad864896193c02307f | [
"MIT"
] | null | null | null | io_scene_halo/file_tag/import_tag.py | AerialDave144/Halo-Asset-Blender-Development-Toolset | f1b0c0b22806ebabaf0126ad864896193c02307f | [
"MIT"
] | null | null | null | io_scene_halo/file_tag/import_tag.py | AerialDave144/Halo-Asset-Blender-Development-Toolset | f1b0c0b22806ebabaf0126ad864896193c02307f | [
"MIT"
] | null | null | null | # ##### BEGIN MIT LICENSE BLOCK #####
#
# MIT License
#
# Copyright (c) 2022 Steven Garcia
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# ##### END MIT LICENSE BLOCK #####
import bpy
from ..global_functions import tag_format
from ..file_model import build_scene as build_scene_model
from ..file_physics import build_scene as build_scene_physics
from ..file_animation import build_scene as build_scene_animation
from ..file_collision import build_scene as build_scene_collision
from ..file_level import build_scene as build_scene_level
from ..file_camera_track import build_scene as build_camera_track
from ..file_model.process_file_mode_retail import process_file_mode_retail as process_mode
from ..file_model.process_file_mod2_retail import process_file_mod2_retail as process_mod2
from ..file_collision.process_file_retail import process_file_retail as process_collision_retail
from ..file_physics.process_file_retail import process_file_retail as process_physics_retail
from ..file_animation.process_file_retail import process_file_retail as process_animation_retail
from ..file_level.h1.process_file_retail import process_file_retail as process_level_retail
from ..file_level.h2.process_file import process_file_retail as process_h2_level
from ..file_camera_track.process_file_retail import process_file_retail as process_camera_track_retail
if __name__ == '__main__':
bpy.ops.import_scene.model()
| 39.1875 | 125 | 0.75188 |
7deaa318807e1bc9f791206624ba77111ef68405 | 1,537 | py | Python | pines/smartread.py | jpn--/pine | 3980a9f0b09dd36b2fed7e52750847637be5f067 | [
"MIT"
] | 2 | 2017-08-09T02:42:37.000Z | 2020-06-16T14:14:16.000Z | pines/smartread.py | jpn--/pine | 3980a9f0b09dd36b2fed7e52750847637be5f067 | [
"MIT"
] | null | null | null | pines/smartread.py | jpn--/pine | 3980a9f0b09dd36b2fed7e52750847637be5f067 | [
"MIT"
] | null | null | null |
import gzip, os, struct, zipfile, io
| 30.137255 | 94 | 0.657775 |
7dec49d76923ecb9be4b17a5cb4c2e8eb1b3a170 | 510 | py | Python | lib/clckwrkbdgr/time.py | umi0451/dotfiles | c618811be788d995fe01f6a16b355828d7efdd36 | [
"MIT"
] | 2 | 2017-04-16T14:54:17.000Z | 2020-11-12T04:15:00.000Z | lib/clckwrkbdgr/time.py | clckwrkbdgr/dotfiles | 292dac8c3211248b490ddbae55fe2adfffcfcf58 | [
"MIT"
] | null | null | null | lib/clckwrkbdgr/time.py | clckwrkbdgr/dotfiles | 292dac8c3211248b490ddbae55fe2adfffcfcf58 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from time import *
import calendar
import datetime
import six
def get_utctimestamp(mtime=None): # pragma: no cover
	""" Converts local mtime (timestamp) to integer UTC timestamp.

	The local wall-clock time corresponding to the timestamp is
	re-interpreted as if it were UTC, i.e. the result is shifted from
	the raw timestamp by the local UTC offset.
	If mtime is None, the current time is used.
	"""
	# Fixes two defects in the original:
	#  * calendar.timegm was used without 'calendar' being imported, so
	#    any non-None mtime raised NameError;
	#  * the Python 3 branch used datetime.utcnow().timestamp(), which
	#    interprets the naive UTC wall-clock as *local* time and so
	#    disagreed with both the Python 2 branch and the mtime branch.
	# Both cases now go through the same conversion.
	if mtime is None:
		mtime = time()
	return int(calendar.timegm(datetime.datetime.fromtimestamp(mtime).timetuple()))
| 31.875 | 88 | 0.754902 |
7dee18b24646c7a504a2809135076549b5a544fa | 135 | py | Python | configutator/__version.py | innovate-invent/configutator | 372b45c44a10171b8518e61f2a7974969304c33a | [
"MIT"
] | null | null | null | configutator/__version.py | innovate-invent/configutator | 372b45c44a10171b8518e61f2a7974969304c33a | [
"MIT"
] | 1 | 2017-09-22T05:52:54.000Z | 2017-09-22T05:52:54.000Z | configutator/__version.py | innovate-invent/configutator | 372b45c44a10171b8518e61f2a7974969304c33a | [
"MIT"
] | null | null | null | __version__ = [1, 0, 2]
__versionstr__ = '.'.join([str(i) for i in __version__])
if __name__ == '__main__':
print(__versionstr__) | 22.5 | 56 | 0.666667 |
7dee5b01ddca7ca6f3f444bdaf770ca84c443c68 | 572 | py | Python | tests/integration/test_serialise.py | csiro-easi/eo-datasets | 7805c569763f828cb0ace84c93932bddb882a6a3 | [
"Apache-2.0"
] | null | null | null | tests/integration/test_serialise.py | csiro-easi/eo-datasets | 7805c569763f828cb0ace84c93932bddb882a6a3 | [
"Apache-2.0"
] | null | null | null | tests/integration/test_serialise.py | csiro-easi/eo-datasets | 7805c569763f828cb0ace84c93932bddb882a6a3 | [
"Apache-2.0"
] | null | null | null | from pathlib import Path
from typing import Dict
from eodatasets3 import serialise
from .common import assert_same, dump_roundtrip
| 30.105263 | 84 | 0.791958 |
7deee6c010d48a8d2b8631423560a24cab9c77a0 | 4,369 | py | Python | src/plot/plot-bb/plot_methods.py | bcrafton/speed_read | 3e9c0c873e49e4948a216aae14ec0d4654d1a62c | [
"MIT"
] | null | null | null | src/plot/plot-bb/plot_methods.py | bcrafton/speed_read | 3e9c0c873e49e4948a216aae14ec0d4654d1a62c | [
"MIT"
] | null | null | null | src/plot/plot-bb/plot_methods.py | bcrafton/speed_read | 3e9c0c873e49e4948a216aae14ec0d4654d1a62c | [
"MIT"
] | 2 | 2020-11-08T12:51:23.000Z | 2021-12-02T23:16:48.000Z |
import numpy as np
import matplotlib.pyplot as plt
####################
####################
# Energy of one comparator operation in joules (22 pJ shared across a
# 32x16 comparator array).
comp_pJ = 22. * 1e-12 / 32. / 16.
num_layers = 6
num_comparator = 8
# results.npy maps (skip, cards, alloc, profile) -> per-layer result dicts.
results = np.load('results.npy', allow_pickle=True).item()
# Accumulators indexed by [skip][cards][alloc][profile][layer].
y_mean = np.zeros(shape=(2, 2, 2, 2, num_layers))
y_std = np.zeros(shape=(2, 2, 2, 2, num_layers))
y_mac_per_cycle = np.zeros(shape=(2, 2, 2, 2, num_layers))
y_mac_per_pJ = np.zeros(shape=(2, 2, 2, 2, num_layers))
cycle = np.zeros(shape=(2, 2, 2, 2, num_layers))
nmac = np.zeros(shape=(2, 2, 2, 2, num_layers))
array = np.zeros(shape=(2, 2, 2, 2, num_layers))
y_ron = np.zeros(shape=(2, 2, 2, 2, num_layers))
y_roff = np.zeros(shape=(2, 2, 2, 2, num_layers))
y_adc = np.zeros(shape=(2, 2, 2, 2, num_layers, num_comparator))
y_energy = np.zeros(shape=(2, 2, 2, 2, num_layers))
array_util = np.zeros(shape=(2, 2, 2, 2, num_layers))
for key in sorted(results.keys()):
    (skip, cards, alloc, profile) = key
    # Map the symbolic allocation policy onto an index (block=1, layer=0).
    alloc = 1 if alloc == 'block' else 0
    layer_results = results[key]
    max_cycle = 0
    for layer in range(num_layers):
        # merge_dicts is defined elsewhere in this file; presumably it
        # folds the per-run dicts of a layer into one — TODO confirm.
        rdict = merge_dicts(layer_results[layer])
        ############################
        y_mean[skip][cards][alloc][profile][layer] = np.mean(rdict['mean'])
        y_std[skip][cards][alloc][profile][layer] = np.mean(rdict['std'])
        ############################
        # Energy model: 0.2 fJ per Ron/Roff cell event plus comparator
        # energy weighted by the number of comparisons (1..8).
        y_ron[skip][cards][alloc][profile][layer] = np.sum(rdict['ron'])
        y_roff[skip][cards][alloc][profile][layer] = np.sum(rdict['roff'])
        y_adc[skip][cards][alloc][profile][layer] = np.sum(rdict['adc'], axis=0)
        y_energy[skip][cards][alloc][profile][layer] += y_ron[skip][cards][alloc][profile][layer] * 2e-16
        y_energy[skip][cards][alloc][profile][layer] += y_roff[skip][cards][alloc][profile][layer] * 2e-16
        y_energy[skip][cards][alloc][profile][layer] += np.sum(y_adc[skip][cards][alloc][profile][layer] * np.array([1,2,3,4,5,6,7,8]) * comp_pJ)
        y_mac_per_cycle[skip][cards][alloc][profile][layer] = np.sum(rdict['nmac']) / np.sum(rdict['cycle'])
        # TOPs per joule: MACs / 1e12 / total energy.
        y_mac_per_pJ[skip][cards][alloc][profile][layer] = np.sum(rdict['nmac']) / 1e12 / np.sum(y_energy[skip][cards][alloc][profile][layer])
        ############################
        cycle[skip][cards][alloc][profile][layer] = np.mean(rdict['cycle'])
        nmac[skip][cards][alloc][profile][layer] = np.mean(rdict['nmac'])
        array[skip][cards][alloc][profile][layer] = np.mean(rdict['array'])
        ############################
        # Slowest layer bounds this configuration's makespan.
        max_cycle = max(max_cycle, np.mean(rdict['cycle']))
    ############################
    # Second pass: utilization needs max_cycle from the first pass.
    for layer in range(num_layers):
        rdict = merge_dicts(layer_results[layer])
        ############################
        y_cycle = np.mean(rdict['cycle'])
        y_stall = np.mean(rdict['stall'])
        y_array = np.mean(rdict['array'])
        # Fraction of array-cycles doing useful (non-stalled) work,
        # normalized by the slowest layer's cycle count.
        array_util[skip][cards][alloc][profile][layer] = (y_array * y_cycle - y_stall) / (y_array * max_cycle)
        ############################
####################
layers = np.array(range(1, 6+1))
# Worst-case (max over layers) cycle counts for the six plotted
# skip/cards x allocation-policy combinations.
skip_none = int(np.max(cycle[1, 0, 0, 0]))
skip_layer = int(np.max(cycle[1, 0, 0, 1]))
skip_block = int(np.max(cycle[1, 0, 1, 1]))
cards_none = int(np.max(cycle[1, 1, 0, 0]))
cards_layer = int(np.max(cycle[1, 1, 0, 1]))
cards_block = int(np.max(cycle[1, 1, 1, 1]))
height = [skip_none, skip_layer, skip_block, cards_none, cards_layer, cards_block]
x = ['skip/none', 'skip/layer', 'skip/block', 'cards/none', 'cards/layer', 'cards/block']
####################
plt.rcParams.update({'font.size': 12})
####################
# Bar chart of cycle counts per method, annotated with the exact value.
plt.cla()
plt.clf()
plt.close()
plt.ylabel('# Cycles')
# plt.xlabel('Method')
plt.xticks(range(len(x)), x, rotation=45)
width = 0.2
plt.bar(x=x, height=height, width=width)
ax = plt.gca()
for i, h in enumerate(height):
    # print (i, h)
    ax.text(i - width, h + np.min(height)*0.02, str(h), fontdict={'size': 12})
fig = plt.gcf()
fig.set_size_inches(9, 5)
plt.tight_layout()
fig.savefig('cycles.png', dpi=300)
####################
| 29.721088 | 145 | 0.559396 |
7def97525f164a67b3206cba17eb1da34b5d6234 | 3,154 | py | Python | build/getversion.py | timgates42/subversion | 0f088f530747140c6783c2eeb77ceff8e8613c42 | [
"Apache-2.0"
] | 3 | 2017-01-03T03:20:56.000Z | 2018-12-24T22:05:09.000Z | build/getversion.py | timgates42/subversion | 0f088f530747140c6783c2eeb77ceff8e8613c42 | [
"Apache-2.0"
] | 3 | 2016-06-12T17:02:25.000Z | 2019-02-03T11:08:18.000Z | build/getversion.py | timgates42/subversion | 0f088f530747140c6783c2eeb77ceff8e8613c42 | [
"Apache-2.0"
] | 3 | 2017-01-21T00:15:13.000Z | 2020-11-04T07:23:50.000Z | #!/usr/bin/env python
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
#
# getversion.py - Parse version numbers from C header files.
#
import os
import re
import sys
import traceback
__all__ = ['Parser', 'Result']
def svn_extractor(parser, include_file):
  '''Pull the version number out of svn.version.h.

  Registers the SVN_VER_* macros with PARSER, parses INCLUDE_FILE, and
  writes "MAJOR.MINOR.PATCH" to stdout.  Exits via usage_and_exit() if
  the file cannot be read.
  '''
  # Use the parser passed by the caller rather than the module-level 'p';
  # the old code only worked because __main__ happened to name its Parser
  # instance 'p' as well.
  parser.search('SVN_VER_MAJOR', 'major')
  parser.search('SVN_VER_MINOR', 'minor')
  parser.search('SVN_VER_PATCH', 'patch')
  try:
    r = parser.parse(include_file)
  except IOError:
    typ, val, tb = sys.exc_info()
    msg = ''.join(traceback.format_exception_only(typ, val))
    usage_and_exit(msg)
  sys.stdout.write("%d.%d.%d" % (r.major, r.minor, r.patch))
def sqlite_extractor(parser, include_file):
  '''Pull the version number out of sqlite3.h.

  SQLITE_VERSION_NUMBER encodes the version as
  MAJOR*1000000 + MINOR*1000 + MICRO; decode it and write
  "MAJOR.MINOR.MICRO" to stdout.  Exits via usage_and_exit() if the
  file cannot be read.
  '''
  # Use the parser passed by the caller rather than the module-level 'p'
  # global (same latent bug as svn_extractor had).
  parser.search('SQLITE_VERSION_NUMBER', 'version')
  try:
    r = parser.parse(include_file)
  except IOError:
    typ, val, tb = sys.exc_info()
    msg = ''.join(traceback.format_exception_only(typ, val))
    usage_and_exit(msg)
  # Floor division keeps the components integral on Python 3, where '/'
  # would produce floats.
  major = r.version // 1000000
  minor = (r.version - (major * 1000000)) // 1000
  micro = (r.version - (major * 1000000) - (minor * 1000))
  sys.stdout.write("%d.%d.%d" % (major, minor, micro))
# Dispatch table: product name given as argv[1] -> extractor function.
extractors = {
  'SVN' : svn_extractor,
  # 'SQLITE' : sqlite_extractor, # not used
}
if __name__ == '__main__':
  # Usage: getversion.py PRODUCT INCLUDE_FILE
  if len(sys.argv) == 3:
    extractor = extractors[sys.argv[1]]
    include_file = sys.argv[2]
  else:
    usage_and_exit("Incorrect number of arguments")
  # Extract and print the version number
  p = Parser()
  extractor(p, include_file)
| 27.426087 | 63 | 0.675333 |
7df019fabd0e51d78fd4be6cb16ea6706b50e9be | 1,136 | py | Python | redis/p02-vote/client/c02.py | JoseIbanez/testing | 4d6ff310cd63a8b2f8e1abcfbea0f17b23220021 | [
"MIT"
] | 1 | 2016-09-15T03:58:30.000Z | 2016-09-15T03:58:30.000Z | redis/p02-vote/client/c02.py | JoseIbanez/testing | 4d6ff310cd63a8b2f8e1abcfbea0f17b23220021 | [
"MIT"
] | 1 | 2020-09-13T08:44:50.000Z | 2020-09-13T08:44:50.000Z | redis/p02-vote/client/c02.py | JoseIbanez/testing | 4d6ff310cd63a8b2f8e1abcfbea0f17b23220021 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import httplib  # Python 2 only: this script predates http.client
import random
import argparse
import sys

# Get command-line options.
parser = argparse.ArgumentParser(
    description='Testing vote app')
parser.add_argument(
    '-port',
    type=int,
    help='port of server',
    default=8000)
parser.add_argument(
    '-host',
    type=str,
    help='server name/ip',
    default="localhost")
args = parser.parse_args()

# Color table: the candidates a vote can be cast for.
colorList = ["blue", "orange", "red", "green", "yellow" ]
colorSize = len(colorList) - 1  # inclusive upper bound for randint

# Connect with the vote server over plain HTTP.
conn = httplib.HTTPConnection(args.host, args.port)

# Initial request — verifies the server is reachable.
conn.request("GET", "/")
r1 = conn.getresponse()
#print(r1.status, r1.reason)
print(r1.read())

# Vote loop: cast 100 votes for randomly chosen colors.
count = 0
while count < 100 :
    count = count + 1
    nColor = random.randint(0, colorSize)
    conn.request("GET", "/v1/vote?color="+colorList[nColor])
    r1 = conn.getresponse()
    # NOTE(review): the response body is never read inside the loop;
    # httplib normally requires the previous response to be consumed
    # before the next request on the same connection — confirm this
    # works against the real server.
    #print(r1.read())
    print

# View current results.
conn.request("GET", "/v1/listVotes")
r1 = conn.getresponse()
print(r1.read())

conn.request("GET", "/v1/listWorkers")
r1 = conn.getresponse()
print(r1.read())
conn.close() | 17.476923 | 60 | 0.647007 |
7df1653c3e5e087f0be1c1ff434e58035664ddf1 | 8,161 | py | Python | vm_manager/vm_functions/admin_functionality.py | NeCTAR-RC/bumblebee | 8ba4c543695c83ea1ca532012203f05189438e23 | [
"Apache-2.0"
] | 3 | 2021-11-19T10:45:17.000Z | 2022-02-15T21:57:58.000Z | vm_manager/vm_functions/admin_functionality.py | NeCTAR-RC/bumblebee | 8ba4c543695c83ea1ca532012203f05189438e23 | [
"Apache-2.0"
] | null | null | null | vm_manager/vm_functions/admin_functionality.py | NeCTAR-RC/bumblebee | 8ba4c543695c83ea1ca532012203f05189438e23 | [
"Apache-2.0"
] | null | null | null | from uuid import UUID
import django_rq
import logging
from datetime import datetime, timezone, timedelta
from django.core.mail import mail_managers
from django.db.models import Count
from django.db.models.functions import TruncDay
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render
from operator import itemgetter
from researcher_workspace.utils import offset_month_and_year
from vm_manager.models import Instance, Resize, Volume
from vm_manager.utils.utils import get_nectar
from vm_manager.vm_functions.resize_vm import downsize_expired_supersized_vms
from vm_manager.utils.Check_ResearchDesktop_Availability import \
check_availability
logger = logging.getLogger(__name__)
| 41.01005 | 92 | 0.638892 |
7df1d68440ff66d19aea439b6b82acb020e94a8f | 365 | py | Python | scripts/af_renameSG.py | aaronfang/small-Scripts | 890b10ab19fa9cdf2415aaf2dc08b81cc64fc79d | [
"MIT"
] | 1 | 2018-03-08T16:34:00.000Z | 2018-03-08T16:34:00.000Z | scripts/af_renameSG.py | aaronfang/personal_scripts | 890b10ab19fa9cdf2415aaf2dc08b81cc64fc79d | [
"MIT"
] | null | null | null | scripts/af_renameSG.py | aaronfang/personal_scripts | 890b10ab19fa9cdf2415aaf2dc08b81cc64fc79d | [
"MIT"
] | null | null | null | # rename shading group name to material name but with SG ended
import pymel.core as pm
import re
selSG = pm.ls(sl=True,fl=True)
for SG in selSG:
curMat = pm.listConnections(SG,d=1)
for mat in curMat:
if pm.nodeType(mat) == 'blinn' or pm.nodeType(mat) == 'lambert':
sgNM = re.split("_mat",str(mat))[0]+"SG"
pm.rename(SG,sgNM)
| 33.181818 | 72 | 0.632877 |
8145be52efea74b03f5b43c1e53cd198f1584621 | 729 | py | Python | conftest.py | berpress/MT5WT | 10826f974cd5aef14e8771e18ca0314f27a902e3 | [
"Apache-2.0"
] | null | null | null | conftest.py | berpress/MT5WT | 10826f974cd5aef14e8771e18ca0314f27a902e3 | [
"Apache-2.0"
] | 1 | 2019-11-26T18:12:24.000Z | 2019-11-26T18:12:24.000Z | conftest.py | berpress/MT5WT | 10826f974cd5aef14e8771e18ca0314f27a902e3 | [
"Apache-2.0"
] | null | null | null | import pytest
from common.common import NETTING_ACCOUNT
from fixture.application import Application
| 24.3 | 79 | 0.699588 |
81473d4251ec4022292a6f33a138c31e6bf74550 | 3,855 | py | Python | chapps/tests/test_util/test_util.py | easydns/chapps | 3bb54e795de1f91c6cf749f9aeb16b1211584809 | [
"MIT"
] | 1 | 2022-03-23T13:36:43.000Z | 2022-03-23T13:36:43.000Z | chapps/tests/test_util/test_util.py | easydns/chapps | 3bb54e795de1f91c6cf749f9aeb16b1211584809 | [
"MIT"
] | 2 | 2022-02-25T18:12:15.000Z | 2022-03-07T22:32:13.000Z | chapps/tests/test_util/test_util.py | easydns/chapps | 3bb54e795de1f91c6cf749f9aeb16b1211584809 | [
"MIT"
] | null | null | null | """CHAPPS Utilities Tests
.. todo::
Write tests for :class:`~chapps.util.VenvDetector`
"""
import pytest
from pprint import pprint as ppr
from chapps.util import AttrDict, PostfixPolicyRequest
pytestmark = pytest.mark.order(1)
| 33.521739 | 82 | 0.628275 |
8147a9961c8020efb6e931a049f2c9b13c430d08 | 84 | py | Python | PMMH/apps/game/map/admin.py | metinberkkaratas/ProjectMagic-MightofHeroes | 578697e637aba0f18b4f83762bf1c87fb20db2ee | [
"MIT"
] | null | null | null | PMMH/apps/game/map/admin.py | metinberkkaratas/ProjectMagic-MightofHeroes | 578697e637aba0f18b4f83762bf1c87fb20db2ee | [
"MIT"
] | 4 | 2021-03-19T02:37:45.000Z | 2022-02-10T11:18:04.000Z | PMMH/apps/game/map/admin.py | metinberkkaratas/ProjectMagic-MightofHeroes | 578697e637aba0f18b4f83762bf1c87fb20db2ee | [
"MIT"
] | 1 | 2019-10-21T20:32:20.000Z | 2019-10-21T20:32:20.000Z | from django.contrib import admin
from .models import Map
# Make the Map model manageable through the Django admin site.
admin.site.register(Map)
| 14 | 32 | 0.797619 |
8148c634d7eb81e51ee23984bd4ad754b8ff54d8 | 816 | py | Python | models/__init__.py | pgodet/star_flow | cedb96ff339d11abf71d12d09e794593a742ccce | [
"Apache-2.0"
] | 10 | 2020-11-17T12:55:00.000Z | 2022-01-13T07:23:55.000Z | models/__init__.py | pgodet/star_flow | cedb96ff339d11abf71d12d09e794593a742ccce | [
"Apache-2.0"
] | 1 | 2021-01-02T22:46:07.000Z | 2021-01-02T22:46:07.000Z | models/__init__.py | pgodet/star_flow | cedb96ff339d11abf71d12d09e794593a742ccce | [
"Apache-2.0"
] | 1 | 2021-01-26T10:53:02.000Z | 2021-01-26T10:53:02.000Z | from . import pwcnet
from . import pwcnet_irr
from . import pwcnet_occ_joint
from . import pwcnet_irr_occ_joint
from . import tr_flow
from . import tr_features
from . import IRR_PWC
from . import IRR_PWC_occ_joint
from . import STAR
# Flat aliases: expose each model class at package level so callers can
# refer to e.g. models.TRFlow_irr without knowing the submodule layout.
PWCNet = pwcnet.PWCNet
PWCNet_irr = pwcnet_irr.PWCNet
PWCNet_occ_joint = pwcnet_occ_joint.PWCNet
PWCNet_irr_occ_joint = pwcnet_irr_occ_joint.PWCNet
TRFlow = tr_flow.TRFlow
TRFlow_occjoint = tr_flow.TRFlow_occjoint
TRFlow_irr = tr_flow.TRFlow_irr
TRFlow_irr_occjoint = tr_flow.TRFlow_irr_occjoint
TRFeat = tr_features.TRFeat
TRFeat_occjoint = tr_features.TRFeat_occjoint
TRFeat_irr_occjoint = tr_features.TRFeat_irr_occjoint
# -- With refinement ---
# NOTE: these assignments rebind the submodule names IRR_PWC and (via the
# new name) shadow the modules imported above with their model classes.
IRR_PWC = IRR_PWC.PWCNet
IRR_occ_joint = IRR_PWC_occ_joint.PWCNet
StarFlow = STAR.StarFlow
| 24 | 53 | 0.792892 |
81497b94baefb829f896f3159d41b2c5f1e8542b | 2,386 | py | Python | blkdiscovery/blkid.py | jaredeh/blkdiscovery | b2a73f12adfe784157783d94dbdb17a79810a680 | [
"MIT"
] | null | null | null | blkdiscovery/blkid.py | jaredeh/blkdiscovery | b2a73f12adfe784157783d94dbdb17a79810a680 | [
"MIT"
] | null | null | null | blkdiscovery/blkid.py | jaredeh/blkdiscovery | b2a73f12adfe784157783d94dbdb17a79810a680 | [
"MIT"
] | null | null | null | import os
import re

# Support running both as part of the package (relative import) and as a
# standalone module (legacy layout).  Catch only import-related errors:
# the original bare 'except:' also swallowed KeyboardInterrupt/SystemExit
# and hid genuine failures raised while importing blkdiscoveryutil.
try:
    from .blkdiscoveryutil import *
except (ImportError, ValueError):
    # ValueError: "Attempted relative import in non-package" on Python 2.
    from blkdiscoveryutil import *
if __name__ == '__main__':
    # Ad-hoc smoke test: dump what Blkid discovers on this machine.
    import pprint
    pp = pprint.PrettyPrinter(indent=4)
    l = Blkid()
    # Raw parsed output of the blkid(8) invocation.
    devdata = l.call_blkid()
    pp.pprint(devdata)
    # Presumably the whole-disk devices extracted from devdata — confirm
    # against Blkid.find_disks.
    disks = l.find_disks(devdata)
    pp.pprint(disks)
    # Combined per-device details.
    details = l.details()
    pp.pprint(details)
| 29.45679 | 75 | 0.538139 |
81498134e7422fff5c181158b8b48f05fe97fab6 | 6,217 | py | Python | cosa/analyzers/bmc_ltl.py | zsisco/CoSA | b7a5107fcbae9b3ed3726fbcf9240b39252ef551 | [
"BSD-3-Clause"
] | 52 | 2018-02-26T19:01:03.000Z | 2022-02-24T08:30:00.000Z | cosa/analyzers/bmc_ltl.py | zsisco/CoSA | b7a5107fcbae9b3ed3726fbcf9240b39252ef551 | [
"BSD-3-Clause"
] | 28 | 2018-06-07T22:18:23.000Z | 2020-10-27T01:21:01.000Z | cosa/analyzers/bmc_ltl.py | zsisco/CoSA | b7a5107fcbae9b3ed3726fbcf9240b39252ef551 | [
"BSD-3-Clause"
] | 12 | 2018-08-16T20:02:46.000Z | 2021-01-20T18:17:45.000Z | # Copyright 2018 Cristian Mattarei
#
# Licensed under the modified BSD (3-clause BSD) License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from pysmt.shortcuts import And, Or, Solver, TRUE, FALSE, Not, EqualsOrIff, Implies, Iff, Symbol, BOOL
from cosa.utils.logger import Logger
from cosa.utils.formula_mngm import substitute, get_free_variables
from cosa.representation import TS
from cosa.encoders.ltl import LTLEncoder, verification_type
from cosa.problem import VerificationStatus, VerificationType
from cosa.analyzers.mcsolver import TraceSolver, BMCSolver, VerificationStrategy
from cosa.analyzers.bmc_temporal import BMCTemporal
from cosa.analyzers.bmc_safety import BMCSafety
| 32.212435 | 102 | 0.579218 |
81499daf35ebda15aaf719c6405036c7a52b937c | 4,623 | py | Python | paleomix/nodes/bowtie2.py | MikkelSchubert/paleomix | 5c6414060088ba178ff1c400bdbd45d2f6b1aded | [
"MIT"
] | 33 | 2015-04-08T10:44:19.000Z | 2021-11-01T14:23:40.000Z | paleomix/nodes/bowtie2.py | MikkelSchubert/paleomix | 5c6414060088ba178ff1c400bdbd45d2f6b1aded | [
"MIT"
] | 41 | 2015-07-17T12:46:16.000Z | 2021-10-13T06:47:25.000Z | paleomix/nodes/bowtie2.py | MikkelSchubert/paleomix | 5c6414060088ba178ff1c400bdbd45d2f6b1aded | [
"MIT"
] | 19 | 2015-01-23T07:09:39.000Z | 2021-04-06T09:30:21.000Z | #!/usr/bin/python3
#
# Copyright (c) 2012 Mikkel Schubert <MikkelSch@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from typing import Any, Iterable, Optional, Type, Union
import paleomix.common.versions as versions
from paleomix.common.command import (
AtomicCmd,
InputFile,
OptionsType,
OutputFile,
ParallelCmds,
TempOutputFile,
)
from paleomix.node import CommandNode, Node, NodeError
from paleomix.nodes.bwa import (
_get_max_threads,
_get_node_description,
_new_cleanup_command,
)
# Minimum supported Bowtie2 release; the version string is scraped from
# the output of `bowtie2 --version`.
BOWTIE2_VERSION = versions.Requirement(
    call=("bowtie2", "--version"),
    regexp=r"version (\d+\.\d+\.\d+)",
    specifiers=">=2.3.0",
)
def _bowtie2_template(
    call: Any,
    reference: str,
    iotype: Union[Type[InputFile], Type[OutputFile]] = InputFile,
    **kwargs: Any
):
    """Build an AtomicCmd for a bowtie2 invocation.

    The six ``.bt2`` index files derived from *reference* are registered
    as extra files — inputs by default, or outputs when *iotype* is
    OutputFile (e.g. for index-building commands) — and the command is
    gated on the minimum supported bowtie2 version.
    """
    return AtomicCmd(
        call,
        extra_files=[
            # A bowtie2 index consists of exactly these six files.
            iotype(reference + postfix)
            for postfix in (
                ".1.bt2",
                ".2.bt2",
                ".3.bt2",
                ".4.bt2",
                ".rev.1.bt2",
                ".rev.2.bt2",
            )
        ],
        requirements=[BOWTIE2_VERSION],
        **kwargs
    )
| 30.019481 | 79 | 0.605667 |
8149af93bae69dfead5a5f9ae4e810455dcc5116 | 976 | py | Python | modu_01/04_02_lab.py | 94JuHo/study_for_deeplearning | ababf482b6a24d94b5f860ea9a68e34fe324d182 | [
"MIT"
] | null | null | null | modu_01/04_02_lab.py | 94JuHo/study_for_deeplearning | ababf482b6a24d94b5f860ea9a68e34fe324d182 | [
"MIT"
] | null | null | null | modu_01/04_02_lab.py | 94JuHo/study_for_deeplearning | ababf482b6a24d94b5f860ea9a68e34fe324d182 | [
"MIT"
] | null | null | null | import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # suppress TensorFlow C++ info/warning log spam
import tensorflow as tf
# Multivariate linear regression with the TF1 graph API, using matrix
# placeholders instead of one placeholder per feature.
# Five samples, three input scores each, one target score.
x_data = [[73., 80., 75.], [93., 88., 93.,], [89., 91., 90.], [96., 98., 100.], [73., 66., 70.]]
y_data = [[152.], [185.], [180.], [196.], [142.]]
X = tf.placeholder(tf.float32, shape=[None, 3]) # shape=None lets the batch size vary
Y = tf.placeholder(tf.float32, shape=[None, 1])
W = tf.Variable(tf.random_normal([3, 1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')
# Hypothesis H = XW + b (b broadcasts over the batch dimension).
hypothesis = tf.matmul(X, W)+b
# Mean squared error loss.
cost = tf.reduce_mean(tf.square(hypothesis - Y))
# Very small learning rate — the inputs are not normalized.
optimizer = tf.train.GradientDescentOptimizer(learning_rate=1e-5)
train = optimizer.minimize(cost)
sess = tf.Session()
sess.run(tf.global_variables_initializer())
for step in range(2001):
    cost_val, hy_val, _ = sess.run([cost, hypothesis, train], feed_dict={X: x_data, Y: y_data})
    if step % 10 == 0:
        print(step, "Cost:", cost_val, "\nPrediction:\n", hy_val)
| 32.533333 | 96 | 0.655738 |
814a3026ad57c4136a0d1ae0e01b8c8cbeaa23f5 | 3,291 | py | Python | freezer/storage/fslike.py | kwu83tw/freezer | 5aaab1e70bf957436d888fbc3fce7af8d25eb124 | [
"MIT"
] | 141 | 2015-10-18T02:53:47.000Z | 2022-03-10T11:31:30.000Z | freezer/storage/fslike.py | kwu83tw/freezer | 5aaab1e70bf957436d888fbc3fce7af8d25eb124 | [
"MIT"
] | 1 | 2016-10-31T01:56:10.000Z | 2016-10-31T01:56:10.000Z | freezer/storage/fslike.py | kwu83tw/freezer | 5aaab1e70bf957436d888fbc3fce7af8d25eb124 | [
"MIT"
] | 50 | 2015-10-27T12:16:08.000Z | 2022-02-14T07:14:01.000Z | # (c) Copyright 2014,2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
from oslo_serialization import jsonutils as json
from freezer.storage import physical
| 32.264706 | 74 | 0.606806 |
814afb82812c77d3cd59d4373c1636829f4ef2dc | 1,558 | py | Python | rubicon/repository/asynchronous/s3.py | gforsyth/rubicon | b77c0bdfc73d3f5666f76c83240706c10cd2e88c | [
"Apache-2.0"
] | null | null | null | rubicon/repository/asynchronous/s3.py | gforsyth/rubicon | b77c0bdfc73d3f5666f76c83240706c10cd2e88c | [
"Apache-2.0"
] | null | null | null | rubicon/repository/asynchronous/s3.py | gforsyth/rubicon | b77c0bdfc73d3f5666f76c83240706c10cd2e88c | [
"Apache-2.0"
] | null | null | null | from rubicon.repository.asynchronous import AsynchronousBaseRepository
from rubicon.repository.utils import json
| 33.869565 | 73 | 0.67715 |
814b91426dfa04f0937c2eaf434296d7b486ca56 | 1,660 | py | Python | examples/multidata_example.py | zssherman/ACT | db87008aa6649d3d21b79ae97ea0f11d7f1f1935 | [
"BSD-3-Clause"
] | 62 | 2020-01-13T19:48:49.000Z | 2022-03-22T07:56:37.000Z | examples/multidata_example.py | zssherman/ACT | db87008aa6649d3d21b79ae97ea0f11d7f1f1935 | [
"BSD-3-Clause"
] | 215 | 2020-01-07T20:17:11.000Z | 2022-03-31T18:49:57.000Z | examples/multidata_example.py | zssherman/ACT | db87008aa6649d3d21b79ae97ea0f11d7f1f1935 | [
"BSD-3-Clause"
] | 16 | 2020-01-13T21:25:55.000Z | 2022-03-26T18:01:29.000Z | """
==================================================
Example on how to plot multiple datasets at a time
==================================================
This is an example of how to download and
plot multiple datasets at a time.
.. image:: ../../multi_ds_plot1.png
"""
import act
import matplotlib.pyplot as plt
# Place your username and token here
# (ARM Live Data webservice credentials; intentionally blank in the example)
username = ''
token = ''
act.discovery.download_data(username, token, 'sgpceilC1.b1', '2019-01-01', '2019-01-07')
# Read in CEIL data and correct it
# NOTE(review): -9999. is passed to correct_ceil -- presumably the dataset's
# fill/missing value; confirm against the ceilometer product docs.
ceil_ds = act.io.armfiles.read_netcdf('sgpceilC1.b1/sgpceilC1.b1.201901*.nc')
ceil_ds = act.corrections.ceil.correct_ceil(ceil_ds, -9999.)
# Read in the MET data
met_ds = act.io.armfiles.read_netcdf(
    act.tests.sample_files.EXAMPLE_MET_WILDCARD)
# You can use tuples if the datasets in the tuple contain a
# datastream attribute. This is required in all ARM datasets.
display = act.plotting.TimeSeriesDisplay(
    (ceil_ds, met_ds), subplot_shape=(2, ), figsize=(15, 10))
display.plot('backscatter', 'sgpceilC1.b1', subplot_index=(0, ))
display.plot('temp_mean', 'sgpmetE13.b1', subplot_index=(1, ))
display.day_night_background('sgpmetE13.b1', subplot_index=(1, ))
plt.show()
# You can also use a dictionary so that you can customize
# your datastream names to something that may be more useful.
display = act.plotting.TimeSeriesDisplay(
    {'ceiliometer': ceil_ds, 'met': met_ds},
    subplot_shape=(2, ), figsize=(15, 10))
display.plot('backscatter', 'ceiliometer', subplot_index=(0, ))
display.plot('temp_mean', 'met', subplot_index=(1, ))
display.day_night_background('met', subplot_index=(1, ))
plt.show()
# Release the underlying file handles of both datasets.
ceil_ds.close()
met_ds.close()
| 32.54902 | 88 | 0.689759 |
814bbe8913aa4c1ed64cfd661e62c150faebc750 | 805 | py | Python | gpdata.py | masenov/bullet | be7148c93e3bf8111923063b599f2e9f7ea929b8 | [
"Zlib"
] | null | null | null | gpdata.py | masenov/bullet | be7148c93e3bf8111923063b599f2e9f7ea929b8 | [
"Zlib"
] | null | null | null | gpdata.py | masenov/bullet | be7148c93e3bf8111923063b599f2e9f7ea929b8 | [
"Zlib"
] | null | null | null | flat_x = x.flatten()
flat_y = y.flatten()
flat_z = z.flatten()
size = flat_x.shape[0]
filename = 'landscapeData.h'
open(filename, 'w').close()
f = open(filename, 'a')
f.write('#include "LinearMath/btScalar.h"\n#define Landscape01VtxCount 4\n#define Landscape01IdxCount 4\nbtScalar Landscape01Vtx[] = {\n')
for i in range(size):
f.write(str(flat_x[i])+'f,'+str(flat_y[i])+'f,'+str(flat_z[i])+'f,\n')
f.write('};\n')
f.write('btScalar Landscape01Nml[] = {\n')
for i in range(size):
f.write('1.0f,1.0f,1.0f,\n')
f.write('};\n')
f.write('btScalar Landscape01Tex[] = {\n')
for i in range(size):
f.write('1.0f,1.0f,1.0f,\n')
f.write('};\n')
f.write('unsigned short Landscape01Idx[] = {\n')
for i in range(size):
f.write(str(i)+','+str(i+1)+','+str(i+2)+',\n')
f.write('};\n')
f.close()
| 23.676471 | 138 | 0.62236 |
814d356177b5fffd6e85621ee2f5863452f63451 | 2,776 | py | Python | samples/create_project.py | zuarbase/server-client-python | 1e5e02a550727d72fa90c3d8e4caa4c5f416dc74 | [
"CC0-1.0",
"MIT"
] | 470 | 2016-09-14T23:38:48.000Z | 2022-03-31T07:59:53.000Z | samples/create_project.py | zuarbase/server-client-python | 1e5e02a550727d72fa90c3d8e4caa4c5f416dc74 | [
"CC0-1.0",
"MIT"
] | 772 | 2016-09-09T18:15:44.000Z | 2022-03-31T22:01:08.000Z | samples/create_project.py | zuarbase/server-client-python | 1e5e02a550727d72fa90c3d8e4caa4c5f416dc74 | [
"CC0-1.0",
"MIT"
] | 346 | 2016-09-10T00:05:00.000Z | 2022-03-30T18:55:47.000Z | ####
# This script demonstrates how to use the Tableau Server Client
# to create new projects, both at the root level and how to nest them using
# parent_id.
#
#
# To run the script, you must have installed Python 3.6 or later.
####
import argparse
import logging
import sys
import tableauserverclient as TSC
if __name__ == '__main__':
    # Script entry point; main() is expected to be defined elsewhere in this
    # module (not visible in this chunk) -- TODO confirm.
    main()
| 40.823529 | 101 | 0.699568 |
814e51bb73ef3a0faf2172d4b70fb37c15405587 | 2,146 | py | Python | tests/test_threading.py | nmandery/rasterio | ba5e90c487bd1930f52e57dba999e889b4df9ade | [
"BSD-3-Clause"
] | 1,479 | 2015-01-10T12:35:07.000Z | 2021-10-18T16:17:15.000Z | tests/test_threading.py | nmandery/rasterio | ba5e90c487bd1930f52e57dba999e889b4df9ade | [
"BSD-3-Clause"
] | 1,819 | 2015-01-06T21:56:25.000Z | 2021-10-20T02:28:27.000Z | tests/test_threading.py | nmandery/rasterio | ba5e90c487bd1930f52e57dba999e889b4df9ade | [
"BSD-3-Clause"
] | 509 | 2015-01-06T20:59:12.000Z | 2021-10-18T14:14:57.000Z | from threading import Thread
import time
import unittest
import rasterio as rio
from rasterio.env import get_gdal_config
def test_child_thread_inherits_env():
    """A spawned thread sees the env that is active in the main thread."""
    worker = Thread(target=func)
    with rio.Env(FROM_MAIN=True):
        worker.start()
        # While the child runs, the parent's config options remain visible.
        assert get_gdal_config('FROM_MAIN') is True
        assert get_gdal_config('lol') is None
        worker.join()
def test_child_thread_isolation():
    """Each child thread gets its own isolated environment."""
    workers = [
        Thread(target=func, args=('is_t1', True, 'is_t2')),
        Thread(target=func, args=('is_t2', True, 'is_t1')),
    ]
    # Start both threads first so they run concurrently, then join in order.
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
if __name__ == '__main__':
    # Allow running this test module directly via the stdlib unittest runner.
    unittest.main()
| 25.855422 | 75 | 0.584809 |
81507c4f325c3f7f550df8daa74e43be479e3fc4 | 4,600 | py | Python | dm_construction/environments_test.py | frangipane/dm_construction | c84dcbd13ef6896a57da04fe62be85297178552a | [
"Apache-2.0"
] | 25 | 2020-07-16T12:35:07.000Z | 2022-03-25T11:02:54.000Z | dm_construction/environments_test.py | frangipane/dm_construction | c84dcbd13ef6896a57da04fe62be85297178552a | [
"Apache-2.0"
] | 2 | 2021-01-11T11:40:21.000Z | 2021-06-15T12:43:28.000Z | dm_construction/environments_test.py | LaudateCorpus1/dm_construction | f9d59f6ccb8818b71f971387704f2db8f2b3323a | [
"Apache-2.0"
] | 7 | 2020-08-20T13:04:37.000Z | 2021-11-19T18:55:09.000Z | #!/usr/bin/python
#
# Copyright 2020 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests the open source construction environments."""
from absl import flags
from absl.testing import absltest
from absl.testing import parameterized
import dm_construction
import numpy as np
FLAGS = flags.FLAGS
flags.DEFINE_string("backend", "docker", "")
def _make_random_action(action_spec, observation):
"""Makes a random action given an action spec and observation."""
# Sample the random action.
action = {}
for name, spec in action_spec.items():
if name == "Index":
value = np.random.randint(observation["n_edge"])
elif spec.dtype in (np.int32, np.int64, int):
value = np.random.randint(spec.minimum, spec.maximum + 1)
else:
value = np.random.uniform(spec.minimum, spec.maximum)
action[name] = value
return action
def _random_unroll(env, seed=1234, num_steps=10, difficulty=5,
                   random_choice_before_reset=False):
  """Steps `env` with random actions, resetting whenever an episode ends.

  Returns (trajectory, actions): timesteps including the initial reset, and
  the action taken before each one (None for the first entry).
  """
  np.random.seed(seed)
  action_spec = env.action_spec()

  def _reset():
    # Optionally consume one RNG draw before every reset, to reproduce runs
    # recorded with that extra draw.
    if random_choice_before_reset:
      np.random.choice([8], p=[1.])
    return env.reset(difficulty=difficulty)

  timestep = _reset()
  trajectory = [timestep]
  actions = [None]
  for _ in range(num_steps):
    if timestep.last():
      timestep = _reset()
    action = _make_random_action(action_spec, timestep.observation)
    timestep = env.step(action)
    trajectory.append(timestep)
    actions.append(action)
  return trajectory, actions
if __name__ == "__main__":
  # Delegate to the absl test runner when executed as a script.
  absltest.main()
| 35.658915 | 80 | 0.722609 |
8150b4a9e126831d7b3a5289d0e53064e11cb629 | 225 | py | Python | Modulo_3/semana 2/imagenes/imagen.py | rubens233/cocid_python | 492ebdf21817e693e5eb330ee006397272f2e0cc | [
"MIT"
] | null | null | null | Modulo_3/semana 2/imagenes/imagen.py | rubens233/cocid_python | 492ebdf21817e693e5eb330ee006397272f2e0cc | [
"MIT"
] | null | null | null | Modulo_3/semana 2/imagenes/imagen.py | rubens233/cocid_python | 492ebdf21817e693e5eb330ee006397272f2e0cc | [
"MIT"
] | null | null | null | from tkinter import *
# Build a fixed-size window that displays a downscaled logo image.
ventana = Tk()
ventana.geometry("500x500")
ventana.title('PythonGuides')
# subsample(3, 3) keeps every third pixel in each direction (1/3 scale).
logo = PhotoImage(file='./logo.png')
logo = logo.subsample(3, 3)
Label(ventana, image=logo).pack(fill="both")
ventana.mainloop() | 18.75 | 45 | 0.711111 |
815225f9552d4d71ea93b38bd616e126479cf8c1 | 476 | py | Python | htdfsdk/utils.py | youngqqcn/htdfsdk | c22f213a967c8233bb6ccfb01bf148112efd44db | [
"MIT"
] | 2 | 2021-01-21T01:46:29.000Z | 2021-03-12T05:59:19.000Z | htdfsdk/utils.py | youngqqcn/htdfsdk | c22f213a967c8233bb6ccfb01bf148112efd44db | [
"MIT"
] | null | null | null | htdfsdk/utils.py | youngqqcn/htdfsdk | c22f213a967c8233bb6ccfb01bf148112efd44db | [
"MIT"
] | null | null | null | #coding:utf8
#author: yqq
#date: 2020/12/15 5:38
#descriptions:
from decimal import Decimal, getcontext
# getcontext()
if __name__ == '__main__':
    # Smoke tests for htdf_to_satoshi, which is defined elsewhere in this
    # module (not visible in this chunk). The expected values show the unit
    # uses 8 decimal places: 139623.71827296 HTDF -> 13962371827296 satoshi.
    assert htdf_to_satoshi(139623.71827296) == 13962371827296
    assert htdf_to_satoshi('139623.71827296') == 13962371827296
    assert htdf_to_satoshi(13962371827296) == 13962371827296 * 10 ** 8
    pass
| 21.636364 | 70 | 0.714286 |
81523ae13c659215630baf70c984ec0ce5e2200e | 1,213 | py | Python | hanzi_font_deconstructor/scripts/create_training_data.py | chanind/hanzi-font-deconstructor | ce41b2a5c0e66b8a83d6c734678446d1d32a18b7 | [
"MIT"
] | null | null | null | hanzi_font_deconstructor/scripts/create_training_data.py | chanind/hanzi-font-deconstructor | ce41b2a5c0e66b8a83d6c734678446d1d32a18b7 | [
"MIT"
] | null | null | null | hanzi_font_deconstructor/scripts/create_training_data.py | chanind/hanzi-font-deconstructor | ce41b2a5c0e66b8a83d6c734678446d1d32a18b7 | [
"MIT"
] | null | null | null | from dataclasses import asdict
from hanzi_font_deconstructor.common.generate_training_data import (
STROKE_VIEW_BOX,
get_training_input_svg_and_masks,
)
from os import path, makedirs
from pathlib import Path
import shutil
import argparse
# Output root: <repo>/data, two directories above this scripts/ folder.
PROJECT_ROOT = Path(__file__).parents[2]
DEST_FOLDER = PROJECT_ROOT / "data"
# CLI flags controlling generation size.
# NOTE(review): --max-strokes-per-img is parsed but never read in this view;
# confirm whether it should be forwarded to the generator.
parser = argparse.ArgumentParser(
    description="Generate training data for a model to deconstruct hanzi into strokes"
)
parser.add_argument("--max-strokes-per-img", default=5, type=int)
parser.add_argument("--total-images", default=50, type=int)
args = parser.parse_args()
if __name__ == "__main__":
    # create and empty the dest folder
    if path.exists(DEST_FOLDER):
        shutil.rmtree(DEST_FOLDER)
    makedirs(DEST_FOLDER)
    makedirs(DEST_FOLDER / "sample_svgs")
    # create the data
    # NOTE(review): `data` is initialized but never appended to or written
    # out in this view -- confirm whether saving it was dropped on purpose.
    data = {
        "viewbox": STROKE_VIEW_BOX,
        "imgs": [],
    }
    for i in range(args.total_images):
        (img_svg, stroke_masks) = get_training_input_svg_and_masks(256)
        # Filename encodes the sample index and its stroke-mask count.
        label = f"{i}-{len(stroke_masks)}"
        with open(DEST_FOLDER / "sample_svgs" / f"{label}.svg", "w") as img_file:
            img_file.write(img_svg)
        print(".")
    print("Done!")
| 29.585366 | 86 | 0.698269 |
815535942d00809101f7b9f361c4f256b557f56f | 1,321 | py | Python | examples/generated_sample_regression.py | micheleantonazzi/gibson-dataset | cb5fc81061bbda1a653d6fc7b625b14c8a517f3c | [
"MIT"
] | 3 | 2021-10-31T17:43:50.000Z | 2022-03-21T08:55:01.000Z | examples/generated_sample_regression.py | micheleantonazzi/gibson-dataset | cb5fc81061bbda1a653d6fc7b625b14c8a517f3c | [
"MIT"
] | null | null | null | examples/generated_sample_regression.py | micheleantonazzi/gibson-dataset | cb5fc81061bbda1a653d6fc7b625b14c8a517f3c | [
"MIT"
] | null | null | null | from generic_dataset.data_pipeline import DataPipeline
from generic_dataset.generic_sample import synchronize_on_fields
from generic_dataset.sample_generator import SampleGenerator
import numpy as np
import generic_dataset.utilities.save_load_methods as slm
# RGB -> BGR: reorder the last (channel) axis of the array; the operation
# returns the (data, engine) pair expected by DataPipeline.
pipeline_rgb_to_gbr = DataPipeline().add_operation(lambda data, engine: (data[:, :, [2, 1, 0]], engine))
# To model a regression problem, label_set parameter must be empty
# NOTE(review): field_3_is_positive is referenced below but not visible in
# this chunk -- presumably defined earlier in this module; confirm.
GeneratedSampleRegression = SampleGenerator(name='GeneratedSampleRegression', label_set=set()).add_dataset_field(field_name='rgb_image', field_type=np.ndarray, save_function=slm.save_compressed_numpy_array, load_function=slm.load_compressed_numpy_array) \
    .add_dataset_field(field_name='bgr_image', field_type=np.ndarray, save_function=slm.save_cv2_image_bgr, load_function=slm.load_cv2_image_bgr) \
    .add_field(field_name='field_3', field_type=int) \
    .add_custom_pipeline(method_name='create_pipeline_convert_rgb_to_bgr', elaborated_field='rgb_image', final_field='bgr_image', pipeline=pipeline_rgb_to_gbr) \
    .add_custom_method(method_name='field_3_is_positive', function=field_3_is_positive) \
.generate_sample_class() | 62.904762 | 255 | 0.824375 |