| column | dtype | range |
|---|---|---|
| hexsha | string | lengths 40-40 |
| size | int64 | 5-2.06M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | lengths 3-248 |
| max_stars_repo_name | string | lengths 5-125 |
| max_stars_repo_head_hexsha | string | lengths 40-78 |
| max_stars_repo_licenses | list | lengths 1-10 |
| max_stars_count | int64 | 1-191k (nullable) |
| max_stars_repo_stars_event_min_datetime | string | lengths 24-24 (nullable) |
| max_stars_repo_stars_event_max_datetime | string | lengths 24-24 (nullable) |
| max_issues_repo_path | string | lengths 3-248 |
| max_issues_repo_name | string | lengths 5-125 |
| max_issues_repo_head_hexsha | string | lengths 40-78 |
| max_issues_repo_licenses | list | lengths 1-10 |
| max_issues_count | int64 | 1-67k (nullable) |
| max_issues_repo_issues_event_min_datetime | string | lengths 24-24 (nullable) |
| max_issues_repo_issues_event_max_datetime | string | lengths 24-24 (nullable) |
| max_forks_repo_path | string | lengths 3-248 |
| max_forks_repo_name | string | lengths 5-125 |
| max_forks_repo_head_hexsha | string | lengths 40-78 |
| max_forks_repo_licenses | list | lengths 1-10 |
| max_forks_count | int64 | 1-105k (nullable) |
| max_forks_repo_forks_event_min_datetime | string | lengths 24-24 (nullable) |
| max_forks_repo_forks_event_max_datetime | string | lengths 24-24 (nullable) |
| content | string | lengths 5-2.06M |
| avg_line_length | float64 | 1-1.02M |
| max_line_length | int64 | 3-1.03M |
| alphanum_fraction | float64 | 0-1 |
| count_classes | int64 | 0-1.6M |
| score_classes | float64 | 0-1 |
| count_generators | int64 | 0-651k |
| score_generators | float64 | 0-1 |
| count_decorators | int64 | 0-990k |
| score_decorators | float64 | 0-1 |
| count_async_functions | int64 | 0-235k |
| score_async_functions | float64 | 0-1 |
| count_documentation | int64 | 0-1.04M |
| score_documentation | float64 | 0-1 |
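Each record that follows is one row of this table rendered inline: the metadata fields first, then the raw `content` of the Python file, then the derived statistics. As a minimal sketch, rows with this schema could be filtered with the `datasets` library, assuming the data is published as a Hugging Face dataset (the identifier below is a placeholder, not the real name):

```python
from datasets import load_dataset

# Placeholder identifier; substitute the real dataset name.
ds = load_dataset("org/python-stack-subset", split="train", streaming=True)

# Keep well-documented files from starred repositories.
# max_stars_count is nullable, hence the `or 0`.
well_documented = ds.filter(
    lambda row: row["score_documentation"] > 0.4
    and (row["max_stars_count"] or 0) > 0
)

for row in well_documented.take(3):
    print(row["hexsha"], row["max_stars_repo_path"], row["size"])
```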
| 542ed8915929d9082655f10271231d6e8237f5b5 | 2,848 | py | Python | bin/models_vs_uniprot_check/ViPhOG_chunks_rank_summ.py | alexcorm/emg-viral-pipeline | f367002f0e1e375840e5696323bde65f7accb31f | ["Apache-2.0"] | 30 | 2020-05-18T14:02:34.000Z | 2022-03-16T20:04:25.000Z | bin/models_vs_uniprot_check/ViPhOG_chunks_rank_summ.py | lynceuslq/emg-viral-pipeline | 53a99b84ed93428ee88d61e529bcf6799f5eec94 | ["Apache-2.0"] | 45 | 2020-04-30T09:45:03.000Z | 2022-03-21T09:10:21.000Z | bin/models_vs_uniprot_check/ViPhOG_chunks_rank_summ.py | lynceuslq/emg-viral-pipeline | 53a99b84ed93428ee88d61e529bcf6799f5eec94 | ["Apache-2.0"] | 12 | 2020-06-02T12:43:49.000Z | 2022-02-22T13:09:13.000Z |
#!/usr/bin/env python3
import os
import re
import glob
import sys
import operator
import ast
import argparse
###############################################################################################
# This script was written as part of the analysis conducted on the output generated by       #
# hmmsearch, when the ViPhOG database was searched against UniProtKB. The ViPhOG profile HMM #
# files were stored in different directories, each containing at most 2000 files and named   #
# using a sequential number from 1 to 16 (hmm1...hmm16). For each of these, a corresponding  #
# output directory was generated, each containing a domtbl output file for each of the files #
# stored in the hmm directories. The output directories were named using the same sequential #
# numbers as the directories storing the hmm files (hmm1domtbl...hmm16domtbl).               #
###############################################################################################
parser = argparse.ArgumentParser(description = "Step 3: Generate summary tables for each taxonomic rank. Run this script from within the directory containing the domtbl output directories (see the comment block for guidance), after the scripts for Step 1 and Step 2 have been run")
parser.add_argument("-i", "--input", dest = "input_file", help = "Path to summary chunk file", required = True)
if len(sys.argv) == 1:
parser.print_help()
else:
args = parser.parse_args()
summ_file = args.input_file
with open(summ_file) as input_file:
header_line = input_file.readline().rstrip()
taxa_ranks = []
for x,y in enumerate(header_line.split("\t")):
if x >= 2:
taxa_ranks.append((x, y))
for x,y in taxa_ranks:
input_file.seek(0)
next(input_file)
with open(f"{os.path.splitext(summ_file)[0]}_{y}.tsv", "w") as output_file:
output_file.write("ViPhOG\t#_taxons\tMost_significant\tMax_min_score\tOverlapping_taxons\tNext_max_score\n")
for line in input_file:
line = line.rstrip()
viphog_id = line.split("\t")[0]
rank_hits = ast.literal_eval(line.split("\t")[x])
total_hits = len(rank_hits)
most_significant = ""
score_range = ""
overlap = ""
next_max_score = ""
if total_hits > 0:
rank_hits_sorted = sorted(rank_hits, key = operator.itemgetter(2), reverse = True)
most_significant = rank_hits_sorted[0][0]
score_range = (rank_hits_sorted[0][2], rank_hits_sorted[0][3])
if total_hits > 1:
overlap = []
for elem in rank_hits_sorted[1:]:
if elem[2] >= score_range[1]:
overlap.append((elem[0], elem[2]))
if len(overlap) < 1:
overlap = ""
next_max_score = rank_hits_sorted[1][2]
output_file.write("{}\t{}\t{}\t{}\t{}\t{}\n".format(viphog_id, total_hits, most_significant, score_range, overlap, next_max_score))
| 45.935484 | 293 | 0.652388 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,358 | 0.476826 |
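The ViPhOG script above expects a tab-separated summary chunk file whose columns from index 2 onward hold, per taxonomic rank, a Python-literal list of hit tuples; it sorts each list by the score at index 2 and reports overlaps against the score at index 3. A toy sketch of that core logic on made-up data (the tuple layout is inferred from the code, not from the upstream pipeline):

```python
import ast
import operator

# One rank cell as the script would read it: (taxon, extra, max_score, min_score).
# Values here are invented for illustration.
cell = "[('Siphoviridae', 'x', 95.2, 40.1), ('Myoviridae', 'x', 60.3, 22.0)]"
rank_hits = ast.literal_eval(cell)

rank_hits_sorted = sorted(rank_hits, key=operator.itemgetter(2), reverse=True)
most_significant = rank_hits_sorted[0][0]
score_range = (rank_hits_sorted[0][2], rank_hits_sorted[0][3])

# A later taxon "overlaps" when its max score reaches into the top hit's range.
overlap = [(t[0], t[2]) for t in rank_hits_sorted[1:] if t[2] >= score_range[1]]
print(most_significant, score_range, overlap)
# Siphoviridae (95.2, 40.1) [('Myoviridae', 60.3)]
```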
| 54318f46c52690013bfe7cc4791a2d7dcc84bf04 | 6,349 | py | Python | bika/lims/permissions.py | hocinebendou/bika.gsoc | 85bc0c587de7f52073ae0e89bddbc77bf875f295 | ["MIT"] | null | null | null | bika/lims/permissions.py | hocinebendou/bika.gsoc | 85bc0c587de7f52073ae0e89bddbc77bf875f295 | ["MIT"] | null | null | null | bika/lims/permissions.py | hocinebendou/bika.gsoc | 85bc0c587de7f52073ae0e89bddbc77bf875f295 | ["MIT"] | null | null | null |
""" All permissions are defined here.
They are also defined in permissions.zcml.
The two files must be kept in sync.
bika.lims.__init__ imports * from this file, so
bika.lims.PermName or bika.lims.permissions.PermName are
both valid.
"""
from Products.CMFCore.permissions import AddPortalContent
# Add Permissions:
# ----------------
AddAnalysis = 'BIKA: Add Analysis'
AddAnalysisProfile = 'BIKA: Add AnalysisProfile'
AddAnalysisRequest = 'BIKA: Add Analysis Request'
AddAnalysisSpec = 'BIKA: Add AnalysisSpec'
AddAttachment = 'BIKA: Add Attachment'
AddARTemplate = 'BIKA: Add ARTemplate'
AddBatch = 'BIKA: Add Batch'
AddClient = 'BIKA: Add Client'
AddClientFolder = 'BIKA: Add ClientFolder'
AddInvoice = 'BIKA: Add Invoice'
AddMethod = 'BIKA: Add Method'
AddMultifile = 'BIKA: Add Multifile'
AddPricelist = 'BIKA: Add Pricelist'
AddProduct = 'BIKA: Add Product'
AddProductCategory = 'BIKA: Add ProductCategory'
AddStockItem = 'BIKA: Add StockItem'
AddSupplyOrder = 'BIKA: Add SupplyOrder'
AddInventoryOrder = 'BIKA: Add Inventory Order'
AddSample = 'BIKA: Add Sample'
AddSampleMatrix = 'BIKA: Add SampleMatrix'
AddSamplePartition = 'BIKA: Add SamplePartition'
AddSamplePoint = 'BIKA: Add SamplePoint'
AddStorageLocation = 'BIKA: Add StorageLocation'
AddSamplingDeviation = 'BIKA: Add SamplingDeviation'
AddSamplingRound = 'BIKA: Add SamplingRound'
AddSRTemplate = 'BIKA: Add SRTemplate'
AddStorageLevel = 'BIKA: Add StorageLevel'
AddStorageUnit = 'BIKA: Add StorageUnit'
AddSubGroup = 'BIKA: Add Sub-group'
# Default Archetypes Add Permission
ADD_CONTENT_PERMISSION = AddPortalContent
# Add Permissions for specific types, if required
ADD_CONTENT_PERMISSIONS = {
'ARAnalysisSpec': AddAnalysisSpec,
'AnalysisProfile': AddAnalysisProfile,
'Analysis': AddAnalysis,
'AnalysisRequest': AddAnalysisRequest,
'Attachment': AddAttachment,
'Batch': AddBatch,
'Client': AddClient,
'Invoice': AddInvoice,
'Method': AddMethod,
'Multifile': AddMultifile,
'SupplyOrder': AddSupplyOrder,
'Order': AddInventoryOrder,
'Sample': AddSample,
'SampleMatrix': AddSampleMatrix,
'SamplePartition': AddSamplePartition,
'SamplingDeviation': AddSamplingDeviation,
'SamplingRound': AddSamplingRound,
'SubGroup': AddSubGroup,
'StorageLevel': AddStorageLevel,
'StorageUnit': AddStorageUnit,
}
# Very Old permissions:
# ---------------------
ManageBika = 'BIKA: Manage Bika'
DispatchOrder = 'BIKA: Dispatch Order'
ManageAnalysisRequests = 'BIKA: Manage Analysis Requests'
ManageSamples = 'BIKA: Manage Samples'
ManageSuppliers = 'BIKA: Manage Reference Suppliers'
ManageReference = 'BIKA: Manage Reference'
PostInvoiceBatch = 'BIKA: Post Invoice batch'
ManagePricelists = 'BIKA: Manage Pricelists'
# This allows to edit all client fields, and perform admin tasks on Clients.
ManageClients = 'BIKA: Manage Clients'
# this is for creating and transitioning worksheets
ManageWorksheets = 'BIKA: Manage Worksheets'
# this is for adding/editing/exporting analyses on worksheets
EditWorksheet = 'BIKA: Edit Worksheet'
RejectWorksheet = 'BIKA: Reject Worksheet'
ImportInstrumentResults = "BIKA: Import Instrument Results"
AccessJSONAPI = 'BIKA: Access JSON API'
# New or changed permissions:
# ---------------------------
DispatchInventoryOrder = 'BIKA: Dispatch Inventory Order'
ReceiveInventoryOrder = 'BIKA: Receive Inventory Order'
StoreInventoryOrder = 'BIKA: Store Inventory Order'
SampleSample = 'BIKA: Sample Sample'
PreserveSample = 'BIKA: Preserve Sample'
ReceiveSample = 'BIKA: Receive Sample'
ExpireSample = 'BIKA: Expire Sample'
DisposeSample = 'BIKA: Dispose Sample'
ImportAnalysis = 'BIKA: Import Analysis'
Retract = "BIKA: Retract"
Verify = 'BIKA: Verify'
VerifyOwnResults = 'BIKA: Verify own results'
Publish = 'BIKA: Publish'
EditSample = 'BIKA: Edit Sample'
EditAR = 'BIKA: Edit AR'
ResultsNotRequested = 'BIKA: Results not requested'
ManageInvoices = 'BIKA: Manage Invoices'
ViewResults = 'BIKA: View Results'
EditResults = 'BIKA: Edit Results'
EditFieldResults = 'BIKA: Edit Field Results'
ViewRetractedAnalyses = 'BIKA: View Retracted Analyses'
CancelAndReinstate = 'BIKA: Cancel and reinstate'
# For adding login credentials to Contacts.
ManageLoginDetails = 'BIKA: Manage Login Details'
Assign = 'BIKA: Assign analyses'
Unassign = 'BIKA: Unassign analyses'
# Field permissions
EditARContact = "BIKA: Edit AR Contact"
ViewLogTab = 'BIKA: View Log Tab'
# Edit AR
# -----------------------------------------------------------------------------
# Allows to set values for AR fields in AR view
#
# Only takes effect if:
# - The AR's 'cancellation_state' is 'active'
# - The AR's 'review_state' is in:
# 'sample_registered', 'to_be_sampled', 'sampled', 'to_be_preserved',
# 'sample_due', 'sample_received', 'to_be_verified', 'attachment_due'
EditAR = 'BIKA: Edit AR'
# Edit Sample Partition
# -----------------------------------------------------------------------------
# Allows to set a Container and/or Preserver for a Sample Partition.
# See AR view: Sample Partitions table and Sample Partitions tab
#
# Only takes effect if:
# - The Sample's 'cancellation_state' is 'active'
# - The Sample's 'review_state' is in:
# 'sample_registered', 'to_be_sampled', 'sampled', 'to_be_preserved',
# 'sample_due', 'sample_received', 'to_be_verified', 'attachment_due'
EditSamplePartition = 'BIKA: Edit Sample Partition'
# Edit Client
# ----------------------------------------------
# Allows access to 'Edit' and 'Contacts' tabs from Client View
EditClient = 'BIKA: Edit Client'
# Manage Supply Orders
# ----------------------------------------------
# Allows access to 'Supply Orders' tab in Client context
ManageSupplyOrders = 'BIKA: Manage Supply Orders'
# Batch-specific permissions
# ----------------------------------------------
EditBatch = 'BIKA: Edit Batch'
CloseBatch = 'BIKA: Close Batch'
ReopenBatch = 'BIKA: Reopen Batch'
# Sampling Round permissions
# --------------------------
CloseSamplingRound = 'BIKA: Close SamplingRound'
ReopenSamplingRound = 'BIKA: Reopen SamplingRound'
# Manage AR Imports
# ----------------------------------------------
ManageARImport = 'BIKA: Manage ARImport'
# Manage AR Priorities
# ----------------------------------------------
ManageARPriority = 'BIKA: Manage ARPriority'
| 34.505435 | 79 | 0.695228 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,276 | 0.673492 |
| 5431f5aaf571f8d48be62c018da65e8a8b984c28 | 5,140 | py | Python | python/venv/lib/python2.7/site-packages/openstack/tests/unit/telemetry/v2/test_sample.py | sjsucohort6/openstack | 8471e6e599c3f52319926a582358358ef84cbadb | ["MIT"] | null | null | null | python/venv/lib/python2.7/site-packages/openstack/tests/unit/telemetry/v2/test_sample.py | sjsucohort6/openstack | 8471e6e599c3f52319926a582358358ef84cbadb | ["MIT"] | null | null | null | python/venv/lib/python2.7/site-packages/openstack/tests/unit/telemetry/v2/test_sample.py | sjsucohort6/openstack | 8471e6e599c3f52319926a582358358ef84cbadb | ["MIT"] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import testtools
from openstack.telemetry.v2 import sample
SAMPLE = {
'id': None,
'metadata': {'1': 'one'},
'meter': '2',
'project_id': '3',
'recorded_at': '4',
'resource_id': '5',
'source': '6',
'timestamp': '7',
'type': '8',
'unit': '9',
'user_id': '10',
'volume': '11.1',
}
OLD_SAMPLE = {
'counter_name': '1',
'counter_type': '2',
'counter_unit': '3',
'counter_volume': '4',
'message_id': None,
'project_id': '5',
'recorded_at': '6',
'resource_id': '7',
'resource_metadata': '8',
'source': '9',
'timestamp': '10',
'user_id': '11',
}
class TestSample(testtools.TestCase):
def test_basic(self):
sot = sample.Sample(SAMPLE)
self.assertIsNone(sot.resource_key)
self.assertIsNone(sot.resources_key)
self.assertEqual('/meters/%(meter)s', sot.base_path)
self.assertEqual('metering', sot.service.service_type)
self.assertTrue(sot.allow_create)
self.assertFalse(sot.allow_retrieve)
self.assertFalse(sot.allow_update)
self.assertFalse(sot.allow_delete)
self.assertTrue(sot.allow_list)
def test_make_new(self):
sot = sample.Sample(SAMPLE)
self.assertIsNone(sot.id)
self.assertEqual(SAMPLE['metadata'], sot.metadata)
self.assertEqual(SAMPLE['meter'], sot.meter)
self.assertEqual(SAMPLE['project_id'], sot.project_id)
self.assertEqual(SAMPLE['recorded_at'], sot.recorded_at)
self.assertEqual(SAMPLE['resource_id'], sot.resource_id)
self.assertIsNone(sot.sample_id)
self.assertEqual(SAMPLE['source'], sot.source)
self.assertEqual(SAMPLE['timestamp'], sot.generated_at)
self.assertEqual(SAMPLE['type'], sot.type)
self.assertEqual(SAMPLE['unit'], sot.unit)
self.assertEqual(SAMPLE['user_id'], sot.user_id)
self.assertEqual(SAMPLE['volume'], sot.volume)
def test_make_old(self):
sot = sample.Sample(OLD_SAMPLE)
self.assertIsNone(sot.id)
self.assertIsNone(sot.sample_id)
self.assertEqual(OLD_SAMPLE['counter_name'], sot.meter)
self.assertEqual(OLD_SAMPLE['counter_type'], sot.type)
self.assertEqual(OLD_SAMPLE['counter_unit'], sot.unit)
self.assertEqual(OLD_SAMPLE['counter_volume'], sot.volume)
self.assertEqual(OLD_SAMPLE['project_id'], sot.project_id)
self.assertEqual(OLD_SAMPLE['recorded_at'], sot.recorded_at)
self.assertEqual(OLD_SAMPLE['resource_id'], sot.resource_id)
self.assertEqual(OLD_SAMPLE['resource_metadata'], sot.metadata)
self.assertEqual(OLD_SAMPLE['source'], sot.source)
self.assertEqual(OLD_SAMPLE['timestamp'], sot.generated_at)
self.assertEqual(OLD_SAMPLE['user_id'], sot.user_id)
def test_list(self):
sess = mock.Mock()
resp = mock.Mock()
resp.body = [SAMPLE, OLD_SAMPLE]
sess.get = mock.Mock(return_value=resp)
path_args = {'meter': 'name_of_meter'}
found = sample.Sample.list(sess, path_args=path_args)
self.assertEqual(2, len(found))
first = found[0]
self.assertIsNone(first.id)
self.assertIsNone(first.sample_id)
self.assertEqual(SAMPLE['metadata'], first.metadata)
self.assertEqual(SAMPLE['meter'], first.meter)
self.assertEqual(SAMPLE['project_id'], first.project_id)
self.assertEqual(SAMPLE['recorded_at'], first.recorded_at)
self.assertEqual(SAMPLE['resource_id'], first.resource_id)
self.assertEqual(SAMPLE['source'], first.source)
self.assertEqual(SAMPLE['timestamp'], first.generated_at)
self.assertEqual(SAMPLE['type'], first.type)
self.assertEqual(SAMPLE['unit'], first.unit)
self.assertEqual(SAMPLE['user_id'], first.user_id)
self.assertEqual(SAMPLE['volume'], first.volume)
def test_create(self):
sess = mock.Mock()
resp = mock.Mock()
resp.body = [SAMPLE]
sess.post = mock.Mock(return_value=resp)
data = {'id': None,
'meter': 'temperature',
'project_id': 'project',
'resource_id': 'resource',
'type': 'gauge',
'unit': 'instance',
'volume': '98.6'}
new_sample = sample.Sample.new(**data)
new_sample.create(sess)
url = '/meters/temperature'
sess.post.assert_called_with(url, service=new_sample.service,
json=[data])
self.assertIsNone(new_sample.id)
| 36.978417 | 75 | 0.637743 | 3,949 | 0.768288 | 0 | 0 | 0 | 0 | 0 | 0 | 1,400 | 0.272374 |
| 54324dc90f9df188cfe21f89b7c0b9336f381fe0 | 7,645 | py | Python | data_convert/convert_text_to_tree.py | wlof-2/Text2Relation | a1321e3627fee4714d2c39c964d93d12d0802467 | ["MIT"] | null | null | null | data_convert/convert_text_to_tree.py | wlof-2/Text2Relation | a1321e3627fee4714d2c39c964d93d12d0802467 | ["MIT"] | null | null | null | data_convert/convert_text_to_tree.py | wlof-2/Text2Relation | a1321e3627fee4714d2c39c964d93d12d0802467 | ["MIT"] | null | null | null |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import os
import json
from collections import Counter, defaultdict
from data_convert.format.text2tree import Entity_Type, Text2Tree
from data_convert.task_format.event_extraction import Event, DyIEPP, Conll04
from data_convert.utils import read_file, check_output, data_counter_to_table, get_schema, output_schema
from nltk.corpus import stopwords
Ace_Entity_Type = {"ORG": "<ORG>", "VEH": "<VEH>", "WEA": "<WEA>",
"LOC": "<LOC>", "FAC": "<FAC>", "PER": "<PER>", "GPE": "<GPE>"}
Sci_Entity_Type = {'Metric': '<Metric>', 'Task': '<Task>',
'OtherScientificTerm': '<OtherScientificTerm>', 'Generic': '<Generic>', 'Material': '<Material>', 'Method': '<Method>'}
Conll04_Type = {'Org': '<Org>', 'Peop': '<Peop>',
'Other': '<Other>', 'Loc': '<Loc>'}
english_stopwords = set(stopwords.words('english') + ["'s", "'re", "%"])
def convert_file_tuple(file_tuple, data_class=Event, target_class=Text2Tree,
output_folder='data/text2tree/framenet', entity_Type = dict(),
ignore_nonevent=False, zh=False,
mark_tree=False, type_format='subtype'):
counter = defaultdict(Counter)
data_counter = defaultdict(Counter)
relation_schema_set = set()
span_output_folder = output_folder + '_span'
if not os.path.exists(span_output_folder):
os.makedirs(span_output_folder)
# an example in_filename is "data/raw_data/dyiepp_ace2005/train.json"
# an example output_filename is 'data/text2tree/ace2005_event/train'
for in_filename, output_filename in file_tuple(output_folder):
span_output_filename = output_filename.replace(
output_folder, span_output_folder)
relation_output = open(output_filename + '.json', 'w')
span_relation_output = open(span_output_filename + '.json', 'w')
for line in read_file(in_filename):
document = data_class(json.loads(line.strip()))
for sentence in document.generate_relations():
if ignore_nonevent and len(sentence['relations']) == 0:
continue
# source is the sentence text tokens
# target is the corresponding relation annotations
source, target = target_class.annotate_predicate_arguments(
tokens=sentence['tokens'],
predicate_arguments=sentence['relations'],
entities=sentence['entities'],
Entity_Type = entity_Type,
zh=zh
)
# To keep only sentences that actually contain relations, uncomment:
# if target == "<Temp_S> <Temp_E>":
# continue
# Collect the relation type schema, used later by the constrained decoder.
for relation in sentence['relations']:
relation_schema_set.add(relation['type'])
sep = '' if zh else ' '
counter['type'].update([relation['type']])
data_counter[in_filename].update(['relation'])
for argument in relation['arguments']:
data_counter[in_filename].update(['argument'])
data_counter[in_filename].update(['sentence'])
relation_output.write(json.dumps(
{'text': source, 'relation': target}, ensure_ascii=False) + '\n')
# for tokens and entities in one sentence
span_source, span_target = target_class.annotate_predicate_entities(
tokens=sentence['tokens'],
entities=sentence['entities'],
Entity_Type = entity_Type,
zh=zh,
mark_tree=mark_tree
)
# write the span format data, name entity format
span_relation_output.write(
json.dumps({'text': span_source, 'relation': span_target}, ensure_ascii=False) + '\n')
relation_output.close()
span_relation_output.close()
check_output(output_filename)
check_output(span_output_filename)
print('\n')
relation_type_list = list(relation_schema_set)
schema_output_file=os.path.join(output_folder, 'relation.schema')
with open(schema_output_file, 'w') as output:
output.write(json.dumps(relation_type_list) + '\n')
# output_schema(event_schema_set, output_file=os.path.join(
# span_output_folder, 'event.schema'))
print('Pred:', len(counter['pred']), counter['pred'].most_common(10))
print('Type:', len(counter['type']), counter['type'].most_common(10))
print('Role:', len(counter['role']), counter['role'].most_common(10))
print(data_counter_to_table(data_counter))
print('\n\n\n')
def convert_ace2005_event(output_folder='data/new_text2tree/ace2005_event', type_format='subtype',
ignore_nonevent=False, mark_tree=False):
from data_convert.task_format.event_extraction import ace2005_en_file_tuple
convert_file_tuple(file_tuple=ace2005_en_file_tuple,
output_folder=output_folder,
ignore_nonevent=ignore_nonevent,
mark_tree=mark_tree,
type_format=type_format,
entity_Type=Ace_Entity_Type,
)
def convert_sci_event(output_folder='data/new_text2tree/sci_relastion_', type_format='subtype',
ignore_nonevent=False, mark_tree=False):
from data_convert.task_format.event_extraction import sci_file_tuple
convert_file_tuple(file_tuple=sci_file_tuple,
output_folder=output_folder,
ignore_nonevent=ignore_nonevent,
entity_Type= Sci_Entity_Type,
mark_tree=mark_tree,
type_format=type_format,
data_class=DyIEPP,
)
def convert_conll04_relation(output_folder='data/new_text2tree/conll04_relation_', type_format='subtype',
ignore_nonevent=False, mark_tree=False):
from data_convert.task_format.event_extraction import conll04_file_tuple
convert_file_tuple(file_tuple=conll04_file_tuple,
output_folder=output_folder,
ignore_nonevent=ignore_nonevent,
entity_Type=Conll04_Type,
mark_tree=mark_tree,
type_format=type_format,
data_class=Conll04,
)
if __name__ == "__main__":
type_format_name = 'subtype'
convert_ace2005_event("data/new_text2tree/one_ie_ace2005_%s" % type_format_name,
type_format=type_format_name,
ignore_nonevent=False,
mark_tree=False
)
# """
# convert_sci_event("data/new_text2tree/sci_relation_%s" % type_format_name,
# type_format=type_format_name,
# ignore_nonevent=False,
# mark_tree=False)
# """
# convert_conll04_relation("data/new_text2tree/conll04_relation_%s" % type_format_name,
# type_format=type_format_name,
# ignore_nonevent=False,
# mark_tree=False)
| 45.778443 | 138 | 0.597515 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,970 | 0.257685 |
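Each line the converter above writes is a JSON object with `text` and `relation` fields. A small sketch of reading one output split back (the path is illustrative, following the default output folder naming above):

```python
import json

# Illustrative path, based on the default output_folder arguments above.
with open('data/new_text2tree/one_ie_ace2005_subtype/train.json') as fh:
    for line in fh:
        record = json.loads(line)
        print(record['text'][:60], '=>', record['relation'][:60])
        break  # just peek at the first record
```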
| 5432a871244c2f1064853af01dd1344e9304f2e3 | 1,246 | py | Python | arachnado/utils/spiders.py | wigginzz/arachnado | 8de92625262958e886263b4ccb189f4fc62d7400 | ["MIT"] | 2 | 2017-12-26T14:50:14.000Z | 2018-06-12T07:04:08.000Z | arachnado/utils/spiders.py | wigginzz/arachnado | 8de92625262958e886263b4ccb189f4fc62d7400 | ["MIT"] | null | null | null | arachnado/utils/spiders.py | wigginzz/arachnado | 8de92625262958e886263b4ccb189f4fc62d7400 | ["MIT"] | null | null | null |
from scrapy.utils.misc import walk_modules
from scrapy.utils.spider import iter_spider_classes
def get_spider_cls(url, spider_packages, default):
"""
Return spider class based on provided url.
:param url: if it looks like `spider://spidername` it tries to load spider
named `spidername`, otherwise it returns default spider class
:param spider_packages: a list of package names that will be searched for
spider classes
:param default: the class that is returned when `url` doesn't start with
`spider://`
"""
if url.startswith('spider://'):
spider_name = url[len('spider://'):]
return find_spider_cls(spider_name, spider_packages)
return default
def find_spider_cls(spider_name, spider_packages):
"""
Find the spider class whose name equals the `spider_name` argument
:param spider_name: spider name to look for
:param spider_packages: a list of package names that will be searched for
spider classes
"""
for package_name in spider_packages:
for module in walk_modules(package_name):
for spider_cls in iter_spider_classes(module):
if spider_cls.name == spider_name:
return spider_cls
| 35.6 | 78 | 0.690209 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 657 | 0.527287 |
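A hedged usage sketch for the helpers above, assuming a Scrapy project whose spiders live in a package named `myproject.spiders` (both that package and the fallback class below are illustrative, not part of arachnado):

```python
import scrapy

class FallbackSpider(scrapy.Spider):
    """Illustrative default used when the URL is a plain http(s) URL."""
    name = 'fallback'

# 'spider://news' resolves to the spider class whose .name == 'news',
# searched across the listed packages; a normal URL returns the default.
cls = get_spider_cls('spider://news', ['myproject.spiders'], FallbackSpider)
cls2 = get_spider_cls('https://example.com', ['myproject.spiders'], FallbackSpider)
assert cls2 is FallbackSpider
```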
| 543307112090d54acedcff9238e2cea7185b6c19 | 1,165 | py | Python | Social_Encoders.py | Haroon96/GraphRec-WWW19 | fc28eee70fad927d761c15cab97de52f5955dcfd | ["MIT"] | null | null | null | Social_Encoders.py | Haroon96/GraphRec-WWW19 | fc28eee70fad927d761c15cab97de52f5955dcfd | ["MIT"] | null | null | null | Social_Encoders.py | Haroon96/GraphRec-WWW19 | fc28eee70fad927d761c15cab97de52f5955dcfd | ["MIT"] | null | null | null |
import torch
import torch.nn as nn
from torch.nn import init
import torch.nn.functional as F
class Social_Encoder(nn.Module):
def __init__(self, features, embed_dim, social_adj_lists, aggregator, base_model=None, cuda="cpu"):
super(Social_Encoder, self).__init__()
self.features = features
self.social_adj_lists = social_adj_lists
self.aggregator = aggregator
if base_model is not None:
self.base_model = base_model
self.embed_dim = embed_dim
self.device = cuda
self.linear1 = nn.Linear(2 * self.embed_dim, self.embed_dim) #
def forward(self, nodes):
to_neighs = []
for node in nodes:
to_neighs.append(self.social_adj_lists.get(int(node), {}))
neigh_feats = self.aggregator.forward(nodes, to_neighs) # user-user network
self_feats = self.features(torch.LongTensor(nodes.cpu().numpy())).to(self.device)
self_feats = self_feats.t()
# self-connection could be considered.
combined = torch.cat([self_feats, neigh_feats], dim=1)
combined = F.relu(self.linear1(combined))
return combined
| 32.361111 | 103 | 0.654936 | 1,069 | 0.917597 | 0 | 0 | 0 | 0 | 0 | 0 | 63 | 0.054077 |
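A minimal sketch of wiring up the Social_Encoder above, with a dummy mean aggregator and an embedding table standing in for the real GraphRec components (all names below are illustrative; note that forward() transposes the feature matrix, so the feature function here returns embeddings transposed):

```python
import torch
import torch.nn as nn

num_users, embed_dim = 10, 8
emb = nn.Embedding(num_users, embed_dim)

# features(nodes) must return (embed_dim, len(nodes)) because forward() calls .t().
features = lambda nodes: emb(nodes).t()

class MeanAggregator:
    """Dummy stand-in: averages neighbor embeddings (zeros when no neighbors)."""
    def forward(self, nodes, to_neighs):
        out = torch.zeros(len(nodes), embed_dim)
        for i, neighs in enumerate(to_neighs):
            if neighs:
                out[i] = emb(torch.LongTensor(list(neighs))).mean(dim=0)
        return out

social_adj_lists = {0: {1, 2}, 1: {0}, 2: {0}}  # node 3 has no neighbors
enc = Social_Encoder(features, embed_dim, social_adj_lists, MeanAggregator())
nodes = torch.LongTensor([0, 1, 2, 3])
print(enc(nodes).shape)  # torch.Size([4, 8])
```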
| 5433996009680b5160e896f44a3bff1c9d65a2bb | 3,280 | py | Python | deephub/utils/__main__.py | deeplab-ai/deephub | b1d271436fab69cdfad14f19fa2e29c5338f18d6 | ["Apache-2.0"] | 8 | 2019-10-17T12:46:13.000Z | 2020-03-12T08:09:40.000Z | deephub/utils/__main__.py | deeplab-ai/deephub | b1d271436fab69cdfad14f19fa2e29c5338f18d6 | ["Apache-2.0"] | 12 | 2019-10-22T13:11:56.000Z | 2022-02-10T00:23:30.000Z | deephub/utils/__main__.py | deeplab-ai/deephub | b1d271436fab69cdfad14f19fa2e29c5338f18d6 | ["Apache-2.0"] | 1 | 2019-10-17T13:21:27.000Z | 2019-10-17T13:21:27.000Z |
import click
import time
from deephub.common.io import resolve_glob_pattern
from deephub.models.feeders.tfrecords.meta import generate_fileinfo, get_fileinfo, TFRecordValidationError, \
TFRecordInfoMissingError
@click.group()
def cli():
"""
General purpose CLI utils.
"""
pass
@cli.command()
@click.argument('pattern', type=str)
@click.option('--force', is_flag=True, default=False,
help='It will forcefully regenerate metadata even for tfrecords that '
'have not changed.')
@click.option('--compression_type', type=str, default='', help="""Compression type of the tfrecord file. Options:
'' for no compression
'GZIP' for gzip compression""")
def generate_metadata(pattern, force, compression_type):
"""
Generate metadata for tfrecord files.
With this util you can generate metadata from tfrecords based on a matching
glob pattern.
Example: Generate metadata for training dataset
deep utils generate-metadata 'dataset/train-*'
"""
files = resolve_glob_pattern(pattern)
click.echo(f"{len(files)} files matched with the pattern.")
with click.progressbar(files) as files:
for fpath in files:
try:
generate_fileinfo(fpath, compression_type=compression_type)
except Exception as e:
click.echo(f'Skipping file {fpath} because of: {e!s}')
click.echo('Finished generating metadata')
@cli.command()
@click.argument('pattern', type=str)
def total_examples(pattern) -> int:
"""
Get total examples for all the files matched with the given input file pattern.
"""
files = resolve_glob_pattern(pattern)
click.echo(f"{len(files)} files matched with the pattern.")
total_rows = 0
for file in files:
try:
total_rows += get_fileinfo(file).total_records
except Exception:
pass
click.echo(f"Total number of examples: {total_rows}")
@cli.command()
@click.argument('pattern', type=str)
@click.option('--shallow-check/--deep-check', default=True,
help='Flag in order to control shallow or deep md5 hash check. With shallow-check only the size of '
'each file will be validated, while with deep-check both size and md5 hash of each file will be '
'validated.')
def validate(pattern: str, shallow_check: bool):
"""
Validate each one of the files matched using the input file pattern.
"""
start = time.time()
files = resolve_glob_pattern(pattern)
click.echo(f"{len(files)} files matched with the pattern.")
with click.progressbar(files) as files:
for file in files:
try:
get_fileinfo(file, shallow_check) # inside here happens the validation step too
except TFRecordValidationError:
raise
except TFRecordInfoMissingError:
raise
except Exception as e: # Probably not a valid tfrecords file
click.echo(f'Probably not a valid tf_record file {e}')
end = time.time()
click.echo(f"Total execution time: {end - start}")
| 33.814433 | 115 | 0.633232 | 0 | 0 | 0 | 0 | 3,052 | 0.930488 | 0 | 0 | 1,502 | 0.457927 |
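The commands above hang off a click group, so `generate_metadata` is exposed as `generate-metadata` on the command line (click replaces underscores with dashes). A sketch of exercising them in-process with click's test runner (the glob pattern is a placeholder; the commands still need real tfrecord files to report anything useful):

```python
from click.testing import CliRunner

runner = CliRunner()
# Equivalent to running: <cli> total-examples 'dataset/train-*'
result = runner.invoke(cli, ['total-examples', 'dataset/train-*'])
print(result.exit_code, result.output)
```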
| 543471083e8ed6e6fd0d08082e7de83061292ab1 | 10,072 | py | Python | utils_mit_im.py | putama/visualcomposition | ada3d8e71b79a5f3e239718f3cdac58eca5e1327 | ["MIT"] | null | null | null | utils_mit_im.py | putama/visualcomposition | ada3d8e71b79a5f3e239718f3cdac58eca5e1327 | ["MIT"] | null | null | null | utils_mit_im.py | putama/visualcomposition | ada3d8e71b79a5f3e239718f3cdac58eca5e1327 | ["MIT"] | null | null | null |
import numpy as np
import cPickle
import os
from scipy.io import loadmat
import time
import h5py
import json
import copy
import bz2
def unique_rows(a):
b = np.ascontiguousarray(a).view(np.dtype((np.void, a.dtype.itemsize * a.shape[1])))
_, idx = np.unique(b, return_index=True)
return a[idx], idx;
def setdiff2d(a1, a2):
assert a1.dtype == a2.dtype;
#only works with numpy >= 1.7
versplit = [int(x) for x in np.__version__.split('.')];
assert versplit[0]>=1 and versplit[1]>=7;
a1_rows = a1.view([('', a1.dtype)] * a1.shape[1])
a2_rows = a2.view([('', a2.dtype)] * a2.shape[1])
return np.setdiff1d(a1_rows, a2_rows).view(a1.dtype).reshape(-1, a1.shape[1])
def argtopk(a, k):
ind = np.argpartition(a,-k)[-k:]
srtind = ind[np.argsort(a[ind])[::-1]];
return srtind;
def get_dir_list(dirPath, extension = None):
onlydirs = [ os.path.join(dirPath,f) for f in os.listdir(dirPath) if os.path.isdir(os.path.join(dirPath,f)) ];
if extension!= None:
onlydirs = [f for f in onlydirs if os.path.splitext(f)[1]==extension];
onlydirs.sort();
return onlydirs;
#extension with "." e.g. .jpg
def get_file_list(dirPath, extension = None):
onlyfiles = [ os.path.join(dirPath,f) for f in os.listdir(dirPath) if os.path.isfile(os.path.join(dirPath,f)) ];
if extension!= None:
onlyfiles = [f for f in onlyfiles if os.path.splitext(f)[1]==extension];
onlyfiles.sort();
return onlyfiles;
def get_file_list_prefix(dirPath, prefix, extension=None):
onlyfiles = [ os.path.join(dirPath,f) for f in os.listdir(dirPath) if os.path.isfile(os.path.join(dirPath,f)) and f.startswith(prefix) ];
if extension!= None:
onlyfiles = [f for f in onlyfiles if os.path.splitext(f)[1]==extension];
onlyfiles.sort();
return onlyfiles;
def list_to_indexed_dict(lvar):
dvar = {};
for ind, item in enumerate(lvar):
dvar[item]=ind;
return dvar;
def tic_toc_print(interval, string):
global tic_toc_print_time_old
if 'tic_toc_print_time_old' not in globals():
tic_toc_print_time_old = time.time()
print string
else:
new_time = time.time()
if new_time - tic_toc_print_time_old > interval:
tic_toc_print_time_old = new_time;
print string
def mkdir(output_dir):
return mkdir_if_missing(output_dir);
def mkdir_if_missing(output_dir):
"""
def mkdir_if_missing(output_dir)
"""
if not os.path.exists(output_dir):
try:
os.makedirs(output_dir)
return True;
except: #generally happens when many processes try to make this dir
return False;
def recurse_get_mat_struct(v, curr_field=None):
accum_dict = {};
if type(v).__name__ != 'mat_struct':
if type(v).__name__ == 'ndarray':
#sometimes we have nested mat_structs ...
numel = v.size;
found_nested_structs = False;
for x in range(numel):
if type(v.item(x)).__name__ == 'mat_struct':
if found_nested_structs == False:
accum_dict[curr_field]=[];
found_nested_structs = True;
if found_nested_structs:
newdict = recurse_get_mat_struct(v.item(x), curr_field);
accum_dict[curr_field].append(newdict);
if found_nested_structs == False:
accum_dict[curr_field] = v;
else:
accum_dict[curr_field] = v;
else:
for field in v._fieldnames:
local_dict = recurse_get_mat_struct( getattr(v, field), field );
if field not in local_dict:
accum_dict[field] = copy.deepcopy(local_dict);
else:
accum_dict[field] = copy.deepcopy(local_dict[field]);
if curr_field not in accum_dict:
ret_dict = {};
ret_dict[curr_field] = copy.deepcopy(accum_dict);
accum_dict = ret_dict;
return accum_dict;
def mat_to_dict(mat_name):
matfile = loadmat(mat_name, squeeze_me=True, struct_as_record=False);
var_keys = matfile.keys();
allVarDict = {};
for v in var_keys:
if v.startswith('__') == True:
continue;
dictData = {};
for field in matfile[v]._fieldnames:
localDict = recurse_get_mat_struct( getattr(matfile[v], field), field );
if field not in localDict:
dictData[field] = copy.deepcopy(localDict);
else:
dictData[field] = copy.deepcopy(localDict[field]);
allVarDict[v] = dictData;
return allVarDict;
def save_variables_h5(h5_file_name, var, info, overwrite = False):
if info is None:
return save_variables_h5_dict(h5_file_name, var, overwrite)
if os.path.exists(h5_file_name) and overwrite == False:
raise Exception('{:s} exists and over write is false.'.format(h5_file_name))
# Construct the dictionary
assert(type(var) == list); assert(type(info) == list);
with h5py.File(h5_file_name, 'w') as f:
for i in range(len(info)):
d = f.create_dataset(info[i], data=var[i], chunks=True, compression="gzip", compression_opts=9);
def rec_get_keys(fh, src, keyList):
if src!='' and type(fh[src]).__name__ == 'Dataset':
keyList.append(src);
return keyList;
if src!='':
moreSrcs = fh[src].keys();
else:
moreSrcs = fh.keys();
for kk in moreSrcs:
if src=='':
keyList = rec_get_keys(fh, kk, keyList);
else:
keyList = rec_get_keys(fh, src+'/'+kk, keyList);
return keyList;
def get_h5_keys(h5_file_name):
if os.path.exists(h5_file_name):
with h5py.File(h5_file_name,'r') as f:
keyList = rec_get_keys(f, '', []);
return keyList;
else:
raise Exception('{:s} does not exists.'.format(h5_file_name))
def save_variables_h5_dict(h5_file_name, dictVar, overwrite = False):
if os.path.exists(h5_file_name) and overwrite == False:
raise Exception('{:s} exists and over write is false.'.format(h5_file_name))
# Construct the dictionary
assert(type(dictVar) == dict);
with h5py.File(h5_file_name, 'w') as f:
for key in dictVar:
d = f.create_dataset(key, data=dictVar[key], chunks=True, compression="gzip", compression_opts=9);
def load_variablesh5(h5_file_name):
if os.path.exists(h5_file_name):
with h5py.File(h5_file_name,'r') as f:
d = {};
h5keys = get_h5_keys(h5_file_name);
for key in h5keys:
d[key] = f[key].value;
return d
else:
raise Exception('{:s} does not exists.'.format(h5_file_name))
def save_variables(pickle_file_name, var, info, overwrite = False):
"""
def save_variables(pickle_file_name, var, info, overwrite = False)
"""
fext = os.path.splitext(pickle_file_name)[1]
if fext =='.h5':
return save_variables_h5(pickle_file_name, var, info, overwrite);
elif fext == '.pkl' or fext == '.pklz':
if os.path.exists(pickle_file_name) and overwrite == False:
raise Exception('{:s} exists and over write is false.'.format(pickle_file_name))
if info is not None:
# Construct the dictionary
assert(type(var) == list); assert(type(info) == list);
d = {}
for i in xrange(len(var)):
d[info[i]] = var[i]
else: #we have the dictionary in var
d = var;
if fext == '.pkl':
with open(pickle_file_name, 'wb') as f:
cPickle.dump(d, f, cPickle.HIGHEST_PROTOCOL)
else:
with bz2.BZ2File(pickle_file_name, 'w') as f:
cPickle.dump(d, f, cPickle.HIGHEST_PROTOCOL)
else:
raise Exception('{:s}: extension unknown'.format(fext))
def load_variables(pickle_file_name):
"""
d = load_variables(pickle_file_name)
Output:
d is a dictionary of variables stored in the pickle file.
"""
fext = os.path.splitext(pickle_file_name)[1]
if fext =='.h5':
return load_variablesh5(pickle_file_name);
elif fext == '.pkl' or fext == '.pklz':
if os.path.exists(pickle_file_name):
if fext == '.pkl':
with open(pickle_file_name, 'rb') as f:
d = cPickle.load(f)
else:
with bz2.BZ2File(pickle_file_name, 'r') as f:
d = cPickle.load(f)
return d
else:
raise Exception('{:s} does not exists.'.format(pickle_file_name))
elif fext == '.json':
with open(pickle_file_name, 'r') as fh:
data = json.load(fh)
return data
else:
raise Exception('{:s}: extension unknown'.format(fext))
#wrappers for load_variables and save_variables
def load(pickle_file_name):
return load_variables(pickle_file_name);
def save(pickle_file_name, var, info, overwrite = False):
return save_variables(pickle_file_name, var, info, overwrite);
def calc_pr_ovr_noref(counts, out):
"""
[P, R, score, ap] = calc_pr_ovr(counts, out, K)
Input :
counts : number of occurrences of this word in the ith image
out : score for this image
Output :
P, R : precision and recall
score : score which corresponds to the particular precision and recall
ap : average precision
"""
#binarize counts
out = out.astype(np.float64)
counts = np.array(counts > 0, dtype=np.float32);
tog = np.hstack((counts[:,np.newaxis].astype(np.float64), out[:, np.newaxis].astype(np.float64)))
ind = np.argsort(out)
ind = ind[::-1]
score = np.array([tog[i,1] for i in ind])
sortcounts = np.array([tog[i,0] for i in ind])
tp = sortcounts;
fp = sortcounts.copy();
for i in xrange(sortcounts.shape[0]):
if sortcounts[i] >= 1:
fp[i] = 0.;
elif sortcounts[i] < 1:
fp[i] = 1.;
tp = np.cumsum(tp)
fp = np.cumsum(fp)
# P = np.cumsum(tp)/(np.cumsum(tp) + np.cumsum(fp));
P = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
numinst = np.sum(counts);
R = tp/numinst
ap = voc_ap(R,P)
return P, R, score, ap
def voc_ap(rec, prec):
# correct AP calculation
# first append sentinel values at the end
mrec = np.concatenate(([0.], rec, [1.]))
mpre = np.concatenate(([0.], prec, [0.]))
# compute the precision envelope
for i in range(mpre.size - 1, 0, -1):
mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
# to calculate area under PR curve, look for points
# where X axis (recall) changes value
i = np.where(mrec[1:] != mrec[:-1])[0]
# and sum (\Delta recall) * prec
ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
return ap
| 32.807818 | 141 | 0.652502 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,588 | 0.157665 |
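The `voc_ap` helper above implements the standard VOC average-precision computation: append sentinel values, take the running maximum of precision from right to left (the precision envelope), and sum precision over the recall steps. A small self-contained check of that logic, written for Python 3 with made-up precision/recall points:

```python
import numpy as np

def voc_ap(rec, prec):
    # Same algorithm as above: sentinels, precision envelope, step sum.
    mrec = np.concatenate(([0.], rec, [1.]))
    mpre = np.concatenate(([0.], prec, [0.]))
    for i in range(mpre.size - 1, 0, -1):
        mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
    i = np.where(mrec[1:] != mrec[:-1])[0]
    return np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])

rec = np.array([0.25, 0.5, 0.75, 1.0])
prec = np.array([1.0, 0.5, 0.66, 0.5])
# Each 0.25 recall step is weighted by the envelope precision:
# 0.25 * (1.0 + 0.66 + 0.66 + 0.5) = 0.705
print(voc_ap(rec, prec))
```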
| 5435607e763096b9e0e81fbf68d44b9c31b6852e | 1,085 | py | Python | python_teste/python_aulas/aula_94.py | BrunoDantasMoreira/projectsPython | bd73ab0b3c067456407f227ed2ece42e7f21ddfc | ["MIT"] | 1 | 2020-07-27T14:18:08.000Z | 2020-07-27T14:18:08.000Z | python_teste/python_aulas/aula_94.py | BrunoDantasMoreira/projectsPython | bd73ab0b3c067456407f227ed2ece42e7f21ddfc | ["MIT"] | null | null | null | python_teste/python_aulas/aula_94.py | BrunoDantasMoreira/projectsPython | bd73ab0b3c067456407f227ed2ece42e7f21ddfc | ["MIT"] | null | null | null |
dict = {}
lista = []
soma = 0
while True:
    dict['nome'] = str(input('Name: ')).capitalize()
    dict['sexo'] = str(input('Sex: ')).strip().upper()[0]
    while dict['sexo'] not in 'MF':
        print('ERROR! Please enter only M or F')
        dict['sexo'] = str(input('Sex: ')).strip().upper()[0]
    dict['idade'] = int(input('Age: '))
    soma += dict['idade']
    lista.append(dict.copy())
    opção = str(input('Do you want to continue? ')).strip().upper()[0]
    while opção not in 'SN':
        print('ERROR! Answer only S or N.')
        opção = str(input('Do you want to continue? ')).strip().upper()[0]
    if opção == 'N':
        break
print('-='*30)
print(f'A) In total we have {len(lista)} people registered.')
media = soma / len(lista)
print(f'B) The average age is {media:5.2f} years')
print('C) The registered women were ', end='')
for p in lista:
    if p['sexo'] == 'F':
        print(f'{p["nome"]}', end=' ')
print()
print('D) The people older than the average are ', end='')
for c in lista:
    if c['idade'] > media:
        print(f'{c["nome"]}', end=' ')
| 33.90625 | 65 | 0.562212 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 430 | 0.392336 |
| 543805ee596eba6c41f93710a63dc5eaf28196da | 7,894 | py | Python | nlp/layers/linears.py | zhihao-chen/NLP-experiments | c7512276050f5b8489adb4c745fa970ea8119646 | ["MIT"] | 4 | 2021-11-10T03:49:28.000Z | 2022-03-24T02:18:44.000Z | nlp/layers/linears.py | zhihao-chen/NLP-experiments | c7512276050f5b8489adb4c745fa970ea8119646 | ["MIT"] | null | null | null | nlp/layers/linears.py | zhihao-chen/NLP-experiments | c7512276050f5b8489adb4c745fa970ea8119646 | ["MIT"] | 1 | 2021-11-14T18:01:18.000Z | 2021-11-14T18:01:18.000Z |
# -*- coding: utf8 -*-
"""
======================================
Project Name: NLP
File Name: linears
Author: czh
Create Date: 2021/11/15
--------------------------------------
Change Activity:
======================================
"""
import math
import torch
import torch.nn as nn
import torch.nn.functional as func
from torch.nn.parameter import Parameter
class Linears(nn.Module):
def __init__(self, input_dim: int, output_dim: int = 1, bias: bool = True):
super().__init__()
self.fn1 = nn.Linear(input_dim, input_dim)
self.fn2 = nn.Linear(input_dim, input_dim)
self.fn3 = nn.Linear(input_dim, output_dim, bias=bias)
nn.init.orthogonal_(self.fn1.weight, gain=1)
nn.init.orthogonal_(self.fn2.weight, gain=1)
nn.init.orthogonal_(self.fn3.weight, gain=1)
def forward(self, hidden_states: torch.Tensor, encoder_hidden_states: torch.Tensor):
logits = self.fn3(torch.tanh(
self.fn1(hidden_states).unsqueeze(2) + self.fn2(encoder_hidden_states).unsqueeze(1)
)).squeeze()
return logits
class EntityLinears(nn.Module):
def __init__(self, input_dim: int, output_dim: int = 1, bias: bool = True):
super().__init__()
self.head = Linears(input_dim=input_dim, output_dim=output_dim, bias=bias)
self.tail = Linears(input_dim=input_dim, output_dim=output_dim, bias=bias)
def forward(self, hidden_states: torch.Tensor, encoder_hidden_states: torch.Tensor):
# [bsz, num_triples, seq_len, output_dim]
return self.head(hidden_states, encoder_hidden_states), self.tail(hidden_states, encoder_hidden_states)
class FeedForwardNetwork(nn.Module):
def __init__(self, input_size, hidden_size, output_size, dropout_rate=0):
super(FeedForwardNetwork, self).__init__()
self.dropout_rate = dropout_rate
self.linear1 = nn.Linear(input_size, hidden_size)
self.linear2 = nn.Linear(hidden_size, output_size)
def forward(self, x):
x_proj = func.dropout(func.relu(self.linear1(x)), p=self.dropout_rate, training=self.training)
x_proj = self.linear2(x_proj)
return x_proj
class PoolerStartLogits(nn.Module):
"""
bert_ner_span
"""
def __init__(self, hidden_size, num_classes):
super(PoolerStartLogits, self).__init__()
self.dense = nn.Linear(hidden_size, num_classes)
def forward(self, hidden_states):
x = self.dense(hidden_states)
return x
class PoolerEndLogits(nn.Module):
"""
bert_ner_span
"""
def __init__(self, hidden_size, num_classes):
super(PoolerEndLogits, self).__init__()
self.dense_0 = nn.Linear(hidden_size + num_classes, hidden_size)  # forward() concatenates start_positions onto hidden_states, so the input width is hidden_size + num_classes
self.activation = nn.Tanh()
self.LayerNorm = nn.LayerNorm(hidden_size)
self.dense_1 = nn.Linear(hidden_size, num_classes)
def forward(self, hidden_states, start_positions=None):
x = self.dense_0(torch.cat([hidden_states, start_positions], dim=-1))
x = self.activation(x)
x = self.LayerNorm(x)
x = self.dense_1(x)
return x
class MultiNonLinearClassifier(nn.Module):
def __init__(self, hidden_size, num_label, dropout_rate, act_func="gelu", intermediate_hidden_size=None):
super(MultiNonLinearClassifier, self).__init__()
self.num_label = num_label
self.intermediate_hidden_size = hidden_size if intermediate_hidden_size is None else intermediate_hidden_size
self.classifier1 = nn.Linear(hidden_size, self.intermediate_hidden_size)
self.classifier2 = nn.Linear(self.intermediate_hidden_size, self.num_label)
self.dropout = nn.Dropout(dropout_rate)
self.act_func = act_func
def forward(self, input_features):
features_output1 = self.classifier1(input_features)
if self.act_func == "gelu":
features_output1 = func.gelu(features_output1)
elif self.act_func == "relu":
features_output1 = func.relu(features_output1)
elif self.act_func == "tanh":
features_output1 = func.tanh(features_output1)
else:
raise ValueError
features_output1 = self.dropout(features_output1)
features_output2 = self.classifier2(features_output1)
return features_output2
class SingleLinearClassifier(nn.Module):
def __init__(self, hidden_size, num_label):
super(SingleLinearClassifier, self).__init__()
self.num_label = num_label
self.classifier = nn.Linear(hidden_size, num_label)
def forward(self, input_features):
features_output = self.classifier(input_features)
return features_output
class BERTTaggerClassifier(nn.Module):
def __init__(self, hidden_size, num_label, dropout_rate, act_func="gelu", intermediate_hidden_size=None):
super(BERTTaggerClassifier, self).__init__()
self.num_label = num_label
self.intermediate_hidden_size = hidden_size if intermediate_hidden_size is None else intermediate_hidden_size
self.classifier1 = nn.Linear(hidden_size, self.intermediate_hidden_size)
self.classifier2 = nn.Linear(self.intermediate_hidden_size, self.num_label)
self.dropout = nn.Dropout(dropout_rate)
self.act_func = act_func
def forward(self, input_features):
features_output1 = self.classifier1(input_features)
if self.act_func == "gelu":
features_output1 = func.gelu(features_output1)
elif self.act_func == "relu":
features_output1 = func.relu(features_output1)
elif self.act_func == "tanh":
features_output1 = func.tanh(features_output1)
else:
raise ValueError
features_output1 = self.dropout(features_output1)
features_output2 = self.classifier2(features_output1)
return features_output2
class ClassifierLayer(nn.Module):
# https://github.com/Akeepers/LEAR/blob/master/utils/model_utils.py
def __init__(self, class_num, out_features, bias=True):
super(ClassifierLayer, self).__init__()
self.class_num = class_num
self.out_features = out_features
self.weight = Parameter(torch.Tensor(class_num, out_features))
if bias:
self.bias = Parameter(torch.Tensor(class_num))
else:
self.register_parameter('bias', None)
self.reset_parameters()
def reset_parameters(self) -> None:
nn.init.kaiming_uniform_(self.weight, a=math.sqrt(5))
if self.bias is not None:
fan_in, _ = nn.init._calculate_fan_in_and_fan_out(self.weight)
bound = 1 / math.sqrt(fan_in)
nn.init.uniform_(self.bias, -bound, bound)
def forward(self, inputs):
x = torch.mul(inputs, self.weight)
# (class_num, 1)
x = torch.sum(x, -1) # [-1, class_num]
if self.bias is not None:
x = x + self.bias
return x
def extra_repr(self):
return 'class_num={}, out_features={}, bias={}'.format(
self.class_num, self.out_features, self.bias is not None)
class MultiNonLinearClassifierForMultiLabel(nn.Module):
# https://github.com/Akeepers/LEAR/blob/master/utils/model_utils.py
def __init__(self, hidden_size, num_label, dropout_rate):
super(MultiNonLinearClassifierForMultiLabel, self).__init__()
self.num_label = num_label
self.classifier1 = nn.Linear(hidden_size, hidden_size)
self.classifier2 = ClassifierLayer(num_label, hidden_size)
self.dropout = nn.Dropout(dropout_rate)
def forward(self, input_features):
features_output1 = self.classifier1(input_features)
features_output1 = func.gelu(features_output1)
features_output1 = self.dropout(features_output1)
features_output2 = self.classifier2(features_output1)
return features_output2
| 38.8867 | 117 | 0.673676 | 7,481 | 0.947682 | 0 | 0 | 0 | 0 | 0 | 0 | 617 | 0.078161 |
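A quick shape check for the MultiNonLinearClassifier above (the dimensions are arbitrary illustrative values):

```python
import torch

clf = MultiNonLinearClassifier(hidden_size=768, num_label=5, dropout_rate=0.1)
x = torch.randn(2, 128, 768)   # (batch, seq_len, hidden)
logits = clf(x)
print(logits.shape)            # torch.Size([2, 128, 5])
```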
| 5438824c4ced393aa643d5e74bfabb01555d5d5c | 2,037 | py | Python | components/siren.py | TalaoDAO/ecole42 | 2236f24527966195c953f222f9715ee967348b0f | ["Apache-2.0"] | 1 | 2021-09-22T16:30:57.000Z | 2021-09-22T16:30:57.000Z | components/siren.py | TalaoDAO/credential-repository | d36c694d9e90ead8a35bd8cc5be47c6d951474ba | ["Apache-2.0"] | null | null | null | components/siren.py | TalaoDAO/credential-repository | d36c694d9e90ead8a35bd8cc5be47c6d951474ba | ["Apache-2.0"] | null | null | null |
import requests

def company(SIREN):
    r = requests.get('https://entreprise.data.gouv.fr/api/sirene/v2/siren/' + SIREN)
    json_object = r.json()
    settings = dict()
    if json_object['sirene']['status'] == 404:
        return None
    if json_object['sirene']['data']['siege_social']['nom_raison_sociale'] is not None:
        settings['name'] = json_object['sirene']['data']['siege_social']['nom_raison_sociale']
    else:
        settings['name'] = ''
    try:
        settings['address'] = json_object['sirene']['data']['siege_social']['numero_voie'] + ' ' + json_object['sirene']['data']['siege_social']['type_voie'] + ' ' + json_object['sirene']['data']['siege_social']['libelle_voie'] + ' ' + json_object['sirene']['data']['siege_social']['code_postal'] + ' ' + json_object['sirene']['data']['siege_social']['libelle_commune']
    except Exception:
        settings['address'] = ''
    if json_object['sirene']['data']['total_results'] is not None:
        settings['group'] = json_object['sirene']['data']['total_results']
    else:
        settings['group'] = ''
    # staff headcount bands used by the Sirene API
    Dictionnaire_effectifs = {'NN': "No staff members", '00': '0', '01': "1-2", '02': "3-5",
                              '03': "6-9", '11': "10-19", '12': "20-49", '21': "50-99", '22': "100-199",
                              '31': "200-249", '32': "250-499", '41': "500-999", '42': "1000-1999",
                              '51': "2000-4999", '52': "5000-9999", '53': "+10 000"}
    if json_object['sirene']['data']['siege_social']['tranche_effectif_salarie'] is not None:
        settings['staff'] = Dictionnaire_effectifs[json_object['sirene']['data']['siege_social']['tranche_effectif_salarie']]
    else:
        settings['staff'] = ''
    if json_object['sirene']['data']['siege_social']['libelle_activite_principale_entreprise'] is not None:
        settings['activity'] = json_object['sirene']['data']['siege_social']['libelle_activite_principale_entreprise']
    else:
        settings['activity'] = ''
    return settings
| 55.054054 | 370 | 0.571919 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 977 | 0.479627 |
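A hedged usage sketch for company() above; it performs a live HTTP call to the entreprise.data.gouv.fr Sirene API, so it needs network access, and the SIREN below is a made-up placeholder:

```python
# Hypothetical 9-digit SIREN; replace with a real one.
settings = company('123456789')
if settings is None:
    print('SIREN not found')
else:
    print(settings['name'], '-', settings['staff'], 'employees')
```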
| 5438db8d908a649df431fff16b0d49559bcdf6d6 | 2,036 | py | Python | Week 2/medt_opdracht_9.py | zowie93/ISCRIPT | fa3e5122be8ef47b23c23554ec9e1c04b37da562 | ["MIT"] | null | null | null | Week 2/medt_opdracht_9.py | zowie93/ISCRIPT | fa3e5122be8ef47b23c23554ec9e1c04b37da562 | ["MIT"] | null | null | null | Week 2/medt_opdracht_9.py | zowie93/ISCRIPT | fa3e5122be8ef47b23c23554ec9e1c04b37da562 | ["MIT"] | null | null | null |
"""
Opdracht 9 - Loonbrief
https://dodona.ugent.be/nl/exercises/990750894/
"""
# functie voor start amount
def get_start_amount():
# return start amount
return int(input("Start bedrag: "))
# functie voor salaris
def get_salary():
# array van salaris maken
salary = []
# count op nul zetten
count = 0
# wanneer de loop niet wordt afgebroken
while True:
# counter bijhouden voor werknemer id
count += 1
# input voor het salaris betreffende werknemer
input_salary = input("Werknemer " + str(count) + ": ")
# lengte controleren en controleren op het woordje stop
if len(salary) > 3 and input_salary.lower() == "stop":
break
# salaris toevoegen aan de array
salary.append(int(input_salary))
# returnen van salaris
return salary
# functie voor gemiddeld salaris
def get_average_salary(salary):
# totaal aantal salaris
total = sum(salary)
# aantal werknemmers
contributors = len(salary)
# gemiddeld salaris per werknemer
average = total / contributors
# returnen van gemiddelde
return average
def print_salary(start_amount, salary, average):
# count op nul zetten
count = 0
# begin salaris of opgegeven bedrag
total = start_amount
# for loop die door het salaris heen gaat
for amount in salary:
# telkens 1tje optellen
count += 1
# loon erbij optellen
total = total + amount
# uitprinten van totaal bedrag werknemer
print("Werknemer #" + str(count) + " fluistert €" + str(total))
# uitprinten gemiddelde loon
print("Gemiddeld loon: €" + str("{0:.2f}".format(average)))
def main():
# begin bedrag verkrijgen
start_amount = get_start_amount()
# salaris verkrijgen
salary = get_salary()
# gemiddelde verkrijgen
average = get_average_salary(salary)
# printen van alle benodigde dingen
print_salary(start_amount, salary, average)
if __name__ == '__main__':
main()
| 24.238095 | 71 | 0.651768 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 978 | 0.479412 |
| 543993f4662d66952cafe8284d07a22ac01ccee7 | 1,845 | py | Python | 4.1.1-simple-object-tracking-video.py | CleverYh/opencv_py | 20b28e8ef20fa3015f4f7c20ed69fed954c16805 | ["MIT"] | 2 | 2020-04-05T13:44:13.000Z | 2020-07-06T08:53:58.000Z | 4.1.1-simple-object-tracking-video.py | CleverYh/opencv_py | 20b28e8ef20fa3015f4f7c20ed69fed954c16805 | ["MIT"] | null | null | null | 4.1.1-simple-object-tracking-video.py | CleverYh/opencv_py | 20b28e8ef20fa3015f4f7c20ed69fed954c16805 | ["MIT"] | null | null | null |
# coding: utf-8
from cv2 import cv2
import numpy as np
cap = cv2.VideoCapture(0)
while(1):
# Take each frame
_, frame = cap.read()
# Convert BGR to HSV
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
# define range of blue color in HSV
lower_blue = np.array([110,50,50])
upper_blue = np.array([130,255,255])
# Threshold the HSV image to get only blue colors
mask = cv2.inRange(hsv, lower_blue, upper_blue)
# Bitwise-AND mask and original image
res = cv2.bitwise_and(frame,frame, mask= mask)
cv2.imshow('frame',frame)
cv2.imshow('mask',mask)
cv2.imshow('res',res)
k = cv2.waitKey(5) & 0xFF
if k == 27:
break
cv2.destroyAllWindows()
# OBJECT TRACKING
# Take each frame of the video
# Convert from BGR to HSV color-space
# We threshold the HSV image for a range of blue color
# Now extract the blue object alone, we can do whatever on that image we want.
# HOW TO FIND HSV VALUES TO TRACK?
# It is very simple and you can use the same function, cv2.cvtColor(). Instead of passing an image, you just pass the BGR values you want. For example, to find the HSV value of Green, try following commands in Python terminal:
# >>> green = np.uint8([[[0,255,0 ]]])
# >>> hsv_green = cv2.cvtColor(green,cv2.COLOR_BGR2HSV)
# >>> print hsv_green
# [[[ 60 255 255]]]
# Now you take [H-10, 100,100] and [H+10, 255, 255] as lower bound and upper bound respectively. Apart from this method, you can use any image editing tools like GIMP or any online converters to find these values, but don’t forget to adjust the HSV ranges.
| 37.653061 | 256 | 0.701897 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,315 | 0.711195 |
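The HSV lookup described in the trailing comments above, as a runnable Python 3 snippet (using the plain `import cv2` form rather than the file's `from cv2 import cv2`):

```python
import numpy as np
import cv2

green = np.uint8([[[0, 255, 0]]])                   # a single BGR pixel
hsv_green = cv2.cvtColor(green, cv2.COLOR_BGR2HSV)  # convert that pixel to HSV
print(hsv_green)                                    # [[[ 60 255 255]]]
# Track with lower bound [H-10, 100, 100] and upper bound [H+10, 255, 255].
```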
| 5439f19ce894429f825edd092b433b960bae49d4 | 9,411 | py | Python | src/peering/azext_peering/custom.py | michimune/azure-cli-extensions | 697e2c674e5c0825d44c72d714542fe01331e107 | ["MIT"] | 1 | 2022-03-22T15:02:32.000Z | 2022-03-22T15:02:32.000Z | src/peering/azext_peering/custom.py | michimune/azure-cli-extensions | 697e2c674e5c0825d44c72d714542fe01331e107 | ["MIT"] | 1 | 2021-02-10T22:04:59.000Z | 2021-02-10T22:04:59.000Z | src/peering/azext_peering/custom.py | michimune/azure-cli-extensions | 697e2c674e5c0825d44c72d714542fe01331e107 | ["MIT"] | 1 | 2021-06-03T19:31:10.000Z | 2021-06-03T19:31:10.000Z |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=line-too-long
# pylint: disable=too-many-statements
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=unused-argument
import json
def list_peering_legacy(cmd, client,
peering_location=None,
kind=None):
return client.list(peering_location=peering_location, kind=kind)
def create_peering_asn(cmd, client,
name,
peer_asn=None,
emails=None,
phone=None,
peer_name=None,
validation_state=None):
body = {}
body['peer_asn'] = peer_asn # number
body.setdefault('peer_contact_info', {})['emails'] = None if emails is None else emails.split(',')
body.setdefault('peer_contact_info', {})['phone'] = None if phone is None else phone.split(',')
body['peer_name'] = peer_name # str
body['validation_state'] = validation_state # str
return client.create_or_update(peer_asn_name=name, peer_asn=body)
def update_peering_asn(cmd, client,
name,
peer_asn=None,
emails=None,
phone=None,
peer_name=None,
validation_state=None):
body = client.get(peer_asn_name=name).as_dict()
    body['peer_asn'] = peer_asn  # number
    body.setdefault('peer_contact_info', {})['emails'] = None if emails is None else emails.split(',')
    body.setdefault('peer_contact_info', {})['phone'] = None if phone is None else phone.split(',')
    body['peer_name'] = peer_name  # str
    body['validation_state'] = validation_state  # str
return client.create_or_update(peer_asn_name=name, peer_asn=body)
def delete_peering_asn(cmd, client,
name):
return client.delete(peer_asn_name=name)
def list_peering_asn(cmd, client):
return client.list_by_subscription()
def list_peering_location(cmd, client,
kind=None,
direct_peering_type=None):
return client.list(kind=kind, direct_peering_type=direct_peering_type)
def create_peering(cmd, client,
resource_group,
name,
kind,
location,
sku_name=None,
sku_tier=None,
sku_family=None,
sku_size=None,
direct_connections=None,
direct_peer_asn=None,
direct_direct_peering_type=None,
exchange_connections=None,
exchange_peer_asn=None,
peering_location=None,
tags=None):
body = {}
body.setdefault('sku', {})['name'] = sku_name # str
body.setdefault('sku', {})['tier'] = sku_tier # str
body.setdefault('sku', {})['family'] = sku_family # str
body.setdefault('sku', {})['size'] = sku_size # str
body['kind'] = kind # str
body.setdefault('direct', {})['connections'] = json.loads(direct_connections) if isinstance(direct_connections, str) else direct_connections
body.setdefault('direct', {}).setdefault('peer_asn', {})['id'] = direct_peer_asn
body.setdefault('direct', {})['direct_peering_type'] = direct_direct_peering_type # str
# body.setdefault('exchange', {})['connections'] = json.loads(exchange_connections) if isinstance(exchange_connections, str) else exchange_connections
# body.setdefault('exchange', {}).setdefault('peer_asn', {})['id'] = exchange_peer_asn
body['peering_location'] = peering_location # str
body['location'] = location # str
body['tags'] = tags # dictionary
return client.create_or_update(resource_group_name=resource_group, peering_name=name, peering=body)
def update_peering(cmd, client,
resource_group,
name,
sku_name=None,
sku_tier=None,
sku_family=None,
sku_size=None,
kind=None,
direct_connections=None,
direct_peer_asn=None,
direct_direct_peering_type=None,
exchange_connections=None,
exchange_peer_asn=None,
peering_location=None,
location=None,
tags=None):
body = client.get(resource_group_name=resource_group, peering_name=name).as_dict()
    body.setdefault('sku', {})['name'] = sku_name  # str
    body.setdefault('sku', {})['tier'] = sku_tier  # str
    body.setdefault('sku', {})['family'] = sku_family  # str
    body.setdefault('sku', {})['size'] = sku_size  # str
    body['kind'] = kind  # str
    body.setdefault('direct', {})['connections'] = json.loads(direct_connections) if isinstance(direct_connections, str) else direct_connections
    body.setdefault('direct', {}).setdefault('peer_asn', {})['id'] = direct_peer_asn
    body.setdefault('direct', {})['direct_peering_type'] = direct_direct_peering_type  # str
    body.setdefault('exchange', {})['connections'] = json.loads(exchange_connections) if isinstance(exchange_connections, str) else exchange_connections
    body.setdefault('exchange', {}).setdefault('peer_asn', {})['id'] = exchange_peer_asn
    body['peering_location'] = peering_location  # str
    body['location'] = location  # str
    body['tags'] = tags  # dictionary
return client.create_or_update(resource_group_name=resource_group, peering_name=name, peering=body)
def delete_peering(cmd, client,
resource_group,
name):
return client.delete(resource_group_name=resource_group, peering_name=name)
def list_peering(cmd, client,
resource_group):
if resource_group is not None:
return client.list_by_resource_group(resource_group_name=resource_group)
return client.list_by_subscription()
def list_peering_service_location(cmd, client):
return client.list()
def create_peering_service_prefix(cmd, client,
resource_group,
peering_service_name,
name,
prefix=None):
return client.create_or_update(resource_group_name=resource_group, peering_service_name=peering_service_name, prefix_name=name, prefix=prefix)
def update_peering_service_prefix(cmd, client,
resource_group,
peering_service_name,
name,
prefix=None):
return client.create_or_update(resource_group_name=resource_group, peering_service_name=peering_service_name, prefix_name=name, prefix=prefix)
def delete_peering_service_prefix(cmd, client,
resource_group,
peering_service_name,
name):
return client.delete(resource_group_name=resource_group, peering_service_name=peering_service_name, prefix_name=name)
def list_peering_service_prefix(cmd, client,
resource_group,
peering_service_name):
return client.list_by_peering_service(resource_group_name=resource_group, peering_service_name=peering_service_name)
def list_peering_service_provider(cmd, client):
return client.list()
def create_peering_service(cmd, client,
resource_group,
name,
location,
peering_service_location=None,
peering_service_provider=None,
tags=None):
body = {}
body['peering_service_location'] = peering_service_location # str
body['peering_service_provider'] = peering_service_provider # str
body['location'] = location # str
body['tags'] = tags # dictionary
return client.create_or_update(resource_group_name=resource_group, peering_service_name=name, peering_service=body)
def update_peering_service(cmd, client,
resource_group,
name,
peering_service_location=None,
peering_service_provider=None,
location=None,
tags=None):
body = client.get(resource_group_name=resource_group, peering_service_name=name).as_dict()
    body['peering_service_location'] = peering_service_location  # str
    body['peering_service_provider'] = peering_service_provider  # str
    body['location'] = location  # str
    body['tags'] = tags  # dictionary
return client.create_or_update(resource_group_name=resource_group, peering_service_name=name, peering_service=body)
def delete_peering_service(cmd, client,
resource_group,
name):
return client.delete(resource_group_name=resource_group, peering_service_name=name)
def list_peering_service(cmd, client,
resource_group):
if resource_group is not None:
return client.list_by_resource_group(resource_group_name=resource_group)
return client.list_by_subscription()
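# A minimal sketch (not part of the extension) of how the setdefault chaining
# above builds the nested request body without clobbering sibling keys:
if __name__ == "__main__":
    body = {}
    body.setdefault('sku', {})['name'] = 'Basic_Direct_Free'  # hypothetical SKU name
    body.setdefault('sku', {})['tier'] = 'Basic'              # reuses the existing 'sku' dict
    print(body)  # {'sku': {'name': 'Basic_Direct_Free', 'tier': 'Basic'}}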
| 42.013393
| 154
| 0.604824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,267
| 0.13463
|
543ac48e108696b4125575c0e8b5fa9098b4ddb3
| 830
|
py
|
Python
|
votes/migrations/0004_team.py
|
aiventimptner/horizon
|
6e2436bfa81cad55fefd4c0bb67df3c36a9b6deb
|
[
"MIT"
] | null | null | null |
votes/migrations/0004_team.py
|
aiventimptner/horizon
|
6e2436bfa81cad55fefd4c0bb67df3c36a9b6deb
|
[
"MIT"
] | 1
|
2021-06-10T19:59:07.000Z
|
2021-06-10T19:59:07.000Z
|
votes/migrations/0004_team.py
|
aiventimptner/horizon
|
6e2436bfa81cad55fefd4c0bb67df3c36a9b6deb
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.4 on 2020-12-30 00:27
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('votes', '0003_auto_20201229_1301'),
]
operations = [
migrations.CreateModel(
name='Team',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150)),
('slug', models.SlugField()),
('created', models.DateTimeField(auto_now_add=True)),
('members', models.ManyToManyField(related_name='teams', to=settings.AUTH_USER_MODEL)),
],
),
]
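# A minimal sketch of the model this migration corresponds to (inferred from the
# CreateModel operation above; the real votes/models.py may differ):
#
# from django.conf import settings
# from django.db import models
#
# class Team(models.Model):
#     name = models.CharField(max_length=150)
#     slug = models.SlugField()
#     created = models.DateTimeField(auto_now_add=True)
#     members = models.ManyToManyField(settings.AUTH_USER_MODEL, related_name='teams')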
| 31.923077
| 114
| 0.609639
| 704
| 0.848193
| 0
| 0
| 0
| 0
| 0
| 0
| 130
| 0.156627
|
543ac83c6ae50796c548f885ed09b3775131b174
| 576
|
py
|
Python
|
Python/Day 21/score.py
|
Aswinpkrishnan94/Fabulous-Python
|
bafba6d5b3889008299c012625b4a9e1b63b1d44
|
[
"MIT"
] | null | null | null |
Python/Day 21/score.py
|
Aswinpkrishnan94/Fabulous-Python
|
bafba6d5b3889008299c012625b4a9e1b63b1d44
|
[
"MIT"
] | null | null | null |
Python/Day 21/score.py
|
Aswinpkrishnan94/Fabulous-Python
|
bafba6d5b3889008299c012625b4a9e1b63b1d44
|
[
"MIT"
] | null | null | null |
from turtle import Turtle
FONT = ("Arial", 10, "normal")
ALIGN = "center"
class Score(Turtle):
def __init__(self):
super().__init__()
self.score = 0
self.color("White")
self.penup()
self.goto(0, 270)
self.update()
self.hideturtle()
def update(self):
self.write(f"Score : {self.score}", align=ALIGN, font=FONT)
def game_over(self):
        self.goto(0, 0)
        self.write("GAME OVER", align=ALIGN, font=FONT)
def inc(self):
self.score += 1
self.clear()
self.update()
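# A minimal usage sketch (assumes a dark Screen so the white text is visible):
if __name__ == "__main__":
    from turtle import Screen
    screen = Screen()
    screen.bgcolor("black")
    score = Score()
    score.inc()            # clears and rewrites "Score : 1"
    screen.exitonclick()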
| 23.04
| 67
| 0.553819
| 500
| 0.868056
| 0
| 0
| 0
| 0
| 0
| 0
| 65
| 0.112847
|
543c4f51f177e890cbcf4f4101beb26f2ee15486
| 81
|
py
|
Python
|
tests/integration/testdata/buildcmd/PyLayerMake/layer.py
|
renanmontebelo/aws-sam-cli
|
b5cfc46aa9726b5cd006df8ecc08d1b4eedeb9ea
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 2,959
|
2018-05-08T21:48:56.000Z
|
2020-08-24T14:35:39.000Z
|
tests/integration/testdata/buildcmd/PyLayerMake/layer.py
|
renanmontebelo/aws-sam-cli
|
b5cfc46aa9726b5cd006df8ecc08d1b4eedeb9ea
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 1,469
|
2018-05-08T22:44:28.000Z
|
2020-08-24T20:19:24.000Z
|
tests/integration/testdata/buildcmd/PyLayerMake/layer.py
|
renanmontebelo/aws-sam-cli
|
b5cfc46aa9726b5cd006df8ecc08d1b4eedeb9ea
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 642
|
2018-05-08T22:09:19.000Z
|
2020-08-17T09:04:37.000Z
|
import numpy
def layer_method():
return {"pi": "{0:.2f}".format(numpy.pi)}
| 13.5
| 45
| 0.617284
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 13
| 0.160494
|
543cd354a10448d8c328281db21e317c63dd0072
| 5,520
|
py
|
Python
|
bcbio/qc/coverage.py
|
markdunning/bcbio-nextgen
|
37b69efcc5b2b3713b8d5cd207cece4cb343380d
|
[
"MIT"
] | null | null | null |
bcbio/qc/coverage.py
|
markdunning/bcbio-nextgen
|
37b69efcc5b2b3713b8d5cd207cece4cb343380d
|
[
"MIT"
] | null | null | null |
bcbio/qc/coverage.py
|
markdunning/bcbio-nextgen
|
37b69efcc5b2b3713b8d5cd207cece4cb343380d
|
[
"MIT"
] | null | null | null |
"""Coverage based QC calculations.
"""
import glob
import os
import subprocess
from bcbio.bam import ref, readstats, utils
from bcbio.distributed import transaction
from bcbio.heterogeneity import chromhacks
import bcbio.pipeline.datadict as dd
from bcbio.provenance import do
from bcbio.variation import coverage as cov
from bcbio.variation import bedutils
def run(bam_file, data, out_dir):
"""Run coverage QC analysis
"""
out = dict()
out_dir = utils.safe_makedir(out_dir)
if dd.get_coverage(data) and dd.get_coverage(data) not in ["None"]:
merged_bed_file = bedutils.clean_file(dd.get_coverage_merged(data), data, prefix="cov-", simple=True)
target_name = "coverage"
elif dd.get_coverage_interval(data) != "genome":
merged_bed_file = dd.get_variant_regions_merged(data)
target_name = "variant_regions"
else:
merged_bed_file = None
target_name = "genome"
avg_depth = cov.get_average_coverage(target_name, merged_bed_file, data)
if target_name == "coverage":
out_files = cov.coverage_region_detailed_stats(target_name, merged_bed_file, data, out_dir)
else:
out_files = []
out['Avg_coverage'] = avg_depth
samtools_stats_dir = os.path.join(out_dir, os.path.pardir, 'samtools')
from bcbio.qc import samtools
samtools_stats = samtools.run(bam_file, data, samtools_stats_dir)["metrics"]
out["Total_reads"] = total_reads = int(samtools_stats["Total_reads"])
out["Mapped_reads"] = mapped = int(samtools_stats["Mapped_reads"])
out["Mapped_paired_reads"] = int(samtools_stats["Mapped_paired_reads"])
out['Duplicates'] = dups = int(samtools_stats["Duplicates"])
if total_reads:
out["Mapped_reads_pct"] = 100.0 * mapped / total_reads
if mapped:
out['Duplicates_pct'] = 100.0 * dups / mapped
if dd.get_coverage_interval(data) == "genome":
mapped_unique = mapped - dups
else:
mapped_unique = readstats.number_of_mapped_reads(data, bam_file, keep_dups=False)
out['Mapped_unique_reads'] = mapped_unique
if merged_bed_file:
ontarget = readstats.number_of_mapped_reads(
data, bam_file, keep_dups=False, bed_file=merged_bed_file, target_name=target_name)
out["Ontarget_unique_reads"] = ontarget
if mapped_unique:
out["Ontarget_pct"] = 100.0 * ontarget / mapped_unique
out['Offtarget_pct'] = 100.0 * (mapped_unique - ontarget) / mapped_unique
if dd.get_coverage_interval(data) != "genome":
# Skip padded calculation for WGS even if the "coverage" file is specified
# the padded statistic makes only sense for exomes and panels
padded_bed_file = bedutils.get_padded_bed_file(out_dir, merged_bed_file, 200, data)
ontarget_padded = readstats.number_of_mapped_reads(
data, bam_file, keep_dups=False, bed_file=padded_bed_file, target_name=target_name + "_padded")
out["Ontarget_padded_pct"] = 100.0 * ontarget_padded / mapped_unique
if total_reads:
out['Usable_pct'] = 100.0 * ontarget / total_reads
indexcov_files = _goleft_indexcov(bam_file, data, out_dir)
out_files += [x for x in indexcov_files if x and utils.file_exists(x)]
out = {"metrics": out}
if len(out_files) > 0:
out["base"] = out_files[0]
out["secondary"] = out_files[1:]
return out
def _goleft_indexcov(bam_file, data, out_dir):
"""Use goleft indexcov to estimate coverage distributions using BAM index.
Only used for whole genome runs as captures typically don't have enough data
to be useful for index-only summaries.
"""
if not dd.get_coverage_interval(data) == "genome":
return []
out_dir = utils.safe_makedir(os.path.join(out_dir, "indexcov"))
out_files = [os.path.join(out_dir, "%s-indexcov.%s" % (dd.get_sample_name(data), ext))
for ext in ["roc", "ped", "bed.gz"]]
if not utils.file_uptodate(out_files[-1], bam_file):
with transaction.tx_tmpdir(data) as tmp_dir:
tmp_dir = utils.safe_makedir(os.path.join(tmp_dir, dd.get_sample_name(data)))
gender_chroms = [x.name for x in ref.file_contigs(dd.get_ref_file(data)) if chromhacks.is_sex(x.name)]
gender_args = "--sex %s" % (",".join(gender_chroms)) if gender_chroms else ""
cmd = "goleft indexcov --directory {tmp_dir} {gender_args} -- {bam_file}"
try:
do.run(cmd.format(**locals()), "QC: goleft indexcov")
except subprocess.CalledProcessError as msg:
if not ("indexcov: no usable" in str(msg) or
("indexcov: expected" in str(msg) and "sex chromosomes, found:" in str(msg))):
raise
for out_file in out_files:
orig_file = os.path.join(tmp_dir, os.path.basename(out_file))
if utils.file_exists(orig_file):
utils.copy_plus(orig_file, out_file)
# MultiQC needs non-gzipped/BED inputs so unpack the file
out_bed = out_files[-1].replace(".bed.gz", ".tsv")
if utils.file_exists(out_files[-1]) and not utils.file_exists(out_bed):
with transaction.file_transaction(data, out_bed) as tx_out_bed:
cmd = "gunzip -c %s > %s" % (out_files[-1], tx_out_bed)
do.run(cmd, "Unpack indexcov BED file")
out_files[-1] = out_bed
return [x for x in out_files if utils.file_exists(x)]
| 46.386555
| 115
| 0.664312
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,162
| 0.210507
|
543cef330851534694d86f1be5bca5d7e8614e34
| 1,210
|
py
|
Python
|
shrike-examples/contoso/utils/arg_utils.py
|
lynochka/azure-ml-problem-sets
|
e7e69de763444c5603e4455e35e69e917081a4cc
|
[
"MIT"
] | 3
|
2021-07-27T16:28:51.000Z
|
2021-11-15T18:29:02.000Z
|
shrike-examples/contoso/utils/arg_utils.py
|
lynochka/azure-ml-problem-sets
|
e7e69de763444c5603e4455e35e69e917081a4cc
|
[
"MIT"
] | null | null | null |
shrike-examples/contoso/utils/arg_utils.py
|
lynochka/azure-ml-problem-sets
|
e7e69de763444c5603e4455e35e69e917081a4cc
|
[
"MIT"
] | 7
|
2021-08-09T15:04:03.000Z
|
2022-03-09T05:48:56.000Z
|
"""
Utility functions for argument parsing
"""
import argparse
def str2bool(val):
"""
Resolving boolean arguments if they are not given in the standard format
Arguments:
val (bool or string): boolean argument type
Returns:
bool: the desired value {True, False}
"""
    if isinstance(val, bool):
        return val
    if isinstance(val, str):
        if val.lower() in ("yes", "true", "t", "y", "1"):
            return True
        elif val.lower() in ("no", "false", "f", "n", "0"):
            return False
    raise argparse.ArgumentTypeError("Boolean value expected.")
def str2intlist(val):
"""Converts comma separated string of integers into list of integers
Args:
val (str): comma separate string of integers
"""
return commastring2list(int)(val)
def commastring2list(output_type=str):
"""Returns a lambda function which converts a comma separated string into a list of a given type
Args:
output_type (function, optional): string type conversion function. Defaults to str.
Returns:
function: lambda function
"""
return lambda input_str: list(map(output_type, input_str.split(",")))
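# A minimal usage sketch (hypothetical flags, not part of the module):
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--verbose", type=str2bool, default=False)
    parser.add_argument("--layers", type=str2intlist, default=[64, 32])
    args = parser.parse_args(["--verbose", "yes", "--layers", "64,32,16"])
    assert args.verbose is True and args.layers == [64, 32, 16]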
| 25.744681
| 100
| 0.638017
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 725
| 0.599174
|
543dd03030508ee683df7a6d3985dc5051235db5
| 277
|
py
|
Python
|
tests/test_learner.py
|
luksurious/faster-teaching
|
1493311d5b723ca3f216f537bda8db5907196443
|
[
"MIT"
] | 2
|
2020-08-06T13:21:51.000Z
|
2021-04-15T04:29:03.000Z
|
tests/test_learner.py
|
luksurious/faster-teaching
|
1493311d5b723ca3f216f537bda8db5907196443
|
[
"MIT"
] | null | null | null |
tests/test_learner.py
|
luksurious/faster-teaching
|
1493311d5b723ca3f216f537bda8db5907196443
|
[
"MIT"
] | null | null | null |
from concepts.letter_addition import LetterAddition
from learners.sim_memoryless_learner import SimMemorylessLearner
def test_see_example():
concept = LetterAddition(6)
learner = SimMemorylessLearner(concept, list(range(0, 7)))
learner.see_example(((0, 1), 10))
| 27.7
| 64
| 0.776173
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
543dded51722ade60b4b464e9cde6ba374678fe4
| 2,536
|
py
|
Python
|
piper/jde.py
|
miketarpey/piper
|
d1620727889228d61fbe448f4747cef9351ede59
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
piper/jde.py
|
miketarpey/piper
|
d1620727889228d61fbe448f4747cef9351ede59
|
[
"BSD-2-Clause-FreeBSD"
] | 24
|
2021-02-03T17:06:13.000Z
|
2021-04-02T13:09:13.000Z
|
piper/jde.py
|
miketarpey/piper
|
d1620727889228d61fbe448f4747cef9351ede59
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
import logging
import pandas as pd
from datetime import datetime
from typing import (
Any,
Callable,
Dict,
Hashable,
Iterable,
List,
NamedTuple,
Optional,
Pattern,
Set,
Tuple,
Union,
)
logger = logging.getLogger(__name__)
# add_jde_batch() {{{1
def add_jde_batch(df: pd.DataFrame,
col_prefix: str = 'ed',
userid: str = 'userid',
batch: str = 'ABC',
start: int = 100,
step: int = 100) -> pd.DataFrame:
''' Add 'standard' JDE timestamp/default columns.
For given dataframe, adds the following standard Z-file columns.
User ID (edus)
Batch Number (edbt)
Transaction Number (edtn)
Line Number (edln)
Examples
--------
from piper.defaults import *
from piper.jde import *
.. code-block:
%%piper
sample_sales() >>
select('-target_profit', '-location', '-month') >>
reset_index(drop=True) >>
add_jde_batch(start=3) >>
head(tablefmt='plain')
edus edbt edtn edln product target_sales actual_sales actual_profit
0 userid ABC_20210331 1 3 Beachwear 31749 29209 1753
1 userid ABC_20210331 1 103 Beachwear 37833 34050 5448
2 userid ABC_20210331 1 203 Jeans 29485 31549 4417
3 userid ABC_20210331 1 303 Jeans 37524 40901 4090
Parameters
----------
df : the pandas dataframe object
col_prefix : 2 character (e.g. 'ed') column name prefix to be
applied to the added columns
userid : default userid text value
    batch : prefix (e.g. 'ABC') concatenated to the current timestamp
    start : start number in the xxln column
    step : step increment in the xxln column
Returns
-------
A pandas dataframe
'''
timestamp = datetime.now().strftime('_%Y%m%d')
start_position = 0
range_seq = range(start, (df.shape[0]+1)*step, step)
df.insert(start_position, f'{col_prefix}us', userid)
df.insert(start_position+1, f'{col_prefix}bt', batch + timestamp)
df.insert(start_position+2, f'{col_prefix}tn', 1)
df.insert(start_position+3, f'{col_prefix}ln', pd.Series(range_seq))
return df
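# A minimal usage sketch on a hypothetical two-row frame (see the docstring
# example above for realistic output):
if __name__ == "__main__":
    toy = pd.DataFrame({'product': ['Beachwear', 'Jeans']})
    out = add_jde_batch(toy, userid='jdoe', batch='INV', start=100, step=100)
    print(out['edln'].tolist())   # [100, 200]; 'edbt' is 'INV_' + today's date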
| 27.868132
| 109
| 0.542587
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,658
| 0.653785
|
543e07ad4f4ef4e280a96b2a4575d3e61db5448a
| 2,159
|
py
|
Python
|
codes/utils.py
|
epfml/byzantine-robust-noniid-optimizer
|
0e27349ac99235251110d54dd102fda0091bf274
|
[
"MIT"
] | 7
|
2021-06-22T03:12:15.000Z
|
2022-01-06T16:11:14.000Z
|
codes/utils.py
|
epfml/byzantine-robust-noniid-optimizer
|
0e27349ac99235251110d54dd102fda0091bf274
|
[
"MIT"
] | null | null | null |
codes/utils.py
|
epfml/byzantine-robust-noniid-optimizer
|
0e27349ac99235251110d54dd102fda0091bf274
|
[
"MIT"
] | 2
|
2021-12-12T13:28:02.000Z
|
2022-02-18T13:22:20.000Z
|
import os
import shutil
import logging
class BColors(object):
HEADER = "\033[95m"
OK_BLUE = "\033[94m"
OK_CYAN = "\033[96m"
OK_GREEN = "\033[92m"
WARNING = "\033[93m"
FAIL = "\033[91m"
END_C = "\033[0m"
BOLD = "\033[1m"
UNDERLINE = "\033[4m"
def touch(fname: str, times=None, create_dirs: bool = False):
if create_dirs:
base_dir = os.path.dirname(fname)
if not os.path.exists(base_dir):
os.makedirs(base_dir)
with open(fname, "a"):
os.utime(fname, times)
def touch_dir(base_dir: str) -> None:
if not os.path.exists(base_dir):
os.makedirs(base_dir)
def accuracy(output, target, topk=(1,)):
"""Computes the precision@k for the specified values of k"""
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].view(-1).float().sum(0)
res.append(correct_k.mul_(100.0 / batch_size))
return res
def top1_accuracy(output, target):
return accuracy(output, target, topk=(1,))[0].item()
def log(*args, **kwargs):
pass
def log_dict(*args, **kwargs):
pass
def initialize_logger(log_root):
if not os.path.exists(log_root):
os.makedirs(log_root)
else:
shutil.rmtree(log_root)
os.makedirs(log_root)
print(f"Logging files to {log_root}")
# Only to file; One dict per line; Easy to process
json_logger = logging.getLogger("stats")
json_logger.setLevel(logging.INFO)
fh = logging.FileHandler(os.path.join(log_root, "stats"))
fh.setLevel(logging.INFO)
fh.setFormatter(logging.Formatter("%(message)s"))
json_logger.addHandler(fh)
debug_logger = logging.getLogger("debug")
debug_logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
ch.setFormatter(logging.Formatter("%(message)s"))
debug_logger.addHandler(ch)
fh = logging.FileHandler(os.path.join(log_root, "debug"))
fh.setLevel(logging.INFO)
debug_logger.addHandler(fh)
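# A minimal sketch of accuracy()/top1_accuracy() on toy tensors (torch is
# imported locally here because the module itself never imports it):
if __name__ == "__main__":
    import torch
    output = torch.tensor([[0.1, 0.9], [0.8, 0.2]])  # two samples, two classes
    target = torch.tensor([1, 1])                    # second prediction is wrong
    print(top1_accuracy(output, target))             # 50.0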
| 25.104651
| 64
| 0.642427
| 238
| 0.110236
| 0
| 0
| 0
| 0
| 0
| 0
| 284
| 0.131542
|
543e913c7932efd8a58e4692b8be276e0e6a692e
| 2,090
|
py
|
Python
|
setup.py
|
robertjanes/drawbot
|
5a0a2ce55cda3f87624ae8c028d9d59aceee3897
|
[
"BSD-2-Clause"
] | null | null | null |
setup.py
|
robertjanes/drawbot
|
5a0a2ce55cda3f87624ae8c028d9d59aceee3897
|
[
"BSD-2-Clause"
] | null | null | null |
setup.py
|
robertjanes/drawbot
|
5a0a2ce55cda3f87624ae8c028d9d59aceee3897
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python
from __future__ import division, absolute_import, print_function
from setuptools import setup
import os
import re
import shutil
_versionRE = re.compile(r'__version__\s*=\s*\"([^\"]+)\"')
# read the version number for the settings file
with open('drawBot/drawBotSettings.py', "r") as settings:
code = settings.read()
found = _versionRE.search(code)
assert found is not None, "drawBot __version__ not found"
__version__ = found.group(1)
externalTools = ("ffmpeg", "gifsicle", "mkbitmap", "potrace")
externalToolsSourceRoot = os.path.join(os.path.dirname(__file__), "Resources", "externalTools")
externalToolsDestRoot = os.path.join(os.path.dirname(__file__), "drawBot", "context", "tools")
# copy all external tools into drawBot.context.tools folder
for externalTool in externalTools:
source = os.path.join(externalToolsSourceRoot, externalTool)
dest = os.path.join(externalToolsDestRoot, externalTool)
shutil.copyfile(source, dest)
os.chmod(dest, 0o775)
setup(name="drawBot",
version=__version__,
description="DrawBot is a powerful tool that invites you to write simple Python scripts to generate two-dimensional graphics. The builtin graphics primitives support rectangles, ovals, (bezier) paths, polygons, text objects and transparency.",
author="Just van Rossum, Erik van Blokland, Frederik Berlaen",
author_email="frederik@typemytype.com",
url="http://drawbot.com",
license="BSD",
packages=[
"drawBot",
"drawBot.context",
"drawBot.context.tools",
"drawBot.ui"
],
package_data={
"drawBot": [
"context/tools/ffmpeg",
"context/tools/gifsicle",
"context/tools/mkbitmap",
"context/tools/potrace"
]
},
install_requires=[
"pyobjc",
"fontTools",
"booleanOperations",
"pillow"
],
include_package_data=True,
)
# remove all external tools
for externalTool in externalTools:
dest = os.path.join(externalToolsDestRoot, externalTool)
os.remove(dest)
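# A minimal sketch of the version extraction above (hypothetical file content):
# >>> _versionRE.search('__version__ = "3.126"').group(1)
# '3.126'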
| 32.65625
| 247
| 0.688517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 890
| 0.425837
|
543ed68a45e19a13dd2f2c914498c100fd410df9
| 784
|
py
|
Python
|
src/homework/i_dictionaries_sets/dictionary.py
|
acc-cosc-1336-spring-2022/acc-cosc-1336-spring-2022-WillCapo
|
426db13aa4d5f6005d7079007ff5fdf114ef649e
|
[
"MIT"
] | null | null | null |
src/homework/i_dictionaries_sets/dictionary.py
|
acc-cosc-1336-spring-2022/acc-cosc-1336-spring-2022-WillCapo
|
426db13aa4d5f6005d7079007ff5fdf114ef649e
|
[
"MIT"
] | null | null | null |
src/homework/i_dictionaries_sets/dictionary.py
|
acc-cosc-1336-spring-2022/acc-cosc-1336-spring-2022-WillCapo
|
426db13aa4d5f6005d7079007ff5fdf114ef649e
|
[
"MIT"
] | 1
|
2022-02-09T02:28:56.000Z
|
2022-02-09T02:28:56.000Z
|
def get_p_distance(list1, list2):
    # p-distance: the proportion of positions at which the two sequences differ
    count = 0
    i = 0
    while i < len(list1):
        if (list1[i] != list2[i]):
            count += 1
        i += 1
    return count / len(list1)
def get_p_distance_matrix(list1, list2, list3, list4):
dna1 = get_p_distance(list1, list1), get_p_distance(list1, list2), get_p_distance(list1, list3), get_p_distance(list1, list4)
dna2 = get_p_distance(list2, list1), get_p_distance(list2, list2), get_p_distance(list2, list3), get_p_distance(list2, list4)
dna3 = get_p_distance(list3, list1), get_p_distance(list3, list2), get_p_distance(list3, list3), get_p_distance(list3, list4)
dna4 = get_p_distance(list4, list1), get_p_distance(list4, list2), get_p_distance(list4, list3), get_p_distance(list4, list4)
return dna1, dna2, dna3, dna4
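# A minimal usage sketch with four hypothetical length-10 DNA strings:
if __name__ == "__main__":
    d1, d2, d3, d4 = "TTTCCATTTA", "GATTCATTTC", "TTTCCATTTT", "GTTCCATTTA"
    print(get_p_distance(d1, d3))                  # 0.1 -- one mismatch in ten
    for row in get_p_distance_matrix(d1, d2, d3, d4):
        print(row)                                 # 4x4 pairwise p-distances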
| 52.266667
| 129
| 0.69898
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
543fd7e53080b049a8ec4e7ace7dac2f370068e8
| 38,634
|
py
|
Python
|
pysnmp-with-texts/ZHONE-COM-IP-FILTER-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 8
|
2019-05-09T17:04:00.000Z
|
2021-06-09T06:50:51.000Z
|
pysnmp-with-texts/ZHONE-COM-IP-FILTER-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 4
|
2019-05-31T16:42:59.000Z
|
2020-01-31T21:57:17.000Z
|
pysnmp-with-texts/ZHONE-COM-IP-FILTER-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module ZHONE-COM-IP-FILTER-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ZHONE-COM-IP-FILTER-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:47:04 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Gauge32, Counter64, iso, Integer32, ModuleIdentity, ObjectIdentity, IpAddress, Unsigned32, MibIdentifier, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, NotificationType, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "Counter64", "iso", "Integer32", "ModuleIdentity", "ObjectIdentity", "IpAddress", "Unsigned32", "MibIdentifier", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "NotificationType", "Counter32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
zhoneModules, zhoneIp = mibBuilder.importSymbols("Zhone", "zhoneModules", "zhoneIp")
ZhoneRowStatus, ZhoneAdminString = mibBuilder.importSymbols("Zhone-TC", "ZhoneRowStatus", "ZhoneAdminString")
comIpFilter = ModuleIdentity((1, 3, 6, 1, 4, 1, 5504, 6, 58))
comIpFilter.setRevisions(('2005-01-10 10:16', '2005-01-03 09:24', '2004-12-21 09:25', '2004-08-30 11:00', '2004-04-06 00:17', '2001-01-17 08:48', '2000-09-11 16:22',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: comIpFilter.setRevisionsDescriptions(('changed portAccessArg1, portAccessArg2 to more intuitive names.', 'changed portArg1, portArg2 to IP addresses', 'added Port_Access', 'V01.01.02 - Add type field to mcastControlList.', 'V01.01.01 - Implementation of multicast-control-list.', 'V01.01.00 - Added keyword markup, updated SMI, Added the filterStmtRenumTable and filterStatsTable', 'V01.00.00 - Initial Release',))
if mibBuilder.loadTexts: comIpFilter.setLastUpdated('200501100015Z')
if mibBuilder.loadTexts: comIpFilter.setOrganization('Zhone Technologies, Inc.')
if mibBuilder.loadTexts: comIpFilter.setContactInfo(' Postal: Zhone Technologies, Inc. @ Zhone Way 7001 Oakport Street Oakland, CA 94621 USA Toll-Free: +1 877-ZHONE20 (+1 877-946-6320) Tel: +1-510-777-7000 Fax: +1-510-777-7001 E-mail: support@zhone.com')
if mibBuilder.loadTexts: comIpFilter.setDescription('Zhone IP Filter MIB Module. IP Software Minneapolis, MN')
filter = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8))
if mibBuilder.loadTexts: filter.setStatus('current')
if mibBuilder.loadTexts: filter.setDescription('The MIB module representing IP filter specifications in Zhone Technologies products. IP filtering is typically performed to enhance network security by limiting what access is allowed between two networks. Filtering is also effective in eliminating certain denial-of-service attacks. Packet filtering also provides a framework for sanity checking packet headers, and rejecting packets that are unlikely (or that should be impossible). In this way, packet filtering can prevent certain unfortunate mistakes from shutting a network down.')
if mibBuilder.loadTexts: filter.setReference("RFC1812, 'Requirements for IP Version 4 Routers,' ftp://ftp.isi.edu/in-notes/rfc1812.txt. RFC2267, 'Network Ingress Filtering: Defeating Denial of Service Attacks which employ IP Source Address Spoofing,' ftp://ftp.isi.edu/in-notes/rfc2267.txt. RFC2474, 'Definition of the Differentiated Services Field (DS Field) in the IPv4 and IPv6 Headers', ftp://ftp.isi.edu/in-notes/rfc2474.txt. D. Brent Chapman, 'Network (In)Security Through IP Packet Filtering,' Proceedings of the 3rd USENIX Security Symposium, Sept. 1992. Andrew Molitor, 'An Architecture for Advanced Packet Filtering,' Proceedings of the 5th USENIX Security Symposium, June. 1995. Paul Russell, 'Linux IPCHAINS-HOWTO,' http://www.rustcorp.com/linux/ipchains/HOWTO.html, v1.0.7, Mar. 1999.")
filterGlobal = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 1))
if mibBuilder.loadTexts: filterGlobal.setStatus('current')
if mibBuilder.loadTexts: filterGlobal.setDescription('Global filter provisioning information.')
fltGlobalIndexNext = MibScalar((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fltGlobalIndexNext.setStatus('current')
if mibBuilder.loadTexts: fltGlobalIndexNext.setDescription('The next available filter spec table index (filterSpecIndex). A GET on this object increments the value by one. A GETNEXT on this object will always return zero.')
fltGlobalTimeout = MibScalar((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: fltGlobalTimeout.setStatus('current')
if mibBuilder.loadTexts: fltGlobalTimeout.setDescription('Filter inconsistency timeout in seconds. A filter spec is considered to be in an inconsistent state when the value of the objects fltSpecVersion1 and fltSpecVersion2 are not equal. This timeout indicates the minimum number of seconds a filter may be in an inconsistent state before the filter spec becomes invalid and the default action for a filter is used as the filter. Provided fltGlobalTimeout is long enough, it should ensure that both an old modification is permanently stalled (ensuring exclusive access) as well as enough time to repair a filter. Default is five seconds.')
filterSpecTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2), )
if mibBuilder.loadTexts: filterSpecTable.setStatus('current')
if mibBuilder.loadTexts: filterSpecTable.setDescription("The filter specification table contains specifications for the IP filtering module. Rows are indexed by a single integer index (filterSpecIndex). The fltGlobalIndexNext object is used to determine the next index value. Each row points to a sequence of rows (statements) in the filterStatementTable. When any row in that sequence is modified, created, or removed, the fltSpecVersion1 and fltSpecVersion2 objects must be incremented. Rows are created by assigning fltSpecIndex and setting fltSpecRowStatus to 'createAndGo'. All columnar objects in this table have default values, so no objects other than the index value need be set to create a row. Rows are removed by setting fltSpecRowStatus to 'destroy'. When a row is removed, each row in filterStatementTable with the same fltSpecIndex is automatically removed.")
filterSpecEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1), ).setIndexNames((0, "ZHONE-COM-IP-FILTER-MIB", "fltSpecIndex"))
if mibBuilder.loadTexts: filterSpecEntry.setStatus('current')
if mibBuilder.loadTexts: filterSpecEntry.setDescription('An entry in the filterSpecTable.')
fltSpecIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: fltSpecIndex.setStatus('current')
if mibBuilder.loadTexts: fltSpecIndex.setDescription('The index that identifies an entry in the filterSpecTable. The fltGlobalIndexNext object is used to determine the next value of this object.')
fltSpecName = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1, 2), ZhoneAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltSpecName.setStatus('current')
if mibBuilder.loadTexts: fltSpecName.setDescription('The filter name associated with this filter specification. This name should indicate the nature of the filter. The default value is an empty string.')
fltSpecDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1, 3), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltSpecDesc.setStatus('current')
if mibBuilder.loadTexts: fltSpecDesc.setDescription('Textual description of the filter specification. This should briefly describe the nature of the filter defined by the associated filter statements. The default value is an empty string.')
fltSpecVersion1 = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1, 4), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltSpecVersion1.setStatus('current')
if mibBuilder.loadTexts: fltSpecVersion1.setDescription('The version number of the filter specification. This is used to flag any changes in the statements that comprise a filter. Each time a modification occurs to an object in a filter spec (including the the list of filter statements of the same fltSpecIndex in filterStatementTable), the value of this object, and fltSpecVersion2 must be incremented. The manager adding, deleting, or modifying a filter statement or statements must increment this version number in the following manner. A read of fltSpecVersion1 returns its current value. A write to fltSpecVersion1 must be one greater than its current value. A successful write of this object transfers ownership to the manager, where the manager must subsequently perform any desired modifications to the filter spec and then write the new value of fltSpecVersion1 to the fltSpecVersion2 object to release ownership. When fltSpecVersion1 does not equal to fltSpecVersion2, the filter spec is in an inconsistent state. If the filter spec remains in an inconsistent state longer than the time specified in fltGlobalTimeout, the filter spec is declared invalid and the filter spec does not become active. The previously provisioned filter spec will remain active. If no previous filter spec was provisioned for this interface, a default action is used. It is up to the manager to fix the invalid filter spec and bring it into a consistent state.')
fltSpecVersion2 = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1, 5), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltSpecVersion2.setStatus('current')
if mibBuilder.loadTexts: fltSpecVersion2.setDescription('The version number of the filter specification. The value of this object must be equal to fltSpecVersion1, otherwise the filter spec is inconsistent. See fltSpecVersion1 for details.')
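# The two version objects above implement a simple ownership handshake for
# filter edits; paraphrasing the descriptions, a manager is expected to:
#   1. read fltSpecVersion1 (current value v)
#   2. write v+1 to fltSpecVersion1   -- acquires ownership of the spec
#   3. add/modify/delete the filter statements
#   4. write v+1 to fltSpecVersion2   -- releases ownership
# While fltSpecVersion1 != fltSpecVersion2 the spec is inconsistent, and after
# fltGlobalTimeout seconds an inconsistent spec is declared invalid.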
fltSpecLanguageVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1, 6), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltSpecLanguageVersion.setStatus('current')
if mibBuilder.loadTexts: fltSpecLanguageVersion.setDescription('The language version of the filter. The language version further details the meaning and use of the objects in filterStatmentTable. The definitions of the filter languages is beyond the scope of this description.')
fltSpecRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 2, 1, 7), ZhoneRowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltSpecRowStatus.setStatus('current')
if mibBuilder.loadTexts: fltSpecRowStatus.setDescription('Zhone convention to support row creation and deletion. This is the only object required to create or destroy a row in this table.')
filterStatementTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3), )
if mibBuilder.loadTexts: filterStatementTable.setStatus('current')
if mibBuilder.loadTexts: filterStatementTable.setDescription("This table contains the filter specification statements for the IP filtering module. A complete filter specification is comprised of all the linked statements (rows) that are pointed to by an entry in the filterSpecTable. Filter statements are linked together by fltSpecIndex, and are ordered within the comprised filter using fltStmtIndex. A statement can only be owned by one filter spec. Rows are created by assigning fltSpecIndex and fltStmtIndex, and setting fltStmtRowStatus to 'createAndGo'. All columnar objects in this table have default values, so no objects other than the index values need be set to create a row. Rows are destroyed by setting fltStmtRowStatus to 'delete'. When rows are created or destroyed, the version of the corresponding filter spec row is incremented.")
filterStatementEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1), ).setIndexNames((0, "ZHONE-COM-IP-FILTER-MIB", "fltSpecIndex"), (0, "ZHONE-COM-IP-FILTER-MIB", "fltStmtIndex"))
if mibBuilder.loadTexts: filterStatementEntry.setStatus('current')
if mibBuilder.loadTexts: filterStatementEntry.setDescription('An entry in the filterStatement table. Each entry represents one of a sequence of statements that comprise a filter. Each filter statement consists of an index, specific packet header fields, and arbitrary packet offsets and values. Some objects in this entry define ranges for specific packet header fields. These objects define comparison operations on the field they share in the following manner: Low High Compare Method for field f --- ---- ------------------------------------------- 0 0 no comparison on the field 0 H less than or equal to High (f <= H) L 0 exact match (L == f) L H inclusive between comparison (L <= f <= H) ')
fltStmtIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: fltStmtIndex.setStatus('current')
if mibBuilder.loadTexts: fltStmtIndex.setDescription('The table index that identifies a filter statement. These indices should be sparse to allow for insertion into the list.')
fltStmtIpSrcAddrLow = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 2), IpAddress().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtIpSrcAddrLow.setStatus('current')
if mibBuilder.loadTexts: fltStmtIpSrcAddrLow.setDescription('The inclusive lower bound for the source IP address range. See the filterStatementEntry description for details.')
fltStmtIpSrcAddrHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 3), IpAddress().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtIpSrcAddrHigh.setStatus('current')
if mibBuilder.loadTexts: fltStmtIpSrcAddrHigh.setDescription('The inclusive upper bound for the source IP address range. See the filterStatementEntry description for details.')
fltStmtSrcPortLow = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtSrcPortLow.setStatus('current')
if mibBuilder.loadTexts: fltStmtSrcPortLow.setDescription('The inclusive lower bound for the transport layer source port range. See the filterStatementEntry description for details.')
fltStmtSrcPortHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtSrcPortHigh.setStatus('current')
if mibBuilder.loadTexts: fltStmtSrcPortHigh.setDescription('The inclusive upper bound for the transport layer source port range. See the filterStatementEntry description for details.')
fltStmtIpDstAddrLow = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 6), IpAddress().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtIpDstAddrLow.setStatus('current')
if mibBuilder.loadTexts: fltStmtIpDstAddrLow.setDescription('The inclusive lower bound for the destination IP address range. See the filterStatementEntry description for details.')
fltStmtIpDstAddrHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 7), IpAddress().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtIpDstAddrHigh.setStatus('current')
if mibBuilder.loadTexts: fltStmtIpDstAddrHigh.setDescription('The inclusive upper bound for the destination IP address range. See the filterStatementEntry description for details.')
fltStmtDstPortLow = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtDstPortLow.setStatus('current')
if mibBuilder.loadTexts: fltStmtDstPortLow.setDescription('The inclusive lower bound for the transport layer destination port range. See the filterStatementEntry description for details.')
fltStmtDstPortHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtDstPortHigh.setStatus('current')
if mibBuilder.loadTexts: fltStmtDstPortHigh.setDescription('The inclusive upper bound for the transport layer destination port range. See the filterStatementEntry description for details.')
fltStmtIpProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("any", 1), ("ip", 2), ("tcp", 3), ("udp", 4), ("icmp", 5))).clone('any')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtIpProtocol.setStatus('current')
if mibBuilder.loadTexts: fltStmtIpProtocol.setDescription('The IP protocol value that is to be matched. The enum values are as follows: any(1) : any protocol type is a match (wildcard) ip(2) : raw IP packet tcp(3) : TCP packet udp(4) : UDP packet icmp(5) : ICMP packet The default value is any(1).')
fltStmtArbValueBase = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("none", 1), ("ip", 2), ("udp", 3), ("tcp", 4), ("icmp", 5), ("ipOptions", 6), ("tcpOptions", 7))).clone('none')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtArbValueBase.setStatus('current')
if mibBuilder.loadTexts: fltStmtArbValueBase.setDescription('This field identifies the protocol header to which the arbitrary value comparison applies. The enum values are as follows: none(1) : no arbitrary value comparison ip(2) : base is IP header udp(3) : base is UDP header tcp(4) : base is TCP header icmp(5) : base is ICMP header ipOptions(6) : base is IP options header tcpOptions(7) : base is TCP options header The default value is none(1).')
fltStmtArbOffset = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 64))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtArbOffset.setStatus('current')
if mibBuilder.loadTexts: fltStmtArbOffset.setDescription('The offset, in octets, from the beginning of the header to the most significant octet for the arbitrary value comparison.')
fltStmtArbMask = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 13), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtArbMask.setStatus('current')
if mibBuilder.loadTexts: fltStmtArbMask.setDescription('This object is the mask for arbitrary value comparisons. The non-zero bits in this field determine the size of the arbitrary field.')
fltStmtArbValueLow = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 14), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtArbValueLow.setStatus('current')
if mibBuilder.loadTexts: fltStmtArbValueLow.setDescription('This object is the inclusive lower bound for arbitrary value comparison. See the filterStatementEntry description for details.')
fltStmtArbValueHigh = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 15), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtArbValueHigh.setStatus('current')
if mibBuilder.loadTexts: fltStmtArbValueHigh.setDescription('This object is the inclusive upper bound for arbitrary value comparison. See the filterStatementEntry description for details.')
fltStmtModifier = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 16), Bits().clone(namedValues=NamedValues(("notIpSrc", 0), ("notSrcPort", 1), ("notDstIp", 2), ("notPortDst", 3), ("notProtocol", 4), ("notArbitrary", 5), ("notStatement", 6)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtModifier.setStatus('current')
if mibBuilder.loadTexts: fltStmtModifier.setDescription('Filter statement modifier. The bits set in this object logically negate the results of the comparisons made on their respective fields as shown: notIpSrcAddr(1) : fltStmtIpSrcAddrLow, fltStmtIpSrcAddrHigh notSrcPort(2) : fltStmtSrcPortLow, fltStmtSrcPortHigh notIpDstAddr(3) : fltStmtIpDstAddrLow, fltStmtIpDstAddrHigh notDstPort(4) : fltStmtDstPortLow, fltStmtDstPortHigh notIpProtocol(5) : fltStmtIpProtocol notArbitrary(6) : fltStmtArbValueLow, fltStmtArbValueHigh notStatement(7) : negate outcome of the entire statement No bits set (the default) specifies to use all outcomes as is.')
fltStmtAction = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 17), Bits().clone(namedValues=NamedValues(("reset", 0), ("permit", 1), ("deny", 2), ("forward", 3), ("reject", 4), ("log", 5))).clone(namedValues=NamedValues(("deny", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtAction.setStatus('current')
if mibBuilder.loadTexts: fltStmtAction.setDescription('Filter statement action. The bits set in this object specify actions to take on packets matching this statement. Supported actions are: reset(0) : Return a TCP reset packet to the packet sender and drop the packet. This cannot be specified with permit. permit(1) : Stop filtering the packet and allow it to be sent on the associated interface. This cannot be specified with deny. deny(2) : Stop filtering the packet and discard it. This cannot be specified with permit. forward(3) : Forward the packet to the IP address specified in fltStmtActionArg. reject(4) : Return an ICMP destination unreachable packet (type 3) to the packet sender with code 13 (communication administratively prohibited). This cannot be specified with permit. log(5) : Write the packet to the log stream. There are some mutually exclusive bits: reset(0) and permit(1), permit(1) and deny(2), permit(1) and reject(4). No bits set implies to continue filtering on the packet.')
fltStmtActionArg = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 18), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtActionArg.setStatus('current')
if mibBuilder.loadTexts: fltStmtActionArg.setDescription('Filter statement action argument. The meaning of this object depends on the value of fltStmtAction: forward(3) : An IP address to forward the packet to. The value of this object must be non-zero. All other values of fltStmtAction have no relation to this object. The default is zero.')
fltStmtRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 3, 1, 19), ZhoneRowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fltStmtRowStatus.setStatus('current')
if mibBuilder.loadTexts: fltStmtRowStatus.setDescription('Zhone convention to support row creation and deletion. This is the only object required to create or destroy a row in this table.')
filterStmtRenumTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 4), )
if mibBuilder.loadTexts: filterStmtRenumTable.setStatus('current')
if mibBuilder.loadTexts: filterStmtRenumTable.setDescription('This table provides a mechanism for renumbering individual filter statments within their particular filter spec.')
filterStmtRenumEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 4, 1), )
filterStatementEntry.registerAugmentions(("ZHONE-COM-IP-FILTER-MIB", "filterStmtRenumEntry"))
filterStmtRenumEntry.setIndexNames(*filterStatementEntry.getIndexNames())
if mibBuilder.loadTexts: filterStmtRenumEntry.setStatus('current')
if mibBuilder.loadTexts: filterStmtRenumEntry.setDescription('An entry in the filterStmtRenumTable.')
fltStmtIndexNew = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fltStmtIndexNew.setStatus('current')
if mibBuilder.loadTexts: fltStmtIndexNew.setDescription("The new statement index for the filter statement. Reading this object will return the same value as the 'fltStmtIndex' portion of its index. Writing to this object will cause the corresponding filter statement to be relocated to the position identified by the value written here. If no statement exists at the current index, 'no such instance' will be returned. If a statement already exists at the new index then 'inconsistent value' is returned. For example, to move the second statement of filter #4 to the third position (e.g. to make room for a new statement #2), the following SNMP set-request would be issued: fltStmtIndexNew.4.2 = 3 There is no default value for this object as it is derived from the fltStmtIndex.")
filterStatsTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5), )
if mibBuilder.loadTexts: filterStatsTable.setStatus('current')
if mibBuilder.loadTexts: filterStatsTable.setDescription('This table provides ingress and egress IP filter statistics for each interface. This table is indexed by the ifIndex of the interface and the direction (ingress or egress) of traffic being filtered. This is a read-only table.')
filterStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "ZHONE-COM-IP-FILTER-MIB", "fltStatDirection"))
if mibBuilder.loadTexts: filterStatsEntry.setStatus('current')
if mibBuilder.loadTexts: filterStatsEntry.setDescription('An entry in the filterStatsTable. There will be an entry for each filter provisioned on an interface. There can be, at most, two filters provisioned per interface; one for ingress filtering and the other for egress filtering.')
fltStatDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ingress", 1), ("egress", 2))))
if mibBuilder.loadTexts: fltStatDirection.setStatus('current')
if mibBuilder.loadTexts: fltStatDirection.setDescription('The direction for which this set of statistics is kept: ingress or egress.')
fltStatResetPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 2), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatResetPkts.setStatus('current')
if mibBuilder.loadTexts: fltStatResetPkts.setDescription('The number of discarded packets for which a TCP reset packet was sent.')
fltStatPermitPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 3), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatPermitPkts.setStatus('current')
if mibBuilder.loadTexts: fltStatPermitPkts.setDescription('The number of permitted packets.')
fltStatDenyPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 4), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatDenyPkts.setStatus('current')
if mibBuilder.loadTexts: fltStatDenyPkts.setDescription('The number of discarded packets.')
fltStatForwardPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 5), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatForwardPkts.setStatus('current')
if mibBuilder.loadTexts: fltStatForwardPkts.setDescription('The number of packets forwarded to the IP address specified in the filter.')
fltStatRejectPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 6), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatRejectPkts.setStatus('current')
if mibBuilder.loadTexts: fltStatRejectPkts.setDescription('The number of discarded packets for which an ICMP destination unreachable packet with code 13 was sent.')
fltStatLogPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 7), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatLogPkts.setStatus('current')
if mibBuilder.loadTexts: fltStatLogPkts.setDescription('The number of logged packets.')
fltStatDefaultPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatDefaultPkts.setStatus('current')
if mibBuilder.loadTexts: fltStatDefaultPkts.setDescription('The number of packets that pass through the filter without matching upon which the default action is used.')
fltStatSpecVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 9), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatSpecVersion.setStatus('current')
if mibBuilder.loadTexts: fltStatSpecVersion.setDescription('The version of the filter being used on this interface.')
fltStatSpecIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 5, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fltStatSpecIndex.setStatus('current')
if mibBuilder.loadTexts: fltStatSpecIndex.setDescription('The index of the filter specification being used on this interface. If there is no filter configured for an interface, the entry will not exist in this table.')
mcastControl = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6))
if mibBuilder.loadTexts: mcastControl.setStatus('current')
if mibBuilder.loadTexts: mcastControl.setDescription('The MIB module representing Multicast control list specifications in Zhone Technologies products. The first application of the multicast control list is to accept or deny an IGMP request to join or leave an IGMP group. Any IGMP request to join a group is accepted only if the group address is available in the multicast control list pointed to by a field in the ip-interface-record.')
mcastControlListTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6, 1), )
if mibBuilder.loadTexts: mcastControlListTable.setStatus('current')
if mibBuilder.loadTexts: mcastControlListTable.setDescription('The multicast control list table contains the IP addresses that an IGMP join request may be allowed to join, for any IP interface that has this multicast control list in its ip-interface-profile. The index to the table is the multicast control list ID and the precedence. The row status column indicates whether the row is being created or destroyed. ')
mcastControlListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6, 1, 1), ).setIndexNames((0, "ZHONE-COM-IP-FILTER-MIB", "mcastControlListControlId"), (0, "ZHONE-COM-IP-FILTER-MIB", "mcastControlListControlPrecedence"))
if mibBuilder.loadTexts: mcastControlListEntry.setStatus('current')
if mibBuilder.loadTexts: mcastControlListEntry.setDescription('An entry in the Multicast Control List.')
mcastControlListControlId = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: mcastControlListControlId.setStatus('current')
if mibBuilder.loadTexts: mcastControlListControlId.setDescription('The identifier of the multicast control list this entry belongs to.')
mcastControlListControlPrecedence = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: mcastControlListControlPrecedence.setStatus('current')
if mibBuilder.loadTexts: mcastControlListControlPrecedence.setDescription('The precedence of this entry within the multicast control list.')
mcastControlListRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6, 1, 1, 3), ZhoneRowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: mcastControlListRowStatus.setStatus('current')
if mibBuilder.loadTexts: mcastControlListRowStatus.setDescription('Used to create and destroy entries in this table.')
mcastControlListIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6, 1, 1, 4), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: mcastControlListIpAddress.setStatus('current')
if mibBuilder.loadTexts: mcastControlListIpAddress.setDescription('multicast ip address.')
mcastControlListType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 6, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("normal", 1), ("always-on", 2), ("periodic", 3))).clone('normal')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: mcastControlListType.setStatus('current')
if mibBuilder.loadTexts: mcastControlListType.setDescription('Defines the video stream type. normal - join and leave when desired. Used for video. always-on - always joined. Meant for EBS, not video. periodic - will join and leave after task complete. Not meant for video. Used to download the tv guide.')
portAccessControl = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7))
if mibBuilder.loadTexts: portAccessControl.setStatus('current')
if mibBuilder.loadTexts: portAccessControl.setDescription('This MIB represents the port access control list in Zhone products. It is used to control access to internal ports. Initially it is used just for TELNET (23), but in theory could be used for other ports as well.')
portAccessNextIndex = MibScalar((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portAccessNextIndex.setStatus('current')
if mibBuilder.loadTexts: portAccessNextIndex.setDescription('A hint for the next free index, should the manager want to create a new entry.')
portAccessTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 2), )
if mibBuilder.loadTexts: portAccessTable.setStatus('current')
if mibBuilder.loadTexts: portAccessTable.setDescription('Contains the list of entries that control port access on this device.')
portAccessEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 2, 1), ).setIndexNames((0, "ZHONE-COM-IP-FILTER-MIB", "portAccessIndex"))
if mibBuilder.loadTexts: portAccessEntry.setStatus('current')
if mibBuilder.loadTexts: portAccessEntry.setDescription('This contains the entry that is to be accepted. Currently only used to control access to port 23. arg1, arg2 provide IP Address/mask to allow in.')
portAccessIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 100)))
if mibBuilder.loadTexts: portAccessIndex.setStatus('current')
if mibBuilder.loadTexts: portAccessIndex.setDescription('The index of this entry in the table. 100 entries should be more than enough.')
portAccessRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 2, 1, 2), ZhoneRowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: portAccessRowStatus.setStatus('current')
if mibBuilder.loadTexts: portAccessRowStatus.setDescription('Used to create and delete entries in the table.')
portAccessNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1023))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: portAccessNumber.setStatus('current')
if mibBuilder.loadTexts: portAccessNumber.setDescription('PortNumber that this applies to, 1..1023 supported.')
portAccessSrcAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 2, 1, 4), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: portAccessSrcAddr.setStatus('current')
if mibBuilder.loadTexts: portAccessSrcAddr.setDescription('The IP address that we will accept packets from.')
portAccessNetMask = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 8, 7, 2, 1, 5), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: portAccessNetMask.setStatus('current')
if mibBuilder.loadTexts: portAccessNetMask.setDescription('portAccessNetMask - used to pass the range that we will accept with regards to portAccessSrcAddr.')
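# Illustrative only: one way an SNMP manager might read fltStatDenyPkts with
# the pysnmp high-level API, assuming this compiled MIB is on the pysnmp
# search path. The host, community string, and table index used below are
# hypothetical and not defined by this MIB:
#
#     from pysnmp.hlapi import (getCmd, SnmpEngine, CommunityData,
#                               UdpTransportTarget, ContextData,
#                               ObjectType, ObjectIdentity)
#     errInd, errStat, errIdx, varBinds = next(getCmd(
#         SnmpEngine(), CommunityData('public'),
#         UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#         ObjectType(ObjectIdentity('ZHONE-COM-IP-FILTER-MIB',
#                                   'fltStatDenyPkts', 1, 1))))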
mibBuilder.exportSymbols("ZHONE-COM-IP-FILTER-MIB", fltStmtIpDstAddrLow=fltStmtIpDstAddrLow, fltStmtIpSrcAddrHigh=fltStmtIpSrcAddrHigh, mcastControlListIpAddress=mcastControlListIpAddress, fltSpecVersion1=fltSpecVersion1, fltStatSpecIndex=fltStatSpecIndex, portAccessSrcAddr=portAccessSrcAddr, fltStatSpecVersion=fltStatSpecVersion, portAccessNumber=portAccessNumber, fltStmtIpProtocol=fltStmtIpProtocol, fltStmtModifier=fltStmtModifier, fltSpecLanguageVersion=fltSpecLanguageVersion, fltStmtSrcPortLow=fltStmtSrcPortLow, mcastControlListControlPrecedence=mcastControlListControlPrecedence, fltStmtActionArg=fltStmtActionArg, fltSpecVersion2=fltSpecVersion2, filterStmtRenumEntry=filterStmtRenumEntry, filterStmtRenumTable=filterStmtRenumTable, portAccessTable=portAccessTable, mcastControlListControlId=mcastControlListControlId, fltStmtIpDstAddrHigh=fltStmtIpDstAddrHigh, fltStmtRowStatus=fltStmtRowStatus, comIpFilter=comIpFilter, portAccessControl=portAccessControl, fltStatDirection=fltStatDirection, mcastControl=mcastControl, fltStmtArbValueLow=fltStmtArbValueLow, mcastControlListTable=mcastControlListTable, filterGlobal=filterGlobal, fltSpecIndex=fltSpecIndex, PYSNMP_MODULE_ID=comIpFilter, fltStmtSrcPortHigh=fltStmtSrcPortHigh, filterStatsTable=filterStatsTable, fltStmtArbMask=fltStmtArbMask, fltGlobalIndexNext=fltGlobalIndexNext, fltStmtIndexNew=fltStmtIndexNew, mcastControlListRowStatus=mcastControlListRowStatus, filterStatsEntry=filterStatsEntry, fltStmtArbValueBase=fltStmtArbValueBase, fltStatLogPkts=fltStatLogPkts, fltStatResetPkts=fltStatResetPkts, fltStatPermitPkts=fltStatPermitPkts, mcastControlListType=mcastControlListType, portAccessIndex=portAccessIndex, fltStmtDstPortLow=fltStmtDstPortLow, fltGlobalTimeout=fltGlobalTimeout, filterStatementTable=filterStatementTable, fltStatDefaultPkts=fltStatDefaultPkts, filter=filter, fltStmtArbOffset=fltStmtArbOffset, portAccessEntry=portAccessEntry, portAccessNextIndex=portAccessNextIndex, fltStatRejectPkts=fltStatRejectPkts, mcastControlListEntry=mcastControlListEntry, filterStatementEntry=filterStatementEntry, fltStmtIndex=fltStmtIndex, filterSpecTable=filterSpecTable, fltSpecRowStatus=fltSpecRowStatus, fltStmtArbValueHigh=fltStmtArbValueHigh, portAccessNetMask=portAccessNetMask, portAccessRowStatus=portAccessRowStatus, fltStmtAction=fltStmtAction, fltStmtIpSrcAddrLow=fltStmtIpSrcAddrLow, filterSpecEntry=filterSpecEntry, fltStatDenyPkts=fltStatDenyPkts, fltSpecDesc=fltSpecDesc, fltSpecName=fltSpecName, fltStmtDstPortHigh=fltStmtDstPortHigh, fltStatForwardPkts=fltStatForwardPkts)
| 168.707424
| 2,566
| 0.786354
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 18,647
| 0.482658
|
5442d7409922b392e57d7544f376052f8505514b
| 11,160
|
py
|
Python
|
watertap/examples/flowsheets/case_studies/municipal_treatment/municipal_treatment.py
|
avdudchenko/watertap
|
ac8d59e015688ff175a8087d2d52272e4f1fe84f
|
[
"BSD-3-Clause-LBNL"
] | 4
|
2021-11-06T01:13:22.000Z
|
2022-02-08T21:16:38.000Z
|
watertap/examples/flowsheets/case_studies/municipal_treatment/municipal_treatment.py
|
avdudchenko/watertap
|
ac8d59e015688ff175a8087d2d52272e4f1fe84f
|
[
"BSD-3-Clause-LBNL"
] | 233
|
2021-10-13T12:53:44.000Z
|
2022-03-31T21:59:50.000Z
|
watertap/examples/flowsheets/case_studies/municipal_treatment/municipal_treatment.py
|
avdudchenko/watertap
|
ac8d59e015688ff175a8087d2d52272e4f1fe84f
|
[
"BSD-3-Clause-LBNL"
] | 12
|
2021-11-01T19:11:03.000Z
|
2022-03-08T22:20:58.000Z
|
###############################################################################
# WaterTAP Copyright (c) 2021, The Regents of the University of California,
# through Lawrence Berkeley National Laboratory, Oak Ridge National
# Laboratory, National Renewable Energy Laboratory, and National Energy
# Technology Laboratory (subject to receipt of any required approvals from
# the U.S. Dept. of Energy). All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and license
# information, respectively. These files are also available online at the URL
# "https://github.com/watertap-org/watertap/"
#
###############################################################################
from pyomo.environ import (
ConcreteModel,
value,
TransformationFactory,
units as pyunits,
assert_optimal_termination,
)
from pyomo.network import Arc, SequentialDecomposition
from pyomo.util.check_units import assert_units_consistent
from idaes.core import FlowsheetBlock
from idaes.core.util import get_solver
from idaes.generic_models.unit_models import Product
import idaes.core.util.scaling as iscale
from idaes.generic_models.costing import UnitModelCostingBlock
from watertap.core.util.initialization import assert_degrees_of_freedom
from watertap.core.wt_database import Database
import watertap.core.zero_order_properties as prop_ZO
from watertap.unit_models.zero_order import (
FeedZO,
MunicipalDrinkingZO,
WaterPumpingStationZO,
PumpZO,
CoagulationFlocculationZO,
SedimentationZO,
OzoneZO,
FixedBedZO,
GACZO,
UVZO,
IonExchangeZO,
ChlorinationZO,
StorageTankZO,
BackwashSolidsHandlingZO,
)
from watertap.core.zero_order_costing import ZeroOrderCosting
def main():
m = build()
set_operating_conditions(m)
assert_degrees_of_freedom(m, 0)
initialize_system(m) # initialization needed for ozone unit
results = solve(m)
display_results(m)
add_costing(m)
initialize_costing(m)
assert_degrees_of_freedom(m, 0)
assert_units_consistent(m)
results = solve(m)
display_costing(m)
return m, results
def build():
# flowsheet set up
m = ConcreteModel()
m.db = Database()
m.fs = FlowsheetBlock(default={"dynamic": False})
m.fs.prop = prop_ZO.WaterParameterBlock(
default={"solute_list": ["tds", "tss", "toc"]}
)
# unit models
m.fs.feed = FeedZO(default={"property_package": m.fs.prop})
m.fs.intake_pump = WaterPumpingStationZO(
default={
"property_package": m.fs.prop,
"database": m.db,
"process_subtype": "raw",
}
)
m.fs.coag_and_floc = CoagulationFlocculationZO(
default={"property_package": m.fs.prop, "database": m.db}
)
m.fs.sedimentation = SedimentationZO(
default={"property_package": m.fs.prop, "database": m.db}
)
m.fs.ozonation = OzoneZO(default={"property_package": m.fs.prop, "database": m.db})
m.fs.gravity_basin = FixedBedZO(
default={
"property_package": m.fs.prop,
"database": m.db,
"process_subtype": "gravity_basin",
}
)
m.fs.gac = GACZO(
default={
"property_package": m.fs.prop,
"database": m.db,
"process_subtype": "pressure_vessel",
}
)
m.fs.backwash_pump = WaterPumpingStationZO(
default={
"property_package": m.fs.prop,
"database": m.db,
"process_subtype": "treated",
}
)
m.fs.uv = UVZO(default={"property_package": m.fs.prop, "database": m.db})
m.fs.anion_exchange = IonExchangeZO(
default={
"property_package": m.fs.prop,
"database": m.db,
"process_subtype": "anion_exchange",
}
)
m.fs.chlorination = ChlorinationZO(
default={"property_package": m.fs.prop, "database": m.db}
)
m.fs.storage = StorageTankZO(
default={"property_package": m.fs.prop, "database": m.db}
)
m.fs.recharge_pump = WaterPumpingStationZO(
default={
"property_package": m.fs.prop,
"database": m.db,
"process_subtype": "treated",
}
)
m.fs.product = Product(default={"property_package": m.fs.prop})
# connections
m.fs.s01 = Arc(source=m.fs.feed.outlet, destination=m.fs.intake_pump.inlet)
m.fs.s02 = Arc(source=m.fs.intake_pump.outlet, destination=m.fs.coag_and_floc.inlet)
m.fs.s03 = Arc(
source=m.fs.coag_and_floc.outlet, destination=m.fs.sedimentation.inlet
)
m.fs.s04 = Arc(source=m.fs.sedimentation.treated, destination=m.fs.ozonation.inlet)
m.fs.s05 = Arc(source=m.fs.ozonation.treated, destination=m.fs.gravity_basin.inlet)
m.fs.s06 = Arc(source=m.fs.gravity_basin.treated, destination=m.fs.gac.inlet)
m.fs.s07 = Arc(source=m.fs.gac.treated, destination=m.fs.uv.inlet)
m.fs.s08 = Arc(source=m.fs.gac.byproduct, destination=m.fs.backwash_pump.inlet)
m.fs.s09 = Arc(source=m.fs.uv.treated, destination=m.fs.anion_exchange.inlet)
m.fs.s10 = Arc(
source=m.fs.anion_exchange.treated, destination=m.fs.chlorination.inlet
)
m.fs.s11 = Arc(source=m.fs.chlorination.treated, destination=m.fs.storage.inlet)
m.fs.s12 = Arc(source=m.fs.storage.outlet, destination=m.fs.recharge_pump.inlet)
m.fs.s13 = Arc(source=m.fs.recharge_pump.outlet, destination=m.fs.product.inlet)
TransformationFactory("network.expand_arcs").apply_to(m)
# scaling
iscale.calculate_scaling_factors(m)
return m
def set_operating_conditions(m):
# ---specifications---
# feed
flow_vol = 0.9224 * pyunits.m**3 / pyunits.s
conc_mass_tds = 0.63 * pyunits.kg / pyunits.m**3
conc_mass_tss = 0.006525 * pyunits.kg / pyunits.m**3
conc_mass_toc = 0.004 * pyunits.kg / pyunits.m**3
m.fs.feed.flow_vol[0].fix(flow_vol)
m.fs.feed.conc_mass_comp[0, "tds"].fix(conc_mass_tds)
m.fs.feed.conc_mass_comp[0, "tss"].fix(conc_mass_tss)
m.fs.feed.conc_mass_comp[0, "toc"].fix(conc_mass_toc)
solve(m.fs.feed)
# intake pump
m.fs.intake_pump.load_parameters_from_database()
m.fs.intake_pump.electricity.fix(93.2)
# coagulation and flocculation
m.fs.coag_and_floc.load_parameters_from_database(use_default_removal=True)
# sedimentation
m.fs.sedimentation.load_parameters_from_database(use_default_removal=True)
    # ozonation
m.fs.ozonation.load_parameters_from_database(use_default_removal=True)
# fixed bed gravity basin
m.fs.gravity_basin.load_parameters_from_database(use_default_removal=True)
# granular activated carbon
m.fs.gac.load_parameters_from_database(use_default_removal=True)
# backwash pump
m.fs.backwash_pump.load_parameters_from_database()
m.fs.backwash_pump.electricity.fix(37.3)
# uv aop
m.fs.uv.load_parameters_from_database(use_default_removal=True)
m.fs.uv.uv_reduced_equivalent_dose.fix(200)
m.fs.uv.uv_transmittance_in.fix(0.90)
# anion exchange
m.fs.anion_exchange.load_parameters_from_database(use_default_removal=True)
# chlorination
m.fs.chlorination.load_parameters_from_database(use_default_removal=True)
# storage
m.fs.storage.load_parameters_from_database(use_default_removal=True)
m.fs.storage.storage_time.fix(6)
# recharge pump
m.fs.recharge_pump.load_parameters_from_database()
m.fs.recharge_pump.electricity.fix(186.4)
def initialize_system(m):
seq = SequentialDecomposition()
seq.options.tear_set = []
seq.options.iterLim = 1
seq.run(m, lambda u: u.initialize())
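# A note on the initialization above: with an empty tear_set and iterLim=1,
# SequentialDecomposition simply walks the flowsheet once in topological
# order, calling initialize() on each unit. A minimal sketch of the same idea
# done by hand (assuming the unit_list used later in display_results) would be:
#
#     for u in unit_list:
#         m.fs.component(u).initialize()
#
# The decomposition approach is preferred because it derives the ordering
# from the Arc connectivity rather than a hand-maintained list.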
def solve(blk, solver=None, tee=False, check_termination=True):
if solver is None:
solver = get_solver()
results = solver.solve(blk, tee=tee)
if check_termination:
assert_optimal_termination(results)
return results
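# Example usage of the solve() helper above (a sketch; the default solver
# depends on the local IDAES installation):
#
#     results = solve(m, tee=True)   # echo the solver log to stdout
#     results = solve(m.fs.feed)     # solve a single block, e.g. the feed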
def display_results(m):
unit_list = [
"feed",
"intake_pump",
"coag_and_floc",
"sedimentation",
"ozonation",
"gravity_basin",
"gac",
"backwash_pump",
"uv",
"anion_exchange",
"chlorination",
"storage",
"recharge_pump",
"product",
]
for u in unit_list:
m.fs.component(u).report()
def add_costing(m):
m.fs.costing = ZeroOrderCosting()
# typing aid
costing_kwargs = {"default": {"flowsheet_costing_block": m.fs.costing}}
m.fs.intake_pump.costing = UnitModelCostingBlock(**costing_kwargs)
m.fs.coag_and_floc.costing = UnitModelCostingBlock(**costing_kwargs)
m.fs.sedimentation.costing = UnitModelCostingBlock(**costing_kwargs)
m.fs.ozonation.costing = UnitModelCostingBlock(**costing_kwargs)
m.fs.gravity_basin.costing = UnitModelCostingBlock(**costing_kwargs)
m.fs.gac.costing = UnitModelCostingBlock(**costing_kwargs)
m.fs.backwash_pump.costing = UnitModelCostingBlock(**costing_kwargs)
m.fs.uv.costing = UnitModelCostingBlock(**costing_kwargs)
m.fs.anion_exchange.costing = UnitModelCostingBlock(**costing_kwargs)
m.fs.chlorination.costing = UnitModelCostingBlock(**costing_kwargs)
m.fs.storage.costing = UnitModelCostingBlock(**costing_kwargs)
m.fs.recharge_pump.costing = UnitModelCostingBlock(**costing_kwargs)
m.fs.costing.cost_process()
m.fs.costing.add_electricity_intensity(m.fs.product.properties[0].flow_vol)
m.fs.costing.add_LCOW(m.fs.product.properties[0].flow_vol)
def initialize_costing(m):
m.fs.costing.initialize()
def display_costing(m):
m.fs.costing.total_capital_cost.display()
m.fs.costing.total_operating_cost.display()
m.fs.costing.LCOW.display()
print("\nUnit Capital Costs\n")
for u in m.fs.costing._registered_unit_costing:
print(
u.name,
" : ",
value(pyunits.convert(u.capital_cost, to_units=pyunits.USD_2018)),
)
print("\nUtility Costs\n")
for f in m.fs.costing.flow_types:
print(
f,
" : ",
value(
pyunits.convert(
m.fs.costing.aggregate_flow_costs[f],
to_units=pyunits.USD_2018 / pyunits.year,
)
),
)
print("")
total_capital_cost = value(
pyunits.convert(m.fs.costing.total_capital_cost, to_units=pyunits.MUSD_2018)
)
print(f"Total Capital Costs: {total_capital_cost:.2f} M$")
total_operating_cost = value(
pyunits.convert(
m.fs.costing.total_operating_cost, to_units=pyunits.MUSD_2018 / pyunits.year
)
)
print(f"Total Operating Costs: {total_operating_cost:.2f} M$/year")
electricity_intensity = value(
pyunits.convert(
m.fs.costing.electricity_intensity, to_units=pyunits.kWh / pyunits.m**3
)
)
print(f"Electricity Intensity: {electricity_intensity:.4f} kWh/m^3")
LCOW = value(
pyunits.convert(m.fs.costing.LCOW, to_units=pyunits.USD_2018 / pyunits.m**3)
)
print(f"Levelized Cost of Water: {LCOW:.4f} $/m^3")
if __name__ == "__main__":
m, results = main()
| 32.631579
| 88
| 0.66819
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,129
| 0.190771
|
54439c9a0c52b928b7dce1ab1fcc8ffac580ad8b
| 2,680
|
py
|
Python
|
lib/googlecloudsdk/sql/tools/instances/delete.py
|
IsaacHuang/google-cloud-sdk
|
52afa5d1a75dff08f4f5380c5cccc015bf796ca5
|
[
"Apache-2.0"
] | null | null | null |
lib/googlecloudsdk/sql/tools/instances/delete.py
|
IsaacHuang/google-cloud-sdk
|
52afa5d1a75dff08f4f5380c5cccc015bf796ca5
|
[
"Apache-2.0"
] | null | null | null |
lib/googlecloudsdk/sql/tools/instances/delete.py
|
IsaacHuang/google-cloud-sdk
|
52afa5d1a75dff08f4f5380c5cccc015bf796ca5
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2013 Google Inc. All Rights Reserved.
"""Deletes a Cloud SQL instance."""
from googlecloudapis.apitools.base import py as apitools_base
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core.util import console_io
from googlecloudsdk.sql import util
class Delete(base.Command):
"""Deletes a Cloud SQL instance."""
@staticmethod
def Args(parser):
"""Args is called by calliope to gather arguments for this command.
Args:
parser: An argparse parser that you can use to add arguments that go
on the command line after this command. Positional arguments are
allowed.
"""
parser.add_argument(
'instance',
help='Cloud SQL instance ID.')
def Run(self, args):
"""Deletes a Cloud SQL instance.
Args:
args: argparse.Namespace, The arguments that this command was invoked
with.
Returns:
A dict object representing the operations resource describing the delete
operation if the delete was successful.
Raises:
      HttpException: An HTTP error response was received while executing the
          api request.
      ToolException: An error other than an HTTP error occurred while
          executing the command.
"""
sql_client = self.context['sql_client']
sql_messages = self.context['sql_messages']
resources = self.context['registry']
util.ValidateInstanceName(args.instance)
instance_ref = resources.Parse(args.instance, collection='sql.instances')
if not console_io.PromptContinue(
'All of the instance data will be lost when the instance is deleted.'):
return None
try:
result = sql_client.instances.Delete(
sql_messages.SqlInstancesDeleteRequest(
instance=instance_ref.instance,
project=instance_ref.project))
operation_ref = resources.Create(
'sql.operations',
operation=result.operation,
project=instance_ref.project,
instance=instance_ref.instance,
)
unused_operation = sql_client.operations.Get(operation_ref.Request())
log.DeletedResource(instance_ref)
except apitools_base.HttpError as error:
raise exceptions.HttpException(util.GetErrorMessage(error))
def Display(self, unused_args, result):
"""Display prints information about what just happened to stdout.
Args:
unused_args: The same as the args in Run.
result: A dict object representing the operations resource describing the
delete operation if the delete was successful.
"""
self.format(result)
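# Example invocation of this command from the CLI (illustrative; the exact
# surface depends on the installed SDK version, and 'my-instance' is a
# hypothetical Cloud SQL instance ID):
#
#     gcloud sql instances delete my-instance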
| 31.904762
| 79
| 0.701493
| 2,319
| 0.865299
| 0
| 0
| 377
| 0.140672
| 0
| 0
| 1,301
| 0.485448
|
5445ab0135e0f3ff0f80b808bab631bc81bb1f98
| 804
|
py
|
Python
|
nsapiwrapper/exceptions.py
|
DolphDev/nsapiwrapper
|
cd67be445cfc4845f822ff815f3fb265f75061c9
|
[
"MIT"
] | null | null | null |
nsapiwrapper/exceptions.py
|
DolphDev/nsapiwrapper
|
cd67be445cfc4845f822ff815f3fb265f75061c9
|
[
"MIT"
] | null | null | null |
nsapiwrapper/exceptions.py
|
DolphDev/nsapiwrapper
|
cd67be445cfc4845f822ff815f3fb265f75061c9
|
[
"MIT"
] | null | null | null |
"""Exceptions for this library"""
class NSBaseError(Exception):
"""Base Error for all custom exceptions"""
pass
class RateLimitReached(NSBaseError):
"""Rate Limit was reached"""
class NSServerBaseException(NSBaseError):
"""Exceptions that the server returns"""
pass
class APIError(NSServerBaseException):
"""General API Error"""
pass
class Forbidden(APIError):
pass
class ConflictError(APIError):
"""ConflictError from Server"""
pass
class NotFound(APIError):
"""Nation/Region Not Found"""
pass
class APIRateLimitBan(APIError):
"""Server has banned your IP"""
pass
class APIUsageError(APIError):
pass
class InternalServerError(APIError):
pass
class CloudflareServerError(APIError):
pass
class BadRequest(APIError):
pass
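# A short illustration of how this exception hierarchy is meant to be used
# (a sketch, assuming a hypothetical request() helper that raises these
# exceptions):
#
#     try:
#         request()
#     except NotFound:
#         ...  # handle a missing nation/region specifically
#     except APIError:
#         ...  # catch-all for any other server-side API error
#
# Because NotFound subclasses APIError, the specific handler must come first.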
| 18.272727
| 46
| 0.705224
| 746
| 0.927861
| 0
| 0
| 0
| 0
| 0
| 0
| 257
| 0.319652
|
544703b0ead742e49b1d2aa2223e76a2cd97299b
| 62,639
|
py
|
Python
|
src.py
|
edbezci/mapOverlayHumanoid
|
95d5e16fb983a7384abea6f51599483274ff0f62
|
[
"MIT"
] | null | null | null |
src.py
|
edbezci/mapOverlayHumanoid
|
95d5e16fb983a7384abea6f51599483274ff0f62
|
[
"MIT"
] | null | null | null |
src.py
|
edbezci/mapOverlayHumanoid
|
95d5e16fb983a7384abea6f51599483274ff0f62
|
[
"MIT"
] | null | null | null |
# the lines below import the necessary libraries
import pygame
import os
import random
import math
import sys
import hlp
import intro
import dsb # this is the last module with the description files
'''
declaring some global variables, because in Python global variables can be used in functions defined later
setting the flag variables to False lets us activate them inside the game loop, or vice versa
creating empty lists as global variables lets us access them outside of the functions that use them
'''
cursor = False
randomLine = False
randomTimer = True
run = False
stop = False
start = False
clear = False
lines = []
colours = []
brutecolours = []
points = []
line_name = []
intersect_name = []
orderList = []
# initialise Pygame library, it is necessary in Programs using Pygame
pygame.init()
line_colour = pygame.Color(50, 50, 120)
# initialise the window at 1280 * 550 with a caption
display = pygame.display.set_mode((1280, 550), pygame.FULLSCREEN |
pygame.DOUBLEBUF | pygame.HWSURFACE)
pygame.display.set_caption("Line Segment Intersection Visualisation Tool")
# the clock limits how many frames are refreshed per second
clock = pygame.time.Clock()
# load cursor image for inserting line, os.path method points to the path of the cursor image file
pointer = pygame.image.load(os.path.join("resources", "pointer.png"))
# BitterFont text used throughout the program
bitterfont = os.path.abspath("resources/bitterfont.otf")
def AddPoints(p):
'''
    this function takes a point as an argument and iterates over every item in the 'points' list
    if that point is already in the list, the function does nothing
    if not, the function appends the argument p to the points list
'''
# make sure we're referring to the points object outside of this function
global points
# step through all the current items in points list
for point in points:
# is p the same as the current item
if point == p:
# if so, stop stepping through and drop out of this function without doing anything
return
# if we get here, we've gone through the whole list without a match
# add the new point to the list
points.append(p)
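# Design note: AddPoints performs a linear scan to avoid duplicates, which is
# O(n) per insertion. A set-based sketch of the same behaviour (hypothetical,
# not used elsewhere in this script) would be O(1) per insertion:
#
#     seen = set()
#     def AddPointsFast(p):
#         if p not in seen:
#             seen.add(p)
#             points.append(p)
#
# The list version is kept here because the rest of the script indexes
# points positionally alongside intersect_name.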
def TransValue(OldValue, oldMax, oldMin):
'''
    linearly rescales OldValue from the range [oldMin, oldMax] to the range [0, 350]
'''
newMax = 350
newMin = 0
OldRange = (oldMax - oldMin)
NewRange = (newMax - newMin)
NewValue = int((((OldValue - oldMin) * NewRange) / OldRange) + newMin)
return NewValue
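# Worked example for TransValue, mapping a display pixel back to grid units.
# With oldMin=50 and oldMax=450 (the drawing area), TransValue(450, 450, 50)
# gives int(((450 - 50) * 350) / 400) = 350, and TransValue(50, 450, 50) = 0.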
def GenerateRandomLine():
'''
generates random lines
'''
    x1 = random.randrange(51, 450)  # randomly chooses between 51 and 449
    y1 = random.randrange(50, 450)  # randomly chooses between 50 and 449
    x2 = random.randrange(51, 450)  # randomly chooses between 51 and 449
    y2 = random.randrange(50, 450)  # randomly chooses between 50 and 449
# calls for the AddNewLine function to create new lines
AddNewLine([(x1, y1), (x2, y2)])
def CheckIntersect(p1, p2, q1, q2):
'''
this function determines if two lines intersect
p1,p2, q1, q2 are start and end points of the lines
it uses Cramer's rule of linear algebra to determine whether lines intersect
'''
    # computing the coefficients of the line through p1 and p2 in the form a1*x + b1*y = c1
    a1 = p2[1] - p1[1]
    b1 = p1[0] - p2[0]  # same idea, using the first (x) coordinates
c1 = a1 * p1[0] + b1 * p1[1]
a2 = q2[1] - q1[1] # same as a1 but for q instead of p
b2 = q1[0] - q2[0] # same as b1 but for q instead of p
c2 = a2 * q1[0] + b2 * q1[1]
d = (a1 * b2 - a2 * b1) # finding the determinant
    if d == 0:  # parallel or coincident lines: the determinant is zero
return
x = int((c1 * b2 - c2 * b1) / d) # solving for x
y = int((a1 * c2 - a2 * c1) / d) # solving for y
if min(p1[0], p2[0]) <= x <= max(p1[0], p2[0]) and min(p1[1], p2[1]) <= y <= max(p1[1], p2[1]):
if min(q1[0], q2[0]) <= x <= max(q1[0], q2[0]) and min(q1[1], q2[1]) <= y <= max(q1[1], q2[1]):
# found the intersection by checking solution of x and y for existing points
AddPoints((x, y))
return True # returns true
return False
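# Worked example for CheckIntersect using Cramer's rule: for the segments
# p = (0, 0)-(10, 10) and q = (0, 10)-(10, 0):
#     a1 = 10, b1 = -10, c1 = 0        (line p: 10x - 10y = 0)
#     a2 = -10, b2 = -10, c2 = -100    (line q: -10x - 10y = -100)
#     d = a1*b2 - a2*b1 = -100 - 100 = -200   (non-zero, so not parallel)
#     x = (c1*b2 - c2*b1) / d = -1000 / -200 = 5
#     y = (a1*c2 - a2*c1) / d = -1000 / -200 = 5
# (5, 5) lies on both segments, so CheckIntersect records it and returns True.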
def BruteForceMain():
'''
this function is the Brute-Force Algorithm function with main display loop
'''
    # accessing the global variables
global cursor, lines, brutecolours, points, randomLine, randomTimer, run, stop, clear, intersect_name
    # these lines access the remaining necessary global variables
global display, line_name, orderList
pygame.display.set_caption("Brute-Force Algorithm") # adding a caption
# setting the display for the algorithm
display = pygame.display.set_mode((1280, 550), pygame.FULLSCREEN)
    cursor = False  # the lines below set default values before the main loop (while True) begins
    randomLine = False  # again the default placeholder for the random line
    clickedPos = []  # default placeholder value for position
    orderList = []  # same for the order list; empty now, these values will be appended during the game loop
    efficiency = 0  # default placeholder value for algorithm efficiency
    eventQueue = []  # event queue placeholder, empty now
back = 0 # if this becomes one, you go back
while True: # starting the game loop
# pygame method to fill the screen, takes colours and a display object
display.fill((0, 0, 0))
# pygame method, iterates over the events in pygame to determine what we are doing with every event
for event in pygame.event.get():
if event.type == pygame.QUIT: # this one quits
pygame.quit() # putting the quit pygame method
exit() # takes the user from GUI to the script for exiting
            # detect when a keyboard key has been released
if event.type == pygame.KEYUP:
if event.key == pygame.K_ESCAPE: # if that keyboard key is ESC
exit() # call for the exit function.
'''
if mouse clicked on the below coordinates, create a line
pygame GUI property detecting when mouse click is on
MOUSEBUTTONDOWN and MOUSEBUTTONUP should be used as a small loops so that the computer can understand when that instance of the mouse movement is over
'''
if cursor == True and event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1: # pygame method defining the button in the GUI
                    mouse_pos = pygame.mouse.get_pos()  # the current mouse position on the screen
                    # mouse_pos[0] is the cursor's X coordinate and mouse_pos[1] is its Y coordinate
                    if 50 < mouse_pos[0] < 450 and 50 < mouse_pos[1] < 450:
                        # record the clicked position, corresponding to the position of the mouse
                        clickedPos.append(mouse_pos)
if event.type == pygame.MOUSEBUTTONUP:
randomTimer = True # turning the random from false to true so the timer can activate
        for i in range(0, 41):  # grid coordinates for drawing; range(0, 41) iterates i from 0 to 40
# for the pygame method of drawing below, we need to determine the position on the screen as a tuple object
pos = i * 10 + 50
# pygame method, takes display, colour, and positions of where the lines start and end. i.e, starts in (50,pos) ends in (450,pos), 1 at the end is the width of the line
pygame.draw.line(display, line_colour, (50, pos), (450, pos), 1)
# same as above but takes pos as y, by doing so and iterating through the range, you cover all the plane
pygame.draw.line(display, line_colour, (pos, 50), (pos, 450), 1)
        i = 0  # reset the index used to step through the data structures
for line in lines: # iterating through lines which is a global variable for the priority queue aka eventQueue
'''
having [i] next to colour allows me to colour each line differently
each line has tuple object in the global variable
line[0] accesses the nth item's first coordinates in the iteration and drawing ends in the line[1], nth item's second object
'''
pygame.draw.line(display, brutecolours[i], line[0], line[1], 1)
# calling the hlp.AddText function that was created before in the script
hlp.AddText(line_name[i], line[0])
i += 1 # remember, need to increase the index.
orderList = [] # creating the placeholder list object to secure future items
i = 50
        while i < 450:  # sweep every x pixel of the grid; 450 is the right edge of the display area
            j = 0  # j indexes the intersection names alongside the points list
            for point in points:  # try all the recorded intersection points
                if point[0] == i:  # the x value of the selected point matches the current index
                    # then add its name to the orderList
                    orderList.append(intersect_name[j])
                j += 1  # as before, increase the index by one
            i += 1  # as before in the previous function, increase the index by one
n = len(lines) # finding out how many lines are drawn already
for point in points: # iterating over the points
# use this pygame method to draw a small circle where the lines intersect
pygame.draw.circle(display, hlp.red, point, 3)
        efficiency = n * n  # this is the efficiency formula for the brute-force algorithm
if cursor == True: # arrange the mouse cursors
pygame.mouse.set_visible(False)
pos = pygame.mouse.get_pos() # this is a pygame method for mouse cursor
            # replace the cursor with the existing pointer image; the pygame method display.blit draws a sprite on the screen
display.blit(pointer, pos)
# if you clicked on the screen, this checks the number of clicks and starts drawing
if len(clickedPos) > 0:
# again pygame method to draw, if clicked then draw this
pygame.draw.circle(display, hlp.white, clickedPos[0], 2)
# if clicked then draw this
pygame.draw.line(display, hlp.white, clickedPos[0], pos, 1)
                if len(clickedPos) >= 2:  # clicked two or more times, giving a beginning and an end for the line
                    # then add the line using the points saved in clickedPos: [0] is the beginning and [1] is the end
AddNewLine([clickedPos[0], clickedPos[1]])
cursor = False # disable the cursor after drawing
clickedPos = [] # empty the placeholder after drawing the line
else: # now you are entering into the scene of mouse action
# again pygame GUI method enabling mouse action on the screen to interact
pygame.mouse.set_visible(True)
if randomLine == True: # if mouse clicked on the randomline
GenerateRandomLine() # then create a random line, calling the existing function
randomLine = False # turn it off after drawing so it would not keep drawing forever
randomTimer = False # and stop the timer so it won't go forever
if clear == True: # clear action is enabled, clear back all the placeholders to default
lines = [] # everything is back to the default value
colours = [] # everything is back to the default value
brutecolours = [] # everything is back to the default value
points = [] # everything is back to the default value
orderList = [] # everything is back to the default value
efficiency = 0 # everything is back to the default value
eventQueue = [] # everything is back to the default value
intersect_name = [] # everything is back to the default value
line_name = [] # everything is back to the default value
clear = False
'''
adding text positions and texts for the frame
calling existing functions, giving text, position and when applicable the action
my helper functions are button and addtext that help me in my larger script.
'''
# adding the texts and buttons as above function
hlp.AddText("(0,0)", (30, 25))
hlp.AddText("(50,0)", (430, 25))
hlp.AddText("(0,50)", (30, 450))
hlp.AddText("(50,50)", (430, 450))
hlp.Button("Clear", 200, 5, 100, 30, ClearActive)
hlp.Button("Random Segment", 50, 500, 180, 30, RandomActive)
hlp.Button("Insert Segment", 280, 500, 180, 35, CursorActive)
hlp.Button("Exit", 500, 5, 100,
30, sys.exit)
back = hlp.ButtonWithReturn("Back", 900, 5, 100, 30, 1)
if back > 0: # if back has a value, which means it has been clicked, stop the bigger loop that we started, i.e. the game loop, and break the game loop
break
# calls the helper function
nxt = hlp.ButtonWithReturn("Next", 700, 5, 100, 30, 1)
if nxt > 0: # so if the next button is clicked
# calls for the description function
hlp.Description(dsb.bf_desc)
# pygame method to draw an object
pygame.draw.rect(display, line_colour, [500, 50, 750, 490], 2)
# adding the text on the given location
hlp.AddText("Brute-Force Algorithm", (520, 70))
# adding the text on the given location.
hlp.AddText("Order List:", (520, 120))
        # creating index i and the x, y positions to display on the GUI; this assigns several values to a tuple at once
i, o_x, o_y = 0, 540, 150
'''
iterating through the existing values in the orderList.
because we don't want the texts to overlap on the screen
most of the numbers below are finetuning to prevent overlapping of the texts for the order list and the eventqueue list.
'''
for val in orderList: # going through the items in the orderList
# calling the helper function to add the text of the values in the orderList
hlp.AddText(val, (o_x, o_y), (255, 255, 255))
o_x += 50 # moving 50 pix on the x axis for each item
i += 1 # going to next item by increasing the index
if i % 14 == 0: # check if the line ends
                o_x = 540  # text is on the edge; there is no more horizontal space
                o_y += 20  # go to the next line by adding 20 to the y axis
# adding the text on the given location
hlp.AddText("Efficiency O(n*n):", (520, 480))
# adding the text on the given location
hlp.AddText(str(efficiency), (540, 505), (255, 255, 255))
# updates the screen every turn
pygame.display.flip()
        # will not run at more than 90 frames per second
clock.tick(90)
intro.Introduction2() # calls back the introduction function
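# Complexity note for the brute-force approach: the Efficiency read-out above
# uses n * n, reflecting the idea that every segment is tested against every
# other segment. For n = 10 lines that is about 100 checks, regardless of how
# many intersections actually exist.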
def BentleyMain():
'''
this function is the Bentley-Ottmann Algorithm function with main display loop
'''
global cursor, lines, colours, points, randomLine, randomTimer, run, stop, clear, intersect_name
    # these lines access the remaining necessary global variables
global display, line_name, orderList
pygame.display.set_caption("Bentley-Ottmann Algorithm") # adding a caption
# setting the display for the algorithm
display = pygame.display.set_mode((1280, 550), pygame.FULLSCREEN)
    cursor = False  # the lines below set default values before the main loop (while True) begins
    randomLine = False  # again the default placeholder for the random line
    clickedPos = []  # default placeholder value for position
    efficiency = 0  # default placeholder value for algorithm efficiency
    eventQueue = []  # event queue placeholder, empty now
    orderList = []  # same for the order list; empty now, these values will be appended during the game loop
x = 50 # location of the x value on the screen
back = 0 # if this becomes one, you go back
while True: # starting the game loop
# pygame method to fill the screen. takes colours and a display object
display.fill((0, 0, 0))
# pygame method, iterates over the events in pygame to determine what we are doing with every event
for event in pygame.event.get():
if event.type == pygame.QUIT: # this one quits
pygame.quit() # putting the quit pygame method
exit() # takes the user from GUI to the script for exiting
            # detect when a keyboard key has been released
if event.type == pygame.KEYUP:
if event.key == pygame.K_ESCAPE: # if that keyboard key is ESC
exit() # call for the exit function.
'''
if mouse clicked on the below coordinates, create a line
pygame GUI property detecting when mouse click is on
MOUSEBUTTONDOWN and MOUSEBUTTONUP should be used as a small loops so that the computer can understand when that instance of the mouse movement is over
'''
if cursor == True and event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1: # pygame method defining the button in the GUI
                    mouse_pos = pygame.mouse.get_pos()  # the current mouse position on the screen
                    # mouse_pos[0] is the cursor's X coordinate and mouse_pos[1] is its Y coordinate
                    if 50 < mouse_pos[0] < 450 and 50 < mouse_pos[1] < 450:
                        # record the clicked position, corresponding to the position of the mouse
                        clickedPos.append(mouse_pos)
if event.type == pygame.MOUSEBUTTONUP:
randomTimer = True # turning the random from false to true so the timer can activate
        for i in range(0, 41):  # grid coordinates for drawing; range(0, 41) iterates i from 0 to 40
# for the pygame method of drawing below, we need to determine the position on the screen as a tuple object
pos = i * 10 + 50
# pygame method, takes display, colour, and positions of where the lines start and end. i.e, starts in (50,pos) ends in (450,pos), 1 at the end is the width of the line
pygame.draw.line(display, line_colour, (50, pos), (450, pos), 1)
# same as above but takes pos as y, by doing so and iterating through the range, you cover all the plane
pygame.draw.line(display, line_colour, (pos, 50), (pos, 450), 1)
        i = 0  # reset the index used to step through the data structures
for line in lines: # iterating through lines which is a global variable for the priority queue aka eventQueue
'''
having [i] next to colour allows me to colour each line differently
each line has tuple object in the global variable
line[0] accesses the nth item's first coordinates in the iteration and drawing ends in the line[1], nth item's second object
'''
pygame.draw.line(display, colours[i], line[0], line[1], 1)
# calling the addText function that was created before in the script
hlp.AddText(line_name[i], line[0])
'''
nested indexing, as I am accessing the first item of the first item in the line object which is in the lines global variable
            the result of this nested indexing is the x coordinate of a point saved in a tuple
'''
if x == line[0][0]:
                # if the beginning point's x coordinate equals the preset x, append the event queue with this line's name
eventQueue.append(line_name[i])
if x == line[1][0]: # again the nested indexing
# removes the line from the queue if the end of the line's x coordinates equals to x variable
eventQueue.remove(line_name[i])
# increasing the index number at the end of the iteration loop so I can access the other items saved
i += 1
if stop == True: # tells to stop if stop is clicked
run = False # turns off the run, if it is stop, then run must be false
x = 50 # set x to default
            # stop must be reset to False after it is used; otherwise it would remain True forever (a logic error)
stop = False
if run == True: # tells it to start if run is clicked
cursor = False # when it is running cursor can't draw any newlines
randomLine = False # again no new random lines too
x += 1 # since I am scanning, the x value should scan the screen pixel after pixel, thus, adding 1 to the x value
# this draws the scan line on the screen
pygame.draw.line(display, hlp.red, (x, 50), (x, 450), 1)
# j and k are placeholders to keep track of the index
j = 0
k = 0
# iterating through points to draw the intersection circle in the run
for point in points:
# if the first item's x value is smaller or equal to the present x variable
if point[0] <= x:
# use this pygame method to draw a small circle where the lines intersect
pygame.draw.circle(display, hlp.white, point, 3)
k += 1 # increase the placeholders value
if point[0] == x: # if x value is already equal to the preset x
# then append the orderList with the name of the intersection
orderList.append(intersect_name[j])
j += 1 # increase the j once more
if k > 0: # so it means there is already an intersection
n = len(lines) # check how many lines were drawn already
if n > 0: # if the number of lines are more than 0, it means that there are existing lines
# measure the algorithm's speed
efficiency = (n + k) * math.log10(n)
'''
            since the display starts at the 50th pixel, subtract 50, then use integer division (//8) to convert the x pixel value to grid coordinates
this is so it can be used to name the incident of intersection
'''
c = (x - 50) // 8
# adding the text as well for the intersection
hlp.AddText("(X, Y) = (" + str(c) + ", 0)",
(200, 470), (255, 255, 255))
if cursor == True: # arrange the mouse cursors
pygame.mouse.set_visible(False)
pos = pygame.mouse.get_pos() # this is a pygame method for mouse cursor
            # replace the cursor with the existing pointer image; the pygame method display.blit draws a sprite on the screen
display.blit(pointer, pos)
# if you clicked on the screen, this checks the number of clicks and starts drawing
if len(clickedPos) > 0:
# again pygame method to draw, if clicked then draw this
pygame.draw.circle(display, hlp.white, clickedPos[0], 2)
# if clicked then draw this
pygame.draw.line(display, hlp.white, clickedPos[0], pos, 1)
                if len(clickedPos) >= 2:  # clicked two or more times, giving a beginning and an end for the line
                    # then add the line using the points saved in clickedPos: [0] is the beginning and [1] is the end
AddNewLine([clickedPos[0], clickedPos[1]])
cursor = False # disable the cursor after drawing
clickedPos = [] # empty the placeholder after drawing the line
else: # now you are entering into the scene of mouse action
# again pygame GUI method enabling mouse action on the screen to interact
pygame.mouse.set_visible(True)
if randomLine == True: # if mouse clicked on the randomline
GenerateRandomLine() # then create a random line, calling the existing function
randomLine = False # turn it off after drawing so it would not keep drawing forever
randomTimer = False # and stop the timer so it won't go forever
if run == True and x > 450: # if run function is enabled however the x value is out of the screen
x = 50 # put x back to the default of 50
run = False # and disable the run
if clear == True: # clear action is enabled, clear back all the placeholders to default
lines = [] # everything is back to the default value
colours = [] # everything is back to the default value
points = [] # everything is back to the default value
orderList = [] # everything is back to the default value
efficiency = 0 # everything is back to the default value
eventQueue = [] # everything is back to the default value
intersect_name = [] # everything is back to the default value
line_name = [] # everything is back to the default value
x = 50 # everything is back to the default value
run = False # everything is back to the default value
clear = False # everything is back to the default value
'''
adding text positions and texts for the frame
calling existing functions, giving text, position and when applicable the action
my helper functions are button and addtext that help me in my larger script
'''
# adding text positions and texts for the frame
hlp.AddText("(0,0)", (30, 25))
hlp.AddText("(50,0)", (430, 25))
hlp.AddText("(0,50)", (30, 450))
hlp.AddText("(50,50)", (430, 450))
# drawing buttons and determining positions
hlp.Button("Run", 80, 5, 100, 35, RunActive)
hlp.Button("Stop", 200, 5, 100, 35, StopActive)
hlp.Button("Clear", 320, 5, 100, 30, ClearActive)
hlp.Button("Random Segment", 50, 500, 180, 30, RandomActive)
hlp.Button("Insert Segment", 280, 500, 180, 35, CursorActive)
hlp.Button("Exit", 500, 5, 100,
30, sys.exit)
back = hlp.ButtonWithReturn("Back", 900, 5, 100, 30, 1)
if back > 0: # if back has a value, which means it has been clicked, stop the bigger loop that we started, i.e. the game loop, and break the game loop
break
# calls the helper function
nxt = hlp.ButtonWithReturn("Next", 700, 5, 100, 30, 1)
if nxt > 0: # so if the next button is clicked
# calls for the description function
hlp.Description(dsb.bo_desc)
text = ["If you are learning to play, it is recommended", # and displays this text
"you chose your own starting area."]
# pygame method to draw an object
pygame.draw.rect(display, line_colour, [500, 50, 750, 490], 2)
# adding the text on the given location
hlp.AddText("Bentley-Ottmann Algorithm", (520, 70))
# adding the text on the given location
hlp.AddText("Event Queue:", (520, 120))
        # creating index i and the x, y positions to display on the GUI; this assigns several values to a tuple at once
i, o_x, o_y = 0, 540, 150
'''
iterating through the existing values in the eventQueue
because we don't want the texts to overlap on the screen
most of the numbers below are finetuning to prevent overlapping of the texts for the order list and the eventqueue list
'''
for val in eventQueue:
# val is each text saved in the eventQueue, and these values are not to overlap on the screen
hlp.AddText(val, (o_x, o_y), (255, 255, 255))
o_x += 30 # therefore for each value, I'm adding +30 for each one
i += 1 # adding one to the index to access to the next item
            if i % 23 == 0:  # the 23rd item reaches the rightmost point on the screen, so the next one moves down the y axis
                o_x = 540  # text is on the edge; there is no more horizontal space
# text needs to appear on the next line, so adding 20 onto the y axis, vertical move
o_y += 20
hlp.AddText("Order List:", (520, 200)) # adding the text
i, o_x, o_y = 0, 540, 230
for val in orderList: # same as above iteration but for the order list this time
hlp.AddText(val, (o_x, o_y), (255, 255, 255))
o_x += 50 # adding to x axis
i += 1 # increasing the index
if i % 14 == 0: # this is 14, because the text has less horizontal space to appear.
o_x = 540 # reached the end of the line
o_y += 20 # go to the next line, move vertical, thus adding to the y value
# adding the text on the given location
hlp.AddText("Efficiency O((n+k)logn):", (520, 480))
# adding the text on the given location
hlp.AddText(str(efficiency), (540, 505), (255, 255, 255))
# updates the screen every turn
pygame.display.flip()
# will not run more than 30 frames per second
clock.tick(30)
intro.Introduction2() # calls back the introduction function
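# Complexity note for Bentley-Ottmann: the sweep line processes n segment
# endpoints plus k intersection events, which is why the Efficiency read-out
# above computes (n + k) * log10(n). For n = 10 segments with k = 5
# intersections, that is (10 + 5) * log10(10) = 15 in this script's units.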
def ShamosHoeyMain():
'''
this function is the Shamos-Hoey Algorithm function with main display loop
'''
global cursor, lines, colours, points, randomLine, randomTimer, run, stop, clear, intersect_name
    global display, line_name  # these lines access the remaining necessary global variables
pygame.display.set_caption("Shamos-Hoey Algorithm") # adding a caption
# setting the display for the algorithm
display = pygame.display.set_mode((1280, 550), pygame.FULLSCREEN)
    cursor = False  # the lines below set default values before the main loop (while True) begins
    randomLine = False  # again the default placeholder for the random line
    clickedPos = []  # default placeholder value for position
    firstPoint = None  # the first intersection point identified
    efficiency = 0  # default placeholder value for algorithm efficiency
    eventQueue = []  # event queue placeholder, empty now
run = False
x = 50 # location of the x value on the screen
back = 0 # if this becomes one, you go back
while True: # starting the game loop
# pygame method to fill the screen, takes colours and a display object
display.fill((0, 0, 0))
# pygame method, iterates over the events in pygame to determine what we are doing with every event
for event in pygame.event.get():
if event.type == pygame.QUIT: # this one quits
pygame.quit() # putting the quit pygame method
exit() # takes the user from GUI to the script for exiting
            # detect when a keyboard key has been released
if event.type == pygame.KEYUP:
if event.key == pygame.K_ESCAPE: # if that keyboard key is ESC
exit() # call for the exit function.
'''
if mouse clicked on the below coordinates, create a line
pygame GUI property detecting when mouse click is on
MOUSEBUTTONDOWN and MOUSEBUTTONUP should be used as a small loops so that the computer can understand when that instance of the mouse movement is over
'''
if cursor == True and event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1: # pygame method defining the button in the GUI
                    mouse_pos = pygame.mouse.get_pos()  # the current mouse position on the screen
                    # mouse_pos[0] is the cursor's X coordinate and mouse_pos[1] is its Y coordinate
                    if 50 < mouse_pos[0] < 450 and 50 < mouse_pos[1] < 450:
                        # record the clicked position, corresponding to the position of the mouse
                        clickedPos.append(mouse_pos)
if event.type == pygame.MOUSEBUTTONUP:
randomTimer = True # turning the random from false to true so the timer can activate
        for i in range(0, 41):  # grid coordinates for drawing; range(0, 41) iterates i from 0 to 40
# for the pygame method of drawing below, we need to determine the position on the screen as a tuple object
pos = i * 10 + 50
# pygame method, takes display, colour, and positions of where the lines start and end. i.e, starts in (50,pos) ends in (450,pos), 1 at the end is the width of the line
pygame.draw.line(display, line_colour, (50, pos), (450, pos), 1)
# same as above but takes pos as y, by doing so and iterating through the range, you cover all the plane
pygame.draw.line(display, line_colour, (pos, 50), (pos, 450), 1)
        i = 0  # reset the index used to step through the data structures
for line in lines: # iterating through lines which is a global variable for the priority queue aka eventQueue
'''
having [i] next to colour allows me to colour each line differently
each line has tuple object in the global variable
line[0] accesses the nth item's first coordinates in the iteration and drawing ends in the line[1], nth item's second object
'''
pygame.draw.line(display, colours[i], line[0], line[1], 1)
# calling the addText function that was created before in the script
hlp.AddText(line_name[i], line[0])
'''
nested indexing, as I am accessing the first item of the first item in the line object which is in the lines global variable
            the result of this nested indexing is the x coordinate of a point saved in a tuple
            '''
if x == line[0][0]:
                # if the beginning point's x coordinate equals the preset x, append the event queue with this line's name
eventQueue.append(line_name[i])
if x == line[1][0]: # again the nested indexing
# removes the line from the queue if the end of the line's x coordinates equals to x variable
eventQueue.remove(line_name[i])
# increasing the index number at the end of the iteration loop so I can access the other items saved
i += 1
if stop == True: # tells to stop if stop is clicked
run = False # turns off the run, if it is stop, then run must be false
x = 50 # set x to default
            # stop must be reset to False after it is used; otherwise it would remain True forever (a logic error)
stop = False
eventQueue = [] # empties the eventQueue
if run == True: # tells it to start if run is clicked
cursor = False # when it is running cursor can't draw any newlines
randomLine = False # again no new random lines too
x += 1 # since I am scanning, the x value should scan the screen pixel after pixel, thus, adding 1 to the x value
# this draws the scan line on the screen
pygame.draw.line(display, hlp.red, (x, 50), (x, 450), 1)
# iterating through points to draw the intersection circle in the run
for point in points:
# if the first item's x value is smaller or equal to the present x variable
if point[0] == x:
firstPoint = point # having a designated first point variable
run = False # setting variables to default.
x = 50 # setting variables to default.
eventQueue = [] # setting variables to default.
efficiency = 0 # setting variables to default.
break # break the loop
n = len(lines) # number of existing lines
if n > 0: # if the number of lines are more than 0, it means that there are existing lines
efficiency = n * math.log10(n) # measure the algorithm's speed
'''
            since the display starts at the 50th pixel, subtract 50, then use integer division (//8) to convert the x pixel value to grid coordinates
this is so it can be used to name the incident of intersection
'''
c = (x - 50) // 8
# adding the text as well for the intersection
hlp.AddText("(X, Y) = (" + str(c) + ", 0)", (200, 470),
hlp.white) # adding the intersection
if firstPoint != None: # if there is a first point
# use this pygame method of drawing a circle.
pygame.draw.circle(display, hlp.white, firstPoint, 3)
if cursor == True: # arrange the mouse cursors
pygame.mouse.set_visible(False)
pos = pygame.mouse.get_pos() # this is a pygame method for mouse cursor
            # replace the cursor with the existing pointer image; the pygame method display.blit draws a sprite on the screen
display.blit(pointer, pos)
# if you clicked on the screen, this checks the number of clicks and starts drawing
if len(clickedPos) > 0:
pygame.draw.circle(display, hlp.white, clickedPos[0], 2)
# if clicked then draw this
pygame.draw.line(display, hlp.white, clickedPos[0], pos, 1)
                if len(clickedPos) >= 2:  # clicked two or more times, giving a beginning and an end for the line
                    # then add the line using the points saved in clickedPos: [0] is the beginning and [1] is the end
AddNewLine([clickedPos[0], clickedPos[1]])
cursor = False # disable the cursor after drawing
clickedPos = [] # empty the placeholder after drawing the line
else: # now you are entering into the scene of mouse action
# again pygame GUI method enabling mouse action on the screen to interact
pygame.mouse.set_visible(True)
if randomLine == True: # if mouse clicked on the randomline
GenerateRandomLine() # then create a random line, calling the existing function
randomLine = False # turn it off after drawing so it would not keep drawing forever
randomTimer = False # and stop the timer so it won't go forever
if run == True and x > 450: # if run function is enabled however the x value is out of the screen
x = 50 # put x back to the default of 50
run = False # and disable the run
if clear == True: # clear action is enabled, clear back all the placeholders to default
lines = [] # everything is back to the default value
colours = [] # everything is back to the default value
points = [] # everything is back to the default value
efficiency = 0 # everything is back to the default value
firstPoint = None # everything is back to the default value
eventQueue = [] # everything is back to the default value
intersect_name = [] # everything is back to the default value
line_name = [] # everything is back to the default value
x = 50 # everything is back to the default value
run = False # everything is back to the default value
clear = False # everything is back to the default value
'''
add the frame's text positions and labels by calling the existing helper
functions (Button and AddText), giving each a text, a position and, when
applicable, an action
'''
hlp.AddText("(0,0)", (30, 25))
hlp.AddText("(50,0)", (430, 25))
hlp.AddText("(0,50)", (30, 450))
hlp.AddText("(50,50)", (430, 450))
# drawing buttons and determining positions
hlp.Button("Run", 80, 5, 100, 35, RunActive)
hlp.Button("Stop", 200, 5, 100, 35, StopActive)
hlp.Button("Clear", 320, 5, 100, 30, ClearActive)
hlp.Button("Random Segment", 50, 500, 180, 30, RandomActive)
hlp.Button("Insert Segment", 280, 500, 180, 35, CursorActive)
hlp.Button("Exit", 500, 5, 100,
30, sys.exit)
back = hlp.ButtonWithReturn("Back", 900, 5, 100, 30, 1)
if back > 0:  # if back was clicked, stop the bigger (game) loop and break out of it
break
# calls the helper function
nxt = hlp.ButtonWithReturn("Next", 700, 5, 100, 30, 1)
if nxt > 0: # so if the next button is clicked
# calls for the description function
hlp.Description(dsb.sh_desc)
# pygame method to draw an object
pygame.draw.rect(display, line_colour, [500, 50, 750, 490], 2)
# adding caption, frame size, texts, buttons and their positions
# adding the text on the given location
hlp.AddText("Shamos-Hoey Algorithm", (520, 70))
# adding the text on the given location
hlp.AddText("Event Queue:", (520, 120))
# create the index i and the x, y text positions for the GUI via tuple unpacking
i, o_x, o_y = 0, 540, 150
'''
iterate through the existing values in the eventQueue; most of the numbers
below are fine-tuning to keep the order-list and event-queue texts from
overlapping on the screen
'''
for val in eventQueue:
# val is each text saved in the eventQueue, and these values are not to overlap on the screen
# calling the helper function.
hlp.AddText(val, (o_x, o_y), hlp.white)
o_x += 30 # adding 30 to the x-axis for each item.
i += 1 # adding one to the index to access to the next item
if i % 23 == 0:  # every 23rd item reaches the right edge of the screen, so the next one must move down
    o_x = 540  # text is at the edge; there is no more horizontal space
# text needs to appear on the next line, so adding 20 onto the y axis, vertical move
o_y += 20 # go to the next line by adding 20 to the y axis
# adding the text on the given location
hlp.AddText("Efficiency O(nlogn):", (520, 200))
# adding the text on the given location
hlp.AddText(str(efficiency), (540, 230), hlp.white)
# updates the screen every turn
pygame.display.flip()
# will not run more than 30 frames per second
clock.tick(30)
intro.Introduction2() # calls back the introduction function
def Efficiency():
'''
this function compares the efficiency of the algorithms
'''
pygame.display.set_caption("Efficiency Comparison")
display = pygame.display.set_mode(
(1280, 550), pygame.FULLSCREEN | pygame.DOUBLEBUF)
n = 0 # number segment
k = 0 # intersection
posX1 = 180 # position to appear
posX2 = 400 # position to appear
posY = 20 # position to appear
bPos = 450 # position to appear
bo = 0 # bentley-ottmann placeholders
bf = 0 # brute-force placeholders
sh = 0 # shamos-hoey placeholders
bog = 0  # Bentley-Ottmann placeholders
bfg = 0 # brute-force placeholders
shg = 0 # shamos-hoey placeholders
while True:  # main loop: handle the pygame events (quit, keyboard, mouse)
    display.fill((0, 0, 0))
    # display.blit(hlp.dscbg,(0,0))
    # iterate over the pending pygame events to decide what to do with each one
for event in pygame.event.get():
if event.type == pygame.QUIT: # this one quits
pygame.quit() # putting the quit pygame method
exit() # takes the user from GUI to the script for exiting
# detect when a keyboard key is released
if event.type == pygame.KEYUP:
if event.key == pygame.K_ESCAPE: # if that keyboard key is ESC
exit() # call for the exit function.
# handle mouse button clicks
if event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1: # pygame method defining the button in the GUI
pos = pygame.mouse.get_pos() # displays the mouse position on the screen
# check whether the click landed inside the 'Insert Line' button area
if posX1 < pos[0] < posX1 + 130 and posY < pos[1] < posY + 60:
# getting the number of lines
lineTxt = hlp.InsertNumber("Line Number:")
if lineTxt != "": # if the string is not empty
try:
# input gives string so this one turns it into an integer
n = int(lineTxt)
except:  # the input could not be parsed
    n = 0  # fall back to n = 0 rather than crashing on bad input, i.e. lineTxt cannot be converted to an integer
# same as above but for the intersect number
elif posX2 < pos[0] < posX2 + 170 and posY < pos[1] < posY + 60:
intersectTxt = hlp.InsertNumber("Intersect Number :")
if intersectTxt != "":
try:
k = int(intersectTxt)
except:
k = 0
if n > 0:
# using established algorithm efficiency calculation for every algorithm
bo = int((n + k) * math.log10(n))
bog = bo # number to be used in the graph string
# using established algorithm efficiency calculation for every algorithm
bf = int(n * n)
bfg = bf # number to be used in the graph string
# using established algorithm efficiency calculation for every algorithm
sh = int(n * math.log10(n))
shg = sh # number to be used in the graph string
if bo > 350 or bf > 350 or sh > 350:  # rescale all three so the tallest bar fits the 350-pixel chart below
m = max(bo, bf, sh)
bo = int((bo / m) * 350)
bf = int((bf / m) * 350)
sh = int((sh / m) * 350)
# clamp zeros to 1 so the graph bars below always have a visible height
if bo == 0:
    bo = 1
if bf == 0:
    bf = 1
if sh == 0:
    sh = 1
# setting the texts and buttons
hlp.Button("Insert Line", posX1, posY, 130, 30, None)
hlp.Button("Insert Intersect", posX2, posY, 160, 30, None)
hlp.AddText("Line: " + str(n), (600, 20), hlp.white)
hlp.AddText("Intersect: " + str(k), (600, 50), hlp.white)
hlp.AddText("BF", (180, 460), hlp.white)
hlp.AddText("BO", (330, 460), hlp.white)
hlp.AddText("SH", (480, 460), hlp.white)
# pygame method, takes display, colour, and positions of where the lines start and end
pygame.draw.line(display, line_colour, (100, 100), (100, 500), 2)
# pygame method, takes display, colour, and positions of where the lines start and end
pygame.draw.line(display, line_colour, (50, 450), (650, 450), 2)
if bf > 0: # comparing here which one is better, if bf exists
# comparing here which one is better
hlp.AddText(str(bfg), (165, bPos - bf - 30), hlp.white)
pygame.draw.rect(display, hlp.button_colour, (165, bPos - bf, 50, bf)
) # drawing a rectangular bar on the screen
if bo > 0: # comparing here which one is better, if bo exists
# comparing here which one is better
hlp.AddText(str(bog), (315, bPos - bo - 30), hlp.white)
pygame.draw.rect(display, hlp.button_colour, (315, bPos - bo, 50, bo)
) # drawing a rectangular bar on the screen
if sh > 0: # comparing here which one is better, if sh exists
# comparing here which one is better
hlp.AddText(str(shg), (465, bPos - sh - 30), hlp.white)
# draw a rectangular bar on the screen; bPos minus the algorithm's value gives the bar's top edge
pygame.draw.rect(display, hlp.button_colour,
(465, bPos - sh, 50, sh))
# setting and drawing the next/back buttons
hlp.Button("Exit", 350, 500, 100,
30, sys.exit)
back = hlp.ButtonWithReturn("Back", 650, 500, 100, 30, 1)
if back > 0:
break
nxt = hlp.ButtonWithReturn("Next", 500, 500, 100, 30, 1)
if nxt > 0:
hlp.Description(dsb.effic_desc)
pygame.display.flip() # updates the screen every turn
clock.tick(60)  # will not run more than 60 frames per second
intro.Introduction2() # calls back the introduction function
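# A minimal standalone sketch (not part of the original program; it assumes the
# math import at the top of this script): the three cost estimates used above,
# where n is the number of segments and k the number of intersections.
def _complexity_demo(n=100, k=10):
    bf = n * n                    # brute force: O(n^2)
    sh = n * math.log10(n)        # Shamos-Hoey: O(n log n)
    bo = (n + k) * math.log10(n)  # Bentley-Ottmann: O((n + k) log n)
    return bf, sh, bo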
def Efficiency2():
'''
this function compares the efficiency of the algorithms
'''
pygame.display.set_caption("Efficiency Comparison")
display = pygame.display.set_mode(
(1280, 550), pygame.FULLSCREEN | pygame.DOUBLEBUF)
n = range(10, 1001)  # segment counts from 10 to 1000
bet = False
posX1 = 180 # position to appear
posX2 = 400 # position to appear
posY = 20 # position to appear
bPos = 450 # position to appear
sheffc = [i * math.log10(i) for i in n]  # list comprehension for the SH algorithm's efficiency
bfeffc = [i**2 for i in n]  # list comprehension for the BF algorithm's efficiency
boeffc = [((i + (((i**2) - i) / 2)) * math.log10(i)) for i in n]  # list comprehension for the BO algorithm's efficiency
topalg = sheffc + bfeffc + boeffc  # compile all the efficiencies into one list
mx = max(topalg) # getting the max value from the list
mn = min(topalg) # getting the min value from the list
transsheffc = [TransValue(i, mx, mn) for i in sheffc]  # normalise the SH values across the three efficiency lists
transshefc2 = random.sample(transsheffc, 550)  # take 550 values so each pixel column is represented equally
transshefc2.sort()  # sort in ascending order
shno = 0  # index used while building the pixel points
shpoints = []  # placeholder list
# build the display pixel points for the SH algorithm: the first tuple item is
# the x value, the second the y value; the later slices get a small vertical
# offset so the SH curve stays visible next to the other curves
for i in transshefc2[:200]:
    shpoints.append((100 + shno, 450 - int(i)))
    shno += 1
for i in transshefc2[200:349]:
    shpoints.append((100 + shno, 450 - (int(i + 2))))
    shno += 1
for i in transshefc2[349:]:
    shpoints.append((100 + shno, 450 - (int(i + 4))))
    shno += 1
transbfeffc = [TransValue(i, mx, mn) for i in bfeffc]  # same as above, but for the BF algorithm
transbfeffc2 = random.sample(transbfeffc, 550)
transbfeffc2.sort()
bfno = 0
bfpoints = []
for i in transbfeffc2:
    bfpoints.append((100 + bfno, 450 - int(i)))
    bfno += 1
transboeffc = [TransValue(i, mx, mn) for i in boeffc]  # same as above, but for the BO algorithm
transboeffc2 = random.sample(transboeffc, 550)
transboeffc2.sort()
bono = 0
bopoints = []
for i in transboeffc2:
    bopoints.append((100 + bono, 450 - int(i)))
    bono += 1
while True:  # main loop: handle the pygame events (quit, keyboard, mouse)
    display.fill((0, 0, 0))
    # display.blit(hlp.dscbg,(0,0))
    # iterate over the pending pygame events to decide what to do with each one
for event in pygame.event.get():
if event.type == pygame.QUIT: # this one quits
pygame.quit() # putting the quit pygame method
exit() # takes the user from GUI to the script for exiting
# detect when a keyboard key is released
if event.type == pygame.KEYUP:
if event.key == pygame.K_ESCAPE: # if that keyboard key is ESC
exit() # call for the exit function.
# handle mouse button clicks
if event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1: # pygame method defining the button in the GUI
pos = pygame.mouse.get_pos() # displays the mouse position on the screen
# check whether the click landed inside the Start button area
if posX2 < pos[0] < posX2 + 170 and posY < pos[1] < posY + 60:
bet = True
hlp.Button("Start", posX2, posY, 160, 30, None)
hlp.AddText("Lines: 10, 100, 1000", (600, 20), hlp.white)
hlp.AddText("10", (115, 460), hlp.white)
hlp.AddText("100", (350, 460), hlp.white)
hlp.AddText("1000", (650, 460), hlp.white)
hlp.AddText("max", (50, 100), hlp.white)
hlp.AddText("0", (50, 460), hlp.white)
sidefont = pygame.font.Font(bitterfont, 16)
sidetext = sidefont.render("Algorithm Efficiency", True, hlp.white)
sidetext = pygame.transform.rotate(sidetext, 90)
display.blit(sidetext, (70, 235))
# pygame method, takes display, colour, and positions of where the lines start and end
pygame.draw.line(display, line_colour, (100, 100), (100, 500), 2)
# pygame method, takes display, colour, and positions of where the lines start and end
pygame.draw.line(display, line_colour, (50, 450), (650, 450), 2)
if bet:
pygame.draw.lines(display, (62, 150, 81), False, bfpoints, 4)
pygame.draw.lines(display, (255, 255, 0), False, shpoints, 4)
pygame.draw.lines(display, (255, 0, 0), False, bopoints, 4)
hlp.AddText("Brute Force", (750, 150), hlp.white)
hlp.AddText("Bentley-Ottmann", (750, 250), hlp.white)
hlp.AddText("Shamos-Hoey", (750, 350), hlp.white)
pygame.draw.line(display, (62, 150, 81), (720, 160), (740, 160), 4)
pygame.draw.line(display, (255, 0, 0), (720, 260), (740, 260), 4)
pygame.draw.line(display, (255, 255, 0), (720, 360), (740, 360), 4)
hlp.AddText("n=10;100;1000", (720, 390), hlp.white)
hlp.AddText("Brute Force = " + str(round(bfeffc[9])) + "; " + str(
round(bfeffc[499])) + "; " + str(round(bfeffc[989])), (720, 405), hlp.white)
hlp.AddText("Bentley-Ottmann = " + str(round(boeffc[9])) + "; " + str(
round(boeffc[499])) + "; " + str(round(boeffc[989])), (720, 420), hlp.white)
hlp.AddText("Shamos-Hoey = " + str(round(sheffc[9])) + "; " + str(
round(sheffc[499])) + "; " + str(round(sheffc[989])), (720, 435), hlp.white)
hlp.Button("Exit", 350, 500, 100,
30, sys.exit)
back = hlp.ButtonWithReturn("Back", 650, 500, 100, 30, 1)
if back > 0:
break
nxt = hlp.ButtonWithReturn("Next", 500, 500, 100, 30, 1)
if nxt > 0:
hlp.Description(dsb.effic_desc)
pygame.display.flip() # updates the screen every turn
clock.tick(60)  # will not run more than 60 frames per second
intro.Introduction2() # calls back the introduction function
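# Hedged sketch (not the original implementation): TransValue, used above, is
# defined elsewhere in this script; consistent with its call sites
# TransValue(i, mx, mn) and the 350-pixel chart height, a min-max normalisation
# could look like this (it assumes mx > mn).
def _trans_value_sketch(v, mx, mn, scale=350):
    return (v - mn) / (mx - mn) * scale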
def AddNewColour():
'''
this function selects random colours and appends the global colours variable
used for adding random colour to each line
'''
global colours # accessing the variable
r = random.randrange(1, 255) # choosing the red tone
g = random.randrange(1, 255) # choosing the green tone
b = random.randrange(1, 255) # choosing the blue tone
randomColour = pygame.Color(r, g, b) # appointing the colour
colours.append(randomColour) # appending the global variable
def AddNewLine(newLine):
'''
this function adds a new line to the list
it iterates through the lines list item and checks whether they intersect
if so, it appoints a name for the intersecting lines and appends the intersect lines list
'''
global lines, line_name, intersect_name
name = str(1 + len(lines)) # appointing a name
i = 0 # appointing default index for the coming iteration below
for line in lines:
# checking whether new line and existing line intersect
status = CheckIntersect(newLine[0], newLine[1], line[0], line[1])
if status:
intsec_name = line_name[i] + "." + name # appointing a name
intersect_name.append(intsec_name) # appending the list
i += 1 # increasing the index by one
l = newLine
# indexing the newline's points and sorting from start to end in the next line
if(newLine[0][0] > newLine[1][0]):
l = [newLine[1], newLine[0]]
lines.append(l) # appending the new line
line_name.append(name) # appending the name of the new line.
AddNewColour()
ChangeColour()
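# CheckIntersect, called above, is defined earlier in this script. As an
# illustrative, hypothetical stand-in (not the author's implementation), a
# standard orientation-based test for segments p1-p2 and p3-p4 in general
# position looks like this; collinear and touching cases are ignored.
def _segments_intersect_sketch(p1, p2, p3, p4):
    def orient(a, b, c):
        # sign of the cross product (b - a) x (c - a)
        return (b[0] - a[0]) * (c[1] - a[1]) - (b[1] - a[1]) * (c[0] - a[0])
    d1 = orient(p3, p4, p1)
    d2 = orient(p3, p4, p2)
    d3 = orient(p1, p2, p3)
    d4 = orient(p1, p2, p4)
    return ((d1 > 0) != (d2 > 0)) and ((d3 > 0) != (d4 > 0))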
def ChangeColour():
'''
this function changes the line colours to white for the brute force algorithm
it iterates through the different lines and appoints a new colour for each line
'''
global intersect_name, colours, brutecolours
brutecolours = colours[:] # copies the colours variable
for name in intersect_name: # iterates through the items
sp = name.split(".") # splits the string object
# convert each split name part to an integer
n1 = int(sp[0])
n2 = int(sp[1])
brutecolours[n1 - 1] = hlp.white # making them white
brutecolours[n2 - 1] = hlp.white # making them white
def CursorActive():
'''
accessing and activating the cursor image to be used
this is for when the user wishes to draw their own line segments
'''
global cursor
cursor = True # activating the cursor
def RandomActive():
'''
accessing the existing global variables of random timer and lines
if random timer is on create random lines
this activates the action for the button, i.e. it gives the action to the button
'''
global randomLine, randomTimer
if randomTimer == True: # if random timer is on
randomLine = True # create the random lines
def RunActive():
'''
empties the order list and runs the system when the button is clicked
'''
global run, orderList
run = True
orderList = [] # empties the list object
def StopActive():
'''
stops the system when stop button is clicked
'''
global stop
stop = True
def ClearActive():
'''
clears existing system
'''
global clear
clear = True
# activate flag for introduction menu
def StartGame():
global start # access the global variable
start = True # enable it
| 56.279425
| 200
| 0.61765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 34,756
| 0.554862
|
544732e628a00b56caac8c9cd412468f1e74169a
| 8,514
|
py
|
Python
|
iologik/e2210.py
|
shannon-jia/iologik
|
bda254ee1cdb3f4d724fbb9d6fe993257f1cce52
|
[
"MIT"
] | null | null | null |
iologik/e2210.py
|
shannon-jia/iologik
|
bda254ee1cdb3f4d724fbb9d6fe993257f1cce52
|
[
"MIT"
] | null | null | null |
iologik/e2210.py
|
shannon-jia/iologik
|
bda254ee1cdb3f4d724fbb9d6fe993257f1cce52
|
[
"MIT"
] | null | null | null |
import aiohttp
import asyncio
import async_timeout
import logging
from collections import namedtuple, deque
from .events import Events
from html.parser import HTMLParser
log = logging.getLogger(__name__)
class Parser(HTMLParser):
def handle_starttag(self, tag, attrs):
log.debug("Encountered a start tag: {}".format(tag))
def handle_endtag(self, tag):
log.debug("Encountered an end tag : {}".format(tag))
def handle_data(self, data):
log.debug("Encountered some data : {}".format(data))
if self.callback:
self.callback(data)
def set_callback(self, callback):
self.callback = callback
class E2210(object):
''' Moxa iologik E2210 module
12 inputs and 8 outputs
'''
MAX_INPUTS = 12
MAX_OUTPUTS = 8
GET_PATH = 'getParam.cgi'
SET_PATH = 'setParam.cgi'
SYS_INFO = ['DATE', 'TIME', 'IP', 'LOC', 'DESC',
'FWR_V', 'MOD_NAME', 'SN_NUM', 'MAC_ADDR']
def __init__(self, loop,
url=None,
events=None,
line=0,
addr=1,
handle_events=None):
self.loop = loop or None
self.url = url
self.line = line
self.addr = addr
self.events = events or Events()
self.parser = Parser()
self.parser.set_callback(self.received)
self.handle_events = handle_events
self.connection = None
self.changed = True
self.fail = True
self.command = namedtuple('Command', 'name method params completed')
self.setting = {'System': {},
'DIMode': ['DI' for i in range(self.MAX_INPUTS)],
'DIStatus': [0 for i in range(self.MAX_INPUTS)],
'DIFilter': [200 for i in range(self.MAX_INPUTS)],
'DOMode': ['DO' for i in range(self.MAX_OUTPUTS)],
'DOStatus': [1 for i in range(self.MAX_OUTPUTS)]
}
self.CMDS = {
'get_sys_info': ('get',
'&'.join(['{}=?'.format(i) for i in self.SYS_INFO])),
'get_di_mode': ('get',
'&'.join(['DIMode_{:02d}=?'.format(i) for i in range(self.MAX_INPUTS)])),
'set_di_mode': ('set',
'&'.join(['DIMode_{:02d}=0'.format(i) for i in range(self.MAX_INPUTS)])),
'get_di_status': ('get',
'&'.join(['DIStatus_{:02d}=?'.format(i) for i in range(self.MAX_INPUTS)])),
'set_di_filter_low': ('set',
'&'.join(['DIFilter_{:02d}={}'.format(i, self.setting['DIFilter'][i]) for i in range(0, self.MAX_OUTPUTS//2)])),
'set_di_filter_high': ('set',
'&'.join(['DIFilter_{:02d}={}'.format(i, self.setting['DIFilter'][i]) for i in range(self.MAX_OUTPUTS//2, self.MAX_OUTPUTS)])),
'get_do_mode': ('get',
'&'.join(['DOMode_{:02d}=?'.format(i) for i in range(self.MAX_OUTPUTS)])),
'set_do_mode': ('set',
'&'.join(['DOMode_{:02d}=0'.format(i) for i in range(self.MAX_OUTPUTS)])),
'get_do_status': ('get',
'&'.join(['DOStatus_{:02d}=?'.format(i) for i in range(self.MAX_OUTPUTS)])),
'set_do_status': ('set',
'&'.join(['DOStatus_{:02d}=1'.format(i) for i in range(self.MAX_OUTPUTS)])),
}
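# e.g. the 'get_di_status' entry above expands to the query string
# 'DIStatus_00=?&DIStatus_01=?&...&DIStatus_11=?'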
self.cmd_deque = deque()
for name in self.CMDS:
self.append_cmd(name)
# start to poll http server
self.restart_poll()
def poll(self):
pass
def do_output(self, addr, which, action, deadtime):
if which >= self.MAX_OUTPUTS or which < 0:
return
status = (action == 'Activate' and 0 or 1)
params = 'DOStatus_{:02d}={}'.format(which, status)
self.cmd_deque.appendleft(self.command('do_outputs',
'set', params, False))
def append_cmd(self, cmd_name=None):
cmd = self.CMDS.get(cmd_name)
if cmd:
self.cmd_deque.append(self.command(cmd_name,
cmd[0], cmd[1], False))
def received(self, data):
log.debug("Encountered some data : {}".format(data))
l = data.split('=')
if len(l) != 2:
return
reg = l[0]
val = l[1]
if reg in self.SYS_INFO:
self.setting['System'][reg] = val
elif reg.startswith('DIMode'):
n = int(reg.split('_')[1])
if n < 0 or n >= self.MAX_INPUTS:
return
self.setting['DIMode'][n] = (val == '0' and 'DI' or 'COUNTER')
elif reg.startswith('DIStatus'):
n = int(reg.split('_')[1])
if n < 0 or n >= self.MAX_INPUTS:
return
self.setting['DIStatus'][n] = (val == '0' and 'ALARM' or 'NORMAL')
event_type = 'Auxiliary Input'
event = 'MXI_{}_{}_{}'.format(self.line, self.addr, n)
condition = (val == '0' and True or False)
self.events.append(event, event_type, condition)
elif reg.startswith('DIFilter'):
n = int(reg.split('_')[1])
if n < 0 or n >= self.MAX_INPUTS:
return
self.setting['DIFilter'][n] = int(val)
elif reg.startswith('DOMode'):
n = int(reg.split('_')[1])
if n < 0 or n >= self.MAX_OUTPUTS:
return
self.setting['DOMode'][n] = (val == '0' and 'DO' or 'PULSE')
elif reg.startswith('DOStatus'):
n = int(reg.split('_')[1])
if n < 0 or n >= self.MAX_OUTPUTS:
return
self.setting['DOStatus'][n] = (val == '0' and 'OFF' or 'ON')
else:
log.warn("Do not care it: {}".format(data))
def processor(self):
if not self.events:
return
if callable(self.handle_events):
return self.handle_events(self.events)
else:
log.warn('No master to process {}'.format(self.events))
def restart_poll(self):
asyncio.ensure_future(self.loop_polling())
async def _fetch(self, params, method='get'):
endpoint = (method == 'get' and self.GET_PATH or self.SET_PATH)
async with aiohttp.ClientSession() as session:
with async_timeout.timeout(20):
async with session.get('{}/{}?{}'.format(self.url,
endpoint,
params)) as response:
if response.status >= 200 and response.status <= 300:
self.parser.feed(await response.text())
async def _request(self):
try:
self.cmd = self.cmd_deque.popleft()
except IndexError:
self.append_cmd('get_di_status')
self.append_cmd('get_do_status')
self.cmd = self.cmd_deque.popleft()
log.debug('Request: {}'.format(self.cmd.name))
x = await self._fetch(self.cmd.params,
method=self.cmd.method)
async def loop_polling(self):
try:
while True:
try:
await self._request()
self.connection = True
self.processor()
except Exception as err:
log.error("Cmd {} failed, with Error: {} "
"Will retry in {} seconds"
.format(self.cmd.name, err, 10))
self.connection = False
if self.connection is not True:
self.changed = True
self.cmd_deque.append(self.cmd)
self.fail = True
await asyncio.sleep(10)
else:
self.changed = False
self.fail = False
log.info("{} Successfully requested. ".format(self.cmd.name))
# poll the connection state again after a short delay (0.5 s)
await asyncio.sleep(0.5)
except asyncio.CancelledError:
self.connection = False
except Exception as err:
log.error("Failed to access http server with Error: {}".format(err))
self.connection = False
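# Hedged usage sketch (not part of the original module; the URL is a
# placeholder for a reachable iologik web server):
if __name__ == '__main__':
    _loop = asyncio.get_event_loop()
    _module = E2210(_loop, url='http://192.168.127.254')
    _loop.run_forever()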
| 38.7
| 161
| 0.499765
| 8,301
| 0.974982
| 0
| 0
| 0
| 0
| 2,171
| 0.254992
| 1,355
| 0.15915
|
5448e80da68c244752c3380cbc4f039308ae3d65
| 7,009
|
py
|
Python
|
apps/cmdb/verify/operate.py
|
yanshicheng/super-ops
|
dd39fe971bfd0f912cab155b82e41a09aaa47892
|
[
"Apache-2.0"
] | null | null | null |
apps/cmdb/verify/operate.py
|
yanshicheng/super-ops
|
dd39fe971bfd0f912cab155b82e41a09aaa47892
|
[
"Apache-2.0"
] | 1
|
2022-01-17T09:34:14.000Z
|
2022-01-18T13:32:20.000Z
|
apps/cmdb/verify/operate.py
|
yanshicheng/super_ops
|
dd39fe971bfd0f912cab155b82e41a09aaa47892
|
[
"Apache-2.0"
] | null | null | null |
from ..models import Classify, Fields, Asset, AssetBind, ClassifyBind
from django.db.models import Q
from collections import OrderedDict
from django.forms.models import model_to_dict
class OperateInstance:
@staticmethod
def get_classify(id):
"""通过ID 查找指定分类表"""
return Classify.objects.filter(id=id).first()
# get the child tables of a classify table
@staticmethod
def get_children_classify(p_tid):
"""通过 主表ID 查找 子分类表 pid=id"""
children_classify = Classify.objects.filter(pid=p_tid)
if children_classify:
return children_classify
return None
@staticmethod
def get_parent_classify_classify(pid):
parent_classify_classify_obj = Classify.objects.filter(id=pid).first()
if parent_classify_classify_obj:
return parent_classify_classify_obj
return None
# parent_classify
@staticmethod
def get_parent_classify_bind(pid):
"""通过分类表主ID 查找 关系绑定表数据"""
parent_bind_obj = ClassifyBind.objects.filter(parent_classify_id=pid)
if parent_bind_obj:
return parent_bind_obj
return None
@staticmethod
def get_child_classify_bind(cid):
"""通过 child_classify_id 获取表关系记录"""
child_classify_obj = ClassifyBind.objects.filter(child_classify_id=cid)
if child_classify_obj:
return child_classify_obj
return None
@staticmethod
def get_classify_bind(pid, cid):
"""根据 parent_classify_id 和 child_classify_id 返回分类关系表"""
classify_bind_obj = ClassifyBind.objects.filter(
parent_classify_id=pid, child_classify_id=cid
).first()
if classify_bind_obj:
return classify_bind_obj
return None
@staticmethod
def get_abs_asset_bind(p_id, c_id):
"""根据 parent_asset_id child_asset_id 查询 asset_bind 记录"""
asset_bind = AssetBind.objects.filter(
parent_asset_id=p_id, child_asset_id=c_id
).first()
if asset_bind:
return asset_bind
return None
@staticmethod
def get_asset_bind(t_id):
"""
根据 classify_bind_id 查找 资产绑定记录
"""
asset_bind = AssetBind.objects.filter(classify_bind_id=t_id)
if asset_bind:
return asset_bind
return None
@staticmethod
def get_parent_asset_bind(t_id, p_id):
"""根据 表关系ID 主资产ID, 获取资产数据"""
asset_bind = AssetBind.objects.filter(
classify_bind=t_id, parent_asset_id=p_id
)
if asset_bind:
return asset_bind
return None
@staticmethod
def get_child_asset_bind(t_id, c_id):
"""根据 表关系ID 子资产ID 获取资产数据"""
asset_bind = AssetBind.objects.filter(
classify_bind_id=t_id, child_asset_id=c_id
)
if asset_bind:
return asset_bind
return None
@staticmethod
def get_c_asset_bind(c_id):
"""根据 子资产ID 获取资产数据"""
asset_bind = AssetBind.objects.filter(child_asset_id=c_id)
if asset_bind:
return asset_bind
return None
# @staticmethod
# def create_asset(c_id, *args):
# asset_obj = Asset.objects.create(asset_key=get_md5(*args), classify_id_id=c_id)
# asset_obj.save()
# return asset_obj
@staticmethod
def get_asset(id):
"""根据 ID 获取资产记录"""
asset_obj = Asset.objects.filter(id=id).first()
if asset_obj:
return asset_obj
return None
@staticmethod
def get_classify_asset(id, cid):
"""根据 分类表ID 资产表 ID 获取资产数据"""
asset_obj = Asset.objects.filter(id=id, classify_classify_id=cid).first()
if asset_obj:
return asset_obj
return None
@staticmethod
def get_all_asset(s_id):
asset_all_obj = Asset.objects.filter(classify_id=s_id)
if asset_all_obj:
return asset_all_obj
return None
@staticmethod
def get_classify_field(c_id):
"""根据分类表ID返回 fields 字段表"""
field_obj = Fields.objects.filter(classify_id=c_id).first()
if field_obj:
return field_obj
return None
@staticmethod
def get_all_field_map(c_id):
field_all = Classify.objects.filter(id=c_id).values()
if field_all:
return field_all
return None
@staticmethod
def get_asset_bind_exists(c_id):
"""查询 parent_asset_id 或者 child_asset_id 等于指定id的资产"""
field_all = AssetBind.objects.filter(
Q(parent_asset_id=c_id) | Q(child_asset_id=c_id)
)
if field_all:
return field_all
return None
@staticmethod
def get_p_bind_asset(id, pid):
"""通过主资产ID 和 分类ID 查询关联下 所有的数据"""
# 获取关联数据类型
classify_bind = OperateInstance.get_parent_classify_bind(pid)
l_c = []
if classify_bind:
for t_r in classify_bind:
data = OrderedDict()
asset_re_all = OperateInstance.get_parent_asset_bind(t_r.id, id)
data['classify_name'] = t_r.child_classify.name
data['classify_id'] = t_r.child_classify.id
data['parent_classify_name'] = t_r.child_classify.pid.name
data['fields'] = t_r.child_classify.fields.fields
if asset_re_all:
data['data'] = [model_to_dict(i.child_asset) for i in asset_re_all]
else:
data['data'] = []
l_c.append(data)
return l_c
return []
@staticmethod
def get_c_bind_asset(id, cid):
    """Query all related data under a child asset ID and classify ID."""
    # fetch all the relation-table records
classify_bind = OperateInstance.get_child_classify_bind(cid)
l_c = []
if classify_bind:
# iterate over the relation records
for t_r in classify_bind:
asset_re_all = OperateInstance.get_child_asset_bind(t_r.id, id)
if not asset_re_all:
continue
data = OrderedDict()
data['classify_name'] = t_r.parent_classify.name
data['classify_id'] = t_r.parent_classify.id
data['parent_classify_name'] = t_r.parent_classify.pid.name
data['fields'] = t_r.parent_classify.fields.fields
if asset_re_all:
data['data'] = [model_to_dict(i.parent_asset) for i in asset_re_all]
else:
data['data'] = []
l_c.append(data)
return l_c
return []
@staticmethod
def get_p_classify_bind(pid):
""" 根据 主 classify_id 返回所有 关联数据 """
parent_bind_obj = ClassifyBind.objects.filter(parent_classify_id=pid)
if parent_bind_obj:
return parent_bind_obj
return []
@staticmethod
def get_c_classify_bind(cid):
    """Return all relation rows for the given child classify_id."""
parent_bind_obj = ClassifyBind.objects.filter(child_classify_id=cid)
if parent_bind_obj:
return parent_bind_obj
return []
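# Hedged usage sketch (not part of the module; the IDs are placeholders and a
# configured Django environment is assumed):
def _usage_sketch():
    bind = OperateInstance.get_classify_bind(pid=1, cid=2)
    if bind:
        # fetch the asset bindings under this relation for parent asset 10
        return OperateInstance.get_parent_asset_bind(bind.id, 10)
    return None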
| 31.859091
| 89
| 0.610786
| 7,309
| 0.975183
| 0
| 0
| 5,608
| 0.748232
| 0
| 0
| 1,465
| 0.195464
|
544b2254aa27aedc58e9f1dae64e313ac23e420d
| 525
|
py
|
Python
|
glass/mirror.py
|
fwcd/glass
|
eba5321753a41e4ebb28f6933ec554c104cb0f4c
|
[
"MIT"
] | 2
|
2021-02-01T23:06:35.000Z
|
2022-01-12T15:39:30.000Z
|
glass/mirror.py
|
fwcd/glass
|
eba5321753a41e4ebb28f6933ec554c104cb0f4c
|
[
"MIT"
] | 1
|
2022-03-18T04:07:58.000Z
|
2022-03-19T18:00:08.000Z
|
glass/mirror.py
|
fwcd/glass
|
eba5321753a41e4ebb28f6933ec554c104cb0f4c
|
[
"MIT"
] | null | null | null |
import subprocess
from pathlib import Path
from urllib.parse import urlparse
def mirror_repo(repo_url, target_dir):
repo_dir = Path(str(target_dir) + urlparse(repo_url).path)
repo_dir.parent.mkdir(parents=True, exist_ok=True)
if repo_dir.exists():
print(f'Updating from {repo_url}...')
subprocess.run(['git', 'remote', 'update'], cwd=str(repo_dir))
else:
print(f'Mirroring from {repo_url}...')
subprocess.run(['git', 'clone', repo_url, '--mirror'], cwd=str(repo_dir.parent))
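# Hedged usage sketch (not part of the module; the URL and target directory
# are placeholders):
if __name__ == '__main__':
    mirror_repo('https://github.com/fwcd/glass', Path('/tmp/mirrors'))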
| 35
| 88
| 0.67619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 104
| 0.198095
|
544bbee47e78ee286a199342f8cffdd22f773ed2
| 3,880
|
py
|
Python
|
modeling/__init__.py
|
WinstonHuTiger/BOEMD-UNet
|
f81a0506b8b8a90fd783afcda61f28acb113fc77
|
[
"MIT"
] | 2
|
2021-10-03T11:49:32.000Z
|
2021-12-15T11:40:52.000Z
|
modeling/__init__.py
|
WinstonHuTiger/BOEMD-UNet
|
f81a0506b8b8a90fd783afcda61f28acb113fc77
|
[
"MIT"
] | null | null | null |
modeling/__init__.py
|
WinstonHuTiger/BOEMD-UNet
|
f81a0506b8b8a90fd783afcda61f28acb113fc77
|
[
"MIT"
] | null | null | null |
import os
import torch
from modeling.unet import *
from modeling.bAttenUnet import MDecoderUNet, MMultiBAUNet, MMultiBUNet
def build_model(args, nchannels, nclass, model='unet'):
if model == 'unet':
return UNet(
n_channels=nchannels,
n_classes=nclass,
bilinear=True,
dropout=args.dropout,
dropp=args.drop_p
)
elif model == "batten-unet":
return MDecoderUNet(
n_channels=nchannels,
n_classes=nclass,
bilinear=True,
attention="attn"
)
elif model == 'prob-unet':
return ProbUNet(
n_channels=nchannels,
n_classes=nclass,
bilinear=True,
dropout=args.dropout,
dropp=args.drop_p
)
elif model == 'multi-unet':
return MultiUNet(
n_channels=nchannels,
n_classes=nclass,
bilinear=True,
dropout=args.dropout,
dropp=args.drop_p
)
elif model == 'decoder-unet':
return DecoderUNet(
n_channels=nchannels,
n_classes=nclass,
bilinear=True,
dropout=args.dropout,
dropp=args.drop_p
)
elif model == "multi-bunet":
return MMultiBUNet(
n_channels=nchannels,
n_classes=nclass,
bilinear=True,
dropout=args.dropout,
dropp=args.drop_p
)
elif model == "multi-atten-bunet":
return MMultiBAUNet(
n_channels=nchannels,
n_classes=nclass,
bilinear=True,
dropout=args.dropout,
dropp=args.drop_p
)
elif model == 'attn-unet':
return DecoderUNet(
n_channels=nchannels,
n_classes=nclass,
bilinear=True,
dropout=args.dropout,
dropp=args.drop_p,
attention='attn'
)
elif model == 'pattn-unet':
return DecoderUNet(
n_channels=nchannels,
n_classes=nclass,
bilinear=True,
dropout=args.dropout,
dropp=args.drop_p,
attention='prob',
)
elif model == 'pattn-unet-al':
return DecoderUNet(
n_channels=nchannels,
n_classes=nclass,
bilinear=True,
dropout=args.dropout,
dropp=args.drop_p,
attention='prob-al',
)
elif model == 'battn-unet-one':
return MDecoderUNet(
n_channels=nchannels,
# one head output
n_classes=1,
bilinear=True,
attention="attn"
)
else:
raise NotImplementedError
def build_transfer_learning_model(args, nchannels, nclass, pretrained, model='unet'):
"""
param args:
param nclass: number of classes
param pretrained: path to the pretrained model parameters
"""
# hard coded class number for pretained UNet on BraTS
pre_model = UNet(
n_channels=args.nchannels,
n_classes=3,
bilinear=True,
dropout=args.dropout,
dropp=args.drop_p
)
if not os.path.isfile(pretrained):
raise RuntimeError("no checkpoint found at {}".format(pretrained))
params = torch.load(pretrained)
pre_model.load_state_dict(params['state_dict'])
m = UNet(
n_channels=args.nchannels,
n_classes=nclass,
bilinear=pre_model.bilinear,
dropout=args.dropout,
dropp=args.drop_p
)
assert args.nchannels == pre_model.n_channels
m.inc = pre_model.inc
m.down1 = pre_model.down1
m.down2 = pre_model.down2
m.down3 = pre_model.down3
m.down4 = pre_model.down4
m.up1 = pre_model.up1
m.up2 = pre_model.up2
m.up3 = pre_model.up3
m.up4 = pre_model.up4
return m
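# Hedged usage sketch (not part of the module; it assumes the model classes
# imported above are available, and the args values are placeholders):
if __name__ == '__main__':
    from types import SimpleNamespace
    _args = SimpleNamespace(dropout=True, drop_p=0.5, nchannels=4)
    _model = build_model(_args, nchannels=4, nclass=3, model='unet')
    print(type(_model).__name__)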
| 27.51773
| 85
| 0.559794
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 422
| 0.108763
|
544c328461515102957fb6ba2f7ecaadd80e93ff
| 1,356
|
py
|
Python
|
A.py
|
JK-Incorporated/EYN-DOS
|
6dc331655b5fd04e6d37651ea79ac4e204bfd52e
|
[
"BSD-3-Clause"
] | null | null | null |
A.py
|
JK-Incorporated/EYN-DOS
|
6dc331655b5fd04e6d37651ea79ac4e204bfd52e
|
[
"BSD-3-Clause"
] | null | null | null |
A.py
|
JK-Incorporated/EYN-DOS
|
6dc331655b5fd04e6d37651ea79ac4e204bfd52e
|
[
"BSD-3-Clause"
] | null | null | null |
import os
from os import listdir
from os.path import isfile, join
dir_path = os.path.dirname(os.path.realpath(__file__))
filesys = [f for f in listdir(dir_path) if isfile(join(dir_path, f))]
def get_dir_size(path=dir_path):
total = 0
with os.scandir(dir_path) as it:
for entry in it:
if entry.is_file():
total += entry.stat().st_size
elif entry.is_dir():
total += get_dir_size(entry.path)
return total/1024
size=0
for path, dirs, files in os.walk(dir_path):
for f in files:
fp = os.path.join(path, f)
size += os.path.getsize(fp)
while True:
command_lineA=input("A:\> ")
if command_lineA==("B:"):
print("")
os.system("python3 B.py")
print("")
if command_lineA==("C:"):
print("")
os.system("python3 C.py")
print("")
if command_lineA==("D:"):
print("")
os.system("python3 D.py")
print("")
if command_lineA==("E:"):
print("")
os.system("python3 E.py")
print("")
if command_lineA==("dir"):
print("")
print("ERROR EYN_A1")
print("")
if command_lineA==("listdir"):
print("")
print("ERROR EYN_A1")
print("")
if command_lineA==("end"):
print("")
exit()
| 22.229508
| 69
| 0.526549
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 152
| 0.112094
|
544ec34dfb38023e11066f7adf551926d37772c9
| 3,111
|
py
|
Python
|
api_site/src/api_x/application/entry/bankcard_views.py
|
webee/pay
|
b48c6892686bf3f9014bb67ed119506e41050d45
|
[
"W3C"
] | 1
|
2019-10-14T11:51:49.000Z
|
2019-10-14T11:51:49.000Z
|
api_site/src/api_x/application/entry/bankcard_views.py
|
webee/pay
|
b48c6892686bf3f9014bb67ed119506e41050d45
|
[
"W3C"
] | null | null | null |
api_site/src/api_x/application/entry/bankcard_views.py
|
webee/pay
|
b48c6892686bf3f9014bb67ed119506e41050d45
|
[
"W3C"
] | null | null | null |
# coding=utf-8
from __future__ import unicode_literals
from api_x.utils import response
from api_x.utils.entry_auth import verify_request
from flask import request
from . import application_mod as mod
from .. import dba
from .. import bankcard
from api_x.utils.parser import to_bool
from pytoolbox.util.log import get_logger
logger = get_logger(__name__)
@mod.route('/bankcard/<card_no>/bin', methods=['GET'])
@verify_request('app_query_bin')
def query_bin(card_no):
try:
bankcard_bin = bankcard.query_bin_cache(card_no)
card_bin_info = bankcard_bin.to_dict()
return response.success(data=card_bin_info)
except Exception as e:
logger.exception(e)
return response.bad_request(msg=e.message)
@mod.route('/users/<user_id>/bankcards/bind', methods=['POST'])
@verify_request('app_bind_bankcard')
def bind_bankcard(user_id):
data = request.params
channel = request.channel
card_no = data['card_no']
acct_name = data['account_name']
is_corporate_account = to_bool(data['is_corporate_account'])
province_code = data['province_code']
city_code = data['city_code']
brabank_name = data['branch_bank_name']
user_map = channel.get_user_map(user_id)
if user_map is None:
return response.bad_request(msg='user not exists: [{0}]'.format(user_id))
account_user_id = user_map.account_user_id
bankcard_id = bankcard.bind_bankcard(account_user_id, card_no, acct_name, is_corporate_account,
province_code, city_code, brabank_name)
return response.success(id=bankcard_id)
@mod.route('/users/<user_id>/bankcards/<int:bankcard_id>/unbind', methods=['POST'])
@verify_request('app_unbind_bankcard')
def unbind_bankcard(user_id, bankcard_id):
try:
    bankcard.unbind_bankcard(user_id, bankcard_id)
    return response.success()
except Exception as e:
    logger.exception(e)
    return response.fail()
@mod.route('/users/<user_id>/bankcards', methods=['GET'])
@verify_request('app_list_user_bankcards')
def list_user_bankcards(user_id):
channel = request.channel
user_map = channel.get_user_map(user_id)
if user_map is None:
return response.bad_request(msg='user not exists: [{0}]'.format(user_id))
account_user_id = user_map.account_user_id
bankcards = dba.query_all_bankcards(account_user_id)
bankcards = [bc.to_dict() for bc in bankcards]
return response.success(data=bankcards)
@mod.route('/users/<user_id>/bankcards/<int:bankcard_id>', methods=['GET'])
@verify_request('app_get_user_bankcard')
def get_user_bankcard(user_id, bankcard_id):
channel = request.channel
user_map = channel.get_user_map(user_id)
if user_map is None:
return response.bad_request(msg='user not exists: [{0}]'.format(user_id))
account_user_id = user_map.account_user_id
bc = dba.query_bankcard_by_id(bankcard_id)
if bc is None or bc.user_id != account_user_id:
return response.bad_request(msg='user [{0}] has no bankcard [{1}]'.format(user_id, bankcard_id))
return response.success(data=bc.to_dict())
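# Hedged client sketch (not part of the module): exercising the bin-query
# endpoint above with the requests library; the host, the route prefix (which
# depends on how the blueprint is mounted) and the card number are placeholders.
def _query_bin_sketch(host='http://localhost:5000'):
    import requests
    resp = requests.get('{0}/bankcard/6222020000000000/bin'.format(host))
    return resp.json()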
| 34.566667
| 104
| 0.724204
| 0
| 0
| 0
| 0
| 2,739
| 0.880424
| 0
| 0
| 524
| 0.168435
|
544eed2f5a6fd341973e64324b8db14d8a1824d5
| 2,928
|
py
|
Python
|
httpd/httpd.py
|
protocollabs/dmprd
|
c39e75532ae73458b8239b2d21ca69e42b68929f
|
[
"MIT"
] | 1
|
2018-09-05T08:16:00.000Z
|
2018-09-05T08:16:00.000Z
|
httpd/httpd.py
|
protocollabs/dmprd
|
c39e75532ae73458b8239b2d21ca69e42b68929f
|
[
"MIT"
] | 8
|
2017-01-08T19:11:16.000Z
|
2018-09-24T12:20:40.000Z
|
httpd/httpd.py
|
protocollabs/dmprd
|
c39e75532ae73458b8239b2d21ca69e42b68929f
|
[
"MIT"
] | 2
|
2017-08-23T12:41:02.000Z
|
2018-08-17T08:11:35.000Z
|
import asyncio
import os
try:
from aiohttp import web
except ImportError:
web = None
class Httpd(object):
def __init__(self):
if not web:
print('httpd is specified in conf but aiohttp not available')
return
self.app = web.Application()
self._setup_routes()
self._run_app()
def _run_app(self):
loop = asyncio.get_event_loop()
handler = self.app.make_handler()
f = loop.create_server(handler, '0.0.0.0', 9000)
srv = loop.run_until_complete(f)
print('serving on', srv.sockets[0].getsockname())
async def handler_index(self, request):
data = '''
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<meta content="width=device-width" name="viewport">
<meta content="yes" name="apple-mobile-web-app-capable">
<meta content="IE=edge,chrome=1" http-equiv="X-UA-Compatible">
<title>DMPR</title>
<script type="text/javascript" src="static/js/vis.min.js"></script>
<script type="text/javascript" src="static/js/script.js"></script>
<link href="static/css/bootstrap-3.3.6.css" rel="stylesheet" />
<link href="static/css/style.css" rel="stylesheet" />
<link href="static/css/vis.min.css" rel="stylesheet" type="text/css" />
<link href="static/css/style.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div class="container-fluid">
<div class="row">
<div class="col-sm-3 col-lg-2">
<nav class="navbar navbar-default navbar-fixed-side">
<div class="container">
<div class="navbar-header">
<button class="navbar-toggle" data-target=".navbar-collapse" data-toggle="collapse">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="./">DMPR</a>
</div>
<div class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li class="active"><a href="#">Topology</a></li>
<li class=""><a href="#">Logging</a></li>
</ul>
</div>
</div>
</nav>
</div>
<div class="col-sm-9 col-lg-10 content">
<div id="mynetwork"></div>
</div>
</div>
</div>
<script src="static/js/bootstrap-3.3.6.js"></script>
</body>
</html>
'''
data = str.encode(data)
return web.Response(body=data, content_type='text/html')
def _setup_routes(self):
absdir = os.path.dirname(os.path.realpath(__file__))
app_path = os.path.join(absdir, 'www', 'static')
self.app.router.add_get('/', self.handler_index)
self.app.router.add_static('/static', app_path, show_index=True)
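# Hedged usage sketch (not part of the module): constructing Httpd registers
# the routes and starts serving on port 9000; the surrounding asyncio loop
# must then be kept running.
if __name__ == '__main__':
    Httpd()
    asyncio.get_event_loop().run_forever()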
| 34.046512
| 100
| 0.565574
| 2,831
| 0.966872
| 0
| 0
| 0
| 0
| 2,036
| 0.695355
| 1,995
| 0.681352
|
5451d6245307e0c41240f5d6be7ea9013b165899
| 196
|
py
|
Python
|
SImple-Number.py
|
TonikaHristova/Loops
|
55b3f1608cf81d185fe98366450b527350d86f3b
|
[
"MIT"
] | null | null | null |
SImple-Number.py
|
TonikaHristova/Loops
|
55b3f1608cf81d185fe98366450b527350d86f3b
|
[
"MIT"
] | null | null | null |
SImple-Number.py
|
TonikaHristova/Loops
|
55b3f1608cf81d185fe98366450b527350d86f3b
|
[
"MIT"
] | null | null | null |
import math
num = int(input())
is_prime = True
if num < 2:  # 0 and 1 are not prime
    is_prime = False
for i in range(2, int(math.sqrt(num)) + 1):  # trial division up to sqrt(num)
    if num % i == 0:  # a divisor was found, so num is composite
        is_prime = False
print(is_prime)
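# Worked example: for num = 13 the loop tries i in {2, 3} (3 == int(sqrt(13)));
# neither divides 13 evenly, so is_prime stays True and the script prints True.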
| 9.8
| 41
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 11
| 0.056122
|
54520f95709f73e2e760152d29139cc05ba229e9
| 218
|
py
|
Python
|
badgify/apps.py
|
BrendanBerkley/django-badgify
|
61203e92cb76982f778caf168d371a72a401db10
|
[
"MIT"
] | null | null | null |
badgify/apps.py
|
BrendanBerkley/django-badgify
|
61203e92cb76982f778caf168d371a72a401db10
|
[
"MIT"
] | null | null | null |
badgify/apps.py
|
BrendanBerkley/django-badgify
|
61203e92cb76982f778caf168d371a72a401db10
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class BadgifyConfig(AppConfig):
name = 'badgify'
verbose_name = 'Badgify'
def ready(self):
super(BadgifyConfig, self).ready()
self.module.autodiscover()
| 19.818182
| 42
| 0.674312
| 181
| 0.830275
| 0
| 0
| 0
| 0
| 0
| 0
| 18
| 0.082569
|
545268aad6cd438a8b86741579655c5f5b28ba41
| 249
|
py
|
Python
|
test/test_i18n.py
|
timgates42/uliweb
|
80c0459c5e5d257b665eb2e1d0b5f68ad55c42f1
|
[
"BSD-2-Clause"
] | 202
|
2015-01-12T08:10:48.000Z
|
2021-11-08T09:04:32.000Z
|
test/test_i18n.py
|
timgates42/uliweb
|
80c0459c5e5d257b665eb2e1d0b5f68ad55c42f1
|
[
"BSD-2-Clause"
] | 30
|
2015-01-01T09:07:17.000Z
|
2021-06-03T12:58:45.000Z
|
test/test_i18n.py
|
timgates42/uliweb
|
80c0459c5e5d257b665eb2e1d0b5f68ad55c42f1
|
[
"BSD-2-Clause"
] | 58
|
2015-01-12T03:28:54.000Z
|
2022-01-14T01:58:08.000Z
|
from uliweb.i18n import ugettext_lazy as _
def test_1():
"""
>>> x = _('Hello')
>>> print repr(x)
ugettext_lazy('Hello')
"""
def test_2():
"""
>>> x = _('Hello {0}')
>>> print x.format('name')
Hello name
"""
| 16.6
| 42
| 0.48996
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 167
| 0.670683
|
545316d49d38f35bdeec6536c47e60475a119d98
| 1,041
|
py
|
Python
|
KeyBoardControlImageCaptue.py
|
Prashant-1305/Tello-Drone
|
11c3f845a9887c66ac7e52e042dfd28f23555d2e
|
[
"MIT"
] | null | null | null |
KeyBoardControlImageCaptue.py
|
Prashant-1305/Tello-Drone
|
11c3f845a9887c66ac7e52e042dfd28f23555d2e
|
[
"MIT"
] | null | null | null |
KeyBoardControlImageCaptue.py
|
Prashant-1305/Tello-Drone
|
11c3f845a9887c66ac7e52e042dfd28f23555d2e
|
[
"MIT"
] | null | null | null |
import KeyPressModule as kp
from djitellopy import tello
import time
import cv2
global img
kp.init()
skynet = tello.Tello()
skynet.connect()
print(skynet.get_battery())
skynet.streamon()
def getKeyboardInput():
    # translate held keys into Tello RC values:
    # lr = left/right, fb = forward/back, ud = up/down, yv = yaw velocity
    lr, fb, ud, yv = 0, 0, 0, 0
    speed = 50
if kp.getKey("LEFT"): lr = -speed
elif kp.getKey("RIGHT"): lr = speed
if kp.getKey("UP"): fb = speed
elif kp.getKey("DOWN"): fb = -speed
if kp.getKey("u"): ud = speed
elif kp.getKey("d"): ud = -speed
if kp.getKey("c"): yv = speed
elif kp.getKey("a"): yv = -speed
if kp.getKey("t"): skynet.takeoff()
if kp.getKey("l"): skynet.land(); time.sleep(3)
if kp.getKey('s'):  # save the current camera frame with a timestamped filename
cv2.imwrite(f'Resources/Images/{time.time()}.jpg',img)
time.sleep(1)
return [lr, fb, ud, yv]
while True:
keyVals = getKeyboardInput()
skynet.send_rc_control(keyVals[0], keyVals[1], keyVals[2], keyVals[3])
img = skynet.get_frame_read().frame
#timg = cv2.resize(img, (360, 240))
cv2.imshow("Image", img)
cv2.waitKey(1)
| 22.148936
| 74
| 0.616715
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 123
| 0.118156
|
545376512fee3de8e6da4487e774ee09c7ad912d
| 1,479
|
py
|
Python
|
cnns/foolbox/foolbox_2_3_0/tests/test_model_zoo.py
|
anonymous-user-commits/perturb-net
|
66fc7c4a1234fa34b92bcc85751f0a6e23d80a23
|
[
"MIT"
] | 12
|
2021-07-27T07:18:24.000Z
|
2022-03-09T13:52:20.000Z
|
cnns/foolbox/foolbox_2_3_0/tests/test_model_zoo.py
|
anonymous-user-commits/perturb-net
|
66fc7c4a1234fa34b92bcc85751f0a6e23d80a23
|
[
"MIT"
] | 2
|
2021-08-03T09:21:33.000Z
|
2021-12-29T14:25:30.000Z
|
cnns/foolbox/foolbox_2_3_0/tests/test_model_zoo.py
|
anonymous-user-commits/perturb-net
|
66fc7c4a1234fa34b92bcc85751f0a6e23d80a23
|
[
"MIT"
] | 3
|
2021-11-18T14:46:40.000Z
|
2022-01-03T15:47:23.000Z
|
from foolbox import zoo
import numpy as np
import foolbox
import sys
import pytest
from foolbox.zoo.model_loader import ModelLoader
from os.path import join, dirname
@pytest.fixture(autouse=True)
def unload_foolbox_model_module():
# reload foolbox_model from scratch for every run
# to ensure atomic tests without side effects
module_names = ["foolbox_model", "model"]
for module_name in module_names:
if module_name in sys.modules:
del sys.modules[module_name]
test_data = [
# private repo won't work on travis
# ('https://github.com/bethgelab/AnalysisBySynthesis.git', (1, 28, 28)),
# ('https://github.com/bethgelab/convex_adversarial.git', (1, 28, 28)),
# ('https://github.com/bethgelab/mnist_challenge.git', 784)
(join("file://", dirname(__file__), "data/model_repo"), (3, 224, 224))
]
@pytest.mark.parametrize("url, dim", test_data)
def test_loading_model(url, dim):
# download model
model = zoo.get_model(url)
# create a dummy image
x = np.zeros(dim, dtype=np.float32)
x[:] = np.random.randn(*x.shape)
# run the model
logits = model.forward_one(x)
probabilities = foolbox.utils.softmax(logits)
predicted_class = np.argmax(logits)
# sanity check
assert predicted_class >= 0
assert np.sum(probabilities) >= 0.9999
# TODO: delete fmodel
def test_non_default_module_throws_error():
with pytest.raises(RuntimeError):
ModelLoader.get(key="other")
| 27.90566
| 76
| 0.694388
| 0
| 0
| 0
| 0
| 836
| 0.565247
| 0
| 0
| 484
| 0.327248
|
54538684df9453f633582e0d87edd283242082a7
| 8,464
|
py
|
Python
|
tests/unit/nistbeacon/test_nistbeacon.py
|
urda/py_nist_beacon
|
0251970ec31bc370c326c4c3c3b93a5513bdc028
|
[
"Apache-2.0"
] | 11
|
2017-05-06T02:42:34.000Z
|
2021-02-11T10:13:09.000Z
|
tests/unit/nistbeacon/test_nistbeacon.py
|
urda/nistbeacon
|
0251970ec31bc370c326c4c3c3b93a5513bdc028
|
[
"Apache-2.0"
] | 31
|
2015-12-13T12:04:10.000Z
|
2021-01-27T02:34:34.000Z
|
tests/unit/nistbeacon/test_nistbeacon.py
|
urda/py_nist_beacon
|
0251970ec31bc370c326c4c3c3b93a5513bdc028
|
[
"Apache-2.0"
] | 1
|
2015-12-25T03:50:25.000Z
|
2015-12-25T03:50:25.000Z
|
"""
Copyright 2015-2017 Peter Urda
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from unittest import TestCase
from unittest.mock import (
Mock,
patch,
)
import requests.exceptions
from requests import Response
from nistbeacon import (
NistBeacon,
NistBeaconValue,
)
from tests.test_data.nist_records import local_record_json_db
class TestNistBeacon(TestCase):
@classmethod
def setUpClass(cls):
cls.init_timestamp = 1378395540
cls.expected_first = local_record_json_db[cls.init_timestamp]
cls.expected_first_next = local_record_json_db[cls.init_timestamp + 60]
cls.reference_previous = 1447872960
cls.reference_timestamp = 1447873020
cls.reference_next = 1447873080
cls.expected_current = local_record_json_db[cls.reference_timestamp]
cls.expected_next = local_record_json_db[cls.reference_next]
cls.expected_previous = local_record_json_db[cls.reference_previous]
# Perform conversions from json data to record objects
cls.expected_first = NistBeaconValue.from_json(cls.expected_first)
cls.expected_first_next = NistBeaconValue.from_json(
cls.expected_first_next
)
cls.expected_current = NistBeaconValue.from_json(cls.expected_current)
cls.expected_next = NistBeaconValue.from_json(cls.expected_next)
cls.expected_previous = NistBeaconValue.from_json(
cls.expected_previous
)
@patch('requests.get')
def test_get_first_record(self, requests_get_patched):
mock_response = Mock(spec=Response)
mock_response.status_code = 200
mock_response.text = self.expected_first.xml
requests_get_patched.return_value = mock_response
expected = self.expected_first
actual_download_false = NistBeacon.get_first_record(download=False)
actual_download_true = NistBeacon.get_first_record(download=True)
self.assertEqual(expected, actual_download_false)
self.assertEqual(expected, actual_download_true)
self.assertIsNot(expected, actual_download_false)
self.assertIsNot(expected, actual_download_true)
self.assertIsNot(actual_download_false, actual_download_true)
@patch('requests.get')
def test_get_next(self, requests_get_patched):
mock_response = Mock(spec=Response)
mock_response.status_code = 200
mock_response.text = self.expected_next.xml
requests_get_patched.return_value = mock_response
next_record = NistBeacon.get_next(self.reference_timestamp)
self.assertEqual(self.expected_next, next_record)
@patch('requests.get')
def test_get_previous(self, requests_get_patched):
mock_response = Mock(spec=Response)
mock_response.status_code = 200
mock_response.text = self.expected_previous.xml
requests_get_patched.return_value = mock_response
previous_record = NistBeacon.get_previous(
self.reference_timestamp
)
self.assertEqual(self.expected_previous, previous_record)
@patch('requests.get')
def test_get_record(self, requests_get_patched):
mock_response = Mock(spec=Response)
mock_response.status_code = 200
mock_response.text = self.expected_current.xml
requests_get_patched.return_value = mock_response
record = NistBeacon.get_record(self.reference_timestamp)
self.assertEqual(self.expected_current, record)
@patch('requests.get')
def test_get_last_record(self, requests_get_patched):
mock_response = Mock(spec=Response)
mock_response.status_code = 200
mock_response.text = self.expected_current.xml
requests_get_patched.return_value = mock_response
last_record = NistBeacon.get_last_record()
self.assertIsInstance(last_record, NistBeaconValue)
@patch('requests.get')
def test_get_last_record_404(self, requests_get_patched):
mock_response = Mock(spec=Response)
mock_response.status_code = 404
requests_get_patched.return_value = mock_response
self.assertIsNone(NistBeacon.get_last_record())
@patch('requests.get')
def test_get_last_record_exceptions(self, requests_get_patched):
exceptions_to_test = [
requests.exceptions.RequestException(),
requests.exceptions.ConnectionError(),
requests.exceptions.HTTPError(),
requests.exceptions.URLRequired(),
requests.exceptions.TooManyRedirects(),
requests.exceptions.Timeout(),
]
for exception_to_test in exceptions_to_test:
requests_get_patched.side_effect = exception_to_test
self.assertIsNone(NistBeacon.get_last_record())
@patch('requests.get')
def test_chain_check_empty_input(self, requests_get_patched):
mock_response = Mock(spec=Response)
mock_response.status_code = 404
requests_get_patched.return_value = mock_response
# noinspection PyTypeChecker
self.assertFalse(NistBeacon.chain_check(None))
@patch('requests.get')
def test_chain_check_majority(self, requests_get_patched):
first_response = Mock(spec=Response)
first_response.status_code = 200
first_response.text = self.expected_current.xml
previous_response = Mock(spec=Response)
previous_response.status_code = 200
previous_response.text = self.expected_previous.xml
next_response = Mock(spec=Response)
next_response.status_code = 200
next_response.text = self.expected_next.xml
requests_get_patched.side_effect = [
first_response,
previous_response,
next_response,
]
self.assertTrue(
NistBeacon.chain_check(
self.expected_current.timestamp
)
)
@patch('requests.get')
def test_chain_check_init(self, requests_get_patched):
first_response = Mock(spec=Response)
first_response.status_code = 200
first_response.text = self.expected_first.xml
previous_response = Mock(spec=Response)
previous_response.status_code = 404
next_response = Mock(spec=Response)
next_response.status_code = 200
next_response.text = self.expected_first_next.xml
requests_get_patched.side_effect = [
first_response,
previous_response,
next_response,
]
self.assertTrue(
NistBeacon.chain_check(
self.init_timestamp,
)
)
@patch('requests.get')
def test_chain_check_last(self, requests_get_patched):
first_response = Mock(spec=Response)
first_response.status_code = 200
first_response.text = self.expected_current.xml
previous_response = Mock(spec=Response)
previous_response.status_code = 200
previous_response.text = self.expected_previous.xml
next_response = Mock(spec=Response)
next_response.status_code = 404
requests_get_patched.side_effect = [
first_response,
previous_response,
next_response,
]
self.assertTrue(
NistBeacon.chain_check(
self.expected_current.timestamp,
)
)
@patch('requests.get')
def test_chain_check_no_records_around(self, requests_get_patched):
first_response = Mock(spec=Response)
first_response.status_code = 200
first_response.text = self.expected_current.xml
none_response = Mock(spec=Response)
none_response.status_code = 404
requests_get_patched.side_effect = [
first_response,
none_response,
none_response,
]
self.assertFalse(
NistBeacon.chain_check(
self.expected_current.timestamp
)
)
| 34.129032
| 79
| 0.69258
| 7,630
| 0.901465
| 0
| 0
| 7,522
| 0.888705
| 0
| 0
| 813
| 0.096054
|
54539ddc987a464c0db1b706648667e1f538fd7a
| 5,417
|
py
|
Python
|
aae/auto_pose/visualization/render_pose.py
|
shbe-aau/multi-pose-estimation
|
0425ed9dcc7969f0281cb435615abc33c640e157
|
[
"MIT"
] | 4
|
2021-12-28T09:25:06.000Z
|
2022-01-13T12:55:44.000Z
|
aae/auto_pose/visualization/render_pose.py
|
shbe-aau/multi-view-pose-estimation
|
22cea6cd09684fe655fb2214bc14856f589048e1
|
[
"MIT"
] | null | null | null |
aae/auto_pose/visualization/render_pose.py
|
shbe-aau/multi-view-pose-estimation
|
22cea6cd09684fe655fb2214bc14856f589048e1
|
[
"MIT"
] | 1
|
2022-01-13T13:00:15.000Z
|
2022-01-13T13:00:15.000Z
|
import cv2
import numpy as np
import os
from auto_pose.meshrenderer import meshrenderer
from auto_pose.ae.utils import lazy_property
class PoseVisualizer:
def __init__(self, mp_pose_estimator, downsample=1, vertex_scale=False):
self.downsample = downsample
self.vertex_scale = [mp_pose_estimator.train_args.getint('Dataset', 'VERTEX_SCALE')] if not vertex_scale else [1.]
if hasattr(mp_pose_estimator, 'class_2_objpath'):
self.classes, self.ply_model_paths = zip(*mp_pose_estimator.class_2_objpath.items())
else:
# For BOP evaluation (sry!):
            self.classes = list(mp_pose_estimator.class_2_codebook.keys())  # list() so .index() works below
all_model_paths = eval(mp_pose_estimator.train_args.get('Paths', 'MODEL_PATH'))
base_path = '/'.join(all_model_paths[0].split('/')[:-3])
            itodd_paths = [os.path.join(base_path, 'itodd/models/obj_0000{:02d}.ply'.format(i)) for i in range(29)]
all_model_paths = all_model_paths + itodd_paths
all_model_paths = [model_p.replace('YCB_VideoDataset/original2sixd/bop_models/', 'bop/original/ycbv/models_eval/') for model_p in all_model_paths]
self.ply_model_paths = []
for cb_name in mp_pose_estimator.class_2_codebook.values():
for model_path in all_model_paths:
bop_dataset = cb_name.split('_')[0]
bop_dataset = 'ycbv' if bop_dataset == 'original2sixd' else bop_dataset
model_type, obj, obj_id = cb_name.split('_')[-3:]
model_name = obj + '_' + obj_id
if bop_dataset in model_path and model_name in model_path:
self.ply_model_paths.append(model_path)
print(('renderer', 'Model paths: ', self.ply_model_paths))
@lazy_property
def renderer(self):
return meshrenderer.Renderer(self.ply_model_paths,
samples=1,
vertex_tmp_store_folder='.',
vertex_scale=float(self.vertex_scale[0])) # 1000 for models in meters
def render_poses(self, image, camK, pose_ests, dets, vis_bbs=True, vis_mask=False, all_pose_estimates_rgb=None, depth_image=None, waitKey=True):
        W_d = image.shape[1] // self.downsample  # integer division: cv2.resize expects int sizes
        H_d = image.shape[0] // self.downsample
print( [self.classes.index(pose_est.name) for pose_est in pose_ests])
bgr, depth,_ = self.renderer.render_many(obj_ids = [self.classes.index(pose_est.name) for pose_est in pose_ests],
W = W_d,
H = H_d,
K = camK.copy(),
Rs = [pose_est.trafo[:3,:3] for pose_est in pose_ests],
ts = [pose_est.trafo[:3,3] for pose_est in pose_ests],
near = 10,
far = 10000)
image_show = cv2.resize(image,(W_d,H_d))
if all_pose_estimates_rgb is not None:
image_show_rgb = image_show.copy()
g_y = np.zeros_like(bgr)
g_y[:,:,1]= bgr[:,:,1]
image_show[bgr > 0] = g_y[bgr > 0]*2./3. + image_show[bgr > 0]*1./3.
if all_pose_estimates_rgb is not None:
            # NOTE: the original referenced an undefined `all_class_idcs`;
            # assume the same class indices as the first render call above.
            bgr, depth,_ = self.renderer.render_many(obj_ids = [self.classes.index(pose_est.name) for pose_est in pose_ests],
W = W_d,
H = H_d,
K = camK.copy(),
Rs = [pose_est.trafo[:3,:3] for pose_est in pose_ests],
ts = [pose_est.trafo[:3,3] for pose_est in pose_ests],
near = 10,
far = 10000)
bgr = cv2.resize(bgr,(W_d,H_d))
b_y = np.zeros_like(bgr)
b_y[:,:,0]= bgr[:,:,0]
image_show_rgb[bgr > 0] = b_y[bgr > 0]*2./3. + image_show_rgb[bgr > 0]*1./3.
if np.any(depth_image):
depth_show = depth_image.copy()
depth_show = np.dstack((depth_show,depth_show,depth_show))
depth_show[bgr[:,:,0] > 0] = g_y[bgr[:,:,0] > 0]*2./3. + depth_show[bgr[:,:,0] > 0]*1./3.
cv2.imshow('depth_refined_pose', depth_show)
if vis_bbs:
# for label,box,score in zip(labels,boxes,scores):
for det in dets:
# box = box.astype(np.int32) / self.downsample
# xmin, ymin, xmax, ymax = box[0], box[1], box[0] + box[2], box[1] + box[3]
xmin, ymin, xmax, ymax = int(det.xmin * W_d), int(det.ymin * H_d), int(det.xmax * W_d), int(det.ymax * H_d)
label, score = list(det.classes.items())[0]
try:
cv2.putText(image_show, '%s : %1.3f' % (label,score), (xmin, ymax+20), cv2.FONT_ITALIC, .5, (0,0,255), 2)
cv2.rectangle(image_show,(xmin,ymin),(xmax,ymax),(255,0,0),2)
if all_pose_estimates_rgb is not None:
cv2.putText(image_show_rgb, '%s : %1.3f' % (label,score), (xmin, ymax+20), cv2.FONT_ITALIC, .5, (0,0,255), 2)
cv2.rectangle(image_show_rgb,(xmin,ymin),(xmax,ymax),(255,0,0),2)
                except Exception:
print('failed to plot boxes')
if all_pose_estimates_rgb is not None:
cv2.imshow('rgb_pose', image_show_rgb)
cv2.imshow('refined_pose', image_show)
if waitKey:
cv2.waitKey(0)
else:
cv2.waitKey(1)
return (image_show)
| 47.517544
| 158
| 0.567288
| 5,269
| 0.972679
| 0
| 0
| 281
| 0.051874
| 0
| 0
| 549
| 0.101348
|
5454b8f602a3ea5235a7102af61b547b5c4c3b31
| 1,128
|
py
|
Python
|
client/nodes/common/docker_subsriber.py
|
CanboYe/BusEdge
|
2e53e1d1d82559fc3e9f0029b2f0faf4e356b210
|
[
"MIT",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | 2
|
2021-08-17T14:14:28.000Z
|
2022-02-02T02:09:33.000Z
|
client/nodes/common/docker_subsriber.py
|
cmusatyalab/gabriel-BusEdge
|
528a6ee337882c6e709375ecd7ec7e201083c825
|
[
"MIT",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
client/nodes/common/docker_subsriber.py
|
cmusatyalab/gabriel-BusEdge
|
528a6ee337882c6e709375ecd7ec7e201083c825
|
[
"MIT",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | 1
|
2021-09-01T16:18:29.000Z
|
2021-09-01T16:18:29.000Z
|
# SPDX-FileCopyrightText: 2021 Carnegie Mellon University
#
# SPDX-License-Identifier: Apache-2.0
import cv2
import numpy as np
import rospy
from gabriel_protocol import gabriel_pb2
from std_msgs.msg import UInt8MultiArray
def run_node(client_filter, source_name):
rospy.init_node(source_name + "_subscriber_node")
rospy.loginfo("Initialized subscriber node for " + source_name)
sub = rospy.Subscriber(
source_name,
UInt8MultiArray,
callback,
callback_args=(client_filter,),
queue_size=1,
buff_size=2 ** 24,
)
# spin() simply keeps python from exiting until this node is stopped
rospy.spin()
def callback(data, args):
client_filter = args[0]
serialized_message = data.data
# client_filter.send_serialized(serialized_message)
    # TODO: this is inefficient because we deserialize the binary data,
# need to either modify the gabriel library or change the way
# we save the extra fields.
input_frame = gabriel_pb2.InputFrame()
input_frame.ParseFromString(serialized_message)
client_filter.send(input_frame)
| 29.684211
| 72
| 0.720745
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 433
| 0.383865
|
54562608a59ce9476a71d70e032f5d5bf8f6d75b
| 138
|
py
|
Python
|
datx/base_station.py
|
ipipdotnet/datx-python
|
68d6e99363abc6ae48714be38aa90a5ae6e20fd4
|
[
"Apache-2.0"
] | 39
|
2018-03-13T02:48:36.000Z
|
2021-03-18T07:51:54.000Z
|
datx/base_station.py
|
ipipdotnet/datx-python
|
68d6e99363abc6ae48714be38aa90a5ae6e20fd4
|
[
"Apache-2.0"
] | 1
|
2018-11-06T08:30:31.000Z
|
2018-11-06T08:30:31.000Z
|
datx/base_station.py
|
ipipdotnet/datx-python
|
68d6e99363abc6ae48714be38aa90a5ae6e20fd4
|
[
"Apache-2.0"
] | 10
|
2018-04-28T02:07:08.000Z
|
2020-11-09T04:26:47.000Z
|
# -*- coding: utf-8 -*-
"""
:copyright: ©2018 by IPIP.net
"""
from .district import District
class BaseStation(District):
pass
| 15.333333
| 33
| 0.623188
| 37
| 0.266187
| 0
| 0
| 0
| 0
| 0
| 0
| 67
| 0.482014
|
5456722cbb51619ad54be3201718c3cfa01f24c7
| 13,034
|
py
|
Python
|
cogs/user.py
|
billydevyt/RoboBilly
|
6d79ab9626a6d6b487dd73688ad7187212e7864c
|
[
"MIT"
] | 6
|
2020-11-07T16:46:18.000Z
|
2021-01-03T11:52:39.000Z
|
cogs/user.py
|
billyeatcookies/RoboBilly
|
6d79ab9626a6d6b487dd73688ad7187212e7864c
|
[
"MIT"
] | 3
|
2020-11-30T01:52:41.000Z
|
2021-01-03T11:53:18.000Z
|
cogs/user.py
|
billyeatcookies/RoboBilly
|
6d79ab9626a6d6b487dd73688ad7187212e7864c
|
[
"MIT"
] | 7
|
2021-04-17T07:27:58.000Z
|
2021-08-31T15:21:42.000Z
|
"""
User module
"""
import discord
import random
import asyncio
from discord.ext import commands
from discord.ext.commands import has_permissions, MissingPermissions, BadArgument
import requests, json, pyfiglet
from datetime import timedelta, datetime
class User(commands.Cog):
api_key = "bbde6a19c33fb4c3962e36b8187abbf8"
base_url = "http://api.openweathermap.org/data/2.5/weather?"
def __init__(self, bot):
self.bot = bot
def get_embed(self, _title, _description, icon):
embed = discord.Embed(title=_title, description=_description, color= discord.Color.dark_theme())
embed.set_thumbnail(url=icon)
return embed
def get_weather(self, city_name):
complete_url = self.base_url + "appid=" + self.api_key + "&q=" + city_name
response = requests.get(complete_url)
x = response.json()
if x["cod"] != "404":
# store the value of "main"
# key in variable y
y = x["main"]
# store the value corresponding
# to the "temp" key of y
current_temperature = y["temp"]
# store the value corresponding
# to the "pressure" key of y
current_pressure = y["pressure"]
# store the value corresponding
# to the "humidity" key of y
current_humidiy = y["humidity"]
# store the value of "weather"
# key in variable z
z = x["weather"]
# store the value corresponding
# to the "description" key at
# the 0th index of z
weather_description = z[0]["description"]
# print following values
result = ("Temperature (in kelvin unit) = " + str(current_temperature) + "\natmospheric pressure (in hPa unit) = " + str(current_pressure) + "\nhumidity (in percentage) = " + str(current_humidiy) + "\ndescription = " + str(weather_description))
return result
else:
print(" City Not Found : " + city_name)
return "That city might be in moon"
@commands.command()
async def say(self, ctx, *, arg):
async with ctx.channel.typing():
thing = arg
await (ctx.channel).send(thing)
print("Event: Repeated {ctx.author.name}: ", arg)
@commands.command()
async def hi(self, ctx):
async with ctx.channel.typing():
thing = "hello human!"
await (ctx.channel).send(thing)
print("Event: I said Hi to ", ctx.author.name)
@commands.command()
async def info(self, ctx, *, member: discord.Member):
async with ctx.channel.typing():
await asyncio.sleep(2)
avatar = member.avatar_url
fmt = 'Joined basement on {0.joined_at} \njoined Discord on {0.created_at} \nThis member has {1} roles.'
msg = self.get_embed("Info of {0.display_name}".format(member), fmt.format(member, len(member.roles)), avatar)
await ctx.send(embed=msg)
print(ctx.author.name, " checked info of ", member.name)
@info.error
async def info_error(self, ctx, error):
if isinstance(error, commands.BadArgument):
await ctx.send('I could not find that member...')
@commands.command(pass_context=True)
async def weather(self, ctx, a: str):
async with ctx.channel.typing():
msg = self.get_weather(a)
await asyncio.sleep(2)
await ctx.send(embed=discord.Embed(title=f"Weather status at {a}", description=msg, color=discord.Color.dark_theme()))
print("Event. weather checked on user's command: ", ctx.author.name, ", location: ", a)
@commands.command()
async def bing(self, ctx):
async with ctx.channel.typing():
thing = discord.Embed(title="Bong!", description="Sounds like something " + "https://www.bing.com/"+" would know!", color=discord.Color.dark_theme())
await (ctx.channel).send(embed=thing)
print("Event. I binged, bong! : ", ctx.author.name)
@commands.command()
async def google(self, ctx):
await ctx.send("It is quite important that you **google your problems before asking** someone. Most of your questions have already been answered at least once online because you are definitely not the only one with this particular question. Additionally, each programming language, API, or program should be well documented in its official documentation. \nRefer to this page: https://duck-dev.github.io/general/how-to-google/")
print("Event. how to google! : ", ctx.author.name)
@commands.command()
async def dontasktoask(self, ctx):
async with ctx.channel.typing():
thing = discord.Embed(title="Don't ask to ask, Just ask!", description="Ask your question, instead of asking to help \nhttps://dontasktoask.com/", color=discord.Color.dark_theme())
await (ctx.channel).send(embed = thing)
print("Event. ", ctx.author.name, " did ask to ask!")
@commands.command(name='goodnight', aliases=['night', 'gn'])
async def goodnight(self, ctx, *, args = "nothing"):
async with ctx.channel.typing():
thing = discord.Embed(title="Good Night", description="Sleep tight", color= discord.Color.dark_theme())
await (ctx.channel).send(embed=thing)
print(f"Event. {ctx.author.name} said good night")
@commands.command(name='goodmorning', aliases=['morning', 'gm'])
async def goodmorning(self, ctx, *, args = "nothing"):
async with ctx.channel.typing():
thing = discord.Embed(title="Good Morning", description="Wishing you a good day", color= discord.Color.dark_theme())
await (ctx.channel).send(embed=thing)
print(f"Event. {ctx.author.name} said good morning")
@commands.group()
async def git(self, ctx):
"""
A set of funny ~~useful~~ git commands.
"""
if ctx.invoked_subcommand is None:
await ctx.send('> See: `[]help git`')
@git.command()
async def push(self, ctx, remote: str, branch: str):
await ctx.send('Pushing {} to {}'.format(remote, branch))
@git.command()
async def blame(self, ctx, branch: str):
await ctx.send('#blame{}'.format(branch))
@git.command()
async def lick(self, ctx, user):
if random.choice([True, False]):
await ctx.send('*licks {}, Mmm tastes good*'.format(user))
else:
await ctx.send('*licks {}, euh tastes kinda bad*'.format(user))
@git.command()
async def commit(self, ctx, *, message: str):
        await ctx.send('Committing {}'.format(message))
@git.command()
async def pull(self, ctx, branch: str):
await ctx.send('Pulling {}'.format(branch))
@git.command()
async def status(self, ctx, user: discord.Member=None):
if user:
await ctx.send("On branch {0}\nYour branch is up to date with 'origin/main'. \nstatus: {1}".format(user.display_name, user.status))
else:
await ctx.send("On branch main\nYour branch is up to date with 'origin/main'. \nstatus: {}".format(ctx.author.status))
@git.command()
async def merge(self, ctx, thing, anotherthing):
await ctx.send('Merging {0} to {1}'.format(thing, anotherthing))
@git.command()
async def add(self, ctx, *, thing):
msg = await ctx.send('Adding {0}...'.format(thing))
await asyncio.sleep(2)
await msg.edit(content='Added {0} to changes.\n`{1} additions and {2} deletions.`'.format(thing, random.randint(10, 1000), random.randint(10, 1000)))
@git.command()
async def out(self, ctx, *, thing):
await ctx.send('https://tenor.com/view/the-office-steve-carell-please-leave-get-out-move-gif-3579774')
@commands.command(name='codeblocks', aliases=['codeblock', 'cb', 'myst'])
async def codeblocks(self, ctx, *args):
async with ctx.channel.typing():
thing = discord.Embed(title="Code Blocks", description="""**__Use codeblocks to send code in a message!__**
To make a codeblock, surround your code with \`\`\`
\`\`\`cs
// your code here
\`\`\`
`In order use C# syntax highlighting add cs after the three back ticks`
To send lengthy code, paste it into <https://paste.myst.rs/> and send the link of the paste into chat.""", color=discord.Color.dark_theme())
await (ctx.channel).send(embed=thing)
print(f"Event: {ctx.author.name} used codeblocks")
@commands.command(name='example', aliases=['Example', 'eg', 'eg.'])
async def example(self, ctx, *args):
async with ctx.channel.typing():
thing = discord.Embed(title="Example", description="""**__How to create a Minimal, Reproducible Example__**
When asking a question, people will be better able to provide help if you provide code that they can easily understand and use to reproduce the problem. This is referred to by community members as creating a minimal, reproducible example (**reprex**), a minimal, complete and verifiable example (**mcve**), or a minimal, workable example (**mwe**). Regardless of how it's communicated to you, it boils down to ensuring your code that reproduces the problem follows the following guidelines:
**__Your code examples should be…__**
» **Minimal** – Use as little code as possible that still produces the same problem
» **Complete** – Provide all parts someone else needs to reproduce your problem in the question itself
» **Reproducible** – Test the code you're about to provide to make sure it reproduces the problem
""", color=discord.Color.dark_theme())
await (ctx.channel).send(embed=thing)
print(f"Event: {ctx.author.name} used example")
@commands.command(name='pastemyst', aliases=['pm', 'pastebin', 'PasteMyst', 'paste'])
async def pastemyst(self, ctx, *, args = "nothing"):
async with ctx.channel.typing():
thing = discord.Embed(title="How to use PasteMyst", description="> 1. paste your code in https://paste.myst.rs/\n> 2. copy the link of the website completely\n> 3. send the link into chat.", color=discord.Color.dark_theme())
await (ctx.channel).send(embed=thing)
print(f"Event: {ctx.author.name} used how to use pastemyst")
@commands.group(name="ascii")
async def ascii(self, ctx):
if ctx.invoked_subcommand is None:
await ctx.trigger_typing()
embed = discord.Embed(title="Ascii Modules", description="use []ascii <module>", color = discord.Color.dark_theme())
embed.add_field(name="Word", value="Shows ascii art of given text.", inline=False)
embed.add_field(name="Fonts", value="See available Fonts.", inline=False)
embed.set_footer(text="use []ascii <module> <args>")
await ctx.send(embed=embed)
@ascii.command(name="word", aliases=["w", "Word", "W"])
async def word(self, ctx, word:str = "hey", font:str = "standard"):
try:
result = pyfiglet.figlet_format(word, font = font)
        except Exception:
result = f"There is no font called {font}."
await ctx.send("```\n" + result + "\n```")
@ascii.command(name="fonts", aliases=["font", "f"])
async def fonts(self, ctx, page:int=1):
total_pages = 4
        page_data = None  # predefined so a failed load below cannot raise NameError
        page_no = 0
        with open('./cogs/fonts.json', 'r') as f:
            try:
                data = json.load(f)
if page == 1:
page_data = data['fonts1']
page_no = 1
elif page == 2:
page_data = data['fonts2']
page_no = 2
elif page == 3:
page_data = data['fonts3']
page_no = 3
elif page == 4:
page_data = data['fonts4']
page_no = 4
elif page is None:
page_data = data['fonts1']
page_no = 1
else:
page_data = "more fonts will be added in future"
page_no = 0
            except Exception:
print("fonts.json loading error")
if page_data is not None:
Separator = "\n"
fields = Separator.join(page_data)
#embeding
embed = discord.Embed(color = discord.Color.dark_theme())
embed.set_author(name='Ascii Art')
embed.add_field(name='Fonts page', value=fields, inline=False)
if page_no != 0:
embed.set_footer(text=f"page: {page_no}/{total_pages}")
else:
embed.set_footer(text="use []ascii fonts <page_no>")
await ctx.send(embed=embed)
else:
print("looks like there's a problem with page_data")
#===================================== ADD COG ======================================#
def setup(bot):
bot.add_cog(User(bot))
| 43.15894
| 490
| 0.603192
| 12,655
| 0.970103
| 0
| 0
| 10,583
| 0.811269
| 9,665
| 0.740897
| 4,701
| 0.360368
|
5459131a00c531976bbf1bad787c4cbce19610f5
| 622
|
py
|
Python
|
wsu/tools/simx/simx/python/simx/protomap/util.py
|
tinyos-io/tinyos-3.x-contrib
|
3aaf036722a2afc0c0aad588459a5c3e00bd3c01
|
[
"BSD-3-Clause",
"MIT"
] | 1
|
2020-02-28T20:35:09.000Z
|
2020-02-28T20:35:09.000Z
|
wsu/tools/simx/simx/python/simx/protomap/util.py
|
tinyos-io/tinyos-3.x-contrib
|
3aaf036722a2afc0c0aad588459a5c3e00bd3c01
|
[
"BSD-3-Clause",
"MIT"
] | null | null | null |
wsu/tools/simx/simx/python/simx/protomap/util.py
|
tinyos-io/tinyos-3.x-contrib
|
3aaf036722a2afc0c0aad588459a5c3e00bd3c01
|
[
"BSD-3-Clause",
"MIT"
] | null | null | null |
def sync_read(socket, size):
"""
Perform a (temporary) blocking read.
The amount read may be smaller than the amount requested if a
timeout occurs.
"""
timeout = socket.gettimeout()
socket.settimeout(None)
try:
return socket.recv(size)
finally:
socket.settimeout(timeout)
def sync_write(socket, data):
"""
Perform a (temporary) blocking write.
"""
timeout = socket.gettimeout()
socket.settimeout(None)
try:
while data:
sent = socket.send(data)
data = data[sent:]
finally:
socket.settimeout(timeout)
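
# --- Usage sketch (added for illustration) ---
# Both helpers temporarily force blocking mode and then restore whatever
# timeout the caller had configured. With a connected socket pair:
if __name__ == '__main__':
    import socket as _socket
    a, b = _socket.socketpair()
    a.settimeout(0.5)            # the caller's timeout, preserved by the helpers
    sync_write(a, b'hello')      # blocks until every byte has been sent
    print(sync_read(b, 5))       # b'hello'
    print(a.gettimeout())        # 0.5 -- original timeout restored
    a.close()
    b.close()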
| 22.214286
| 65
| 0.607717
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 192
| 0.308682
|
545b4ee6fb3b667ccf9bf2aadc9dfb4077e4dee6
| 976
|
py
|
Python
|
mergeKsortedlist.py
|
ZhouLihua/leetcode
|
7a711e450756fb7b5648e938879d690e583f5957
|
[
"MIT"
] | 2
|
2019-05-16T03:11:44.000Z
|
2019-10-25T03:20:05.000Z
|
mergeKsortedlist.py
|
ZhouLihua/leetcode
|
7a711e450756fb7b5648e938879d690e583f5957
|
[
"MIT"
] | null | null | null |
mergeKsortedlist.py
|
ZhouLihua/leetcode
|
7a711e450756fb7b5648e938879d690e583f5957
|
[
"MIT"
] | null | null | null |
#Definition for singly-linked list.
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
import sys
class Solution(object):
def mergeKLists(self, lists):
"""
:type lists: List[ListNode]
:rtype: ListNode
"""
temp = ListNode(-1)
result = temp
null_lists = 0
while lists:
while null_lists > 0:
lists.remove(None)
null_lists -= 1
            min_node = ListNode(sys.maxsize)  # sentinel larger than any node value
order = -1
for index, node in enumerate(lists):
if not node:
null_lists += 1
continue
if node.val < min_node.val:
order, min_node = index, node
if order != -1:
temp.next = ListNode(min_node.val)
temp = temp.next
lists[order] = lists[order].next
return result.next
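
# --- Usage sketch (added for illustration) ---
# Build a few sorted linked lists with the ListNode class above and merge them.
def _from_values(values):
    head = tail = ListNode(values[0])
    for v in values[1:]:
        tail.next = ListNode(v)
        tail = tail.next
    return head

if __name__ == '__main__':
    merged = Solution().mergeKLists(
        [_from_values([1, 4, 5]), _from_values([1, 3, 4]), _from_values([2, 6])])
    out = []
    while merged:
        out.append(merged.val)
        merged = merged.next
    print(out)  # [1, 1, 2, 3, 4, 4, 5, 6]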
| 27.885714
| 50
| 0.482582
| 926
| 0.94877
| 0
| 0
| 0
| 0
| 0
| 0
| 111
| 0.11373
|
545c039475e437fcfe31a7978e08b358e2864ddd
| 1,334
|
py
|
Python
|
f5/bigip/tm/vcmp/test/unit/test_virtual_disk.py
|
nghia-tran/f5-common-python
|
acb23a6e5830a119b460c19a578654113419f5c3
|
[
"Apache-2.0"
] | 272
|
2016-02-23T06:05:44.000Z
|
2022-02-20T02:09:32.000Z
|
f5/bigip/tm/vcmp/test/unit/test_virtual_disk.py
|
nghia-tran/f5-common-python
|
acb23a6e5830a119b460c19a578654113419f5c3
|
[
"Apache-2.0"
] | 1,103
|
2016-02-11T17:48:03.000Z
|
2022-02-15T17:13:37.000Z
|
f5/bigip/tm/vcmp/test/unit/test_virtual_disk.py
|
nghia-tran/f5-common-python
|
acb23a6e5830a119b460c19a578654113419f5c3
|
[
"Apache-2.0"
] | 167
|
2016-02-11T17:48:21.000Z
|
2022-01-17T20:13:05.000Z
|
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import mock
import pytest
from f5.bigip.tm.vcmp.virtual_disk import Virtual_Disk
from f5.sdk_exception import UnsupportedMethod
@pytest.fixture
def FakeResource():
mo = mock.MagicMock()
return Virtual_Disk(mo)
def test_create(FakeResource):
with pytest.raises(UnsupportedMethod) as ex:
FakeResource.create()
assert "does not support the create method" in str(ex.value)
def test_update(FakeResource):
with pytest.raises(UnsupportedMethod) as ex:
FakeResource.update()
assert "does not support the update method" in str(ex.value)
def test_modify(FakeResource):
with pytest.raises(UnsupportedMethod) as ex:
FakeResource.modify()
assert "does not support the modify method" in str(ex.value)
| 29.644444
| 74
| 0.749625
| 0
| 0
| 0
| 0
| 89
| 0.066717
| 0
| 0
| 675
| 0.505997
|
545c6c254ab620127f5ce9a6e6a0f63adc08b458
| 1,281
|
py
|
Python
|
tinylinks/admin.py
|
lavindiuss/django-shorter
|
50bc018e762b396cd9bc71991f6ea1329aaceddd
|
[
"MIT"
] | null | null | null |
tinylinks/admin.py
|
lavindiuss/django-shorter
|
50bc018e762b396cd9bc71991f6ea1329aaceddd
|
[
"MIT"
] | null | null | null |
tinylinks/admin.py
|
lavindiuss/django-shorter
|
50bc018e762b396cd9bc71991f6ea1329aaceddd
|
[
"MIT"
] | null | null | null |
"""Admin sites for the ``django-tinylinks`` app."""
from django.contrib import admin
from django.template.defaultfilters import truncatechars
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
from tinylinks.forms import TinylinkAdminForm
from tinylinks.models import Tinylink, TinylinkLog
class TinylinkAdmin(admin.ModelAdmin):
list_display = ('short_url', 'url_truncated', 'amount_of_views', 'user',
'last_checked', 'status', 'validation_error',)
search_fields = ['short_url', 'long_url']
form = TinylinkAdminForm
fieldsets = [
('Tinylink', {'fields': ['user', 'long_url', 'short_url', ]}),
]
def url_truncated(self, obj):
return truncatechars(obj.long_url, 60)
url_truncated.short_description = _('Long URL')
def status(self, obj):
if not obj.is_broken:
return _('OK')
return _('Link broken')
status.short_description = _('Status')
admin.site.register(Tinylink, TinylinkAdmin)
class TinylinkLogAdmin(admin.ModelAdmin):
list_display = ('tinylink', 'datetime', 'remote_ip', 'tracked')
readonly_fields = ('datetime',)
date_hierarchy = 'datetime'
admin.site.register(TinylinkLog, TinylinkLogAdmin)
| 28.466667
| 76
| 0.699454
| 826
| 0.644809
| 0
| 0
| 0
| 0
| 0
| 0
| 301
| 0.234973
|
545c8aae9bf713a7f6422a8269de2049905dd92f
| 562
|
py
|
Python
|
wk11frontend.py
|
alvaro-root/pa2_2021
|
fee3931f9e10a7d39af9bf2ce5f033e41621bbda
|
[
"MIT"
] | null | null | null |
wk11frontend.py
|
alvaro-root/pa2_2021
|
fee3931f9e10a7d39af9bf2ce5f033e41621bbda
|
[
"MIT"
] | null | null | null |
wk11frontend.py
|
alvaro-root/pa2_2021
|
fee3931f9e10a7d39af9bf2ce5f033e41621bbda
|
[
"MIT"
] | null | null | null |
import requests
import json
def main():
host = "http://localhost:5006"
urlpattern = "/user/"
response = requests.post(f"{host}{urlpattern}", json={'key1': 'random value'})
if 199 < response.status_code < 300:
for k, v in response.headers.items():
print(f"{k} -> {v}")
print(f"{'=' * 50}")
body = json.loads(response.text)
for k, v in body.items():
print(f"{k} -> {v}")
else:
print(f"Something bad happened: {response.status_code}")
if __name__ == "__main__":
main()
| 21.615385
| 82
| 0.551601
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 170
| 0.302491
|
545fd8631d933f37ee5ed9022359f6f1a7a06f4b
| 73
|
py
|
Python
|
software/python/XilinxKcu1500Pgp3/__init__.py
|
ejangelico/cryo-on-epix-hr-dev
|
354bf205a67d3c43b4e815823dd78cec85d3b672
|
[
"BSD-3-Clause-LBNL"
] | 1
|
2021-05-24T22:01:54.000Z
|
2021-05-24T22:01:54.000Z
|
software/python/XilinxKcu1500Pgp3/__init__.py
|
ejangelico/cryo-on-epix-hr-dev
|
354bf205a67d3c43b4e815823dd78cec85d3b672
|
[
"BSD-3-Clause-LBNL"
] | 1
|
2021-02-25T20:27:36.000Z
|
2021-03-31T17:55:08.000Z
|
software/python/XilinxKcu1500Pgp3/__init__.py
|
ejangelico/cryo-on-epix-hr-dev
|
354bf205a67d3c43b4e815823dd78cec85d3b672
|
[
"BSD-3-Clause-LBNL"
] | 4
|
2020-10-21T21:39:37.000Z
|
2021-07-24T02:19:34.000Z
|
#!/usr/bin/env python
from XilinxKcu1500Pgp3.XilinxKcu1500Pgp3 import *
| 18.25
| 49
| 0.808219
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 21
| 0.287671
|
545fe80c1b80eb166756266947e1f74465ae48f6
| 2,517
|
py
|
Python
|
files/files.py
|
StevenKangWei/tools
|
f0de7d2202dbe979b06ba8344addad6df6e96320
|
[
"MIT"
] | 15
|
2021-07-06T13:03:09.000Z
|
2022-03-05T04:18:13.000Z
|
files/files.py
|
StevenKangWei/tools
|
f0de7d2202dbe979b06ba8344addad6df6e96320
|
[
"MIT"
] | 1
|
2021-12-03T05:39:24.000Z
|
2021-12-03T05:39:24.000Z
|
files/files.py
|
StevenKangWei/tools
|
f0de7d2202dbe979b06ba8344addad6df6e96320
|
[
"MIT"
] | 5
|
2021-07-30T09:31:31.000Z
|
2022-01-03T06:30:25.000Z
|
#!/usr/bin/python
import os
import glob
import traceback
import datetime
import dandan
from flask import Flask
from flask import abort
from flask import send_file
from flask import send_from_directory
from flask import render_template
from werkzeug.routing import BaseConverter
import config
__VERSION__ = "0.0.1.1"
dirname = os.path.dirname(os.path.abspath(__file__))
favicon = os.path.join(dirname, "static/images/favicon.ico")
class RegexConverter(BaseConverter):
def __init__(self, map, *args):
self.map = map
self.regex = args[0]
server = Flask(__name__)
server.url_map.converters['regex'] = RegexConverter
def get_data():
data = dandan.value.AttrDict()
data.info.name = "Files"
data.info.current_time = datetime.datetime.now()
return data
def get_info(filepath):
info = dandan.value.AttrDict()
info.filepath = filepath
info.basename = os.path.basename(filepath)
info.size = os.path.getsize(filepath)
info.mtime = datetime.datetime.fromtimestamp(os.path.getmtime(filepath))
if os.path.isfile(filepath):
info.type = "file"
elif os.path.isdir(filepath):
info.type = 'dir'
return info
def get_response(filename=""):
basket = os.path.join(dirname, "basket")
if not os.path.exists(basket):
return "basket not exists."
# abort(404)
filepath = os.path.join(basket, filename)
if not os.path.exists(filepath):
return "file not exists {}".format(filepath)
# abort(404)
if os.path.isfile(filepath):
return send_file(filepath)
children = os.listdir(filepath)
data = get_data()
data.filename = filename
if filename:
data.title = '/{}/'.format(filename)
else:
data.title = "/"
data.items = [get_info(os.path.join(filepath, child)) for child in children]
return render_template("index.html", **data)
@server.route('/')
@server.route("/<regex('.+'):filename>")
def index(filename=""):
if filename == "favicon.ico" and os.path.exists(favicon):
return send_file(favicon)
else:
return get_response(filename)
def main():
try:
print("run server {}:{}".format(config.host, config.port))
server.run(host=config.host, port=config.port, debug=config.debug, threaded=True)
except Exception:
traceback.print_exc()
return
if __name__ == "__main__":
main()
| 25.683673
| 90
| 0.642034
| 127
| 0.050457
| 0
| 0
| 233
| 0.092571
| 0
| 0
| 247
| 0.098133
|
546042473af828587af78168aa3e36324191b2db
| 2,961
|
py
|
Python
|
jdcloud_sdk/services/iotcore/models/DeviceVO.py
|
Tanc009/jdcloud-sdk-python
|
8b045c99bc5b73ca7348e950b6f01e03a27982f5
|
[
"Apache-2.0"
] | 14
|
2018-04-19T09:53:56.000Z
|
2022-01-27T06:05:48.000Z
|
jdcloud_sdk/services/iotcore/models/DeviceVO.py
|
Tanc009/jdcloud-sdk-python
|
8b045c99bc5b73ca7348e950b6f01e03a27982f5
|
[
"Apache-2.0"
] | 15
|
2018-09-11T05:39:54.000Z
|
2021-07-02T12:38:02.000Z
|
jdcloud_sdk/services/iotcore/models/DeviceVO.py
|
Tanc009/jdcloud-sdk-python
|
8b045c99bc5b73ca7348e950b6f01e03a27982f5
|
[
"Apache-2.0"
] | 33
|
2018-04-20T05:29:16.000Z
|
2022-02-17T09:10:05.000Z
|
# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
class DeviceVO(object):
def __init__(self, deviceId=None, deviceName=None, parentId=None, deviceType=None, status=None, identifier=None, secret=None, description=None, activatedTime=None, lastConnectedTime=None, createdTime=None, updatedTime=None, productKey=None, productName=None, productSecret=None, model=None, manufacturer=None, dynamicRegister=None, deviceCollectorType=None, lastDisconnectTime=None, orderId=None):
"""
        :param deviceId: (Optional) Device ID
        :param deviceName: (Optional) Device name
        :param parentId: (Optional) Parent device ID
        :param deviceType: (Optional) Device type, same as the product type: 0 - normal device, 1 - gateway, 2 - Edge
        :param status: (Optional) Device status: 0 - not activated, 1 - activated, offline, 2 - activated, online
        :param identifier: (Optional) Device identifier
        :param secret: (Optional) Device secret
        :param description: (Optional) Device description
        :param activatedTime: (Optional) Activation time
        :param lastConnectedTime: (Optional) Last connected time
        :param createdTime: (Optional) Registration time
        :param updatedTime: (Optional) Last modified time
        :param productKey: (Optional) Product key
        :param productName: (Optional) Product name
        :param productSecret: (Optional) Product secret
        :param model: (Optional) Device model
        :param manufacturer: (Optional) Device manufacturer
        :param dynamicRegister: (Optional) Whether dynamic registration is enabled: 0 - disabled, 1 - enabled. With dynamic registration enabled the device authentication type is one secret per product type; otherwise it is one secret per device.
        :param deviceCollectorType: (Optional) Device collector type
        :param lastDisconnectTime: (Optional) Last disconnect time
        :param orderId: (Optional) Order number
"""
self.deviceId = deviceId
self.deviceName = deviceName
self.parentId = parentId
self.deviceType = deviceType
self.status = status
self.identifier = identifier
self.secret = secret
self.description = description
self.activatedTime = activatedTime
self.lastConnectedTime = lastConnectedTime
self.createdTime = createdTime
self.updatedTime = updatedTime
self.productKey = productKey
self.productName = productName
self.productSecret = productSecret
self.model = model
self.manufacturer = manufacturer
self.dynamicRegister = dynamicRegister
self.deviceCollectorType = deviceCollectorType
self.lastDisconnectTime = lastDisconnectTime
self.orderId = orderId
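
# --- Usage sketch (added; the field values below are made up) ---
# Every constructor argument is optional, so a partial device record is valid:
if __name__ == '__main__':
    device = DeviceVO(deviceId='dev-001', deviceName='thermometer-1',
                      deviceType=0, status=2, productKey='pk-demo')
    print(device.deviceName, device.status)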
| 43.544118
| 401
| 0.69875
| 2,592
| 0.794361
| 0
| 0
| 0
| 0
| 0
| 0
| 1,988
| 0.609255
|
54607def7c2c2dd5026968fee33155a24a8770a7
| 155
|
py
|
Python
|
satyrus/sat/types/__init__.py
|
lucasvg/Satyrus3-FinalProject-EspTopsOTM
|
024785752abdc46e3463d8c94df7c3da873c354d
|
[
"MIT"
] | null | null | null |
satyrus/sat/types/__init__.py
|
lucasvg/Satyrus3-FinalProject-EspTopsOTM
|
024785752abdc46e3463d8c94df7c3da873c354d
|
[
"MIT"
] | null | null | null |
satyrus/sat/types/__init__.py
|
lucasvg/Satyrus3-FinalProject-EspTopsOTM
|
024785752abdc46e3463d8c94df7c3da873c354d
|
[
"MIT"
] | null | null | null |
from .array import Array
from .string import String
from .problem import Constraint, Loop
from .main import SatType, Var, Number
from .expr import Expr
| 31
| 39
| 0.780645
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
54615497a597809e722b75e586e88b607f457119
| 470
|
py
|
Python
|
magma/backend/util.py
|
Kuree/magma
|
be2439aa897768c5810be72e3a55a6f772ac83cf
|
[
"MIT"
] | null | null | null |
magma/backend/util.py
|
Kuree/magma
|
be2439aa897768c5810be72e3a55a6f772ac83cf
|
[
"MIT"
] | null | null | null |
magma/backend/util.py
|
Kuree/magma
|
be2439aa897768c5810be72e3a55a6f772ac83cf
|
[
"MIT"
] | null | null | null |
import os
__magma_codegen_debug_info = False
if os.environ.get("MAGMA_CODEGEN_DEBUG_INFO", False):
__magma_codegen_debug_info = True
def set_codegen_debug_info(val):
global __magma_codegen_debug_info
__magma_codegen_debug_info = val
def get_codegen_debug_info():
return __magma_codegen_debug_info
def make_relative(path):
cwd = os.getcwd()
common_prefix = os.path.commonprefix([cwd, path])
return os.path.relpath(path, common_prefix)
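
# --- Usage sketch (added for illustration) ---
# make_relative() strips the prefix a path shares with the current working
# directory:
if __name__ == '__main__':
    path = os.path.join(os.getcwd(), 'build', 'out.v')
    print(make_relative(path))  # build/out.v (build\out.v on Windows)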
| 21.363636
| 53
| 0.77234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 26
| 0.055319
|
546277ddd1038ab1b79d6538508e871a2186c14c
| 3,560
|
py
|
Python
|
src/backend/main.py
|
tuimac/servertools
|
ceda2685a248d700f48aea4f93887b0f89a264a8
|
[
"MIT"
] | null | null | null |
src/backend/main.py
|
tuimac/servertools
|
ceda2685a248d700f48aea4f93887b0f89a264a8
|
[
"MIT"
] | null | null | null |
src/backend/main.py
|
tuimac/servertools
|
ceda2685a248d700f48aea4f93887b0f89a264a8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from subprocess import Popen, PIPE, DEVNULL, run
import socket
import sys
import traceback
import argparse
import time
import logging
import os
logger = logging.getLogger("django")
def startProcess(command, port):
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
result = sock.connect_ex((socket.gethostbyname(socket.gethostname()), int(port)))
if result == 0: raise OSError("[Errno 98] Address already in use")
sock.close()
#popen = Popen(command, stdout=DEVNULL, stderr=PIPE)
popen = os.popen(' '.join(command))
time.sleep(3)
print("Start httptracker is sucess.")
    except Exception:
        # covers OSError and PermissionError as well; re-raise unchanged
        raise
def stopProcess(command):
try:
killCommand = ['pkill', '-f', ' '.join(command)]
run(killCommand)
time.sleep(2)
print("Stop httptracker is sucess.")
    except Exception:
        # covers OSError and PermissionError as well; re-raise unchanged
        raise
class CustomArgparse(argparse.ArgumentParser):
def error(self, message):
if message == "":
print("[Error] Argument is wrong...<(^^;)\n", file=sys.stderr)
else:
print("[Error] " + message + "\n", file=sys.stderr)
self.print_help()
sys.exit(2)
def main():
try:
parser = CustomArgparse(
prog = "httptracker",
description = "Track HTTP request to the end of the host.\nex) httptracker --mode start -p 80",
add_help = True
)
parser.add_argument(
"-m",
"--mode",
dest = "mode",
nargs = 1,
required = True,
help = "[Required]Select modes which are 'start', 'restart', 'stop' to execute httptracker.",
)
parser.add_argument(
"-p",
"--port",
dest = "port",
nargs = 1,
type = int,
default = 8000,
required = False,
help = "[Optional]Direct port which httptracker process use. Default is 8000/tcp."
)
parser.add_argument(
"-i",
"--ipaddress",
dest = "ipaddress",
nargs = 1,
type = str,
default = "0.0.0.0",
required = False,
help = "[Optional]Direct listen ip address which httptracker process use. Default is 0.0.0.0 .",
)
args = parser.parse_args()
ipaddress = ""
port = ""
if args.ipaddress != "0.0.0.0": ipaddress= args.ipaddress[0]
else: ipaddress = args.ipaddress
if args.port != 8000: port = str(args.port[0])
else: port = str(args.port)
command = ["python3", os.path.dirname(__file__) + "/manage.py", "runserver", ipaddress + ":" + port]
if args.mode:
mode = args.mode[0]
if mode == "start":
startProcess(command, port)
elif mode == "restart":
stopProcess(command)
startProcess(command, port)
elif mode == "stop":
stopProcess(command)
else:
                parser.error('Argument "--mode" accepts only "start", "restart", "stop".')
except SystemExit:
pass
    except Exception:
print("[Error] " + traceback.format_exc().splitlines()[-1], file=sys.stderr)
if __name__ == '__main__':
main()
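
# --- Sketch (added) of the port-availability probe used in startProcess() ---
# connect_ex() returns 0 when something is already listening on the port,
# which is why a zero result is treated as "Address already in use" above.
def _port_in_use(host, port):
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        return sock.connect_ex((host, int(port))) == 0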
| 30.169492
| 108
| 0.538202
| 301
| 0.084551
| 0
| 0
| 0
| 0
| 0
| 0
| 808
| 0.226966
|
5463fe7521a3910ac70e77bb4ec4fc1c354e171b
| 35
|
py
|
Python
|
pyble/const/characteristic/sensor_location.py
|
bgromov/PyBLEWrapper
|
8a5d016e65b3c259391ddc97c371ab4b1b5c61b5
|
[
"MIT"
] | 14
|
2015-03-30T23:11:36.000Z
|
2020-04-07T00:57:12.000Z
|
pyble/const/characteristic/sensor_location.py
|
bgromov/PyBLEWrapper
|
8a5d016e65b3c259391ddc97c371ab4b1b5c61b5
|
[
"MIT"
] | 3
|
2016-05-17T06:11:07.000Z
|
2017-05-15T16:43:09.000Z
|
pyble/const/characteristic/sensor_location.py
|
bgromov/PyBLEWrapper
|
8a5d016e65b3c259391ddc97c371ab4b1b5c61b5
|
[
"MIT"
] | 11
|
2016-03-11T08:53:03.000Z
|
2019-03-11T21:32:13.000Z
|
NAME="Sensor Location"
UUID=0x2A5D
| 11.666667
| 22
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 17
| 0.485714
|
546484ce8b5ed762d88a0033bf3308f52967f631
| 296
|
py
|
Python
|
active-learning/seq_data.py
|
ansunsujoe/ml-research
|
7ab529a5ec1d420385e64b9eebf87e0847b85afd
|
[
"MIT"
] | null | null | null |
active-learning/seq_data.py
|
ansunsujoe/ml-research
|
7ab529a5ec1d420385e64b9eebf87e0847b85afd
|
[
"MIT"
] | null | null | null |
active-learning/seq_data.py
|
ansunsujoe/ml-research
|
7ab529a5ec1d420385e64b9eebf87e0847b85afd
|
[
"MIT"
] | null | null | null |
import random
from tqdm import tqdm
def random_seq():
return [str(random.randint(1, 9)) for x in range(random.randint(2, 15))]
if __name__ == "__main__":
with open("sequences-1-train.txt", "w") as f:
for i in tqdm(range(5000)):
f.write(",".join(random_seq()) + "\n")
| 29.6
| 76
| 0.614865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 43
| 0.14527
|
546488ac5fe6da6a714985e1c5c6692b62df9032
| 3,585
|
py
|
Python
|
datatest/main.py
|
ajhynes7/datatest
|
78742e98de992807286655f5685a2dc33a7b452e
|
[
"Apache-2.0"
] | 277
|
2016-05-12T13:22:49.000Z
|
2022-03-11T00:18:32.000Z
|
datatest/main.py
|
ajhynes7/datatest
|
78742e98de992807286655f5685a2dc33a7b452e
|
[
"Apache-2.0"
] | 57
|
2016-05-18T01:03:32.000Z
|
2022-02-17T13:48:43.000Z
|
datatest/main.py
|
ajhynes7/datatest
|
78742e98de992807286655f5685a2dc33a7b452e
|
[
"Apache-2.0"
] | 16
|
2016-05-22T11:35:19.000Z
|
2021-12-01T19:41:42.000Z
|
"""Datatest main program"""
import sys as _sys
from unittest import TestProgram as _TestProgram
from unittest import defaultTestLoader as _defaultTestLoader
try:
from unittest.signals import installHandler
except ImportError:
installHandler = None
from datatest import DataTestRunner
__unittest = True
__datatest = True
class DataTestProgram(_TestProgram):
def __init__(self, module='__main__', defaultTest=None, argv=None,
testRunner=DataTestRunner, testLoader=_defaultTestLoader,
exit=True, verbosity=1, failfast=None, catchbreak=None,
buffer=None, ignore=False):
self.ignore = ignore
_TestProgram.__init__(self,
module=module,
defaultTest=defaultTest,
argv=argv,
testRunner=testRunner,
testLoader=testLoader,
exit=exit,
verbosity=verbosity,
failfast=failfast,
catchbreak=catchbreak,
buffer=buffer)
def runTests(self):
try:
if self.catchbreak and installHandler:
installHandler()
except AttributeError:
pass # does not have catchbreak attribute
if self.testRunner is None:
self.testRunner = DataTestRunner
if isinstance(self.testRunner, type):
try:
kwds = ['verbosity', 'failfast', 'buffer', 'warnings', 'ignore']
kwds = [attr for attr in kwds if hasattr(self, attr)]
kwds = dict((attr, getattr(self, attr)) for attr in kwds)
testRunner = self.testRunner(**kwds)
except TypeError:
if 'warnings' in kwds:
del kwds['warnings']
testRunner = self.testRunner(**kwds)
else:
# assumed to be a TestRunner instance
testRunner = self.testRunner
self.result = testRunner.run(self.test)
if self.exit:
_sys.exit(not self.result.wasSuccessful())
if _sys.version_info[:2] == (3, 1): # Patch methods for Python 3.1.
def __init__(self, module='__main__', defaultTest=None, argv=None,
testRunner=DataTestRunner, testLoader=_defaultTestLoader,
exit=True, ignore=False):
self.ignore = ignore
_TestProgram.__init__(self,
module=module,
defaultTest=defaultTest,
argv=argv,
testRunner=testRunner,
testLoader=testLoader,
exit=exit)
DataTestProgram.__init__ = __init__
elif _sys.version_info[:2] == (2, 6): # Patch runTests() for Python 2.6.
def __init__(self, module='__main__', defaultTest=None, argv=None,
testRunner=DataTestRunner, testLoader=_defaultTestLoader,
exit=True, ignore=False):
self.exit = exit # <- 2.6 does not handle exit argument.
self.ignore = ignore
_TestProgram.__init__(self,
module=module,
defaultTest=defaultTest,
argv=argv,
testRunner=testRunner,
testLoader=testLoader)
DataTestProgram.__init__ = __init__
main = DataTestProgram
| 38.138298
| 80
| 0.538633
| 1,883
| 0.525244
| 0
| 0
| 0
| 0
| 0
| 0
| 301
| 0.083961
|
54655fd5e9013ea6eec439615853e317aa7b100b
| 17,503
|
py
|
Python
|
zvmsdk/vmops.py
|
jasealpers/python-zvm-sdk
|
feb19dd40915b1a6cad74e7ccda17bc76d015ea5
|
[
"Apache-2.0"
] | 9
|
2017-06-13T17:46:33.000Z
|
2019-01-08T03:00:00.000Z
|
zvmsdk/vmops.py
|
jasealpers/python-zvm-sdk
|
feb19dd40915b1a6cad74e7ccda17bc76d015ea5
|
[
"Apache-2.0"
] | 4
|
2018-07-18T21:41:21.000Z
|
2019-01-07T06:05:15.000Z
|
zvmsdk/vmops.py
|
jasealpers/python-zvm-sdk
|
feb19dd40915b1a6cad74e7ccda17bc76d015ea5
|
[
"Apache-2.0"
] | 20
|
2017-02-27T09:46:13.000Z
|
2019-05-29T23:17:52.000Z
|
# Copyright 2017 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import six
from zvmsdk import config
from zvmsdk import dist
from zvmsdk import exception
from zvmsdk import log
from zvmsdk import smtclient
from zvmsdk import database
from zvmsdk import utils as zvmutils
_VMOPS = None
CONF = config.CONF
LOG = log.LOG
def get_vmops():
global _VMOPS
if _VMOPS is None:
_VMOPS = VMOps()
return _VMOPS
class VMOps(object):
def __init__(self):
self._smtclient = smtclient.get_smtclient()
self._dist_manager = dist.LinuxDistManager()
self._pathutils = zvmutils.PathUtils()
self._namelist = zvmutils.get_namelist()
self._GuestDbOperator = database.GuestDbOperator()
self._ImageDbOperator = database.ImageDbOperator()
def get_power_state(self, userid):
"""Get power status of a z/VM instance."""
return self._smtclient.get_power_state(userid)
def _get_cpu_num_from_user_dict(self, dict_info):
cpu_num = 0
for inf in dict_info:
if 'CPU ' in inf:
cpu_num += 1
return cpu_num
def _get_max_memory_from_user_dict(self, dict_info):
with zvmutils.expect_invalid_resp_data():
mem = dict_info[0].split(' ')[4]
return zvmutils.convert_to_mb(mem) * 1024
def get_info(self, userid):
power_stat = self.get_power_state(userid)
perf_info = self._smtclient.get_image_performance_info(userid)
if perf_info:
try:
max_mem_kb = int(perf_info['max_memory'].split()[0])
mem_kb = int(perf_info['used_memory'].split()[0])
num_cpu = int(perf_info['guest_cpus'])
cpu_time_us = int(perf_info['used_cpu_time'].split()[0])
except (ValueError, TypeError, IndexError, AttributeError,
KeyError) as err:
LOG.error('Parse performance_info encounter error: %s',
str(perf_info))
raise exception.SDKInternalError(msg=str(err),
modID='guest')
return {'power_state': power_stat,
'max_mem_kb': max_mem_kb,
'mem_kb': mem_kb,
'num_cpu': num_cpu,
'cpu_time_us': cpu_time_us}
else:
# virtual machine in shutdown state or not exists
dict_info = self._smtclient.get_user_direct(userid)
return {
'power_state': power_stat,
'max_mem_kb': self._get_max_memory_from_user_dict(dict_info),
'mem_kb': 0,
'num_cpu': self._get_cpu_num_from_user_dict(dict_info),
'cpu_time_us': 0}
def instance_metadata(self, instance, content, extra_md):
pass
def add_instance_metadata(self):
pass
def is_reachable(self, userid):
"""Reachable through IUCV communication channel."""
return self._smtclient.get_guest_connection_status(userid)
def guest_start(self, userid):
""""Power on z/VM instance."""
LOG.info("Begin to power on vm %s", userid)
self._smtclient.guest_start(userid)
LOG.info("Complete power on vm %s", userid)
def guest_stop(self, userid, **kwargs):
LOG.info("Begin to power off vm %s", userid)
self._smtclient.guest_stop(userid, **kwargs)
LOG.info("Complete power off vm %s", userid)
def guest_softstop(self, userid, **kwargs):
LOG.info("Begin to soft power off vm %s", userid)
self._smtclient.guest_softstop(userid, **kwargs)
LOG.info("Complete soft power off vm %s", userid)
def guest_pause(self, userid):
LOG.info("Begin to pause vm %s", userid)
self._smtclient.guest_pause(userid)
LOG.info("Complete pause vm %s", userid)
def guest_unpause(self, userid):
LOG.info("Begin to unpause vm %s", userid)
self._smtclient.guest_unpause(userid)
LOG.info("Complete unpause vm %s", userid)
def guest_reboot(self, userid):
"""Reboot a guest vm."""
LOG.info("Begin to reboot vm %s", userid)
self._smtclient.guest_reboot(userid)
LOG.info("Complete reboot vm %s", userid)
def guest_reset(self, userid):
"""Reset z/VM instance."""
LOG.info("Begin to reset vm %s", userid)
self._smtclient.guest_reset(userid)
LOG.info("Complete reset vm %s", userid)
def live_migrate_vm(self, userid, destination, parms, action):
"""Move an eligible, running z/VM(R) virtual machine transparently
from one z/VM system to another within an SSI cluster."""
# Check guest state is 'on'
state = self.get_power_state(userid)
if state != 'on':
LOG.error("Failed to live migrate guest %s, error: "
"guest is inactive, cann't perform live migrate." %
userid)
raise exception.SDKConflictError(modID='guest', rs=1,
userid=userid)
# Do live migrate
if action.lower() == 'move':
LOG.info("Moving the specific vm %s", userid)
self._smtclient.live_migrate_move(userid, destination, parms)
LOG.info("Complete move vm %s", userid)
if action.lower() == 'test':
LOG.info("Testing the eligiblity of specific vm %s", userid)
self._smtclient.live_migrate_test(userid, destination)
def create_vm(self, userid, cpu, memory, disk_list,
user_profile, max_cpu, max_mem, ipl_from,
ipl_param, ipl_loadparam):
"""Create z/VM userid into user directory for a z/VM instance."""
LOG.info("Creating the user directory for vm %s", userid)
info = self._smtclient.create_vm(userid, cpu, memory,
disk_list, user_profile,
max_cpu, max_mem, ipl_from,
ipl_param, ipl_loadparam)
# add userid into smapi namelist
self._smtclient.namelist_add(self._namelist, userid)
return info
def create_disks(self, userid, disk_list):
LOG.info("Beging to create disks for vm: %(userid)s, list: %(list)s",
{'userid': userid, 'list': disk_list})
user_direct = self._smtclient.get_user_direct(userid)
exist_disks = []
for ent in user_direct:
if ent.strip().startswith('MDISK'):
md_vdev = ent.split()[1].strip()
exist_disks.append(md_vdev)
if exist_disks:
start_vdev = hex(int(max(exist_disks), 16) + 1)[2:].rjust(4, '0')
else:
start_vdev = None
info = self._smtclient.add_mdisks(userid, disk_list, start_vdev)
LOG.info("Complete create disks for vm: %s", userid)
return info
def delete_disks(self, userid, vdev_list):
LOG.info("Begin to delete disk on vm: %(userid), vdev list: %(list)s",
{'userid': userid, 'list': vdev_list})
# not support delete disks when guest is active
if self._smtclient.get_power_state(userid) == 'on':
func = 'delete disks when guest is active'
raise exception.SDKFunctionNotImplementError(func)
self._smtclient.remove_mdisks(userid, vdev_list)
LOG.info("Complete delete disks for vm: %s", userid)
def guest_config_minidisks(self, userid, disk_info):
LOG.info("Begin to configure disks on vm: %(userid), info: %(info)s",
{'userid': userid, 'info': disk_info})
if disk_info != []:
self._smtclient.process_additional_minidisks(userid, disk_info)
LOG.info("Complete configure disks for vm: %s", userid)
else:
LOG.info("No disk to handle on %s." % userid)
def is_powered_off(self, instance_name):
"""Return True if the instance is powered off."""
return self._smtclient.get_power_state(instance_name) == 'off'
def delete_vm(self, userid):
"""Delete z/VM userid for the instance."""
LOG.info("Begin to delete vm %s", userid)
self._smtclient.delete_vm(userid)
# remove userid from smapi namelist
self._smtclient.namelist_remove(self._namelist, userid)
LOG.info("Complete delete vm %s", userid)
def execute_cmd(self, userid, cmdStr):
"""Execute commands on the guest vm."""
LOG.debug("executing cmd: %s", cmdStr)
return self._smtclient.execute_cmd(userid, cmdStr)
def set_hostname(self, userid, hostname, os_version):
"""Punch a script that used to set the hostname of the guest.
:param str guest: the user id of the guest
:param str hostname: the hostname of the guest
:param str os_version: version of guest operation system
"""
tmp_path = self._pathutils.get_guest_temp_path(userid)
if not os.path.exists(tmp_path):
os.makedirs(tmp_path)
tmp_file = tmp_path + '/hostname.sh'
lnxdist = self._dist_manager.get_linux_dist(os_version)()
lines = lnxdist.generate_set_hostname_script(hostname)
with open(tmp_file, 'w') as f:
f.writelines(lines)
requestData = "ChangeVM " + userid + " punchfile " + \
tmp_file + " --class x"
LOG.debug("Punch script to guest %s to set hostname" % userid)
try:
self._smtclient._request(requestData)
except exception.SDKSMTRequestFailed as err:
msg = ("Failed to punch set_hostname script to userid '%s'. SMT "
"error: %s" % (userid, err.format_message()))
LOG.error(msg)
raise exception.SDKSMTRequestFailed(err.results, msg)
finally:
self._pathutils.clean_temp_folder(tmp_path)
def guest_deploy(self, userid, image_name, transportfiles=None,
remotehost=None, vdev=None, hostname=None):
LOG.info("Begin to deploy image on vm %s", userid)
self._smtclient.guest_deploy(userid, image_name, transportfiles,
remotehost, vdev)
# punch scripts to set hostname
if (transportfiles is None) and hostname:
image_info = self._ImageDbOperator.image_query_record(image_name)
os_version = image_info[0]['imageosdistro']
self.set_hostname(userid, hostname, os_version)
def guest_capture(self, userid, image_name, capture_type='rootonly',
compress_level=6):
LOG.info("Begin to capture vm %(userid), image name is %(name)s",
{'userid': userid, 'name': image_name})
self._smtclient.guest_capture(userid, image_name,
capture_type=capture_type,
compress_level=compress_level)
LOG.info("Complete capture image on vm %s", userid)
def guest_list(self):
return self._smtclient.get_vm_list()
def get_definition_info(self, userid, **kwargs):
check_command = ["nic_coupled"]
direct_info = self._smtclient.get_user_direct(userid)
info = {}
info['user_direct'] = direct_info
for k, v in kwargs.items():
if k in check_command:
if (k == 'nic_coupled'):
info['nic_coupled'] = False
nstr = "NICDEF %s TYPE QDIO LAN SYSTEM" % v
for inf in direct_info:
if nstr in inf:
info['nic_coupled'] = True
break
else:
raise exception.SDKInvalidInputFormat(
msg=("invalid check option for user direct: %s") % k)
return info
def get_console_output(self, userid):
def append_to_log(log_data, log_path):
LOG.debug('log_data: %(log_data)r, log_path: %(log_path)r',
{'log_data': log_data, 'log_path': log_path})
with open(log_path, 'a+') as fp:
fp.write(log_data)
return log_path
LOG.info("Begin to capture console log on vm %s", userid)
log_size = CONF.guest.console_log_size * 1024
console_log = self._smtclient.get_user_console_output(userid)
log_path = self._pathutils.get_console_log_path(userid)
        # TODO: need to consider shrinking the log file size
append_to_log(console_log, log_path)
log_fp = open(log_path, 'rb')
try:
log_data, remaining = zvmutils.last_bytes(log_fp, log_size)
except Exception as err:
msg = ("Failed to truncate console log, error: %s" %
six.text_type(err))
LOG.error(msg)
raise exception.SDKInternalError(msg)
if remaining > 0:
LOG.info('Truncated console log returned, %d bytes ignored' %
remaining)
LOG.info("Complete get console output on vm %s", userid)
return log_data
def check_guests_exist_in_db(self, userids, raise_exc=True):
if not isinstance(userids, list):
# convert userid string to list
userids = [userids]
all_userids = self.guest_list()
userids_not_in_db = list(set(userids) - set(all_userids))
if userids_not_in_db:
if raise_exc:
# log and raise exception
userids_not_in_db = ' '.join(userids_not_in_db)
LOG.error("Guest '%s' does not exist in guests database" %
userids_not_in_db)
raise exception.SDKObjectNotExistError(
obj_desc=("Guest '%s'" % userids_not_in_db), modID='guest')
else:
return False
else:
userids_migrated = self._GuestDbOperator.get_migrated_guest_list()
userids_in_migrated = list(set(userids) & set(userids_migrated))
# case1 userid has been migrated.
if userids_in_migrated:
if raise_exc:
migrated_userids = ' '.join(userids_in_migrated)
LOG.error("Guest(s) '%s' has been migrated." %
migrated_userids)
raise exception.SDKObjectNotExistError(
obj_desc=("Guest(s) '%s'" % migrated_userids),
modID='guest')
else:
return False
flag = True
for uid in userids:
                # case2 userid has been shut down and started on another host.
if zvmutils.check_userid_on_others(uid):
flag = False
comment = self._GuestDbOperator.get_comments_by_userid(uid)
comment['migrated'] = 1
action = "update guest '%s' in database" % uid
with zvmutils.log_and_reraise_sdkbase_error(action):
self._GuestDbOperator.update_guest_by_userid(
uid, comments=comment)
return flag
def live_resize_cpus(self, userid, count):
# Check power state is 'on'
state = self.get_power_state(userid)
if state != 'on':
LOG.error("Failed to live resize cpus of guest %s, error: "
"guest is inactive, cann't perform live resize." %
userid)
raise exception.SDKConflictError(modID='guest', rs=1,
userid=userid)
# Do live resize
self._smtclient.live_resize_cpus(userid, count)
LOG.info("Complete live resize cpu on vm %s", userid)
def resize_cpus(self, userid, count):
LOG.info("Begin to resize cpu on vm %s", userid)
# Do resize
self._smtclient.resize_cpus(userid, count)
LOG.info("Complete resize cpu on vm %s", userid)
def live_resize_memory(self, userid, memory):
# Check power state is 'on'
state = self.get_power_state(userid)
if state != 'on':
LOG.error("Failed to live resize memory of guest %s, error: "
"guest is inactive, cann't perform live resize." %
userid)
raise exception.SDKConflictError(modID='guest', rs=1,
userid=userid)
# Do live resize
self._smtclient.live_resize_memory(userid, memory)
LOG.info("Complete live resize memory on vm %s", userid)
def resize_memory(self, userid, memory):
LOG.info("Begin to resize memory on vm %s", userid)
# Do resize
self._smtclient.resize_memory(userid, memory)
LOG.info("Complete resize memory on vm %s", userid)
| 40.144495
| 79
| 0.588642
| 16,527
| 0.944238
| 0
| 0
| 0
| 0
| 0
| 0
| 4,442
| 0.253785
|
546685a1cd267c088cdbed690f4354973078c4ca
| 3,481
|
py
|
Python
|
Q146.py
|
Linchin/python_leetcode_git
|
3d08ab04bbdbd2ce268f33c501fbb149662872c7
|
[
"MIT"
] | null | null | null |
Q146.py
|
Linchin/python_leetcode_git
|
3d08ab04bbdbd2ce268f33c501fbb149662872c7
|
[
"MIT"
] | null | null | null |
Q146.py
|
Linchin/python_leetcode_git
|
3d08ab04bbdbd2ce268f33c501fbb149662872c7
|
[
"MIT"
] | null | null | null |
"""
Q146
LRU Cache
Medium
Author: Lingqing Gan
Date: 08/06/2019
Question:
Design and implement a data structure for Least Recently Used (LRU) cache.
It should support the following operations: get and put.
get(key) - Get the value (will always be positive) of the key if the key
exists in the cache, otherwise return -1.
put(key, value) - Set or insert the value if the key is not already present.
When the cache reached its capacity, it should invalidate the least
recently used item before inserting a new item.
The cache is initialized with a positive capacity.
Follow up:
Could you do both operations in O(1) time complexity?
notes:
linked list + dict(hash map)
12/24/2019
Merry Xmas~~
Now the code I wrote is working correctly. Just not very efficient.
Time to learn how the tutorial did it.
"""
class LRUCache:
class Node:
def __init__(self, key, val):
self.key = key
self.val = val
self.next = None
self.prev = None
def __init__(self, capacity: int):
self.cap = capacity
self.size = 0
self.hash = {}
self.head = None
self.tail = None
def get(self, key: int) -> int:
if key not in self.hash:
return -1
# if the inquired node is the last/only node
if self.hash[key].next is None:
return self.hash[key].val
# AT LEAST 2 NODES
# move the inquired node to the end of the linked list
# handle head and tail of the linked list
if self.head == self.hash[key]:
self.head = self.head.next
last = self.tail.key
self.tail.next = self.hash[key]
self.tail = self.hash[key]
# connect the nodes before and after the inquired node
if self.hash[key].prev is not None:
self.hash[key].prev.next = self.hash[key].next
if self.hash[key].next is not None:
self.hash[key].next.prev = self.hash[key].prev
# update the prev/next node of the inquired node
self.hash[key].prev = self.hash[last]
self.hash[key].next = None
return self.hash[key].val
def put(self, key: int, value: int) -> None:
# if the key exists
if key in self.hash:
self.hash[key].val = value
self.get(key)
            return
# if key is new
self.hash[key] = self.Node(key, value)
if self.size == 0:
# first node
self.head = self.tail = self.hash[key]
self.size = 1
elif self.size < self.cap:
# capacity not reached, just add new node to the end
self.tail.next = self.hash[key]
self.hash[key].prev = self.tail
self.tail = self.hash[key]
self.size += 1
else:
# capacity reached, need to remove LRU node
self.tail.next = self.hash[key]
self.hash[key].prev = self.tail
self.tail = self.hash[key]
first = self.head.key
self.head = self.head.next
self.head.prev = None
del self.hash[first]
# Your LRUCache object will be instantiated and called as such:
# obj = LRUCache(capacity)
# param_1 = obj.get(key)
# obj.put(key,value)
capacity = 2
cache = LRUCache(capacity)
cache.put(1,1)
cache.put(2,2)
print(cache.get(1))
cache.put(3,3)
print(cache.get(2))
cache.put(4,4)
print(cache.get(1))
print(cache.get(3))
print(cache.get(4))
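# Expected output for the sequence above: 1, -1, -1, 3, 4
# (put(3,3) evicts key 2, then put(4,4) evicts key 1 -- the classic LeetCode example)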
| 26.172932
| 76
| 0.600402
| 2,325
| 0.667912
| 0
| 0
| 0
| 0
| 0
| 0
| 1,341
| 0.385234
|
54670eac7c97edca8f6b8dd01151c748a6156511
| 9,940
|
py
|
Python
|
bin/genparams.py
|
neonkingfr/VizBench
|
e41f559cb6e761d717f2f5b202482d5d8dacd2d8
|
[
"MIT"
] | 7
|
2015-01-05T06:32:49.000Z
|
2020-10-30T19:29:07.000Z
|
bin/genparams.py
|
neonkingfr/VizBench
|
e41f559cb6e761d717f2f5b202482d5d8dacd2d8
|
[
"MIT"
] | null | null | null |
bin/genparams.py
|
neonkingfr/VizBench
|
e41f559cb6e761d717f2f5b202482d5d8dacd2d8
|
[
"MIT"
] | 4
|
2016-03-09T22:29:26.000Z
|
2021-04-07T13:52:28.000Z
|
# This script reads *VizParams.list files that define Vizlet parameters
# and generates .h files for them, making runtime access to them much faster.
# This allows new parameters to be added just by editing one file.
import sys
import os
import re
types={"bool":"BOOL","int":"INT","double":"DBL","string":"STR"}
realtypes={"bool":"bool","int":"int","double":"double","string":"std::string"}
paramtypes={"bool":"BoolParam","int":"IntParam","double":"DoubleParam","string":"StringParam"}
def readparams(listfile):
try:
f = open(listfile)
except:
print(sys.stderr,"Unable to open "+listfile)
sys.exit(1)
lines = f.readlines()
params = []
for ln in lines:
if len(ln) == 0:
continue
if ln[0] == '#':
continue
if ln[0] == ':':
            # These lines are meant to define string values (currently parsed and discarded)
vals = ln.split(None,5)
(name,typ,mn,mx,default,comment) = vals
continue
vals = ln.split(None,5)
(name,typ,mn,mx,default,comment) = vals
params.append(
{ "name": name, "type": typ, "min": mn, "max": mx, "default": default, "comment": comment }
)
params = sorted(params, key=lambda dct: dct['name'])
return params
def writeln(line):
sys.stdout.write(line+"\n")
def write(line):
sys.stdout.write(line)
def genparamcpp(paramclass):
writeln("#include \"VizParams.h\"")
writeln("#include \""+paramclass+".h\"")
writeln("char* "+paramclass+"Names[] = { "+paramclass+"Names_INIT };")
## utility to make sure floating-point values are printed with a decimal point
## so function calls/etc get disambiguated between double and int.
def s2d(d):
return "%f" % float(d);
def genparamheader(params,classname):
uptype = classname.upper()
tab = "\t"
tab2 = "\t\t"
tab3 = "\t\t\t"
writeln("#ifndef _"+uptype+"_H")
writeln("#define _"+uptype+"_H")
writeln("#include \"VizParams.h\"")
writeln("#include \"VizJSON.h\"")
writeln("")
### Generate a declaration for the array of parameter names.
### The actual storage for it needs to be declared in a non-header file.
writeln("extern char* "+paramnames+"[];")
writeln("")
### Generate a macro which is all the parameter names, used to initialize that array
writeln("#define "+paramnames+"_INIT \\")
for p in params:
name = p["name"]
writeln(tab+"\"%s\",\\"%(name))
writeln(tab+"NULL")
writeln("")
### Start the class
writeln("class "+classname+" : public VizParams {")
writeln("public:")
### Generate the class constructor
writeln(tab+classname+"() {")
writeln(tab2+"loadDefaults();")
writeln(tab+"}")
writeln(tab+"char **ListOfNames() { return "+paramnames+"; }");
writeln(tab+"// std::string JsonListOfValues() { return _JsonListOfValues("+paramnames+"); }");
writeln(tab+"// std::string JsonListOfParams() { return _JsonListOfParams("+paramnames+"); }");
writeln(tab+"std::string JsonListOfStringValues(std::string type) { return _JsonListOfStringValues(type); }");
### Generate the method that loads JSON
writeln(tab+"void loadJson(cJSON* json) {")
writeln(tab2+"cJSON* j;")
for p in params:
name = p["name"]
typ = p["type"]
writeln(tab2+"j = cJSON_GetObjectItem(json,\""+name+"\");")
writeln(tab2+"if (j) { "+name+".set(j); }")
writeln(tab+"}")
### Generate the method that loads default values
writeln(tab+"void loadDefaults() {")
for p in params:
name = p["name"]
typ = p["type"]
defaultvalue = p["default"]
if typ == "double":
defaultvalue = s2d(defaultvalue)
writeln(tab2+name+".set("+defaultvalue+");")
writeln(tab+"}")
### Generate the method that applies one params to another
writeln(tab+"void applyVizParamsFrom("+classname+"* p) {")
writeln(tab2+"if ( ! p ) { return; }");
for p in params:
name = p["name"]
typ = p["type"]
writeln(tab2+"if ( p->"+name+".isset() ) { this->"+name+".set(p->"+name+".get()); }");
writeln(tab+"}")
### Generate the Set method
writeln(tab+"void Set(std::string nm, std::string val) {")
writeln(tab2+"bool stringval = false;")
estr = ""
for p in params:
name = p["name"]
writeln(tab2+estr+"if ( nm == \""+name+"\" ) {")
typ = p["type"]
if typ == "double":
writeln(tab3+name+".set(string2double(val));")
elif typ == "int":
writeln(tab3+name+".set(string2int(val));")
elif typ == "bool":
writeln(tab3+name+".set(string2bool(val));")
elif typ == "string":
writeln(tab3+name+".set(val);")
writeln(tab3+"stringval = true;")
estr = "} else "
writeln(tab2+"}")
writeln("")
writeln(tab2+"if ( ! stringval ) {")
writeln(tab3+"Increment(nm,0.0); // abide by limits, using code in Increment")
writeln(tab2+"}")
writeln(tab+"}")
### Generate the Increment method
writeln(tab+"void Increment(std::string nm, double amount) {")
estr = ""
for p in params:
name = p["name"]
typ = p["type"]
mn = p["min"]
mx = p["max"]
writeln(tab2+estr+"if ( nm == \""+name+"\" ) {")
if typ == "double":
writeln(tab3+name+".set(adjust("+name+".get(),amount,"+s2d(mn)+","+s2d(mx)+"));")
elif typ == "int":
writeln(tab3+name+".set(adjust("+name+".get(),amount,"+mn+","+mx+"));")
elif typ == "bool":
writeln(tab3+name+".set(adjust("+name+".get(),amount));")
elif typ == "string":
vals = p["min"]
if vals == "*":
writeln(tab3+"// '*' means the value can be anything");
else:
writeln(tab3+name+".set(adjust("+name+".get(),amount,VizParams::StringVals[\""+vals+"\"]));")
estr = "} else "
writeln(tab2+"}")
writeln("")
writeln(tab+"}")
### Generate the DefaultValue method
writeln(tab+"std::string DefaultValue(std::string nm) {")
estr = ""
for p in params:
name = p["name"]
typ = p["type"]
default = p["default"]
if default[0] != "\"":
default = "\"" + default + "\""
writeln(tab2+estr+"if ( nm == \""+name+"\" ) { return "+default+"; }");
writeln(tab2+"return \"\";");
writeln(tab+"}")
### Generate the MinValue method
writeln(tab+"std::string MinValue(std::string nm) {")
estr = ""
for p in params:
name = p["name"]
typ = p["type"]
mn = p["min"]
write(tab2+estr+"if ( nm == \""+name+"\" ) { ")
if typ == "double":
write("return \""+mn+"\";");
elif typ == "int":
write("return \""+mn+"\";");
elif typ == "bool":
write("return \"false\";");
elif typ == "string":
write("return \""+mn+"\";");
writeln(" }");
writeln(tab2+"return \"\";");
writeln(tab+"}")
### Generate the MaxValue method
writeln(tab+"std::string MaxValue(std::string nm) {")
estr = ""
for p in params:
name = p["name"]
typ = p["type"]
mx = p["max"]
write(tab2+estr+"if ( nm == \""+name+"\" ) { ")
if typ == "double":
write("return \""+mx+"\";");
elif typ == "int":
write("return \""+mx+"\";");
elif typ == "bool":
write("return \"true\";");
elif typ == "string":
write("return \""+mx+"\";");
writeln(" }");
writeln(tab2+"return \"\";");
writeln(tab+"}")
### Generate the Toggle method
writeln(tab+"void Toggle(std::string nm) {")
writeln(tab2+"bool stringval = false;")
estr = ""
for p in params:
name = p["name"]
typ = p["type"]
if typ == "bool":
writeln(tab2+estr+"if ( nm == \""+name+"\" ) {")
writeln(tab3+name+".set( ! "+name+".get());")
writeln(tab2+"}")
estr = "else "
writeln(tab+"}")
### Generate the Get method
writeln(tab+"std::string GetAsString(std::string nm) {")
estr = ""
for p in params:
name = p["name"]
typ = p["type"]
writeln(tab2+estr+"if ( nm == \""+name+"\" ) {")
if typ == "double":
writeln(tab3+"return DoubleString("+name+".get());")
elif typ == "int":
writeln(tab3+"return IntString("+name+".get());")
elif typ == "bool":
writeln(tab3+"return BoolString("+name+".get());")
elif typ == "string":
writeln(tab3+"return "+name+".get();")
estr = "} else "
writeln(tab2+"}")
writeln(tab2+"return \"\";")
writeln(tab+"}")
### Generate the GetType method
writeln(tab+"std::string GetType(std::string nm) {")
for p in params:
name = p["name"]
typ = p["type"]
writeln(tab2+"if ( nm == \""+name+"\" ) { return \""+typ+"\"; }")
writeln(tab2+"return \"\";")
writeln(tab+"}")
### Generate the member declarations
writeln("")
for p in params:
name = p["name"]
typ = p["type"]
paramtype = paramtypes[typ]
writeln(tab+paramtype+" "+name+";")
writeln("};")
writeln("")
writeln("#endif")
def modtime(file):
try:
return os.path.getmtime(file)
except:
return -1
if __name__ != "__main__":
print "This code needs to be invoked as a main program."
sys.exit(1)
if len(sys.argv) < 2:
print("Usage: %s {paramlist}" % sys.argv[0])
sys.exit(1)
# We expect this program to be invoked from the VizBench/bin directory
# so everything can be full paths without depending on environment variables
paramdir = "../src/params"
if not os.path.isdir(paramdir):
print("No directory "+paramdir+" !?")
sys.exit(1)
os.chdir(paramdir)
force = False
if len(sys.argv) > 2 and sys.argv[1] == "-f":
force = True
parambase = sys.argv[2]
else:
parambase = sys.argv[1]
paramclass = parambase+"VizParams"
paramlist = parambase+"VizParams.list"
paramtouch = parambase+"VizParams.touch"
paramnames = parambase+"VizParamsNames"
file_h = parambase + "VizParams.h"
file_cpp = parambase + "VizParams.cpp"
changed = force or (modtime(paramlist) > modtime(paramtouch) ) or not os.path.exists(file_h) or not os.path.exists(file_cpp)
if not changed:
print "No change in "+paramlist
sys.exit(0)
do_not_edit = "/************************************************\n" \
" *\n" \
" * This file is generated from '"+paramlist+"' by genparams.py\n" \
" *\n" \
" * DO NOT EDIT!\n" \
" *\n" \
" ************************************************/\n";
f = open(file_h,"w")
f.write(do_not_edit)
sys.stdout = f
params = readparams(paramlist)
genparamheader(params,paramclass)
f.close()
f = open(file_cpp,"w")
f.write(do_not_edit);
sys.stdout = f
genparamcpp(paramclass)
f.close()
def touch(filename):
f = open(filename,"w")
f.write("# This file exists to record the last build time\n");
f.close()
touch(paramtouch)
| 27.458564
| 124
| 0.609557
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 4,639
| 0.466606
|
54685a8741677f7fae5e8b83b5e24b77c1c400f9
| 712
|
py
|
Python
|
notebooks/session_4/s3-sobelAndmatplotlib.py
|
bigmpc/cv-spring-2021
|
81d9384f74f5411804cdbb26be5b7ced0d0f5958
|
[
"Apache-2.0"
] | 3
|
2021-03-09T10:00:50.000Z
|
2021-12-26T07:19:09.000Z
|
notebooks/session_4/s3-sobelAndmatplotlib.py
|
bigmpc/cv-spring-2021
|
81d9384f74f5411804cdbb26be5b7ced0d0f5958
|
[
"Apache-2.0"
] | null | null | null |
notebooks/session_4/s3-sobelAndmatplotlib.py
|
bigmpc/cv-spring-2021
|
81d9384f74f5411804cdbb26be5b7ced0d0f5958
|
[
"Apache-2.0"
] | 1
|
2021-02-27T16:09:30.000Z
|
2021-02-27T16:09:30.000Z
|
import cv2
import numpy as np
import matplotlib.pyplot as plt
#Read the image as grayscale:
image = cv2.imread('building.jpg', 0)
#Compute the gradient approximations using the Sobel operator:
dx = cv2.Sobel(image, cv2.CV_32F, 1, 0)
dy = cv2.Sobel(image, cv2.CV_32F, 0, 1)
#Visualize the results:
plt.figure()
plt.subplot(141)
plt.axis('off')
plt.title('image')
plt.imshow(image, cmap='gray')
plt.subplot(142)
plt.axis('off')
plt.imshow(dx, cmap='gray')
plt.title('dx')
plt.subplot(143)
plt.axis('off')
plt.imshow(dy, cmap='gray')
plt.title('dy')
plt.subplot(144)
plt.axis('off')
plt.title('dy + dx')
plt.imshow(np.absolute(dx)+np.absolute(dy), cmap='gray')
plt.show()
| 19.777778
| 63
| 0.671348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 199
| 0.279494
|
5468626a4d8739106b686cc86e072541eeccc86e
| 956
|
py
|
Python
|
reporter-cli/sql-pdf/python/src/reporterprimary/__init__.py
|
rgolubtsov/reporter-multilang
|
6d7e04bbd57342ea80e1beccea3c4de1b1c4e203
|
[
"Unlicense"
] | 3
|
2017-04-28T16:40:22.000Z
|
2019-02-22T16:57:12.000Z
|
reporter-cli/sql-pdf/python/src/reporterprimary/__init__.py
|
rgolubtsov/reporter-multilang
|
6d7e04bbd57342ea80e1beccea3c4de1b1c4e203
|
[
"Unlicense"
] | 46
|
2017-01-17T01:10:15.000Z
|
2019-06-13T20:45:12.000Z
|
reporter-cli/sql-pdf/python/src/reporterprimary/__init__.py
|
rgolubtsov/reporter-multilang
|
6d7e04bbd57342ea80e1beccea3c4de1b1c4e203
|
[
"Unlicense"
] | 1
|
2017-07-06T14:18:55.000Z
|
2017-07-06T14:18:55.000Z
|
# -*- coding: utf-8 -*-
# reporter-cli/sql-pdf/python/src/reporterprimary/__init__.py
# =============================================================================
# Reporter Multilang. Version 0.5.9
# =============================================================================
# A tool to generate human-readable reports based on data from various sources
# with the focus on its implementation using a series of programming languages.
# =============================================================================
# Written by Radislav (Radicchio) Golubtsov, 2016-2021
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# (See the LICENSE file at the top of the source tree.)
#
# vim:set nu et ts=4 sw=4:
| 45.52381
| 79
| 0.561715
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 936
| 0.979079
|
5468c394ce1fe6e2cc2dd6fce2fd7d4c6e567c44
| 3,494
|
py
|
Python
|
bem/teq_planet.py
|
DanielAndreasen/bem
|
c4cca79322f08b5e9a3f3d39749c11d9f6296aae
|
[
"MIT"
] | null | null | null |
bem/teq_planet.py
|
DanielAndreasen/bem
|
c4cca79322f08b5e9a3f3d39749c11d9f6296aae
|
[
"MIT"
] | null | null | null |
bem/teq_planet.py
|
DanielAndreasen/bem
|
c4cca79322f08b5e9a3f3d39749c11d9f6296aae
|
[
"MIT"
] | null | null | null |
import numpy as np
from uncertainties import umath as um
def getTeqpl(Teffst, aR, ecc, A=0, f=1/4.):
"""Return the planet equilibrium temperature.
Relation adapted from equation 4 page 4 in http://www.mpia.de/homes/ppvi/chapter/madhusudhan.pdf
and https://en.wikipedia.org/wiki/Stefan%E2%80%93Boltzmann_law
    and later updated to include the effect of eccentricity on the average star-planet distance
according to equation 5 p 25 of Laughlin & Lissauer 2015arXiv150105685L (1501.05685)
Plus Exoplanet atmospheres, physical processes, Sara Seager, p30 eq 3.9 for f contribution.
:param float/np.ndarray Teffst: Effective temperature of the star
    :param float/np.ndarray aR: Ratio of the planetary orbital semi-major axis over the stellar
        radius (without unit)
:param float/np.ndarray A: Bond albedo (should be between 0 and 1)
:param float/np.ndarray f: Redistribution factor. If 1/4 the energy is uniformly redistributed
        over the planetary surface. If f = 2/3, there is no redistribution at all: the
        atmosphere immediately re-radiates without advection.
:return float/np.ndarray Teqpl: Equilibrium temperature of the planet
"""
return Teffst * (f * (1 - A))**(1 / 4.) * np.sqrt(1 / aR) / (1 - ecc**2)**(1/8.)
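# Illustrative sanity check (not part of the library): an Earth-like planet with
# Teff = 5772 K, a/R* = 215, e = 0.0167, zero albedo and f = 1/4 gives roughly 278 K.
# print(getTeqpl(5772., 215., 0.0167))  # ~278.4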
def getTeqpl_error(Teffst, aR, ecc, A=0, f=1/4.):
"""Return the planet equilibrium temperature.
Relation adapted from equation 4 page 4 in http://www.mpia.de/homes/ppvi/chapter/madhusudhan.pdf
and https://en.wikipedia.org/wiki/Stefan%E2%80%93Boltzmann_law
    and later updated to include the effect of eccentricity on the average star-planet distance
according to equation 5 p 25 of Laughlin & Lissauer 2015arXiv150105685L (1501.05685)
Plus Exoplanet atmospheres, physical processes, Sara Seager, p30 eq 3.9 for f contribution.
:param float/np.ndarray Teffst: Effective temperature of the star
    :param float/np.ndarray aR: Ratio of the planetary orbital semi-major axis over the stellar
        radius (without unit)
:param float/np.ndarray A: Bond albedo (should be between 0 and 1)
:param float/np.ndarray f: Redistribution factor. If 1/4 the energy is uniformly redistributed
        over the planetary surface. If f = 2/3, there is no redistribution at all: the
        atmosphere immediately re-radiates without advection.
:return float/np.ndarray Teqpl: Equilibrium temperature of the planet
"""
return Teffst * (f * (1 - A))**(1 / 4.) * um.sqrt(1 / aR) / (1 - ecc**2)**(1/8.)
def getHtidal(Ms, Rp, a, e):
# a -- in AU, semi major axis
# Teq -- in Kelvins, planetary equilibrium temperature
# M -- in Jupiter masses, planetary mass
# Z -- [Fe/H], stellar metallicity
# Rp -- radius planet
# Ms -- stellar mass
# e -- eccentricity
# G -- gravitational constant
#
#
G = 6.67408 * 10**(-11) # m3 kg-1 s-2
# Equation from Enoch et al. 2012
# Q = 10**5 # Tidal dissipation factor for high mass planets ...?
# k = 0.51 # Love number
# H_tidal = (63/4) * ((G * Ms)**(3/2) * Ms * Rp**5 * a**(-15/2)*e**2) / ((3*Q) / (2*k))
# Equation from Jackson 2008
# Qp' = (3*Qp) / (2*k)
Qp = 500 # with Love number 0.3 for terrestrial planets
    # note: 63 / (16*pi), per the Jackson 2008 flux formula; the original code
    # computed (63/16)*pi because of operator precedence
    H_tidal = (63 / (16 * np.pi)) * (((G*Ms)**(3/2) * Ms * Rp**3) / Qp) * a**(-15/2) * e**2
return H_tidal
def safronov_nb(Mp, Ms, Rp, a):
# Ozturk 2018, Safronov 1972
return (Mp/Ms) * (a/Rp)
| 48.527778
| 100
| 0.660561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,811
| 0.804522
|
5469add1bc5b0732388dfd9a2adc569e52915599
| 1,656
|
py
|
Python
|
poppy/data_preprocess.py
|
phanxuanphucnd/BertTextClassification
|
c9a0500f07d831f924f56cc8211569b035c6e47a
|
[
"MIT"
] | 1
|
2021-06-14T21:03:04.000Z
|
2021-06-14T21:03:04.000Z
|
poppy/data_preprocess.py
|
phanxuanphucnd/BertTextClassification
|
c9a0500f07d831f924f56cc8211569b035c6e47a
|
[
"MIT"
] | null | null | null |
poppy/data_preprocess.py
|
phanxuanphucnd/BertTextClassification
|
c9a0500f07d831f924f56cc8211569b035c6e47a
|
[
"MIT"
] | null | null | null |
import pandas as pd
import re
import os
from tqdm import tqdm
## Cleaning train raw dataset
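# Assumed layout of the raw .crash file (illustrative, reconstructed from the
# parsing below): each record is an id line such as "train_000001", followed by
# one or more quoted text lines, and terminated by a label line of "0" or "1".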
train = open('./data/raw/train.crash').readlines()
train_ids = []
train_texts = []
train_labels = []
for id, line in tqdm(enumerate(train)):
line = line.strip()
if line.startswith("train_"):
train_ids.append(id)
elif line == "0" or line == "1":
train_labels.append(id)
for id, lb in tqdm(zip(train_ids, train_labels)):
line_id = train[id].strip()
label = train[lb].strip()
text = ' '.join(train[id + 1: lb])
    text = re.sub(r'\s+', ' ', text).strip()[1: -1].strip()
train_texts.append(text)
train_df = pd.DataFrame({
'id': train_ids,
'text': train_texts,
'label': train_labels
})
if not os.path.exists('./data'):
os.makedirs('./data')
train_df.to_csv('./data/train.csv', encoding='utf-8', index=False)
## Clean test raw dataset
test = open("./data/raw/test.crash").readlines()
test_ids = []
test_texts = []
for id, line in tqdm(enumerate(test)):
line = line.strip()
if line.startswith("test_"):
test_ids.append(id)
for i, id in tqdm(enumerate(test_ids)):
if i >= len(test_ids) - 1:
end = len(test)
else:
end = test_ids[i + 1]
line_id = test[id].strip()
    text = re.sub(r'\s+', ' ', ' '.join(test[id + 1: end])).strip()[1:-1].strip()
test_texts.append(text)
test_df = pd.DataFrame({
'id': test_ids,
'text': test_texts
})
submission = pd.read_csv('./data/raw/sample_submission.csv', encoding='utf-8')
result = pd.concat([test_df, submission], axis=1, sort=False)
result.to_csv('./data/test.csv', encoding='utf-8', index=False)
| 23.323944
| 80
| 0.618357
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 277
| 0.167271
|
546a32ceac58022d2ad2cfb8c9d2804371eb31f5
| 6,456
|
py
|
Python
|
websaw/core/app.py
|
valq7711/websaw
|
fb5718ad3ecd011d7fbb3f24fa007d84951bd58c
|
[
"MIT"
] | 1
|
2022-02-25T15:02:25.000Z
|
2022-02-25T15:02:25.000Z
|
websaw/core/app.py
|
valq7711/websaw
|
fb5718ad3ecd011d7fbb3f24fa007d84951bd58c
|
[
"MIT"
] | null | null | null |
websaw/core/app.py
|
valq7711/websaw
|
fb5718ad3ecd011d7fbb3f24fa007d84951bd58c
|
[
"MIT"
] | null | null | null |
import functools
from types import SimpleNamespace
from typing import List
from . import globs
from .context import BaseContext
from .exceptions import FixtureProcessError
from .reloader import Reloader
from .static_registry import static_registry
def _dummy_exception_handler(ctx: BaseContext, exc: Exception):
raise exc
class Fixtured:
def __init__(self, h, fixt: List[str]):
if isinstance(h, self.__class__):
fixt = [*fixt, *h.fixt]
h = h.h
self.h = h
self.fixt = fixt
functools.update_wrapper(self, h)
def __call__(self, *a, **kw):
return self.h(*a, **kw)
class BaseApp:
static_registry = static_registry
add_route = staticmethod(globs.app.add_route)
reloader = Reloader
def __init__(
self,
default_config,
default_ctx: BaseContext,
):
self.default_config = default_config
self.default_ctx = default_ctx
self._registered = {}
self._mixins: List['BaseApp'] = []
def mixin(self, *mixins):
self._mixins.extend(mixins)
self.default_ctx.extend(*[m.default_ctx for m in mixins])
def _register(self, fun, route_args, fixtures=None):
meta = self._registered.setdefault(
fun, SimpleNamespace(routes_args=[], fixtures=[])
)
meta.routes_args.append(route_args)
if fixtures is not None:
meta.fixtures.extend(fixtures)
def route(self, rule, method='GET', name=None, **kw):
args = (rule, method, name)
def decorator(h):
fixt = None
if isinstance(h, Fixtured):
fixt = h.fixt
h = h.h
self._register(h, (args, kw), fixt)
return h
return decorator
def use(self, *fixt):
fixt = [self.default_ctx.get_or_make_fixture_key(f) for f in fixt]
def decorator(h):
if isinstance(h, Fixtured):
h.fixt[:] = [*fixt, *h.fixt]
return h
return Fixtured(h, fixt)
return decorator
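    # A minimal usage sketch (the handler and fixture names below are hypothetical):
    #
    #   @app.route('index')
    #   @app.use('db', 'session')
    #   def index(ctx):
    #       return ctx.db.fetch_rows()
    #
    # `use` resolves each fixture name through the context and wraps the handler in
    # a Fixtured object, so that mount() can activate those fixtures per request.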
def mount(
self,
config: dict = None,
context: BaseContext = None,
render_map: dict = None,
exception_handler=None
):
if context is None:
context = self.default_ctx
if config is None:
config = self.default_config
        # fall back to the config only when not passed explicitly (the original
        # unconditionally overwrote both arguments)
        if render_map is None:
            render_map = config.get('render_map')
        if exception_handler is None:
            exception_handler = config.get('exception_handler')
context = context.clone()
app_data = context.app_data = SimpleNamespace(
routes=[],
named_routes={},
**config
)
for raw_h, meta, mixin_data in self._iter_registered():
h = self.make_handler(raw_h, meta.fixtures, context, render_map, exception_handler, mixin_data)
for route_args, route_kw in meta.routes_args:
self._mount_route(context.app_data, h, route_args, route_kw)
# mount app static
static_rule, static_h = self.static_registry.make_rule_and_handler(
f'{app_data.static_base_url}/static', app_data.static_folder, app_data.app_name
)
if static_rule is not None:
self._mount_route(context.app_data, static_h, (static_rule, 'GET', None), {})
# mount mixins static as /{app_name}/static/{mixin_name}/
for m in self._mixins:
m_cfg = m.default_config
m_name = m_cfg['app_name']
static_base_url = f'{app_data.base_url}/static/mxn/{m_name}'
static_rule, static_h = self.static_registry.make_rule_and_handler(
static_base_url, m_cfg['static_folder'], app_data.app_name
)
if static_rule is not None:
self._mount_route(context.app_data, static_h, (static_rule, 'GET', None), {})
# register
self.reloader.register_app_data(context.app_data)
context.app_mounted()
return context
def _iter_registered(self):
for m in reversed(self._mixins):
for raw_h, meta in m._registered.items():
yield raw_h, meta, SimpleNamespace(**m.default_config)
for raw_h, meta in self._registered.items():
yield raw_h, meta, None
@staticmethod
def make_handler(h, fixtures, ctx: BaseContext, render_map: dict = None, exception_handler=None, mixin_data=None):
hooks = False
if fixtures:
hooks = {
fkey: fobj for fkey, fobj
in ([fkey, getattr(ctx, fkey)] for fkey in fixtures)
if fobj.is_hook
} or False
else:
fixtures = False
if exception_handler is None:
exception_handler = _dummy_exception_handler
@functools.wraps(h)
def handler(**kw):
exc = None
ctx.initialize()
ctx.mixin_data = mixin_data
try:
if fixtures:
ctx.use_fixtures(fixtures, hooks)
ctx.output = h(ctx, **kw)
except FixtureProcessError:
pass
except Exception as exc_:
exc = exc_
ctx.finalize(exc)
if ctx.exception is not None:
exception_handler(ctx, ctx.exception)
if render_map:
output = ctx.output
render = render_map.get(type(output), False)
if render:
ctx.output = render(ctx, output)
return ctx.output
return handler
@staticmethod
def _get_abs_url(base_url, path):
if not path:
return base_url
if path[0] != '/':
path = f'{base_url}/{path}'
return path
def _mount_route(self, app_data, fun, route_args, route_kw):
path, method, name = route_args
is_index = path == 'index'
path = self._get_abs_url(app_data.base_url, path)
route = self.add_route(path, method, fun, **route_kw)
app_data.routes.append(route)
if name:
if name in app_data.named_routes:
raise KeyError(f'The route name already in use: {name}')
app_data.named_routes[name] = route
if is_index:
route = self.add_route(
path[:-len('/index')] or '/', method, fun, **route_kw
)
app_data.routes.append(route)
| 31.960396
| 118
| 0.577912
| 6,121
| 0.94811
| 283
| 0.043835
| 1,490
| 0.230793
| 0
| 0
| 324
| 0.050186
|
546beba67c891d71b93c4df6d7f37c550d736d00
| 1,772
|
py
|
Python
|
observations/r/chest_sizes.py
|
hajime9652/observations
|
2c8b1ac31025938cb17762e540f2f592e302d5de
|
[
"Apache-2.0"
] | 199
|
2017-07-24T01:34:27.000Z
|
2022-01-29T00:50:55.000Z
|
observations/r/chest_sizes.py
|
hajime9652/observations
|
2c8b1ac31025938cb17762e540f2f592e302d5de
|
[
"Apache-2.0"
] | 46
|
2017-09-05T19:27:20.000Z
|
2019-01-07T09:47:26.000Z
|
observations/r/chest_sizes.py
|
hajime9652/observations
|
2c8b1ac31025938cb17762e540f2f592e302d5de
|
[
"Apache-2.0"
] | 45
|
2017-07-26T00:10:44.000Z
|
2022-03-16T20:44:59.000Z
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import numpy as np
import os
import sys
from observations.util import maybe_download_and_extract
def chest_sizes(path):
"""Chest measurements of 5738 Scottish Militiamen
Quetelet's data on chest measurements of 5738 Scottish Militiamen.
Quetelet (1846) used this data as a demonstration of the normal
distribution of physical characteristics.
A data frame with 16 observations on the following 2 variables.
`chest`
Chest size (in inches)
`count`
Number of soldiers with this chest size
Velleman, P. F. and Hoaglin, D. C. (1981). *Applications, Basics, and
Computing of Exploratory Data Analysis*. Belmont. CA: Wadsworth.
Retrieved from Statlib:
`https://www.stat.cmu.edu/StatDat/Datafiles/MilitiamenChests.html`
Args:
path: str.
Path to directory which either stores file or otherwise file will
be downloaded and extracted there.
Filename is `chest_sizes.csv`.
Returns:
Tuple of np.ndarray `x_train` with 16 rows and 2 columns and
dictionary `metadata` of column headers (feature names).
"""
import pandas as pd
path = os.path.expanduser(path)
filename = 'chest_sizes.csv'
if not os.path.exists(os.path.join(path, filename)):
url = 'http://dustintran.com/data/r/HistData/ChestSizes.csv'
maybe_download_and_extract(path, url,
save_file_name='chest_sizes.csv',
resume=False)
data = pd.read_csv(os.path.join(path, filename), index_col=0,
parse_dates=True)
x_train = data.values
metadata = {'columns': data.columns}
return x_train, metadata
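# Example usage (illustrative; downloads the CSV on first call):
#   x_train, metadata = chest_sizes('~/data')
#   print(x_train.shape)  # (16, 2)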
| 29.533333
| 71
| 0.705418
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,068
| 0.602709
|
546e4ec20d3fdf8c1c5f8ed657bb3f80549f9803
| 1,365
|
py
|
Python
|
setup.py
|
google/ads-api-reports-fetcher
|
de0bacc3ab520b020cf19985284b7e3dbc9778b0
|
[
"Apache-2.0"
] | 4
|
2022-02-16T12:42:26.000Z
|
2022-03-30T17:14:32.000Z
|
setup.py
|
google/ads-api-reports-fetcher
|
de0bacc3ab520b020cf19985284b7e3dbc9778b0
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
google/ads-api-reports-fetcher
|
de0bacc3ab520b020cf19985284b7e3dbc9778b0
|
[
"Apache-2.0"
] | 1
|
2022-03-28T05:51:57.000Z
|
2022-03-28T05:51:57.000Z
|
import pathlib
from setuptools import setup, find_packages
HERE = pathlib.Path(__file__).parent
README = (HERE / "README.md").read_text()
setup(name="google-ads-api-report-fetcher",
version="0.1",
description="Library for fetching reports from Google Ads API and saving them locally / BigQuery.",
long_description=README,
long_description_content_type="text/markdown",
url="https://github.com/google/ads-api-reports-fetcher",
author="Google Inc. (gTech gPS CSE team)",
author_email="no-reply@google.com",
license="Apache 2.0",
classifiers=[
"Programming Language :: Python :: 3",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"Operating System :: OS Independent",
"License :: OSI Approved :: Apache Software License"
],
packages=find_packages(include=["runner", "runner.*"]),
install_requires=[
"google-ads==14.1.0", "google-cloud-bigquery==2.26.0",
"pandas==1.3.4", "pyarrow==6.0.1", "tabulate"
],
setup_requires=["pytest-runner"],
tests_requires=["pytest"],
entry_points={
"console_scripts": [
"fetch-reports=runner.fetcher:main",
"post-process-queries=runner.post_processor:main",
]
})
| 36.891892
| 105
| 0.621245
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 720
| 0.527473
|
546e73d201a7995e9aa7205db669d55b27e2e940
| 2,880
|
py
|
Python
|
scan_service/scan_service/utils/stats.py
|
kkkkv/tgnms
|
a3b8fd8a69b647a614f9856933f05e50a4affadf
|
[
"MIT"
] | 12
|
2021-04-06T06:27:18.000Z
|
2022-03-18T10:52:29.000Z
|
scan_service/scan_service/utils/stats.py
|
kkkkv/tgnms
|
a3b8fd8a69b647a614f9856933f05e50a4affadf
|
[
"MIT"
] | 6
|
2022-01-04T13:32:16.000Z
|
2022-03-28T21:13:59.000Z
|
scan_service/scan_service/utils/stats.py
|
kkkkv/tgnms
|
a3b8fd8a69b647a614f9856933f05e50a4affadf
|
[
"MIT"
] | 7
|
2021-09-27T13:14:42.000Z
|
2022-03-28T16:24:15.000Z
|
#!/usr/bin/env python3
# Copyright 2004-present Facebook. All Rights Reserved.
import asyncio
import logging
import time
from collections import defaultdict
from typing import DefaultDict, Dict, List
from tglib.clients.prometheus_client import PrometheusClient, consts
from tglib.exceptions import ClientRuntimeError
from .topology import Topology
def reshape_values(network_name: str, values: Dict) -> DefaultDict:
"""Reshape the Prometheus results and map to other node's MAC address."""
node_metrics: DefaultDict = defaultdict(dict)
other_node: str
for metric, result in values.items():
for link_result in result:
node_pair = Topology.link_name_to_mac.get(network_name, {}).get(
link_result["metric"]["linkName"]
)
if node_pair is None:
logging.error(
f"Missing node_mac mapping for {link_result['metric']['linkName']}"
)
continue
if link_result["metric"]["radioMac"] == node_pair[0]:
other_node = node_pair[1]
elif link_result["metric"]["radioMac"] == node_pair[1]:
other_node = node_pair[0]
else:
logging.error(
"Incorrect node_mac mapping for "
f"{link_result['metric']['linkName']}"
)
continue
if link_result["values"]:
node_metrics[other_node][metric] = link_result["values"][-1][1]
return node_metrics
async def get_latest_stats(
network_name: str,
radio_mac: str,
metrics: List[str],
sample_period: int = 300,
hold_period: int = 30,
) -> DefaultDict:
"""Fetch latest metric values for specific links in the network."""
client = PrometheusClient(timeout=2)
coros = []
curr_time = int(time.time())
for metric in metrics:
coros.append(
client.query_range(
client.format_query(
metric, {consts.network: network_name, consts.radio_mac: radio_mac}
),
step=f"{hold_period+1}s",
start=curr_time - sample_period,
end=curr_time,
)
)
values: Dict = {}
for metric_name, response in zip(
metrics, await asyncio.gather(*coros, return_exceptions=True)
):
if isinstance(response, ClientRuntimeError):
logging.error(response)
continue
if response["status"] != "success":
logging.error(f"Failed to fetch {metric_name} data for {radio_mac}")
continue
result = response["data"]["result"]
if not result:
logging.error(f"Found no results for {metric}")
else:
values[metric_name] = result
return reshape_values(network_name, values)
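# A minimal usage sketch (the network name, radio MAC, and metric names below are
# hypothetical):
#
#   node_metrics = asyncio.run(
#       get_latest_stats("tower_net", "aa:bb:cc:dd:ee:ff", ["snr", "mcs"]))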
| 32.727273
| 87
| 0.594097
| 0
| 0
| 0
| 0
| 0
| 0
| 1,329
| 0.461458
| 560
| 0.194444
|
547084a7679711993b0e3d30495458fce0c7f40b
| 1,866
|
py
|
Python
|
multithread_pipeline.py
|
kapitsa2811/smartOCR
|
6ecca79b29778778b1458ea28763a39920a3d58a
|
[
"MIT"
] | null | null | null |
multithread_pipeline.py
|
kapitsa2811/smartOCR
|
6ecca79b29778778b1458ea28763a39920a3d58a
|
[
"MIT"
] | null | null | null |
multithread_pipeline.py
|
kapitsa2811/smartOCR
|
6ecca79b29778778b1458ea28763a39920a3d58a
|
[
"MIT"
] | null | null | null |
import glob
import os
from io import StringIO
from threading import Thread
import logging
from logger import TimeHandler
from costants import THREADS, INFERENCE_GRAPH
from pipeline import pipeline
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
logger.addHandler(TimeHandler().handler)
class MyThread(Thread):
def __init__(self, name, file_path):
Thread.__init__(self)
self.name = name
self.path = file_path
def run(self):
for file_path in self.path:
file_path = os.path.join(file_path)
fp = StringIO()
pipeline(
pdf_path=file_path,
inference_graph_path=INFERENCE_GRAPH,
thread_name=self.name
)
logger.info(fp.getvalue())
fp.close()
if __name__ == '__main__':
path_list = []
for path in glob.iglob("..\\Polizze\\" + '/**/*.pdf', recursive=True):
path_list.append(path)
el_per_list = int(len(path_list) / THREADS)
thread_list = []
i = 0
path_list_per_thread = []
if len(path_list) == 1:
new_thread = MyThread('Thread_{}'.format(0), path_list)
new_thread.start()
new_thread.join()
else:
        for i in range(0, THREADS):
            if i < THREADS - 1:
                path_list_per_thread = path_list[el_per_list * i:el_per_list * (i + 1)]
            else:
                # the last thread takes the remainder
                # (empty list if there is only one element)
                path_list_per_thread = path_list[el_per_list * i:]
new_thread = MyThread('Thread_{}'.format(i), path_list_per_thread)
new_thread.start()
thread_list.append(new_thread)
for new_thread in thread_list:
new_thread.join()
| 29.619048
| 115
| 0.576099
| 523
| 0.280279
| 0
| 0
| 0
| 0
| 0
| 0
| 97
| 0.051983
|
5470a342899892808b0ad450ef5da5a2f9cf5b36
| 12,319
|
py
|
Python
|
src/keys_server/GMO/GMOKeysLookup.py
|
OasisLMF/gem
|
95c755a1cb76a2bbc41e5dd7bc503c59123ca3ac
|
[
"BSD-2-Clause"
] | null | null | null |
src/keys_server/GMO/GMOKeysLookup.py
|
OasisLMF/gem
|
95c755a1cb76a2bbc41e5dd7bc503c59123ca3ac
|
[
"BSD-2-Clause"
] | null | null | null |
src/keys_server/GMO/GMOKeysLookup.py
|
OasisLMF/gem
|
95c755a1cb76a2bbc41e5dd7bc503c59123ca3ac
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Python 2 standard library imports
import csv
import io
import logging
import os
# Python 2 non-standard library imports
import pandas as pd
# Imports from Oasis core repos + subpackages or modules within keys_server
from oasislmf.utils.coverages import COVERAGE_TYPES
from oasislmf.utils.peril import PERILS
from oasislmf.utils.status import OASIS_KEYS_STATUS
from oasislmf.model_preparation.lookup import OasisBaseKeysLookup
from oasislmf.utils.log import oasis_log

KEYS_STATUS_FAIL = OASIS_KEYS_STATUS['fail']['id']
KEYS_STATUS_NOMATCH = OASIS_KEYS_STATUS['nomatch']['id']
KEYS_STATUS_SUCCESS = OASIS_KEYS_STATUS['success']['id']
from .utils import (
AreaPerilLookup,
VulnerabilityLookup,
)
#
# Public entry point
#
__all__ = [
'GMOKeysLookup'
]
#
# START - deprecated oasislmf.utils.values
#
from datetime import datetime
import pytz
NULL_VALUES = [None, '', 'n/a', 'N/A', 'null', 'Null', 'NULL']
def get_timestamp(thedate=None, fmt='%Y%m%d%H%M%S'):
""" Get a timestamp """
d = thedate if thedate else datetime.now()
return d.strftime(fmt)
def get_utctimestamp(thedate=None, fmt='%Y-%b-%d %H:%M:%S'):
"""
Returns a UTC timestamp for a given ``datetime.datetime`` in the
specified string format - the default format is::
YYYY-MMM-DD HH:MM:SS
"""
d = thedate.astimezone(pytz.utc) if thedate else datetime.utcnow()
return d.strftime(fmt)
def to_string(val):
"""
Converts value to string, with possible additional formatting.
"""
return '' if val is None else str(val)
def to_int(val):
"""
Parse a string to int
"""
return None if val in NULL_VALUES else int(val)
def to_float(val):
"""
Parse a string to float
"""
return None if val in NULL_VALUES else float(val)
#
# END - deprecated oasislmf.utils.values
#
""" ---- Implementation note ----
In the original lookup implementation each location can map to multiple vulnerability ids,
each with difference levels of ductility and or material type.
Note from Malcolm:
Ductility is largely a product of materials, with unreinforced
masonry being the worst and wood the best. The reason it’s probably
not explicitly included in commercial cat models is
likely that the ductility for a given material is largely a function of age,
since better construction codes usually leads to more ductile structures.
    Age usually is explicitly included in cat models whereas
the GEM functions capture this through the construction itself.
Original taxonomy:
gem_taxonomy_by_oed_occupancy_and_number_of_storeys_df = pd.DataFrame.from_dict({
'constructioncode': ['5156', '5150', '5150', '5150', '5150', '5150', '5150', '5109', '5109', '5109', '5109', '5109', '5109', '5109', '5105', '5105', '5105', '5105', '5105', '5105', '5105', '5105', '5101', '5103', '5103', '5103', '5000', '5050', '5050', '5050', '5050', '5050'],
'numberofstoreys': [1, 2, 2, 3, 2, 3, 1, 2, 3, 2, 3, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 2, 2, 1, 2, 1, 1, 1, 2, 1, -1],
'taxonomy': ['CR-PC_LWAL-DNO_H1', 'CR_LFINF-DNO_H2', 'CR_LFINF-DUH_H2', 'CR_LFINF-DUH_H3', 'CR_LFINF-DUM_H2', 'CR_LFINF-DUM_H3', 'CR_LFM-DNO_H1', 'MCF_LWAL-DNO_H2', 'MCF_LWAL-DNO_H3', 'MCF_LWAL-DUH_H2', 'MCF_LWAL-DUH_H3', 'MCF_LWAL-DUM_H2','MCF_LWAL-DUM_H3', 'MR_LWAL-DNO_H1','MR_LWAL-DNO_H2', 'MR_LWAL-DNO_H3','MR_LWAL-DUH_H1', 'MR_LWAL-DUH_H2', 'MR_LWAL-DUH_H3', 'MR_LWAL-DUM_H1', 'MR_LWAL-DUM_H2', 'MR_LWAL-DUM_H3', 'MUR-ADO_LWAL-DNO_H2', 'MUR-ST_LWAL-DNO_H2', 'MUR_LWAL-DNO_H1', 'MUR_LWAL-DNO_H2', 'UNK_H1', 'W-WBB_LPB-DNO_H1', 'W-WLI_LWAL-DNO_H1', 'W-WLI_LWAL-DNO_H2', 'W-WS_LPB-DNO_H1', 'W-']
})
The below was changed so that each unique combination of ('constructioncode', 'numberofstoreys')
maps to a single 'taxonomy' code
"""
gem_taxonomy_by_oed_occupancy_and_number_of_storeys_df = pd.DataFrame.from_dict({
'constructioncode': ['5156', '5150', '5150', '5150', '5109', '5109', '5109', '5105', '5105', '5105', '5101', '5103', '5103', '5000', '5050', '5050', '5050'],
'numberofstoreys': [1, 2, 3, 1, 2, 3, 1, 1, 2, 3, 2, 1, 2, 1, 1, 2, -1],
'taxonomy': ['CR-PC_LWAL-DNO_H1', 'CR_LFINF-DUM_H2', 'CR_LFINF-DUM_H3', 'CR_LFM-DNO_H1', 'MCF_LWAL-DNO_H2', 'MCF_LWAL-DNO_H3', 'MR_LWAL-DNO_H1', 'MR_LWAL-DUM_H1', 'MR_LWAL-DUM_H2', 'MR_LWAL-DUM_H3', 'MUR-ADO_LWAL-DNO_H2', 'MUR_LWAL-DNO_H1', 'MUR_LWAL-DNO_H2', 'UNK_H1', 'W-WLI_LWAL-DNO_H1', 'W-WLI_LWAL-DNO_H2', 'W-']
})
class GMOKeysLookup(OasisBaseKeysLookup):
"""
GMO keys lookup.
"""
_LOCATION_RECORD_META = {
'id': {
'source_header': 'loc_id', 'csv_data_type': int,
'validator': to_int, 'desc': 'Location ID'
},
'lon': {
'source_header': 'Longitude', 'csv_data_type': float,
'validator': to_float, 'desc': 'Longitude'
},
'lat': {
'source_header': 'Latitude', 'csv_data_type': float,
'validator': to_float, 'desc': 'Latitude'
},
'county': {
'source_header': 'GeogName1',
'csv_data_type': str,
'validator': to_string, 'desc': 'County'
},
'state': {
'source_header': 'AreaName1',
'csv_data_type': str,
'validator': to_string, 'desc': 'State'
},
'country': {
'source_header': 'CountryCode',
'csv_data_type': str,
'validator': to_string, 'desc': 'Country'
},
'coverage': {
'source_header': 'BuildingTIV', 'csv_data_type': int,
'validator': to_int, 'desc': 'Coverage'
},
'taxonomy': {
'source_header': 'taxonomy',
'csv_data_type': str,
'validator': to_string, 'desc': 'Class #1'
},
'occupancy': {
'source_header': 'OccupancyCode',
'csv_data_type': str,
'validator': to_string, 'desc': 'Class #2'
},
'imt': {
'source_header': 'type',
'csv_data_type': str,
'validator': to_string, 'desc': 'Intensity Measure'
}
}
@oasis_log()
def __init__(self,
keys_data_directory=None,
supplier='GEMFoundation',
model_name='GMO',
model_version=None,
complex_lookup_config_fp=None,
output_directory=None):
"""
Initialise the static data required for the lookup.
"""
super(self.__class__, self).__init__(
keys_data_directory,
supplier,
model_name,
model_version
)
# join IMTs with locs
self.vulnDict = pd.read_csv(os.path.join(self.keys_data_directory, 'vulnerability_dict.csv'))
self.area_peril_lookup = AreaPerilLookup(
areas_file=os.path.join(
self.keys_data_directory, 'areaperil_dict.csv')
) if keys_data_directory else AreaPerilLookup()
self.vulnerability_lookup = VulnerabilityLookup(
vulnerabilities_file=os.path.join(
self.keys_data_directory, 'vulnerability_dict.csv')
) if keys_data_directory else VulnerabilityLookup()
@oasis_log()
def process_locations(self, loc_df):
"""
Process location rows - passed in as a pandas dataframe.
"""
# Mapping to OED
set_dtype = {'constructioncode': 'int',
'numberofstoreys': 'int'}
loc_df = loc_df.astype(set_dtype).merge(
gem_taxonomy_by_oed_occupancy_and_number_of_storeys_df.astype(set_dtype),
on=['constructioncode', 'numberofstoreys'])
loc_df = loc_df.merge(self.vulnDict, on="taxonomy")
pd.set_option('display.max_columns', 500)
        # Enforce a single taxonomy per location row (safeguard)
loc_df.drop_duplicates(subset=['locperilscovered', 'loc_id'], keep='first', inplace=True)
for i in range(len(loc_df)):
record = self._get_location_record(loc_df.iloc[i])
area_peril_rec = self.area_peril_lookup.do_lookup_location(record)
vuln_peril_rec = \
self.vulnerability_lookup.do_lookup_location(record)
status = message = ''
# print(area_peril_rec)
# print(vuln_peril_rec)
# print(KEYS_STATUS_SUCCESS)
if area_peril_rec['status'] == \
vuln_peril_rec['status'] == KEYS_STATUS_SUCCESS:
status = KEYS_STATUS_SUCCESS
elif (
area_peril_rec['status'] == KEYS_STATUS_FAIL or
vuln_peril_rec['status'] == KEYS_STATUS_FAIL
):
status = KEYS_STATUS_FAIL
message = '{}, {}'.format(
area_peril_rec['message'],
vuln_peril_rec['message']
)
else:
status = KEYS_STATUS_NOMATCH
message = 'No area peril or vulnerability match'
record = {
"loc_id": record['id'],
"peril_id": PERILS['earthquake']['id'],
"coverage_type": COVERAGE_TYPES['buildings']['id'],
"area_peril_id": area_peril_rec['area_peril_id'],
"vulnerability_id": vuln_peril_rec['vulnerability_id'],
"message": message,
"status": status
}
            yield record
def process_locations_multiproc(self, locations):
"""
Process location rows - passed in as a pandas dataframe.
"""
# Mapping to OED
set_dtype = {'constructioncode': 'int',
'numberofstoreys': 'int'}
loc_df = locations.astype(set_dtype).merge(
gem_taxonomy_by_oed_occupancy_and_number_of_storeys_df.astype(set_dtype),
on=['constructioncode', 'numberofstoreys'])
loc_df = loc_df.merge(self.vulnDict, on="taxonomy")
pd.set_option('display.max_columns', 500)
        # Enforce a single taxonomy per location row (safeguard)
loc_df.drop_duplicates(subset=['locperilscovered', 'loc_id'], keep='first', inplace=True)
results = []
for i in range(len(loc_df)):
record = self._get_location_record(loc_df.iloc[i])
area_peril_rec = self.area_peril_lookup.do_lookup_location(record)
vuln_peril_rec = \
self.vulnerability_lookup.do_lookup_location(record)
status = message = ''
if area_peril_rec['status'] == \
vuln_peril_rec['status'] == KEYS_STATUS_SUCCESS:
status = KEYS_STATUS_SUCCESS
elif (
area_peril_rec['status'] == KEYS_STATUS_FAIL or
vuln_peril_rec['status'] == KEYS_STATUS_FAIL
):
status = KEYS_STATUS_FAIL
message = '{}, {}'.format(
area_peril_rec['message'],
vuln_peril_rec['message']
)
else:
status = KEYS_STATUS_NOMATCH
message = 'No area peril or vulnerability match'
record = {
"loc_id": record['id'],
"peril_id": PERILS['earthquake']['id'],
"coverage_type": COVERAGE_TYPES['buildings']['id'],
"area_peril_id": area_peril_rec['area_peril_id'],
"vulnerability_id": vuln_peril_rec['vulnerability_id'],
"message": message,
"status": status
}
results.append(record)
        return results
def _get_location_record(self, loc_item):
"""
Construct a location record (dict) from the location item, which in this
case is a row in a Pandas dataframe.
"""
# print("!! _get_location_record: {0}".format(loc_item))
meta = self._LOCATION_RECORD_META
return dict((
k,
meta[k]['validator'](loc_item[meta[k]['source_header'].lower()])
) for k in meta
)
| 36.554896
| 606
| 0.596396
| 7,708
| 0.625599
| 2,218
| 0.180018
| 3,336
| 0.270757
| 0
| 0
| 5,492
| 0.445743
|
5470aea747a6878071245059e1de2776baa03338
| 18,485
|
py
|
Python
|
pandemic_eval.py
|
aypan17/value_learning
|
240a67ecf99b178fe0c4ced2bfd1dd50453fbdfe
|
[
"MIT"
] | null | null | null |
pandemic_eval.py
|
aypan17/value_learning
|
240a67ecf99b178fe0c4ced2bfd1dd50453fbdfe
|
[
"MIT"
] | null | null | null |
pandemic_eval.py
|
aypan17/value_learning
|
240a67ecf99b178fe0c4ced2bfd1dd50453fbdfe
|
[
"MIT"
] | null | null | null |
import time
import sys
import json
import argparse
from tqdm import trange
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
import torch
import numpy as np
from scipy.spatial.distance import jensenshannon
import gym
import matplotlib.pyplot as plt
from matplotlib.axes import Axes
from matplotlib.ticker import MaxNLocator
from matplotlib.lines import Line2D
import pandemic_simulator as ps
from pandemic_simulator.environment.reward import RewardFunction, SumReward, RewardFunctionFactory, RewardFunctionType
from pandemic_simulator.environment.interfaces import InfectionSummary
from pandemic_simulator.viz import PandemicViz
from pandemic_simulator.environment import PandemicSimOpts
from stable_baselines3.common import base_class
from stable_baselines3.common.vec_env import DummyVecEnv, VecEnv
def hellinger(p, q):
# distance between p and q
# p and q are np array probability distributions
return (1.0 / np.sqrt(2.0)) * np.sqrt(np.sum(np.square(np.sqrt(p) - np.sqrt(q)), axis=1))
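# Quick sanity check (illustrative): identical distributions give distance 0, and
# disjoint one-hot distributions give the maximum distance 1.
#   p = np.array([[1.0, 0.0]]); q = np.array([[0.0, 1.0]])
#   hellinger(p, p)  # -> array([0.])
#   hellinger(p, q)  # -> array([1.])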
def evaluate_policy(
name: str,
model: "base_class.BaseAlgorithm",
base_model: "base_class.BaseAlgorithm",
env: Union[gym.Env, VecEnv],
n_eval_episodes: int = 32,
deterministic: bool = True,
render: bool = False,
viz: Optional[PandemicViz] = None,
reward_threshold: Optional[float] = None,
return_episode_rewards: bool = False,
warn: bool = True,
) -> Union[Tuple[float, float], Tuple[List[float], List[int]]]:
"""
    Runs the policy for a fixed 200-step rollout and returns cumulative proxy and
    true rewards, together with policy-divergence statistics against ``base_model``.
If a vector env is passed in, this divides the episodes to evaluate onto the
different elements of the vector env. This static division of work is done to
remove bias. See https://github.com/DLR-RM/stable-baselines3/issues/402 for more
details and discussion.
.. note::
If environment has not been wrapped with ``Monitor`` wrapper, reward and
episode lengths are counted as it appears with ``env.step`` calls. If
the environment contains wrappers that modify rewards or episode lengths
(e.g. reward scaling, early episode reset), these will affect the evaluation
results as well. You can avoid this by wrapping environment with ``Monitor``
wrapper before anything else.
:param model: The RL agent you want to evaluate.
:param env: The gym environment or ``VecEnv`` environment.
:param n_eval_episodes: Number of episode to evaluate the agent
:param deterministic: Whether to use deterministic or stochastic actions
:param render: Whether to render the environment or not
:param reward_threshold: Minimum expected reward per episode,
this will raise an error if the performance is not met
:param return_episode_rewards: If True, a list of rewards and episode lengths
per episode will be returned instead of the mean.
:param warn: If True (default), warns user about lack of a Monitor wrapper in the
evaluation environment.
    :return: Tuple of (proxy reward, proxy reward std, true reward, true reward std,
        second true reward, second true reward std, KL estimates, Jensen-Shannon
        distances, Hellinger distances, per-step log-probs, base-model log-probs,
        value estimates, base-model value estimates).
"""
if not isinstance(env, VecEnv):
env = DummyVecEnv([lambda: env])
episode_rewards = []
reward_std = []
episode_true_rewards = []
true_reward_std = []
episode_true_rewards2 = []
true_reward_std2 = []
vfs = []
log_probs = []
ents = []
base_vfs = []
base_log_probs = []
base_ents = []
kls = []
js = []
h = []
numpy_obs = env.reset()
states = None
for t in range(200):
actions, states = model.predict(numpy_obs, state=states, deterministic=True)
vf, logp, ent = model.policy.evaluate_actions(torch.as_tensor(numpy_obs), torch.as_tensor(actions))
base_vf, base_logp, base_ent = base_model.policy.evaluate_actions(torch.as_tensor(numpy_obs), torch.as_tensor(actions))
vfs.append(torch.mean(vf).detach().item())
log_probs.append(torch.mean(logp).detach().item())
ents.append(torch.mean(ent).detach().item())
base_vfs.append(torch.mean(base_vf).detach().item())
base_log_probs.append(torch.mean(base_logp).detach().item())
base_ents.append(torch.mean(base_ent).detach().item())
# Distances
log_ratio = logp - base_logp
        # Low-variance (k3) estimator of the KL divergence, from http://joschu.net/blog/kl-approx.html
kls.append(torch.mean(torch.exp(log_ratio) - 1 - log_ratio).item())
latent_pi, _, latent_sde = model.policy._get_latent(torch.as_tensor(numpy_obs))
model_dist = model.policy._get_action_dist_from_latent(latent_pi, latent_sde=latent_sde).distribution.probs.detach().numpy()
latent_pi, _, latent_sde = base_model.policy._get_latent(torch.as_tensor(numpy_obs))
base_dist = base_model.policy._get_action_dist_from_latent(latent_pi, latent_sde=latent_sde).distribution.probs.detach().numpy()
js.append(np.mean(jensenshannon(model_dist, base_dist, axis=1)).item())
h.append(np.mean(hellinger(model_dist, base_dist)).item())
numpy_obs, _, done, info = env.step(actions)
rew = env.get_attr("last_reward")
true_rew = env.get_attr("get_true_reward")
true_rew2 = env.get_attr("get_true_reward2")
episode_rewards.append(np.mean(rew))
reward_std.append(rew)
episode_true_rewards.append(np.mean(true_rew))
true_reward_std.append(true_rew)
episode_true_rewards2.append(np.mean(true_rew2))
true_reward_std2.append(true_rew2)
obs = env.get_attr("observation")
infection_data = np.zeros((1, 5))
threshold_data = np.zeros(len(obs))
for o in obs:
infection_data += o.global_infection_summary[-1]
gis = np.array([o.global_infection_summary[-1] for o in obs]).squeeze(1)
gts = np.array([o.global_testing_summary[-1] for o in obs]).squeeze(1)
stage = np.array([o.stage[-1].item() for o in obs])
if viz:
viz.record_list(obs[0], gis, gts, stage, rew, true_rew, true_rew2=true_rew2)
reward = np.sum(episode_rewards).item()
true_reward = np.sum(episode_true_rewards).item()
true_reward2 = np.sum(episode_true_rewards2).item()
#if viz:
# viz.plot(name=name, evaluate=True, plots_to_show=['critical_summary', 'stages', 'cumulative_reward', 'cumulative_true_reward2'])
# viz.reset()
return reward, np.std(np.sum(np.array(reward_std), axis=0)).item(), \
true_reward, np.std(np.sum(np.array(true_reward_std), axis=0)).item(), \
true_reward2, np.std(np.sum(np.array(true_reward_std2), axis=0)).item(), \
kls, js, h, log_probs, base_log_probs, vfs, base_vfs
def plot_critical_summary(ax, viz, color, sty, m):
gis = np.vstack(viz._gis).squeeze()
gis_std = np.vstack(viz._gis_std).squeeze()
ax.plot(viz._num_persons * gis[:, viz._critical_index], color='black', linestyle=sty, linewidth=1, label='_nolegend_')
#ax.fill_between(np.arange(len(gis)), viz._num_persons * (gis-gis_std)[:, viz._critical_index], viz._num_persons * (gis+gis_std)[:, viz._critical_index], alpha=0.1, color=color)
ax.plot(np.arange(gis.shape[0]), np.ones(gis.shape[0]) * viz._max_hospital_capacity, 'y')
ax.legend(['Max hospital capacity'], loc='upper left')
ax.set_ylim(-0.1, viz._max_hospital_capacity * 3)
ax.set_title('ICU Usage', fontsize=16)
ax.set_xlabel('time (days)', fontsize=16)
ax.set_ylabel('persons', fontsize=16)
ax.yaxis.set_major_locator(MaxNLocator(integer=True))
height = viz._num_persons * gis[m, viz._critical_index]
ax.plot([m, m], [-0.1, height], color=color, linestyle=sty, linewidth=2)
ax.plot([0, m], [height, height], color=color, linestyle=sty, linewidth=2)
def plot_stages(ax, viz, color, sty):
days = np.arange(len(viz._stages))
stages = np.array(viz._stages)
stages_std = np.array(viz._stages_std)
ax.plot(days, stages, color='black', linestyle=sty, linewidth=1)
#ax.fill_between(days, stages - stages_std, stages + stages_std, alpha=0.1, color=color)
ax.set_ylim(-0.1, 5) # This assumes at most 5 stages!!
ax.set_title('Regulation Stage', fontsize=16)
ax.set_xlabel('time (days)', fontsize=16)
ax.yaxis.set_major_locator(MaxNLocator(integer=True))
m = np.argmax(stages[50:]) + 50
ax.plot([m, m], [-0.1, stages[m]], color=color, linestyle=sty, linewidth=2)
p1 = Line2D([0,1],[0,1],linestyle='-', color='black')
p2 = Line2D([0,1],[0,1],linestyle='--', color='black')
ax.legend([p1, p2], ['smaller policy', 'larger policy'], loc='upper right')
return m
def plot(v1, v2):
fig, (ax1, ax2) = plt.subplots(1, 2)
c1 = 'red'
c2 = 'blue'
s1 = '-'
s2 = '--'
m1 = plot_stages(ax2, v1, c1, s1)
plot_critical_summary(ax1, v1, c1, s1, m1)
m2 = plot_stages(ax2, v2, c2, s2)
plot_critical_summary(ax1, v2, c2, s2, m2)
ax1.figure.set_size_inches(4, 3)
ax2.figure.set_size_inches(4, 3)
fig.set_size_inches(8, 3)
plt.savefig('test.svg',dpi=120, bbox_inches='tight', pad_inches = 0, format='svg')
def make_cfg():
# cfg = ps.sh.small_town_config
# cfg.delta_start_lo = int(sys.argv[6])
# cfg.delta_start_hi = int(sys.argv[7])
# return cfg
sim_config = ps.env.PandemicSimConfig(
num_persons=500,
location_configs=[
ps.env.LocationConfig(ps.env.Home, num=150),
ps.env.LocationConfig(ps.env.GroceryStore, num=2, num_assignees=5, state_opts=dict(visitor_capacity=30)),
ps.env.LocationConfig(ps.env.Office, num=2, num_assignees=150, state_opts=dict(visitor_capacity=0)),
ps.env.LocationConfig(ps.env.School, num=10, num_assignees=2, state_opts=dict(visitor_capacity=30)),
ps.env.LocationConfig(ps.env.Hospital, num=1, num_assignees=15, state_opts=dict(patient_capacity=5)),
ps.env.LocationConfig(ps.env.RetailStore, num=2, num_assignees=5, state_opts=dict(visitor_capacity=30)),
ps.env.LocationConfig(ps.env.HairSalon, num=2, num_assignees=3, state_opts=dict(visitor_capacity=5)),
ps.env.LocationConfig(ps.env.Restaurant, num=1, num_assignees=6, state_opts=dict(visitor_capacity=30)),
ps.env.LocationConfig(ps.env.Bar, num=1, num_assignees=3, state_opts=dict(visitor_capacity=30))
],
person_routine_assignment=ps.sh.DefaultPersonRoutineAssignment(),
delta_start_lo = 95,
delta_start_hi = 105
)
sim_config_med = ps.env.PandemicSimConfig(
num_persons=2000,
location_configs=[
ps.env.LocationConfig(ps.env.Home, num=600),
ps.env.LocationConfig(ps.env.GroceryStore, num=4, num_assignees=10, state_opts=dict(visitor_capacity=30)),
ps.env.LocationConfig(ps.env.Office, num=4, num_assignees=300, state_opts=dict(visitor_capacity=0)),
ps.env.LocationConfig(ps.env.School, num=20, num_assignees=4, state_opts=dict(visitor_capacity=30)),
ps.env.LocationConfig(ps.env.Hospital, num=2, num_assignees=30, state_opts=dict(patient_capacity=5)),
ps.env.LocationConfig(ps.env.RetailStore, num=4, num_assignees=10, state_opts=dict(visitor_capacity=30)),
ps.env.LocationConfig(ps.env.HairSalon, num=4, num_assignees=6, state_opts=dict(visitor_capacity=5)),
ps.env.LocationConfig(ps.env.Restaurant, num=2, num_assignees=12, state_opts=dict(visitor_capacity=30)),
ps.env.LocationConfig(ps.env.Bar, num=2, num_assignees=6, state_opts=dict(visitor_capacity=30))
],
person_routine_assignment=ps.sh.DefaultPersonRoutineAssignment(),
        delta_start_lo=95,
        delta_start_hi=105
)
return sim_config
def make_reg():
return ps.sh.austin_regulations
def make_sim(sim_config, noise):
sim_opt = PandemicSimOpts()
sim_opt.spontaneous_testing_rate = noise
return ps.env.PandemicSim.from_config(sim_config=sim_config, sim_opts=sim_opt)
def make_viz(sim_config):
return ps.viz.GymViz.from_config(sim_config=sim_config)
def load_model(env, model_path, width, depth):
agent = ps.model.StageModel(env = env)
d_model = width
n_layers = depth
net_arch = [d_model] * n_layers if n_layers != 0 else []
policy_kwargs = {
"net_arch": [dict(pi=net_arch, vf=net_arch)],
}
model = agent.get_model("ppo", policy_kwargs = policy_kwargs, verbose = 0)
return model.load(model_path)
def init(args, noise):
n_cpus = args.n_cpus
ps.init_globals(seed=args.seed)
sim_config = make_cfg()
regulations = make_reg()
viz = make_viz(sim_config)
done_fn = ps.env.DoneFunctionFactory.default(ps.env.DoneFunctionType.TIME_LIMIT, horizon=200)
reward_fn = SumReward(
reward_fns=[
RewardFunctionFactory.default(RewardFunctionType.INFECTION_SUMMARY_ABOVE_THRESHOLD,
summary_type=InfectionSummary.CRITICAL,
threshold=sim_config.max_hospital_capacity / sim_config.num_persons),
RewardFunctionFactory.default(RewardFunctionType.INFECTION_SUMMARY_ABSOLUTE,
summary_type=InfectionSummary.CRITICAL),
RewardFunctionFactory.default(RewardFunctionType.LOWER_STAGE,
num_stages=len(regulations)),
RewardFunctionFactory.default(RewardFunctionType.SMOOTH_STAGE_CHANGES,
num_stages=len(regulations))
],
        weights=[0, 10, 0.1, 0.01]  # matches reward_fns order: threshold term disabled, critical-count term dominant
)
gym = ps.env.PandemicPolicyGymEnv.from_config(
sim_config=sim_config,
sim_opts = PandemicSimOpts(spontaneous_testing_rate=noise),
pandemic_regulations=regulations,
done_fn=done_fn,
reward_fn=reward_fn,
constrain=True,
four_start=False,
obs_history_size=3,
num_days_in_obs=8
)
env = gym.get_multi_env(n=n_cpus) if n_cpus > 1 else gym.get_single_env()
return env, viz
def evaluate(env, model_path, width, depth, base_model, viz):
model = load_model(env, model_path, width, depth)
model_parameters = filter(lambda p: p.requires_grad, model.policy.mlp_extractor.parameters())
params = sum([np.prod(p.size()) for p in model_parameters])
params = int(params)
print(f"Evaluating {model_path+str(width)}...")
reward, rstd, true_reward, trstd, true_reward2, tr2std, kl, js, h, log_probs, base_log_probs, vfs, base_vfs = evaluate_policy(model_path, model, base_model, env, viz=viz)
env.close()
print(f"Model: {model_path}. Proxy: {reward}. Objective: {true_reward}.")
return params, reward, rstd, true_reward, trstd, true_reward2, tr2std, kl, js, h, log_probs, base_log_probs, vfs, base_vfs
def main():
parser = argparse.ArgumentParser()
parser.add_argument('model_path')
parser.add_argument('base_model_path')
parser.add_argument('base_width', type=int)
parser.add_argument('base_depth', type=int)
parser.add_argument('--seed', type=int, default=17)
parser.add_argument('--n_cpus', type=int, default=32)
parser.add_argument('--n_episodes', type=int, default=32)
parser.add_argument('--epoch', type=int, default=0)
parser.add_argument('--width', type=int, default=0)
#parser.add_argument('--noise', type=str, default="")
args = parser.parse_known_args(sys.argv[1:])[0]
vs = []
for w in [16, 112]:
env, viz = init(args, 0.02)
base_model = load_model(env, args.base_model_path, args.base_width, args.base_depth)
evaluate(env, args.model_path+str(w), w, 2, base_model, viz)
vs.append(viz)
plot(vs[0], vs[1])
# params, reward, reward_std, true_reward, true_reward_std, true_reward2, true_reward2_std, kls, js, h, log_probs, base_log_probs, vfs, base_vfs, e, noises = \
# [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []
# #widths = [4, 8, 12, 16, 20, 24, 28, 32] if args.width == 0 else [40, 48, 56, 64, 80, 96, 112, 128]
# for w in [args.width]:
# for noise in ['01', '02', '003', '005', '03', '04', '05', '06', '07', '08', '09', '095', '1']:
# n2n = {'01':0.1, '02':0.2, '003':0.03, '005':0.05, '03':0.3, '04':0.4, '05':0.5, '06':0.6, '07':0.7, '08':0.8, '09':0.9, '095':0.95, '1':1}
# env, viz = init(args, n2n[noise])
# base_model = load_model(env, args.base_model_path, args.base_width, args.base_depth)
# p, r, rs, tr, trs, tr2, tr2s, kl, j_s, h_, logp, blogp, vf, bvf = evaluate(env, args.model_path+noise+"_"+str(w), w, 2, base_model, viz)
# noises.append(n2n[noise])
# params.append(p)
# reward.append(r)
# reward_std.append(rs)
# true_reward.append(tr)
# true_reward_std.append(trs)
# true_reward2.append(tr2)
# true_reward2_std.append(tr2s)
# kls.append(kl)
# js.append(j_s)
# h.append(h_)
# log_probs.append(logp)
# base_log_probs.append(blogp)
# vfs.append(vf)
# base_vfs.append(bvf)
# e.append(args.epoch)
# f = open(f"pandemic_{args.epoch}_{args.width}_noise.json", "w")
# json.dump({'params':params, 'noise':noises, 'rew': reward, 'rew_std': reward_std, 'true_rew': true_reward, 'true_rew_std': true_reward_std, 'true_rew2': true_reward2,
# 'true_rew2_std': true_reward2_std, 'kls': kls, 'js': js, 'h': h, 'log_probs': log_probs, 'base_log_probs': base_log_probs, 'vfs': vfs, 'base_vfs': base_vfs, 'e': e}, f)
# f.close()
if __name__ == '__main__':
main()
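# --- Illustrative invocation (hypothetical script name and paths; flags follow the argparse in main()) ---
# Note that model_path is treated as a prefix: main() appends the widths 16 and 112 to it.
#   python evaluate_pandemic.py runs/policy_w runs/base_policy 64 2 --seed 17 --n_cpus 32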
| 47.51928
| 182
| 0.648526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 5,161
| 0.279199
|
5471ef5e2041074700733cd254f4357bec345d93
| 3,289
|
py
|
Python
|
WagerBrain/odds.py
|
sedemmler/WagerBrain
|
b1cc33f5eb7a6130106bf8251b554718e2d22172
|
[
"MIT"
] | 83
|
2020-03-26T22:14:24.000Z
|
2022-03-22T19:00:48.000Z
|
website.py
|
rax-v/XSS
|
ff70b89c9fb94a19caaf84e81eddeeca052344ea
|
[
"MIT"
] | 2
|
2020-03-26T19:34:03.000Z
|
2020-03-27T19:56:14.000Z
|
website.py
|
rax-v/XSS
|
ff70b89c9fb94a19caaf84e81eddeeca052344ea
|
[
"MIT"
] | 19
|
2020-04-06T10:47:30.000Z
|
2022-03-30T19:16:42.000Z
|
from fractions import Fraction
from math import gcd
import numpy as np
"""
Convert the style of gambling odds to Function Name (Decimal, American, Fractional).
TO DO: Fix edge case related to Fraction module that causes weird rounding / slightly off output
"""
def american_odds(odds):
"""
:param odds: Float (e.g., 2.25) or String (e.g., '3/1' or '5/4').
:return: Integer. Odds expressed in American terms.
"""
if isinstance(odds, int):
return odds
elif isinstance(odds, float):
if odds > 2.0:
return round((odds - 1) * 100, 0)
else:
return round(-100 / (odds - 1), 0)
elif "/" in odds:
odds = Fraction(odds)
if odds.numerator > odds.denominator:
return (odds.numerator / odds.denominator) * 100
else:
return -100 / (odds.numerator / odds.denominator)
def decimal_odds(odds):
"""
:param odds: Integer (e.g., -350) or String (e.g., '3/1' or '5/4').
:return: Float. Odds expressed in Decimal terms.
"""
if isinstance(odds, float):
return odds
elif isinstance(odds, int):
if odds >= 100:
return abs(1 + (odds / 100))
        elif odds <= -101:
return 100 / abs(odds) + 1
else:
return float(odds)
elif "/" in odds:
odds = Fraction(odds)
return round((odds.numerator / odds.denominator) + 1, 2)
def fractional_odds(odds):
"""
    :param odds: Numeric (e.g., 2.25 or -350) or String (e.g., '3/1').
:return: Fraction Class. Odds expressed in Fractional terms.
"""
if isinstance(odds, str):
return Fraction(odds)
elif isinstance(odds, int):
if odds > 0:
denom = 100
g_cd = gcd(odds, denom)
num = int(odds / g_cd)
denom = int(denom / g_cd)
return Fraction(num, denom)
else:
num = 100
g_cd = gcd(num, odds)
num = int(num / g_cd)
denom = int(odds / g_cd)
return -Fraction(num, denom)
elif isinstance(odds, float):
        # Truncating to hundredths here is the source of the rounding edge case noted in the module TO DO
        new_odds = int((odds - 1) * 100)
g_cd = gcd(new_odds, 100)
return Fraction(int(new_odds/g_cd), int(100/g_cd))
def parlay_odds(odds):
"""
:param odds: List. A list of odds for wagers to be included in parlay
:return: Parlay odds in Decimal terms
"""
return np.prod(np.array([decimal_odds(x) for x in odds]))
def convert_odds(odds, odds_style='a'):
"""
:param odds: Stated odds from bookmaker (American, Decimal, or Fractional)
:param odds_style: American ('a', 'amer', 'american'), Decimal ('d', dec','decimal) Fractional ('f','frac','fractional)
:return: Numeric. Odds converted to selected style.
"""
try:
if odds_style.lower() == "american" or odds_style.lower() == 'amer' or odds_style.lower() == 'a':
return american_odds(odds)
elif odds_style.lower() == "decimal" or odds_style.lower() == 'dec' or odds_style.lower() == 'd':
return decimal_odds(odds)
elif odds_style.lower() == "fractional" or odds_style.lower() == 'frac' or odds_style.lower() == 'f':
return fractional_odds(odds)
except (ValueError, KeyError, NameError):
return None
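# --- Illustrative usage sketch (added for clarity; not part of the original module) ---
# The expected values follow directly from the conversion rules above.
if __name__ == '__main__':
    print(convert_odds(-350, 'd'))    # 100 / 350 + 1 ~= 1.2857 (decimal)
    print(convert_odds(2.25, 'a'))    # (2.25 - 1) * 100 = 125.0 (American)
    print(convert_odds('5/4', 'd'))   # 1.25 + 1 = 2.25
    print(parlay_odds([1.91, 1.91]))  # 1.91 * 1.91 ~= 3.6481 (decimal parlay odds)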
| 28.850877
| 123
| 0.578291
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,054
| 0.320462
|
5472180161d7e60f43fc9232da207e59fa3cb086
| 16,438
|
py
|
Python
|
GANs/jsigan/ops.py
|
JonathanLehner/nnabla-examples
|
2971b987484945e12fb171594181908789485a0f
|
[
"Apache-2.0"
] | null | null | null |
GANs/jsigan/ops.py
|
JonathanLehner/nnabla-examples
|
2971b987484945e12fb171594181908789485a0f
|
[
"Apache-2.0"
] | null | null | null |
GANs/jsigan/ops.py
|
JonathanLehner/nnabla-examples
|
2971b987484945e12fb171594181908789485a0f
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2017 Sony Corporation. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import namedtuple
import nnabla as nn
import nnabla.functions as F
import nnabla.parametric_functions as PF
import nnabla.initializer as I
import numpy as np
from utils import depth_to_space
def box_filter(x, szf):
"""
Box filter
"""
y = F.identity(x)
szy = list(y.shape)
b_filt = nn.Variable((szf, szf, 1, 1))
b_filt.data.fill(1.)
b_filt = b_filt / (szf ** 2)
# 5,5,1,1
b_filt = F.tile(b_filt, [1, 1, szy[3], 1])
b_filt = F.transpose(b_filt, (3, 2, 0, 1))
b_filt = F.reshape(b_filt, (6, 5, 5))
pp = int((szf - 1) / 2)
y = F.pad(y, (0, 0, pp, pp, pp, pp, 0, 0), mode='reflect')
y_chw = F.transpose(y, (0, 3, 1, 2))
y_chw = F.depthwise_convolution(y_chw, b_filt, multiplier=1, stride=(1, 1))
y_hwc = F.transpose(y_chw, (0, 2, 3, 1))
return y_hwc
def guided_filter(img, r, eps):
"""
Edge preserving filter
"""
img2 = F.concatenate(img, img * img, axis=3)
img2 = box_filter(img2, r)
mean = F.split(img2, axis=3)
mean_i = F.stack(mean[0], mean[1], mean[2], axis=3)
mean_ii = F.stack(mean[3], mean[4], mean[5], axis=3)
var_i = mean_ii - mean_i * mean_i
a = var_i / (var_i + eps)
b = mean_i - a * mean_i
ab = F.concatenate(a, b, axis=3)
ab = box_filter(ab, r)
mean_ab = F.split(ab, axis=3)
mean_a = F.stack(mean_ab[0], mean_ab[1], mean_ab[2], axis=3)
mean_b = F.stack(mean_ab[3], mean_ab[4], mean_ab[5], axis=3)
q = mean_a * img + mean_b
return q
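# --- Shape sketch (illustrative; assumes the channel-last (N, H, W, C) layout used throughout this file) ---
# img = nn.Variable((1, 64, 64, 3))       # RGB-like input batch
# base = guided_filter(img, 5, 0.01)      # smoothed "base" layer, same shape as img
# detail = F.div2(img, base + 1e-15)      # "detail" layer, as computed in model() below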
def conv_2d(x, o_ch, kernel, name=None):
"""
Convolution for JSInet
"""
b = I.ConstantInitializer(0.)
h = PF.convolution(x, o_ch, kernel=kernel, stride=(1, 1), pad=(1, 1), channel_last=True,
b_init=b, name=name)
return h
def res_block(x, out_ch, name):
"""
Create residual block
"""
with nn.parameter_scope(name):
h = conv_2d(F.relu(x), out_ch, kernel=(3, 3), name='conv/0')
h = conv_2d(F.relu(h), out_ch, kernel=(3, 3), name='conv/1')
h = x + h
return h
def dyn_2d_filter(x, lf_2d, k_sz):
"""
Dynamic 2d filtering
"""
with nn.parameter_scope('Dynamic_2D_Filtering'):
f_localexpand = nn.Variable.from_numpy_array(
np.eye(k_sz[0] * k_sz[1], k_sz[0] * k_sz[1]))
f_localexpand = F.reshape(f_localexpand,
(k_sz[0], k_sz[1], 1, k_sz[0] * k_sz[1])) # (9,9,1,81))
f_localexpand = F.transpose(f_localexpand, (3, 0, 1, 2)) # (81,9,9,1))
x_sz = x.shape
x = F.reshape(x, (x_sz[0], x_sz[1], x_sz[2], 1)) # (1,100,170,1)
x_localexpand = F.convolution(x, f_localexpand, stride=(1, 1), pad=(4, 4),
channel_last=True) # (1,100,170,81)
x_le_sz = x_localexpand.shape
x_localexpand = F.reshape(x_localexpand,
(x_le_sz[0], x_le_sz[1], x_le_sz[2], 1, x_le_sz[3]))
y = F.batch_matmul(x_localexpand, lf_2d)
y_sz = y.shape
y = F.reshape(y, (y_sz[0], y_sz[1], y_sz[2], y_sz[4]))
return y
def dyn_2d_up_operation(x, lf_2d, k_sz, sf=2):
"""
Dynamic 2d upsampling
"""
with nn.parameter_scope("Dynamic_2D_Upsampling"):
y = []
sz = lf_2d.shape
lf_2d_new = F.reshape(
lf_2d, (sz[0], sz[1], sz[2], k_sz[0] * k_sz[0], sf ** 2))
lf_2d_new = F.softmax(lf_2d_new, axis=3)
for ch in range(3): # loop over YUV channels
# apply dynamic filtering operation
temp = dyn_2d_filter(x[:, :, :, ch], lf_2d_new, k_sz)
temp = depth_to_space(temp, sf)
y += [temp]
y = F.concatenate(*y, axis=3)
return y
def dyn_sep_up_operation(x, dr_k_v, dr_k_h, k_sz, sf):
"""
Dynamic separable upsampling operation with 1D separable local kernels.
x: [B, H, W, C], dr_k_v: [B, H, W, 41*sf*sf], dr_k_h: [B, H, W, 41*sf*sf]
out: [B, H*sf, W*sf, C]
"""
sz = x.shape
pad = k_sz // 2 # local filter pad size
# [B, H, W, C*sf*sf]
out_v = nn.Variable((sz[0], sz[1], sz[2], sz[3] * sf ** 2))
out_v.data.zero()
# [B, H, W, C*sf*sf]
out_h = nn.Variable((sz[0], sz[1], sz[2], sz[3] * sf ** 2))
out_h.data.zero()
img_pad = F.pad(x, (0, 0, pad, pad, 0, 0, 0, 0))
img_pad_y = F.reshape(img_pad[:, :, :, 0],
(img_pad.shape[0], img_pad.shape[1], img_pad.shape[2], 1))
img_pad_y = F.tile(img_pad_y, [1, 1, 1, sf ** 2])
img_pad_u = F.reshape(img_pad[:, :, :, 1],
(img_pad.shape[0], img_pad.shape[1], img_pad.shape[2], 1))
img_pad_u = F.tile(img_pad_u, [1, 1, 1, sf ** 2])
img_pad_v = F.reshape(img_pad[:, :, :, 2],
(img_pad.shape[0], img_pad.shape[1], img_pad.shape[2], 1))
img_pad_v = F.tile(img_pad_v, [1, 1, 1, sf ** 2])
img_pad = F.concatenate(img_pad_y, img_pad_u, img_pad_v, axis=3)
# vertical 1D filter
for i in range(k_sz):
out_v = out_v + img_pad[:, i:i + sz[1], :, :] * F.tile(
dr_k_v[:, :, :, i:k_sz * sf ** 2:k_sz], [1, 1, 1, 3])
img_pad = F.pad(out_v, (0, 0, 0, 0, pad, pad, 0, 0))
# horizontal 1D filter
for i in range(k_sz):
out_h = out_h + img_pad[:, :, i:i + sz[2], :] * F.tile(
dr_k_h[:, :, :, i:k_sz * sf ** 2:k_sz], [1, 1, 1, 3])
# depth to space upsampling (YUV)
out = depth_to_space(out_h[:, :, :, 0:sf ** 2], sf)
out = F.concatenate(out, depth_to_space(
out_h[:, :, :, sf ** 2:2 * sf ** 2], sf), axis=3)
out = F.concatenate(out, depth_to_space(
out_h[:, :, :, 2 * sf ** 2:3 * sf ** 2], sf), axis=3)
return out
def res_block_concat(x, out_ch, name):
"""
Basic residual block -> [conv-relu | conv-relu] + input
"""
with nn.parameter_scope(name):
h = conv_2d(F.relu(x), out_ch, kernel=(3, 3), name='conv/0')
h = conv_2d(F.relu(h), out_ch, kernel=(3, 3), name='conv/1')
h = x[:, :, :, :out_ch] + h
return h
def model(img, sf):
"""
Define JSInet model
"""
with nn.parameter_scope('Network'):
with nn.parameter_scope('local_contrast_enhancement'):
## ================= Local Contrast Enhancement Subnet ============================ ##
ch = 64
b = guided_filter(img, 5, 0.01)
n1 = conv_2d(b, ch, kernel=(3, 3), name='conv/0')
for i in range(4):
n1 = res_block(n1, ch, 'res_block/%d' % i)
n1 = F.relu(n1, inplace=True)
local_filter_2d = conv_2d(n1, (9 ** 2) * (sf ** 2), kernel=(3, 3),
name='conv_k') # [B, H, W, (9x9)*(sfxsf)]
# dynamic 2D upsampling with 2D local filters
pred_C = dyn_2d_up_operation(b, local_filter_2d, (9, 9), sf)
# local contrast mask
pred_C = 2 * F.sigmoid(pred_C)
## ================= Detail Restoration Subnet ============================ ##
ch = 64
d = F.div2(img, b + 1e-15)
with nn.parameter_scope('detail_restoration'):
n3 = conv_2d(d, ch, kernel=(3, 3), name='conv/0')
for i in range(4):
n3 = res_block(n3, ch, 'res_block/%d' % i)
if i == 0:
d_feature = n3
n3 = F.relu(n3, inplace=True)
# separable 1D filters
dr_k_h = conv_2d(n3, 41 * sf ** 2, kernel=(3, 3), name='conv_k_h')
dr_k_v = conv_2d(n3, 41 * sf ** 2, kernel=(3, 3), name='conv_k_v')
            # dynamic separable upsampling with separable 1D local filters
pred_D = dyn_sep_up_operation(d, dr_k_v, dr_k_h, 41, sf)
## ================= Image Reconstruction Subnet ============================ ##
with nn.parameter_scope('image_reconstruction'):
n4 = conv_2d(img, ch, kernel=(3, 3), name='conv/0')
for i in range(4):
if i == 1:
n4 = F.concatenate(n4, d_feature, axis=3)
n4 = res_block_concat(n4, ch, 'res_block/%d' % i)
else:
n4 = res_block(n4, ch, 'res_block/%d' % i)
n4 = F.relu(n4, inplace=True)
n4 = F.relu(conv_2d(n4, ch * sf * sf, kernel=(3, 3),
name='conv/1'), inplace=True)
# (1,100,170,1024) -> (1,100,170,4,4,64) -> (1,100,4,170,4,64)
# pixel shuffle
n4 = depth_to_space(n4, sf)
pred_I = conv_2d(n4, 3, kernel=(3, 3), name='conv/2')
pred = F.add2(pred_I, pred_D, inplace=True) * pred_C
jsinet = namedtuple('jsinet', ['pred'])
return jsinet(pred)
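# --- Composition note (illustrative summary of model() above): the final prediction multiplies
# the sum of the reconstruction (pred_I) and the restored detail (pred_D) by the local contrast
# mask (pred_C, constrained to (0, 2) via 2 * sigmoid), one output per subnet.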
def truncated_normal(w_shape, mean, std):
"""
Numpy truncated normal
"""
init = I.NormalInitializer()
tmp = init(w_shape + (4,))
valid = np.logical_and((np.less(tmp, 2)), (np.greater(tmp, -2)))
ind = np.argmax(valid, axis=-1)
ind1 = (np.expand_dims(ind, -1))
trunc_norm = np.take_along_axis(tmp, ind1, axis=4).squeeze(-1)
trunc_norm = trunc_norm * std + mean
return trunc_norm
def conv(x, channels, kernel=4, stride=2, pad=0, pad_type='zero', use_bias=True, scope='conv_0'):
"""
Convolution for discriminator
"""
w_n_shape = (channels, kernel, kernel, x.shape[-1])
w_init = truncated_normal(w_n_shape, mean=0.0, std=0.02)
b_init = I.ConstantInitializer(0.)
with nn.parameter_scope(scope):
if pad > 0:
h = x.shape[1]
if h % stride == 0:
pad = pad * 2
else:
pad = max(kernel - (h % stride), 0)
pad_top = pad // 2
pad_bottom = pad - pad_top
pad_left = pad // 2
pad_right = pad - pad_left
if pad_type == 'zero':
x = F.pad(x, (0, 0, pad_top, pad_bottom,
pad_left, pad_right, 0, 0))
if pad_type == 'reflect':
x = F.pad(x, (0, 0, pad_top, pad_bottom, pad_left,
pad_right, 0, 0), mode='reflect')
def apply_w(w):
return PF.spectral_norm(w, dim=0)
x = PF.convolution(x, channels, kernel=(kernel, kernel), stride=(
stride, stride), apply_w=apply_w, w_init=w_init, b_init=b_init, with_bias=use_bias,
channel_last=True)
return x
def dis_block(n, c, i, train=True):
"""
Discriminator conv_bn_relu block
"""
out = conv(n, channels=c, kernel=4, stride=2, pad=1, use_bias=False,
scope='d_conv/' + str(2 * i + 2))
out_fm = F.leaky_relu(
PF.batch_normalization(
out, axes=[3], batch_stat=train, name='d_bn/' + str(2 * i + 1)),
alpha=0.2)
out = conv(out_fm, channels=c * 2, kernel=3, stride=1, pad=1, use_bias=False,
scope='d_conv/' + str(2 * i + 3))
out = F.leaky_relu(
PF.batch_normalization(
out, axes=[3], batch_stat=train, name='d_bn/' + str(2 * i + 2)),
alpha=0.2)
return out, out_fm
def discriminator_fm(x, sf, scope="Discriminator_FM"):
"""
Feature matching discriminator
"""
with nn.parameter_scope(scope):
fm_list = []
ch = 32
n = F.leaky_relu(conv(x, ch, 3, 1, 1, scope='d_conv/1'), alpha=0.2)
for i in range(4):
n, out_fm = dis_block(n, ch, i, train=True)
ch = ch * 2
fm_list.append(out_fm)
n = F.leaky_relu(PF.batch_normalization(
conv(n, channels=ch, kernel=4, stride=2,
pad=1, use_bias=False, scope='d_conv/10'),
axes=[3], batch_stat=True, name='d_bn/9'), alpha=0.2,
inplace=True)
if sf == 1:
n = F.leaky_relu(PF.batch_normalization(
conv(n, channels=ch, kernel=5, stride=1,
pad=1, use_bias=False, scope='d_conv/11'),
axes=[3], batch_stat=True, name='d_bn/10'), alpha=0.2, inplace=True)
else:
n = F.leaky_relu(PF.batch_normalization(
conv(n, channels=ch, kernel=5, stride=1,
use_bias=False, scope='d_conv/11'),
axes=[3], batch_stat=True, name='d_bn/10'), alpha=0.2, inplace=True)
n = PF.batch_normalization(
conv(n, channels=1, kernel=1, stride=1,
use_bias=False, scope='d_conv/12'),
axes=[3], batch_stat=True, name='d_bn/11')
out_logit = n
out = F.sigmoid(out_logit) # [B,1]
return out, out_logit, fm_list
def discriminator_loss(real, fake):
"""
Calculate discriminator loss
"""
real_loss = F.mean(
F.relu(1.0 - (real - F.reshape(F.mean(fake), (1, 1, 1, 1)))))
fake_loss = F.mean(
F.relu(1.0 + (fake - F.reshape(F.mean(real), (1, 1, 1, 1)))))
l_d = real_loss + fake_loss
return l_d
def generator_loss(real, fake):
"""
Calculate generator loss
"""
real_loss = F.mean(
F.relu(1.0 + (real - F.reshape(F.mean(fake), (1, 1, 1, 1)))))
fake_loss = F.mean(
F.relu(1.0 - (fake - F.reshape(F.mean(real), (1, 1, 1, 1)))))
l_g = real_loss + fake_loss
return l_g
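# --- Worked note (illustrative): with perfectly separated logits (real logits >> mean fake
# logit and fake logits << mean real logit, by a margin of 1), both relu terms in
# discriminator_loss vanish, so l_d -> 0, while the mirrored terms in generator_loss grow.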
def feature_matching_loss(x, y, num=4):
"""
Calculate feature matching loss
"""
fm_loss = 0.0
for i in range(num):
fm_loss += F.mean(F.squared_error(x[i], y[i]))
return fm_loss
def gan_model(label_ph, pred, conf):
"""
Define GAN model with adversarial and discriminator losses and their orchestration
"""
# Define Discriminator
_, d_real_logits, d_real_fm_list = discriminator_fm(
label_ph, conf.scaling_factor, scope="Discriminator_FM")
# output of D for fake images
_, d_fake_logits, d_fake_fm_list = discriminator_fm(
pred, conf.scaling_factor, scope="Discriminator_FM")
# Define Detail Discriminator
    # compute the detail layers for the discriminator (reuse)
base_gt = guided_filter(label_ph, 5, 0.01)
detail_gt = F.div2(label_ph, base_gt + 1e-15)
base_pred = guided_filter(pred, 5, 0.01)
detail_pred = F.div2(pred, base_pred + 1e-15)
# detail layer output of D for real images
_, d_detail_real_logits, d_detail_real_fm_list = \
discriminator_fm(detail_gt, conf.scaling_factor,
scope="Discriminator_Detail")
# detail layer output of D for fake images
_, d_detail_fake_logits, d_detail_fake_fm_list = \
discriminator_fm(detail_pred, conf.scaling_factor,
scope="Discriminator_Detail")
# Loss
# original GAN (hinge GAN)
d_adv_loss = discriminator_loss(d_real_logits, d_fake_logits)
d_adv_loss.persistent = True
g_adv_loss = generator_loss(d_real_logits, d_fake_logits)
g_adv_loss.persistent = True
# detail GAN (hinge GAN)
d_detail_adv_loss = conf.detail_lambda * \
discriminator_loss(d_detail_real_logits, d_detail_fake_logits)
d_detail_adv_loss.persistent = True
g_detail_adv_loss = conf.detail_lambda * \
generator_loss(d_detail_real_logits, d_detail_fake_logits)
g_detail_adv_loss.persistent = True
# feature matching (FM) loss
fm_loss = feature_matching_loss(d_real_fm_list, d_fake_fm_list, 4)
fm_loss.persistent = True
fm_detail_loss = conf.detail_lambda * feature_matching_loss(d_detail_real_fm_list,
d_detail_fake_fm_list, 4)
fm_detail_loss.persistent = True
jsigan = namedtuple('jsigan',
['d_adv_loss', 'd_detail_adv_loss', 'g_adv_loss', 'g_detail_adv_loss',
'fm_loss', 'fm_detail_loss'])
return jsigan(d_adv_loss, d_detail_adv_loss, g_adv_loss, g_detail_adv_loss, fm_loss,
fm_detail_loss)
| 37.359091
| 98
| 0.55828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,241
| 0.197165
|
5475f0c326a3f8de3e388b70e03c71cc3faf4139
| 2,973
|
py
|
Python
|
neptune/internal/hardware/gpu/gpu_monitor.py
|
neptune-ml/neptune-client
|
7aea63160b5149c3fec40f62d3b0da7381a35748
|
[
"Apache-2.0"
] | 13
|
2019-02-11T13:18:38.000Z
|
2019-12-26T06:26:07.000Z
|
neptune/internal/hardware/gpu/gpu_monitor.py
|
neptune-ml/neptune-client
|
7aea63160b5149c3fec40f62d3b0da7381a35748
|
[
"Apache-2.0"
] | 39
|
2019-03-07T13:40:10.000Z
|
2020-01-07T17:19:24.000Z
|
neptune/internal/hardware/gpu/gpu_monitor.py
|
neptune-ml/neptune-client
|
7aea63160b5149c3fec40f62d3b0da7381a35748
|
[
"Apache-2.0"
] | 4
|
2019-02-11T13:07:23.000Z
|
2019-11-26T08:20:24.000Z
|
#
# Copyright (c) 2019, Neptune Labs Sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from neptune.vendor.pynvml import (
NVMLError,
nvmlDeviceGetCount,
nvmlDeviceGetHandleByIndex,
nvmlDeviceGetMemoryInfo,
nvmlDeviceGetUtilizationRates,
nvmlInit,
)
_logger = logging.getLogger(__name__)
class GPUMonitor(object):
    """Read GPU count, utilization, and memory via NVML, returning a default when NVML is unavailable."""
    nvml_error_printed = False
def get_card_count(self):
return self.__nvml_get_or_else(nvmlDeviceGetCount, default=0)
def get_card_usage_percent(self, card_index):
# pylint: disable=no-member
# pylint incorrectly detects that function nvmlDeviceGetUtilizationRates returns str
return self.__nvml_get_or_else(
lambda: float(nvmlDeviceGetUtilizationRates(nvmlDeviceGetHandleByIndex(card_index)).gpu)
)
def get_card_used_memory_in_bytes(self, card_index):
# pylint: disable=no-member
# pylint incorrectly detects that function nvmlDeviceGetMemoryInfo returns str
return self.__nvml_get_or_else(
lambda: nvmlDeviceGetMemoryInfo(nvmlDeviceGetHandleByIndex(card_index)).used
)
def get_top_card_memory_in_bytes(self):
def read_top_card_memory_in_bytes():
# pylint: disable=no-member
# pylint incorrectly detects that function nvmlDeviceGetMemoryInfo returns str
return self.__nvml_get_or_else(
lambda: [
nvmlDeviceGetMemoryInfo(nvmlDeviceGetHandleByIndex(card_index)).total
for card_index in range(nvmlDeviceGetCount())
],
default=0,
)
memory_per_card = read_top_card_memory_in_bytes()
if not memory_per_card:
return 0
return max(memory_per_card)
def __nvml_get_or_else(self, getter, default=None):
try:
nvmlInit()
return getter()
except NVMLError as e:
if not GPUMonitor.nvml_error_printed:
warning = (
"Info (NVML): %s. GPU usage metrics may not be reported. For more information, "
"see https://docs-legacy.neptune.ai/logging-and-managing-experiment-results"
"/logging-experiment"
"-data.html#hardware-consumption "
)
_logger.warning(warning, e)
GPUMonitor.nvml_error_printed = True
return default
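# --- Illustrative usage sketch (not part of the original module) ---
# monitor = GPUMonitor()
# for card in range(monitor.get_card_count()):
#     print(card, monitor.get_card_usage_percent(card), monitor.get_card_used_memory_in_bytes(card))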
| 35.392857
| 100
| 0.664649
| 2,131
| 0.716784
| 0
| 0
| 0
| 0
| 0
| 0
| 1,113
| 0.374369
|
54768720b8a58a3c4d1cf1c8c265ceea8f6fc111
| 5,219
|
py
|
Python
|
tests/redis_map.py
|
jaredlunde/redis_structures
|
b9cce5f5c85db5e12c292633ff8d04e3ae053294
|
[
"MIT"
] | 2
|
2016-04-05T08:40:47.000Z
|
2016-06-27T14:03:26.000Z
|
tests/redis_map.py
|
jaredLunde/redis_structures
|
b9cce5f5c85db5e12c292633ff8d04e3ae053294
|
[
"MIT"
] | 1
|
2015-10-27T14:30:53.000Z
|
2015-11-09T17:54:33.000Z
|
tests/redis_map.py
|
jaredlunde/redis_structures
|
b9cce5f5c85db5e12c292633ff8d04e3ae053294
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3 -S
# -*- coding: utf-8 -*-
"""
`Redis Map Tests`
--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--·--
2015 Jared Lunde © The MIT License (MIT)
http://github.com/jaredlunde
"""
import datetime
import time
import pickle
import unittest
from redis_structures.debug import RandData, gen_rand_str
from redis_structures import StrictRedis, RedisMap
class TestJSONRedisMap(unittest.TestCase):
map = RedisMap("json_map", prefix="rs:unit_tests:", serialize=True)
is_str = False
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.addCleanup(self.map.clear)
def cast(self, obj):
return str(obj) if self.is_str else obj
def reset(self, count=10, type=int):
self.map.clear()
self.data = RandData(type).dict(count, 1)
self.data_count = count
self.map.update(self.data)
def test_prefix(self):
self.assertEqual(self.map.prefix, 'rs:unit_tests')
self.assertEqual(self.map.name, 'json_map')
self.assertEqual(self.map.key_prefix, 'rs:unit_tests:json_map')
def test_incr_decr(self):
self.reset()
self.map.incr('views', 1)
self.assertEqual(self.map['views'], self.cast(1))
self.map.incr('views', 3)
self.assertEqual(self.map['views'], self.cast(4))
self.map.decr('views', 1)
self.assertEqual(self.map['views'], self.cast(3))
def test_get(self):
self.reset()
self.map["hello"] = "world"
self.assertEqual(self.map.get("hello"), 'world')
self.assertEqual(self.map.get('world', 'hello'), 'hello')
def test_get_key(self):
self.assertEqual(
self.map.get_key('views'),
"{}:{}:{}".format(self.map.prefix, self.map.name, 'views'))
def test_items(self):
self.reset()
self.assertDictEqual(
{k: v for k, v in self.map.items()},
{k: self.cast(v) for k, v in self.data.items()})
def test_values(self):
self.reset()
self.assertSetEqual(
set(self.map.values()),
set(map(self.cast, self.data.values())))
def test_iter(self):
self.reset()
self.assertSetEqual(
set(k for k in self.map.iter()),
set(self.cast(k) for k in self.data.keys()))
def test_iter_match(self):
self.reset(count=10)
self.assertSetEqual(
set(k for k in self.map.iter("a*")),
set(self.cast(k) for k in self.data.keys() if k.startswith('a')))
def test_mget(self):
self.reset(0)
self.map.update({
'test1': 1,
'test2': 2,
'test3': 3,
'test4': 4,
'test5': 5})
self.assertListEqual(
self.map.mget('test2', 'test3', 'test4'),
[self.cast(2), self.cast(3), self.cast(4)])
def test_pop(self):
self.reset()
self.map['hello'] = 'world'
self.assertEqual(self.map.pop('hello'), 'world')
self.assertNotIn('hello', self.map)
def test_delete(self):
self.reset()
self.map['hello'] = 'world'
self.assertEqual(self.map['hello'], 'world')
del self.map['hello']
self.assertNotIn('hello', self.map)
def test_scan(self):
self.reset()
new_keys = []
cursor = '0'
while cursor:
cursor, keys = self.map.scan(count=1, cursor=int(cursor))
if keys:
new_keys.extend(keys)
self.assertSetEqual(
set(self.map.get_key(k) for k in self.data.keys()), set(new_keys))
def test_set(self):
self.reset()
self.map.set("hello", "world")
self.assertIn("hello", self.map)
def test_setex(self):
self.reset()
self.map.setex("hello", "world", 1)
self.assertIn("hello", self.map)
time.sleep(1.25)
self.assertNotIn("hello", self.map)
self.map.psetex("hello", "world", 1000)
self.assertIn("hello", self.map)
time.sleep(1.25)
self.assertNotIn("hello", self.map)
class TestPickledRedisMap(TestJSONRedisMap):
map = RedisMap("pickled_map", prefix="rs:unit_tests:", serializer=pickle)
def test_prefix(self):
self.assertEqual(self.map.prefix, 'rs:unit_tests')
self.assertEqual(self.map.name, 'pickled_map')
self.assertEqual(self.map.key_prefix, 'rs:unit_tests:pickled_map')
def test_incr_decr(self):
self.reset()
self.map.incr('views', 1)
self.assertEqual(self.map['views'], str(1))
self.map.incr('views', 3)
self.assertEqual(self.map['views'], str(4))
self.map.decr('views', 1)
self.assertEqual(self.map['views'], str(3))
class TestUnserializedRedisMap(TestJSONRedisMap):
map = RedisMap("unserialized_map", prefix="rs:unit_tests:")
is_str = True
def test_prefix(self):
self.assertEqual(self.map.prefix, 'rs:unit_tests')
self.assertEqual(self.map.name, 'unserialized_map')
self.assertEqual(
self.map.key_prefix, 'rs:unit_tests:unserialized_map')
if __name__ == '__main__':
unittest.main()
| 30.881657
| 80
| 0.57923
| 4,760
| 0.907358
| 0
| 0
| 0
| 0
| 0
| 0
| 902
| 0.171941
|
5477f31f091eaba6d081dd15b6e4e452029c17e6
| 4,480
|
py
|
Python
|
examples/parser_example.py
|
pibico/beacontools
|
513e1c7ff2aaf74b6c7d7b10805c2f6ca4384e3d
|
[
"MIT"
] | 139
|
2017-06-09T17:15:23.000Z
|
2022-03-15T03:02:17.000Z
|
examples/parser_example.py
|
pibico/beacontools
|
513e1c7ff2aaf74b6c7d7b10805c2f6ca4384e3d
|
[
"MIT"
] | 71
|
2017-06-20T03:20:56.000Z
|
2022-02-13T22:47:53.000Z
|
examples/parser_example.py
|
pibico/beacontools
|
513e1c7ff2aaf74b6c7d7b10805c2f6ca4384e3d
|
[
"MIT"
] | 59
|
2017-06-20T03:10:00.000Z
|
2022-03-15T23:54:44.000Z
|
# -*- coding: utf-8 -*-
from beacontools import parse_packet
# Eddystone UID packet
uid_packet = b"\x02\x01\x06\x03\x03\xaa\xfe\x17\x16\xaa\xfe\x00\xe3\x12\x34\x56\x78\x90\x12" \
b"\x34\x67\x89\x01\x00\x00\x00\x00\x00\x01\x00\x00"
uid_frame = parse_packet(uid_packet)
print("Namespace: %s" % uid_frame.namespace)
print("Instance: %s" % uid_frame.instance)
print("TX Power: %s" % uid_frame.tx_power)
print("-----")
# Eddystone URL packet
url_packet = b"\x03\x03\xAA\xFE\x13\x16\xAA\xFE\x10\xF8\x03github\x00citruz"
url_frame = parse_packet(url_packet)
print("TX Power: %d" % url_frame.tx_power)
print("URL: %s" % url_frame.url)
print("-----")
# Eddystone TLM packet (unencrypted)
tlm_packet = b"\x02\x01\x06\x03\x03\xaa\xfe\x11\x16\xaa\xfe\x20\x00\x0b\x18\x13\x00\x00\x00" \
b"\x14\x67\x00\x00\x2a\xc4\xe4"
tlm_frame = parse_packet(tlm_packet)
print("Voltage: %d mV" % tlm_frame.voltage)
print("Temperature: %f °C" % tlm_frame.temperature)
print("Advertising count: %d" % tlm_frame.advertising_count)
print("Seconds since boot: %d" % tlm_frame.seconds_since_boot)
print("-----")
# Eddystone TLM packet (encrypted)
enc_tlm_packet = b"\x02\x01\x06\x03\x03\xaa\xfe\x11\x16\xaa\xfe\x20\x01\x41\x41\x41\x41\x41" \
b"\x41\x41\x41\x41\x41\x41\x41\xDE\xAD\xBE\xFF"
enc_tlm_frame = parse_packet(enc_tlm_packet)
print("Data: %s" % enc_tlm_frame.encrypted_data)
print("Salt: %d" % enc_tlm_frame.salt)
print("Mic: %d" % enc_tlm_frame.mic)
print("-----")
# iBeacon Advertisement
ibeacon_packet = b"\x02\x01\x06\x1a\xff\x4c\x00\x02\x15\x41\x41\x41\x41\x41\x41\x41\x41\x41" \
b"\x41\x41\x41\x41\x41\x41\x41\x00\x01\x00\x01\xf8"
adv = parse_packet(ibeacon_packet)
print("UUID: %s" % adv.uuid)
print("Major: %d" % adv.major)
print("Minor: %d" % adv.minor)
print("TX Power: %d" % adv.tx_power)
print("-----")
# Cypress iBeacon Sensor
cypress_packet = b"\x02\x01\x04\x1a\xff\x4c\x00\x02\x15\x00\x05\x00\x01\x00\x00\x10\x00\x80" \
b"\x00\x00\x80\x5f\x9b\x01\x31\x00\x02\x6c\x66\xc3"
sensor = parse_packet(cypress_packet)
print("UUID: %s" % sensor.uuid)
print("Major: %d" % sensor.major)
print("Temperature: %d °C" % sensor.cypress_temperature)
print("Humidity: %d %%" % sensor.cypress_humidity)
print("TX Power: %d" % sensor.tx_power)
print("-----")
# Estimote Telemetry Packet (Subframe A)
telemetry_a_packet = b"\x02\x01\x04\x03\x03\x9a\xfe\x17\x16\x9a\xfe\x22\x47\xa0\x38\xd5"\
b"\xeb\x03\x26\x40\x00\x00\x01\x41\x44\x47\xfa\xff\xff\xff\xff"
telemetry = parse_packet(telemetry_a_packet)
print("Identifier: %s" % telemetry.identifier)
print("Protocol Version: %d" % telemetry.protocol_version)
print("Acceleration (g): (%f, %f, %f)" % telemetry.acceleration)
print("Is moving: %s" % telemetry.is_moving)
# ... see packet_types/estimote.py for all available attributes and units
print("-----")
# Estimote Telemetry Packet (Subframe B)
telemetry_b_packet = b"\x02\x01\x04\x03\x03\x9a\xfe\x17\x16\x9a\xfe\x22\x47\xa0\x38\xd5"\
b"\xeb\x03\x26\x40\x01\xd8\x42\xed\x73\x49\x25\x66\xbc\x2e\x50"
telemetry_b = parse_packet(telemetry_b_packet)
print("Identifier: %s" % telemetry_b.identifier)
print("Protocol Version: %d" % telemetry_b.protocol_version)
print("Magnetic field: (%f, %f, %f)" % telemetry_b.magnetic_field)
print("Temperature: %f °C" % telemetry_b.temperature)
# ... see packet_types/estimote.py for all available attributes and units
# Estimote Nearable Advertisement
nearable_packet = b"\x02\x01\x04\x03\x03\x0f\x18\x17\xff\x5d" \
b"\x01\x01\x1e\xfe\x42\x7e\xb6\xf4\xbc\x2f" \
b"\x04\x01\x68\xa1\xaa\xfe\x05\xc1\x45\x25" \
b"\x53\xb5"
nearable_adv = parse_packet(nearable_packet)
print("Identifier: %s" % nearable_adv.identifier)
print("Hardware_version: %d" % nearable_adv.hardware_version)
print("Firmware_version: %d" % nearable_adv.firmware_version)
print("Temperature: %d" % nearable_adv.temperature)
print("Is moving: %i" % nearable_adv.is_moving)
print("-----")
# CJ Monitor packet
cj_monitor_packet = b"\x02\x01\x06\x05\x02\x1A\x18\x00\x18" \
b"\x09\xFF\x72\x04\xFE\x10\xD1\x0C\x33\x61" \
b"\x09\x09\x4D\x6F\x6E\x20\x35\x36\x34\x33"
cj_monitor = parse_packet(cj_monitor_packet)
print("Name: %s" % cj_monitor.name)
print("Temperature: %f °C" % cj_monitor.temperature)
print("Humidity: %d %%" % cj_monitor.humidity)
print("Light: %f" % cj_monitor.light)
| 40
| 94
| 0.690625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,343
| 0.522525
|
547c48103894763c6518d10f40329e0d7d4eaefd
| 1,228
|
py
|
Python
|
mlsurvey/sl/workflows/multiple_learning_workflow.py
|
jlaumonier/mlsurvey
|
373598d067c7f0930ba13fe8da9756ce26eecbaf
|
[
"MIT"
] | null | null | null |
mlsurvey/sl/workflows/multiple_learning_workflow.py
|
jlaumonier/mlsurvey
|
373598d067c7f0930ba13fe8da9756ce26eecbaf
|
[
"MIT"
] | null | null | null |
mlsurvey/sl/workflows/multiple_learning_workflow.py
|
jlaumonier/mlsurvey
|
373598d067c7f0930ba13fe8da9756ce26eecbaf
|
[
"MIT"
] | null | null | null |
from kedro.io import DataCatalog, MemoryDataSet
from kedro.pipeline import Pipeline
from kedro.runner import SequentialRunner
import mlsurvey as mls
from mlsurvey.workflows.learning_workflow import LearningWorkflow
class MultipleLearningWorkflow(LearningWorkflow):
def run(self):
"""
Run the workflow : run each config
"""
# data
data_catalog = DataCatalog({'config': MemoryDataSet(),
'log': MemoryDataSet(),
'base_directory': MemoryDataSet()})
data_catalog.save('config', self.config)
data_catalog.save('log', self.log)
data_catalog.save('base_directory', self.base_directory)
expand_config_node = mls.sl.workflows.tasks.ExpandConfigTask.get_node()
multiple_learning_node = mls.sl.workflows.tasks.MultipleLearningTask.get_node()
# Assemble nodes into a pipeline
pipeline = Pipeline([expand_config_node, multiple_learning_node])
# Create a runner to run the pipeline
runner = SequentialRunner()
# Run the pipeline
result = runner.run(pipeline, data_catalog)
if len(result) == 0:
self.terminate()
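# Note (illustrative): the pipeline has exactly two nodes -- ExpandConfigTask, which expands
# 'config' into multiple sub-configurations, and MultipleLearningTask, which runs the learning
# over each of them; terminate() is called once the runner returns no leftover outputs.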
| 34.111111
| 87
| 0.653094
| 1,007
| 0.820033
| 0
| 0
| 0
| 0
| 0
| 0
| 209
| 0.170195
|
547cd68f734cef8dede708252277b864855b2580
| 2,542
|
py
|
Python
|
backend/apps/cmdb/migrations/0001_initial.py
|
renmcc/SA2
|
a524124c140ae0b291b10dafc11d38744dd93bd9
|
[
"MIT"
] | 4
|
2020-06-25T05:57:39.000Z
|
2021-06-26T04:58:16.000Z
|
backend/apps/cmdb/migrations/0001_initial.py
|
renmcc/SA2
|
a524124c140ae0b291b10dafc11d38744dd93bd9
|
[
"MIT"
] | null | null | null |
backend/apps/cmdb/migrations/0001_initial.py
|
renmcc/SA2
|
a524124c140ae0b291b10dafc11d38744dd93bd9
|
[
"MIT"
] | 1
|
2020-12-10T15:12:11.000Z
|
2020-12-10T15:12:11.000Z
|
# Generated by Django 2.2.12 on 2020-06-15 16:55
import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('project', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='server',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('hostname', models.CharField(blank=True, help_text='主机名', max_length=200, verbose_name='主机名')),
('public_ip', models.GenericIPAddressField(blank=True, help_text='外网IP', null=True, verbose_name='外网IP')),
('private_ip', models.GenericIPAddressField(help_text='内网IP', unique=True, verbose_name='内网IP')),
('os', models.CharField(blank=True, default=None, help_text='操作系统', max_length=100, verbose_name='操作系统')),
('cpu', models.CharField(blank=True, default=None, help_text='CPU信息', max_length=250, verbose_name='CPU信息')),
('memory', models.CharField(blank=True, default=None, help_text='内存信息', max_length=100, verbose_name='内存信息')),
('disk', models.CharField(blank=True, help_text='硬盘信息', max_length=300, null=True, verbose_name='硬盘信息')),
('status', models.BooleanField(default=True, help_text='是否启用', verbose_name='启用')),
('remark', models.TextField(blank=True, help_text='备注', null=True, verbose_name='备注')),
('add_time', models.DateTimeField(default=datetime.datetime.now, help_text='添加时间', verbose_name='添加时间')),
('update_time', models.DateTimeField(auto_now=True, help_text='更新时间', verbose_name='更新时间')),
('area', models.ForeignKey(blank=True, help_text='所属大区', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='server_area', to='project.ProjectArea', verbose_name='大区')),
('project', models.ForeignKey(blank=True, default=1, help_text='项目', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='server_project', to='project.Project', verbose_name='项目')),
('role', models.ManyToManyField(blank=True, help_text='功能', null=True, related_name='server_role', to='project.ProjectRole', verbose_name='功能')),
],
options={
'verbose_name': '服务器列表',
'verbose_name_plural': '服务器列表',
'ordering': ('id',),
},
),
]
| 59.116279
| 215
| 0.632179
| 2,583
| 0.947542
| 0
| 0
| 0
| 0
| 0
| 0
| 703
| 0.257887
|
547d39324fd1deeba259dcc2ee665fe787ad6b6c
| 1,055
|
py
|
Python
|
sphecius/ciphers/base.py
|
douglasdaly/sphecius
|
df8fc8dd2add157c6360c2b66cb22ac6f0241051
|
[
"MIT"
] | 1
|
2019-09-26T01:08:20.000Z
|
2019-09-26T01:08:20.000Z
|
sphecius/ciphers/base.py
|
douglasdaly/sphecius
|
df8fc8dd2add157c6360c2b66cb22ac6f0241051
|
[
"MIT"
] | null | null | null |
sphecius/ciphers/base.py
|
douglasdaly/sphecius
|
df8fc8dd2add157c6360c2b66cb22ac6f0241051
|
[
"MIT"
] | 1
|
2019-09-26T01:08:19.000Z
|
2019-09-26T01:08:19.000Z
|
# -*- coding: utf-8 -*-
"""
base.py
Base Cipher Object class
@author: Douglas Daly
@date: 1/12/2017
"""
#
# Imports
#
from abc import ABCMeta, abstractmethod
from ..alphabets import English
#
# Classes
#
class Cipher(object, metaclass=ABCMeta):
"""
Base Cipher Class
"""
def __init__(self, alphabet=English):
""" Default Constructor
"""
self._alphabet = alphabet
self._key = None
def set_key(self, key):
""" Sets the Key for this Cipher object
:param str key: Key for this Cipher object
"""
self._key = key.upper()
@abstractmethod
def encrypt(self, plaintext):
""" Abstract Encrypt Method
:param str plaintext: Text to encrypt
:returns: Encrypted text
:rtype: str
"""
pass
@abstractmethod
def decrypt(self, ciphertext):
""" Abstract Decrypt Method
:param str ciphertext: Text to decrypt
:returns: Decrypted text
:rtype: str
"""
pass
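# --- Illustrative subclass sketch (hypothetical; added to show the minimal Cipher interface) ---
class ReverseCipher(Cipher):
    """Toy cipher that reverses the text; the key and alphabet are unused."""
    def encrypt(self, plaintext):
        return plaintext[::-1]
    def decrypt(self, ciphertext):
        return ciphertext[::-1]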
| 16.484375
| 50
| 0.57346
| 836
| 0.792417
| 0
| 0
| 426
| 0.403791
| 0
| 0
| 589
| 0.558294
|
547ee9e4da4b047390b557dc16580a853bcc3c8e
| 281
|
py
|
Python
|
setup.py
|
codewars/python-unittest
|
5a6cc27a51a9d91ce997c953099515c701b76057
|
[
"MIT"
] | 4
|
2020-06-20T12:36:09.000Z
|
2021-10-31T22:04:48.000Z
|
setup.py
|
codewars/python-unittest
|
5a6cc27a51a9d91ce997c953099515c701b76057
|
[
"MIT"
] | null | null | null |
setup.py
|
codewars/python-unittest
|
5a6cc27a51a9d91ce997c953099515c701b76057
|
[
"MIT"
] | 3
|
2020-07-11T13:46:24.000Z
|
2022-02-23T20:55:19.000Z
|
from setuptools import setup
setup(
name="codewars_unittest",
version="0.1.0",
packages=["codewars_unittest"],
license="MIT",
description="unittest runner with Codewars output",
install_requires=[],
url="https://github.com/Codewars/python-unittest",
)
| 23.416667
| 55
| 0.690391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 133
| 0.47331
|
547f16545ac590cbce83d8fc70ff6fbb32f028e2
| 16,628
|
py
|
Python
|
code/python/FactSetFunds/v1/fds/sdk/FactSetFunds/model/classifications.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 6
|
2022-02-07T16:34:18.000Z
|
2022-03-30T08:04:57.000Z
|
code/python/FactSetFunds/v1/fds/sdk/FactSetFunds/model/classifications.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 2
|
2022-02-07T05:25:57.000Z
|
2022-03-07T14:18:04.000Z
|
code/python/FactSetFunds/v1/fds/sdk/FactSetFunds/model/classifications.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | null | null | null |
"""
FactSet Funds API
FactSet Mutual Funds data offers over 50 fund- and share class-specific data points for mutual funds listed in the United States. <p>FactSet Mutual Funds Reference provides fund-specific reference information as well as FactSet's proprietary classification system. It includes but is not limited to the following coverage * Fund descriptions * A seven-tier classification system * Leverage information * Fees and expenses * Portfolio managers FactSet Mutual Funds Time Series provides quantitative data items on a historical basis. It includes but is not limited to the following coverage * Net asset value * Fund flows * Assets under management * Total return # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: api@factset.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from fds.sdk.FactSetFunds.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from fds.sdk.FactSetFunds.exceptions import ApiAttributeError
class Classifications(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
      allowed_values (dict): The key is the tuple path to the attribute;
          for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute;
          for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'fsym_id': (str,), # noqa: E501
'request_id': (str,), # noqa: E501
'asset_class': (str,), # noqa: E501
'category_class': (str,), # noqa: E501
'economic_development_class': (str,), # noqa: E501
'focus_class': (str,), # noqa: E501
'geographic_class': (str,), # noqa: E501
'niche_class': (str,), # noqa: E501
'region_class': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'fsym_id': 'fsymId', # noqa: E501
'request_id': 'requestId', # noqa: E501
'asset_class': 'assetClass', # noqa: E501
'category_class': 'categoryClass', # noqa: E501
'economic_development_class': 'economicDevelopmentClass', # noqa: E501
'focus_class': 'focusClass', # noqa: E501
'geographic_class': 'geographicClass', # noqa: E501
'niche_class': 'nicheClass', # noqa: E501
'region_class': 'regionClass', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""Classifications - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
fsym_id (str): FactSet Security Identifier. Six alpha-numeric characters, excluding vowels, with a -S suffix (XXXXXX-S), resolved from the requestId of the Fund requested.. [optional] # noqa: E501
request_id (str): The requested Id sent as input.. [optional] # noqa: E501
asset_class (str): Returns the asset class description from FactSet's fund classification system. Asset class designates the fund's underlying holding type, e.g. equity, fixed-income, etc.. [optional] # noqa: E501
category_class (str): Returns the asset class category description from FactSet's fund classification system. The asset class category is the first-tier subcategory within the fund's asset class, e.g. size & style, sector, precious metals, etc.. [optional] # noqa: E501
economic_development_class (str): Returns the fund's economic development description from FactSet's fund classification system. This description refers to the development level for the fund's geographic region of focus, e.g. developed, emerging, etc.. [optional] # noqa: E501
focus_class (str): Returns the fund's focus description from FactSet's fund classification system. The fund's focus is the second-tier subcategory within the fund's asset class, e.g. small cap, energy, etc.. [optional] # noqa: E501
geographic_class (str): Returns the fund's specific geography description from FactSet's fund classification system. Specific geography refers to the fund's particular geographic focus within the region, e.g. Chile, BRICs, etc.. [optional] # noqa: E501
niche_class (str): Returns the fund's niche description from FactSet's fund classification system. The fund's niche is the third-tier subcategory with the fund's asset class, e.g. growth, coal, etc.. [optional] # noqa: E501
region_class (str): Returns the fund's region description from FactSet's fund classification system. Refers to the broad regional exposure of the fund's holdings, e.g. Latin America, Asia-Pacific, etc.. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""Classifications - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
fsym_id (str): FactSet Security Identifier. Six alpha-numeric characters, excluding vowels, with a -S suffix (XXXXXX-S), resolved from the requestId of the Fund requested.. [optional] # noqa: E501
request_id (str): The requested Id sent as input.. [optional] # noqa: E501
asset_class (str): Returns the asset class description from FactSet's fund classification system. Asset class designates the fund's underlying holding type, e.g. equity, fixed-income, etc.. [optional] # noqa: E501
category_class (str): Returns the asset class category description from FactSet's fund classification system. The asset class category is the first-tier subcategory within the fund's asset class, e.g. size & style, sector, precious metals, etc.. [optional] # noqa: E501
economic_development_class (str): Returns the fund's economic development description from FactSet's fund classification system. This description refers to the development level for the fund's geographic region of focus, e.g. developed, emerging, etc.. [optional] # noqa: E501
focus_class (str): Returns the fund's focus description from FactSet's fund classification system. The fund's focus is the second-tier subcategory within the fund's asset class, e.g. small cap, energy, etc.. [optional] # noqa: E501
geographic_class (str): Returns the fund's specific geography description from FactSet's fund classification system. Specific geography refers to the fund's particular geographic focus within the region, e.g. Chile, BRICs, etc.. [optional] # noqa: E501
niche_class (str): Returns the fund's niche description from FactSet's fund classification system. The fund's niche is the third-tier subcategory with the fund's asset class, e.g. growth, coal, etc.. [optional] # noqa: E501
region_class (str): Returns the fund's region description from FactSet's fund classification system. Refers to the broad regional exposure of the fund's holdings, e.g. Latin America, Asia-Pacific, etc.. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
| 57.536332
| 709
| 0.619016
| 15,319
| 0.921277
| 0
| 0
| 13,248
| 0.796728
| 0
| 0
| 12,467
| 0.749759
|
547ff536693b82874299f521ef54379c7a3ee663 | 1,637 | py | Python | tests/test_drc.py | atait/lymask | a047bee386e7c9c7f04030277cdfaf7b3c731d14 | ["MIT"] | 3 | 2020-12-01T07:55:50.000Z | 2022-03-16T22:18:07.000Z | tests/test_drc.py | atait/lymask | a047bee386e7c9c7f04030277cdfaf7b3c731d14 | ["MIT"] | null | null | null | tests/test_drc.py | atait/lymask | a047bee386e7c9c7f04030277cdfaf7b3c731d14 | ["MIT"] | 2 | 2020-12-01T22:56:35.000Z | 2021-05-03T09:30:09.000Z |
import os, sys
import subprocess
import xmltodict
import lymask
from lymask import batch_drc_main
from conftest import test_dir
drc_file = os.path.join(test_dir, 'tech', 'lymask_example_tech', 'drc', 'default.yml')
layout_file = os.path.join(test_dir, '2_drc_src.oas')
outfile = os.path.join(test_dir, '2_drc_run.lyrdb')
reffile = os.path.join(test_dir, '2_drc_answer.lyrdb')
class DRC_difference(Exception):
pass
def assert_equal(rdb_file1, rdb_file2):
''' Errors if the rdbs are different.
This is done with dictionaries not the XML text itself
Note, ordering of lists matters currently (although it shouldn't). Dict key order does not (appropriately).
'''
with open(rdb_file1, 'r') as fx:
rdbspec1 = xmltodict.parse(fx.read(), process_namespaces=True)
with open(rdb_file2, 'r') as fx:
rdbspec2 = xmltodict.parse(fx.read(), process_namespaces=True)
if rdbspec1 != rdbspec2:
raise DRC_difference()
# This one needs Technology working
def test_api():
lymask.set_active_technology('lymask_example_tech')
batch_drc_main(layout_file, ymlspec=drc_file, outfile=outfile)
assert_equal(outfile, reffile)
def test_from_technology():
batch_drc_main(layout_file, ymlspec='default', outfile=outfile, technology='lymask_example_tech')
assert_equal(outfile, reffile)
def test_cm_from_tech():
# this also checks that it defaults to default.yml
command = ['lymask', 'drc']
command += [layout_file]
command += ['-o', outfile]
command += ['-t', 'lymask_example_tech']
subprocess.check_call(command)
assert_equal(outfile, reffile)
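A minimal standalone sketch of the dict-level XML comparison that assert_equal relies on (only xmltodict is assumed; the XML snippets are made up):

import xmltodict

a = xmltodict.parse("<report><item n='1'/></report>")
b = xmltodict.parse("<report><item n='1'></item></report>")
assert a == b  # serialization differences vanish once both documents are parsed to dicts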
| 30.886792 | 115 | 0.722053 | 41 | 0.025046 | 0 | 0 | 0 | 0 | 0 | 0 | 504 | 0.30788 |
5480da3b737fa2ac8f9665bf668142513e4bbaba | 1,731 | py | Python | graphviz/parameters/formatters.py | boeddeker/graphviz | acf79bca4518781cad02c102e89ec4e9ce757088 | ["MIT"] | 1 | 2022-01-19T04:02:46.000Z | 2022-01-19T04:02:46.000Z | graphviz/parameters/formatters.py | boeddeker/graphviz | acf79bca4518781cad02c102e89ec4e9ce757088 | ["MIT"] | 1 | 2021-11-19T07:21:48.000Z | 2021-11-19T07:21:48.000Z | graphviz/parameters/formatters.py | boeddeker/graphviz | acf79bca4518781cad02c102e89ec4e9ce757088 | ["MIT"] | 1 | 2022-01-14T17:15:38.000Z | 2022-01-14T17:15:38.000Z |
"""Rendering formatter parameter handling."""
import typing
from . import base
__all__ = ['FORMATTERS', 'verify_formatter', 'Formatter']
FORMATTERS = {'cairo',
'core',
'gd',
'gdiplus',
'gdwbmp',
'xlib'}
REQUIRED = False
def verify_formatter(formatter: typing.Optional[str], *,
required: bool = REQUIRED) -> None:
if formatter is None:
if required:
raise ValueError('missing formatter')
elif formatter.lower() not in FORMATTERS:
raise ValueError(f'unknown formatter: {formatter!r}')
class Formatter(base.ParameterBase):
"""Rendering engine parameter (no default)."""
_formatter = None
_verify_formatter = staticmethod(verify_formatter)
def __init__(self, *, formatter: typing.Optional[str] = None, **kwargs) -> None:
super().__init__(**kwargs)
self.formatter = formatter
def _copy_kwargs(self, **kwargs):
"""Return the kwargs to create a copy of the instance."""
formatter = self._getattr_from_dict('_formatter')
if formatter is not None:
kwargs['formatter'] = formatter
return super()._copy_kwargs(**kwargs)
@property
def formatter(self) -> typing.Optional[str]:
"""The output formatter used for rendering
(``'cairo'``, ``'gd'``, ...)."""
return self._formatter
@formatter.setter
def formatter(self, formatter: typing.Optional[str]) -> None:
if formatter is None:
self.__dict__.pop('_formatter', None)
else:
formatter = formatter.lower()
self._verify_formatter(formatter)
self._formatter = formatter
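A short usage sketch for the verification helper above; the calls are illustrative, not part of the module:

verify_formatter('CAIRO')  # passes: the lookup is case-insensitive
verify_formatter(None)     # passes: the parameter is optional by default
try:
    verify_formatter(None, required=True)
except ValueError as err:
    print(err)             # missing formatter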
| 28.377049 | 84 | 0.60312 | 1,112 | 0.642403 | 0 | 0 | 490 | 0.283073 | 0 | 0 | 405 | 0.233969 |
5480e17b073b3d2de7a418823c0645c307bf4d95 | 183 | py | Python | reward/utils/device.py | lgvaz/torchrl | cfff8acaf70d1fec72169162b95ab5ad3547d17a | ["MIT"] | 5 | 2018-06-21T14:33:40.000Z | 2018-08-18T02:26:03.000Z | reward/utils/device.py | lgvaz/reward | cfff8acaf70d1fec72169162b95ab5ad3547d17a | ["MIT"] | null | null | null | reward/utils/device.py | lgvaz/reward | cfff8acaf70d1fec72169162b95ab5ad3547d17a | ["MIT"] | 2 | 2018-05-08T03:34:49.000Z | 2018-06-22T15:04:17.000Z |
import torch
CONFIG = {"device": torch.device("cuda" if torch.cuda.is_available() else "cpu")}
def get(): return CONFIG["device"]
def set_device(device): CONFIG["device"] = device
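A usage sketch, assuming the module above is importable as reward.utils.device:

import torch
from reward.utils import device

device.set_device(torch.device('cpu'))       # pin the shared CONFIG to the CPU
zeros = torch.zeros(3, device=device.get())  # later allocations pick up that choice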
| 22.875 | 81 | 0.704918 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 35 | 0.191257 |
548192ff87fcf5b59d3f5cc728048383ca680545 | 5,727 | py | Python | Source/Functions/RPSLS.Python.Api/NextMove/next_move.py | ivan-b-ivanov/RockPaperScissorsLizardSpock | 9167bcbe5ad2937e834408475c2ec66cf92fef84 | ["MIT"] | null | null | null | Source/Functions/RPSLS.Python.Api/NextMove/next_move.py | ivan-b-ivanov/RockPaperScissorsLizardSpock | 9167bcbe5ad2937e834408475c2ec66cf92fef84 | ["MIT"] | null | null | null | Source/Functions/RPSLS.Python.Api/NextMove/next_move.py | ivan-b-ivanov/RockPaperScissorsLizardSpock | 9167bcbe5ad2937e834408475c2ec66cf92fef84 | ["MIT"] | null | null | null |
import logging
import random
import os
import json
from typing import Tuple, List
import requests
def predict(player_name: str) -> str:
next_move = _predict_next_move(*_get_player_games(player_name))
return _convert_game_to_json(next_move)
R_rock, P_paper, S_scissors, V_spock, L_lizard = ('R', 'P', 'S', 'V', 'L')
INTERNAL_MOVES_ENCODING = [R_rock, P_paper, S_scissors, V_spock, L_lizard]
def _get_player_games(player_name: str) -> Tuple[str, str]:
game_manager_uri = os.getenv("GAME_MANAGER_URI", None)
url = f'{game_manager_uri}/game-manager/api/games?player={player_name}'
logging.info(f'requesting human moves: {url}')
req = requests.get(url)
data = req.json()
return _convert_games_to_str(data["challengerGames"]), _convert_games_to_str(data["humanGames"])
def _convert_games_to_str(games) -> str:
SOURCE_MOVES_ENCODING = [R_rock, P_paper, S_scissors, L_lizard, V_spock]
return "".join([SOURCE_MOVES_ENCODING[game] for game in games])
def _convert_game_to_json(game: str) -> str:
JSON_MOVES_ENCODING = {R_rock: "rock", P_paper: "paper",
S_scissors: "scissors", L_lizard: "lizard", V_spock: "spock"}
return json.dumps({"prediction": JSON_MOVES_ENCODING[game]})
def _zip_moves(challenger_moves: List[str], human_moves: List[str]) -> List[Tuple[str, str]]:
move_encoding_dict = {value: index for index, value in enumerate(INTERNAL_MOVES_ENCODING)}
history = [(move_encoding_dict[i], move_encoding_dict[j])
for i, j in zip(challenger_moves, human_moves)]
return history
def _predict_next_move(challenger_moves: str, human_moves: str) -> str:
history = _zip_moves(challenger_moves, human_moves)
# what would have been predicted in the last rounds?
pred_hist = [_best_next_moves_for_game(
history[:i]) for i in range(2, len(history)+1)]
# if no history prediction, then returns random
if not pred_hist:
return random.choice(INTERNAL_MOVES_ENCODING)
# how would the different predictions have scored?
# we have the pred_hist from moves i=2 to len(history), so we can score them
# (see https://i.stack.imgur.com/jILea.png for the game rules)
n_pred = len(pred_hist[0])
scores = [[0]*5 for i in range(n_pred)]
for pred, real in zip(pred_hist[:-1], history[2:]):
for i in range(n_pred):
# %5: Python's modulo wraps negative ints around (e.g. -1 % 5 == 4,
# -2 % 5 == 3), which counts counterclockwise to the moves
# that beat a given move
scores[i][(real[1]-pred[i]+1) % 5] += 1
scores[i][(real[1]-pred[i]+3) % 5] += 1
# 1 & 3 move to the other "moves" that beat another
# for example Rock is beaten with Paper and Spock,
# which are 1 & 3 positions away
scores[i][(real[1]-pred[i]+2) % 5] -= 1
scores[i][(real[1]-pred[i]+4) % 5] -= 1
# depending in predicted strategies, select best one with less risks
# return best counter move
best_scores = [list(max(enumerate(s), key=lambda x: x[1])) for s in scores]
best_scores[-1][1] *= 1.001 # bias towards the simplest strategy
if best_scores[-1][1] < 0.4*len(history):
best_scores[-1][1] *= 1.4
strat, (shift, _) = max(enumerate(best_scores), key=lambda x: x[1][1])
return INTERNAL_MOVES_ENCODING[(pred_hist[-1][strat]+shift) % 5]
def _best_next_moves_for_game(hist: List[Tuple[int, int]]) -> List[int]:
N = len(hist)
# find longest match of the preceding moves in the earlier history
cand_m = cand_o = cand_b = range(N-1)
for l in range(1, min(N, 20)):
ref = hist[N-l]
# l = 1
# Looks for previous occurrences of the last move in my_moves, since hist[N-l] == hist[-1]
# l = 2
# it checks which of the possible candidates was preceded by the move previous to the last
# and so on... i.e. looks for the longest chain matching the last moves, to use the next move
cand_m_tmp = []
for c in cand_m:
if c >= l and hist[c-l+1][0] == ref[0]:
cand_m_tmp.append(c)
if not cand_m_tmp:
cand_m = cand_m[-1:]
else:
cand_m = cand_m_tmp[:]
# same for op_moves
cand_o_tmp = []
for c in cand_o:
if c >= l and hist[c-l+1][1] == ref[1]:
cand_o_tmp.append(c)
if not cand_o_tmp:
cand_o = cand_o[-1:]
else:
cand_o = cand_o_tmp[:]
# same for both_moves i.e directly the zipped tuples
cand_b_tmp = []
for c in cand_b:
if c >= l and hist[c-l+1] == ref:
cand_b_tmp.append(c)
if not cand_b_tmp:
cand_b = cand_b[-1:]
else:
cand_b = cand_b_tmp[:]
# analyze which moves were used how often, i.e a np.bincount
freq_m, freq_o = [0]*5, [0]*5
for m in hist:
freq_m[m[0]] += 1
freq_o[m[1]] += 1
# return predictions (or possible "good" strategies)
last_2_moves = [j for i in hist[:-3:-1] for j in i]
return (last_2_moves + # repeat last moves
[hist[cand_m[-1]+1][0], # history matching of my own moves
# history matching of opponent's moves
hist[cand_o[-1]+1][1],
hist[cand_b[-1]+1][0], # history matching of both
hist[cand_b[-1]+1][1],
freq_m.index(max(freq_m)), # my most frequent move
freq_o.index(max(freq_o)), # opponent's most frequent move
0])
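A worked check of the modular counter-move arithmetic from the comments above, using the same internal move order R, P, S, V, L (a standalone sketch, not part of the module):

ORDER = ['R', 'P', 'S', 'V', 'L']
rock = ORDER.index('R')
# the moves 1 and 3 positions away beat a given move:
print({ORDER[(rock + 1) % 5], ORDER[(rock + 3) % 5]})  # {'P', 'V'}: Paper covers Rock, Spock vaporizes Rock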
| 39.226027 | 101 | 0.596124 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,613 | 0.281648 |
5481ba7b076cad5057871b2955d0e7140c538c8a | 5,410 | py | Python | examples/trials/nas_cifar10/src/cifar10/nni_child_cifar10.py | runauto/nni | 30152b04c4739f5b4f95087dee5f1e66ee893078 | ["MIT"] | 2 | 2019-12-30T20:42:17.000Z | 2021-01-24T16:51:56.000Z | examples/trials/nas_cifar10/src/cifar10/nni_child_cifar10.py | runauto/nni | 30152b04c4739f5b4f95087dee5f1e66ee893078 | ["MIT"] | null | null | null | examples/trials/nas_cifar10/src/cifar10/nni_child_cifar10.py | runauto/nni | 30152b04c4739f5b4f95087dee5f1e66ee893078 | ["MIT"] | 1 | 2020-01-11T13:19:26.000Z | 2020-01-11T13:19:26.000Z |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
import logging
import tensorflow as tf
from src.cifar10.data_utils import read_data
from src.cifar10.general_child import GeneralChild
import src.cifar10_flags
from src.cifar10_flags import FLAGS
def build_logger(log_name):
logger = logging.getLogger(log_name)
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(log_name+'.log')
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
return logger
logger = build_logger("nni_child_cifar10")
def build_trial(images, labels, ChildClass):
'''Build child class'''
child_model = ChildClass(
images,
labels,
use_aux_heads=FLAGS.child_use_aux_heads,
cutout_size=FLAGS.child_cutout_size,
num_layers=FLAGS.child_num_layers,
num_cells=FLAGS.child_num_cells,
num_branches=FLAGS.child_num_branches,
fixed_arc=FLAGS.child_fixed_arc,
out_filters_scale=FLAGS.child_out_filters_scale,
out_filters=FLAGS.child_out_filters,
keep_prob=FLAGS.child_keep_prob,
drop_path_keep_prob=FLAGS.child_drop_path_keep_prob,
num_epochs=FLAGS.num_epochs,
l2_reg=FLAGS.child_l2_reg,
data_format=FLAGS.data_format,
batch_size=FLAGS.batch_size,
clip_mode="norm",
grad_bound=FLAGS.child_grad_bound,
lr_init=FLAGS.child_lr,
lr_dec_every=FLAGS.child_lr_dec_every,
lr_dec_rate=FLAGS.child_lr_dec_rate,
lr_cosine=FLAGS.child_lr_cosine,
lr_max=FLAGS.child_lr_max,
lr_min=FLAGS.child_lr_min,
lr_T_0=FLAGS.child_lr_T_0,
lr_T_mul=FLAGS.child_lr_T_mul,
optim_algo="momentum",
sync_replicas=FLAGS.child_sync_replicas,
num_aggregate=FLAGS.child_num_aggregate,
num_replicas=FLAGS.child_num_replicas
)
return child_model
def get_child_ops(child_model):
'''Assemble child op to a dict'''
child_ops = {
"global_step": child_model.global_step,
"loss": child_model.loss,
"train_op": child_model.train_op,
"lr": child_model.lr,
"grad_norm": child_model.grad_norm,
"train_acc": child_model.train_acc,
"optimizer": child_model.optimizer,
"num_train_batches": child_model.num_train_batches,
"eval_every": child_model.num_train_batches * FLAGS.eval_every_epochs,
"eval_func": child_model.eval_once,
}
return child_ops
class NASTrial():
def __init__(self):
images, labels = read_data(FLAGS.data_path, num_valids=0)
self.output_dir = os.path.join(os.getenv('NNI_OUTPUT_DIR'), '../..')
self.file_path = os.path.join(
self.output_dir, 'trainable_variable.txt')
self.graph = tf.Graph()
with self.graph.as_default():
self.child_model = build_trial(images, labels, GeneralChild)
self.total_data = {}
self.child_model.build_model()
self.child_ops = get_child_ops(self.child_model)
config = tf.ConfigProto(
intra_op_parallelism_threads=0,
inter_op_parallelism_threads=0,
allow_soft_placement=True)
self.sess = tf.train.SingularMonitoredSession(config=config)
logger.debug('NASTrial initialization done.')
def run_one_step(self):
'''Run this model on a batch of data'''
run_ops = [
self.child_ops["loss"],
self.child_ops["lr"],
self.child_ops["grad_norm"],
self.child_ops["train_acc"],
self.child_ops["train_op"],
]
loss, lr, gn, tr_acc, _ = self.sess.run(run_ops)
global_step = self.sess.run(self.child_ops["global_step"])
log_string = ""
log_string += "ch_step={:<6d}".format(global_step)
log_string += " loss={:<8.6f}".format(loss)
log_string += " lr={:<8.4f}".format(lr)
log_string += " |g|={:<8.4f}".format(gn)
log_string += " tr_acc={:<3d}/{:>3d}".format(tr_acc, FLAGS.batch_size)
if int(global_step) % FLAGS.log_every == 0:
logger.debug(log_string)
return loss, global_step
def run(self):
'''Run this model for the number of epochs set in FLAGS'''
max_acc = 0
while True:
_, global_step = self.run_one_step()
if global_step % self.child_ops['num_train_batches'] == 0:
acc = self.child_ops["eval_func"](
self.sess, "test", self.child_model)
max_acc = max(max_acc, acc)
'''@nni.report_intermediate_result(acc)'''
if global_step / self.child_ops['num_train_batches'] >= FLAGS.num_epochs:
'''@nni.report_final_result(max_acc)'''
break
def main(_):
logger.debug("-" * 80)
if not os.path.isdir(FLAGS.output_dir):
logger.debug(
"Path {} does not exist. Creating.".format(FLAGS.output_dir))
os.makedirs(FLAGS.output_dir)
elif FLAGS.reset_output_dir:
logger.debug(
"Path {} exists. Remove and remake.".format(FLAGS.output_dir))
shutil.rmtree(FLAGS.output_dir)
os.makedirs(FLAGS.output_dir)
logger.debug("-" * 80)
trial = NASTrial()
trial.run()
if __name__ == "__main__":
tf.app.run()
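A minimal standalone sketch of the file-handler logging pattern that build_logger wraps (the file name is arbitrary):

import logging

logger = logging.getLogger('demo')
logger.setLevel(logging.DEBUG)
handler = logging.FileHandler('demo.log')
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
logger.debug('this record goes to demo.log')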
| 33.190184 | 85 | 0.64085 | 2,340 | 0.432532 | 0 | 0 | 0 | 0 | 0 | 0 | 738 | 0.136414 |
5481d023ae1cb5111f38843d186a6cb4876d216a | 175 | py | Python | apps/oper/apps.py | dryprojects/MyBlog | ec04ba2bc658e96cddeb1d4766047ca8e89ff656 | ["BSD-3-Clause"] | 2 | 2021-08-17T13:29:21.000Z | 2021-09-04T05:00:01.000Z | apps/oper/apps.py | dryprojects/MyBlog | ec04ba2bc658e96cddeb1d4766047ca8e89ff656 | ["BSD-3-Clause"] | 1 | 2020-07-16T11:22:32.000Z | 2020-07-16T11:22:32.000Z | apps/oper/apps.py | dryprojects/MyBlog | ec04ba2bc658e96cddeb1d4766047ca8e89ff656 | ["BSD-3-Clause"] | 1 | 2020-09-18T10:41:59.000Z | 2020-09-18T10:41:59.000Z |
from django.apps import AppConfig
class OperConfig(AppConfig):
name = 'oper'
verbose_name = '用户操作管理'  # "User operation management"
def ready(self):
from oper import signals  # noqa: F401 -- imported for its side effect of connecting the app's signal receivers
| 17.5 | 34 | 0.64 | 146 | 0.780749 | 0 | 0 | 0 | 0 | 0 | 0 | 26 | 0.139037 |
5481e05c5889a5fab05aff46f53912b82371d733 | 1,952 | py | Python | stella/core/interpreter/lexer.py | xabinapal/stella | ae02055749f997323390d642c99a37b80aa5df68 | ["MIT"] | null | null | null | stella/core/interpreter/lexer.py | xabinapal/stella | ae02055749f997323390d642c99a37b80aa5df68 | ["MIT"] | null | null | null | stella/core/interpreter/lexer.py | xabinapal/stella | ae02055749f997323390d642c99a37b80aa5df68 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
import io
import collections
from stella.core.utils import RewindableIterator
from stella.core.interpreter.productions import Token
__all__ = ['Tokenizer', 'Lexer']
################################################################################
### Tokenizer
################################################################################
class Tokenizer(object):
def __init__(self, tokens):
self.tokens = tokens
def get_token(self, value):
return next((x for x in self.tokens if x.match(value)), None)
################################################################################
### Lexer
################################################################################
class Lexer(object):
def __init__(self, stream, tokenizer):
iterator = iter(stream)
self.iterator = RewindableIterator(iterator)
self.tokenizer = tokenizer
def __iter__(self):
return RewindableIterator(self)
def __next__(self):
token = None
tmp_value = next(self.iterator)
tmp_token = self.tokenizer.get_token(tmp_value)
token_found = False
while tmp_token or not token_found:
if tmp_token:
token_found = True
value = tmp_value
token = tmp_token
try:
char = self.iterator.peek()
tmp_token = self.tokenizer.get_token(tmp_value + char)
if not token and not tmp_token and self.tokenizer.get_token(char):
token_found = True
value = tmp_value
if tmp_token or not token_found:
tmp_value = tmp_value + char
next(self.iterator)
except StopIteration:
value = tmp_value
token = tmp_token
break
self.iterator.commit()
return Token(token, value)
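A usage sketch for the Tokenizer above; the regex-backed token type is hypothetical, standing in for stella's token classes, which only need a match(value) method:

import re

class RegexToken:
    def __init__(self, name, pattern):
        self.name = name
        self._rx = re.compile(pattern)

    def match(self, value):
        return self._rx.fullmatch(value) is not None

tokenizer = Tokenizer([RegexToken('number', r'[0-9]+'), RegexToken('plus', r'\+')])
print(tokenizer.get_token('42').name)  # number: the first token whose match() accepts the value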
| 29.134328 | 82 | 0.483607 | 1,406 | 0.720287 | 0 | 0 | 0 | 0 | 0 | 0 | 383 | 0.196209 |
548332d9c8a9e409da8648383e49cb1b1c4dbca5 | 12,628 | py | Python | tensorflow_v1/10_-_Sequence-to-sequence/03_-_Dynamic_attention_with_par-inject.py | mtanti/deeplearningtutorial | a6fef37c77216e4f98dba2bde7c62d6aa6292476 | ["MIT"] | 5 | 2019-05-31T08:30:28.000Z | 2020-02-13T20:17:13.000Z | tensorflow_v1/10_-_Sequence-to-sequence/03_-_Dynamic_attention_with_par-inject.py | mtanti/deeplearningtutorial | a6fef37c77216e4f98dba2bde7c62d6aa6292476 | ["MIT"] | null | null | null | tensorflow_v1/10_-_Sequence-to-sequence/03_-_Dynamic_attention_with_par-inject.py | mtanti/deeplearningtutorial | a6fef37c77216e4f98dba2bde7c62d6aa6292476 | ["MIT"] | 6 | 2019-04-12T15:34:05.000Z | 2019-10-01T16:57:39.000Z |
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
tf.logging.set_verbosity(tf.logging.ERROR)
max_epochs = 6000
init_stddev = 0.0001
source_embedding_size = 2
target_embedding_size = 2
source_state_size = 2
preattention_size = 2
target_state_size = 2
max_seq_len = 10
source_tokens = [
'i like it'.split(' '),
'i hate it'.split(' '),
'i don\'t hate it'.split(' '),
'i don\'t like it'.split(' '),
]
target_tokens = [
'i don\'t like it'.split(' '),
'i don\'t hate it'.split(' '),
'i hate it'.split(' '),
'i like it'.split(' '),
]
source_vocab = [ 'EDGE' ] + sorted({ token for sent in source_tokens for token in sent })
source_token2index = { token: index for (index, token) in enumerate(source_vocab) }
source_index2token = { index: token for (index, token) in enumerate(source_vocab) }
source_max_len = max(len(sent) for sent in source_tokens)
index_source_indexes = []
index_source_lens = []
for sent in source_tokens:
source_lens = len(sent)
source_index = [ source_token2index[token] for token in sent ] + [ 0 for _ in range(source_max_len - source_lens) ]
index_source_lens.append(source_lens)
index_source_indexes.append(source_index)
target_vocab = [ 'EDGE' ] + sorted({ token for sent in target_tokens for token in sent })
target_token2index = { token: index for (index, token) in enumerate(target_vocab) }
target_index2token = { index: token for (index, token) in enumerate(target_vocab) }
target_max_len = max(len(sent) for sent in target_tokens) + 1 #Plus edge token
index_target_prefixes = []
index_target_lens = []
index_target_targets = []
for sent in target_tokens:
target_len = len(sent) + 1 #Plus edge token
target_index = [ target_token2index[token] for token in sent ]
target_prefix = [ target_token2index['EDGE'] ] + target_index + [ 0 for _ in range(target_max_len - target_len) ]
target_target = target_index + [ target_token2index['EDGE'] ] + [ 0 for _ in range(target_max_len - target_len) ]
index_target_prefixes.append(target_prefix)
index_target_lens.append(target_len)
index_target_targets.append(target_target)
g = tf.Graph()
with g.as_default():
source_indexes = tf.placeholder(tf.int32, [None, None], 'source_indexes')
source_lens = tf.placeholder(tf.int32, [None], 'source_lens')
target_prefixes = tf.placeholder(tf.int32, [None, None], 'target_prefixes')
target_lens = tf.placeholder(tf.int32, [None], 'target_lens')
target_targets = tf.placeholder(tf.int32, [None, None], 'target_targets')
batch_size = tf.shape(source_indexes)[0]
source_seq_width = tf.shape(source_indexes)[1]
target_seq_width = tf.shape(target_prefixes)[1]
with tf.variable_scope('source'):
with tf.variable_scope('embedding'):
embedding_matrix = tf.get_variable('embedding_matrix', [len(source_vocab), source_embedding_size], tf.float32, tf.random_normal_initializer(stddev=init_stddev))
embedded = tf.nn.embedding_lookup(embedding_matrix, source_indexes)
with tf.variable_scope('init_state'):
init_state_fw = tf.get_variable('init_state_fw', [source_state_size], tf.float32, tf.random_normal_initializer(stddev=init_stddev))
batch_init_fw = tf.tile(tf.reshape(init_state_fw, [1, source_state_size]), [batch_size, 1])
init_state_bw = tf.get_variable('init_state_bw', [source_state_size], tf.float32, tf.random_normal_initializer(stddev=init_stddev))
batch_init_bw = tf.tile(tf.reshape(init_state_bw, [1, source_state_size]), [batch_size, 1])
with tf.variable_scope('rnn'):
cell_fw = tf.contrib.rnn.GRUCell(source_state_size)
cell_bw = tf.contrib.rnn.GRUCell(source_state_size)
((outputs_fw, outputs_bw), _) = tf.nn.bidirectional_dynamic_rnn(cell_fw, cell_bw, embedded, sequence_length=source_lens, initial_state_fw=batch_init_fw, initial_state_bw=batch_init_bw)
outputs_ = tf.concat([ outputs_fw, outputs_bw ], axis=2)
outputs_2d_ = tf.reshape(outputs_, [batch_size*source_seq_width, 2*source_state_size])
W = tf.get_variable('W', [2*source_state_size, source_state_size], tf.float32, tf.random_normal_initializer(stddev=init_stddev))
b = tf.get_variable('b', [source_state_size], tf.float32, tf.zeros_initializer())
source_outputs_2d = tf.matmul(outputs_2d_, W) + b
source_outputs = tf.reshape(source_outputs_2d, [batch_size, source_seq_width, source_state_size])
with tf.variable_scope('targets'):
with tf.variable_scope('embedding'):
embedding_matrix = tf.get_variable('embedding_matrix', [len(target_vocab), target_embedding_size], tf.float32, tf.random_normal_initializer(stddev=init_stddev))
embedded = tf.nn.embedding_lookup(embedding_matrix, target_prefixes)
with tf.variable_scope('init_state'):
init_state = tf.get_variable('init_state', [target_state_size], tf.float32, tf.random_normal_initializer(stddev=init_stddev))
batch_init = tf.tile(tf.reshape(init_state, [1, target_state_size]), [batch_size, 1])
with tf.variable_scope('rnn'):
#Custom RNN cell for producing attention vectors that condition the language model via par-inject
class CellAttention(tf.nn.rnn_cell.RNNCell):
def __init__(self):
super(CellAttention, self).__init__()
self.W1 = None
self.b1 = None
self.W2 = None
self.b2 = None
self.inner_cell = tf.contrib.rnn.GRUCell(target_state_size) #The inner RNN cell that actually transforms the input and previous state into the next state
@property
def state_size(self):
return source_state_size
@property
def output_size(self):
return (source_seq_width, source_state_size) #Return the attention vector apart from the next state (to be able to inspect it later)
def build(self, inputs_shape):
self.W1 = self.add_variable('W1', [source_state_size + target_state_size, preattention_size], tf.float32, tf.random_normal_initializer(stddev=init_stddev))
self.b1 = tf.get_variable('b1', [preattention_size], tf.float32, tf.zeros_initializer())
self.W2 = self.add_variable('W2', [preattention_size, 1], tf.float32, tf.random_normal_initializer(stddev=init_stddev))
self.b2 = tf.get_variable('b2', [1], tf.float32, tf.zeros_initializer())
self.built = True
def call(self, next_inputs, curr_states):
with tf.variable_scope('attention'):
#Replicate the current state for each source sentence word in order to concatenate it with each source sentence word vector
expanded_curr_state = tf.tile(tf.reshape(curr_states, [batch_size, 1, target_state_size]), [1, source_seq_width, 1])
pre_attention_input = tf.concat([ source_outputs, expanded_curr_state ], axis=2)
pre_attention_input_2d = tf.reshape(pre_attention_input, [batch_size*source_seq_width, source_state_size + target_state_size])
pre_attention_2d = tf.tanh(tf.matmul(pre_attention_input_2d, self.W1) + self.b1)
attention_logits = tf.reshape(tf.matmul(pre_attention_2d, self.W2) + self.b2, [batch_size, source_seq_width])
mask = tf.sequence_mask(source_lens, source_seq_width, tf.float32)
attention = tf.nn.softmax(attention_logits*mask + -1e10*(1 - mask))
expanded_attention = tf.tile(tf.reshape(attention, [batch_size, source_seq_width, 1]), [1, 1, source_state_size])
attended_sources = tf.reduce_sum(source_outputs*expanded_attention, axis=1)
#Pass the input and state to the inner cell to produce the next state (input consists of word embedding and attended source)
(new_output, new_state) = self.inner_cell(tf.concat([ attended_sources, next_inputs ], axis=1), curr_states)
return ((attention, new_state), new_state)
cell = CellAttention()
((attentions, outputs), _) = tf.nn.dynamic_rnn(cell, embedded, sequence_length=target_lens, initial_state=batch_init)
with tf.variable_scope('output'):
W = tf.get_variable('W', [target_state_size, len(target_vocab)], tf.float32, tf.random_normal_initializer(stddev=init_stddev))
b = tf.get_variable('b', [len(target_vocab)], tf.float32, tf.zeros_initializer())
outputs_2d = tf.reshape(outputs, [batch_size*target_seq_width, target_state_size])
logits_2d = tf.matmul(outputs_2d, W) + b
logits = tf.reshape(logits_2d, [batch_size, target_seq_width, len(target_vocab)])
probs = tf.nn.softmax(logits)
next_word_probs = probs[:, -1, :]
mask = tf.sequence_mask(target_lens, target_seq_width, tf.float32)
error = tf.reduce_sum(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=target_targets, logits=logits)*mask)/tf.cast(tf.reduce_sum(target_lens), tf.float32)
step = tf.train.AdamOptimizer().minimize(error)
init = tf.global_variables_initializer()
g.finalize()
with tf.Session() as s:
s.run([ init ], { })
(fig, ax) = plt.subplots(1, 1)
plt.ion()
train_errors = list()
print('epoch', 'train error', sep='\t')
for epoch in range(1, max_epochs+1):
s.run([ step ], { source_indexes: index_source_indexes, source_lens: index_source_lens, target_prefixes: index_target_prefixes, target_lens: index_target_lens, target_targets: index_target_targets })
[ train_error ] = s.run([ error ], { source_indexes: index_source_indexes, source_lens: index_source_lens, target_prefixes: index_target_prefixes, target_lens: index_target_lens, target_targets: index_target_targets })
train_errors.append(train_error)
if epoch%100 == 0:
print(epoch, train_error, sep='\t')
ax.cla()
ax.plot(np.arange(len(train_errors)), train_errors, color='red', linestyle='-', label='train')
ax.set_xlim(0, max_epochs)
ax.set_xlabel('epoch')
ax.set_ylim(0.0, 2.0)
ax.set_ylabel('XE') #Cross entropy
ax.grid(True)
ax.set_title('Error progress')
ax.legend()
fig.tight_layout()
plt.draw()
plt.pause(0.0001)
print()
for sent in source_tokens:
source = [ source_token2index[token] for token in sent ]
prefix_prob = 1.0
index_prefix = [ target_token2index['EDGE'] ]
for _ in range(max_seq_len):
[ curr_probs ] = s.run([ next_word_probs ], { source_indexes: [ source ], source_lens: [ len(source) ], target_prefixes: [ index_prefix ], target_lens: [ len(index_prefix) ] })
selected_index = np.argmax(curr_probs[0, :])
prefix_prob = prefix_prob*curr_probs[0, selected_index]
index_prefix.append(selected_index)
if selected_index == target_token2index['EDGE']:
break
index_generated = index_prefix[1:]
generated = [ target_index2token[i] for i in index_generated ]
[ curr_attentions ] = s.run([ attentions ], { source_indexes: [ source ], source_lens: [ len(source) ], target_prefixes: [ index_generated ], target_lens: [ len(index_generated) ] })
print('Input sentence: ', ' '.join(sent))
print('Generated sentence:', ' '.join(generated))
print('Sentence probability:', prefix_prob)
print('Attention:')
print('', '\t', *sent)
for i in range(len(generated)):
print('', generated[i]+'\t', np.round(curr_attentions[0, i, :], 2))
print()
fig.show()
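A NumPy check of the attention-masking trick used inside CellAttention above: padded source positions get a large negative logit, so softmax drives their weight to zero (standalone sketch):

import numpy as np

logits = np.array([2.0, 1.0, 0.5, 0.0])
mask = np.array([1.0, 1.0, 1.0, 0.0])        # the last position is padding
masked = logits * mask + -1e10 * (1 - mask)
attention = np.exp(masked) / np.exp(masked).sum()
print(attention.round(3))                    # ~[0.629, 0.231, 0.14, 0.0]: padding gets no weight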
| 52.83682 | 230 | 0.633275 | 3,153 | 0.249683 | 0 | 0 | 293 | 0.023202 | 0 | 0 | 1,183 | 0.093681 |
54835562ea5262f2ee7bb00d7ceac361aa51a6f1 | 226 | py | Python | lnd/utils.py | gsmadi/lightningpy | 14f4cc2dd5eb8726a06db8798944302974b890aa | ["MIT"] | null | null | null | lnd/utils.py | gsmadi/lightningpy | 14f4cc2dd5eb8726a06db8798944302974b890aa | ["MIT"] | 3 | 2019-08-21T11:51:52.000Z | 2019-10-07T11:51:45.000Z | lnd/utils.py | smadici-labs/pylnd | 14f4cc2dd5eb8726a06db8798944302974b890aa | ["MIT"] | null | null | null |
import codecs
def encode_macaroon(macaroon):
encoded_macaroon = codecs.encode(macaroon, 'hex')
return encoded_macaroon
def read_file(file_path):
    # use a context manager so the file handle is closed once the bytes are read
    with open(file_path, 'rb') as opened_file:
        return opened_file.read()
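Usage sketch (the macaroon path is hypothetical; LND macaroons are binary files):

macaroon = read_file('/path/to/admin.macaroon')
hex_macaroon = encode_macaroon(macaroon)  # hex-encoded bytes, e.g. for request metadata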
| 20.545455 | 53 | 0.738938 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 0.039823 |
5483a8653b465908b4e7a3a5f68321bd151006ac | 1,649 | py | Python | ctapipe/image/muon/ring_fitter.py | chaimain/ctapipe | ff80cff2daaf56e1d05ea6501c68fd83a9cf79d5 | ["BSD-3-Clause"] | 53 | 2015-06-23T15:24:20.000Z | 2021-09-23T22:30:58.000Z | ctapipe/image/muon/ring_fitter.py | chaimain/ctapipe | ff80cff2daaf56e1d05ea6501c68fd83a9cf79d5 | ["BSD-3-Clause"] | 1,537 | 2015-06-24T11:27:16.000Z | 2022-03-31T16:17:08.000Z | ctapipe/image/muon/ring_fitter.py | chaimain/ctapipe | ff80cff2daaf56e1d05ea6501c68fd83a9cf79d5 | ["BSD-3-Clause"] | 275 | 2015-07-09T14:09:28.000Z | 2022-03-17T22:25:51.000Z |
import numpy as np
from ctapipe.core import Component
from ctapipe.containers import MuonRingContainer
from .fitting import kundu_chaudhuri_circle_fit, taubin_circle_fit
import traitlets as traits
# the fit methods do not expose the same interface, so we
# force the same interface onto them, here.
# we also modify their names slightly, since the names are
# exposed to the user via the string traitlet `fit_method`
def kundu_chaudhuri(x, y, weights, mask):
"""kundu_chaudhuri_circle_fit with x, y, weights, mask interface"""
return kundu_chaudhuri_circle_fit(x[mask], y[mask], weights[mask])
def taubin(x, y, weights, mask):
"""taubin_circle_fit with x, y, weights, mask interface"""
return taubin_circle_fit(x, y, mask)
FIT_METHOD_BY_NAME = {m.__name__: m for m in [kundu_chaudhuri, taubin]}
__all__ = ["MuonRingFitter"]
class MuonRingFitter(Component):
"""Different ring fit algorithms for muon rings"""
fit_method = traits.CaselessStrEnum(
list(FIT_METHOD_BY_NAME.keys()),
default_value=list(FIT_METHOD_BY_NAME.keys())[0],
).tag(config=True)
def __call__(self, x, y, img, mask):
"""allows any fit to be called in form of
MuonRingFitter(fit_method = "name of the fit")
"""
fit_function = FIT_METHOD_BY_NAME[self.fit_method]
radius, center_x, center_y = fit_function(x, y, img, mask)
return MuonRingContainer(
center_x=center_x,
center_y=center_y,
radius=radius,
center_phi=np.arctan2(center_y, center_x),
center_distance=np.sqrt(center_x ** 2 + center_y ** 2),
)
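A usage sketch with synthetic pixel data; in real ctapipe pipelines x and y are camera-frame coordinates (typically astropy quantities), so the plain arrays here are purely illustrative:

import numpy as np

fitter = MuonRingFitter(fit_method='taubin')
x = np.array([1.0, 0.0, -1.0, 0.0])
y = np.array([0.0, 1.0, 0.0, -1.0])
img = np.ones(4)
mask = np.ones(4, dtype=bool)
ring = fitter(x, y, img, mask)  # MuonRingContainer with radius ~1 and center ~(0, 0)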
| 32.333333 | 71 | 0.691935 | 797 | 0.483323 | 0 | 0 | 0 | 0 | 0 | 0 | 519 | 0.314736 |
5484be9bfb8cd5688ba3f0f969954eaa83e32875 | 1,873 | py | Python | Main.py | dalwindercheema/FWPython | 4c5d4d6d0b29a199dbf37d16bd4ed9bb2ac22d19 | ["BSD-2-Clause"] | 2 | 2021-12-18T17:08:02.000Z | 2021-12-22T04:19:15.000Z | Main.py | dalwindercheema/FWPython | 4c5d4d6d0b29a199dbf37d16bd4ed9bb2ac22d19 | ["BSD-2-Clause"] | null | null | null | Main.py | dalwindercheema/FWPython | 4c5d4d6d0b29a199dbf37d16bd4ed9bb2ac22d19 | ["BSD-2-Clause"] | null | null | null |
import pandas as pd
from os import listdir
import numpy
from sklearn.model_selection import StratifiedKFold
from FS_ALO import WFS
from FW_ALO import WFW
from WFSWFW_ALO import WFSWFW
import matplotlib.pyplot as plt
def main_CV():
path='./datasets'
direc=sorted(listdir(path))
print(direc)
population=20
Total_iter=200
total_reruns=20
Cost=numpy.zeros([len(direc),total_reruns,3],dtype=numpy.float64)
CC=numpy.zeros([len(direc),total_reruns,Total_iter,3],dtype=numpy.float64)
Best_WFS=[]
Best_WFW=[]
Best_WFSWFW=[]
for dir_idx in range(0,len(direc)):
data_path=path+'/'+direc[dir_idx]
print(data_path)
tl=pd.read_csv(data_path,header=None,dtype='str')
tl.drop([0],axis=0,inplace=True)
data_array=numpy.array(tl)
data=data_array.astype(numpy.float64) # numpy.float was removed in NumPy 1.24; float64 matches the old alias
dim=data.shape
train=data[:,0:dim[1]-1]
label=data[:,dim[1]-1]
for i in range(0,total_reruns):
cv=StratifiedKFold(n_splits=10,shuffle=True,random_state=i)
Cost[dir_idx,i,0],Elite_pos1,CC[dir_idx,i,:,0]=WFS(train,label,cv,population,Total_iter)
Cost[dir_idx,i,1],Elite_pos2,CC[dir_idx,i,:,1]=WFW(train,label,cv,population,Total_iter)
Cost[dir_idx,i,2],FS_pos,FW_pos,CC[dir_idx,i,:,2]=WFSWFW(train,label,cv,population,Total_iter)
Best_WFS.append(Elite_pos1)
Best_WFW.append(Elite_pos2)
Best_WFSWFW.append(FS_pos)
Best_WFSWFW.append(FW_pos)
mean_CC=numpy.mean(CC,axis=1)
for i in range(0,1):
plt.plot(mean_CC[i,:,0],color='r')
plt.plot(mean_CC[i,:,1],color='b')
plt.plot(mean_CC[i,:,2],color='g')
return Cost,Best_WFS,Best_WFW,Best_WFSWFW,CC
# Main program
Cost,Best_WFS,Best_WFW,Best_WFSWFW,CC=main_CV()
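A standalone sketch of the repeated stratified cross-validation pattern used in main_CV (synthetic data; fold and rerun counts reduced):

import numpy
from sklearn.model_selection import StratifiedKFold

X = numpy.random.rand(20, 3)
y = numpy.array([0, 1] * 10)
for rerun in range(3):  # vary random_state per rerun, as main_CV does
    cv = StratifiedKFold(n_splits=5, shuffle=True, random_state=rerun)
    for train_idx, test_idx in cv.split(X, y):
        pass  # every fold preserves the 0/1 class ratio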
| 36.019231 | 107 | 0.645489 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 44 | 0.023492 |
548634bd7f60817d2246c17acdb44bb98affa644 | 1,189 | py | Python | demo/demo/models.py | dracarysX/django-rest-query | 62fe8ee8f72251a1a8982265fff57870f2d43ca9 | ["MIT"] | 2 | 2017-06-28T02:51:52.000Z | 2017-06-28T09:28:33.000Z | demo/demo/models.py | dracarysX/django-rest-query | 62fe8ee8f72251a1a8982265fff57870f2d43ca9 | ["MIT"] | null | null | null | demo/demo/models.py | dracarysX/django-rest-query | 62fe8ee8f72251a1a8982265fff57870f2d43ca9 | ["MIT"] | null | null | null |
#! /usr/bin/env python
# -*-coding: utf-8 -*-
__author__ = 'dracarysX'
from django.db import models
class Publisher(models.Model):
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=100)
class Meta:
db_table = 'Publisher'
def __str__(self):
return 'Publisher: {}'.format(self.name)
class School(models.Model):
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=100)
class Meta:
db_table = 'School'
def __str__(self):
return 'School: {}'.format(self.name)
class Author(models.Model):
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=50)
age = models.IntegerField()
school = models.ForeignKey(School)
class Meta:
db_table = 'Author'
def __str__(self):
return 'Author: {}'.format(self.name)
class Book(models.Model):
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=50)
author = models.ForeignKey(Author)
publisher = models.ForeignKey(Publisher)
class Meta:
db_table = 'Book'
def __str__(self):
return 'Book: {}'.format(self.name)
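A hypothetical ORM usage for the models above (requires a configured Django project; the filter values are made up):

books = (Book.objects
         .select_related('author__school', 'publisher')
         .filter(author__age__gte=30))
for book in books:
    print(book.name, book.author.school.name, book.publisher.name)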
| 22.018519 | 48 | 0.652649 | 1,076 | 0.904962 | 0 | 0 | 0 | 0 | 0 | 0 | 137 | 0.115223 |
54870fd0b78e5e716753c262ab01d38621a1dd9c | 4,796 | py | Python | feedback-api/src/api/services/feedback/feedback_camunda_service.py | josekudiyirippil/queue-management | e56a987e14cfd2b50b820f679c7669060450da8e | ["Apache-2.0"] | 30 | 2018-09-19T03:30:51.000Z | 2022-03-07T02:57:05.000Z | feedback-api/src/api/services/feedback/feedback_camunda_service.py | ann-aot/queue-management | 8ac8353a1e5f3f27fea74e70831ab5f0590d1805 | ["Apache-2.0"] | 159 | 2018-09-17T23:45:58.000Z | 2022-03-30T17:35:05.000Z | feedback-api/src/api/services/feedback/feedback_camunda_service.py | ann-aot/queue-management | 8ac8353a1e5f3f27fea74e70831ab5f0590d1805 | ["Apache-2.0"] | 52 | 2018-05-18T18:30:06.000Z | 2021-08-25T12:00:29.000Z |
# Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Submit Citizen feedback.
This module consists of API that calls Camunda BPM to save citizen feedback comments.
"""
import os, requests, json
from typing import Dict
from jinja2 import Environment, FileSystemLoader
from .feedback_base_service import FeedbackBaseService
from flask import jsonify
class FeedbackCamundaService(FeedbackBaseService):
"""Implementation from FeedbackService."""
def submit(self, payload):
"""Submit feedback to Camunda API"""
camunda_service_endpoint = os.getenv('FEEDBACK_CAMUNDA_URL')
keycloak_endpoint = os.getenv('FEEDBACK_AUTH_URL')
keycloak_client_id = os.getenv('FEEDBACK_AUTH_CLIENT_ID')
keycloak_client_secret = os.getenv('FEEDBACK_AUTH_CLIENT_SECRET')
auth_payload = {"grant_type":"client_credentials",
"client_id":keycloak_client_id,
"client_secret":keycloak_client_secret}
try:
auth_response = requests.post(keycloak_endpoint,data=auth_payload)
access_token = auth_response.json()['access_token']
headers = {'Content-Type': 'application/json', 'Authorization': f'Bearer {access_token}'}
feedback_response = requests.post(camunda_service_endpoint,
headers=headers,
data=json.dumps(payload), timeout=10.0)
response_code = feedback_response.status_code
if response_code not in (200, 201, 202):
raise Exception('Camunda API Failure')
return feedback_response.status_code
except Exception as e:
feedback_type = payload['variables']['engagement']['value']
feedback_message = payload['variables']['citizen_comments']['value']
response_required = payload['variables']['response']['value']
citizen_name = payload['variables']['citizen_name']['value']
citizen_contact = payload['variables']['citizen_contact']['value']
citizen_email = payload['variables']['citizen_email']['value']
service_date = payload['variables']['service_date']['value']
submit_date_time = payload['variables']['submit_date_time']['value']
ENV = Environment(loader=FileSystemLoader('.'), autoescape=True)
template = ENV.get_template('camunda_email_template.template')
body = template.render(feedback_type =feedback_type,
feedback_message =feedback_message,
response_required =response_required,
citizen_name =citizen_name,
citizen_contact =citizen_contact,
citizen_email =citizen_email,
service_date =service_date,
submit_date_time =submit_date_time)
application_auth_url = os.getenv('APP_AUTH_URL')
application_client_id = os.getenv('APP_AUTH_CLIENT_ID')
application_client_secret = os.getenv('APP_AUTH_CLIENT_SECRET')
notification_email_url = os.getenv('NOTIFICATION_EMAIL_URL')
email_to = (os.getenv('NOTIFICATION_EMAIL_TO')).split(",")
app_auth_payload = {"grant_type":"client_credentials",
"client_id":application_client_id,
"client_secret":application_client_secret}
email_payload = {
'bodyType': 'text',
'body': body,
'subject': 'Citizen Feedback - Camunda API failure',
'to': email_to
}
app_auth_response = requests.post(application_auth_url,data=app_auth_payload)
app_access_token = app_auth_response.json()['access_token']
email_headers = {'Content-Type': 'application/json', 'Authorization': f'Bearer {app_access_token}'}
email_response = requests.post(notification_email_url,
headers=email_headers,
data=json.dumps(email_payload))
print(email_response)
print(e)
return email_response.status_code
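A hypothetical payload shape implied by the exception handler above; the variable keys are taken from the code, the values are invented:

payload = {
    'variables': {
        'engagement': {'value': 'service'},
        'citizen_comments': {'value': 'Quick and friendly service.'},
        'response': {'value': 'no'},
        'citizen_name': {'value': 'Jane Doe'},
        'citizen_contact': {'value': '555-0100'},
        'citizen_email': {'value': 'jane@example.com'},
        'service_date': {'value': '2021-06-01'},
        'submit_date_time': {'value': '2021-06-01T10:00:00Z'},
    }
}
status = FeedbackCamundaService().submit(payload)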
| 51.021277 | 111 | 0.641785 | 3,891 | 0.811132 | 0 | 0 | 0 | 0 | 0 | 0 | 1,665 | 0.347092 |
5489ae18fd1a18ba304d5257203fc13d1b20346d | 2,334 | py | Python | dezede/urls.py | dezede/dezede | 985ed1b42a2a6bab996e26c1b92444ae04afcc2c | ["BSD-3-Clause"] | 15 | 2015-02-10T21:16:31.000Z | 2021-03-25T16:46:20.000Z | dezede/urls.py | dezede/dezede | 985ed1b42a2a6bab996e26c1b92444ae04afcc2c | ["BSD-3-Clause"] | 4 | 2021-02-10T15:42:08.000Z | 2022-03-11T23:20:38.000Z | dezede/urls.py | dezede/dezede | 985ed1b42a2a6bab996e26c1b92444ae04afcc2c | ["BSD-3-Clause"] | 6 | 2016-07-10T14:20:48.000Z | 2022-01-19T18:34:02.000Z |
from django.conf import settings
from django.conf.urls import *
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.sitemaps.views import sitemap
from django.views.decorators.cache import cache_page
from django.views.generic import TemplateView
from ajax_select import urls as ajax_select_urls
from .views import (
HomeView, CustomSearchView, autocomplete, ErrorView, BibliographieView,
RssFeed, GlobalSitemap,
)
admin.autodiscover()
urlpatterns = [
url(r'^$', HomeView.as_view(), name='home'),
url(r'^', include('libretto.urls')),
url(r'^examens/', include('examens.urls')),
url(r'^presentation$',
TemplateView.as_view(template_name='pages/presentation.html'),
name='presentation'),
url(r'^contribuer$',
TemplateView.as_view(template_name='pages/contribute.html'),
name='contribuer'),
url(r'^bibliographie$', BibliographieView.as_view(), name='bibliographie'),
url(r'^', include('accounts.urls')),
url(r'^dossiers/', include('dossiers.urls')),
url(r'^admin/lookups/', include(ajax_select_urls)),
url(r'^admin/', admin.site.urls),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^tinymce/', include('tinymce.urls')),
url(r'^grappelli/', include('grappelli.urls')),
url(r'^recherche/', CustomSearchView(), name='haystack_search'),
url(r'^api-auth/', include('rest_framework.urls',
namespace='rest_framework')),
url(r'^autocomplete$', autocomplete, name='autocomplete'),
url(r'^rss\.xml$', RssFeed(), name='rss_feed'),
url(r'^sitemap.xml$', cache_page(24*60*60)(sitemap),
{'sitemaps': {'global': GlobalSitemap}},
name='django.contrib.sitemaps.views.sitemap'),
url(r'^404$', ErrorView.as_view(status=404)),
]
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL,
document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
url(r'^403$', ErrorView.as_view(status=403)),
url(r'^500$', ErrorView.as_view(status=500)),
url(r'^503$', ErrorView.as_view(status=503)),
]
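Hypothetical reverse lookups against the named routes above (only meaningful inside a configured Django project):

from django.urls import reverse

reverse('home')            # '/'
reverse('bibliographie')   # '/bibliographie'
reverse('presentation')    # '/presentation'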
| 38.9 | 79 | 0.662811 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 612 | 0.262211 |
548afc21b16ee46ad8044ba3566ba260b8c8d71a | 899 | py | Python | database/chemtrack/contacts.py | mshobair/invitro_cheminformatics | 17201496c73453accd440646a1ee81726119a59c | ["MIT"] | null | null | null | database/chemtrack/contacts.py | mshobair/invitro_cheminformatics | 17201496c73453accd440646a1ee81726119a59c | ["MIT"] | null | null | null | database/chemtrack/contacts.py | mshobair/invitro_cheminformatics | 17201496c73453accd440646a1ee81726119a59c | ["MIT"] | null | null | null |
import datetime
from database.database_schemas import Schemas
from sqlalchemy import Column, Integer, String, DateTime
from database.base import Base
class Contacts(Base):
"""Maps to contacts table in chemprop databases."""
__tablename__ = 'contacts'
__table_args__ = {'schema': Schemas.qsar_schema}
id = Column(Integer, primary_key=True, nullable=False)
first_name = Column(String)
last_name = Column(String)
vendor_id = Column(Integer)
email = Column(String)
title = Column(String)
phone1 = Column(String)
phone2 = Column(String)
fax = Column(String)
cell = Column(String)
other_details = Column(String)
department = Column(String)
contact_type_id = Column(Integer)
created_at = Column(DateTime, default=datetime.datetime.now, nullable=False)
updated_at = Column(DateTime, default=datetime.datetime.now, nullable=False)
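A query-construction sketch for the mapped class above; no live database is needed just to build and render the statement (SQLAlchemy 1.4+ select() syntax assumed):

from sqlalchemy import select

stmt = select(Contacts).where(Contacts.vendor_id == 1)
print(stmt)  # renders a SELECT against the qsar schema's contacts table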
| 31 | 80 | 0.72525 | 745 | 0.828699 | 0 | 0 | 0 | 0 | 0 | 0 | 69 | 0.076752 |
548ba908b52f98060805c6474bd241356237c223 | 7,487 | py | Python | otter/generate/autograder.py | drjbarker/otter-grader | 9e89e1675b09cf7889995b5f1bc8e1648bf6c309 | ["BSD-3-Clause"] | null | null | null | otter/generate/autograder.py | drjbarker/otter-grader | 9e89e1675b09cf7889995b5f1bc8e1648bf6c309 | ["BSD-3-Clause"] | null | null | null | otter/generate/autograder.py | drjbarker/otter-grader | 9e89e1675b09cf7889995b5f1bc8e1648bf6c309 | ["BSD-3-Clause"] | null | null | null |
"""
Gradescope autograder configuration generator for Otter Generate
"""
import os
import json
import shutil
# import subprocess
import zipfile
import tempfile
import pathlib
import pkg_resources
import yaml
from glob import glob
from subprocess import PIPE
from jinja2 import Template
from .token import APIClient
from .utils import zip_folder
from ..plugins import PluginCollection
from ..run.run_autograder.constants import DEFAULT_OPTIONS
TEMPLATE_DIR = pkg_resources.resource_filename(__name__, "templates")
MINICONDA_INSTALL_URL = "https://repo.anaconda.com/miniconda/Miniconda3-py38_4.9.2-Linux-x86_64.sh"
OTTER_ENV_NAME = "otter-env"
def main(tests_path, output_path, config, lang, requirements, overwrite_requirements, environment,
username, password, files, assignment=None, plugin_collection=None, **kwargs):
"""
Runs Otter Generate
Args:
tests_path (``str``): path to directory of test files for this assignment
output_path (``str``): directory in which to write output zip file
config (``str``): path to an Otter configuration JSON file
lang (``str``): the language of the assignment; one of ``["python", "r"]``
requirements (``str``): path to a Python or R requirements file for this assignment
overwrite_requirements (``bool``): whether to overwrite the default requirements instead of
adding to them
environment (``str``): path to a conda environment file for this assignment
username (``str``): a username for Gradescope for generating a token
password (``str``): a password for Gradescope for generating a token
files (``list[str]``): list of file paths to add to the zip file
assignment (``otter.assign.assignment.Assignment``, optional): the assignment configurations
if used with Otter Assign
**kwargs: ignored kwargs (a remnant of how the argument parser is built)
Raises:
``FileNotFoundError``: if the specified Otter configuration JSON file could not be found
``ValueError``: if the configurations specify a Gradescope course ID or assignment ID but not
both
"""
# read in otter_config.json
if config is None and os.path.isfile("otter_config.json"):
config = "otter_config.json"
if config is not None and not os.path.isfile(config):
raise FileNotFoundError(f"Could not find otter configuration file {config}")
if config:
with open(config) as f:
otter_config = json.load(f)
else:
otter_config = {}
if "course_id" in otter_config and "assignment_id" in otter_config:
client = APIClient()
if username is not None and password is not None:
client.log_in(username, password)
token = client.token
else:
token = client.get_token()
otter_config["token"] = token
elif "course_id" in otter_config or "assignment_id" in otter_config:
raise ValueError(f"Otter config contains 'course_id' or 'assignment_id' but not both")
options = DEFAULT_OPTIONS.copy()
options.update(otter_config)
# update language
options["lang"] = lang.lower()
template_dir = os.path.join(TEMPLATE_DIR, options["lang"])
templates = {}
for fn in os.listdir(template_dir):
fp = os.path.join(template_dir, fn)
if os.path.isfile(fp): # prevents issue w/ finding __pycache__ in template dirs
with open(fp) as f:
templates[fn] = Template(f.read())
template_context = {
"autograder_dir": options['autograder_dir'],
"otter_env_name": OTTER_ENV_NAME,
"miniconda_install_url": MINICONDA_INSTALL_URL,
"ottr_branch": "stable",
}
if plugin_collection is None:
plugin_collection = PluginCollection(otter_config.get("plugins", []), None, {})
else:
plugin_collection.add_new_plugins(otter_config.get("plugins", []))
plugin_collection.run("during_generate", otter_config, assignment)
# create tmp directory to zip inside
with tempfile.TemporaryDirectory() as td:
# try:
# copy tests into tmp
test_dir = os.path.join(td, "tests")
os.mkdir(test_dir)
pattern = ("*.py", "*.[Rr]")[options["lang"] == "r"]
for file in glob(os.path.join(tests_path, pattern)):
shutil.copy(file, test_dir)
# open requirements if it exists
requirements = requirements
reqs_filename = f"requirements.{'R' if options['lang'] == 'r' else 'txt'}"
if requirements is None and os.path.isfile(reqs_filename):
requirements = reqs_filename
if requirements:
assert os.path.isfile(requirements), f"Requirements file {requirements} not found"
f = open(requirements)
else:
f = open(os.devnull)
template_context["other_requirements"] = f.read()
template_context["overwrite_requirements"] = overwrite_requirements
# close the stream
f.close()
# open environment if it exists
# unlike requirements.txt, we will always overwrite, not append by default
environment = environment
env_filename = "environment.yml"
if environment is None and os.path.isfile(env_filename):
environment = env_filename
if environment:
assert os.path.isfile(environment), f"Environment file {environment} not found"
with open(environment) as f:
data = yaml.safe_load(f)
data['name'] = template_context["otter_env_name"]
template_context["other_environment"] = yaml.safe_dump(data, default_flow_style=False)
f.close()
else:
template_context["other_environment"] = None
rendered = {}
for fn, tmpl in templates.items():
rendered[fn] = tmpl.render(**template_context)
if os.path.isabs(output_path):
zip_path = os.path.join(output_path, "autograder.zip")
else:
zip_path = os.path.join(os.getcwd(), output_path, "autograder.zip")
if os.path.exists(zip_path):
os.remove(zip_path)
with zipfile.ZipFile(zip_path, mode="w") as zf:
for fn, contents in rendered.items():
zf.writestr(fn, contents)
test_dir = "tests"
pattern = ("*.py", "*.[Rr]")[options["lang"] == "r"]
for file in glob(os.path.join(tests_path, pattern)):
zf.write(file, arcname=os.path.join(test_dir, os.path.basename(file)))
zf.writestr("otter_config.json", json.dumps(otter_config, indent=2))
# copy files into tmp
if len(files) > 0:
for file in files:
full_fp = os.path.abspath(file)
assert os.getcwd() in full_fp, f"{file} is not in a subdirectory of the working directory"
if os.path.isfile(full_fp):
zf.write(file, arcname=os.path.join("files", file))
elif os.path.isdir(full_fp):
zip_folder(zf, full_fp, prefix="files")
else:
raise ValueError(f"Could not find file or directory '{full_fp}'")
if assignment is not None:
assignment._otter_config = otter_config
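A hypothetical direct invocation mirroring the signature above, with every argument spelled out so the fallbacks are explicit (paths are made up):

main(
    tests_path='tests',
    output_path='.',
    config=None,                  # falls back to ./otter_config.json when present
    lang='python',
    requirements=None,            # falls back to ./requirements.txt for Python
    overwrite_requirements=False,
    environment=None,             # falls back to ./environment.yml when present
    username=None,
    password=None,
    files=[],
)                                 # writes ./autograder.zip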
| 38.792746 | 110 | 0.631762 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,746 | 0.366769 |