blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 213 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 246 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9498b785c929e3d32d42560f88c9a3cb94906f44 | a4c4fdab8dfe1d2ec0668f40a1a7494584995bc8 | /ticket_sales/migrations/0005_auto__add_field_product_ordering.py | d0cd668d271369d3a0b9149c2de94b40b362bc17 | [] | no_license | japsu/lipunmyynti | fcf87f03ade10f17cb65f48bf6f1b71ca2dc48f1 | 521754b8136bc4e634866776e35217589db31dae | refs/heads/master | 2016-08-04T08:14:33.098651 | 2014-01-03T14:10:50 | 2014-01-03T14:10:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,601 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration 0005: add the integer `ordering` column to Product."""
    def forwards(self, orm):
        # Adding field 'Product.ordering'
        # default=0 with keep_default=False: existing rows get 0, but the
        # default is not kept on the database column afterwards.
        db.add_column('ticket_sales_product', 'ordering',
                      self.gf('django.db.models.fields.IntegerField')(default=0),
                      keep_default=False)
    def backwards(self, orm):
        # Deleting field 'Product.ordering'
        db.delete_column('ticket_sales_product', 'ordering')
    # Frozen ORM snapshot: South uses this dict (not the live models.py) to
    # reconstruct the model state at the time of this migration.
    models = {
        'ticket_sales.batch': {
            'Meta': {'object_name': 'Batch'},
            'create_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'delivery_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'prepare_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'print_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
        },
        'ticket_sales.customer': {
            'Meta': {'object_name': 'Customer'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '5'})
        },
        'ticket_sales.order': {
            'Meta': {'object_name': 'Order'},
            'batch': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ticket_sales.Batch']", 'null': 'True', 'blank': 'True'}),
            'cancellation_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'confirm_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'customer': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['ticket_sales.Customer']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ip_address': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
            'payment_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'school': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ticket_sales.School']", 'null': 'True', 'blank': 'True'}),
            'start_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'ticket_sales.orderproduct': {
            'Meta': {'object_name': 'OrderProduct'},
            'count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'order_product_set'", 'to': "orm['ticket_sales.Order']"}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'order_product_set'", 'to': "orm['ticket_sales.Product']"})
        },
        'ticket_sales.product': {
            'Meta': {'object_name': 'Product'},
            'available': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ilmoitus_mail': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'internal_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'mail_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'ordering': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'price_cents': ('django.db.models.fields.IntegerField', [], {}),
            'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'sell_limit': ('django.db.models.fields.IntegerField', [], {})
        },
        'ticket_sales.school': {
            'Meta': {'object_name': 'School'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'max_people': ('django.db.models.fields.IntegerField', [], {}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'priority': ('django.db.models.fields.IntegerField', [], {}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ticket_sales.Product']"})
        }
    }
complete_apps = ['ticket_sales'] | [
"santtu@pajukanta.fi"
] | santtu@pajukanta.fi |
5fb283cf5046eb8fcd9be60b9db8a6c48a1d8373 | e2fe15caa1d179e92fef23d3083e84090caa887b | /CodingChallenge/Haley_process.py | df76facb661d33d53ceafe65bf8d35131cf9210b | [] | no_license | brentporter/SimpleVueJSLeafletMap | 5587990370b13049cef37fa865bf03e98346a3b6 | 61758d9e7dd746519524987a84dfffd0946ca0b8 | refs/heads/master | 2020-05-31T18:19:09.794989 | 2019-08-02T14:31:01 | 2019-08-02T14:31:01 | 190,430,985 | 1 | 2 | null | 2019-06-20T14:19:45 | 2019-06-05T16:33:46 | Vue | UTF-8 | Python | false | false | 5,294 | py |
#
# Haley Sanchez
# MAGIC - Flood Response
#
def _read_grid(path):
    """Read one Landsat-8 TIRS Kelvin grid file.

    Returns (header_lines, grid) where `header_lines` are the first 6 lines
    verbatim and `grid` is a list of rows, each a list of Kelvin value
    strings.  The row count is the 10th whitespace token of the second
    header line (matching the original parsing).
    """
    src = open(path, "r")
    header = [src.readline() for _ in range(6)]
    n_rows = int(header[1].split(" ")[9].split("\n")[0])
    grid = []
    for _ in range(n_rows):
        row = src.readline().split(" ")
        row.pop()  # drop the empty token produced by the trailing space
        grid.append(row)
    src.close()
    return header, grid

def _write_grid(path, header, grid, convert):
    """Write the 6 header lines, then every value through `convert`, one
    decimal place, space-separated with a trailing space per value."""
    out = open(path, "w")
    for line in header:
        out.write(line)
    for row in grid:
        for kelvin in row:
            out.write("%.1f " % convert(float(kelvin)))
        out.write("\n")
    out.close()

def _kelvin_to_celsius(kelvin):
    return kelvin - 273.15

def _kelvin_to_fahrenheit(kelvin):
    return ((kelvin - 273.15) * (9 / 5)) + 32

def _report(name, grid, first):
    """Print min/max/average temperatures in Celsius and Fahrenheit.

    Fahrenheit figures are intentionally derived from the *rounded*
    Celsius values, reproducing the original report's numbers.
    """
    values = [float(t) for row in grid for t in row]
    # Numeric min/max fixes the original's lexicographic string sort,
    # which is only correct while all values have the same digit count.
    min_k = min(values)
    max_k = max(values)
    avg_k = sum(values) / len(values)
    avgC = avg_k - 273.15
    avgF = (round(avgC, 1) * (9 / 5)) + 32
    minC = min_k - 273.15
    maxC = max_k - 273.15
    minF = ((9 / 5) * round(minC, 1)) + 32
    maxF = ((9 / 5) * round(maxC, 1)) + 32
    prefix = "" if first else "\n"
    print("%sFor '%s':" % (prefix, name))
    print("The min Celsius temperature: %.1f Celsius\nThe max Celsius temperature: %.1f Celsius" % (minC, maxC))
    print("The average Celsius temperature: %.1f Celsius" % (avgC))
    print("The min Fahrenheit temperature: %.1f Fahrenheit\nThe max Fahrenheit temperature: %.1f Fahrenheit" % (minF, maxF))
    print("The average Fahrenheit temperature: %.1f Fahrenheit" % (avgF))

# The original script repeated the same read/convert/report pipeline three
# times, once per acquisition date; a single loop now covers all scenes.
_first = True
for _date in ("20181007", "20190127", "20181108"):
    _k_name = "paradiseca_l8_tirs_k_%s.txt" % _date
    _header, _grid = _read_grid(_k_name)
    _write_grid("paradiseca_l8_tirs_c_%s.txt" % _date, _header, _grid, _kelvin_to_celsius)
    _report(_k_name, _grid, _first)
    _write_grid("paradiseca_l8_tirs_f_%s.txt" % _date, _header, _grid, _kelvin_to_fahrenheit)
    _first = False
| [
"noreply@github.com"
] | brentporter.noreply@github.com |
cb622aa0277ba77f2a376d872264b679a43a34b7 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_116/502.py | 2e58e1cfd13b9f3b2dbd39e4bbfbd77517c7ce21 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 665 | py | from itertools import chain
def status(cube):
    """Classify a 4x4 Tic-Tac-Toe-Tomek board given as newline-joined rows.

    A line (row, column or diagonal) wins for a player when it is full,
    contains at most one 'T' joker, and holds none of the opponent's marks.
    """
    diag = (cube[0], cube[6], cube[12], cube[18])
    anti = (cube[3], cube[7], cube[11], cube[15])
    rows = cube.split('\n')
    for line in chain((diag, anti), rows, zip(*rows)):
        if '.' in line:
            continue  # incomplete line cannot be a win
        if line.count('T') >= 2:
            continue  # more than one joker never wins
        if 'X' not in line:
            return "O won"
        if 'O' not in line:
            return "X won"
    return "Game has not completed" if '.' in cube else "Draw"
# Drop the first line (the number of test cases); keep the rest.
# NOTE(review): `file` shadows the Python 2 builtin of the same name.
file=open(r"C:\Users\user\Downloads\A-large.in").read().split('\n',1)[1]
# Boards are separated by blank lines.
cubes=file.split('\n\n')
for i, cube in enumerate(cubes):
    print("Case #"+str(i+1)+":", status(cube))
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
5ffd130c93b0c35879f63a38d729d11ec0e3f05e | a97bc5f7012739c3a9c46257ccb36bea544868ab | /class/Inventory.py | 6a0af87df37a12f93c88ec3f1fec6ea93b6824f2 | [] | no_license | Cyber-code/Projet_python | 27d6cdee9c49e801b0ea4d1b10c2d3a3956535b3 | 48534c5fc26e5663346559729b44d4fbc39b2855 | refs/heads/master | 2023-04-30T11:29:58.158747 | 2019-12-13T16:26:48 | 2019-12-13T16:26:48 | 220,233,420 | 0 | 0 | null | 2023-04-21T20:44:08 | 2019-11-07T12:35:47 | Python | UTF-8 | Python | false | false | 1,217 | py | class Inventory:
"""
Inventory class instantiate a inventory object which is a character's caracteristic.
It contains the list of objects, amount of gold, armor slot, jewels slot and weapon slot.
"""
def __init__(self, objects=[], gold=0, leftHand=None, rigthHand=None, jewel1=None, jewel2=None, headArmor=None, chestArmor=None, armsArmor=None, legsArmor=None, feetArmor=None):
self.objects = [] # objects is a list of weapons, jewels, armors and consumables
self.gold = gold
self.weapon = {"leftHand": leftHand, "rightHand": rigthHand} # 2 Slots for weapons in each hand
self.jewels = {"jewel1": jewel1, "jewel2": jewel2} # 2 Slots for jewels
self.armor = {"head": headArmor, "chest": chestArmor, "arms": armsArmor, "legs": legsArmor, "feet": feetArmor} # 5 Slots for armor => head, chest, arms, legs, feet
def showInfo(self):
""" Return a string which are precised parameters of the consumable object. """
return "\nInventory" + "\nGold: "+str(self.gold)+" coins" + "\nObjects: "+str(self.objects) +"\nWeapon: "+str(self.weapon) + "\nJewels: "+str(self.jewels)+" %" + "\nArmor: "+str(self.armor)+ "\n" | [
"nicolas.schmitt@isen.yncrea.fr"
] | nicolas.schmitt@isen.yncrea.fr |
2c9cffc45656b637e2ad575d403136f256c96aea | 8264b7b699ba0cfbb55b69e24de30dac974af59c | /kale/loaddata/video_multi_domain.py | 437888db345eababbad7cf802e3019a8b7d77edc | [
"MIT"
] | permissive | JulienYulinMa/pykale | 63e093c9dc41e9b9f86a56d9523d10908d34ce71 | 8177ad9a79b3f6d96b3538a34a67514cf1a82b0b | refs/heads/main | 2023-04-26T00:41:15.997369 | 2021-05-20T15:53:00 | 2021-05-20T15:53:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,447 | py | # =============================================================================
# Author: Xianyuan Liu, xianyuan.liu@sheffield.ac.uk or xianyuan.liu@outlook.com
# Haiping Lu, h.lu@sheffield.ac.uk or hplu@ieee.org
# =============================================================================
"""Construct a dataset for videos with (multiple) source and target domains"""
import logging
import numpy as np
from sklearn.utils import check_random_state
from kale.loaddata.multi_domain import DatasetSizeType, MultiDomainDatasets, WeightingType
from kale.loaddata.sampler import FixedSeedSamplingConfig, MultiDataLoader
from kale.loaddata.video_access import get_image_modality
class VideoMultiDomainDatasets(MultiDomainDatasets):
    def __init__(
        self,
        source_access_dict,
        target_access_dict,
        image_modality,
        seed,
        config_weight_type="natural",
        config_size_type=DatasetSizeType.Max,
        val_split_ratio=0.1,
        source_sampling_config=None,
        target_sampling_config=None,
        n_fewshot=None,
        random_state=None,
    ):
        """The class controlling how the source and target domains are iterated over when the input is joint.
        Inherited from MultiDomainDatasets.
        Args:
            source_access_dict (dictionary): dictionary of source RGB and flow dataset accessors
            target_access_dict (dictionary): dictionary of target RGB and flow dataset accessors
            image_modality (string): image type (RGB or Optical Flow)
            seed (int): seed value set manually.
            config_weight_type (string): class-weighting scheme ("natural", "balanced" or "preset0").
            config_size_type (DatasetSizeType): how the epoch length is derived from the dataset sizes.
            val_split_ratio (float): fraction of training data held out for validation.
            n_fewshot (int, optional): number of labeled target samples for semi-supervised setups.
            random_state: seed or RandomState used for the few-shot split.

        Note:
            ``source_sampling_config`` and ``target_sampling_config`` are accepted but never read
            here; the sampling configs are rebuilt below from ``config_weight_type``.
        """
        self._image_modality = image_modality
        self.rgb, self.flow = get_image_modality(self._image_modality)
        self._seed = seed
        # NOTE(review): when both modalities are requested, the second branch
        # overwrites `source_access`/`target_access`, so the flow accessors are
        # the ones whose class counts feed the PRESET0 branch below.
        if self.rgb:
            source_access = source_access_dict["rgb"]
            target_access = target_access_dict["rgb"]
        if self.flow:
            source_access = source_access_dict["flow"]
            target_access = target_access_dict["flow"]
        weight_type = WeightingType(config_weight_type)
        size_type = DatasetSizeType(config_size_type)
        if weight_type is WeightingType.PRESET0:
            # Fixed descending weights per source class; random target weights.
            self._source_sampling_config = FixedSeedSamplingConfig(
                class_weights=np.arange(source_access.n_classes(), 0, -1)
            )
            self._target_sampling_config = FixedSeedSamplingConfig(
                class_weights=np.random.randint(1, 4, size=target_access.n_classes())
            )
        elif weight_type is WeightingType.BALANCED:
            self._source_sampling_config = FixedSeedSamplingConfig(balance=True)
            self._target_sampling_config = FixedSeedSamplingConfig(balance=True)
        elif weight_type not in WeightingType:
            raise ValueError(f"Unknown weighting method {weight_type}.")
        else:
            # "natural" weighting: sample with the datasets' own class frequencies.
            self._source_sampling_config = FixedSeedSamplingConfig(seed=self._seed)
            self._target_sampling_config = FixedSeedSamplingConfig(seed=self._seed)
        self._source_access_dict = source_access_dict
        self._target_access_dict = target_access_dict
        self._val_split_ratio = val_split_ratio
        # Per-modality {split_name: dataset} caches filled by prepare_data_loaders().
        self._rgb_source_by_split = {}
        self._flow_source_by_split = {}
        self._rgb_target_by_split = {}
        self._flow_target_by_split = {}
        self._size_type = size_type
        self._n_fewshot = n_fewshot
        self._random_state = check_random_state(random_state)
        self._source_by_split = {}
        # Stays None for unsupervised DA; a {split: dataset} dict when labeled
        # target data is available (semi-supervised case).
        self._labeled_target_by_split = None
        self._target_by_split = {}
    def prepare_data_loaders(self):
        """Split each active modality into train/valid/test datasets and cache them."""
        if self.rgb:
            logging.debug("Load RGB train and val")
            (self._rgb_source_by_split["train"], self._rgb_source_by_split["valid"]) = self._source_access_dict[
                "rgb"
            ].get_train_val(self._val_split_ratio)
            (self._rgb_target_by_split["train"], self._rgb_target_by_split["valid"]) = self._target_access_dict[
                "rgb"
            ].get_train_val(self._val_split_ratio)
            logging.debug("Load RGB Test")
            self._rgb_source_by_split["test"] = self._source_access_dict["rgb"].get_test()
            self._rgb_target_by_split["test"] = self._target_access_dict["rgb"].get_test()
        if self.flow:
            logging.debug("Load flow train and val")
            (self._flow_source_by_split["train"], self._flow_source_by_split["valid"]) = self._source_access_dict[
                "flow"
            ].get_train_val(self._val_split_ratio)
            (self._flow_target_by_split["train"], self._flow_target_by_split["valid"]) = self._target_access_dict[
                "flow"
            ].get_train_val(self._val_split_ratio)
            logging.debug("Load flow Test")
            self._flow_source_by_split["test"] = self._source_access_dict["flow"].get_test()
            self._flow_target_by_split["test"] = self._target_access_dict["flow"].get_test()
    def get_domain_loaders(self, split="train", batch_size=32):
        """Build a MultiDataLoader over `split` combining every active modality/domain."""
        rgb_source_ds = rgb_target_ds = flow_source_ds = flow_target_ds = None
        rgb_source_loader = rgb_target_loader = flow_source_loader = flow_target_loader = None
        rgb_target_labeled_loader = flow_target_labeled_loader = None
        rgb_target_unlabeled_loader = flow_target_unlabeled_loader = n_dataset = None
        if self.rgb:
            rgb_source_ds = self._rgb_source_by_split[split]
            rgb_source_loader = self._source_sampling_config.create_loader(rgb_source_ds, batch_size)
            rgb_target_ds = self._rgb_target_by_split[split]
        if self.flow:
            flow_source_ds = self._flow_source_by_split[split]
            flow_source_loader = self._source_sampling_config.create_loader(flow_source_ds, batch_size)
            flow_target_ds = self._flow_target_by_split[split]
        if self._labeled_target_by_split is None:
            # unsupervised target domain
            # NOTE(review): with both modalities active, n_dataset is computed
            # twice and the flow value wins (second branch overwrites it).
            if self.rgb:
                rgb_target_loader = self._target_sampling_config.create_loader(rgb_target_ds, batch_size)
                n_dataset = DatasetSizeType.get_size(self._size_type, rgb_source_ds, rgb_target_ds)
            if self.flow:
                flow_target_loader = self._target_sampling_config.create_loader(flow_target_ds, batch_size)
                n_dataset = DatasetSizeType.get_size(self._size_type, flow_source_ds, flow_target_ds)
            dataloaders = [rgb_source_loader, flow_source_loader, rgb_target_loader, flow_target_loader]
            dataloaders = [x for x in dataloaders if x is not None]
            return MultiDataLoader(dataloaders=dataloaders, n_batches=max(n_dataset // batch_size, 1),)
        else:
            # semi-supervised target domain
            if self.rgb:
                rgb_target_labeled_ds = self._labeled_target_by_split[split]
                rgb_target_unlabeled_ds = rgb_target_ds
                # label domain: always balanced
                rgb_target_labeled_loader = FixedSeedSamplingConfig(balance=True, class_weights=None).create_loader(
                    rgb_target_labeled_ds, batch_size=min(len(rgb_target_labeled_ds), batch_size)
                )
                rgb_target_unlabeled_loader = self._target_sampling_config.create_loader(
                    rgb_target_unlabeled_ds, batch_size
                )
                n_dataset = DatasetSizeType.get_size(
                    self._size_type, rgb_source_ds, rgb_target_labeled_ds, rgb_target_unlabeled_ds
                )
            if self.flow:
                flow_target_labeled_ds = self._labeled_target_by_split[split]
                flow_target_unlabeled_ds = flow_target_ds
                flow_target_labeled_loader = FixedSeedSamplingConfig(balance=True, class_weights=None).create_loader(
                    flow_target_labeled_ds, batch_size=min(len(flow_target_labeled_ds), batch_size)
                )
                flow_target_unlabeled_loader = self._target_sampling_config.create_loader(
                    flow_target_unlabeled_ds, batch_size
                )
                n_dataset = DatasetSizeType.get_size(
                    self._size_type, rgb_source_ds, flow_target_labeled_ds, flow_target_unlabeled_ds
                )
            # combine loaders into a list and remove the loader which is NONE.
            dataloaders = [
                rgb_source_loader,
                flow_source_loader,
                rgb_target_labeled_loader,
                flow_target_labeled_loader,
                rgb_target_unlabeled_loader,
                flow_target_unlabeled_loader,
            ]
            dataloaders = [x for x in dataloaders if x is not None]
            return MultiDataLoader(dataloaders=dataloaders, n_batches=max(n_dataset // batch_size, 1))
    def __len__(self):
        """Epoch length derived from the training datasets via DatasetSizeType."""
        # NOTE(review): with both modalities active, the flow datasets win the
        # assignments below and determine the reported length.
        if self.rgb:
            source_ds = self._rgb_source_by_split["train"]
            target_ds = self._rgb_target_by_split["train"]
        if self.flow:
            source_ds = self._flow_source_by_split["train"]
            target_ds = self._flow_target_by_split["train"]
        if self._labeled_target_by_split is None:
            return DatasetSizeType.get_size(self._size_type, source_ds, target_ds)
        else:
            labeled_target_ds = self._labeled_target_by_split["train"]
            return DatasetSizeType.get_size(self._size_type, source_ds, labeled_target_ds, target_ds)
| [
"xianyuan.liu@sheffield.ac.uk"
] | xianyuan.liu@sheffield.ac.uk |
91ae4c7ce6a2bcc0bb06688c6407a3b68a6f9a4b | 61704c2c92d650f311447d4432a2acfd4d09c4af | /main.py | 184cd95e1bc4d3a57559b8514b3b68cbfb548521 | [] | no_license | AllenJShi/WeChat | 96e78ce73e7874009774d589555a898359b92661 | e0194aeab0973619eb9aec87068404729d5336b1 | refs/heads/master | 2022-12-03T21:12:15.636983 | 2020-08-23T03:40:38 | 2020-08-23T03:40:38 | 288,250,251 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 290 | py | # -*- coding: utf-8 -*-
# filename: main.py
import web
from handle import Handle
# web.py URL routing table: requests to /wx are dispatched to the
# Handle class imported above.
urls = (
    '/wx', 'Handle',
)
class Test(object):
    """Minimal handler used to sanity-check that responses are served."""
    def GET(self):
        # Same fixed body for every GET request routed here.
        message = "hello, this is handle view"
        return message
if __name__ == '__main__':
    # Build the web.py application from the routing table (handler names are
    # resolved against globals()) and start its built-in server.
    app = web.application(urls, globals())
    app.run()
| [
"allenjunsheng@gmail.com"
] | allenjunsheng@gmail.com |
2c63152886a759572313fc66b29e71496024554a | 6470dc699cc8ddb117d0c949dc839cf15f8b5f3e | /Variable correlation.py | 8d0282fd788e38481aebf9acc8744ab83ed5b82c | [] | no_license | JiahaoChen177/final-project | 21ebba02c3b348d7772419a6a06fb9fc63321d9c | a4fd7d68c5f4dcd7463e00ae9f38f40304cdf2e4 | refs/heads/main | 2023-04-15T08:11:37.554916 | 2021-04-30T16:36:53 | 2021-04-30T16:36:53 | 363,191,304 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 346 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Apr 13 20:36:43 2021
@author: Jiahao Chen
"""
import pandas as pd
import seaborn as sns
# Correlation heatmap of the ion/pH columns in NTN-data.csv
# (presumably National Trends Network deposition data -- confirm).
data = pd.read_csv(r'NTN-data.csv')
data.head()  # NOTE(review): return value discarded; only useful interactively
data_var = data[['Ca', 'Mg', 'K', 'Na', 'NH4', 'NO3', 'Cl', 'SO4', 'pH']]
data_var  # NOTE(review): bare expression; only meaningful in a notebook
sns.heatmap(data_var.corr(),vmin=-1, vmax=1, center=0, cbar=True)
| [
"noreply@github.com"
] | JiahaoChen177.noreply@github.com |
fe1346157b25fcdbfcec8afd8992c39c27b4810f | 54abd5a6dc3607b16f7f9b872bd230a8d50c390c | /Python2.7/HTML/HTML.py | 8c1fc63ce42a69fd2dd036f47e57ed2d4b8d34e7 | [] | no_license | zym1115718204/mygit | a7c57cf93d0975ce9f1c1388a81924bfaa8f80db | 428b70d688080768f5bc7b8b80215661dc829454 | refs/heads/master | 2021-01-21T12:53:44.237682 | 2016-04-18T11:23:00 | 2016-04-18T11:23:00 | 55,469,658 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 926 | py | #!/user/bin/python
# -*- coding: utf-8 -*-
#filename:HTML
from HTMLParser import HTMLParser
from htmlentitydefs import name2codepoint
class MyHTMLParser(HTMLParser):
    """Parser that echoes each markup event it encounters to stdout."""
    def handle_starttag(self, tag, attrs):
        # Opening tag such as <body>; the attribute list is ignored.
        print('<' + tag + '>')
    def handle_endtag(self, tag):
        print('</' + tag + '>')
    def handle_startendtag(self, tag, attrs):
        # Self-closing tag such as <br/>.
        print('<' + tag + '/>')
    def handle_data(self, data):
        # Raw text between tags, emitted verbatim.
        print(data)
    def handle_comment(self, data):
        # The comment's contents are discarded; only a marker is printed.
        print('<!-- -->')
    def handle_entityref(self, name):
        # Named entity reference, e.g. &amp; -> prints "&amp;".
        print('&' + name + ';')
    def handle_charref(self, name):
        # Numeric character reference, e.g. &#64;.
        print('&#' + name + ';')
parser = MyHTMLParser()
# Read the document to parse from disk (earlier test input kept commented).
#with open('html2.htm','r') as f:
with open('FengSensor.html','r') as f:
    html=f.read()
# Inline sample document; currently unused (parser.feed below uses `html`).
html2='''<html>i'm head</head><body><p>i'm body
<a href=\"#\">html</a> i'm tutorial...<br>i'm END</p></body></html>
'''
parser.feed(html)  # the handler methods print each event as it is parsed
print 'OK'  # Python 2 print statement
| [
"zym1115718204@163.com"
] | zym1115718204@163.com |
59f762dbd740848498df3ffc2fe417d5410db50b | 2898b1102ae90223490ea239d8683130e2c334de | /python/src/sample1.py | b779b9eabc140870bbcf114d9cb5030061476010 | [] | no_license | kyukio413/ex21 | b621096c56a4ed4d48334262a9c0682abfeb2279 | c8902f73fd8ba8d5cab4c36f92118412a05d21d4 | refs/heads/master | 2023-03-28T04:40:47.317428 | 2021-04-07T04:10:04 | 2021-04-07T04:10:04 | 355,387,475 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 169 | py | import matplotlib.pyplot as plt
import numpy as np
# Sample points 0, 0.5, ..., 99.5; the 100-unit span is treated as one window.
x = np.arange(0, 100, 0.5)
Hz = 5.  # number of full sine cycles across the 100-unit window
y = np.sin(2.0 * np.pi * (x * Hz) / 100)
plt.plot(x, y)
plt.savefig('test.png')  # write the figure to disk instead of displaying it
| [
"dzungarian@gmail.com"
] | dzungarian@gmail.com |
e0f989552808dac805e96ac320457b7df3f0e941 | 71123cfce3fccb7a1b8038e9e999bc4dc3d13a53 | /src/ast.py | 58bf6d5ff2a4a2bd6898f338a5a942c80ca64e36 | [] | no_license | fred-direne/ecom06_compilador | 38de1a3de4ccab45e30d03a2b63ce01b814e1990 | fe72c8dfb1944a610bef4e30d4465abb0d99b20e | refs/heads/master | 2022-03-01T20:37:23.054028 | 2019-10-28T14:28:22 | 2019-10-28T14:28:22 | 213,926,902 | 0 | 0 | null | 2022-09-09T19:46:39 | 2019-10-09T13:31:31 | Python | UTF-8 | Python | false | false | 7,578 | py | from src.errors import *
class Programa():
    """Root of the AST: an ordered sequence of top-level statements."""
    def __init__(self, statement):
        # The sequence starts with the statement parsed last.
        self.statements = [statement]
    def add_statement(self, statement):
        # The parser builds the program back-to-front, so prepend.
        self.statements.insert(0, statement)
    def eval(self, env):
        """Evaluate every statement in order and return the last result."""
        result = None
        for stmt in self.statements:
            result = stmt.eval(env)
        return result
    def get_statements(self):
        return self.statements
class Booleano():
    """Boolean literal node."""
    def __init__(self, value):
        self.value = value
    def eval(self, env):
        # Coerce whatever token the parser stored into a real bool.
        return bool(self.value)
class Numero():
    """Integer literal node."""
    def __init__(self, value):
        self.value = value
    def eval(self, env):
        return int(self.value)
class Real():
    """Floating-point literal node."""
    def __init__(self, value):
        self.value = value
    def eval(self, env):
        return float(self.value)
class Caracter():
    """Character literal node; strips the surrounding single quotes."""
    def __init__(self, value):
        self.value = value
    def eval(self, env):
        text = str(self.value)
        return text.strip("'")
class String():
    """String literal node; strips the surrounding double quotes."""
    def __init__(self, value):
        self.value = value
    def eval(self, env):
        text = str(self.value)
        return text.strip("\"")
class OpBinario():
    """Base for binary nodes: keeps the two unevaluated operand subtrees."""
    def __init__(self, left, right):
        self.left = left
        self.right = right
class Block():
    """A statement sequence (conditional/loop body); mirrors Programa."""
    def __init__(self, statement):
        self.statements = [statement]
    def add_statement(self, statement):
        # Built back-to-front by the parser, so prepend.
        self.statements.insert(0, statement)
    def eval(self, env):
        """Evaluate each statement in order; the last result is returned."""
        result = None
        for stmt in self.statements:
            result = stmt.eval(env)
        return result
    def get_statements(self):
        return self.statements
class Soma(OpBinario):
    """Addition; left operand is evaluated first."""
    def eval(self, env):
        lhs = self.left.eval(env)
        rhs = self.right.eval(env)
        return lhs + rhs
class Subtracao(OpBinario):
    """Subtraction."""
    def eval(self, env):
        lhs = self.left.eval(env)
        rhs = self.right.eval(env)
        return lhs - rhs
class Multiplicacao(OpBinario):
    """Multiplication."""
    def eval(self, env):
        lhs = self.left.eval(env)
        rhs = self.right.eval(env)
        return lhs * rhs
class Divisao(OpBinario):
    """Division."""
    def eval(self, env):
        lhs = self.left.eval(env)
        rhs = self.right.eval(env)
        return lhs / rhs
class Resto(OpBinario):
    """Modulo (division remainder)."""
    def eval(self, env):
        lhs = self.left.eval(env)
        rhs = self.right.eval(env)
        return lhs % rhs
class Menor(OpBinario):
    """Less-than comparison."""
    def eval(self, env):
        lhs = self.left.eval(env)
        rhs = self.right.eval(env)
        return lhs < rhs
class Maior(OpBinario):
    """Greater-than comparison."""
    def eval(self, env):
        lhs = self.left.eval(env)
        rhs = self.right.eval(env)
        return lhs > rhs
class MenorOuIgual(OpBinario):
    """Less-than-or-equal comparison."""
    def eval(self, env):
        lhs = self.left.eval(env)
        rhs = self.right.eval(env)
        return lhs <= rhs
class MaiorOuIgual(OpBinario):
    """Greater-than-or-equal comparison."""
    def eval(self, env):
        lhs = self.left.eval(env)
        rhs = self.right.eval(env)
        return lhs >= rhs
class Igual(OpBinario):
    """Equality comparison."""
    def eval(self, env):
        lhs = self.left.eval(env)
        rhs = self.right.eval(env)
        return lhs == rhs
class Diferente(OpBinario):
    """Inequality comparison."""
    def eval(self, env):
        lhs = self.left.eval(env)
        rhs = self.right.eval(env)
        return lhs != rhs
class And(OpBinario):
    """Logical AND; short-circuits, so the right side may never be evaluated."""
    def eval(self, env):
        lhs = self.left.eval(env)
        return lhs and self.right.eval(env)
class Or(OpBinario):
    """Logical OR; short-circuits, so the right side may never be evaluated."""
    def eval(self, env):
        lhs = self.left.eval(env)
        return lhs or self.right.eval(env)
class Not():
    """Logical negation of a sub-expression."""
    def __init__(self, value):
        self.value = value
    def eval(self, env):
        inner = self.value.eval(env)
        return not inner
class Print():
    """Evaluates its expression and writes the result to stdout; yields None."""
    def __init__(self, value):
        self.value = value
    def eval(self, env):
        result = self.value.eval(env)
        print(result)
class If():
    """Conditional without an else branch; yields None when the test fails."""
    def __init__(self, condition, body):
        self.condition = condition
        self.body = body
    def eval(self, env):
        # Condition is evaluated exactly once.
        if self.condition.eval(env):
            return self.body.eval(env)
class IfElse():
    """Conditional with both branches; exactly one branch is evaluated."""
    def __init__(self, condition, body, else_body):
        self.condition = condition
        self.body = body
        self.else_body = else_body
    def eval(self, env):
        branch = self.body if self.condition.eval(env) else self.else_body
        return branch.eval(env)
class While():
    """Pre-tested loop; always evaluates to None."""
    def __init__(self, condition, body):
        self.condition = condition
        self.body = body
    def eval(self, env):
        while True:
            if not self.condition.eval(env):
                break
            self.body.eval(env)
class For():
    """C-style for loop: init once, then condition/body/increment cycle."""
    def __init__(self, atrib, condition, increment, body):
        self.atrib = atrib
        self.condition = condition
        self.increment = increment
        self.body = body
    def eval(self, env):
        self.atrib.eval(env)
        while True:
            if not self.condition.eval(env):
                break
            self.body.eval(env)
            self.increment.eval(env)
class Variavel():
    """Symbol-table entry: a name, its declared type and its value node."""
    def __init__(self, nome, tipo, value):
        self.nome = nome
        self.tipo = tipo
        self.value = value
    def eval(self, env):
        entry = env.variables.get(self.nome)
        if entry is None:
            raise LogicError("Not yet defined")
        # Cache the freshly evaluated value before handing it back.
        self.value = entry.eval(env)
        return self.value
class VariavelExpressao():
    """Use of a variable inside an expression: resolves the name via env."""
    def __init__(self, nome):
        self.nome = nome
    def eval(self, env):
        entry = env.variables.get(self.nome)
        if entry is None:
            raise LogicError("Variavel nao definida")
        return entry.value.eval(env)
class Declaracao():
def __init__(self, nome, tipo, value):
self.nome = nome
self.tipo = tipo
self.value = value
def eval(self, env):
if env.variables.get(self.nome) is None:
if self.tipo == "int":
env.variables[self.nome] = Variavel(nome=self.nome, tipo=self.tipo, value=Numero(self.value))
elif self.tipo == "float":
env.variables[self.nome] = Variavel(nome=self.nome, tipo=self.tipo, value=Real(self.value))
elif self.tipo == "char":
env.variables[self.nome] = Variavel(nome=self.nome, tipo=self.tipo, value=Caracter(self.value))
else:
raise LogicError("Variavel ja foi definida")
class Atribuicao(OpBinario):
    # Assignment statement.  Inherits `left`/`right` from OpBinario
    # (defined elsewhere): `left` is the target variable *name* (used as
    # a dict key), `right` is an expression node.
    def eval(self, env):
        if env.variables.get(self.left) is not None:
            # Wrap the evaluated RHS in the node type matching the
            # variable's declared static type.
            if env.variables[self.left].tipo == "int":
                env.variables[self.left].value = Numero(self.right.eval(env))
            elif env.variables[self.left].tipo == "float":
                env.variables[self.left].value = Real(self.right.eval(env))
            elif env.variables[self.left].tipo == "char":
                env.variables[self.left].value = Caracter(self.right.eval(env))
            # Returns the freshly stored value node.
            return env.variables[self.left].value
        else:
            raise LogicError("Variavel nao definida")
class Read():
    # Read statement: reads a value from stdin and stores it into an
    # already-declared variable, converted per its declared type.
    def __init__(self, nome):
        self.nome = nome
    def eval(self, env):
        v = env.variables.get(self.nome)
        if v is not None:
            # NOTE(review): under Python 2 input() *evaluates* the typed
            # text (raw_input() is the safe reader) -- confirm which
            # Python version this interpreter targets.
            valorlido = input()
            if v.tipo == "int":
                env.variables[self.nome].value = Numero(int(valorlido))
                return Numero(int(valorlido))
            elif v.tipo == "float":
                env.variables[self.nome].value = Real(float(valorlido))
                return Real(float(valorlido))
            elif v.tipo == "char":
                env.variables[self.nome].value = Caracter(str(valorlido))
                return Caracter(str(valorlido))
        else:
            raise LogicError("Variavel nao existe")
"leonardocosta013@gmail.com"
] | leonardocosta013@gmail.com |
228e05a08f5d3258187f846828e169149c0f49e7 | 3099ac2fa4a4b16ea35dbf6faef03351b7551430 | /server/src/component/level/CharacterLevelComponent.py | 3c95f3555d289afc97ba675ffe4b8397b32090ee | [] | no_license | freedream520/sanguogame | a63fad15aaedeefd412515082409e4dd04858755 | 1019a8f22fd6b96e33d61fd3a5ada59e72f312cb | refs/heads/master | 2021-01-14T11:38:59.954478 | 2014-06-30T16:07:37 | 2014-06-30T16:07:37 | 25,525,558 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,040 | py | #coding:utf8
'''
Created on 2009-12-2
@author: wudepeng
'''
import math
from net.MeleeSite import pushMessage
from component.Component import Component
from util import dbaccess
from util.DataLoader import loader
class CharacterLevelComponent(Component):
    '''
    Level/experience component for a player character.

    Tracks level, banked experience and the double-experience chance,
    and performs (at most one) level-up per updateLevel() call,
    persisting the new stats to the database.
    '''
    def __init__(self, owner, level = 10, exp = 0):
        '''
        Constructor
        '''
        Component.__init__(self, owner)
        self._level = level #player level
        self._exp = exp #accumulated experience points
        self._twiceExp = 0#chance of earning double experience
        self.MAXLEVEL = 40#level cap
    def getMaxExp(self):
        '''Compute the experience needed to complete the current level.'''
        # NOTE(review): `y` is computed and clamped but never used below --
        # looks like leftover from an older formula; confirm before removing.
        y = int(1 + math.pow((self._level - 60) / 10, 2))
        if(y < 1):
            y = 1
        maxExp = 100 + 60 * (self._level - 1) + 10 * self._level * (self._level + 1) * (self._level - 1)
        return int(maxExp)
    def getLevel(self):
        return self._level
    def setLevel(self, level):
        self._level = level
    def getExp(self):
        return self._exp
    def setExp(self, exp):
        self._exp = exp
    def getTwiceExp(self):
        return self._twiceExp
    def setTwiceExp(self, twiceExp):
        self._twiceExp = twiceExp
    def updateLevel(self):
        '''Level the character up (at most once) based on banked experience.

        Returns True when a level-up happened, False otherwise.  At the
        level cap the banked experience is reset to 0 and persisted.
        '''
        if self._level >= self.MAXLEVEL:
            dbaccess.updatePlayerInfo(self._owner.baseInfo.id, {'exp' : 0})
            self._exp = 0
            return False
        id = self._owner.baseInfo.id
        sparePoint = self._owner.attribute.getSparePoint()
        baseStr = self._owner.attribute.getBaseStr()
        baseVit = self._owner.attribute.getBaseVit()
        baseDex = self._owner.attribute.getBaseDex()
        professionId = self._owner.profession.getProfession()
        maxExp = self.getMaxExp()
        if(self._exp >= maxExp):
            self._level += 1
            self._exp -= maxExp
            sparePoint += 1
            # Per-level attribute growth comes from the profession table.
            profession = loader.getById('profession', professionId, '*')
            baseStr += profession["perLevelStr"]
            baseVit += profession["perLevelVit"]
            baseDex += profession["perLevelDex"]
            maxHp = self._owner.attribute.getMaxHp(professionId, id, self._level)
            maxMp = self._owner.attribute.getMaxMp(professionId, id, self._level)
            # Persist the new level and refill HP/MP to the new maxima.
            dbaccess.updatePlayerInfo(id, {'level' : self._level, 'exp' : self._exp,
                                           'sparepoint' : sparePoint, 'baseStr' : baseStr,
                                           'baseDex' : baseDex, 'baseVit' : baseVit,
                                           'hp' : maxHp, 'mp' : maxMp})
            self._owner.attribute.setSparePoint(sparePoint)
            self._owner.attribute.setBaseStr(baseStr)
            self._owner.attribute.setBaseVit(baseVit)
            self._owner.attribute.setBaseDex(baseDex)
            pushMessage(str(self._owner.baseInfo.id), 'updataLevel')
            return True
        else:
            return False
| [
"shadowinlife@gmail.com"
] | shadowinlife@gmail.com |
8dd5effacf8a5cd73146dd427ec175f35bcd0883 | bd1e283b66ea5c89d40a048e48a0f175c0062348 | /railroad/modes/movetrackobjectmode.py | 66bb5f8e80d479eb0198ec8f3244e9d52f1ed558 | [] | no_license | boberstarosta/railroad | 76e8de50415bc27e8cd38ab9e5435fdb5fd8a437 | 3c5c1ff1651d663f2374bdb1c95e86ebf2b7a21b | refs/heads/master | 2020-03-30T05:12:30.627984 | 2018-10-06T15:33:08 | 2018-10-06T15:33:08 | 150,786,785 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,538 | py |
import pyglet
from .. import geometry
from .basemode import BaseMode
class MoveTrackObjectMode(BaseMode):
    # Editor mode: left-drag moves an existing track object along the track.
    name = "Move track object"
    def __init__(self, app):
        super().__init__(app)
        # Track object currently being dragged (None when idle).
        self.active_to = None
    def apply_position(self, mouse, to):
        # Snap the dragged object onto the nearest track segment under the
        # cursor (within self.search_radius, provided by BaseMode).
        # NOTE(review): the `to` parameter is never used -- the body
        # operates on self.active_to; confirm whether `to` should be used.
        nearest_segment = self.app.network.get_nearest_track_segment(mouse, max_distance=self.search_radius)
        if nearest_segment is not None:
            # Parametric position (0..1) of the cursor along the segment.
            nearest_t = geometry.nearest_t_on_line(
                mouse,
                nearest_segment.nodes[0].position,
                nearest_segment.nodes[1].position,
            )
            if 0 <= nearest_t < 1:
                # Move by delete + reconstruct on the new segment,
                # restoring the rotation flag afterwards.
                to_class = type(self.active_to)
                rotated = self.active_to.rotated
                self.active_to.delete()
                self.active_to = to_class(self.app.network, nearest_segment, nearest_t)
                self.active_to.rotated = rotated
    def on_mouse_drag(self, x, y, dx, dy, buttons, modifiers):
        mouse = self.app.camera.to_world(x, y)
        if buttons & pyglet.window.mouse.LEFT:
            if self.active_to is not None:
                self.apply_position(mouse, self.active_to)
    def on_mouse_press(self, x, y, buttons, modifiers):
        # Pick up the nearest track object under the cursor (may be None).
        if buttons & pyglet.window.mouse.LEFT:
            mouse = self.app.camera.to_world(x, y)
            self.active_to = self.app.network.get_nearest_track_object(mouse, max_distance=self.search_radius)
    def on_mouse_release(self, x, y, buttons, modifiers):
        self.active_to = None
| [
"boberstarosta@gmail.com"
] | boberstarosta@gmail.com |
d98c762be03d1b8666775fff3909b4bf759ac3f0 | 9fb9eedff043bf706e34df741f91e91fbe7eadb5 | /sandbox.py | e36a68b9dd85dfddf0b018b47670eba4fda62b41 | [] | no_license | zhengyuyu/sandbox | 2bbde89b88cab2910a0eeb9c661d9bfa7cd40db6 | a10ccedb95eb8341f894a1e90a49adf008d3dbef | refs/heads/master | 2021-01-18T14:10:20.943286 | 2013-09-17T21:26:32 | 2013-09-17T21:26:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,613 | py | import signal
import __builtin__
class SandboxError(Exception):
    """Base error raised for any violation inside the sandbox."""
    pass
class TimeoutError(SandboxError):
    # Raised by the SIGALRM handler when a sandboxed call exceeds its
    # time budget.  NOTE(review): shadows the Python 3 builtin
    # TimeoutError; harmless in this Python 2 file but worth renaming
    # on a port.
    pass
def make_secure():
    """ Clear the unsafe builtins. Reference: https://isisblogs.poly.edu/2012/10/26/escaping-python-sandboxes/ """
    # Names that allow file access, code execution or interpreter exit.
    UNSAFE = ['open',
              'file',
              'execfile',
              'compile',
              'reload',
              '__import__',
              'eval',
              'input',
              '__debug__',
              'raw_input',
              'intern',
              'quit',
              'BaseException',
              'SystemExit']
    # Deleting from __builtin__ (Python 2 module) affects the whole
    # interpreter, not just this module.  Some names may already be
    # gone on a second call, hence the KeyError guard.
    for func in UNSAFE:
        try:
            del __builtin__.__dict__[func]
        except KeyError:
            continue
def checkType(value):
    """Ensure *value* is one of the sandbox-safe scalar types.

    Raises:
        SandboxError: if value is not a bool/int/long/float.
    """
    safeType = (bool, int, long, float)
    if not isinstance(value, safeType):
        # repr-format the offender: the original `value + " is not..."`
        # concatenation raised TypeError (not SandboxError) for any
        # non-string value, which is the common case here.
        raise SandboxError("%r is not a safe type!" % (value,))
def add(a, b):
    """Return a + b for sandbox-safe numbers.

    Raises:
        SandboxError: if an argument is not a safe type, is a bool, or
            the addition itself fails.
    """
    checkType(a)
    checkType(b)
    # Reject bools explicitly: bool is a subclass of int, so the original
    # isinstance((int, long, float)) check could never filter them out
    # and booleans were silently added as 0/1.
    if isinstance(a, bool) or isinstance(b, bool):
        raise SandboxError("Can not add bool values!")
    try:
        return a + b
    except Exception as e:
        raise SandboxError(str(e))
def sub(a, b):
    """Return a - b for sandbox-safe numbers.

    Raises:
        SandboxError: if an argument is not a safe type, is a bool, or
            the subtraction itself fails.
    """
    checkType(a)
    checkType(b)
    # bool is a subclass of int, so an explicit check is required to
    # honour the "no bool arithmetic" rule stated by the error message.
    if isinstance(a, bool) or isinstance(b, bool):
        raise SandboxError("Can not subtract bool values!")
    try:
        return a - b
    except Exception as e:
        raise SandboxError(str(e))
def mul(a, b):
    """Return a * b for sandbox-safe numbers.

    Raises:
        SandboxError: if an argument is not a safe type, is a bool, or
            the multiplication itself fails.
    """
    checkType(a)
    checkType(b)
    # bool is a subclass of int, so an explicit check is required to
    # honour the "no bool arithmetic" rule stated by the error message.
    if isinstance(a, bool) or isinstance(b, bool):
        raise SandboxError("Can not multiply bool values!")
    try:
        return a * b
    except Exception as e:
        raise SandboxError(str(e))
def div(a, b):
    """Return a / b for sandbox-safe numbers (Python 2 semantics:
    int / int floors).

    Raises:
        SandboxError: if an argument is not a safe type, is a bool, or
            the division fails (e.g. division by zero).
    """
    checkType(a)
    checkType(b)
    # bool is a subclass of int, so an explicit check is required; the
    # original message also wrongly said "multiply" here.
    if isinstance(a, bool) or isinstance(b, bool):
        raise SandboxError("Can not divide bool values!")
    try:
        return a / b
    except Exception as e:
        # ZeroDivisionError and friends surface as SandboxError.
        raise SandboxError(str(e))
def fibonacci(n):
    """Return the nth Fibonacci number (1, 1, 2, 3, 5, ... for n = 0, 1, 2...).

    Iterative: the original double recursion was O(phi**n) in time and
    exceeded the recursion limit for moderate n; this is O(n) and
    returns identical values (still summing via the sandboxed add()).

    Raises:
        SandboxError: if n is not a non-negative int.
    """
    checkType(n)
    if not isinstance(n, int):
        raise SandboxError("Only integer has fibonacci!")
    elif n < 0:
        raise SandboxError("Index of fibonacci must be positive!")
    a, b = 1, 1
    for _ in range(n):
        a, b = b, add(a, b)
    return a
def counter(n):
    """Count and print all numbers from n down to 1, then return 1.

    Raises:
        SandboxError: if n is not a non-negative int.
    """
    checkType(n)
    if not isinstance(n, int):
        raise SandboxError("Only integer has fibonacci!")
    elif n < 0:
        raise SandboxError("Index of fibonacci must be positive!")
    while n >= 1:
        # print() works in both Python 2 and 3 for a single argument.
        print(n)
        # The original called sub(n, 1) without binding the result, so n
        # never changed and the loop ran forever.
        n = sub(n, 1)
    return 1
def factorial(n):
    """Return n! for a non-negative integer n.

    Iterative: the original self-recursion hit the interpreter's
    recursion limit for large n; values are unchanged (0! == 1! == 1).

    Raises:
        SandboxError: if n is not a non-negative int.
    """
    checkType(n)
    if not isinstance(n, int):
        raise SandboxError("Only integer has factoral!")
    elif n < 0:
        raise SandboxError("Number must be positive!")
    result = 1
    for k in range(2, n + 1):
        result = result * k
    return result
def gcd(m,n):
    """This function calculates the Greatest common divisor"""
    checkType(n)
    checkType(m)
    if not isinstance(n, int) or not isinstance (m, int):
        raise SandboxError("Only integer has gcd!")
    elif n < 1 or m < 1:
        raise SandboxError("Numbers must be positive!")
    # Euclid's algorithm: rem = m - (m // n) * n == m % n.
    # NOTE(review): relies on Python 2 integer division inside div();
    # under Python 3 div() would return a float and this breaks.  The
    # file is Python 2 (print statement, __builtin__ import).
    while n > 0:
        temp = div(m,n)
        rem = sub(m, mul(temp, n))
        m = n
        n = rem
    return m
class sandbox(object):
    """Run callables under a wall-clock time limit using SIGALRM.

    Unix-only (signal.SIGALRM).  The alarm is always cancelled in the
    finally block so a fast call cannot leave a pending signal behind.
    """
    def __init__(self, seconds):
        # seconds: whole-second timeout applied to every run() call.
        self.sig = signal.SIGALRM
        make_secure()
        self.seconds = seconds

    def safe(self):
        # Re-strip the unsafe builtins on demand.
        make_secure()

    def _handle_timeout(self, signum, frame):
        raise TimeoutError("Time is up!")

    def run(self, func, *args, **kws):
        """Call func(*args, **kws) with type-checked arguments and a timeout.

        Raises:
            SandboxError: if an argument is unsafe, func raises, or the
                timeout fires (TimeoutError is wrapped like any error).
        """
        for arg in args:
            checkType(arg)
        # Validate the keyword *values*: the original iterated the dict
        # directly, type-checking the key strings, so any keyword
        # argument at all made run() raise.
        for value in kws.values():
            checkType(value)
        signal.signal(self.sig, self._handle_timeout)
        signal.alarm(self.seconds)
        try:
            output = func(*args, **kws)
        except Exception as e:
            raise SandboxError(str(e))
        finally:
            # Cancel any pending alarm whether or not func succeeded.
            signal.alarm(0)
        return output
| [
"nuistzyy@gmail.com"
] | nuistzyy@gmail.com |
40b76af72aec1dc5a3e6b91f4e5e44e96c66ae8e | adf60ace632d9958d2c555a4dadc6b9a4c872532 | /mrf/celestial.py | aeac80d61add50f5ed43f21428b81806051caba4 | [
"MIT"
] | permissive | johnnygreco/mrf | 5d515dda630ab4ed2367bfec20434a0c07149169 | e7d5d30c03b9b273fb3537da591acf9c19030eeb | refs/heads/master | 2020-07-04T04:03:20.511519 | 2019-08-13T04:01:15 | 2019-08-13T04:01:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,127 | py | import os
import copy
import scipy
import numpy as np
import matplotlib.pyplot as plt
from astropy import wcs
from astropy.io import fits
from astropy.table import Table, Column
import astropy.units as u
from astropy.coordinates import SkyCoord
from .display import display_single, SEG_CMAP
from .utils import img_cutout
from .imtools import imshift, imdelete, magnify, blkavg
class Celestial(object):
    '''
    Class for `Celestial` object: a 2-D image (plus optional mask) tied
    to a FITS header/WCS.  Provides save, shift, resize and display
    helpers that keep image, mask and WCS consistent.
    '''
    def __init__(self, img, mask=None, header=None, dataset='Dragonfly'):
        '''Initialize `Celestial` object.

        Parameters:
            img (numpy 2-D array): the image itself.
            mask (numpy 2-D array): optional mask with the same shape.
            header: FITS header carrying the WCS of `img`.
            dataset (str): label of the originating survey/instrument.
        '''
        self.header = header
        self.wcs = wcs.WCS(header)
        # Pixel scale in arcsec/pixel: prefer the CD matrix, fall back
        # to the PC matrix (TypeError covers header=None).
        try:
            self.pixel_scale = abs(header['CD1_1'] * 3600)
        except (KeyError, TypeError):
            self.pixel_scale = abs(header['PC1_1'] * 3600)
        self.shape = img.shape  # ndarray (ny, nx) convention
        self.dataset = dataset
        self._image = img
        if mask is not None:
            self._mask = mask
        # Sky position of the image center and corners.
        ny, nx = img.shape
        self.ny = ny
        self.nx = nx
        # NOTE(review): wcs_pix2world takes (x, y); passing (ny // 2, nx // 2)
        # swaps the axes for non-square images -- confirm intended.
        self.ra_cen, self.dec_cen = list(map(float, self.wcs.wcs_pix2world(ny // 2, nx // 2, 0)))
        # Corner order: lower-left, lower-right, upper-right, upper-left.
        self.ra_bounds, self.dec_bounds = self.wcs.wcs_pix2world(
            [0, img.shape[1], img.shape[1], 0],
            [0, 0, img.shape[0], img.shape[0]], 0)
        self.sky_bounds = np.append(self.ra_bounds[2:], self.dec_bounds[1:3])
        self.scale_bar_length = 5  # initial scale-bar length when displaying

    @property
    def image(self):
        return self._image

    @image.setter
    def image(self, img_array):
        self._image = img_array

    @property
    def mask(self):
        return self._mask

    @mask.setter
    def mask(self, mask_array):
        self._mask = mask_array

    @property
    def hscmask(self):
        return self._hscmask

    @hscmask.setter
    def hscmask(self, mask_array):
        self._hscmask = mask_array

    @property
    def variance(self):
        return self._variance

    @variance.setter
    def variance(self, variance_array):
        self._variance = variance_array

    # Save 2-D numpy array to `fits`
    def save_to_fits(self, fits_file_name, data='image', overwrite=True):
        """Save numpy 2-D arrays to `fits` file. (from `kungpao`)

        Parameters:
            data (str): can be 'image' or 'mask'
            fits_file_name (str): File name of `fits` file
            overwrite (bool): Default is True

        Returns:
            img_hdu: the PrimaryHDU that was written.
        """
        if data == 'image':
            data_use = self.image
        elif data == 'mask':
            data_use = self.mask
        else:
            raise ValueError('Data can only be "image" or "mask".')
        img_hdu = fits.PrimaryHDU(data_use)

        if self.header is not None:
            img_hdu.header = self.header
            if self.wcs is not None:
                # Merge the live WCS back into the stored header; also
                # mirror PC?_? keywords into the CD?_? convention.
                wcs_header = self.wcs.to_header()
                import fnmatch
                for i in wcs_header:
                    if i in self.header:
                        self.header[i] = wcs_header[i]
                    if fnmatch.fnmatch(i, 'PC?_?'):
                        self.header['CD' + i.lstrip("PC")] = wcs_header[i]
                img_hdu.header = self.header
        elif self.wcs is not None:
            wcs_header = self.wcs.to_header()
            img_hdu.header = wcs_header
        else:
            img_hdu = fits.PrimaryHDU(data_use)

        if os.path.islink(fits_file_name):
            os.unlink(fits_file_name)

        img_hdu.writeto(fits_file_name, overwrite=overwrite)
        return img_hdu

    # Shift image/mask
    def shift_image(self, dx, dy, method='iraf', order=5, cval=0.0):
        '''Shift the image of Celestial object. The WCS of image will also be changed.

        Parameters:
            dx, dy (float): shift distance (in pixel) along x (horizontal) and y (vertical).
                Note that elements in one row has the same y but different x.
                Example: dx = 2 is to shift the image "RIGHT", dy = 3 is to shift the image "UP".
            method (str): interpolation method. Use 'lanczos' or 'iraf'.
                If using 'iraf', default interpolation is 'poly3.
            order (int): the order of Lanczos interpolation (>0).
            cval (scalar): value to fill the edges. Default is NaN.

        Returns:
            shift_image: ndarray (only for the 'lanczos' branch).
        '''
        ny, nx = self.image.shape
        # Guard against shifts larger than the image itself.  The
        # original tested `abs(ny) > ny`, which is always false, so the
        # dy bound was never enforced.
        if abs(dx) > nx or abs(dy) > ny:
            raise ValueError('# Shift distance is beyond the image size.')

        if method == 'lanczos':
            try:  # try to import galsim
                from galsim import degrees, Angle
                from galsim.interpolant import Lanczos
                from galsim import Image, InterpolatedImage
                from galsim.fitswcs import AstropyWCS
            except ImportError:
                raise ImportError('# Import `galsim` failed! Please check if `galsim` is installed!')
            # Begin shift
            assert (order > 0) and isinstance(order, int), 'order of ' + method + ' must be positive interger.'
            galimg = InterpolatedImage(Image(self.image, dtype=float),
                                       scale=self.pixel_scale, x_interpolant=Lanczos(order))
            galimg = galimg.shift(dx=dx * self.pixel_scale, dy=dy * self.pixel_scale)
            result = galimg.drawImage(scale=self.pixel_scale, nx=nx, ny=ny)  # , wcs=AstropyWCS(self.wcs))
            self._image = result.array
            # Change the WCS of image: moving pixels right/up moves the
            # reference pixel by the same amount.
            hdr = copy.deepcopy(self.header)
            hdr['CRPIX1'] += dx
            hdr['CRPIX2'] += dy
            self.header = hdr
            self.wcs = wcs.WCS(hdr)
            self._wcs_header_merge()
            return result.array
        elif method == 'iraf':
            # Round-trip through temporary FITS files so IRAF's imshift
            # (poly3 interpolation) does the work; header is reloaded.
            self.save_to_fits('./_temp.fits', 'image')
            imshift('./_temp.fits', './_shift_temp.fits', dx, dy, interp_type='poly3', boundary_type='constant')
            hdu = fits.open('./_shift_temp.fits')
            self.image = hdu[0].data
            self.shape = hdu[0].data.shape
            self.header = hdu[0].header
            self.wcs = wcs.WCS(self.header)
            hdu.close()
            imdelete('./*temp.fits')
        else:
            raise ValueError("# Not supported interpolation method. Use 'lanczos' or 'iraf'.")

    def shift_mask(self, dx, dy, method='iraf', order=5, cval=0.0):
        '''Shift the mask of Celestial object.

        Parameters:
            dx, dy (float): shift distance (in pixel) along x (horizontal) and y (vertical).
                Note that elements in one row has the same y but different x.
                Example: dx = 2 is to shift the image "RIGHT", dy = 3 is to shift the image "UP".
            method (str): interpolation method. Use 'lanczos' or 'spline' or 'iraf'
            order (int): the order of spline interpolation (within 0-5) or Lanczos interpolation (>0).
            cval (scalar): value to fill the edges. Default is NaN.

        Returns:
            shift_mask: ndarray (only for the 'lanczos' branch).
        '''
        ny, nx = self.mask.shape
        # Same guard fix as shift_image: dy must be tested, not ny.
        if abs(dx) > nx or abs(dy) > ny:
            raise ValueError('# Shift distance is beyond the image size.')

        if method == 'lanczos':
            try:  # try to import galsim
                from galsim import degrees, Angle
                from galsim.interpolant import Lanczos
                from galsim import Image, InterpolatedImage
                from galsim.fitswcs import AstropyWCS
            except ImportError:
                raise ImportError('# Import `galsim` failed! Please check if `galsim` is installed!')
            # Begin shift
            assert (order > 0) and isinstance(order, int), 'order of ' + method + ' must be positive interger.'
            galimg = InterpolatedImage(Image(self.mask, dtype=float),
                                       scale=self.pixel_scale, x_interpolant=Lanczos(order))
            galimg = galimg.shift(dx=dx * self.pixel_scale, dy=dy * self.pixel_scale)
            result = galimg.drawImage(scale=self.pixel_scale, nx=nx, ny=ny)  # , wcs=AstropyWCS(self.wcs))
            self._mask = result.array
            # NOTE(review): shift_Celestial calls shift_image then
            # shift_mask, and both add (dx, dy) to CRPIX -- the header
            # ends up shifted twice; confirm intended.
            hdr = copy.deepcopy(self.header)
            hdr['CRPIX1'] += dx
            hdr['CRPIX2'] += dy
            self.header = hdr
            self.wcs = wcs.WCS(hdr)
            self._wcs_header_merge()
            return result.array
        elif method == 'iraf':
            self.save_to_fits('./_temp.fits', 'mask')
            imshift('./_temp.fits', './_shift_temp.fits', dx, dy, interp_type='poly3', boundary_type='constant')
            hdu = fits.open('./_shift_temp.fits')
            self.mask = hdu[0].data
            self.shape = hdu[0].data.shape
            self.header = hdu[0].header
            self.wcs = wcs.WCS(self.header)
            hdu.close()
            imdelete('./*temp.fits')
        else:
            raise ValueError("# Not supported interpolation method. Use 'lanczos' or 'iraf'.")

    def shift_Celestial(self, dx, dy, method='iraf', order=5, cval=0.0):
        '''Shift the Celestial object: image first, then mask (if any).

        Parameters:
            dx, dy (float): shift distance (in pixel) along x (horizontal) and y (vertical).
                Example: dx = 2 is to shift the image "RIGHT", dy = 3 is to shift the image "UP".
            method (str): interpolation method. Use 'lanczos' or 'spline'.
            order (int): the order of spline interpolation (within 0-5) or Lanczos interpolation (>0).
            cval (scalar): value to fill the edges. Default is NaN.
        '''
        self.shift_image(dx, dy, method=method, order=order, cval=cval)
        if hasattr(self, 'mask'):
            self.shift_mask(dx, dy, method=method, order=order, cval=cval)

    def resize_image(self, f, method='iraf', order=5, cval=0.0):
        '''Zoom/Resize the image of Celestial object.
        f > 1 means the image will be resampled (finer)! f < 1 means the image will be degraded.

        Parameters:
            f (float): the positive factor of zoom. If 0 < f < 1, the image will be resized to smaller one.
            method (str): interpolation method. Use 'lanczos' or 'spline' or 'iraf'.
            order (int): the order Lanczos interpolation (>0).
            cval (scalar): value to fill the edges. Default is NaN.

        Returns:
            shift_image: ndarray (only for the 'lanczos' branch).
        '''
        if method == 'lanczos':
            try:  # try to import galsim
                from galsim import degrees, Angle
                from galsim.interpolant import Lanczos
                from galsim import Image, InterpolatedImage
                from galsim.fitswcs import AstropyWCS
            except ImportError:
                raise ImportError('# Import `galsim` failed! Please check if `galsim` is installed!')
            assert (order > 0) and isinstance(order, int), 'order of ' + method + ' must be positive interger.'
            galimg = InterpolatedImage(Image(self.image, dtype=float),
                                       scale=self.pixel_scale, x_interpolant=Lanczos(order))
            # galimg = galimg.magnify(f)
            ny, nx = self.image.shape
            result = galimg.drawImage(scale=self.pixel_scale / f, nx=round(nx * f), ny=round(ny * f))  # , wcs=AstropyWCS(self.wcs))
            # NOTE(review): _resize_wcs / _wcs_header_merge are not
            # defined in this class as shown -- confirm they exist
            # elsewhere, otherwise this branch raises AttributeError.
            self.wcs = self._resize_wcs(self.image, self.wcs, f)
            self._image = result.array
            self.shape = self.image.shape
            self._wcs_header_merge()
            self.pixel_scale /= f
            return result.array
        elif method == 'iraf':
            self.save_to_fits('./_temp.fits', 'image')
            if f > 1:
                # Upsample with IRAF magnify.
                magnify('./_temp.fits', './_resize_temp.fits', f, f)
            else:
                # Downsample with flux-conserving block average (sum).
                blkavg('./_temp.fits', './_resize_temp.fits', 1/f, 1/f, option='sum')
            hdu = fits.open('./_resize_temp.fits')
            self.image = hdu[0].data
            self.shape = hdu[0].data.shape
            self.header = hdu[0].header
            self.wcs = wcs.WCS(self.header)
            self.pixel_scale /= f
            hdu.close()
            imdelete('./*temp.fits')
        else:
            raise ValueError("# Not supported interpolation method. Use 'lanczos' or 'iraf'.")

    def resize_mask(self, f, method='iraf', order=5, cval=0.0):
        '''Zoom/Resize the mask of Celestial object.
        f > 1 means the mask will be resampled (finer)! f < 1 means the mask will be degraded.

        Parameters:
            f (float): the positive factor of zoom. If 0 < f < 1, the mask will be resized to smaller one.
            method (str): interpolation method. Use 'lanczos' or 'spline' or 'iraf'.
            order (int): the order Lanczos interpolation (>0).
            cval (scalar): value to fill the edges. Default is NaN.

        Returns:
            shift_image: ndarray (only for the 'lanczos' branch).
        '''
        if method == 'lanczos':
            try:  # try to import galsim
                from galsim import degrees, Angle
                from galsim.interpolant import Lanczos
                from galsim import Image, InterpolatedImage
                from galsim.fitswcs import AstropyWCS
            except ImportError:
                raise ImportError('# Import `galsim` failed! Please check if `galsim` is installed!')
            assert (order > 0) and isinstance(order, int), 'order of ' + method + ' must be positive interger.'
            galimg = InterpolatedImage(Image(self.mask, dtype=float),
                                       scale=self.pixel_scale, x_interpolant=Lanczos(order))
            ny, nx = self.mask.shape
            result = galimg.drawImage(scale=self.pixel_scale / f, nx=round(nx * f), ny=round(ny * f))  # , wcs=AstropyWCS(self.wcs))
            self.wcs = self._resize_wcs(self.image, self.wcs, f)
            # Store the resized MASK; the original assigned it to
            # self._image, clobbering the image with the mask.
            self._mask = result.array
            self.shape = self.mask.shape
            self._wcs_header_merge()
            self.pixel_scale /= f
            return result.array
        elif method == 'iraf':
            self.save_to_fits('./_temp.fits', 'mask')
            if f > 1:
                magnify('./_temp.fits', './_resize_temp.fits', f, f)
            else:
                blkavg('./_temp.fits', './_resize_temp.fits', 1/f, 1/f, option='sum')
            hdu = fits.open('./_resize_temp.fits')
            self.mask = hdu[0].data
            self.shape = hdu[0].data.shape
            self.header = hdu[0].header
            self.wcs = wcs.WCS(self.header)
            self.pixel_scale /= f
            hdu.close()
            imdelete('./*temp.fits')
        else:
            raise ValueError("# Not supported interpolation method. Use 'lanczos' or 'iraf'.")

    def resize_Celestial(self, f, method='iraf', order=5, cval=0.0):
        '''Resize the Celestial object: image first, then mask (if any).
        f > 1 means the image will be resampled! f < 1 means the image will be degraded.

        Parameters:
            f (float): the positive factor of zoom.
            method (str): interpolation method. Use 'lanczos' or 'iraf'.
            order (int): the order of Lanczos interpolation (>0).
            cval (scalar): value to fill the edges. Default is NaN.
        '''
        self.resize_image(f, method=method, order=order, cval=cval)
        if hasattr(self, 'mask'):
            self.resize_mask(f, method=method, order=order, cval=cval)

    # Display image/mask
    def display_image(self, **kwargs):
        display_single(self.image, scale_bar_length=self.scale_bar_length, **kwargs)

    def display_mask(self, **kwargs):
        display_single(self.mask, scale='linear',
                       cmap=SEG_CMAP, scale_bar_length=self.scale_bar_length, **kwargs)

    def display_Celestial(self, **kwargs):
        # Show the image with masked pixels zeroed out (if a mask exists).
        if hasattr(self, 'mask'):
            display_single(self.image * (~self.mask.astype(bool)),
                           scale_bar_length=self.scale_bar_length, **kwargs)
        else:
            self.display_image()
class Star(Celestial):
    def __init__(self, img, header, starobj, halosize=40, padsize=40, mask=None, hscmask=None):
        """Cut out a single star from `img`.

        `halosize` is the cutout *radius* in pixels (the cutout is
        2 * halosize + 1 on a side); `padsize` pads the parent image so
        stars near the edges can still be extracted.  `starobj` must
        provide pixel coordinates 'x', 'y' (numpy convention) plus
        'flux' and 'flux_ann'.  RA/DEC positions are not supported yet.
        """
        Celestial.__init__(self, img, mask, header=header)
        if hscmask is not None:
            self.hscmask = hscmask
        self.name = 'star'
        self.scale_bar_length = 3
        # Trim the image to star size
        # starobj should at least contain x, y, (or ra, dec) and
        # Position of a star, in numpy convention
        x_int = int(starobj['x'])
        y_int = int(starobj['y'])
        # Sub-pixel residuals, sign-flipped so that shifting by (dx, dy)
        # later (centralize) moves the star onto the pixel center.
        dx = -1.0 * (starobj['x'] - x_int)
        dy = -1.0 * (starobj['y'] - y_int)
        halosize = int(halosize)
        # Make padded image to deal with stars near the edges
        padsize = int(padsize)
        ny, nx = self.image.shape
        im_padded = np.zeros((ny + 2 * padsize, nx + 2 * padsize))
        im_padded[padsize: ny + padsize, padsize: nx + padsize] = self.image
        # Star itself, but no shift here.
        halo = im_padded[y_int + padsize - halosize: y_int + padsize + halosize + 1,
                         x_int + padsize - halosize: x_int + padsize + halosize + 1]
        self._image = halo
        self.shape = halo.shape
        self.cen_xy = [x_int, y_int]
        self.dx = dx
        self.dy = dy
        # Flux
        self.flux = starobj['flux']
        self.fluxann = starobj['flux_ann']

        if hasattr(self, 'mask'):
            # Cut the same window out of the mask.
            im_padded = np.zeros((ny + 2 * padsize, nx + 2 * padsize))
            im_padded[padsize: ny + padsize, padsize: nx + padsize] = self.mask
            # Mask itself, but no shift here.
            halo = (im_padded[y_int + padsize - halosize: y_int + padsize + halosize + 1,
                              x_int + padsize - halosize: x_int + padsize + halosize + 1])
            self._mask = halo

        if hasattr(self, 'hscmask'):
            # Cut the same window out of the HSC mask.
            im_padded = np.zeros((ny + 2 * padsize, nx + 2 * padsize))
            im_padded[padsize: ny + padsize, padsize: nx + padsize] = self.hscmask
            # Mask itself, but no shift here.
            halo = (im_padded[y_int + padsize - halosize: y_int + padsize + halosize + 1,
                              x_int + padsize - halosize: x_int + padsize + halosize + 1])
            self.hscmask = halo

    def centralize(self, method='iraf', order=5, cval=0.0):
        # Apply the stored sub-pixel shift so the star sits on the
        # cutout's central pixel.
        self.shift_Celestial(self.dx, self.dy, method=method, order=order, cval=cval)

    def sub_bkg(self, verbose=True):
        # Subtract the local sky background around the star.  The
        # background is estimated with sep after masking every detected
        # object in a blurred version of the cutout.
        from astropy.convolution import convolve, Box2DKernel
        from .image import extract_obj, seg_remove_cen_obj
        from sep import Background
        img_blur = convolve(abs(self.image), Box2DKernel(2))
        img_objects, img_segmap = extract_obj(abs(img_blur), b=5, f=4, sigma=4.5, minarea=2, pixel_scale=self.pixel_scale,
                                              deblend_nthresh=32, deblend_cont=0.0001,
                                              sky_subtract=False, show_fig=False, verbose=False)
        bk = Background(self.image, img_segmap != 0)
        glbbck = bk.globalback
        self.globalback = glbbck
        if verbose:
            print('# Global background: ', glbbck)
        self.image -= glbbck

    def get_masked_image(self, cval=np.nan):
        # Return a copy of the image with masked pixels set to `cval`.
        if not hasattr(self, 'mask'):
            print("This `Star` object doesn't have a `mask`!")
            return self.image
        else:
            imgcp = copy.copy(self.image)
            imgcp[self.mask.astype(bool)] = cval
            return imgcp

    def mask_out_contam(self, blowup=True, show_fig=True, verbose=True):
        # Build a mask of contaminating sources: detect objects in a
        # blurred cutout, drop the central star from the segmentation
        # map, and (optionally) grow the mask with a Gaussian blow-up.
        # NOTE(review): imports extract_obj from .utils while sub_bkg
        # imports it from .image -- confirm which module is current.
        from astropy.convolution import convolve, Box2DKernel
        from .utils import extract_obj, seg_remove_cen_obj
        img_blur = convolve(abs(self.image), Box2DKernel(2))
        img_objects, img_segmap = extract_obj(abs(img_blur), b=5, f=4, sigma=4.5, minarea=2, pixel_scale=self.pixel_scale,
                                              deblend_nthresh=32, deblend_cont=0.0005,
                                              sky_subtract=False, show_fig=show_fig, verbose=verbose)
        # remove central object from segmap
        img_segmap = seg_remove_cen_obj(img_segmap)
        detect_mask = (img_segmap != 0).astype(float)
        if blowup is True:
            from astropy.convolution import convolve, Gaussian2DKernel
            cv = convolve(detect_mask, Gaussian2DKernel(1.5))
            detect_mask = (cv > 0.1).astype(float)
        self.mask = detect_mask
        return
"jiaxuan_li@pku.edu.cn"
] | jiaxuan_li@pku.edu.cn |
0ca4959de6189a20037715a3f49bfe953201051a | 214e9b526572a6a47e2ebbe8e84e940515a5ab1e | /wikipedia_random.py | 953900997fe5019e55130666ac2073bd72f9e392 | [
"Unlicense"
] | permissive | GabinCleaver/Wikipedia_Random_Article | 920dcd8ebb432fbb0a84a43c309d4bda09de2ed8 | c767a86a5ae9f2a8c6f4c0bcf77ff024976ce541 | refs/heads/main | 2023-05-10T16:02:04.193490 | 2021-06-05T17:08:43 | 2021-06-05T17:08:43 | 374,166,152 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 610 | py | from bs4 import BeautifulSoup
import requests
from colorama import Fore, init
init()

# Fetch a random English Wikipedia article and dump its title and
# paragraph text to random_wiki.txt.
try:
    res = requests.get("https://en.wikipedia.org/wiki/Special:Random")
    res.raise_for_status()
    wiki = BeautifulSoup(res.text, "html.parser")
    heading = wiki.find("h1").text
    # `with` guarantees the file is closed even if a write fails
    # (the original left it open on any exception).
    with open("random_wiki.txt", "w+", encoding='utf-8') as r:
        r.write(heading + "\n")
        for i in wiki.select("p"):
            r.write(i.getText())
    print(Fore.GREEN + "Fichier enregistré sous le nom de random_wiki.txt")
except Exception:
    # Catch Exception rather than a bare except so KeyboardInterrupt /
    # SystemExit still propagate.
    print(Fore.RED + "Erreur dans la création du fichier.")
| [
"noreply@github.com"
] | GabinCleaver.noreply@github.com |
9d24b9db84aebb68970665b9b994d650ffa461e9 | 0544ff7fc0736d2f6dded68e5ae27f4e2ddf7a06 | /sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/aio/operations/_sql_pool_blob_auditing_policies_operations.py | 9108426818f5f91c6798e42456dd8b7b978c8b93 | [
"MIT",
"LGPL-2.1-or-later",
"LicenseRef-scancode-generic-cla"
] | permissive | dhanizael/azure-sdk-for-python | 160469181f80593b0dff9104a8eb0d5b781722a9 | 17a1d048a8be4c869c7b943b2aff5a0c8213652e | refs/heads/master | 2023-03-04T14:47:54.334635 | 2021-02-10T20:16:40 | 2021-02-10T20:16:40 | 337,845,515 | 0 | 0 | MIT | 2021-02-10T20:37:41 | 2021-02-10T20:30:09 | null | UTF-8 | Python | false | false | 13,482 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class SqlPoolBlobAuditingPoliciesOperations:
"""SqlPoolBlobAuditingPoliciesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.synapse.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client              # pipeline-enabled service client
        self._serialize = serializer       # URL/body serializer
        self._deserialize = deserializer   # response deserializer
        self._config = config              # holds subscription_id etc.
    async def get(
        self,
        resource_group_name: str,
        workspace_name: str,
        sql_pool_name: str,
        **kwargs
    ) -> "_models.SqlPoolBlobAuditingPolicy":
        """Get a SQL pool's blob auditing policy.

        Get a SQL pool's blob auditing policy.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param sql_pool_name: SQL pool name.
        :type sql_pool_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SqlPoolBlobAuditingPolicy, or the result of cls(response)
        :rtype: ~azure.mgmt.synapse.models.SqlPoolBlobAuditingPolicy
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SqlPoolBlobAuditingPolicy"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01-preview"
        # The service exposes exactly one auditing policy per pool,
        # always named "default".
        blob_auditing_policy_name = "default"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'),
            'blobAuditingPolicyName': self._serialize.url("blob_auditing_policy_name", blob_auditing_policy_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('SqlPoolBlobAuditingPolicy', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/auditingSettings/{blobAuditingPolicyName}'}  # type: ignore
async def create_or_update(
    self,
    resource_group_name: str,
    workspace_name: str,
    sql_pool_name: str,
    parameters: "_models.SqlPoolBlobAuditingPolicy",
    **kwargs
) -> "_models.SqlPoolBlobAuditingPolicy":
    """Creates or updates a SQL pool's blob auditing policy.

    Creates or updates a SQL pool's blob auditing policy.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param sql_pool_name: SQL pool name.
    :type sql_pool_name: str
    :param parameters: The database blob auditing policy.
    :type parameters: ~azure.mgmt.synapse.models.SqlPoolBlobAuditingPolicy
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SqlPoolBlobAuditingPolicy, or the result of cls(response)
    :rtype: ~azure.mgmt.synapse.models.SqlPoolBlobAuditingPolicy
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SqlPoolBlobAuditingPolicy"]
    # Map well-known HTTP failure codes onto azure-core exception types;
    # callers may override/extend via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-06-01-preview"
    # The service exposes exactly one auditing policy per pool, named "default".
    blob_auditing_policy_name = "default"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self.create_or_update.metadata['url']  # type: ignore
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'),
        'blobAuditingPolicyName': self._serialize.url("blob_auditing_policy_name", blob_auditing_policy_name, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # Serialize the policy into the PUT body.
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'SqlPoolBlobAuditingPolicy')
    body_content_kwargs['content'] = body_content
    request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # Both 200 (updated) and 201 (created) return the policy in the body.
    if response.status_code == 200:
        deserialized = self._deserialize('SqlPoolBlobAuditingPolicy', pipeline_response)

    if response.status_code == 201:
        deserialized = self._deserialize('SqlPoolBlobAuditingPolicy', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/auditingSettings/{blobAuditingPolicyName}'}  # type: ignore
def list_by_sql_pool(
    self,
    resource_group_name: str,
    workspace_name: str,
    sql_pool_name: str,
    **kwargs
) -> AsyncIterable["_models.SqlPoolBlobAuditingPolicyListResult"]:
    """Lists auditing settings of a Sql pool.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param sql_pool_name: SQL pool name.
    :type sql_pool_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either SqlPoolBlobAuditingPolicyListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.synapse.models.SqlPoolBlobAuditingPolicyListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SqlPoolBlobAuditingPolicyListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-06-01-preview"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET request: the first page uses the templated URL,
        # subsequent pages use the service-provided nextLink verbatim.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list_by_sql_pool.metadata['url']  # type: ignore
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
                'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Split one page into (continuation token, items).
        deserialized = self._deserialize('SqlPoolBlobAuditingPolicyListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page and raise on any non-200 response.
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_by_sql_pool.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/auditingSettings'}  # type: ignore
| [
"noreply@github.com"
] | dhanizael.noreply@github.com |
a269b1d07daab4d38f2a6fa322b72ded7dcdeb1c | 3ec7e13a28fe72d5a4c676ffe0738d8ae4a6aa51 | /day7/day7.py | 2a0161eaaeb3e97e92a4c1a0f0d6140b4d5d3787 | [] | no_license | tibdhond/advent-of-code-2019 | 2a23d63f635e3406e1de6d6c2d68b544f11f57c5 | 2523846e8689c7083e767712cb8b3dfe659dc9d4 | refs/heads/master | 2020-09-24T06:15:47.420059 | 2019-12-17T11:18:55 | 2019-12-17T11:18:55 | 225,685,218 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,724 | py | from itertools import permutations
import threading
stdin = []
thread_turn = 0
def add(array, index, params):
    """Opcode 01: store the sum of two operands and advance past the instruction."""
    # Mode "0" (position) dereferences the operand; anything else is immediate.
    if params[-1] == "0":
        first = array[int(array[index + 1])]
    else:
        first = array[index + 1]
    if params[-2] == "0":
        second = array[int(array[index + 2])]
    else:
        second = array[index + 2]
    # Results are stored back as strings, matching the program's cell format.
    array[int(array[index + 3])] = str(int(first) + int(second))
    return index + 4
def mul(array, index, params):
    """Opcode 02: store the product of two operands and advance past the instruction."""
    # Mode "0" (position) dereferences the operand; anything else is immediate.
    if params[-1] == "0":
        first = array[int(array[index + 1])]
    else:
        first = array[index + 1]
    if params[-2] == "0":
        second = array[int(array[index + 2])]
    else:
        second = array[index + 2]
    array[int(array[index + 3])] = str(int(first) * int(second))
    return index + 4
def inp(array, index, _):
    """Opcode 03: pop the next value from the shared input queue into memory.

    Also rotates `thread_turn` so the next of the five amplifier threads
    becomes eligible to consume input (round-robin handoff).
    """
    global stdin, thread_turn
    # Advance the turn BEFORE consuming, handing the baton to the next amp.
    thread_turn = (thread_turn + 1) % 5
    value = stdin.pop(0)
    array[int(array[index+1])] = value
    return index + 2
def outp(array, index, params):
    """Opcode 04: emit a value.

    Position mode pushes the referenced cell onto the shared queue (feeding
    the next amplifier); immediate mode just prints the raw operand.
    """
    global stdin
    stdin.append(array[int(array[index+1])]) if params[-1] == "0" else print(array[index+1])
    # print("output: %s" % stdin)
    return index + 2
def jump_if_true(array, index, params):
    """Opcode 05: jump to the second operand when the first is non-zero."""
    first = array[index + 1] if params[-1] != "0" else array[int(array[index + 1])]
    target = array[index + 2] if params[-2] != "0" else array[int(array[index + 2])]
    # Fall through to the next instruction when the test value is zero.
    return int(target) if int(first) != 0 else index + 3
def jump_if_false(array, index, params):
    """Opcode 06: jump to the second operand when the first is zero."""
    first = array[index + 1] if params[-1] != "0" else array[int(array[index + 1])]
    target = array[index + 2] if params[-2] != "0" else array[int(array[index + 2])]
    # Fall through to the next instruction when the test value is non-zero.
    return int(target) if int(first) == 0 else index + 3
def less_than(array, index, params):
    """Opcode 07: store 1 if the first operand is less than the second, else 0.

    Bug fix: the original stored the ints 1/0 while every other opcode stores
    strings; an int cell later read as an instruction breaks `code.zfill(5)`
    in `execute`. The flag is now stored as "1"/"0" for consistency.
    """
    value1 = array[int(array[index+1])] if params[-1] == "0" else array[index+1]
    value2 = array[int(array[index+2])] if params[-2] == "0" else array[index+2]
    array[int(array[index+3])] = "1" if int(value1) < int(value2) else "0"
    return index + 4
def equals(array, index, params):
    """Opcode 08: store 1 if the two operands are equal, else 0.

    Bug fix: the original stored the ints 1/0 while every other opcode stores
    strings; an int cell later read as an instruction breaks `code.zfill(5)`
    in `execute`. The flag is now stored as "1"/"0" for consistency.
    """
    value1 = array[int(array[index+1])] if params[-1] == "0" else array[index+1]
    value2 = array[int(array[index+2])] if params[-2] == "0" else array[index+2]
    array[int(array[index+3])] = "1" if int(value1) == int(value2) else "0"
    return index + 4
def execute(fmap, id):
    """Run one amplifier's Intcode program loaded from input.txt.

    `fmap` maps two-digit opcode strings to handler functions; `id` is this
    thread's slot in the round-robin input schedule (0-4).
    NOTE(review): `id` shadows the builtin of the same name — consider renaming.
    """
    with open("input.txt") as f:
        inp = f.read().split(",")
    code = str(inp[0])
    code = code.zfill(5)  # pad to 5 chars: 3 mode digits + 2 opcode digits
    index = 0
    while code[-2:] != "99" and index < len(inp):
        if code[-2:] == "03":
            # Busy-wait until it is this thread's turn AND input is queued.
            while thread_turn != id or len(stdin) == 0:
                pass
            # print("input in thread %d: %s" % (id, stdin))
        index = fmap.get(code[-2:])(inp, index, code[:-2])
        code = inp[index]
        code = code.zfill(5)
    return inp[0]
def main():
    """Try every phase permutation 5-9 across five amp threads; print the best signal."""
    global stdin
    # Dispatch table: two-digit opcode string -> handler function.
    fmap = {"01": add, "02": mul, "03": inp, "04": outp, "05": jump_if_true, "06": jump_if_false,
            "07": less_than, "08": equals}
    best = 0
    all_phases = permutations("56789", 5)
    all_phases = [x for x in all_phases]
    print("Looping %d times!" % len(all_phases))
    # all_phases = ["98765"]
    for phases in all_phases:
        threads = []
        # Seed the shared queue with each amplifier's phase setting, then the
        # initial input signal 0.
        # NOTE(review): leading whitespace was lost in this source; appending
        # the single 0 once, after the phase loop, is the reconstruction that
        # matches the day-7 protocol — confirm against the original repo.
        for phase in phases:
            stdin.append(phase)
        stdin.append(0)
        for i, phase in enumerate(phases):
            x = threading.Thread(target=execute, args=(fmap, i,))
            threads.append(x)
            x.start()
        for thread in threads:
            thread.join()
        # print("main: %s" % stdin)
        best = max(int(stdin[0]), best)
        stdin = []
        print("next loop")
    print(best)


if __name__ == '__main__':
    main()
| [
"Tibo.DHondt@UGent.be"
] | Tibo.DHondt@UGent.be |
de8d2940a82ec1a961cce3d822626965552a1b0e | ef3ecda07a9c7e0c380a18fb249e6c8232757f83 | /DjangoDir/PRDProject/PRDApp/forms.py | 1da07852d113c27b3211a4e48bac5d8f1542bac1 | [] | no_license | Harry050/DRF_practice | 1f8ae439b6b5fedea5e923bf93352186ff6cf13b | a67f17b2a140bbf16959b197ab4e081a2ec03e1f | refs/heads/main | 2023-06-16T10:35:56.169137 | 2021-07-14T02:54:16 | 2021-07-14T02:54:16 | 385,797,476 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 558 | py | from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django import forms
from .models import Process, Process_Thread, Process_Sub_Thread, UserProfile
from django.forms import ModelForm
class CreateUserForm(UserCreationForm):
    """Registration form extending Django's built-in user-creation form
    with email and last name."""
    class Meta:
        model = User
        fields = ['username', 'email', 'last_name', 'password1', 'password2']
class Process_Form(ModelForm):
    """Model form over UserProfile.

    NOTE(review): despite the name, this edits UserProfile (not Process) —
    confirm the intended model.
    """
    class Meta:
        model = UserProfile
        # fields = '__all__'
        fields = ['user', 'comment', 'user_process_sub_thread']
| [
"singh.harish050@gmail.com"
] | singh.harish050@gmail.com |
9681ef10bf650fe0a735453a0b8947a17d656494 | 61f543a87f506fceb1c0ffe8334fc46e23291751 | /lab-2.py | 623dde2bdb405d0e1fdee8d8248f3f9eb148a7a6 | [] | no_license | tkamag/python-reviewer-test | 2465939c7df4ab38d2a25638d63396c1d616296f | 206aae402b1ac8933e13288e41871d7a78fc944a | refs/heads/main | 2023-07-16T05:08:32.179284 | 2021-08-16T21:48:24 | 2021-08-16T21:48:24 | 396,701,066 | 0 | 0 | null | 2021-08-16T21:48:25 | 2021-08-16T08:40:50 | Python | UTF-8 | Python | false | false | 817 | py |
# Running total of divisions performed across the session.
operations_count = 0


def main():
    """Interactive division loop; reports how many operations were performed."""
    global operations_count
    ask_again = True
    while ask_again:
        a = input("Enter the numerator: ")
        b = input("Enter the denominator: ")
        result = perform_division(a, b)
        print(result)
        # Bug fix: the counter was never incremented, so the farewell message
        # always reported 0 operations.
        operations_count += 1
        answer = input("Do you want to perform another operation? Enter yes or no: ")
        ask_again = answer == 'yes'
    print("You performed " + str(operations_count) + " operations, bye!")
def perform_division(a, b):
    """Divide int(a) by int(b); print a message and return None on failure."""
    try:
        numerator = int(a)
        denominator = int(b)
        quotient = numerator / denominator
    except ZeroDivisionError:
        print("Looks like you are trying to divide by zero! This is not possible.")
        return None
    except Exception:
        # Non-numeric input (or any other failure) falls through here.
        print("Invalid numbers.")
        return None
    return quotient
main() | [
"thierry.kamagne@gmail.com"
] | thierry.kamagne@gmail.com |
c9e4676b255b0e9278769a7fba0086e707c3e0d9 | f7cca57165872df8eb4a21904eb20b45cc742e92 | /sokubaikai/urls.py | 6f6e5529201930fca1ef9320f09ef865c81fa2b5 | [] | no_license | hirotb/sokubaikai_manage | e550bab6f21c9060aeebd672946c026f3b85bf23 | 0ef8b1c163f3bf26a5b0ed1c514166e7f885c12c | refs/heads/master | 2020-04-22T18:57:45.703024 | 2019-02-14T22:24:35 | 2019-02-14T22:24:35 | 170,593,404 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,508 | py | """sokubaikai_app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import path
from . import views
# Namespace used by {% url 'sokubaikai:...' %} lookups.
app_name = 'sokubaikai'

urlpatterns = [
    # path('', views.top(), name='home'),
    # path('', views.top(), name='home'),
    # path('post/<int:pk>/', views.BlogDetailView.as_view(), name='post_detail'),
    # The routes below dispatch into this application's class-based views.
    # path('', views.BlogDetailView.as_view(), name='sokubaikai'),
    path('', views.IndexView.as_view(), name='home'),
    path('sokubaikai-list/', views.SokubaikaiListView.as_view(), name='list'),
    # path('sokubaikai-detail/', views.SokubaikaiDetailView.as_view(), name='detail2'),
    path('sokubaikai-detail/<int:pk>/', views.SokubaikaiDetailView.as_view(), name='detail'),
    path('sokubaikai-detail/new/', views.SokubaikaiCreateView.as_view(), name='new'),
    # Sample view.
    path('aaa/', views.MyView.as_view(), name='home2'),
]
| [
"a@example.com"
] | a@example.com |
005104a1ab267b5e9e5e794dae9db08901943e52 | ace682cba55e65ec1e07f4943d74e1d23b070b48 | /demo5.py | ce995ec9df09faef8fe527ce9c7ec4b744ea28bf | [
"BSD-3-Clause"
] | permissive | andy31lewis/brySVG | 122e4ecf52fc94cc5952a7cd05e6019ba875b812 | a504d95214418aa5c0138992bc1c5355dfa1cf28 | refs/heads/master | 2021-08-16T18:07:14.061378 | 2021-06-12T16:50:27 | 2021-06-12T16:50:27 | 140,744,896 | 11 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,520 | py | from browser import document
import brySVG.drawcanvas as SVG
def onButtonClick(event):
    """Activate the clicked tool button and update the canvas tool and cursor."""
    event.stopPropagation()
    tool = event.currentTarget.id
    # Reset every button to the inactive colour, then highlight the chosen one.
    for btn in buttons.values():
        btn.setFillColour("pink")
    buttons[tool].setFillColour("lime")
    canvas.setTool(tool)
    # Editing tools use their own cursor image; drawing tools share a prefix.
    if tool in ("select", "insertpoint", "deletepoint"):
        cursorname = tool
    else:
        cursorname = "draw" + tool
    canvas.style.cursor = "url(cursors/{}.png), auto".format(cursorname)
def onDoubleClick(event):
    """Deselect all tools: highlight 'select' and restore the default cursor."""
    for btn in buttons.values():
        btn.setFillColour("pink")
    buttons["select"].setFillColour("lime")
    canvas.style.cursor = "auto"
# Build the drawing canvas and attach it to the page.
canvas = SVG.CanvasObject("95vw", "100%", "cyan", objid="canvas")
document["demo5"] <= canvas
canvas.bind("dblclick", onDoubleClick)

# Template shape drawn on each tool button, keyed by tool name.
icons = {
    "polyline": SVG.PolylineObject([(-25,0), (0,-25), (12,25)], linewidth=3),
    "polygon": SVG.PolygonObject([(-25,0), (0,-25), (12,25)], linewidth=3),
    "rectangle": SVG.RectangleObject([(-50,-25), (50,25)], linewidth=5),
    "ellipse": SVG.EllipseObject([(-50,-25), (50,25)], linewidth=5),
    "sector": SVG.SectorObject((-25,0), 50, 60, 120, linewidth=3),
    "bezier": SVG.BezierObject([(None,(-25,0),(0,-12)), ((0,-12),(0,-25),(25,-25)), ((25,0),(12,25),None)], linewidth=3),
    "closedbezier": SVG.ClosedBezierObject([((-12,12),(-25,0),(0,-12)), ((0,-12),(0,-25),(25,-25)), ((25,0),(12,25),(-12,12))], linewidth=3),
    "smoothbezier": SVG.SmoothBezierObject([(-25,0), (0,-25), (12,25)], linewidth=3),
    "smoothclosedbezier": SVG.SmoothClosedBezierObject([(-25,0), (0,-25), (12,25)], linewidth=3),
    "select": SVG.PolygonObject([(-20,-20), (20,-5), (5,0), (25,20), (20,25), (0,5), (-5,20)], linewidth=3, fillcolour="none"),
    "insertpoint": SVG.GroupObject([SVG.BezierObject([(None,(0,25),(-60,-40)), ((60,-40),(0,25),None)], linewidth=3),SVG.LineObject([(-10,-5),(10,-5)], linewidth=3), SVG.LineObject([(0,-15),(0,5)], linewidth=3)]),
    "deletepoint": SVG.GroupObject([SVG.BezierObject([(None,(0,25),(-60,-40)), ((60,-40),(0,25),None)], linewidth=3),SVG.LineObject([(-10,-5),(10,-5)], linewidth=3)])
}
icons["select"].style.strokeLinejoin = "round"

n = 9  # buttons per column
iconsize = 50
# Lay the buttons out in columns of n, wiring each to onButtonClick.
buttons = {tool: SVG.ImageButton((10+(i//n)*(iconsize+10),5+(i%n)*(iconsize+10)), (iconsize, iconsize), icons[tool], onButtonClick, fillcolour="pink", canvas=canvas, objid=tool) for i, tool in enumerate(icons)}
for button in buttons.values():
    canvas.addObject(button)

canvas.mouseMode = SVG.MouseMode.DRAW
# Fit the view to the buttons, then pin the viewbox to the left edge.
[(x1, y1), (x2, y2)] = canvas.fitContents()
canvas.setViewBox([(0, y1), (x2-x1, y2)])
| [
"a.lewis@mathsanswers.org.uk"
] | a.lewis@mathsanswers.org.uk |
9aba9166ea828d0f0f34e028541acae11ebffba2 | e3376c04ecca6eaf0186f8a38eef245e03ddbe92 | /Modulo1/02_Segunda_Semana/Condicional1.py | a64afd04945e083015e229abfb35aab17db5c33a | [] | no_license | lamorenove/Ejercicios-Python | d0b31810ba5d0e6d4ab922b5e325ed76cc57e324 | ee26d1dd30db8c7e088a668e11dc635b779d933a | refs/heads/master | 2023-06-24T05:46:33.383620 | 2021-07-22T17:20:23 | 2021-07-22T17:20:23 | 385,313,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 717 | py | '''
CONDICIONAL SIMPLE
Para escribir progrmas útiles, casi simpre necesitamos la capacidad de comprobar ciertas condiciones y cambiar el comportamiento del programa.
Las sentencias concidionales nos dan esta capacidad. La forma más sencilla es la sentencia if
'''
'''
Algoritmo que pide un color, si se digita el color
rojo en minúscula, imprime en pantalla "Tiene buen gusto !",
si no, simplemente imprime "Le gusta el color" y el nombre
del color digitado:
Inicio
LEER color
Si color == "rojo"
Imprimir "Tiene Buen Gusto!"
Fin Si.
IMPRIMIR "Le gusta el color: ", color
Fin
'''
# Read a colour and react only to the exact lowercase value "rojo".
color = input("Digite el color: ")
if color == "rojo":
    print("Tiene buen gusto")
# Runs unconditionally, whatever colour was entered.
print("Le gusta el color: ", color)
"lamorenove@gmail.com"
] | lamorenove@gmail.com |
6d0cba5ca8241ae0bba222073319191563f8268c | 5c31875cd922113a89fd2fde355c4dce03968249 | /framework/views.py | 095db861ec8970dd2efbce50f952865d57fa2bf3 | [] | no_license | AndreaBravi/red-crows | 1f75a5c2b047f60015bfd527c5f4cf11970d458a | c333d5083f9e7cd2dec701f103134971bfbfcdb2 | refs/heads/master | 2021-01-19T17:41:58.366116 | 2015-03-18T01:43:10 | 2015-03-18T01:43:10 | 31,160,931 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,449 | py | from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.views import generic
from framework.models import Choice, Poll, Musician, Review, Reviewer, Music
from framework.forms import MusicianForm, ReviewerForm
class IndexView2(generic.ListView):
    # Poll index page (Django tutorial pattern).
    template_name = 'polls/index.html'
    context_object_name = 'latest_poll_list'

    def get_queryset(self):
        """Return the last five published polls."""
        return Poll.objects.order_by('-pub_date')[:5]


class IndexView(generic.base.TemplateView):
    # Site landing page.
    template_name = 'base/index.html'


class MusicianView(generic.DetailView):
    model = Musician
    template_name = 'base/musician.html'
class MusicView(generic.DetailView):
    model = Music
    template_name = 'base/music.html'


class ReviewerView(generic.DetailView):
    model = Reviewer
    template_name = 'base/reviewer.html'


class ReviewView(generic.DetailView):
    model = Review
    template_name = 'base/review.html'
# Create
class CreateMusicianView(generic.edit.CreateView):
    """Musician sign-up; binds the new profile to the logged-in user."""
    form_class = MusicianForm
    template_name = 'base/create/createmusician.html'
    success_url = 'thanks/'

    def post(self, request):
        # NOTE(review): the form is saved without calling is_valid();
        # invalid submissions will raise instead of re-rendering — confirm.
        form = self.form_class(request.POST, request.FILES)
        form = form.save(commit=False)
        form.email = request.user.email
        form.user = request.user
        form.save()
        return HttpResponseRedirect(self.success_url)


class CreateReviewerView(generic.edit.CreateView):
    """Reviewer sign-up; binds the new profile to the logged-in user."""
    form_class = ReviewerForm
    template_name = 'base/create/createreviewer.html'
    success_url = 'thanks/'

    def post(self, request):
        # NOTE(review): same unvalidated-save pattern as CreateMusicianView.
        form = self.form_class(request.POST, request.FILES)
        form = form.save(commit=False)
        form.user = request.user
        form.email = request.user.email
        form.save()
        return HttpResponseRedirect(self.success_url)


class CreateMusicView(generic.edit.CreateView):
    model = Music
    template_name = 'base/create/createmusic.html'
    fields = ['musician', 'product_type', 'title', 'description', 'product_picture']
    success_url = 'thanks/'
    # musician should be auto-populated


class CreateReviewView(generic.edit.CreateView):
    model = Review
    template_name = 'base/create/createreview.html'
    fields = ['musician', 'reviewer', 'product', 'title', 'body', 'score']
    success_url = 'thanks/'
    # reviewer should be auto-populated
# Lists
class ListMusicianView(generic.ListView):
    template_name = 'base/lists/listmusician.html'
    context_object_name = 'musician_list'

    def get_queryset(self):
        # Newest first.
        return Musician.objects.order_by('-created')


class ListReviewerView(generic.ListView):
    template_name = 'base/lists/listreviewer.html'
    context_object_name = 'reviewer_list'

    def get_queryset(self):
        # Newest first.
        return Reviewer.objects.order_by('-created')


class ListReviewView(generic.ListView):
    template_name = 'base/lists/listreview.html'
    context_object_name = 'review_list'

    def get_queryset(self):
        # Newest first.
        return Review.objects.order_by('-created')


class ListMusicView(generic.ListView):
    template_name = 'base/lists/listmusic.html'
    context_object_name = 'music_list'

    def get_queryset(self):
        # Newest first.
        return Music.objects.order_by('-created')


# Thanks
class ThanksView(generic.base.TemplateView):
    # Static confirmation page shown after successful create actions.
    template_name = 'base/thanks.html'
| [
"abravi@apple.com"
] | abravi@apple.com |
1095ed5e0a63b1a5cd770baaadf5d0f94e2770b7 | 32cb0be487895629ad1184ea25e0076a43abba0a | /LifePictorial/top/api/rest/HanoiDataserviceWriteRequest.py | 4546e30919759891e21635e1140fc9b81459efb1 | [] | no_license | poorevil/LifePictorial | 6814e447ec93ee6c4d5b0f1737335601899a6a56 | b3cac4aa7bb5166608f4c56e5564b33249f5abef | refs/heads/master | 2021-01-25T08:48:21.918663 | 2014-03-19T08:55:47 | 2014-03-19T08:55:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 365 | py | '''
Created by auto_sdk on 2014-02-10 16:59:30
'''
from top.api.base import RestApi
class HanoiDataserviceWriteRequest(RestApi):
    """Taobao TOP API request wrapper for taobao.hanoi.dataservice.write."""
    def __init__(self, domain='gw.api.taobao.com', port=80):
        RestApi.__init__(self, domain, port)
        # Request parameters; the caller populates these before sending.
        self.app_name = None
        self.data = None
        self.params = None

    def getapiname(self):
        # API method name used by the RestApi base when building the request.
        return 'taobao.hanoi.dataservice.write'
| [
"poorevil@gmail.com"
] | poorevil@gmail.com |
2575dcbfeb15eeac9bc117f6e1b0c41e27116d61 | b4eda202d51e2677f6f1584ed8371ff36f12b421 | /easy/number_len_sort.py | 608cf8bdb994d686008a6b1281ecbcd009d487d8 | [] | no_license | OM-Ra/interview_python | af3d66049aa985ae2fc2f1deb30988f7245a7a8c | 70d4464d9b54a5fce1a51765fa86688af3d1a8de | refs/heads/master | 2023-08-31T14:59:20.894025 | 2021-10-21T18:52:28 | 2021-10-21T18:52:28 | 386,342,155 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,623 | py | # -*- coding: utf-8 -*-
'''
Напишите функцию, которая будет принимать список чисел и возвращать
список тех же чисел, но в отсортированном виде.
Сортировать числа нужно по их длине.
Число, состоящее из одной цифры, будет идти первым,
а число с наибольшим количеством цифр — последним.
Если два и больше чисел имеют одинаковое количество цифр,
они должны располагаться в том же порядке, в каком стояли в исходном списке.
Примеры:
number_len_sort([1, 54, 1, 2, 463, 2]) [1, 1, 2, 2, 54, 463]
number_len_sort([999, 421, 22, 990, 32]) [22, 32, 999, 421, 990]
number_len_sort([9, 8, 7, 6, 5, 4, 31, 2, 1, 3]) [9, 8, 7, 6, 5, 4, 2, 1, 3, 31]
'''
from typing import List
def number_len_sort(arr: List[int]) -> List[int]:
    """Return *arr* sorted by digit count; equal-length numbers keep their order."""
    # sorted() is stable, so ties preserve the original relative order.
    return sorted(arr, key=lambda number: len(str(number)))
# Tests: (input, expected) pairs.
tests = (
    ([1, 54, 1, 2, 463, 2], [1, 1, 2, 2, 54, 463]),
    ([999, 421, 22, 990, 32], [22, 32, 999, 421, 990]),
    ([9, 8, 7, 6, 5, 4, 31, 2, 1, 3], [9, 8, 7, 6, 5, 4, 2, 1, 3, 31])
)

for index, item in enumerate(tests):
    res = number_len_sort(arr=item[0])
    assert res == item[1], f'test:{index:>02} >>> {item[0]} -> {res} != {item[1]}'
| [
"syndeft@gmail.com"
] | syndeft@gmail.com |
03cea530563ac06991a2fb6e489a54a8b9af1f84 | 2a069c0c4ed0261f7252c8ada8cd727f138c5f62 | /homework/jinx.py | 17cf8a77266b85705d2e0d1bb321c0fe578c518a | [] | no_license | 790212828/python_pratices1 | 7c9d919df3649e3cca966de0bb7054e08f3efa60 | 925c391e0fb1c7d73b8c04d5fb052b66f819e118 | refs/heads/master | 2023-04-22T11:38:41.422725 | 2021-05-02T09:15:45 | 2021-05-02T09:15:45 | 363,607,380 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 676 | py | from ez import EZ
from hero import Hero
class Jinx(Hero):
    """Concrete hero with Jinx's stats; combat logic is inherited from Hero.

    Cleanup: removed a large commented-out duplicate of Hero.fight() that was
    dead code shadowing the inherited implementation's history.
    """
    hp = 1000        # starting hit points
    power = 210      # damage dealt per attack
    name = "Jinx"
    hero_line = "Jinx 进攻"  # runtime battle cry, kept verbatim
if __name__ == '__main__':
    # Demo battle: each hero attacks using the other's current stats.
    ez = EZ()
    jinx = Jinx()
    jinx.fight(ez.hp, ez.power)
    ez.fight(jinx.hp, jinx.power)
"790212828@qq.com"
] | 790212828@qq.com |
5459506df297d93b8fd7fb89dd2efc2325135cc4 | 04b777aa3ac3ad998c388e2a74cf90f9babccddd | /plane_main.py | 747eca293e04fbadf88353a96f146f3d447d2af2 | [] | no_license | Sleepybear32-E/Sleepybear.github.io | b6b34e2206b5af452b396c4158e0bda35c9b11a5 | 3a3d700b7b56cdb9f09876399d3c04282a35ed07 | refs/heads/master | 2020-03-25T08:44:30.639799 | 2018-08-05T16:43:19 | 2018-08-05T16:43:19 | 143,630,152 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,245 | py | from plane_sprites import *
class Planegame(object):
    """Main game object: owns the window, the sprite groups and the event loop."""

    def __init__(self):
        # 1. Create the game window.
        self.screen = pygame.display.set_mode(SCREEN_RECT.size)
        # 2. Set the window title.
        pygame.display.set_caption("Michael's PLANEGAME")
        # 3. Create the game clock.
        self.clock = pygame.time.Clock()
        # 4. Build the sprite groups (private helper).
        self.__create_sprites()
        # 5. Spawn an enemy every 250 ms.
        pygame.time.set_timer(CREATE_ENEMY_EVENT, 250)
        # 6. Fire a bullet every 500 ms.
        pygame.time.set_timer(CREATE_BULLET_EVENT, 500)

    def __create_sprites(self):
        # Two background sprites give seamless vertical scrolling.
        self.bg1 = Background()
        self.bg2 = Background(True)
        self.back_group = pygame.sprite.Group(self.bg1, self.bg2)
        # Enemy sprite group (filled by the timer event).
        self.enemy_group = pygame.sprite.Group()
        # Player plane.
        self.hero = MainPlane()
        self.main_plane = pygame.sprite.Group(self.hero)

    def start_game(self):
        """Run the main loop: tick, handle events, collide, draw, flip."""
        while True:
            # 1. Cap the frame rate.
            self.clock.tick(CLOCK_FRAME)
            # 2. Event handling.
            self.__event_handler()
            # 3. Collision detection.
            self.__check_collide_()
            # 4. Update and draw the sprite groups.
            self.__update_sprites_()
            # 5. Flip the display.
            pygame.display.update()

    def __event_handler(self):
        for event in pygame.event.get():
            # Quit the game.
            if event.type == pygame.QUIT:
                # Bug fix: previously called game.__game_over() through the
                # module-level `game` global; use self instead.
                self.__game_over()
            # Create an enemy and add it to its group.
            elif event.type == CREATE_ENEMY_EVENT:
                self.enemy = Enemy()
                self.enemy.update()
                self.enemy_group.add(self.enemy)
            # Fire.
            elif event.type == CREATE_BULLET_EVENT:
                self.hero.fire()
        # Keyboard: move the hero right/left, or stop.
        keys_pressed = pygame.key.get_pressed()
        if keys_pressed[pygame.K_RIGHT]:
            self.hero.update()
            self.hero.speed = 2
        elif keys_pressed[pygame.K_LEFT]:
            self.hero.update()
            self.hero.speed = -2
        else:
            self.hero.update()
            self.hero.speed = 0

    def __check_collide_(self):
        # Bullets destroy enemies (both sprites removed).
        pygame.sprite.groupcollide(self.hero.bullet_group, self.enemy_group, True, True)
        # An enemy reaching the hero ends the game.
        if pygame.sprite.groupcollide(self.main_plane, self.enemy_group, False, True):
            self.hero.kill()
            # Bug fix: was game.__game_over() via the module global.
            self.__game_over()

    def __update_sprites_(self):
        # Draw order: background, enemies, hero, bullets.
        self.back_group.update()
        self.back_group.draw(self.screen)
        self.enemy_group.update()
        self.enemy_group.draw(self.screen)
        self.main_plane.update()
        self.main_plane.draw(self.screen)
        self.hero.bullet_group.update()
        self.hero.bullet_group.draw(self.screen)

    # Static method: no instance state needed to quit.
    @staticmethod
    def __game_over():
        pygame.quit()
        exit()
if __name__ == '__main__':
    # Build the game and enter the main loop (never returns).
    game = Planegame()
    game.start_game()
| [
"noreply@github.com"
] | Sleepybear32-E.noreply@github.com |
88a98959dc34080d7672403f049a3d324137a9f1 | 71c433c0b1ac502e2099346cdfcf071429683dbf | /checkout/tests/__init__.py | 3f62ea7ccd270a59728e9f580874df7edb802aa4 | [] | no_license | mrd136/BDMS-ERP | 54c04bd96ed72cb74dadbb11539a0f32574c7f97 | 7187bf22cfc879aa55af49491885dfe382a88992 | refs/heads/main | 2023-08-02T17:02:13.734237 | 2021-09-14T20:16:11 | 2021-09-14T20:16:11 | 406,798,326 | 0 | 1 | null | 2021-09-15T14:22:28 | 2021-09-15T14:22:27 | null | UTF-8 | Python | false | false | 217 | py | # Copyright 2021 Vauxoo (https://www.vauxoo.com) <info@vauxoo.com>
# License OPL-1 (https://www.odoo.com/documentation/user/13.0/legal/licenses/licenses.html).
from . import test_checkout
from . import checkout_data
| [
"michael@sunguza.com"
] | michael@sunguza.com |
7e35f883917daa22e345865897b179caecaa535c | 5a3d41024bf4c1d2a0c6bdabe160333066cd34b7 | /ocdaction/tasks/migrations/0005_created_initial_anxiety_score_model_rename_task_name.py | 51ed5c41016af020ecc5900da67f7955a306f5ad | [] | no_license | HJSFoundation/ocd-action | 6271ef9cf5e8225c85f94f409d721dcc00870910 | c90409bb56dc709b974ffc7b68fa2f8f4bb9aae9 | refs/heads/master | 2021-06-21T13:51:20.183751 | 2017-08-29T21:22:44 | 2017-08-29T21:22:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 445 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2017-02-05 13:58
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('tasks', '0004_created_initial_anxiety_score_model'),
]
operations = [
migrations.RenameField(
model_name='task',
old_name='taskname',
new_name='task_name',
),
]
| [
"Masher@iMac.home"
] | Masher@iMac.home |
ff0442a8f224f25bec3735cab46e6f77f63da826 | 7a3757a341fb1c5a06482e2e5cb066a967a6eff5 | /app/schemas/similarity.py | 257682f2095194b137bd953deb64c0a4640937e0 | [
"MIT"
] | permissive | ninoseki/uzen | 4bff6080b9c0677dcf25abc0f104eca3fb92ed8a | 2a0065aa57fe3891c46e1174c1dc9aab673e52a8 | refs/heads/master | 2023-09-02T01:59:18.893712 | 2022-08-28T09:49:12 | 2022-08-28T09:49:12 | 241,092,872 | 87 | 9 | MIT | 2023-06-01T01:08:05 | 2020-02-17T11:37:59 | Python | UTF-8 | Python | false | false | 1,485 | py | from functools import lru_cache
from typing import List, Optional
from pydantic import Field, validator
from app.schemas.base import APIModel
from app.schemas.snapshot import PlainSnapshot, SnapshotSearchFilters
class SimilarityScan(APIModel):
"""Similarity scan payload"""
html: str = Field(...)
threshold: Optional[float] = Field(None)
exclude_hostname: Optional[str] = Field(None, description="Hostname to exclude")
exclude_ip_address: Optional[str] = Field(None, description="IP address to exclude")
@validator("threshold")
def threshold_value(cls, v: Optional[float]):
if v is None:
return v
if v < 0.0:
raise ValueError("Threshold should be greather than 0.0")
if v > 1.0:
raise ValueError("Threshold should be smaller than 1.0")
return v
class SimilarityScanWithSearchOptions(SimilarityScan):
"""Similarity scan payload with search options"""
size: Optional[int] = Field(None)
offset: Optional[int] = Field(None)
filters: SnapshotSearchFilters = Field(...)
class SimilarityScanResult(PlainSnapshot):
"""Similarity scan result + snapshot"""
similarity: float = Field(...)
@classmethod
@lru_cache(maxsize=1)
def field_keys(cls) -> List[str]:
keys = list(cls.__fields__.keys())
for non_db_key in ["similarity", "tags"]:
if non_db_key in keys:
keys.remove(non_db_key)
return keys
| [
"manabu.niseki@gmail.com"
] | manabu.niseki@gmail.com |
c1f138c3870e76e44ee79a563e9a268b36afc646 | 285539d4997b438eed0a428171ec5685594b5b8c | /my_library/model/__init__.py | 5a0a48f1a634290133deb544cab4d6bce397a01e | [] | no_license | p-null/Forum-Data-Mining | 671ef3fffd3bf59b14f5def57a1f825f7b6212d6 | 35b3139c709a1157ebe5288993a5c941b1043ebc | refs/heads/master | 2022-03-22T21:06:28.567450 | 2019-11-22T05:26:45 | 2019-11-22T05:26:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 59 | py | from my_library.model.text_classifier import TextClassifier | [
"tingkai.zhang@outlook.com"
] | tingkai.zhang@outlook.com |
58d19a48f36d79f7d419d5fb3667070e294f8251 | b0bba8c8838e4c3c57ecd969216bc15c74d435fa | /Scripts/ReplaceMesh.py | 5da723df0a42f9a1558ff36cf0043abd4682d311 | [] | no_license | MikeWhitaker/GameResource | a6258b17529837a9a31e8190a602d1d100c5aa67 | 58e08c58825cc4a3f950355befb6714e7515ee4a | refs/heads/master | 2021-01-13T08:53:23.317330 | 2016-11-22T09:31:00 | 2016-11-22T09:31:00 | 72,001,294 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 164 | py | import bpy
# the name of the mesh to copy to all selected objects
mesh = bpy.data.meshes["Cube"]
for o in bpy.data.objects:
if o.select:
o.data = mesh | [
"whitaker.mj@gmail.com"
] | whitaker.mj@gmail.com |
d8470e59e499648032b0b3332133ee5aa0924156 | 457258ba74e4e6a6ba90dfcca405d71e04892bde | /interpolation/smolyak/grid.py | f16123952f36cfa40a973cb78e25fb61dcb7ae62 | [
"BSD-2-Clause"
] | permissive | EconForge/interpolation.py | 4683f30157c2cf4738de837f2dec8eb2dac865cf | 19b2cd3882003c19b7aeb7c35fca5cdad3fe1d5e | refs/heads/master | 2023-08-08T06:52:03.394943 | 2023-07-25T20:57:21 | 2023-07-25T20:57:21 | 8,406,709 | 116 | 37 | BSD-2-Clause | 2023-07-25T20:46:36 | 2013-02-25T10:03:49 | Python | UTF-8 | Python | false | false | 25,964 | py | """
This file contains a class that builds a Smolyak Grid. The hope is that
it will eventually contain the interpolation routines necessary so that
the given some data, this class can build a grid and use the Chebychev
polynomials to interpolate and approximate the data.
Method based on Judd, Maliar, Maliar, Valero 2013 (W.P)
Authors
=======
- Chase Coleman (ccoleman@stern.nyu.edu)
- Spencer Lyon (slyon@stern.nyu.edu)
References
==========
Judd, Kenneth L, Lilia Maliar, Serguei Maliar, and Rafael Valero. 2013.
"Smolyak Method for Solving Dynamic Economic Models: Lagrange
Interpolation, Anisotropic Grid and Adaptive Domain".
Krueger, Dirk, and Felix Kubler. 2004. "Computing Equilibrium in OLG
Models with Stochastic Production." Journal of Economic Dynamics and
Control 28 (7) (April): 1411-1436.
"""
from __future__ import division
from operator import mul
from itertools import product, combinations_with_replacement
from itertools import chain
import numpy as np
from scipy.linalg import lu
from functools import reduce
from .util import *
## --------------- ##
# - Building Blocks -#
## --------------- ##
__all__ = [
"num_grid_points",
"m_i",
"cheby2n",
"s_n",
"a_chain",
"phi_chain",
"smol_inds",
"build_grid",
"build_B",
"SmolyakGrid",
]
def num_grid_points(d, mu):
"""
Checks the number of grid points for a given d, mu combination.
Parameters
----------
d, mu : int
The parameters d and mu that specify the grid
Returns
-------
num : int
The number of points that would be in a grid with params d, mu
Notes
-----
This function is only defined for mu = 1, 2, or 3
"""
if mu == 1:
return 2 * d + 1
if mu == 2:
return 1 + 4 * d + 4 * d * (d - 1) / 2.0
if mu == 3:
return 1 + 8 * d + 12 * d * (d - 1) / 2.0 + 8 * d * (d - 1) * (d - 2) / 6.0
def m_i(i):
r"""
Compute one plus the "total degree of the interpolating
polynoimals" (Kruger & Kubler, 2004). This shows up many times in
Smolyak's algorithm. It is defined as:
.. math::
m_i = \begin{cases}
1 \quad & \text{if } i = 1 \\
2^{i-1} + 1 \quad & \text{if } i \geq 2
\end{cases}
Parameters
----------
i : int
The integer i which the total degree should be evaluated
Returns
-------
num : int
Return the value given by the expression above
"""
if i < 0:
raise ValueError("i must be positive")
elif i == 0:
return 0
elif i == 1:
return 1
else:
return 2 ** (i - 1) + 1
def chebyvalto(x, n, kind=1.0):
"""
Computes first :math:`n` Chebychev polynomials of the first kind
evaluated at each point in :math:`x` and places them side by side
in a matrix. NOTE: Not including the first Chebychev polynomial
because it is simply a set of ones
Parameters
----------
x : float or array(float)
A single point (float) or an array of points where each
polynomial should be evaluated
n : int
The integer specifying which Chebychev polynomial is the last
to be computed
kind : float, optional(default=1.0)
The "kind" of Chebychev polynomial to compute. Only accepts
values 1 for first kind or 2 for second kind
Returns
-------
results : array (float, ndim=x.ndim+1)
The results of computation. This will be an :math:`(n+1 \\times
dim \\dots)` where :math:`(dim \\dots)` is the shape of x. Each
slice along the first dimension represents a new Chebychev
polynomial. This dimension has length :math:`n+1` because it
includes :math:`\\phi_0` which is equal to 1 :math:`\\forall x`
"""
x = np.asarray(x)
row, col = x.shape
ret_matrix = np.zeros((row, col * (n - 1)))
init = np.ones((row, col))
ret_matrix[:, :col] = x * kind
ret_matrix[:, col : 2 * col] = 2 * x * ret_matrix[:, :col] - init
for i in range(3, n):
ret_matrix[:, col * (i - 1) : col * (i)] = (
2 * x * ret_matrix[:, col * (i - 2) : col * (i - 1)]
- ret_matrix[:, col * (i - 3) : col * (i - 2)]
)
return ret_matrix
def cheby2n(x, n, kind=1.0):
"""
Computes the first :math:`n+1` Chebychev polynomials of the first
kind evaluated at each point in :math:`x` .
Parameters
----------
x : float or array(float)
A single point (float) or an array of points where each
polynomial should be evaluated
n : int
The integer specifying which Chebychev polynomial is the last
to be computed
kind : float, optional(default=1.0)
The "kind" of Chebychev polynomial to compute. Only accepts
values 1 for first kind or 2 for second kind
Returns
-------
results : array (float, ndim=x.ndim+1)
The results of computation. This will be an :math:`(n+1 \\times
dim \\dots)` where :math:`(dim \\dots)` is the shape of x. Each
slice along the first dimension represents a new Chebychev
polynomial. This dimension has length :math:`n+1` because it
includes :math:`\\phi_0` which is equal to 1 :math:`\\forall x`
"""
x = np.asarray(x)
dim = x.shape
results = np.zeros((n + 1,) + dim)
results[0, ...] = np.ones(dim)
results[1, ...] = x * kind
for i in range(2, n + 1):
results[i, ...] = 2 * x * results[i - 1, ...] - results[i - 2, ...]
return results
def s_n(n):
"""
Finds the set :math:`S_n` , which is the :math:`n` th Smolyak set of
Chebychev extrema
Parameters
----------
n : int
The index :math:`n` specifying which Smolyak set to compute
Returns
-------
s_n : array (float, ndim=1)
An array containing all the Chebychev extrema in the set
:math:`S_n`
"""
if n == 1:
return np.array([0.0])
# Apply the necessary transformation to get the nested sequence
m_i = 2 ** (n - 1) + 1
# Create an array of values that will be passed in to calculate
# the set of values
comp_vals = np.arange(1.0, m_i + 1.0)
# Values are - cos(pi(j-1)/(n-1)) for j in [1, 2, ..., n]
vals = -1.0 * np.cos(np.pi * (comp_vals - 1.0) / (m_i - 1.0))
vals[np.where(np.abs(vals) < 1e-14)] = 0.0
return vals
def a_chain(n):
"""
Finds all of the unidimensional disjoint sets of Chebychev extrema
that are used to construct the grid. It improves on past algorithms
by noting that :math:`A_{n} = S_{n}` [evens] except for :math:`A_1
= \{0\}` and :math:`A_2 = \{-1, 1\}` . Additionally, :math:`A_{n} =
A_{n+1}` [odds] This prevents the calculation of these nodes
repeatedly. Thus we only need to calculate biggest of the S_n's to
build the sequence of :math:`A_n` 's
Parameters
----------
n : int
This is the number of disjoint sets from Sn that this should make
Returns
-------
A_chain : dict (int -> list)
This is a dictionary of the disjoint sets that are made. They are
indexed by the integer corresponding
"""
# # Start w finding the biggest Sn(We will subsequently reduce it)
Sn = s_n(n)
A_chain = {}
A_chain[1] = [0.0]
A_chain[2] = [-1.0, 1.0]
# Need a for loop to extract remaining elements
for seq in range(n, 2, -1):
num = Sn.size
# Need odd indices in python because indexing starts at 0
A_chain[seq] = tuple(Sn[range(1, num, 2)])
# A_chain.append(list(Sn[range(1, num, 2)]))
Sn = Sn[range(0, num, 2)]
return A_chain
def phi_chain(n):
"""
For each number in 1 to n, compute the Smolyak indices for the
corresponding basis functions. This is the :math:`n` in
:math:`\\phi_n`
Parameters
----------
n : int
The last Smolyak index :math:`n` for which the basis polynomial
indices should be found
Returns
-------
aphi_chain : dict (int -> list)
A dictionary whose keys are the Smolyak index :math:`n` and
values are lists containing all basis polynomial subscripts for
that Smolyak index
"""
# First create a dictionary
aphi_chain = {}
aphi_chain[1] = [1]
aphi_chain[2] = [2, 3]
curr_val = 4
for i in range(3, n + 1):
end_val = 2 ** (i - 1) + 1
temp = range(curr_val, end_val + 1)
aphi_chain[i] = temp
curr_val = end_val + 1
return aphi_chain
## ---------------------- ##
# - Construction Utilities -#
## ---------------------- ##
def smol_inds(d, mu):
"""
Finds all of the indices that satisfy the requirement that
:math:`d \leq \sum_{i=1}^d \leq d + \mu`.
Parameters
----------
d : int
The number of dimensions in the grid
mu : int or array (int, ndim=1)
The parameter mu defining the density of the grid. If an array,
there must be d elements and an anisotropic grid is formed
Returns
-------
true_inds : array
A 1-d Any array containing all d element arrays satisfying the
constraint
Notes
-----
This function is used directly by build_grid and poly_inds
"""
if isinstance(mu, int):
max_mu = mu
else:
if mu.size != d:
raise ValueError("mu must have d elements. It has %i" % mu.size)
max_mu = int(np.max(mu))
# Need to capture up to value mu + 1 so in python need mu+2
possible_values = range(1, max_mu + 2)
# find all (i1, i2, ... id) such that their sum is in range
# we want; this will cut down on later iterations
poss_inds = [
el
for el in combinations_with_replacement(possible_values, d)
if d < sum(el) <= d + max_mu
]
if isinstance(mu, int):
true_inds = [[el for el in permute(list(val))] for val in poss_inds]
else:
true_inds = [
[el for el in permute(list(val)) if all(el <= mu + 1)] for val in poss_inds
]
# Add the d dimension 1 array so that we don't repeat it a bunch
# of times
true_inds.extend([[[1] * d]])
tinds = list(chain.from_iterable(true_inds))
return tinds
def poly_inds(d, mu, inds=None):
"""
Build indices specifying all the Cartesian products of Chebychev
polynomials needed to build Smolyak polynomial
Parameters
----------
d : int
The number of dimensions in grid / polynomial
mu : int
The parameter mu defining the density of the grid
inds : list (list (int)), optional (default=None)
The Smolyak indices for parameters d and mu. Should be computed
by calling `smol_inds(d, mu)`. If None is given, the indices
are computed using this function call
Returns
-------
phi_inds : array : (int, ndim=2)
A two dimensional array of integers where each row specifies a
new set of indices needed to define a Smolyak basis polynomial
Notes
-----
This function uses smol_inds and phi_chain. The output of this
function is used by build_B to construct the B matrix
"""
if inds is None:
inds = smol_inds(d, mu)
if isinstance(mu, int):
max_mu = mu
else:
max_mu = max(mu)
aphi = phi_chain(max_mu + 1)
base_polys = []
for el in inds:
temp = [aphi[i] for i in el]
# Save these indices that we iterate through because
# we need them for the chebychev polynomial combination
# inds.append(el)
base_polys.extend(list(product(*temp)))
return base_polys
def build_grid(d, mu, inds=None):
"""
Use disjoint Smolyak sets to construct Smolyak grid of degree d and
density parameter :math:`mu`
The return value is an :math:`n \\times d` Array, where :math:`n`
is the number of points in the grid
Parameters
----------
d : int
The number of dimensions in the grid
mu : int
The density parameter for the grid
inds : list (list (int)), optional (default=None)
The Smolyak indices for parameters d and mu. Should be computed
by calling `smol_inds(d, mu)`. If None is given, the indices
are computed using this function call
Returns
-------
grid : array (float, ndim=2)
The Smolyak grid for the given d, :math:`mu`
"""
if inds is None:
inds = smol_inds(d, mu)
# Get An chain
if isinstance(mu, int):
An = a_chain(mu + 1)
else: # Anisotropic case
An = a_chain(max(mu) + 1)
points = []
# Need to get the correct indices
for el in inds:
temp = [An[i] for i in el]
# Save these indices that we iterate through because
# we need them for the chebychev polynomial combination
# inds.append(el)
points.extend(list(product(*temp)))
grid = np.array(points)
return grid
def build_B(d, mu, pts, b_inds=None, deriv=False):
"""
Compute the matrix B from equation 22 in JMMV 2013
Translation of dolo.numeric.interpolation.smolyak.SmolyakBasic
Parameters
----------
d : int
The number of dimensions on the grid
mu : int or array (int, ndim=1, legnth=d)
The mu parameter used to define grid
pts : array (float, dims=2)
Arbitrary d-dimensional points. Each row is assumed to be a new
point. Often this is the smolyak grid returned by calling
`build_grid(d, mu)`
b_inds : array (int, ndim=2)
The polynomial indices for parameters a given grid. These should
be computed by calling `poly_inds(d, mu)`.
deriv : bool
Whether or not to compute the values needed for the derivative matrix
B_prime.
Returns
-------
B : array (float, ndim=2)
The matrix B that represents the Smolyak polynomial
corresponding to grid
B_Prime : array (float, ndim=3), optional (default=false)
This will be the 3 dimensional array representing the gradient of the
Smolyak polynomial at each of the points. It is only returned when
`deriv=True`
"""
if b_inds is None:
inds = smol_inds(d, mu)
b_inds = poly_inds(d, mu, inds)
if isinstance(mu, int):
max_mu = mu
else:
max_mu = max(mu)
Ts = cheby2n(pts.T, m_i(max_mu + 1))
npolys = len(b_inds)
npts = pts.shape[0]
B = np.empty((npts, npolys), order="F")
for ind, comb in enumerate(b_inds):
B[:, ind] = reduce(mul, [Ts[comb[i] - 1, i, :] for i in range(d)])
if deriv:
# TODO: test this. I am going to bed.
Us = cheby2n(pts.T, m_i(max_mu + 1), kind=2.0)
Us = np.concatenate([np.zeros((1, d, npts)), Us], axis=0)
for i in range(Us.shape[0]):
Us[i, :, :] = Us[i, :, :] * i
der_B = np.zeros((npolys, d, npts))
for i in range(d):
for ind, comb in enumerate(b_inds):
der_B[ind, i, :] = reduce(
mul,
[
(Ts[comb[k] - 1, k, :] if i != k else Us[comb[k] - 1, k, :])
for k in range(d)
],
)
return B, der_B
return B
# def exp_B(d, mu, grid):
# """
# write a nice doc string if it works
# """
# npts = grid.shape[0]
# num_chebs = m_i(mu + 1)
# max_ind = d + mu
# aphi = phi_chain(mu + 1)
# B = np.ones((npts, npts))
# # These are simply all the values of phi_n (up to n=mu+1) where all
# # other indices on the phi are 1 (hence valued at 1)
# easy_B = chebyvalto(grid, num_chebs)
# B[:, :d*(num_chebs-1)] = easy_B
# # Create a tracker to keep track of indexes
# B_col_mrk = d*(num_chebs - 1)
# # Now we need to account for all the cross products
# # We have the values we need hiding in B already. No need to
# # compute any more. They are multiplications of different numbers
# # of elements from the pieces of easy_B.
# if mu==2:
# for i in range(d-1):
# mult_inds = np.hstack([np.arange(i+1, d), np.arange(d + (i+1), 2*d)])
# temp1 = easy_B[:, i].reshape(npts, 1) * easy_B[:, mult_inds]
# temp2 = temp2 = easy_B[:, i+d].reshape(npts, 1) * easy_B[:, mult_inds]
# new_cols = temp1.shape[1] + temp2.shape[1]
# B[:, B_col_mrk: B_col_mrk + new_cols] = np.hstack([temp1, temp2])
# B_col_mrk = B_col_mrk + new_cols
# #-----------------------------------------------------------------#
# #-----------------------------------------------------------------#
# # This part will be the general section. Above I am trying to
# # make it work with just mu=2
# # NOTE: Below this point the code is incomplete. At best this is
# # some general pseudo-code to write the generalization step. Hoping
# # to make it handle general cases.
# #-----------------------------------------------------------------#
# #-----------------------------------------------------------------#
# # for i in range(2, mu+1):
# # curr_ind = i
# # while True:
# # curr_dim = 2
# # curr_col = 0
# # # Find which possible polynomials can be reached (lowest is 2)
# # poss_inds = np.arange(2, m_i(some function of curr_ind, d, mu)+1)
# # for dd in range(d-1):
# # # Create range of d to be used to build the fancy index
# # mult_ind = np.arange(curr_col+1, d)
# # # Initialize array for fancy index. Want to add arange(d) + (d*i)
# # # for every chebyshev polynomial that we need to reach with these
# # # indexes
# # mult_inds = np.array([])
# # for tt in range(some condition for what is max polynomial we reach -1):
# # mult_inds = np.hstack([mult_inds, mult_inds + (d*tt)])
# # # this will create the column times all the stuff following it
# # # in the other indexes
# # temp1 = easy_B[:, curr_col] * easy_B[:, mult_inds]
# # new_cols = temp1.shape[1]
# # B[:, B_col_mrk: B_col_mrk + new_cols] = temp1
# # while d>curr_dim and condition for continuing is met:
# # curr_dim += 1
# # for mm in range(curr_col + 2, d-1):
# # for bb in mult_inds[:-1]:
# # temp2 = easy_B[:, bb*d + mm] * temp1
# # new_cols2 = temp2.shape[1]
# # B[:, B_col_mrk: B_col_mrk + new_cols2]
# # Need to continue code. It is not done yet
# return B
## ------------------ ##
# - Class: SmolyakGrid -#
## ------------------ ##
class SmolyakGrid(object):
"""
This class currently takes a dimension and a degree of polynomial
and builds the Smolyak Sparse grid. We base this on the work by
Judd, Maliar, Maliar, and Valero (2013).
Parameters
----------
d : int
The number of dimensions in the grid
mu : int or array(int, ndim=1, length=d)
The "density" parameter for the grid
Attributes
----------
d : int
This is the dimension of grid that you are building
mu : int
mu is a parameter that defines the fineness of grid that we
want to build
lb : array (float, ndim=2)
This is an array of the lower bounds for each dimension
ub : array (float, ndim=2)
This is an array of the upper bounds for each dimension
cube_grid : array (float, ndim=2)
The Smolyak sparse grid on the domain :math:`[-1, 1]^d`
grid: : array (float, ndim=2)
The sparse grid, transformed to the user-specified bounds for
the domain
inds : list (list (int))
This is a lists of lists that contains all of the indices
B : array (float, ndim=2)
This is the B matrix that is used to do lagrange interpolation
B_L : array (float, ndim=2)
Lower triangle matrix of the decomposition of B
B_U : array (float, ndim=2)
Upper triangle matrix of the decomposition of B
Examples
--------
>>> s = SmolyakGrid(3, 2)
>>> s
Smolyak Grid:
d: 3
mu: 2
npoints: 25
B: 0.65% non-zero
>>> ag = SmolyakGrid(3, [1, 2, 3])
>>> ag
Anisotropic Smolyak Grid:
d: 3
mu: 1 x 2 x 3
npoints: 51
B: 0.68% non-zero
"""
def __init__(self, d, mu, lb=None, ub=None):
self.d = d
if lb is None: # default is [-1, 1]^d
self.lb = -1 * np.ones(d)
elif isinstance(lb, int) or isinstance(lb, float): # scalar. copy it
self.lb = np.ones(d) * lb
elif isinstance(lb, list) or isinstance(lb, np.ndarray):
lb = np.asarray(lb)
if lb.size == d:
self.lb = lb
else:
raise ValueError(
"lb must be a scalar or array-like object" + "with d elements."
)
if ub is None: # default is [-1, 1]^d
self.ub = 1 * np.ones(d)
elif isinstance(ub, int) or isinstance(ub, float): # scalar. copy it
self.ub = np.ones(d) * ub
elif isinstance(ub, list) or isinstance(ub, np.ndarray):
ub = np.asarray(ub)
if ub.size == d:
self.ub = ub
else:
raise ValueError(
"lb must be a scalar or array-like object" + "with d elements."
)
if d <= 1:
raise ValueError("Number of dimensions must be >= 2")
if isinstance(mu, int): # Isotropic case
if mu < 1:
raise ValueError("The parameter mu needs to be > 1.")
self.mu = mu
self.inds = smol_inds(d, mu)
self.pinds = poly_inds(d, mu, inds=self.inds)
self.cube_grid = build_grid(self.d, self.mu, self.inds)
self.grid = self.cube2dom(self.cube_grid)
self.B = build_B(self.d, self.mu, self.cube_grid, self.pinds)
else: # Anisotropic case
mu = np.asarray(mu)
if any(mu < 1):
raise ValueError("Each element in mu needs to be > 1.")
if len(mu) != d:
raise ValueError("For Anisotropic grid, mu must have len d ")
self.mu = mu
self.inds = smol_inds(d, mu)
self.pinds = poly_inds(d, mu, inds=self.inds)
self.cube_grid = build_grid(self.d, self.mu, self.inds)
self.grid = self.cube2dom(self.cube_grid)
self.B = build_B(self.d, self.mu, self.cube_grid, self.pinds)
# Compute LU decomposition of B
l, u = lu(self.B, True) # pass permute_l as true. See scipy docs
self.B_L = l
self.B_U = u
def __repr__(self):
npoints = self.cube_grid.shape[0]
nz_pts = np.count_nonzero(self.B)
pct_nz = nz_pts / (npoints**2.0)
if isinstance(self.mu, int):
msg = "Smolyak Grid:\n\td: {0} \n\tmu: {1} \n\tnpoints: {2}"
msg += "\n\tB: {3:.2f}% non-zero"
return msg.format(self.d, self.mu, self.cube_grid.shape[0], pct_nz)
else: # Anisotropic grid
msg = "Anisotropic Smolyak Grid:"
msg += "\n\td: {0} \n\tmu: {1} \n\tnpoints: {2}"
msg += "\n\tB: {3:.2f}% non-zero"
mu_str = " x ".join(map(str, self.mu))
return msg.format(self.d, mu_str, self.cube_grid.shape[0], pct_nz)
def __str__(self):
return self.__repr__()
def dom2cube(self, pts):
"""
Takes a point(s) and transforms it(them) into the [-1, 1]^d domain
"""
# Could add some bounds checks to make sure points are in the
# correct domain (between low and up bounds) and if right dim
lb = self.lb
ub = self.ub
centers = lb + (ub - lb) / 2
radii = (ub - lb) / 2
trans_pts = (pts - centers) / radii
return trans_pts
def cube2dom(self, pts):
"""
Takes a point(s) and transforms it(them) from domain [-1, 1]^d
back into the desired domain
"""
# Also could use some bounds checks/other stuff to make sure
# that everything being passed in is viable
lb = self.lb
ub = self.ub
centers = lb + (ub - lb) / 2
radii = (ub - lb) / 2
inv_trans_pts = pts * radii + centers
return inv_trans_pts
def plot_grid(self):
"""
Beautifully plots the grid for the 2d and 3d cases
Parameters
----------
None
Returns
-------
None
"""
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
grid = self.grid
if grid.shape[1] == 2:
xs = grid[:, 0]
ys = grid[:, 1]
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlim(xs.min() - 0.5, xs.max() + 0.5)
ax.set_ylim(ys.min() - 0.5, ys.max() + 0.5)
ax.plot(xs, ys, ".", markersize=6)
ax.set_title("Smolyak grid: $d=%i, \; \\mu=%i$" % (self.d, self.mu))
plt.show()
elif grid.shape[1] == 3:
xs = grid[:, 0]
ys = grid[:, 1]
zs = grid[:, 2]
fig = plt.figure()
ax = fig.add_subplot(111, projection="3d")
ax.scatter(xs, ys, zs)
ax.set_title("Smolyak grid: $d=%i, \; \\mu=%i$" % (self.d, self.mu))
plt.show()
else:
raise ValueError("Can only plot 2 or 3 dimensional problems")
return fig
| [
"pablo.winant@gmail.com"
] | pablo.winant@gmail.com |
fd9a95536b8e4efdee560f0e82b8a24dfa6888e8 | d674f1b1b47d4e526e28c23372fda453b86dc456 | /TreeMaker/TreeMaker/python/__init__.py | fe1dca774304ed083c9e9ef6bb9f66a7688b9785 | [] | no_license | amkalsi/TauPOGUpgrade | 5ac29a8f1dd2fcfca922aa3ca88cca95cad5f72a | d348d2325051c783f12c79df721b415f50b89c45 | refs/heads/master | 2021-01-18T17:18:51.785086 | 2015-03-10T08:07:28 | 2015-03-10T08:07:28 | 24,678,260 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 190 | py | #Automatically created by SCRAM
import os
__path__.append(os.path.dirname(os.path.abspath(__file__).rsplit('/TreeMaker/TreeMaker/',1)[0])+'/cfipython/slc5_amd64_gcc472/TreeMaker/TreeMaker')
| [
"rimmy330@gmail.com"
] | rimmy330@gmail.com |
c47e5b5332aeb48b3740fc45c304e312bb2cc8c8 | 454d2dd8e8d181e8bd40e6c20a7a275699377ef6 | /Prac_04/total_income.py | 4524a4603e6aa44d82279e9850f8ed6dedd5afa2 | [] | no_license | RioThomas/CP1404_Practicals | d9af835ce2c23621d4e1385f60f5a69f5f21eadb | 2e9c725937155c82d8d5945bf9822fa405cca214 | refs/heads/master | 2022-11-04T16:33:17.974862 | 2020-06-18T02:09:20 | 2020-06-18T02:09:20 | 245,760,239 | 0 | 0 | null | 2020-06-18T02:09:21 | 2020-03-08T05:53:15 | Python | UTF-8 | Python | false | false | 694 | py | """
CP1404/CP5632 Practical
Cumulative total income program
"""
def main():
"""Display income report for incomes over a given number of months."""
incomes = []
number_of_months = int(input("How many months? "))
for month in range(1, number_of_months + 1):
income = float(input("Enter income for month " + str(month) + ": "))
incomes.append(income)
print_report(incomes)
def print_report(incomes):
print("\nIncome Report\n-------------")
total = 0
for month in range(len(incomes)):
income = incomes[month]
total += income
print("Month {:2} - Income: ${:10.2f} Total: ${:10.2f}".format(month, income, total))
main()
| [
"53397329+RioThomas@users.noreply.github.com"
] | 53397329+RioThomas@users.noreply.github.com |
f391b8bbfac34b2969f7efedd758ba809ad3b557 | 4dbebba87a95bfa435081921100b9b78c6b91d4c | /firstpython.py | 6dd7f8b8c003b8cd09f1c9ce196a32caa0379e7e | [] | no_license | Ken-Wise/testrepo | de724149c91684edfc11806688fbd4c8d22a63d1 | a0e4ac574e568b2865da9a9afe1abab0a1736754 | refs/heads/main | 2023-03-14T15:53:28.861112 | 2021-03-02T04:33:12 | 2021-03-02T04:33:12 | 343,634,843 | 0 | 0 | null | 2021-03-02T04:33:12 | 2021-03-02T03:34:26 | Python | UTF-8 | Python | false | false | 51 | py | #Display new Python File
print("New Python File")
| [
"noreply@github.com"
] | Ken-Wise.noreply@github.com |
7e2b85418ffafc13fefe95746e1074280425aa4d | 1b3728d0de91d746afc1f649eff289b79e910958 | /newsapp/admin.py | b0e7bf6bdf00a93f80fb06ececac193a80c39619 | [] | no_license | vgaicuks/django-newsapp | 396d8a96ceaaf2d5bce9c0c143204b53ff5d2d2b | 9c544e01ffa06f516e2c25a5e1be0b900146caed | refs/heads/master | 2021-01-15T10:24:54.885193 | 2014-10-29T21:19:16 | 2014-10-29T21:19:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,231 | py | # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import New
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
if 'modeltranslation' in settings.INSTALLED_APPS:
from modeltranslation.admin import TranslationAdmin
ParentModel = TranslationAdmin
else:
ParentModel = admin.ModelAdmin
if 'tinymce' in settings.INSTALLED_APPS:
from django.db import models
from tinymce.widgets import AdminTinyMCE
news_formfield_overrides = {
models.TextField: {'widget': AdminTinyMCE},
}
else:
news_formfield_overrides = {}
class NewAdmin(ParentModel):
list_display = ('title', 'date_added', 'active')
prepopulated_fields = {"slug": ("title",)}
date_hierarchy = 'date_added'
formfield_overrides = news_formfield_overrides
fieldsets = (
(_('Title section') , {
'fields': ('title', 'slug', ),
}),
(_('Additional') , {
'fields': ('date_added', 'active', 'image',)
}),
(_('Short content'), {
'fields': ('content_short',)
}),
(_('Full content'), {
'fields': ('content',)
}),
)
admin.site.register(New, NewAdmin)
| [
"a@trialine.lv"
] | a@trialine.lv |
bdfadb61a7aac33e0ace7546d022b20ae19335d9 | 3db588499dd7c86db6104fb206bfa256c660c526 | /src/mydlgo/tests/test_gtp.py | 632cabf3ca504a51dc716a01f59f43d357c416d5 | [] | no_license | SojiroNishimura/my-dlgo | 1837e400782fd4bb7dbc83e16a410c5f0a67bcbc | aafc46e76bf20c70a548a36e6761d9f2c5a9a40a | refs/heads/master | 2020-08-02T04:55:57.831640 | 2020-06-23T11:24:19 | 2020-06-23T11:24:19 | 211,241,087 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,577 | py | import pytest
from mydlgo.gtp import CommandType, Command, Color, Point, Vertex
@pytest.fixture(scope="module")
def P():
from typing import NamedTuple
P = NamedTuple("P", [("row", int), ("col", int)])
return P
@pytest.mark.parametrize("size", [9, 13, 19])
def test_boardsize_with_valid_size(size):
assert Command.boardsize(size).to_string() == f"{CommandType.BOARDSIZE.value} {size}"
@pytest.mark.parametrize("size", [0, 20])
def test_boardsize_with_invalid_size(size):
with pytest.raises(ValueError):
Command.boardsize(size)
def test_clear_board():
assert Command.clear_board().to_string() == f"{CommandType.CLEAR_BOARD.value}"
@pytest.mark.parametrize("color", ["Black", "White"])
def test_genmove_with_str(color):
assert Command.genmove(color).to_string() == f"{CommandType.GENMOVE.value} {color.lower()}"
@pytest.mark.parametrize("color", [Color.BLACK, Color.WHITE])
def test_genmove_with_color(color):
assert Command.genmove(color).to_string() == f"{CommandType.GENMOVE.value} {color.value}"
@pytest.mark.parametrize("komi", [0, 7.5])
def test_komi(komi):
assert Command.komi(komi).to_string() == f"{CommandType.KOMI.value} {komi}"
@pytest.mark.parametrize("player", [Color.BLACK, 1, "Black"])
def test_player_is_black(player):
assert Color.is_black(player) is True
@pytest.mark.parametrize("player", [Color.WHITE, 2, "White"])
def test_player_is_white(player):
assert Color.is_black(player) is False
@pytest.mark.parametrize("point", [(1, 19), "A1", "T19", "pass"])
def test_point_can_be_converted_to_vertex(P, point):
p = P(row=point[0], col=point[1]) if Point.is_point(point) else point
v = Vertex.from_point(p)
assert isinstance(v, Vertex)
@pytest.mark.parametrize("player, point", [(Color.BLACK, (1, 1)), (1, (4, 16)), ("Black", (19, 19))])
def test_play_legal_move_black(P, player, point):
p = P(row=point[0], col=point[1])
v = Vertex.from_point(p)
assert Command.play(player, p).to_string() == f"{CommandType.PLAY.value} {Color.BLACK.value} {v}"
@pytest.mark.parametrize("player, point", [(Color.WHITE, (1, 1)), (2, (4, 16)), ("White", (19, 19))])
def test_play_legal_move_white(P, player, point):
p = P(row=point[0], col=point[1])
v = Vertex.from_point(p)
assert Command.play(player, p).to_string() == f"{CommandType.PLAY.value} {Color.WHITE.value} {v}"
@pytest.mark.parametrize("player", [Color.BLACK, Color.WHITE])
def test_play_pass(player):
assert Command.play(player, "pass").to_string() == f"{CommandType.PLAY.value} {player.value} pass"
| [
"sjiro.nishimura@gmail.com"
] | sjiro.nishimura@gmail.com |
cff530542f2cad59bae355c3551746f6d0b674fa | 9967f075b8386c5a0d1f5db72b822e45842cd174 | /manage.py | 6ed771f6709cebe5fb1191c5d5296a30201564d0 | [] | no_license | hardtosaygoodbye/homestay | 6f62eb8a3d2716c4940414663337a0dad5f250bb | 8be4c28a56c38406e4e025eadbdd1b4bdb089a9a | refs/heads/master | 2020-04-11T07:18:15.406454 | 2018-12-22T05:56:49 | 2018-12-22T05:56:49 | 161,606,693 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 540 | py | #!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
    # Point Django at this project's settings module before dispatching.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'homestay.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as import_error:
        # Surface the common causes (Django missing / inactive virtualenv).
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from import_error
    execute_from_command_line(sys.argv)
| [
"hardtosaygoodbye@sina.com"
] | hardtosaygoodbye@sina.com |
fbb8527899cf6672f63cfc73def7ed71402a62cc | b8260b664833f6edbe61ba226a32cb22c9afb72a | /osmnx/truncate.py | a9e8e0550a16a4afea579b09952ced70238cfa7c | [
"MIT"
] | permissive | PratipRana/osmnx | a6cd515945e84f1c6a0552df21b1000e36da5ed9 | 9e063d441163c3b1e40390ab21838cc5f2b36410 | refs/heads/master | 2022-09-06T13:34:53.556633 | 2020-06-01T15:15:34 | 2020-06-01T15:15:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,217 | py | """Truncate graph by distance, bounding box, or polygon."""
import geopandas as gpd
import networkx as nx
import pandas as pd
from shapely.geometry import Point
from . import utils
from . import utils_geo
from . import utils_graph
def truncate_graph_dist(G, source_node, max_dist=1000, weight="length", retain_all=False):
    """
    Remove every node farther than max_dist network units from source_node.

    Parameters
    ----------
    G : networkx.MultiDiGraph
        input graph
    source_node : int
        node from which network distances to all other nodes are measured
    max_dist : int
        nodes whose network distance from source_node exceeds this value
        are removed
    weight : string
        edge attribute used to weight distances (default 'length', i.e.,
        edge length in meters)
    retain_all : bool
        if True, return the entire graph even if it is not connected

    Returns
    -------
    G : networkx.MultiDiGraph
        the truncated graph
    """
    G = G.copy()
    # compute the weighted shortest-path distance to every reachable node,
    # then drop any node beyond the distance threshold
    dists = nx.shortest_path_length(G, source=source_node, weight=weight)
    too_far = [node for node, dist in dict(dists).items() if dist > max_dist]
    G.remove_nodes_from(too_far)
    utils.log("Truncated graph by weighted network distance")

    # unless retain_all, drop isolates and keep only the largest component
    if not retain_all:
        G = utils_graph.remove_isolated_nodes(G)
        G = utils_graph.get_largest_component(G)

    return G
def truncate_graph_bbox(G, north, south, east, west, truncate_by_edge=False, retain_all=False):
    """
    Remove every node in graph that falls outside a bounding box.

    Needed because overpass returns entire ways that also include nodes
    outside the bbox if the way (a single OSM ID) has any node inside the
    bbox at some point.

    Parameters
    ----------
    G : networkx.MultiDiGraph
        input graph
    north : float
        northern latitude of bounding box
    south : float
        southern latitude of bounding box
    east : float
        eastern longitude of bounding box
    west : float
        western longitude of bounding box
    truncate_by_edge : bool
        if True retain a node outside the bbox when at least one of its
        neighbors lies within the bbox
    retain_all : bool
        if True, return the entire graph even if it is not connected

    Returns
    -------
    G : networkx.MultiDiGraph
        the truncated graph
    """
    G = G.copy()
    nodes_to_remove = []
    for node, data in G.nodes(data=True):
        # nodes exactly on the boundary count as inside and are kept
        if south <= data["y"] <= north and west <= data["x"] <= east:
            continue

        if truncate_by_edge:
            # keep this outside node if any neighbor lies strictly inside
            # the bounding box
            neighbors = list(G.successors(node)) + list(G.predecessors(node))
            any_neighbor_inside = any(
                south < G.nodes[nbr]["y"] < north and west < G.nodes[nbr]["x"] < east
                for nbr in neighbors
            )
            if any_neighbor_inside:
                continue

        nodes_to_remove.append(node)

    G.remove_nodes_from(nodes_to_remove)
    utils.log("Truncated graph by bounding box")

    # unless retain_all, drop isolates and keep only the largest component
    if not retain_all:
        G = utils_graph.remove_isolated_nodes(G)
        G = utils_graph.get_largest_component(G)

    return G
def truncate_graph_polygon(
    G,
    polygon,
    retain_all=False,
    truncate_by_edge=False,
    quadrat_width=0.05,
    min_num=3,
    buffer_amount=1e-9,
):
    """
    Remove every node in graph that lies outside a shapely (Multi)Polygon.

    Parameters
    ----------
    G : networkx.MultiDiGraph
        input graph
    polygon : shapely.geometry.Polygon or shapely.geometry.MultiPolygon
        only retain nodes in graph that lie within this geometry
    retain_all : bool
        if True, return the entire graph even if it is not connected
    truncate_by_edge : bool
        if True retain a node outside the polygon when at least one of its
        neighbors lies within the polygon
    quadrat_width : numeric
        passed on to intersect_index_quadrats: the linear length (in degrees)
        of the quadrats with which to cut up the geometry (default = 0.05,
        approx 4km at NYC's latitude)
    min_num : int
        passed on to intersect_index_quadrats: the minimum number of linear
        quadrat lines (e.g., min_num=3 would produce a quadrat grid of 4
        squares)
    buffer_amount : numeric
        passed on to intersect_index_quadrats: buffer the quadrat grid lines
        by quadrat_width times buffer_amount

    Returns
    -------
    G : networkx.MultiDiGraph
        the truncated graph
    """
    G = G.copy()
    utils.log("Identifying all nodes that lie outside the polygon...")

    # assemble a GeoDataFrame holding every node and its location
    geoms = [Point(data["x"], data["y"]) for _, data in G.nodes(data=True)]
    gdf_nodes = gpd.GeoDataFrame({"node": list(G.nodes()), "geometry": geoms})
    gdf_nodes.crs = G.graph["crs"]

    # spatially index the nodes and find those inside the polygon; everything
    # else is outside
    points_within_geometry = utils_geo._intersect_index_quadrats(
        gdf_nodes,
        polygon,
        quadrat_width=quadrat_width,
        min_num=min_num,
        buffer_amount=buffer_amount,
    )
    nodes_outside_polygon = gdf_nodes[~gdf_nodes.index.isin(points_within_geometry.index)]

    if truncate_by_edge:
        # remove an outside node only if every one of its neighbors is also
        # outside the polygon (a node with no neighbors is removed too)
        outside = set(nodes_outside_polygon["node"])
        nodes_to_remove = []
        for node in nodes_outside_polygon["node"]:
            neighbors = list(G.successors(node)) + list(G.predecessors(node))
            if all(nbr in outside for nbr in neighbors):
                nodes_to_remove.append(node)
    else:
        nodes_to_remove = nodes_outside_polygon["node"]

    # now remove from the graph all those nodes that lie outside the place
    # polygon
    G.remove_nodes_from(nodes_to_remove)
    utils.log(f"Removed {len(nodes_outside_polygon)} nodes outside polygon")

    # unless retain_all, drop isolates and keep only the largest component
    if not retain_all:
        G = utils_graph.remove_isolated_nodes(G)
        G = utils_graph.get_largest_component(G)

    return G
| [
"boeing@usc.edu"
] | boeing@usc.edu |
c02a249003fd8e35d1379a6b5eab282f99f928e1 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/T11-FC-SP-POLICY-MIB.py | 9a640419ee5d983b90b82003634a6b2ec6febda3 | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 66,483 | py | #
# PySNMP MIB module T11-FC-SP-POLICY-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/T11-FC-SP-POLICY-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:07:33 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ValueRangeConstraint, SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection")
fcmInstanceIndex, FcDomainIdOrZero, FcNameIdOrZero = mibBuilder.importSymbols("FC-MGMT-MIB", "fcmInstanceIndex", "FcDomainIdOrZero", "FcNameIdOrZero")
InetAddress, InetPortNumber, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetPortNumber", "InetAddressType")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
NotificationType, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, Unsigned32, Bits, TimeTicks, ModuleIdentity, Integer32, iso, MibIdentifier, mib_2, Counter64, ObjectIdentity, Gauge32, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Unsigned32", "Bits", "TimeTicks", "ModuleIdentity", "Integer32", "iso", "MibIdentifier", "mib-2", "Counter64", "ObjectIdentity", "Gauge32", "Counter32")
TimeStamp, StorageType, TruthValue, TextualConvention, DisplayString, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "TimeStamp", "StorageType", "TruthValue", "TextualConvention", "DisplayString", "RowStatus")
T11NsGs4RejectReasonCode, = mibBuilder.importSymbols("T11-FC-NAME-SERVER-MIB", "T11NsGs4RejectReasonCode")
T11FcSpPolicyNameType, T11FcSpAlphaNumName, T11FcSpHashCalculationStatus, T11FcSpPolicyHashValue, T11FcSpAlphaNumNameOrAbsent, T11FcSpPolicyName, T11FcSpPolicyHashFormat, T11FcSpPolicyObjectType = mibBuilder.importSymbols("T11-FC-SP-TC-MIB", "T11FcSpPolicyNameType", "T11FcSpAlphaNumName", "T11FcSpHashCalculationStatus", "T11FcSpPolicyHashValue", "T11FcSpAlphaNumNameOrAbsent", "T11FcSpPolicyName", "T11FcSpPolicyHashFormat", "T11FcSpPolicyObjectType")
T11FabricIndex, = mibBuilder.importSymbols("T11-TC-MIB", "T11FabricIndex")
t11FcSpPolicyMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 178))
t11FcSpPolicyMIB.setRevisions(('2008-08-20 00:00',))
if mibBuilder.loadTexts: t11FcSpPolicyMIB.setLastUpdated('200808200000Z')
if mibBuilder.loadTexts: t11FcSpPolicyMIB.setOrganization('This MIB module was developed through the coordinated effort of two organizations: T11 began the development and the IETF (in the IMSS Working Group) finished it.')
t11FcSpPoMIBNotifications = MibIdentifier((1, 3, 6, 1, 2, 1, 178, 0))
t11FcSpPoMIBObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 178, 1))
t11FcSpPoMIBConformance = MibIdentifier((1, 3, 6, 1, 2, 1, 178, 2))
t11FcSpPoActive = MibIdentifier((1, 3, 6, 1, 2, 1, 178, 1, 1))
t11FcSpPoOperations = MibIdentifier((1, 3, 6, 1, 2, 1, 178, 1, 2))
t11FcSpPoNonActive = MibIdentifier((1, 3, 6, 1, 2, 1, 178, 1, 3))
t11FcSpPoStatistics = MibIdentifier((1, 3, 6, 1, 2, 1, 178, 1, 4))
t11FcSpPoControl = MibIdentifier((1, 3, 6, 1, 2, 1, 178, 1, 5))
t11FcSpPoTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 1, 1), )
if mibBuilder.loadTexts: t11FcSpPoTable.setStatus('current')
t11FcSpPoEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 1, 1, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"))
if mibBuilder.loadTexts: t11FcSpPoEntry.setStatus('current')
t11FcSpPoFabricIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 1, 1, 1), T11FabricIndex())
if mibBuilder.loadTexts: t11FcSpPoFabricIndex.setStatus('current')
t11FcSpPoPolicySummaryObjName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 1, 1, 2), T11FcSpAlphaNumName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoPolicySummaryObjName.setStatus('current')
t11FcSpPoAdminFabricName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 1, 1, 3), FcNameIdOrZero().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoAdminFabricName.setStatus('current')
t11FcSpPoActivatedTimeStamp = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 1, 1, 4), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoActivatedTimeStamp.setStatus('current')
t11FcSpPoSummaryTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 1, 2), )
if mibBuilder.loadTexts: t11FcSpPoSummaryTable.setStatus('current')
t11FcSpPoSummaryEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 1, 2, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoSummaryPolicyNameType"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoSummaryPolicyName"))
if mibBuilder.loadTexts: t11FcSpPoSummaryEntry.setStatus('current')
t11FcSpPoSummaryPolicyNameType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 2, 1, 1), T11FcSpPolicyNameType().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 7))).clone(namedValues=NamedValues(("nodeName", 1), ("alphaNumericName", 7))))
if mibBuilder.loadTexts: t11FcSpPoSummaryPolicyNameType.setStatus('current')
t11FcSpPoSummaryPolicyName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 2, 1, 2), T11FcSpPolicyName())
if mibBuilder.loadTexts: t11FcSpPoSummaryPolicyName.setStatus('current')
t11FcSpPoSummaryPolicyType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 2, 1, 3), T11FcSpPolicyObjectType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoSummaryPolicyType.setStatus('current')
t11FcSpPoSummaryHashFormat = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 2, 1, 4), T11FcSpPolicyHashFormat()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoSummaryHashFormat.setStatus('current')
t11FcSpPoSummaryHashValue = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 2, 1, 5), T11FcSpPolicyHashValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoSummaryHashValue.setStatus('current')
t11FcSpPoSwMembTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 1, 3), )
if mibBuilder.loadTexts: t11FcSpPoSwMembTable.setStatus('current')
t11FcSpPoSwMembEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 1, 3, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoSwMembSwitchNameType"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoSwMembSwitchName"))
if mibBuilder.loadTexts: t11FcSpPoSwMembEntry.setStatus('current')
t11FcSpPoSwMembSwitchNameType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 3, 1, 1), T11FcSpPolicyNameType().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 5, 6))).clone(namedValues=NamedValues(("nodeName", 1), ("restrictedNodeName", 2), ("wildcard", 5), ("restrictedWildcard", 6))))
if mibBuilder.loadTexts: t11FcSpPoSwMembSwitchNameType.setStatus('current')
t11FcSpPoSwMembSwitchName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 3, 1, 2), FcNameIdOrZero().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8))
if mibBuilder.loadTexts: t11FcSpPoSwMembSwitchName.setStatus('current')
t11FcSpPoSwMembSwitchFlags = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 3, 1, 3), Bits().clone(namedValues=NamedValues(("staticDomainID", 0), ("insistentDomainID", 1), ("serialPortsAccess", 2), ("physicalPortsAccess", 3), ("managerRole", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoSwMembSwitchFlags.setStatus('current')
t11FcSpPoSwMembDomainID = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 3, 1, 4), FcDomainIdOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoSwMembDomainID.setStatus('current')
t11FcSpPoSwMembPolicyDataRole = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("client", 1), ("autonomous", 2), ("server", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoSwMembPolicyDataRole.setStatus('current')
t11FcSpPoSwMembAuthBehaviour = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 3, 1, 6), Bits().clone(namedValues=NamedValues(("mustAuthenticate", 0), ("rejectIsFailure", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoSwMembAuthBehaviour.setStatus('current')
t11FcSpPoSwMembAttribute = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 3, 1, 7), T11FcSpAlphaNumNameOrAbsent()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoSwMembAttribute.setStatus('current')
t11FcSpPoNoMembTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 1, 4), )
if mibBuilder.loadTexts: t11FcSpPoNoMembTable.setStatus('current')
t11FcSpPoNoMembEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 1, 4, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNoMembNodeNameType"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNoMembNodeName"))
if mibBuilder.loadTexts: t11FcSpPoNoMembEntry.setStatus('current')
t11FcSpPoNoMembNodeNameType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 4, 1, 1), T11FcSpPolicyNameType().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("nodeName", 1), ("restrictedNodeName", 2), ("portName", 3), ("restrictedPortName", 4), ("wildcard", 5), ("restrictedWildcard", 6))))
if mibBuilder.loadTexts: t11FcSpPoNoMembNodeNameType.setStatus('current')
t11FcSpPoNoMembNodeName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 4, 1, 2), FcNameIdOrZero().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8))
if mibBuilder.loadTexts: t11FcSpPoNoMembNodeName.setStatus('current')
t11FcSpPoNoMembFlags = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 4, 1, 3), Bits().clone(namedValues=NamedValues(("scsiEnclosureAccess", 0), ("authenticationRequired", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoNoMembFlags.setStatus('current')
t11FcSpPoNoMembCtAccessIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 4, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoNoMembCtAccessIndex.setStatus('current')
t11FcSpPoNoMembAttribute = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 4, 1, 5), T11FcSpAlphaNumNameOrAbsent()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoNoMembAttribute.setStatus('current')
t11FcSpPoCtDescrTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 1, 5), )
if mibBuilder.loadTexts: t11FcSpPoCtDescrTable.setStatus('current')
t11FcSpPoCtDescrEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 1, 5, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoCtDescrSpecifierIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoCtDescrIndex"))
if mibBuilder.loadTexts: t11FcSpPoCtDescrEntry.setStatus('current')
t11FcSpPoCtDescrSpecifierIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 5, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoCtDescrSpecifierIndex.setStatus('current')
t11FcSpPoCtDescrIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 5, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoCtDescrIndex.setStatus('current')
t11FcSpPoCtDescrFlags = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 5, 1, 3), Bits().clone(namedValues=NamedValues(("allow", 0), ("gsTypeWildcard", 1), ("gsSubTypeWildcard", 2), ("readOnly", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoCtDescrFlags.setStatus('current')
t11FcSpPoCtDescrGsType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 5, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoCtDescrGsType.setStatus('current')
t11FcSpPoCtDescrGsSubType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 5, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoCtDescrGsSubType.setStatus('current')
t11FcSpPoSwConnTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 1, 6), )
if mibBuilder.loadTexts: t11FcSpPoSwConnTable.setStatus('current')
t11FcSpPoSwConnEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 1, 6, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoSwConnSwitchName"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoSwConnAllowedType"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoSwConnPortNameOrAll"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoSwConnAllowedIndex"))
if mibBuilder.loadTexts: t11FcSpPoSwConnEntry.setStatus('current')
t11FcSpPoSwConnSwitchName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 6, 1, 1), FcNameIdOrZero().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8))
if mibBuilder.loadTexts: t11FcSpPoSwConnSwitchName.setStatus('current')
t11FcSpPoSwConnAllowedType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 6, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("switch", 1), ("node", 2))))
if mibBuilder.loadTexts: t11FcSpPoSwConnAllowedType.setStatus('current')
t11FcSpPoSwConnPortNameOrAll = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 6, 1, 3), FcNameIdOrZero().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(8, 8), )))
if mibBuilder.loadTexts: t11FcSpPoSwConnPortNameOrAll.setStatus('current')
t11FcSpPoSwConnAllowedIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 6, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoSwConnAllowedIndex.setStatus('current')
t11FcSpPoSwConnAllowedNameType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 6, 1, 5), T11FcSpPolicyNameType().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("nodeName", 1), ("restrictedNodeName", 2), ("portName", 3), ("restrictedPortName", 4), ("wildcard", 5), ("restrictedWildcard", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoSwConnAllowedNameType.setStatus('current')
t11FcSpPoSwConnAllowedName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 6, 1, 6), T11FcSpPolicyName().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoSwConnAllowedName.setStatus('current')
t11FcSpPoIpMgmtTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 1, 7), )
if mibBuilder.loadTexts: t11FcSpPoIpMgmtTable.setStatus('current')
t11FcSpPoIpMgmtEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 1, 7, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoIpMgmtEntryNameType"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoIpMgmtEntryNameLow"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoIpMgmtEntryNameHigh"))
if mibBuilder.loadTexts: t11FcSpPoIpMgmtEntry.setStatus('current')
t11FcSpPoIpMgmtEntryNameType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 7, 1, 1), InetAddressType())
if mibBuilder.loadTexts: t11FcSpPoIpMgmtEntryNameType.setStatus('current')
t11FcSpPoIpMgmtEntryNameLow = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 7, 1, 2), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), )))
if mibBuilder.loadTexts: t11FcSpPoIpMgmtEntryNameLow.setStatus('current')
t11FcSpPoIpMgmtEntryNameHigh = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 7, 1, 3), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), )))
if mibBuilder.loadTexts: t11FcSpPoIpMgmtEntryNameHigh.setStatus('current')
t11FcSpPoIpMgmtWkpIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 7, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoIpMgmtWkpIndex.setStatus('current')
t11FcSpPoIpMgmtAttribute = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 7, 1, 5), T11FcSpAlphaNumNameOrAbsent()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoIpMgmtAttribute.setStatus('current')
t11FcSpPoWkpDescrTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 1, 8), )
if mibBuilder.loadTexts: t11FcSpPoWkpDescrTable.setStatus('current')
t11FcSpPoWkpDescrEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 1, 8, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoWkpDescrSpecifierIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoWkpDescrIndex"))
if mibBuilder.loadTexts: t11FcSpPoWkpDescrEntry.setStatus('current')
t11FcSpPoWkpDescrSpecifierIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 8, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoWkpDescrSpecifierIndex.setStatus('current')
t11FcSpPoWkpDescrIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 8, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoWkpDescrIndex.setStatus('current')
t11FcSpPoWkpDescrFlags = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 8, 1, 3), Bits().clone(namedValues=NamedValues(("allow", 0), ("wkpWildcard", 1), ("destPortWildcard", 2), ("readOnly", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoWkpDescrFlags.setStatus('current')
t11FcSpPoWkpDescrWkpNumber = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 8, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoWkpDescrWkpNumber.setStatus('current')
t11FcSpPoWkpDescrDestPort = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 8, 1, 5), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoWkpDescrDestPort.setStatus('current')
t11FcSpPoAttribTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 1, 9), )
if mibBuilder.loadTexts: t11FcSpPoAttribTable.setStatus('current')
t11FcSpPoAttribEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 1, 9, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoAttribName"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoAttribEntryIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoAttribPartIndex"))
if mibBuilder.loadTexts: t11FcSpPoAttribEntry.setStatus('current')
t11FcSpPoAttribName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 9, 1, 1), T11FcSpAlphaNumName())
if mibBuilder.loadTexts: t11FcSpPoAttribName.setStatus('current')
t11FcSpPoAttribEntryIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 9, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoAttribEntryIndex.setStatus('current')
t11FcSpPoAttribPartIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 9, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoAttribPartIndex.setStatus('current')
t11FcSpPoAttribType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 9, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoAttribType.setStatus('current')
t11FcSpPoAttribValue = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 9, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoAttribValue.setStatus('current')
t11FcSpPoAttribExtension = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 9, 1, 6), ObjectIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoAttribExtension.setStatus('current')
t11FcSpPoAuthProtTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 1, 10), )
if mibBuilder.loadTexts: t11FcSpPoAuthProtTable.setStatus('current')
t11FcSpPoAuthProtEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 1, 10, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoAttribName"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoAttribEntryIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoAuthProtIdentifier"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoAuthProtPartIndex"))
if mibBuilder.loadTexts: t11FcSpPoAuthProtEntry.setStatus('current')
t11FcSpPoAuthProtIdentifier = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 10, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoAuthProtIdentifier.setStatus('current')
t11FcSpPoAuthProtPartIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 10, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoAuthProtPartIndex.setStatus('current')
t11FcSpPoAuthProtParams = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 1, 10, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoAuthProtParams.setStatus('current')
t11FcSpPoOperTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 2, 1), )
if mibBuilder.loadTexts: t11FcSpPoOperTable.setStatus('current')
t11FcSpPoOperEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 2, 1, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"))
if mibBuilder.loadTexts: t11FcSpPoOperEntry.setStatus('current')
t11FcSpPoOperActivate = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 2, 1, 1, 1), T11FcSpAlphaNumName()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: t11FcSpPoOperActivate.setStatus('current')
t11FcSpPoOperDeActivate = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 2, 1, 1, 2), T11FcSpAlphaNumName()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: t11FcSpPoOperDeActivate.setStatus('current')
t11FcSpPoOperResult = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("activateSuccess", 1), ("badSummaryObject", 2), ("activateFailure", 3), ("deactivateSuccess", 4), ("deactivateFailure", 5), ("inProgress", 6), ("none", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoOperResult.setStatus('current')
t11FcSpPoOperFailCause = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 2, 1, 1, 4), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoOperFailCause.setStatus('current')
# --- t11FcSpPoNaSummaryTable: Policy Summary Objects that are NOT currently active ---
# Rows are created/deleted by management (RowStatus) and indexed by management
# instance, fabric, summary object name, policy object type and index.
t11FcSpPoNaSummaryTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 3, 1), )
if mibBuilder.loadTexts: t11FcSpPoNaSummaryTable.setStatus('current')
t11FcSpPoNaSummaryEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 3, 1, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSummaryName"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSummaryPolicyType"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSummaryPolicyIndex"))
if mibBuilder.loadTexts: t11FcSpPoNaSummaryEntry.setStatus('current')
# Index columns (not-accessible in SMI terms, hence no setMaxAccess here).
t11FcSpPoNaSummaryName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 1, 1, 1), T11FcSpAlphaNumName())
if mibBuilder.loadTexts: t11FcSpPoNaSummaryName.setStatus('current')
t11FcSpPoNaSummaryPolicyType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 1, 1, 2), T11FcSpPolicyObjectType())
if mibBuilder.loadTexts: t11FcSpPoNaSummaryPolicyType.setStatus('current')
t11FcSpPoNaSummaryPolicyIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoNaSummaryPolicyIndex.setStatus('current')
# Name of the referenced policy object: either a node name or an alphanumeric name.
t11FcSpPoNaSummaryPolicyNameType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 1, 1, 4), T11FcSpPolicyNameType().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 7))).clone(namedValues=NamedValues(("nodeName", 1), ("alphaNumericName", 7)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSummaryPolicyNameType.setStatus('current')
t11FcSpPoNaSummaryPolicyName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 1, 1, 5), T11FcSpPolicyName()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSummaryPolicyName.setStatus('current')
# Hash bookkeeping for the summary object; status defaults to 'stale',
# format defaults to the hex value 00000001.
t11FcSpPoNaSummaryHashStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 1, 1, 6), T11FcSpHashCalculationStatus().clone('stale')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSummaryHashStatus.setStatus('current')
t11FcSpPoNaSummaryHashFormat = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 1, 1, 7), T11FcSpPolicyHashFormat().clone(hexValue="00000001")).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoNaSummaryHashFormat.setStatus('current')
t11FcSpPoNaSummaryHashValue = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 1, 1, 8), T11FcSpPolicyHashValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoNaSummaryHashValue.setStatus('current')
# Standard RowStatus column for row creation/deletion.
t11FcSpPoNaSummaryRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 1, 1, 9), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSummaryRowStatus.setStatus('current')
# --- t11FcSpPoNaSwListTable: non-active Switch Membership List objects ---
# One row per named switch membership list, per fabric.
t11FcSpPoNaSwListTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 3, 2), )
if mibBuilder.loadTexts: t11FcSpPoNaSwListTable.setStatus('current')
t11FcSpPoNaSwListEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 3, 2, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwListName"))
if mibBuilder.loadTexts: t11FcSpPoNaSwListEntry.setStatus('current')
# Index column: the list's alphanumeric name.
t11FcSpPoNaSwListName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 2, 1, 1), T11FcSpAlphaNumName())
if mibBuilder.loadTexts: t11FcSpPoNaSwListName.setStatus('current')
# Fabric name associated with the list (FC name identifier, or zero-length).
t11FcSpPoNaSwListFabricName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 2, 1, 2), FcNameIdOrZero()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSwListFabricName.setStatus('current')
t11FcSpPoNaSwListRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 2, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSwListRowStatus.setStatus('current')
# --- t11FcSpPoNaSwMembTable: switch members of non-active Switch Membership Lists ---
# Indexed by the owning list plus the member switch's name type and 8-octet name.
t11FcSpPoNaSwMembTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 3, 3), )
if mibBuilder.loadTexts: t11FcSpPoNaSwMembTable.setStatus('current')
t11FcSpPoNaSwMembEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 3, 3, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwListName"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwMembSwitchNameType"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwMembSwitchName"))
if mibBuilder.loadTexts: t11FcSpPoNaSwMembEntry.setStatus('current')
# Index columns: name type may be a (restricted) node name or a (restricted) wildcard.
t11FcSpPoNaSwMembSwitchNameType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 3, 1, 1), T11FcSpPolicyNameType().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 5, 6))).clone(namedValues=NamedValues(("nodeName", 1), ("restrictedNodeName", 2), ("wildcard", 5), ("restrictedWildcard", 6))))
if mibBuilder.loadTexts: t11FcSpPoNaSwMembSwitchNameType.setStatus('current')
t11FcSpPoNaSwMembSwitchName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 3, 1, 2), FcNameIdOrZero().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8))
if mibBuilder.loadTexts: t11FcSpPoNaSwMembSwitchName.setStatus('current')
# Per-member capability flags (BITS).
t11FcSpPoNaSwMembFlags = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 3, 1, 3), Bits().clone(namedValues=NamedValues(("staticDomainID", 0), ("insistentDomainID", 1), ("serialPortsAccess", 2), ("physicalPortsAccess", 3), ("managerRole", 4)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSwMembFlags.setStatus('current')
t11FcSpPoNaSwMembDomainID = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 3, 1, 4), FcDomainIdOrZero()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSwMembDomainID.setStatus('current')
# Role of the switch with respect to policy data distribution.
t11FcSpPoNaSwMembPolicyDataRole = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("client", 1), ("autonomous", 2), ("server", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSwMembPolicyDataRole.setStatus('current')
t11FcSpPoNaSwMembAuthBehaviour = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 3, 1, 6), Bits().clone(namedValues=NamedValues(("mustAuthenticate", 0), ("rejectIsFailure", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSwMembAuthBehaviour.setStatus('current')
# Optional pointer to an attribute object by name ('absent' form allowed).
t11FcSpPoNaSwMembAttribute = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 3, 1, 7), T11FcSpAlphaNumNameOrAbsent()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSwMembAttribute.setStatus('current')
t11FcSpPoNaSwMembRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 3, 1, 8), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSwMembRowStatus.setStatus('current')
# --- t11FcSpPoNaNoMembTable: node members of non-active Node Membership Lists ---
t11FcSpPoNaNoMembTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 3, 4), )
if mibBuilder.loadTexts: t11FcSpPoNaNoMembTable.setStatus('current')
t11FcSpPoNaNoMembEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 3, 4, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaNoMembListName"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaNoMembNodeNameType"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaNoMembNodeName"))
if mibBuilder.loadTexts: t11FcSpPoNaNoMembEntry.setStatus('current')
# Index columns: list name, node-name type (node/port name, restricted or
# wildcard variants) and the 8-octet node name itself.
t11FcSpPoNaNoMembListName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 4, 1, 1), T11FcSpAlphaNumName())
if mibBuilder.loadTexts: t11FcSpPoNaNoMembListName.setStatus('current')
t11FcSpPoNaNoMembNodeNameType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 4, 1, 2), T11FcSpPolicyNameType().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("nodeName", 1), ("restrictedNodeName", 2), ("portName", 3), ("restrictedPortName", 4), ("wildcard", 5), ("restrictedWildcard", 6))))
if mibBuilder.loadTexts: t11FcSpPoNaNoMembNodeNameType.setStatus('current')
t11FcSpPoNaNoMembNodeName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 4, 1, 3), FcNameIdOrZero().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8))
if mibBuilder.loadTexts: t11FcSpPoNaNoMembNodeName.setStatus('current')
t11FcSpPoNaNoMembFlags = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 4, 1, 4), Bits().clone(namedValues=NamedValues(("scsiEnclosureAccess", 0), ("authenticationRequired", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaNoMembFlags.setStatus('current')
# Index into the CT-access descriptor table (0 presumably means "none" — per MIB spec).
t11FcSpPoNaNoMembCtAccessIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 4, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaNoMembCtAccessIndex.setStatus('current')
t11FcSpPoNaNoMembAttribute = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 4, 1, 6), T11FcSpAlphaNumNameOrAbsent()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaNoMembAttribute.setStatus('current')
t11FcSpPoNaNoMembRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 4, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaNoMembRowStatus.setStatus('current')
# --- t11FcSpPoNaCtDescrTable: non-active Common Transport (CT) access descriptors ---
# Each descriptor allows/denies access to a GS type/subtype pair; wildcards are
# expressed via the flags column.
t11FcSpPoNaCtDescrTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 3, 5), )
if mibBuilder.loadTexts: t11FcSpPoNaCtDescrTable.setStatus('current')
t11FcSpPoNaCtDescrEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 3, 5, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaCtDescrSpecifierIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaCtDescrIndex"))
if mibBuilder.loadTexts: t11FcSpPoNaCtDescrEntry.setStatus('current')
# Index columns: specifier index groups descriptors, descriptor index orders them.
t11FcSpPoNaCtDescrSpecifierIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 5, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoNaCtDescrSpecifierIndex.setStatus('current')
t11FcSpPoNaCtDescrIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 5, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoNaCtDescrIndex.setStatus('current')
t11FcSpPoNaCtDescrFlags = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 5, 1, 3), Bits().clone(namedValues=NamedValues(("allow", 0), ("gsTypeWildcard", 1), ("gsSubTypeWildcard", 2), ("readOnly", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaCtDescrFlags.setStatus('current')
# GS type/subtype are single fixed-length octets.
t11FcSpPoNaCtDescrGsType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 5, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaCtDescrGsType.setStatus('current')
t11FcSpPoNaCtDescrGsSubType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 5, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaCtDescrGsSubType.setStatus('current')
t11FcSpPoNaCtDescrRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 5, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaCtDescrRowStatus.setStatus('current')
# --- t11FcSpPoNaSwConnTable: non-active Switch Connectivity objects ---
# Describes which switches/nodes a given switch (or a specific port of it) is
# allowed to connect to.
t11FcSpPoNaSwConnTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 3, 6), )
if mibBuilder.loadTexts: t11FcSpPoNaSwConnTable.setStatus('current')
t11FcSpPoNaSwConnEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 3, 6, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwConnSwitchName"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwConnAllowedType"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwConnPortNameOrAll"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwConnAllowedIndex"))
if mibBuilder.loadTexts: t11FcSpPoNaSwConnEntry.setStatus('current')
# Index columns.
t11FcSpPoNaSwConnSwitchName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 6, 1, 1), FcNameIdOrZero().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8))
if mibBuilder.loadTexts: t11FcSpPoNaSwConnSwitchName.setStatus('current')
t11FcSpPoNaSwConnAllowedType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 6, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("switch", 1), ("node", 2))))
if mibBuilder.loadTexts: t11FcSpPoNaSwConnAllowedType.setStatus('current')
# Zero-length value selects "all ports"; otherwise an 8-octet port name.
t11FcSpPoNaSwConnPortNameOrAll = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 6, 1, 3), FcNameIdOrZero().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(8, 8), )))
if mibBuilder.loadTexts: t11FcSpPoNaSwConnPortNameOrAll.setStatus('current')
t11FcSpPoNaSwConnAllowedIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 6, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoNaSwConnAllowedIndex.setStatus('current')
# Identity of the permitted peer.
t11FcSpPoNaSwConnAllowedNameType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 6, 1, 5), T11FcSpPolicyNameType().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("nodeName", 1), ("restrictedNodeName", 2), ("portName", 3), ("restrictedPortName", 4), ("wildcard", 5), ("restrictedWildcard", 6)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSwConnAllowedNameType.setStatus('current')
t11FcSpPoNaSwConnAllowedName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 6, 1, 6), FcNameIdOrZero().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSwConnAllowedName.setStatus('current')
t11FcSpPoNaSwConnRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 6, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaSwConnRowStatus.setStatus('current')
# --- t11FcSpPoNaIpMgmtTable: non-active IP Management Lists ---
# Each row covers an inclusive IPv4/IPv6 address range (low..high) within a
# named list.
t11FcSpPoNaIpMgmtTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 3, 7), )
if mibBuilder.loadTexts: t11FcSpPoNaIpMgmtTable.setStatus('current')
t11FcSpPoNaIpMgmtEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 3, 7, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaIpMgmtListName"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaIpMgmtEntryNameType"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaIpMgmtEntryNameLow"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaIpMgmtEntryNameHigh"))
if mibBuilder.loadTexts: t11FcSpPoNaIpMgmtEntry.setStatus('current')
# Index columns: list name, address family, then the low/high range bounds
# (4 octets for IPv4, 16 for IPv6).
t11FcSpPoNaIpMgmtListName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 7, 1, 1), T11FcSpAlphaNumName())
if mibBuilder.loadTexts: t11FcSpPoNaIpMgmtListName.setStatus('current')
t11FcSpPoNaIpMgmtEntryNameType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 7, 1, 2), InetAddressType().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ipv4", 1), ("ipv6", 2))))
if mibBuilder.loadTexts: t11FcSpPoNaIpMgmtEntryNameType.setStatus('current')
t11FcSpPoNaIpMgmtEntryNameLow = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 7, 1, 3), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), )))
if mibBuilder.loadTexts: t11FcSpPoNaIpMgmtEntryNameLow.setStatus('current')
t11FcSpPoNaIpMgmtEntryNameHigh = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 7, 1, 4), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), )))
if mibBuilder.loadTexts: t11FcSpPoNaIpMgmtEntryNameHigh.setStatus('current')
# Reference into the well-known-protocol descriptor table.
t11FcSpPoNaIpMgmtWkpIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 7, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaIpMgmtWkpIndex.setStatus('current')
t11FcSpPoNaIpMgmtAttribute = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 7, 1, 6), T11FcSpAlphaNumNameOrAbsent()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaIpMgmtAttribute.setStatus('current')
t11FcSpPoNaIpMgmtRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 7, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaIpMgmtRowStatus.setStatus('current')
# --- t11FcSpPoNaWkpDescrTable: non-active Well-Known-Protocol (WKP) descriptors ---
# Each descriptor allows/denies a protocol number and destination port, with
# wildcard behaviour controlled by the flags column.
t11FcSpPoNaWkpDescrTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 3, 8), )
if mibBuilder.loadTexts: t11FcSpPoNaWkpDescrTable.setStatus('current')
t11FcSpPoNaWkpDescrEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 3, 8, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaWkpDescrSpecifierIndx"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaWkpDescrIndex"))
if mibBuilder.loadTexts: t11FcSpPoNaWkpDescrEntry.setStatus('current')
# Index columns.
t11FcSpPoNaWkpDescrSpecifierIndx = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 8, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoNaWkpDescrSpecifierIndx.setStatus('current')
t11FcSpPoNaWkpDescrIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 8, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoNaWkpDescrIndex.setStatus('current')
t11FcSpPoNaWkpDescrFlags = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 8, 1, 3), Bits().clone(namedValues=NamedValues(("allow", 0), ("wkpWildcard", 1), ("destPortWildcard", 2), ("readOnly", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaWkpDescrFlags.setStatus('current')
# Protocol number (0..255) and transport destination port.
t11FcSpPoNaWkpDescrWkpNumber = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 8, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaWkpDescrWkpNumber.setStatus('current')
t11FcSpPoNaWkpDescrDestPort = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 8, 1, 5), InetPortNumber()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaWkpDescrDestPort.setStatus('current')
t11FcSpPoNaWkpDescrRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 8, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaWkpDescrRowStatus.setStatus('current')
# --- t11FcSpPoNaAttribTable: non-active Attribute objects ---
# Attribute values larger than 256 octets are split across rows; the part
# index orders the fragments within one attribute entry.
t11FcSpPoNaAttribTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 3, 9), )
if mibBuilder.loadTexts: t11FcSpPoNaAttribTable.setStatus('current')
t11FcSpPoNaAttribEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 3, 9, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaAttribName"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaAttribEntryIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaAttribPartIndex"))
if mibBuilder.loadTexts: t11FcSpPoNaAttribEntry.setStatus('current')
# Index columns.
t11FcSpPoNaAttribName = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 9, 1, 1), T11FcSpAlphaNumName())
if mibBuilder.loadTexts: t11FcSpPoNaAttribName.setStatus('current')
t11FcSpPoNaAttribEntryIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 9, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoNaAttribEntryIndex.setStatus('current')
t11FcSpPoNaAttribPartIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 9, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoNaAttribPartIndex.setStatus('current')
t11FcSpPoNaAttribType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 9, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaAttribType.setStatus('current')
# Raw attribute value fragment (up to 256 octets).
t11FcSpPoNaAttribValue = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 9, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 256))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaAttribValue.setStatus('current')
# OID pointer to any vendor/extension definition for this attribute (read-only).
t11FcSpPoNaAttribExtension = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 9, 1, 6), ObjectIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoNaAttribExtension.setStatus('current')
t11FcSpPoNaAttribRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 9, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaAttribRowStatus.setStatus('current')
# --- t11FcSpPoNaAuthProtTable: authentication-protocol parameters for non-active attributes ---
# Extends attribute entries with per-protocol parameter fragments, keyed by the
# protocol identifier and a part index.
t11FcSpPoNaAuthProtTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 3, 10), )
if mibBuilder.loadTexts: t11FcSpPoNaAuthProtTable.setStatus('current')
t11FcSpPoNaAuthProtEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 3, 10, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaAttribName"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaAttribEntryIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaAuthProtIdentifier"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoNaAuthProtPartIndex"))
if mibBuilder.loadTexts: t11FcSpPoNaAuthProtEntry.setStatus('current')
# Index columns.
t11FcSpPoNaAuthProtIdentifier = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 10, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoNaAuthProtIdentifier.setStatus('current')
t11FcSpPoNaAuthProtPartIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 10, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: t11FcSpPoNaAuthProtPartIndex.setStatus('current')
# Opaque protocol-parameter fragment (up to 256 octets).
t11FcSpPoNaAuthProtParams = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 10, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 256))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaAuthProtParams.setStatus('current')
t11FcSpPoNaAuthProtRowStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 3, 10, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: t11FcSpPoNaAuthProtRowStatus.setStatus('current')
# --- t11FcSpPoStatsTable: per-fabric Security Policy Server request counters ---
t11FcSpPoStatsTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 4, 1), )
if mibBuilder.loadTexts: t11FcSpPoStatsTable.setStatus('current')
t11FcSpPoStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 4, 1, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"))
if mibBuilder.loadTexts: t11FcSpPoStatsEntry.setStatus('current')
# Counters: requests received, accepted and rejected.
t11FcSpPoInRequests = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 4, 1, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoInRequests.setStatus('current')
t11FcSpPoInAccepts = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 4, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoInAccepts.setStatus('current')
t11FcSpPoInRejects = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 4, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoInRejects.setStatus('current')
# --- Notification-support objects and per-fabric control table ---
# Scalar carried only inside notifications: address of the policy server that
# performed the operation.
t11FcSpPoServerAddress = MibScalar((1, 3, 6, 1, 2, 1, 178, 1, 5, 1), FcNameIdOrZero()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: t11FcSpPoServerAddress.setStatus('current')
t11FcSpPoControlTable = MibTable((1, 3, 6, 1, 2, 1, 178, 1, 5, 2), )
if mibBuilder.loadTexts: t11FcSpPoControlTable.setStatus('current')
t11FcSpPoControlEntry = MibTableRow((1, 3, 6, 1, 2, 1, 178, 1, 5, 2, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "T11-FC-SP-POLICY-MIB", "t11FcSpPoFabricIndex"))
if mibBuilder.loadTexts: t11FcSpPoControlEntry.setStatus('current')
# Persistence of row data and on/off switch for notifications (writable).
t11FcSpPoStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 5, 2, 1, 1), StorageType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: t11FcSpPoStorageType.setStatus('current')
t11FcSpPoNotificationEnable = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 5, 2, 1, 2), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: t11FcSpPoNotificationEnable.setStatus('current')
# Read-only details about the most recent notification/operation on this fabric.
t11FcSpPoLastNotifyType = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 5, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("none", 1), ("activation", 2), ("activateFail", 3), ("deactivation", 4), ("deactivateFail", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoLastNotifyType.setStatus('current')
t11FcSpPoRequestSource = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 5, 2, 1, 4), FcNameIdOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoRequestSource.setStatus('current')
t11FcSpPoReasonCode = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 5, 2, 1, 5), T11NsGs4RejectReasonCode()).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoReasonCode.setStatus('current')
t11FcSpPoCtCommandString = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 5, 2, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoCtCommandString.setStatus('current')
t11FcSpPoReasonCodeExp = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 5, 2, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoReasonCodeExp.setStatus('current')
# Vendor-specific reason code: zero-length when absent, one octet otherwise.
t11FcSpPoReasonVendorCode = MibTableColumn((1, 3, 6, 1, 2, 1, 178, 1, 5, 2, 1, 8), OctetString().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(1, 1), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: t11FcSpPoReasonVendorCode.setStatus('current')
# --- Notification definitions (OIDs under ...178.0) ---
# Success notifications carry the server address and request source; failure
# notifications additionally carry the CT command string and reason codes.
t11FcSpPoNotifyActivation = NotificationType((1, 3, 6, 1, 2, 1, 178, 0, 1)).setObjects(("T11-FC-SP-POLICY-MIB", "t11FcSpPoServerAddress"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoPolicySummaryObjName"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoRequestSource"))
if mibBuilder.loadTexts: t11FcSpPoNotifyActivation.setStatus('current')
t11FcSpPoNotifyActivateFail = NotificationType((1, 3, 6, 1, 2, 1, 178, 0, 2)).setObjects(("T11-FC-SP-POLICY-MIB", "t11FcSpPoServerAddress"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoRequestSource"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoCtCommandString"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoReasonCode"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoReasonCodeExp"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoReasonVendorCode"))
if mibBuilder.loadTexts: t11FcSpPoNotifyActivateFail.setStatus('current')
t11FcSpPoNotifyDeactivation = NotificationType((1, 3, 6, 1, 2, 1, 178, 0, 3)).setObjects(("T11-FC-SP-POLICY-MIB", "t11FcSpPoServerAddress"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoRequestSource"))
if mibBuilder.loadTexts: t11FcSpPoNotifyDeactivation.setStatus('current')
t11FcSpPoNotifyDeactivateFail = NotificationType((1, 3, 6, 1, 2, 1, 178, 0, 4)).setObjects(("T11-FC-SP-POLICY-MIB", "t11FcSpPoServerAddress"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoRequestSource"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoCtCommandString"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoReasonCode"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoReasonCodeExp"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoReasonVendorCode"))
if mibBuilder.loadTexts: t11FcSpPoNotifyDeactivateFail.setStatus('current')
# --- Conformance section: compliance statement plus object/notification groups ---
# The pysmi-generated "version > (4, 4, 0)" guards call setStatus only on
# pysnmp builds where ModuleCompliance/ObjectGroup support it.
t11FcSpPoMIBCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 178, 2, 1))
t11FcSpPoMIBGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 178, 2, 2))
t11FcSpPoMIBCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 178, 2, 1, 1)).setObjects(("T11-FC-SP-POLICY-MIB", "t11FcSpPoActiveObjectsGroup"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNonActiveObjectsGroup"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNotifyObjectsGroup"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNotificationGroup"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoOperationsObjectsGroup"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoStatsObjectsGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    t11FcSpPoMIBCompliance = t11FcSpPoMIBCompliance.setStatus('current')
# Columns of the currently-active policy tables.
t11FcSpPoActiveObjectsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 178, 2, 2, 1)).setObjects(("T11-FC-SP-POLICY-MIB", "t11FcSpPoPolicySummaryObjName"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoAdminFabricName"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoActivatedTimeStamp"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoSummaryPolicyType"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoSummaryHashFormat"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoSummaryHashValue"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoSwMembSwitchFlags"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoSwMembDomainID"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoSwMembPolicyDataRole"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoSwMembAuthBehaviour"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoSwMembAttribute"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNoMembFlags"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNoMembCtAccessIndex"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNoMembAttribute"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoCtDescrFlags"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoCtDescrGsType"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoCtDescrGsSubType"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoSwConnAllowedNameType"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoSwConnAllowedName"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoIpMgmtWkpIndex"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoIpMgmtAttribute"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoWkpDescrFlags"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoWkpDescrWkpNumber"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoWkpDescrDestPort"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoAttribType"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoAttribValue"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoAttribExtension"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoAuthProtParams"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    t11FcSpPoActiveObjectsGroup = t11FcSpPoActiveObjectsGroup.setStatus('current')
# Activate/deactivate operation objects.
t11FcSpPoOperationsObjectsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 178, 2, 2, 2)).setObjects(("T11-FC-SP-POLICY-MIB", "t11FcSpPoOperActivate"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoOperDeActivate"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoOperResult"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoOperFailCause"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    t11FcSpPoOperationsObjectsGroup = t11FcSpPoOperationsObjectsGroup.setStatus('current')
# Columns of the non-active ("Na") policy tables defined above.
t11FcSpPoNonActiveObjectsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 178, 2, 2, 3)).setObjects(("T11-FC-SP-POLICY-MIB", "t11FcSpPoStorageType"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSummaryPolicyNameType"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSummaryPolicyName"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSummaryHashStatus"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSummaryHashFormat"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSummaryHashValue"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSummaryRowStatus"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwListFabricName"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwListRowStatus"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwMembFlags"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwMembDomainID"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwMembPolicyDataRole"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwMembAuthBehaviour"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwMembAttribute"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwMembRowStatus"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaNoMembFlags"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaNoMembCtAccessIndex"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaNoMembAttribute"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaNoMembRowStatus"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaCtDescrFlags"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaCtDescrGsType"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaCtDescrGsSubType"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaCtDescrRowStatus"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwConnAllowedNameType"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwConnAllowedName"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaSwConnRowStatus"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaIpMgmtWkpIndex"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaIpMgmtAttribute"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaIpMgmtRowStatus"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaWkpDescrFlags"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaWkpDescrWkpNumber"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaWkpDescrDestPort"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaWkpDescrRowStatus"), ("T11-FC-SP-POLICY-MIB", 
"t11FcSpPoNaAttribType"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaAttribValue"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaAttribExtension"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaAttribRowStatus"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaAuthProtParams"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNaAuthProtRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    t11FcSpPoNonActiveObjectsGroup = t11FcSpPoNonActiveObjectsGroup.setStatus('current')
# Statistics counters group.
t11FcSpPoStatsObjectsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 178, 2, 2, 4)).setObjects(("T11-FC-SP-POLICY-MIB", "t11FcSpPoInRequests"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoInAccepts"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoInRejects"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    t11FcSpPoStatsObjectsGroup = t11FcSpPoStatsObjectsGroup.setStatus('current')
# Objects carried in or controlling notifications.
t11FcSpPoNotifyObjectsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 178, 2, 2, 5)).setObjects(("T11-FC-SP-POLICY-MIB", "t11FcSpPoNotificationEnable"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoServerAddress"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoLastNotifyType"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoRequestSource"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoReasonCode"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoCtCommandString"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoReasonCodeExp"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoReasonVendorCode"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    t11FcSpPoNotifyObjectsGroup = t11FcSpPoNotifyObjectsGroup.setStatus('current')
# The four notifications defined above.
t11FcSpPoNotificationGroup = NotificationGroup((1, 3, 6, 1, 2, 1, 178, 2, 2, 6)).setObjects(("T11-FC-SP-POLICY-MIB", "t11FcSpPoNotifyActivation"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNotifyActivateFail"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNotifyDeactivation"), ("T11-FC-SP-POLICY-MIB", "t11FcSpPoNotifyDeactivateFail"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    t11FcSpPoNotificationGroup = t11FcSpPoNotificationGroup.setStatus('current')
mibBuilder.exportSymbols("T11-FC-SP-POLICY-MIB", t11FcSpPoNaAttribEntryIndex=t11FcSpPoNaAttribEntryIndex, t11FcSpPoStatistics=t11FcSpPoStatistics, t11FcSpPoNaNoMembListName=t11FcSpPoNaNoMembListName, t11FcSpPoNaSwConnAllowedNameType=t11FcSpPoNaSwConnAllowedNameType, t11FcSpPoSwConnAllowedIndex=t11FcSpPoSwConnAllowedIndex, t11FcSpPoIpMgmtTable=t11FcSpPoIpMgmtTable, t11FcSpPoMIBConformance=t11FcSpPoMIBConformance, t11FcSpPoNaSummaryHashValue=t11FcSpPoNaSummaryHashValue, t11FcSpPoIpMgmtEntryNameType=t11FcSpPoIpMgmtEntryNameType, t11FcSpPoNaAuthProtRowStatus=t11FcSpPoNaAuthProtRowStatus, t11FcSpPoNaWkpDescrIndex=t11FcSpPoNaWkpDescrIndex, t11FcSpPoNaNoMembRowStatus=t11FcSpPoNaNoMembRowStatus, t11FcSpPoControlEntry=t11FcSpPoControlEntry, t11FcSpPoCtDescrEntry=t11FcSpPoCtDescrEntry, t11FcSpPoNotifyDeactivation=t11FcSpPoNotifyDeactivation, t11FcSpPoSummaryPolicyNameType=t11FcSpPoSummaryPolicyNameType, t11FcSpPoSwConnEntry=t11FcSpPoSwConnEntry, t11FcSpPoOperationsObjectsGroup=t11FcSpPoOperationsObjectsGroup, t11FcSpPoNaWkpDescrTable=t11FcSpPoNaWkpDescrTable, t11FcSpPoNaNoMembNodeName=t11FcSpPoNaNoMembNodeName, t11FcSpPoNaIpMgmtAttribute=t11FcSpPoNaIpMgmtAttribute, t11FcSpPoNaCtDescrIndex=t11FcSpPoNaCtDescrIndex, t11FcSpPoReasonCodeExp=t11FcSpPoReasonCodeExp, t11FcSpPoReasonVendorCode=t11FcSpPoReasonVendorCode, t11FcSpPoCtDescrFlags=t11FcSpPoCtDescrFlags, t11FcSpPoCtCommandString=t11FcSpPoCtCommandString, t11FcSpPoSwConnTable=t11FcSpPoSwConnTable, t11FcSpPoNaAuthProtParams=t11FcSpPoNaAuthProtParams, t11FcSpPoSwConnPortNameOrAll=t11FcSpPoSwConnPortNameOrAll, t11FcSpPoEntry=t11FcSpPoEntry, t11FcSpPoIpMgmtWkpIndex=t11FcSpPoIpMgmtWkpIndex, t11FcSpPoNaSwListRowStatus=t11FcSpPoNaSwListRowStatus, t11FcSpPoSwMembSwitchNameType=t11FcSpPoSwMembSwitchNameType, t11FcSpPoInRequests=t11FcSpPoInRequests, t11FcSpPoAuthProtTable=t11FcSpPoAuthProtTable, t11FcSpPoActivatedTimeStamp=t11FcSpPoActivatedTimeStamp, t11FcSpPoNaSwMembPolicyDataRole=t11FcSpPoNaSwMembPolicyDataRole, 
t11FcSpPoNaSwMembRowStatus=t11FcSpPoNaSwMembRowStatus, t11FcSpPoNaAuthProtPartIndex=t11FcSpPoNaAuthProtPartIndex, t11FcSpPoSummaryHashValue=t11FcSpPoSummaryHashValue, t11FcSpPoWkpDescrEntry=t11FcSpPoWkpDescrEntry, t11FcSpPoAttribTable=t11FcSpPoAttribTable, t11FcSpPoAttribPartIndex=t11FcSpPoAttribPartIndex, t11FcSpPoNaSwMembEntry=t11FcSpPoNaSwMembEntry, t11FcSpPoNaSwMembDomainID=t11FcSpPoNaSwMembDomainID, t11FcSpPoCtDescrGsSubType=t11FcSpPoCtDescrGsSubType, t11FcSpPoNaCtDescrEntry=t11FcSpPoNaCtDescrEntry, t11FcSpPoSummaryTable=t11FcSpPoSummaryTable, t11FcSpPoWkpDescrWkpNumber=t11FcSpPoWkpDescrWkpNumber, t11FcSpPoNaSwListTable=t11FcSpPoNaSwListTable, t11FcSpPoNaSummaryPolicyIndex=t11FcSpPoNaSummaryPolicyIndex, t11FcSpPoNotificationEnable=t11FcSpPoNotificationEnable, t11FcSpPoNaAuthProtEntry=t11FcSpPoNaAuthProtEntry, t11FcSpPoSummaryEntry=t11FcSpPoSummaryEntry, t11FcSpPoNaSwMembSwitchName=t11FcSpPoNaSwMembSwitchName, t11FcSpPoNaIpMgmtEntry=t11FcSpPoNaIpMgmtEntry, t11FcSpPoNaSwConnSwitchName=t11FcSpPoNaSwConnSwitchName, t11FcSpPoNaWkpDescrFlags=t11FcSpPoNaWkpDescrFlags, t11FcSpPoAttribName=t11FcSpPoAttribName, t11FcSpPoCtDescrTable=t11FcSpPoCtDescrTable, t11FcSpPoNaNoMembEntry=t11FcSpPoNaNoMembEntry, t11FcSpPoNaSwListName=t11FcSpPoNaSwListName, t11FcSpPoStatsObjectsGroup=t11FcSpPoStatsObjectsGroup, t11FcSpPoNaAttribRowStatus=t11FcSpPoNaAttribRowStatus, t11FcSpPoNotificationGroup=t11FcSpPoNotificationGroup, t11FcSpPoNaIpMgmtListName=t11FcSpPoNaIpMgmtListName, t11FcSpPoIpMgmtAttribute=t11FcSpPoIpMgmtAttribute, t11FcSpPoNoMembCtAccessIndex=t11FcSpPoNoMembCtAccessIndex, t11FcSpPoNaCtDescrSpecifierIndex=t11FcSpPoNaCtDescrSpecifierIndex, t11FcSpPoCtDescrIndex=t11FcSpPoCtDescrIndex, t11FcSpPoNaWkpDescrWkpNumber=t11FcSpPoNaWkpDescrWkpNumber, t11FcSpPoNaSwConnAllowedIndex=t11FcSpPoNaSwConnAllowedIndex, t11FcSpPoCtDescrSpecifierIndex=t11FcSpPoCtDescrSpecifierIndex, t11FcSpPoAdminFabricName=t11FcSpPoAdminFabricName, t11FcSpPoSwConnAllowedType=t11FcSpPoSwConnAllowedType, 
t11FcSpPoNaSummaryHashFormat=t11FcSpPoNaSummaryHashFormat, t11FcSpPoSwConnSwitchName=t11FcSpPoSwConnSwitchName, t11FcSpPoSummaryPolicyType=t11FcSpPoSummaryPolicyType, t11FcSpPoMIBObjects=t11FcSpPoMIBObjects, t11FcSpPoNaNoMembTable=t11FcSpPoNaNoMembTable, t11FcSpPoNotifyActivateFail=t11FcSpPoNotifyActivateFail, t11FcSpPoOperDeActivate=t11FcSpPoOperDeActivate, t11FcSpPoNaSwMembAuthBehaviour=t11FcSpPoNaSwMembAuthBehaviour, t11FcSpPoAttribValue=t11FcSpPoAttribValue, t11FcSpPoMIBCompliance=t11FcSpPoMIBCompliance, t11FcSpPoRequestSource=t11FcSpPoRequestSource, t11FcSpPoNotifyObjectsGroup=t11FcSpPoNotifyObjectsGroup, t11FcSpPoPolicySummaryObjName=t11FcSpPoPolicySummaryObjName, t11FcSpPoAttribEntryIndex=t11FcSpPoAttribEntryIndex, t11FcSpPoNaNoMembAttribute=t11FcSpPoNaNoMembAttribute, t11FcSpPoAttribEntry=t11FcSpPoAttribEntry, t11FcSpPoNaNoMembFlags=t11FcSpPoNaNoMembFlags, t11FcSpPoCtDescrGsType=t11FcSpPoCtDescrGsType, t11FcSpPoOperActivate=t11FcSpPoOperActivate, t11FcSpPoNaSummaryTable=t11FcSpPoNaSummaryTable, t11FcSpPoNaAttribTable=t11FcSpPoNaAttribTable, t11FcSpPoOperFailCause=t11FcSpPoOperFailCause, t11FcSpPoStatsTable=t11FcSpPoStatsTable, t11FcSpPoInRejects=t11FcSpPoInRejects, t11FcSpPoSwMembDomainID=t11FcSpPoSwMembDomainID, t11FcSpPoSwConnAllowedName=t11FcSpPoSwConnAllowedName, t11FcSpPoWkpDescrSpecifierIndex=t11FcSpPoWkpDescrSpecifierIndex, t11FcSpPoNaAttribExtension=t11FcSpPoNaAttribExtension, t11FcSpPoNaSwMembAttribute=t11FcSpPoNaSwMembAttribute, t11FcSpPoNonActiveObjectsGroup=t11FcSpPoNonActiveObjectsGroup, t11FcSpPoNaSwConnPortNameOrAll=t11FcSpPoNaSwConnPortNameOrAll, t11FcSpPoNoMembTable=t11FcSpPoNoMembTable, t11FcSpPoStorageType=t11FcSpPoStorageType, t11FcSpPolicyMIB=t11FcSpPolicyMIB, t11FcSpPoNotifyDeactivateFail=t11FcSpPoNotifyDeactivateFail, t11FcSpPoMIBCompliances=t11FcSpPoMIBCompliances, t11FcSpPoIpMgmtEntryNameLow=t11FcSpPoIpMgmtEntryNameLow, t11FcSpPoNaWkpDescrEntry=t11FcSpPoNaWkpDescrEntry, t11FcSpPoInAccepts=t11FcSpPoInAccepts, 
t11FcSpPoNaAuthProtTable=t11FcSpPoNaAuthProtTable, t11FcSpPoFabricIndex=t11FcSpPoFabricIndex, t11FcSpPoAuthProtPartIndex=t11FcSpPoAuthProtPartIndex, t11FcSpPoNaSummaryPolicyName=t11FcSpPoNaSummaryPolicyName, t11FcSpPoSwMembPolicyDataRole=t11FcSpPoSwMembPolicyDataRole, t11FcSpPoAuthProtParams=t11FcSpPoAuthProtParams, t11FcSpPoNaNoMembNodeNameType=t11FcSpPoNaNoMembNodeNameType, t11FcSpPoNaWkpDescrDestPort=t11FcSpPoNaWkpDescrDestPort, t11FcSpPoMIBNotifications=t11FcSpPoMIBNotifications, t11FcSpPoSwConnAllowedNameType=t11FcSpPoSwConnAllowedNameType, t11FcSpPoAuthProtIdentifier=t11FcSpPoAuthProtIdentifier, t11FcSpPoNaWkpDescrSpecifierIndx=t11FcSpPoNaWkpDescrSpecifierIndx, t11FcSpPoNaSwConnAllowedType=t11FcSpPoNaSwConnAllowedType, t11FcSpPoSwMembEntry=t11FcSpPoSwMembEntry, t11FcSpPoWkpDescrTable=t11FcSpPoWkpDescrTable, t11FcSpPoWkpDescrIndex=t11FcSpPoWkpDescrIndex, t11FcSpPoNaSwMembSwitchNameType=t11FcSpPoNaSwMembSwitchNameType, t11FcSpPoNaSwMembFlags=t11FcSpPoNaSwMembFlags, t11FcSpPoWkpDescrFlags=t11FcSpPoWkpDescrFlags, t11FcSpPoNaIpMgmtRowStatus=t11FcSpPoNaIpMgmtRowStatus, t11FcSpPoSwMembSwitchName=t11FcSpPoSwMembSwitchName, t11FcSpPoStatsEntry=t11FcSpPoStatsEntry, t11FcSpPoNaCtDescrRowStatus=t11FcSpPoNaCtDescrRowStatus, t11FcSpPoNaSwMembTable=t11FcSpPoNaSwMembTable, t11FcSpPoOperations=t11FcSpPoOperations, t11FcSpPoAttribExtension=t11FcSpPoAttribExtension, t11FcSpPoOperEntry=t11FcSpPoOperEntry, t11FcSpPoNaSwListFabricName=t11FcSpPoNaSwListFabricName, t11FcSpPoNaAuthProtIdentifier=t11FcSpPoNaAuthProtIdentifier, t11FcSpPoReasonCode=t11FcSpPoReasonCode, t11FcSpPoControlTable=t11FcSpPoControlTable, t11FcSpPoMIBGroups=t11FcSpPoMIBGroups, t11FcSpPoNaAttribPartIndex=t11FcSpPoNaAttribPartIndex, t11FcSpPoAuthProtEntry=t11FcSpPoAuthProtEntry, t11FcSpPoNoMembFlags=t11FcSpPoNoMembFlags, t11FcSpPoNaSummaryRowStatus=t11FcSpPoNaSummaryRowStatus, t11FcSpPoActive=t11FcSpPoActive, t11FcSpPoAttribType=t11FcSpPoAttribType, t11FcSpPoActiveObjectsGroup=t11FcSpPoActiveObjectsGroup, 
t11FcSpPoNoMembAttribute=t11FcSpPoNoMembAttribute, t11FcSpPoNaSwConnAllowedName=t11FcSpPoNaSwConnAllowedName, t11FcSpPoNoMembNodeName=t11FcSpPoNoMembNodeName, t11FcSpPoNaSwConnTable=t11FcSpPoNaSwConnTable, t11FcSpPoNoMembEntry=t11FcSpPoNoMembEntry, t11FcSpPoNaCtDescrFlags=t11FcSpPoNaCtDescrFlags, t11FcSpPoNaCtDescrGsSubType=t11FcSpPoNaCtDescrGsSubType, t11FcSpPoNaIpMgmtEntryNameType=t11FcSpPoNaIpMgmtEntryNameType, t11FcSpPoLastNotifyType=t11FcSpPoLastNotifyType, t11FcSpPoNaSwConnRowStatus=t11FcSpPoNaSwConnRowStatus, t11FcSpPoSummaryPolicyName=t11FcSpPoSummaryPolicyName, t11FcSpPoControl=t11FcSpPoControl, t11FcSpPoNaSummaryHashStatus=t11FcSpPoNaSummaryHashStatus, t11FcSpPoTable=t11FcSpPoTable, t11FcSpPoNonActive=t11FcSpPoNonActive, t11FcSpPoNaAttribName=t11FcSpPoNaAttribName, t11FcSpPoNaAttribType=t11FcSpPoNaAttribType, t11FcSpPoNotifyActivation=t11FcSpPoNotifyActivation, t11FcSpPoSummaryHashFormat=t11FcSpPoSummaryHashFormat, t11FcSpPoNaIpMgmtEntryNameLow=t11FcSpPoNaIpMgmtEntryNameLow, t11FcSpPoNaSummaryName=t11FcSpPoNaSummaryName, t11FcSpPoWkpDescrDestPort=t11FcSpPoWkpDescrDestPort, t11FcSpPoNaAttribEntry=t11FcSpPoNaAttribEntry, t11FcSpPoOperResult=t11FcSpPoOperResult, t11FcSpPoNaSummaryPolicyType=t11FcSpPoNaSummaryPolicyType, t11FcSpPoSwMembAuthBehaviour=t11FcSpPoSwMembAuthBehaviour, t11FcSpPoNaNoMembCtAccessIndex=t11FcSpPoNaNoMembCtAccessIndex, t11FcSpPoNaAttribValue=t11FcSpPoNaAttribValue, t11FcSpPoNaIpMgmtEntryNameHigh=t11FcSpPoNaIpMgmtEntryNameHigh, t11FcSpPoNaIpMgmtTable=t11FcSpPoNaIpMgmtTable, PYSNMP_MODULE_ID=t11FcSpPolicyMIB, t11FcSpPoSwMembTable=t11FcSpPoSwMembTable, t11FcSpPoNaIpMgmtWkpIndex=t11FcSpPoNaIpMgmtWkpIndex, t11FcSpPoNaSummaryEntry=t11FcSpPoNaSummaryEntry, t11FcSpPoIpMgmtEntry=t11FcSpPoIpMgmtEntry, t11FcSpPoNaSummaryPolicyNameType=t11FcSpPoNaSummaryPolicyNameType, t11FcSpPoNaSwConnEntry=t11FcSpPoNaSwConnEntry, t11FcSpPoNaCtDescrTable=t11FcSpPoNaCtDescrTable, t11FcSpPoServerAddress=t11FcSpPoServerAddress, 
t11FcSpPoSwMembAttribute=t11FcSpPoSwMembAttribute, t11FcSpPoNaSwListEntry=t11FcSpPoNaSwListEntry, t11FcSpPoIpMgmtEntryNameHigh=t11FcSpPoIpMgmtEntryNameHigh, t11FcSpPoNaCtDescrGsType=t11FcSpPoNaCtDescrGsType, t11FcSpPoNoMembNodeNameType=t11FcSpPoNoMembNodeNameType, t11FcSpPoSwMembSwitchFlags=t11FcSpPoSwMembSwitchFlags, t11FcSpPoNaWkpDescrRowStatus=t11FcSpPoNaWkpDescrRowStatus, t11FcSpPoOperTable=t11FcSpPoOperTable)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
44dfc718b6e3fc2f1c65e8c1d482ac4b732644a6 | 6f7e6d4225919b65829ce29f23527b36ac04d77d | /kik_profile/urls.py | abe54215cef16bbe3562f644cfc0cd68ab4d370d | [
"MIT"
] | permissive | kimilguk/Django-kimilguk | 1756a7f5dbd84380571314656add53f4fd7f837a | d110ff68015832fa7c94a4fa23a3e8881fc3520c | refs/heads/master | 2023-05-03T06:06:44.982206 | 2021-04-30T04:10:31 | 2021-04-30T04:10:31 | 362,828,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 516 | py | from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('list/', views.career_list, name='career_list'),
path('award/', views.award_list, name='award_list'),
path('award/create/', views.award_create, name='award_create'),
path('award/read/<int:pk>/', views.award_read, name='award_read'),
path('award/update/<int:pk>/', views.award_update, name='award_update'),
path('award/delete/<int:pk>/', views.award_delete, name='award_delete'),
] | [
"boramcom@daum.net"
] | boramcom@daum.net |
5ae0c6fffbf2bcf472e2f4fa5da1974390f29cff | d666fc9cfbbf59253b744ee0664803a51bbb1898 | /scripts/sac_env/local/bin/pygmentize | 328b06da4fe80de1b302b4c195a4e2b0d431263a | [
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
] | permissive | akshay-sharma1995/sac | 0968ca50b85e700d0816c03f3d3aa5ce7b9f20d7 | 0c506e8f4f4d7323d88f007476721537e7fb8817 | refs/heads/master | 2020-09-23T14:10:39.697595 | 2020-02-13T16:01:54 | 2020-02-13T16:01:54 | 225,518,093 | 0 | 0 | NOASSERTION | 2019-12-03T03:00:56 | 2019-12-03T03:00:55 | null | UTF-8 | Python | false | false | 260 | #!/home/abhijeet/Desktop/abcd/sac/scripts/sac_env/bin/python3.5
# -*- coding: utf-8 -*-
# Console-script shim generated for a virtualenv: dispatches straight to
# pygments' command-line entry point.
import re
import sys
from pygments.cmdline import main
if __name__ == '__main__':
    # Strip setuptools' "-script.pyw"/".exe" wrapper suffix from argv[0] so
    # pygments reports a clean program name in usage/error messages.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"akshay.iitkbkn@gmail.com"
] | akshay.iitkbkn@gmail.com | |
65f5de6be9f02e05e865abdddf6e58ab8f868791 | b5238647ba5c961f74ccbfd0f39d402ab0c45029 | /correlation/scatters1.py | ccef3dbf50c6832120ff0930b11aca0453da15a2 | [] | no_license | KenoLeon/PythonDev | 5582427d1c4b0eca48e323cc27235db4aff679c9 | ca64aba94e1c4cb12661e83ab1b5f40700aa6c06 | refs/heads/master | 2021-05-11T07:51:28.017239 | 2020-08-08T18:45:25 | 2020-08-08T18:45:25 | 118,033,531 | 1 | 9 | null | null | null | null | UTF-8 | Python | false | false | 650 | py |
# Scatter plot of average temperature vs. ice-cream production (Bokeh).
from bokeh.models import HoverTool
from bokeh.plotting import figure, show, output_file
import pandas as pd

# Load only the columns this chart needs from the weather/sales CSV.
columns = ['Date', 'AVG temp C', 'Ice Cream production']
df = pd.read_csv("correlation/weatherIceCream.csv", usecols=columns)

# Tooltip that shows the coordinates under the cursor.
tooltip = HoverTool(tooltips=[
    ("(Temp,Ice Cream Production", "($x, $y)")
])

# Fixed axis ranges keep the point cloud framed consistently.
p = figure(x_range=(-10, 30), y_range=(35, 90), tools=[tooltip])
p.scatter(df['AVG temp C'], df['Ice Cream production'], size=10)

# Cosmetic styling and axis labels.
p.background_fill_color = "mintcream"
p.background_fill_alpha = 0.2
p.xaxis.axis_label = "Avg Temp C"
p.yaxis.axis_label = "Ice Cream Production (1000, Gallons)"

show(p)
| [
"me@k3no.com"
] | me@k3no.com |
968d49da59f36b79b4d99e991e0797c40b6cbe57 | 6d66584d18e581cb9e90a7c2daab2709c545666d | /subscriber/src/subscriber.py | 6c42a170902397e999f3a30e5123f321938ed910 | [] | no_license | fabiobedeschi/iiot-userservice | f90da4988417349e239dbb04a55608383441f82c | 884d8c51e4b73cc6439a4484522f4d44514b303f | refs/heads/master | 2023-05-21T01:11:13.122803 | 2021-06-11T15:55:51 | 2021-06-11T16:00:22 | 344,623,800 | 0 | 0 | null | 2021-06-11T14:08:40 | 2021-03-04T22:14:44 | Python | UTF-8 | Python | false | false | 1,300 | py | from logging import getLogger
from os import getenv
from paho.mqtt.client import Client
from ujson import loads
from .database import Database
logger = getLogger()
class Subscriber(Client):
    """paho-mqtt client that applies incoming user-delta messages to the DB.

    Subscribes to the configured topic on connect and, for each JSON message
    that carries a ``user`` object, forwards that user's uuid/delta to
    ``Database.update_user``.
    """

    def __init__(self, db=None, topic=None):
        # Fall back to a keep-retrying Database; topic falls back to the
        # USERSERVICE_TOPIC environment variable at subscribe time.
        super().__init__()
        self.db = db or Database(keep_retrying=True)
        self.topic = topic
        # Wire paho's callback hooks to the handlers below.
        self.on_connect = self.sub_on_connect
        self.on_subscribe = self.sub_on_subscribe
        self.on_message = self.sub_on_message

    def sub_on_connect(self, client, userdata, flags, rc):
        # Subscribing inside on_connect means the subscription is restored
        # automatically after a reconnect.
        logger.info('Successfully connected to mqtt broker.')
        client.subscribe(
            topic=self.topic or getenv('USERSERVICE_TOPIC'),
            qos=int(getenv('USERSERVICE_QOS', 0))
        )

    def sub_on_subscribe(self, client, userdata, mid, granted_qos):
        # Purely informational; no state is kept about the subscription.
        logger.info(f'Successfully subscribed to "{self.topic or getenv("USERSERVICE_TOPIC")}" topic.')

    def sub_on_message(self, client, userdata, message):
        # Messages without a "user" object are ignored (returns None).
        logger.info(f'Received message: {message}')
        payload = loads(message.payload)
        user = payload.get('user')
        if user:
            response = self.db.update_user(
                uuid=user.get('uuid'),
                delta=user.get('delta')
            )
            return response
| [
"fabiobedeschi5@gmail.com"
] | fabiobedeschi5@gmail.com |
f88931e5c35b1af9590e3763f0bf73700c04d323 | dbd678a51e8b9db4320bda1a2124767cdaa40fdc | /log.py | e5e278b3e5c5e0c9e479fc13fba3170766dc3b9f | [
"MIT"
] | permissive | chensonglu/ssd.pytorch | 46c375828cf1d3014d029c46087492d6db887f8b | 22de68f603fd55a6e72f93ef304449f240db31bd | refs/heads/master | 2023-01-09T19:35:14.561724 | 2022-12-30T09:27:37 | 2022-12-30T09:27:37 | 188,745,601 | 4 | 1 | MIT | 2020-11-14T08:16:55 | 2019-05-27T00:29:25 | Python | UTF-8 | Python | false | false | 1,074 | py | import logging
import time,os,sys
if sys.version_info.major == 3:
import configparser as cfg
else:
import ConfigParser as cfg
class log(object):
    """Import-time logging setup (the class body runs once on first import).

    Configures the root logger with a DEBUG-level file handler (file name
    timestamped, directory taken from the ``log_path`` option in the
    ``[general]`` section of ``util.config``) and an INFO-level console
    handler.  The class is used purely as a namespace and is never
    instantiated.
    """
    # root logger setting
    save_path = time.strftime("%m_%d_%H_%M") + '.log'
    l = logging.getLogger()
    l.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    # Clear any pre-existing handler streams.  Iterate over a *copy*:
    # removing from the live ``handlers`` list while looping over it
    # silently skips every other handler.
    for it in list(l.handlers):
        l.removeHandler(it)
    # file handler setting
    config = cfg.RawConfigParser()
    config.read('util.config')
    save_dir = config.get('general', 'log_path')
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    save_path = os.path.join(save_dir, save_path)
    f_handler = logging.FileHandler(save_path)
    f_handler.setLevel(logging.DEBUG)
    f_handler.setFormatter(formatter)
    # console handler
    c_handler = logging.StreamHandler()
    c_handler.setLevel(logging.INFO)
    c_handler.setFormatter(formatter)
    l.addHandler(f_handler)
    l.addHandler(c_handler)
    # print(l.handlers[0].__dict__)
| [
"chenslvs@163.com"
] | chenslvs@163.com |
5862cef2cbcda42f964c3446310223fed5ae2879 | cce830c9b076bd2147b129d03efc52f44817c571 | /backend/locations/services.py | 1580b87925ab15d4b9a8eb5e5da9e1c062b6cc7e | [] | no_license | ashd32/linkedIn-clone | 9b6859628cb5214e30b049d70c595399e2928f8e | 83a96ebc823c7f39260d8fac18f5e39c0fa2020b | refs/heads/master | 2020-08-23T17:58:37.546602 | 2019-09-24T17:45:36 | 2019-09-24T17:45:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,419 | py | import requests
from .models import Country, City
class GeoLocationApiClient:
    """Thin read-only wrapper around the hh.ru areas API."""

    BASE_URL = "https://api.hh.ru"
    ALL_COUNTRIES_URL = '/areas/countries'
    CITIES_FROM_COUNTRY_URL = '/areas/{0}'

    @classmethod
    def get_all_countries(cls):
        """Return the decoded JSON payload listing all countries."""
        return requests.get(cls.BASE_URL + cls.ALL_COUNTRIES_URL).json()

    @classmethod
    def get_all_cities_from_country(cls, id):
        """Return the list of areas (cities) for the country with *id*."""
        url = cls.BASE_URL + cls.CITIES_FROM_COUNTRY_URL.format(id)
        payload = requests.get(url).json()
        return payload['areas']
class DataHandler:
    """Persists hh.ru API payloads into the Country and City models."""

    def save_countries_list(self, countries):
        """Create any missing Country rows from the API payload."""
        for entry in countries:
            Country.objects.get_or_create(
                country_id=entry['id'],
                name=entry['name'],
            )

    def save_cities_list(self, cities):
        """Create any missing City rows from the API payload."""
        for entry in cities:
            City.objects.get_or_create(
                city_id=entry['id'],
                name=entry['name'],
                country_id=entry['parent_id'],
            )
def load_locations():
    """Fetch every country and its cities from the API and persist them."""
    client = GeoLocationApiClient()
    handler = DataHandler()

    countries = client.get_all_countries()
    handler.save_countries_list(countries)

    # One request per country; cities reference their country via parent_id.
    for country in countries:
        handler.save_cities_list(
            client.get_all_cities_from_country(country['id'])
        )
| [
"maxim226356@mail.ru"
] | maxim226356@mail.ru |
3e12c123598f024e7a232f15fc7f4946c1f61e72 | 0e010fefc92258efa1279ecc75149c0fe8050c4a | /model/cliArgument.py | 1122cc592508fbe9e37d5faf7044b99e76e936ec | [] | no_license | yanivbenzvi/cvdm-cli | 8ca82e36504d250593b4b83753453b82c345c66e | 5b4ea87df72decf017615f6260883dde8c388131 | refs/heads/master | 2021-09-03T20:04:25.062324 | 2018-01-11T15:49:10 | 2018-01-11T15:49:10 | 113,788,326 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,727 | py | import argparse
# Module-level parser shared by the whole CLI; ``args`` holds the parsed
# namespace after arg_command() has run (kept for existing callers).
text = "CV's manager"
parser = argparse.ArgumentParser(description=text)
args = None


def arg_command():
    """Register all CLI options on the module-level parser and parse argv.

    Side effect: stores the parsed namespace in the module-level ``args``
    global.  Also returns the namespace so new callers need not reach for
    the global.

    Returns:
        argparse.Namespace: the parsed command-line arguments.
    """
    global args
    parser.add_argument("--login", action="store_true", help="if you want to login")
    parser.add_argument("--sign_up", "-su", action="store_true", help="if you want to sign_up")
    parser.add_argument("--sign_up_multi", "-sum", action="store", help="if you want to sign_up")
    parser.add_argument("--fimport", "-im", action="store", help="import the json file to database")
    parser.add_argument("--fexport", "-ex", action="store_true", help="export from database to json file")
    parser.add_argument("--search", "-s", action="store_true", help="the HR can search on the database; it will open the json search file.")
    parser.add_argument("-jsonformat", action="store_true", help="cv template.")
    parser.add_argument("-p", "--password", action="store", help="add password")
    parser.add_argument("-u", "--user", action="store", help="add candidate to the database")
    parser.add_argument("-get", action="store", help="get candidate card, candidate id.")
    parser.add_argument("-update", action="store", help="get candidate card, candidate id.")
    # NOTE(review): "-search" shares the "search" dest with "--search" above,
    # so passing "-search VALUE" overwrites the boolean flag — confirm intent.
    parser.add_argument("-search", action="store", help="search options, for sort academic = 1, academic history = 2, expand skills = 3.")
    parser.add_argument("-search_v", action="store", help="search value: sort academic = 1/2/3/4; other methods take a string value.")
    # hackathon 3 individual task
    parser.add_argument("-get_e", action="store_true", help="search value: sort academic = 1/2/3/4; other methods take a string value.")
    args = parser.parse_args()
    return args
| [
"yanivbenzvi023@gmail.com"
] | yanivbenzvi023@gmail.com |
df622ac604a77184763b5e833724bc3afb5e47b9 | e8bf00dba3e81081adb37f53a0192bb0ea2ca309 | /domains/explore/problems/training/problem196_EE.py | 461acff0df75b97b0fd0be169bf9ddbdada09445 | [
"BSD-3-Clause"
] | permissive | patras91/rae_release | 1e6585ee34fe7dbb117b084df982ca8a8aed6795 | 0e5faffb7eb732fdb8e3bbf2c6d2f2cbd520aa30 | refs/heads/master | 2023-07-13T20:09:41.762982 | 2021-08-11T17:02:58 | 2021-08-11T17:02:58 | 394,797,515 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,805 | py | __author__ = 'patras'
from domain_exploreEnv import *
from timer import DURATION
from state import state, rv
DURATION.TIME = {
'survey': 5,
'monitor': 5,
'screen': 5,
'sample': 5,
'process': 5,
'fly': 3,
'deposit': 1,
'transferData': 1,
'take': 2,
'put': 2,
'move': 10,
'charge': 5,
'negotiate': 5,
'handleAlien': 5,
}
DURATION.COUNTER = {
'survey': 5,
'monitor': 5,
'screen': 5,
'sample': 5,
'process': 5,
'fly': 3,
'deposit': 1,
'transferData': 1,
'take': 2,
'put': 2,
'move': 10,
'charge': 5,
'negotiate': 5,
'handleAlien': 5,
}
rv.TYPE = {'e1': 'survey', 'e2': 'monitor', 'e3': 'screen', 'e4': 'sample', 'e5':'process'}
rv.EQUIPMENT = {'survey': 'e1', 'monitor': 'e2', 'screen': 'e3', 'sample': 'e4', 'process': 'e5'}
rv.EQUIPMENTTYPE = {'e1': 'survey', 'e2': 'monitor', 'e3': 'screen', 'e4': 'sample', 'e5':'process'}
rv.LOCATIONS = ['base', 'z1', 'z2', 'z3', 'z4', 'z5', 'z6']
rv.EDGES = {'base': {'z1': 50, 'z3': 50, 'z4': 40, 'z6': 40}, 'z1': {'base': 50, 'z2': 20}, 'z2': {'z1': 20, 'z3': 20}, 'z3': {'z2': 20, 'base': 50}, 'z4': {'z3': 90, 'z5': 35}, 'z5': {'z4': 35, 'z6': 35}, 'z6': {'base': 40, 'z5': 35}}
def ResetState():
    """Reinitialise the shared simulator `state` for this problem instance."""
    # Both ground robots and the UAV start at the base.
    state.loc = {'r1': 'base', 'r2': 'base', 'UAV': 'base'}
    # Battery charge levels per actor.
    state.charge = { 'UAV': 80, 'r1': 80, 'r2': 80}
    # Data units currently held by each actor.
    state.data = { 'UAV': 1, 'r1': 1, 'r2': 3}
    # Positions of the equipment/objects; o1 starts on board the UAV.
    state.pos = {'c1': 'base', 'e1': 'base', 'e2': 'base', 'e3': 'base', 'e4': 'base', 'e5': 'base', 'o1': 'UAV'}
    # What each actor is carrying; NIL (empty-load marker) comes from the
    # star import of domain_exploreEnv — confirm.
    state.load = {'r1': NIL, 'r2': NIL, 'UAV': 'o1'}
    state.storm = {'active': True}
tasks = {
2: [['doActivities', 'UAV', [['survey', 'z2'], ['survey', 'z3'], ['survey', 'z1']]]],
4: [['handleEmergency', 'r2', 'z1']],
}
eventsEnv = {
4: [alienSpotted, ['z2']]
} | [
"patras@umd.edu"
] | patras@umd.edu |
527e04c3e2df43a65d5318175e6297130d8ea4bf | 408bf5a15b698e148b5f5cdfc377cc3fea89645e | /mod02/lesson03/hello-device.py | e90587f72742f8a728805783597fc65eae8ef054 | [] | no_license | chuck-a-black/PyNE | dc06143be496530a65f35e5d90f030c5f6245e76 | 17d8916a91ff8ca95ebafcab1d97410c8750cd7a | refs/heads/master | 2021-01-10T13:08:33.876486 | 2015-12-14T17:57:48 | 2015-12-14T17:57:48 | 47,991,766 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 121 | py | import pexpect
# Ping localhost five times and dump what the command printed.
ping = pexpect.spawn('ping -c 5 localhost')
# Block until the command finishes (EOF) or pexpect's default timeout fires.
ping.expect([pexpect.EOF,pexpect.TIMEOUT])
# `before` holds everything read prior to the matched condition.
print(ping.before)
| [
"chuck.black@tallac.com"
] | chuck.black@tallac.com |
df62f85137c0cc0103a0d3891ff44224548d7fe7 | 5ea14588b6d2d720ba0d8e152348f74671b92a6f | /bin/django-admin.py | 45c22248a5358d09f15a0c9bc23f4c84f60f1f17 | [] | no_license | LeviWyattP/s04p | 7775477ea0a39380253eb4e1b5670de81cb821cb | 7d56e5c4345ff87351dd8e2240210b4f10ca69e8 | refs/heads/master | 2020-12-24T21:27:28.408801 | 2016-05-30T01:21:08 | 2016-05-30T01:21:08 | 59,710,125 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 136 | py | #!/home/levi/blog/bin/python3
from django.core import management
# Virtualenv console-script shim: delegate to Django's management command
# dispatcher when run as a script.
if __name__ == "__main__":
    management.execute_from_command_line()
| [
"leviwyattp@gmail.com"
] | leviwyattp@gmail.com |
23c2230caa19bd3b0f93a09ef4b4c4bd47228ccf | e446bd2ad78a6835402b6d3a852f5f1312b846de | /Frisa/Inventario/migrations/0002_auto_20210305_2350.py | ab28b2c6f6ea63c166f198defd1bc3291275f8ce | [
"MIT"
] | permissive | ag94e/Django_simple_crud | 35fcd47831b4ce1ef980cf4397c15d091c14256b | 1cd85bb42a2e182255e97d9e37bab446229c1ef5 | refs/heads/main | 2023-03-18T18:20:25.169561 | 2021-03-14T08:08:48 | 2021-03-14T08:08:48 | 347,458,560 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 533 | py | # Generated by Django 3.1.7 on 2021-03-05 23:50
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 3.1.7): redefine `cantidad` and `codigo` on
    ModeloInventario as plain IntegerField columns (previous field
    definitions are not visible here — see 0001_initial)."""

    dependencies = [
        ('Inventario', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='modeloinventario',
            name='cantidad',
            field=models.IntegerField(),
        ),
        migrations.AlterField(
            model_name='modeloinventario',
            name='codigo',
            field=models.IntegerField(),
        ),
    ]
| [
"a.guerrero.esp94@gmail.com"
] | a.guerrero.esp94@gmail.com |
1829eb7fa9708ae44c7d8604fd42a485cddf16cf | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/valid_20200616203128.py | cdda987d9744f74ed718f6a4384af399c1237fc4 | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26 | py | # pv4 -->
def valid(str):
| [
"mary.jereh@gmail.com"
] | mary.jereh@gmail.com |
2c7b2a0d6df9f2dfe95841996308db227c7052fb | d84c269e132935033550ecc834e59c95503d53bd | /src/zeep/wsse/signature.py | 407d93a414ac7fde73cce7e2a34a6990c967db08 | [
"BSD-3-Clause",
"MIT"
] | permissive | renzon/python-zeep | 9d5fda7f7b07e88c4ec78ffa95cfd1d92f075fc4 | 34e44794206b809fff739ac9b8ba4e14127f645f | refs/heads/master | 2020-12-30T15:21:42.463485 | 2017-05-12T22:11:03 | 2017-05-12T22:11:03 | 91,129,538 | 0 | 0 | null | 2017-05-12T21:03:46 | 2017-05-12T21:03:46 | null | UTF-8 | Python | false | false | 9,616 | py | """Functions for WS-Security (WSSE) signature creation and verification.
Heavily based on test examples in https://github.com/mehcode/python-xmlsec as
well as the xmlsec documentation at https://www.aleksey.com/xmlsec/.
Reading the xmldsig, xmlenc, and ws-security standards documents, though
admittedly painful, will likely assist in understanding the code in this
module.
"""
from lxml import etree
from lxml.etree import QName
try:
import xmlsec
except ImportError:
xmlsec = None
from zeep import ns
from zeep.utils import detect_soap_env
from zeep.exceptions import SignatureVerificationFailed
from zeep.wsse.utils import ensure_id, get_security_header
# SOAP envelope
SOAP_NS = 'http://schemas.xmlsoap.org/soap/envelope/'
class Signature(object):
    """WSSE plugin that signs outgoing envelopes and verifies incoming ones
    using the given PEM key and certificate."""

    def __init__(self, key_file, certfile, password=None):
        """Remember the key/cert locations; fail fast if xmlsec is missing."""
        check_xmlsec_import()
        self.key_file = key_file
        self.certfile = certfile
        self.password = password

    def apply(self, envelope, headers):
        """Sign *envelope* in place and return it with the headers."""
        sign_envelope(envelope, self.key_file, self.certfile, self.password)
        return envelope, headers

    def verify(self, envelope):
        """Verify the signature on *envelope*; raises on failure."""
        verify_envelope(envelope, self.certfile)
        return envelope
def check_xmlsec_import():
    """Raise a helpful ImportError when the optional xmlsec dep is absent."""
    if xmlsec is not None:
        return
    raise ImportError(
        "The xmlsec module is required for wsse.Signature()\n"
        "You can install xmlsec with: pip install xmlsec\n"
        "or install zeep via: pip install zeep[xmlsec]\n"
    )
def sign_envelope(envelope, keyfile, certfile, password=None):
    """Sign given SOAP envelope with WSSE sig using given key and cert.

    Signs the soap:Body and a wsu:Timestamp node (appended to the
    wsse:Security header here), and inserts a ds:Signature node into the
    wsse:Security header containing the signature.

    Uses EXCL-C14N transforms to normalize the signed XML (so that
    irrelevant whitespace or attribute ordering changes don't invalidate
    the signature), and SHA1/RSA-SHA1 digests.

    Effect on the document: the signed nodes gain a ``wsu:Id`` attribute,
    and the header gains a ``ds:Signature`` whose ``ds:Reference`` URIs
    point at those ids, e.g.::

        <soap:Envelope>
          <soap:Header>
            <wsse:Security mustUnderstand="true">
              <Signature xmlns="http://www.w3.org/2000/09/xmldsig#">
                <SignedInfo>
                  <Reference URI="#id-...">...</Reference>
                </SignedInfo>
                <SignatureValue>...</SignatureValue>
                <KeyInfo>
                  <wsse:SecurityTokenReference>
                    <X509Data>...</X509Data>
                  </wsse:SecurityTokenReference>
                </KeyInfo>
              </Signature>
              <wsu:Timestamp wsu:Id="id-..."/>
            </wsse:Security>
          </soap:Header>
          <soap:Body wsu:Id="id-...">...</soap:Body>
        </soap:Envelope>

    Args:
        envelope: lxml element for the full SOAP envelope; modified in place.
        keyfile: path to the PEM private key used to sign.
        certfile: path to the PEM certificate embedded in the KeyInfo.
        password: optional password for the private key.
    """
    # Create the Signature node.
    signature = xmlsec.template.create(
        envelope,
        xmlsec.Transform.EXCL_C14N,
        xmlsec.Transform.RSA_SHA1,
    )

    # Add a KeyInfo node with X509Data child to the Signature. XMLSec will fill
    # in this template with the actual certificate details when it signs.
    key_info = xmlsec.template.ensure_key_info(signature)
    x509_data = xmlsec.template.add_x509_data(key_info)
    xmlsec.template.x509_data_add_issuer_serial(x509_data)
    xmlsec.template.x509_data_add_certificate(x509_data)

    # Load the signing key and certificate.
    key = xmlsec.Key.from_file(keyfile, xmlsec.KeyFormat.PEM, password=password)
    key.load_cert_from_file(certfile, xmlsec.KeyFormat.PEM)

    # Insert the Signature node in the wsse:Security header.
    security = get_security_header(envelope)
    security.insert(0, signature)

    # Perform the actual signing.
    ctx = xmlsec.SignatureContext()
    ctx.key = key
    # NOTE(review): the Timestamp is appended empty here (no Created/Expires
    # children) and then signed — presumably another WSSE element fills it;
    # confirm against the callers.
    security.append(etree.Element(QName(ns.WSU, 'Timestamp')))
    soap_env = detect_soap_env(envelope)
    _sign_node(ctx, signature, envelope.find(QName(soap_env, 'Body')))
    _sign_node(ctx, signature, security.find(QName(ns.WSU, 'Timestamp')))
    ctx.sign(signature)

    # Place the X509 data inside a WSSE SecurityTokenReference within
    # KeyInfo. The recipient expects this structure, but we can't rearrange
    # like this until after signing, because otherwise xmlsec won't populate
    # the X509 data (because it doesn't understand WSSE).
    sec_token_ref = etree.SubElement(
        key_info, QName(ns.WSSE, 'SecurityTokenReference'))
    sec_token_ref.append(x509_data)
def verify_envelope(envelope, certfile):
    """Verify WS-Security signature on given SOAP envelope with given cert.

    Expects a document like that found in the sample XML in the ``sign()``
    docstring.

    Args:
        envelope: lxml element for the full SOAP envelope.
        certfile: path to the PEM certificate to verify against.

    Raises:
        SignatureVerificationFailed: if a required WSSE element or signed
            reference is missing, or the signature does not validate.
    """
    soap_env = detect_soap_env(envelope)

    header = envelope.find(QName(soap_env, 'Header'))
    # Use explicit ``is None`` checks throughout: lxml Elements are falsy
    # when they have no children, so truth-testing (``if not header``) is
    # both deprecated by lxml and semantically wrong here.
    if header is None:
        raise SignatureVerificationFailed()

    security = header.find(QName(ns.WSSE, 'Security'))
    if security is None:
        raise SignatureVerificationFailed()
    signature = security.find(QName(ns.DS, 'Signature'))
    if signature is None:
        raise SignatureVerificationFailed()

    ctx = xmlsec.SignatureContext()

    # Find each signed element and register its ID with the signing context.
    refs = signature.xpath(
        'ds:SignedInfo/ds:Reference', namespaces={'ds': ns.DS})
    for ref in refs:
        # Get the reference URI and cut off the initial '#'
        referenced_id = ref.get('URI')[1:]
        referenced = envelope.xpath(
            "//*[@wsu:Id='%s']" % referenced_id,
            namespaces={'wsu': ns.WSU},
        )
        if not referenced:
            # A Reference pointing at a node that doesn't exist means this
            # cannot be the document that was signed.
            raise SignatureVerificationFailed()
        ctx.register_id(referenced[0], 'Id', ns.WSU)

    key = xmlsec.Key.from_file(certfile, xmlsec.KeyFormat.CERT_PEM, None)
    ctx.key = key
    try:
        ctx.verify(signature)
    except xmlsec.Error:
        # Sadly xmlsec gives us no details about the reason for the failure, so
        # we have nothing to pass on except that verification failed.
        raise SignatureVerificationFailed()
def _sign_node(ctx, signature, target):
    """Add sig for ``target`` in ``signature`` node, using ``ctx`` context.

    Doesn't actually perform the signing; ``ctx.sign(signature)`` should be
    called later to do that.

    Adds a Reference node to the signature with URI attribute pointing to the
    target node, and registers the target node's ID so XMLSec will be able to
    find the target node by ID when it signs.

    Args:
        ctx: xmlsec.SignatureContext used for the eventual signing.
        signature: the ds:Signature template element to add the Reference to.
        target: the element to be signed (gains a wsu:Id attribute).
    """

    # Ensure the target node has a wsu:Id attribute and get its value.
    node_id = ensure_id(target)

    # Unlike HTML, XML doesn't have a single standardized Id. WSSE suggests the
    # use of the wsu:Id attribute for this purpose, but XMLSec doesn't
    # understand that natively. So for XMLSec to be able to find the referenced
    # node by id, we have to tell xmlsec about it using the register_id method.
    ctx.register_id(target, 'Id', ns.WSU)

    # Add reference to signature with URI attribute pointing to that ID.
    ref = xmlsec.template.add_reference(
        signature, xmlsec.Transform.SHA1, uri='#' + node_id)

    # This is an XML normalization transform which will be performed on the
    # target node contents before signing. This ensures that changes to
    # irrelevant whitespace, attribute ordering, etc won't invalidate the
    # signature.
    xmlsec.template.add_transform(ref, xmlsec.Transform.EXCL_C14N)
| [
"michael@mvantellingen.nl"
] | michael@mvantellingen.nl |
ffdcccc83d83ac6a4aa95ef532ebcb5817b876e1 | d2c4c0ffccb8c8de92cc2338e6b9574d93492c4c | /application/__init__.py | fb4c1332af82f7814033bdac3aafb37ea2f43b9f | [] | no_license | liupengzhouyi/LearnFlask | f8a682611ec3bb76b1f17da0ec3d79e720863b13 | 0e361c8f3912a47aa0192239904deaf7dd4818aa | refs/heads/master | 2021-04-14T05:49:30.194510 | 2020-03-25T09:22:03 | 2020-03-25T09:22:03 | 249,210,898 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89 | py | # router/__init__.py
from .good import good_blueprint
blueprint = [
good_blueprint,
] | [
"liupeng.0@outlook.com"
] | liupeng.0@outlook.com |
21a4e492b98364a784328df3e23366dc9703e2ea | b516617c64cf357463ad9eac996978601f8581dc | /manage.py | 5be0b9d85da20632a2c231cecb515e75eecc82ff | [] | no_license | susilthapa/IchanguKarateDo | 87c8be4ffd48d9f18b7ebb8faba98339905f36b2 | 2cd446ce688ab1f32af42c9a4c1234f439d36475 | refs/heads/master | 2022-12-13T05:18:40.961487 | 2020-08-30T09:05:38 | 2020-08-30T09:05:38 | 276,271,876 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 631 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Entry point for Django's administrative command-line utility."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Ichangu.settings.dev')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| [
"thapasusil53@gmail.com"
] | thapasusil53@gmail.com |
dbb7c26ab24c77cf8b7bae35df9b83cbadabb520 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_099/ch20_2019_08_26_20_34_30_055432.py | 5b6bd16aa25f031ab5ae04642a101db1f6d2e1ac | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 138 | py | nome=string(input("Qual seu nome? "))
# Fixed: the condition used an invalid assignment-style comparison
# (`nome=string("Chris")`) instead of `==`, and the final print was
# missing its closing parenthesis. Runtime strings are unchanged.
if nome == "Chris":
    print("Todo mundo odeia o Chris")
else:
    print("Olá, {0}".format(nome))
"you@example.com"
] | you@example.com |
a1d0397837a90fef07def6f189362fdb5faadb0a | e70c8622d1ba6bbef8e0ad47be17a5835b27f927 | /ras.py | ebb2e754ae4588e6c5236f8aded9eed9da66c384 | [] | no_license | jchacon4/mil | e91b1ccb9ef9420d0e349f144cc3fed02c918a36 | 2739678c49b33aab81782c9deb3417f970883269 | refs/heads/master | 2021-01-02T09:10:35.875828 | 2017-08-02T19:16:32 | 2017-08-02T19:16:32 | 99,152,066 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,073 | py | # This is a demo of running face recognition on a Raspberry Pi.
# This program will print out the names of anyone it recognizes to the console.
# To run this, you need a Raspberry Pi 2 (or greater) with face_recognition and
# the picamera[array] module installed.
# You can follow this installation instructions to get your RPi set up:
# https://gist.github.com/ageitgey/1ac8dbe8572f3f533df6269dab35df65
import face_recognition
import picamera
import numpy as np
import cv2
# Get a reference to the Raspberry Pi camera.
# If this fails, make sure you have a camera connected to the RPi and that you
# enabled your camera in raspi-config and rebooted first.
camera = picamera.PiCamera()
camera.resolution = (320, 240)
# Preallocated frame buffer matching the camera resolution: (rows, cols, 3).
output = np.empty((240, 320, 3), dtype=np.uint8)
# Load a sample picture and learn how to recognize it.
print("Loading known face image(s)")
obama_image = face_recognition.load_image_file("obama_small.jpg")
obama_face_encoding = face_recognition.face_encodings(obama_image)[0]
# Initialize some variables
face_locations = []
face_encodings = []
while True:
    print("Capturing image.")
    # Grab a single frame of video from the RPi camera into the
    # preallocated numpy buffer.
    camera.capture(output, format="rgb")
    # Find all the faces and face encodings in the current frame of video
    face_locations = face_recognition.face_locations(output)
    print("Found {} faces in image.".format(len(face_locations)))
    face_encodings = face_recognition.face_encodings(output, face_locations)
    # Loop over each face found in the frame to see if it's someone we know.
    for face_encoding in face_encodings:
        # See if the face is a match for the known face(s)
        match = face_recognition.compare_faces([obama_face_encoding], face_encoding)
        name = "<Unknown Person>"
        if match[0]:
            name = "Barack Obama"
        print("I see someone named {}!".format(name))
    # Display the resulting image.
    # NOTE(review): the frame is captured as RGB but cv2.imshow expects BGR,
    # so the preview colours will appear channel-swapped -- confirm intended.
    cv2.imshow('Video', output)
    # Hit 'q' on the keyboard to quit!
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
"kei@keis-MacBook-Pro.local"
] | kei@keis-MacBook-Pro.local |
34a3e06523fafdfbb4fdc2d235b73a2d7be4515a | 1b3cda917b4095a983bbff81520e528c73b6130b | /wishes/wish_app/models.py | f183f0ef6a83c5288fde58be90a7ab48933e7255 | [] | no_license | cuauhtemocmartinez/django_projects | 36dd3ff39816f663cd611cc62adb94ef2d94a48a | ee6fbf144ae2f36a6501d5d4cc50349cabbf9d0c | refs/heads/main | 2023-02-21T20:12:44.113056 | 2021-01-26T02:36:00 | 2021-01-26T02:36:00 | 309,503,194 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,320 | py | from django.db import models
import re
class UserManager(models.Manager):
    """Custom manager providing registration-form validation for User."""
    def validator(self, postData):
        """Validate registration form data.

        Returns a dict mapping field name to an error message; an empty
        dict means all checks passed.
        """
        errors = {}
        EMAIL_REGEX = re.compile(r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]+$')
        if len(postData['first_name']) < 2:
            errors['first_name'] = "Your first name must be at least 2 characters"
        if len(postData['last_name']) < 2:
            # Fixed: this message previously said "first name" (copy/paste).
            errors['last_name'] = "Your last name must be at least 2 characters"
        if not EMAIL_REGEX.match(postData['email']):
            errors['email'] = "Email must be valid format"
        if len(postData['password']) < 8:
            errors['password'] = "Password must be at least 8 characters"
        if postData['password'] != postData['confirm_password']:
            # Fixed: typo "conform" -> "confirm".
            errors['confirm_password'] = "Password and confirm password do not match"
        return errors
class WishManager(models.Manager):
    """Custom manager providing form validation for Wish."""
    def validator(self, form):
        """Return {field: message} validation errors; empty dict if valid.

        NOTE(review): both checks write to the same 'length' key, so when
        the item AND the description are too short, the description error
        overwrites the item error -- confirm whether distinct keys were
        intended (templates may rely on the 'length' key).
        """
        errors = {}
        if len(form['item']) < 3:
            errors['length'] = "A wish must consist of at least 3 characters!"
        if len(form['description']) < 3:
            errors['length'] = "A description must be provided!"
        return errors
class User(models.Model):
    """Registered site user; validated via UserManager.validator.

    NOTE(review): the password is stored in a plain CharField -- confirm
    that hashing is applied elsewhere before saving.
    """
    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)
    email = models.CharField(max_length=100)
    password = models.CharField(max_length=100)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    objects = UserManager()
class Wish(models.Model):
    """A wish posted by a user; other users may like it."""
    item = models.CharField(max_length=100)
    description = models.CharField(max_length=100)
    # The user who created the wish.
    poster = models.ForeignKey(User, related_name="wishes", on_delete=models.CASCADE)
    # Users who liked this wish.
    likes = models.ManyToManyField(User, related_name="likes")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    objects = WishManager()
class Grant(models.Model):
    """Records that a user granted (fulfilled) a particular wish."""
    item = models.CharField(max_length=100)
    # The user who granted the wish.
    poster = models.ForeignKey(User, related_name="granted", on_delete=models.CASCADE)
    # The wish that was granted.
    message = models.ForeignKey(Wish, related_name="wishlist", on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
"mr.cuauhtemocmartinez@gmail.com"
] | mr.cuauhtemocmartinez@gmail.com |
b844e06698da729e17ebecf76c72893e5c36e704 | c5438b999c5af89b292eaeff9e1a1173b8d7ccf2 | /ikcrm/aike/api_request.py | ddb5530958bdebd3b39145c243421ffca542c3b3 | [] | no_license | shellyes/ikcrm | f52d55f948e72dcb37d1d0240a246fc7acfcb2c0 | 871274e01277a3c8cb755ebd5358e861758b395d | refs/heads/master | 2021-07-21T05:52:12.525048 | 2017-10-27T06:56:46 | 2017-10-27T06:56:46 | 108,510,230 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,931 | py | #coding:utf-8
import requests
import json
# NOTE(review): real credentials are hard-coded below -- move them into
# configuration / environment variables.
headers = {
    'User-Agent':'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36'
}
# headers = {'Content-type': 'application/json', 'Accept': 'application/json'}
# Option 1: pass the parameters directly in the query string, ignoring data types:
# url = "https://api.ikcrm.com/api/v2/auth/login?login=18180428128&password=kalibei1228&device=dingtalk"
# Option 2: the endpoint takes JSON, so either serialize with data=json.dumps(login_data) or pass json=login_data directly.
url = "https://api.ikcrm.com/api/v2/auth/login"
login_data = {'login': '18180428128', 'password': 'kalibei1228', 'device': "dingtalk"}
json_data = requests.post(url=url, json=login_data, headers=headers).content
# Decode the JSON response body into a dict.
dict_data = json.loads(json_data)
print dict_data
user_id = dict_data['data']['user_id']
user_token = dict_data['data']['user_token']
version_code = '3.13.0'
device = 'dingtalk_open'
# Common query-string suffix reused by every API call below.
globals_p = 'user_token='+str(dict_data['data']['user_token'])+'&device=dingtalk&version_code=3.13.0'
#------------------------商机---------------------------------
# opportunities_url = 'https://api.ikcrm.com/api/v2/opportunities?stage=1129692&per_page=50&page=1&'+globals_p
# opportunities_data = requests.get(url=opportunities_url, headers=headers).content
# first_opportunities = json.loads(opportunities_data)['data']['opportunities']
# opportunities_id = first_opportunities
#
# # print 'customers_id:', opportunities_id
# print '页第一分商机',json.dumps(first_opportunities)
# opportunities_detail_url = 'https://api.ikcrm.com/api/v2/opportunities/164406'+'?' + globals_p
# opportunities_detail_data = requests.get(url=opportunities_detail_url, headers=headers).content
# first_opportunities = json.loads(opportunities_detail_data)['data']
#
# print '商机详情:', json.dumps(first_opportunities)
# #--------------------------商机排序----------------------------------
# opportunities_sort_url = 'https://api.ikcrm.com/api/v2/opportunities?page=1&sort=real_revisit_at&order=desc&'+globals_p
# opportunities_sort_data = requests.get(url=opportunities_sort_url, headers=headers).content
# first_sort_opportunities = json.loads(opportunities_sort_data)
#
# print '总的条数',first_sort_opportunities['data']['total_count']
# print '第一页的商机', json.dumps( first_sort_opportunities['data']['opportunities'])
#
#------------------------商机筛选--------------------------------------
#
# # 商机筛选分组/api/v2/opportunities/filter_sort_group(.:format)
# opportunities_filters_url = 'https://api.ikcrm.com/api/v2/opportunities/filter_sort_group?'+globals_p
# opportunities_filters_data = requests.get(url=opportunities_filters_url, headers=headers).content
#
# print '商机销售状态进行筛选:', opportunities_filters_data
#
#
# #销售状态/api/v2/opportunities/:field_name/filter_options(.:format)
# opportunities_filter_url = 'https://api.ikcrm.com/api/v2/opportunities/stage/filter_options?'+globals_p
# opportunities_filter_data = requests.get(url=opportunities_filter_url, headers=headers).content
# first_filter_opportunities = json.loads(opportunities_filter_data)['data']
#
# print '商机状态进行筛选:', json.dumps(first_filter_opportunities)
#
# opportunities_filter_url ='https://api.ikcrm.com/api/v2/opportunities?stage=1129697&'+ globals_p
# opportunities_filter_data = requests.get(url=opportunities_filter_url, headers=headers).content
# first_filter_opportunities = json.loads(opportunities_filter_data)['data']
#
# print '输单数据:', json.dumps(first_filter_opportunities)
#
#--------------------------------客户-------------------------
#
# customers_url = 'https://api.ikcrm.com/api/v2/customers?per_page=50&page=1&'+globals_p
# customers_data = requests.get(url=customers_url, headers=headers).content
#
# first_customers = json.loads(customers_data)['data']['customers'][0]
# customers_id = first_customers['id']
#
# print '101页第一个位客户',json.dumps(first_customers)
# customers_detail_url = 'https://api.ikcrm.com/api/v2/customers/'+'3670520'+'?'+globals_p
# customers_detail_data = requests.get(url=customers_detail_url, headers=headers).content
#
# customers_json = json.loads(customers_detail_data)
#
# print '客户详情', json.dumps(customers_json)
#--------------------------跟进记录------------------------------------------
# revisit_url = 'https://api.ikcrm.com/api/v2/revisit_logs/new_index?per_page=20&stage=1129692&'+globals_p
# revisit_data = requests.get(url=revisit_url, headers=headers).content
# revisit_json = json.loads(revisit_data)
#
# print '跟进记录详情', json.dumps(revisit_json)
# #
# #
# Fetch the follow-up (revisit) log for one specific opportunity (id 154789).
revisit_detail_url = 'https://api.ikcrm.com/api/v2/revisit_logs/new_index?loggable_type=opportunity&loggable_id='+'154789'+'&'+globals_p
revisit_detail_data = requests.get(url=revisit_detail_url, headers=headers).content
revisit_detail_json = json.loads(revisit_detail_data)
print '某条商机跟进记录详情', json.dumps(revisit_detail_json)
#-------------------------客户转移到公海------------------------------------
#客户转移:PUT /api/v2/customers/:id/turn_common?common_id=:common_id 移送到oc输单
# rm_url = 'https://api.ikcrm.com/api/v2/customers/'+'3666598'+'/turn_common?common_id=11637&'+globals_p
# rm_data = requests.put(url=rm_url, headers=headers).content
# rm_json = json.loads(rm_data)['data']
#
# print '客户调入公海',json.dumps(rm_json)
#公海类型
# rm_url = 'https://api.ikcrm.com/api/v2/common_customers/common_settings?'+globals_p
# rm_data = requests.get(url=rm_url, headers=headers).content
# rm_json = json.loads(rm_data)['data']
#
# print '客户调入公海',json.dumps(rm_json)
| [
"1461847795@qq.com"
] | 1461847795@qq.com |
2a19054b1cac1ad4d37de8d9ea4c2c41bbf518db | c5872ae7a57bccacd00b636ebdeb93ffadbf8f0c | /06 More Mouse Event Examples.py | b440bbda331cd1b51160da24b1ac76906f7b154f | [] | no_license | kkgarai/OpenCV | e42592b25c2a150762ab25c3df47ea9eeaddeb70 | 0a6f0ce2f667fc6e41a059e62a6e3538cfaabaf6 | refs/heads/master | 2023-02-26T19:12:24.144075 | 2021-02-04T17:17:33 | 2021-02-04T17:17:33 | 336,007,029 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,323 | py | import cv2
import numpy as np
'''
events=[i for i in dir(cv2) if "EVENT" in i]
print(events)
'''
# Draw a line b/w the clicked points
'''
# create a mouse click callback function
def click_event(event,x,y,flag,param):
if event==cv2.EVENT_LBUTTONDOWN:
cv2.circle(img,(x,y),3,(0,0,255),-1)
points.append((x,y))
if len(points)>=2:
cv2.line(img,points[-1],points[-2],(255,0,0),1)
cv2.imshow("Image",img)
img=np.zeros((512,512,3),dtype=np.uint8)
#img=cv2.imread('lena.jpg')
cv2.imshow("Image",img)
points=[]
cv2.setMouseCallback("Image",click_event)
cv2.waitKey(0)
cv2.destroyAllWindows()
'''
# show the colour in a different window
# create a mouse click callback function
def click_event(event, x, y, flag, param):
    """Mouse callback: on left click, show the clicked pixel's colour.

    Reads the BGR channel values of the clicked pixel in the global ``img``,
    marks the click with a small red dot, and opens/updates a "Colour"
    window filled with that colour.
    """
    if event == cv2.EVENT_LBUTTONDOWN:
        blue = img[y, x, 0]
        green = img[y, x, 1]
        red = img[y, x, 2]
        # (Removed: an unused ``text`` string was built here.)
        # Mark the sampled pixel on the source image.
        cv2.circle(img, (x, y), 3, (0, 0, 255), -1)
        colour_swatch = np.zeros((512, 512, 3), np.uint8)
        colour_swatch[:] = [blue, green, red]
        cv2.imshow("Colour", colour_swatch)
#img=np.zeros((512,512,3),dtype=np.uint8)
img=cv2.imread('lena.jpg')
cv2.imshow("Image",img)
# Route mouse events on the "Image" window to click_event.
cv2.setMouseCallback("Image",click_event)
# Block until any key is pressed, then tear down all windows.
cv2.waitKey(0)
cv2.destroyAllWindows()
"kkgarai99@gmail.com"
] | kkgarai99@gmail.com |
efd5c5581b7f1df181cfa2c42e18603c0217134f | 3be9f525eae7e01065ea23fb6f2a63e78b670ab5 | /venv/bin/pip2.7 | 9991e3d5758beeb240825d03d6519b75dd28ce4b | [] | no_license | mpatini/wiki | cdc1d303cad23b0a3ab67075f4e33d3f73952a44 | 58f268667e0230f82bb7637cf40477a9df38a799 | refs/heads/master | 2016-08-06T22:29:52.672739 | 2014-03-12T21:19:59 | 2014-03-12T21:19:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 223 | 7 | #!/var/www/wiki/wiki/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip import main
if __name__ == '__main__':
    # Console-script shim: strip the '-script.pyw' / '.exe' suffix that
    # setuptools wrappers append to argv[0] so pip sees a clean program name.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"root@wiki.(none)"
] | root@wiki.(none) |
1ac642943c9ec33b7d2afdb634f73c675df5f538 | 75d22792d458610018c244a00221cb732bcae6f8 | /Linear Algebra/factoring_lab.py | 0ed3780bc409c2aaad13ff3f3c0921fbecb6f3e0 | [] | no_license | mdimarco/Archive | 489c03e6175979fd2fa14e781ef2464684f1ac51 | 6a2bed313b0a4d69d0223695c8e1d1f364dc8556 | refs/heads/master | 2020-12-26T13:03:17.466149 | 2016-06-01T21:03:13 | 2016-06-01T21:03:13 | 54,572,641 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,897 | py | # version code 112c76cd8477+
# Please fill out this stencil and submit using the provided submission script.
from vec import Vec
from GF2 import one
from factoring_support import dumb_factor
from factoring_support import intsqrt
from factoring_support import gcd
from factoring_support import primes
from factoring_support import prod
import echelon
## Task 1
def int2GF2(i):
    """Reduce the integer *i* modulo 2 into GF(2).

    Input:
        - i: an int
    Output:
        - one if i is congruent to 1 mod 2
        - 0 if i is congruent to 0 mod 2
    Examples:
        >>> int2GF2(3)
        one
        >>> int2GF2(100)
        0
    """
    if i % 2:
        return one
    return 0
## Task 2
def make_Vec(primeset, factors):
    """Build the GF(2) exponent-parity vector of a factorization.

    Input:
        - primeset: a set of primes
        - factors: a list [(p_1, a_1), ..., (p_n, a_n)] with each p_i
          in primeset
    Output:
        - a vector v over GF(2) with domain primeset such that
          v[p_i] = int2GF2(a_i) for every pair in factors
    Example:
        >>> make_Vec({2,3,11}, [(2,3), (3,2)]) == Vec({2,3,11},{2:one})
        True
    """
    parity = {prime: int2GF2(exponent) for (prime, exponent) in factors}
    return Vec(primeset, parity)
## Task 3
def find_candidates(N, primeset):
    """Collect integers whose squares minus N factor over primeset.

    Input:
        - N: an int to factor
        - primeset: a set of primes
    Output:
        - a tuple (roots, rowlist) where each a_i in roots satisfies that
          a_i*a_i - N factors completely over primeset, rowlist[i] is the
          corresponding primeset-vector over GF(2), and
          len(roots) == len(rowlist) > len(primeset)
    """
    roots, rowlist = [], []
    # Start just above the integer square root so x*x - N is positive.
    x = intsqrt(N) + 2
    # Gather len(primeset) + 2 candidates (same bound as the original
    # `<=` loop condition).
    needed = len(primeset) + 2
    while len(roots) < needed:
        factorization = dumb_factor(x * x - N, primeset)
        if factorization:
            roots.append(x)
            rowlist.append(make_Vec(primeset, factorization))
        x += 1
    return roots, rowlist
## Task 4
def find_a_and_b(v, roots, N):
    """Derive a congruence-of-squares pair from a GF(2) vector.

    Input:
        - v: a {0,1,..., n-1}-vector over GF(2) where n = len(roots)
        - roots: a list of integers
        - N: an integer to factor
    Output:
        - a pair (a, b) of integers such that a*a - b*b is a multiple
          of N (if v is correctly chosen)
    """
    # Select the roots whose coordinate in v is one.
    selected = [roots[i] for i in range(len(v.D)) if v[i] == one]
    a = prod(selected)
    # When v lies in the null space, the product of the (x*x - N) terms
    # is a perfect square; its integer square root is b.
    square = prod([x * x - N for x in selected])
    return a, intsqrt(square)
## Task 5
print("Start")
primelist = primes(1000)
N = 2461799993978700679
candidates = find_candidates(N, primelist)
print("Found Candidates")
M = echelon.transformation_rows( candidates[1] )
print("Found M")
print(len(M))
'''for x in range(len(M)-1,-1,-1):
v = M[x]
a,b = find_a_and_b(v, candidates[0], N)
c = gcd(N, a-b)
if c != 1 and c!=N:
break
print(a-b)
print(gcd(N, a-b) )'''
nontrivial_divisor_of_2461799993978700679 = 1230926561
| [
"mhd894@gmail.com"
] | mhd894@gmail.com |
a760d7842432f986fb1a3e7786f5b30fbcceae37 | ff6248be9573caec94bea0fa2b1e4b6bf0aa682b | /output/StudentProblem/10.21.12.17/2/1569577681.py | b485642f0b2d17dbd19bb9fa237a3fa9c43405d3 | [] | no_license | LennartElbe/codeEvo | 0e41b1a7705204e934ef71a5a28c047366c10f71 | e89b329bc9edd37d5d9986f07ca8a63d50686882 | refs/heads/master | 2020-12-21T17:28:25.150352 | 2020-03-26T10:22:35 | 2020-03-26T10:22:35 | 236,498,032 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,703 | py | ============================= test session starts ==============================
platform darwin -- Python 3.7.4, pytest-5.4.1, py-1.8.1, pluggy-0.13.1
rootdir: /tmp
collected 0 items / 1 error
==================================== ERRORS ====================================
________________________ ERROR collecting test session _________________________
../../../Library/Python/3.7/lib/python/site-packages/_pytest/python.py:513: in _importtestmodule
mod = self.fspath.pyimport(ensuresyspath=importmode)
../../../Library/Python/3.7/lib/python/site-packages/py/_path/local.py:701: in pyimport
__import__(modname)
<frozen importlib._bootstrap>:983: in _find_and_load
???
<frozen importlib._bootstrap>:967: in _find_and_load_unlocked
???
<frozen importlib._bootstrap>:677: in _load_unlocked
???
../../../Library/Python/3.7/lib/python/site-packages/_pytest/assertion/rewrite.py:143: in exec_module
source_stat, co = _rewrite_test(fn, self.config)
../../../Library/Python/3.7/lib/python/site-packages/_pytest/assertion/rewrite.py:328: in _rewrite_test
tree = ast.parse(source, filename=fn)
/usr/local/Cellar/python/3.7.4_1/Frameworks/Python.framework/Versions/3.7/lib/python3.7/ast.py:35: in parse
return compile(source, filename, mode, PyCF_ONLY_AST)
E File "/private/tmp/blabla.py", line 40
E def represent(n: int) -> list
E ^
E SyntaxError: invalid syntax
=========================== short test summary info ============================
ERROR ../../../../../tmp
!!!!!!!!!!!!!!!!!!!! Interrupted: 1 error during collection !!!!!!!!!!!!!!!!!!!!
=============================== 1 error in 0.18s ===============================
| [
"lenni.elbe@gmail.com"
] | lenni.elbe@gmail.com |
895876551202a6bcf544d4906770ddb46cf2aab3 | ebec16f77071c1425417cb1c58e3464683083b94 | /mysite/main/models.py | 793df46d5d8eca2985b66e9031eb3d02572d26a1 | [] | no_license | cm1100/django_web_development | 73a403c63a7035f977596373507cfd70658e0889 | 18069b34f1291b52f2a3fb8c8e8119382fd78352 | refs/heads/master | 2022-09-14T01:20:50.190308 | 2020-06-03T14:40:08 | 2020-06-03T14:40:08 | 269,108,168 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,268 | py | from django.db import models
from datetime import datetime
# Create your models here.
class Tutorial_Category(models.Model):
    """Top-level grouping for tutorial series."""
    tutorial_category = models.CharField(max_length=200)
    category_summary = models.CharField(max_length=200)
    category_slug = models.CharField(max_length=200)

    class Meta:
        # Fixed: the class was misspelled "Mete", so Django silently
        # ignored it and the admin showed the default plural name.
        verbose_name_plural = "Categories"

    def __str__(self):
        return self.tutorial_category
class Tutorial_Series(models.Model):
    """A series of tutorials belonging to one category."""
    tutorial_series = models.CharField(max_length=200)
    # SET_DEFAULT keeps series pointing at category pk=1 when theirs is deleted.
    tutorial_category = models.ForeignKey(Tutorial_Category, default=1, verbose_name="Categories", on_delete=models.SET_DEFAULT)
    series_summary = models.CharField(max_length=200)

    class Meta:
        # Fixed: the class was misspelled "Mete", so Django silently
        # ignored it and the admin showed the default plural name.
        verbose_name_plural = "Series"

    def __str__(self):
        return self.tutorial_series
class Tutorial(models.Model):
    """A single tutorial article within a series."""
    tutorial_title = models.CharField(max_length=200)
    tutorial_content = models.TextField()
    # Fixed: default was ``datetime.now()`` -- evaluated once at import
    # time, stamping every new row with the server start time. Passing the
    # callable makes Django evaluate it per save.
    tutorial_published = models.DateTimeField("date published", default=datetime.now)
    tutorial_series = models.ForeignKey(Tutorial_Series, default=1, verbose_name="Series", on_delete=models.SET_DEFAULT)
    # NOTE(review): a CharField with an integer default (1) looks unintended;
    # confirm whether a string default or a SlugField was meant.
    tutorial_slug = models.CharField(max_length=200, default=1)

    def __str__(self):
        return self.tutorial_title
| [
"noreply@github.com"
] | cm1100.noreply@github.com |
b4f5ccea358baabbf32a4c713d4de41820427bbc | 18411284fb7b011fa091ddae3a2455688e61dcb5 | /app/core/migrations/0005_recipe_image.py | 072176f6cd44736c763abed1f450a2dd81738c44 | [
"MIT"
] | permissive | julesc00/recipe-app-api | ff1105329a8d6791e648aab3b3a370d1377aea89 | fee549f98ad8cf009999e0b3f45559a7d9b5d163 | refs/heads/master | 2021-04-02T00:09:08.931471 | 2020-07-28T15:52:37 | 2020-07-28T15:52:37 | 248,222,430 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 431 | py | # Generated by Django 2.1.15 on 2020-07-11 13:12
import core.models
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add the nullable ``image`` field to Recipe."""
    # Must run after the migration that created the Recipe model.
    dependencies = [
        ('core', '0004_recipe'),
    ]
    operations = [
        migrations.AddField(
            model_name='recipe',
            name='image',
            # Upload path is computed per-instance by recipe_image_file_path.
            field=models.ImageField(null=True, upload_to=core.models.recipe_image_file_path),
        ),
    ]
| [
"julesc00@hotmail.com"
] | julesc00@hotmail.com |
a9dd43003b888b8c3c2da6a37639e0bbe0a7e1ae | 314d73129f3783dbb28c23058f3489ab6570c9f7 | /new_app/newahmed/doctype/car_type/car_type.py | 5a7f28316cfa686730ceb5398228632dcc501ce3 | [
"MIT"
] | permissive | ahmeed363/new_app | 67cbfdca0e15c3629b69608b4f3eaefab9df0c8f | 381115efae2a0758d3debdcb5d04ccf269b117ea | refs/heads/master | 2023-02-05T12:03:44.037581 | 2020-12-28T17:50:57 | 2020-12-28T17:50:57 | 325,077,853 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 251 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2020, Ahmed and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
# import frappe
from frappe.model.document import Document
class CarType(Document):
	"""Frappe DocType controller for Car Type; no custom behaviour yet."""
	pass
| [
"ahmed.m.safadi@gmail.com"
] | ahmed.m.safadi@gmail.com |
5c6d7989c2eac3580b1d3f48a3edbfb85e3ba888 | d171393d9c197dd8b5f083bb990d30a12faa7614 | /tests/test_cache.py | 4b1a950bd4c72ecb0e2b722e24c36be6574aaa00 | [] | no_license | alex-px/currency-converter | 719e364f7ff00d29ab2a36f699a67dbbd2b5146e | 51ed6574e214cd85090aeae78839cd9403bf2537 | refs/heads/master | 2021-01-18T16:36:55.702270 | 2017-03-30T21:43:26 | 2017-03-30T21:43:26 | 86,748,411 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 357 | py | import json
import os
from converter.currency_converter import CACHE_FILE, _get_rates_usd
class TestCache:
    def test_cached(self):
        """_get_rates_usd must return exactly the rates stored in the cache file."""
        rates = {"GGP": 0.805647, "UGX": 3601.999708, "MGA": 3185.000239}
        with open(CACHE_FILE, 'w') as fh:
            json.dump(rates, fh)
        try:
            assert _get_rates_usd() == rates
        finally:
            # Always remove the cache file, even when the assertion fails,
            # so a failing run does not leak state into later tests.
            os.remove(CACHE_FILE)
| [
"faxforalex@gmail.com"
] | faxforalex@gmail.com |
ceec7ebcfb4e56edf160593108edc9074f9ff650 | 00b498c945734b6b4eabbf1457c97898be4c20c5 | /statmech/idealgas.py | c24d754b6066217d05c174d2743de733f91570dc | [] | no_license | HussainAther/physics | 2982f2c859bfe9c2ca3d1c0d747634428d7b9f89 | 4514e7231ee36ad10030105db14333bd04ee7f72 | refs/heads/master | 2020-04-20T08:33:26.282139 | 2020-02-24T02:28:38 | 2020-02-24T02:28:38 | 168,742,644 | 18 | 4 | null | null | null | null | UTF-8 | Python | false | false | 2,361 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
import pickle as pk
import numpy as np
import pylab as pl
"""
We can also use the molecular dynamics simulations to learn about the ideal gas. We can, for example,
determine how the pressure in the gas depends on other properties of the system?
The pressure is a property we can measure in the gas and which may vary or at least fluctuate with time — just as we have
seen the number of particles in the left half to vary. The volume and the total energy on the other hand, are values we
determine when we start the simulations, and we typically do not allow them to vary throughout the simulation: Total energy
is conserved when the particles move in conservative force fields, and we keep the size of the box, and hence the volume, constant.
If the gas is very dilute, all the particles are far away from each other most of the time, and the interaction energies from the interatomic
potentials will be very small (since it decays rapidly with the distance between atoms). We may therefore approximate the
total energy by the kinetic energy of the atoms instead. We will come back later to how to
measure the pressure in a molecular dynamics simulation, but let us here assume that it is measured from the average force on the walls of the system.
We introduce reflective walls in both the x and the y direction to contain the system.
We can set up a two-dimensional simulation of a dilute gas, just as we did before, and measure the volume V = Lx · Ly , the pressure P , as measured by the
simulation program, and the total kinetic energy.
K=summation from i=1 to N of (1/2)m(v_x^2 +v_y^2).
"""
# Before running this script, you must run the lammps file by using
# `lammps < in.gasstatistics01`
# NOTE(review): ``pk`` is ``pickle`` (see imports), but pickle.dump takes
# (obj, file); this call matches the Pizza.py ``dump`` tool API instead --
# the import is probably wrong, so this line will fail as written. Confirm.
data = pk.dump("gasstat01.lammpstrj")
# Simulate a 2-D Lennard-Jones gas (lennard jones)
t = data.time() # pylab's time function for the input data.
nt = np.size(t)
nleft = np.zeros(nt,float) # Store number of particles
tmp_time, box, atoms, bonds, tris, lines = data.viz(0)
halfsize = 0.5*box[3]
# Box size in x-dir
for it in range(nt):
    xit = np.array(data.vecs(it, "x"))
    # NOTE(review): ``find`` and ``size`` are unqualified here -- presumably
    # pylab.find and np.size were intended; confirm and qualify them.
    jj = find(xit<halfsize)
    numx = size(jj)
    nleft[it] = numx
plt.plot(t,nleft)
plt.xlabel("t")
plt.ylabel("n")
plt.show()
# NOTE(review): the same file is written twice; the second call is redundant.
np.savetxt("ndata.d", (t, nleft))
np.savetxt("ndata.d",(t, nleft))
| [
"shussainather@gmail.com"
] | shussainather@gmail.com |
948e90ac197b63051d82a99090c2a407b326eab6 | d8edfe66428c4ae5a7ce43e3c2c741bd4942833a | /mdacalc/calc.v0.1.pyw | 69aeca0923bc9a6e6e742677d057c1955254de5e | [] | no_license | rickman90/mystuff | 19a47915ca7c8e2c962e0470e561c9523420bc94 | 1d2f2297699bc3d4559ab2599ef088b199e6b896 | refs/heads/master | 2021-01-19T07:49:27.470127 | 2014-08-25T04:11:35 | 2014-08-25T04:11:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,831 | pyw | #!/usr/bin/python
from Tkinter import *
import tkMessageBox
import math
fields = ('Sample Count Time (min)', 'Bkg Count Rate (cpm)', 'Bkg Count Time (min)', 'Efficiency (%)', 'Sample Amount (ea)', 'Well Known Bkg MDA (dpm)', 'Paired Bkg MDA (dpm)')
def wk_mda(entries):
    """Compute the well-known-background MDA and display it.

    Reads the count-time/rate/efficiency/sample-amount values from the
    entry widgets, evaluates the MDA formula (3.29 sigma form) and writes
    the formatted result into the 'Well Known Bkg MDA (dpm)' field.
    Invalid or zero inputs pop up an error dialog instead.
    """
    try:
        st = float(entries['Sample Count Time (min)'].get())
        br = float(entries['Bkg Count Rate (cpm)'].get())
        bt = float(entries['Bkg Count Time (min)'].get())
        eff = float(entries['Efficiency (%)'].get())
        sa = float(entries['Sample Amount (ea)'].get())
        wkmda = ((1.645**2 / st) + 3.29 * math.sqrt((br / st) + (br / bt))) / ((eff / 100) * sa)
        wkmda = ("%8.2f" % wkmda).strip()
        entries['Well Known Bkg MDA (dpm)'].delete(0,END)
        entries['Well Known Bkg MDA (dpm)'].insert(0, wkmda)
    except (ValueError, ZeroDivisionError):
        # Both bad text and zero divisors get the same user-facing message;
        # the two handlers were previously duplicated verbatim.
        tkMessageBox.showerror('Error:', 'Zero, non-numeric or nonsense values entered. Please fix them!')
def p_mda(entries):
    """Compute the paired-background MDA and display it.

    Same inputs as wk_mda but uses the 4.65 sigma coefficient and writes
    the result into the 'Paired Bkg MDA (dpm)' field. Invalid or zero
    inputs pop up an error dialog instead.
    """
    try:
        st = float(entries['Sample Count Time (min)'].get())
        br = float(entries['Bkg Count Rate (cpm)'].get())
        bt = float(entries['Bkg Count Time (min)'].get())
        eff = float(entries['Efficiency (%)'].get())
        sa = float(entries['Sample Amount (ea)'].get())
        pmda = ((1.645**2 / st) + 4.65 * math.sqrt((br / st) + (br / bt))) / ((eff / 100) * sa)
        pmda = ("%8.2f" % pmda).strip()
        entries['Paired Bkg MDA (dpm)'].delete(0,END)
        entries['Paired Bkg MDA (dpm)'].insert(0, pmda)
    except (ValueError, ZeroDivisionError):
        # Both bad text and zero divisors get the same user-facing message;
        # the two handlers were previously duplicated verbatim.
        tkMessageBox.showerror('Error:', 'Zero, non-numeric or nonsense values entered. Please fix them!')
def makeform(root, fields):
    """Build one labelled entry row per field; return {field: Entry}.

    Each row is a Frame holding a fixed-width Label and an Entry that is
    pre-populated with "0".
    """
    root.title("MDA Calculator v0.1")
    widgets = {}
    for name in fields:
        frame = Frame(root)
        label = Label(frame, width=22, text=name + ": ", anchor='w')
        entry = Entry(frame)
        entry.insert(0, "0")
        # Pack order matters for the layout: row first, then its children.
        frame.pack(side=TOP, fill=X, padx=5, pady=5)
        label.pack(side=LEFT)
        entry.pack(side=RIGHT, expand=YES, fill=X)
        widgets[name] = entry
    return widgets
def combo(e, wk_mda, p_mda):
    """Run both MDA calculations, in order, on the same entries mapping."""
    for calculate in (wk_mda, p_mda):
        calculate(e)
if __name__ == '__main__':
    root = Tk()
    root.geometry("400x400+300+300")
    ents = makeform(root, fields)
    # Pressing <Return> anywhere triggers both calculations, same as the
    # Calculate button; the default argument binds the entries dict now.
    root.bind('<Return>', (lambda event, e=ents: combo(e, wk_mda, p_mda)))
    b1 = Button(root, text='Calculate', command=(lambda e=ents: combo(e, wk_mda, p_mda)))
    b1.pack(side=LEFT, padx=5, pady=5)
    b2 = Button(root, text='Quit', command=root.quit)
    b2.pack(side=LEFT, padx=5, pady=5)
    root.mainloop()
| [
"rickdog6@users.noreply.github.com"
] | rickdog6@users.noreply.github.com |
cddde75515efdbde2dd6f8261ad306bd7c979de3 | f169fd77de719e9959d51b627162f413d73eea92 | /nfvOrchestrator/orchestrator/NFVIcatalogue.py | 238ec285cb17ff96113f07ace5669242ce59c5b0 | [] | no_license | hongyunnchen/nfvOrchestrator | 89af44543373a1b2d2fb9b8bc612ac763ad9ca68 | e41b92cf3310003c2fef50bdb62f30fa0bbebc62 | refs/heads/master | 2021-01-11T22:15:29.642477 | 2017-01-14T07:32:55 | 2017-01-14T07:32:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,030 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 16-12-21 下午4:30
# @Author : mengyuGuo
# @Site :
# @File : NFVIcatalogue.py
# @Software: PyCharm
# hold all physical resources,including computing,storage,networking
# and vm level
class NFVI_manager:
    """Catalogue of NFVI physical resources (compute, storage, network)."""

    def __init__(self):
        # Aggregate capacity totals.
        self.cpu_intotal = 0
        self.mem_intotal = 0
        self.disk_intotal = 0
        self.bind_with_intotal = 0
        # Aggregate usage totals.
        self.used_vcpu_intotal = 0
        self.used_mem_intotal = 0
        self.used_disk_intotal = 0
        self.used_bind_with_intotal = 0
        # Inventories.
        self.compute_node_list = []
        self.core_layer_switch_list = []
        self.agg_layer_switch_list = []
        self.edge_layer_switch_list = []

    def get_all_compute_node(self):
        """Return the list of registered compute nodes."""
        return self.compute_node_list

    def get_all_vm(self):
        """Not implemented yet."""
        pass

    def get_all_switch_node(self):
        """Return a new list of all switches, ordered core -> agg -> edge."""
        return (self.core_layer_switch_list
                + self.agg_layer_switch_list
                + self.edge_layer_switch_list)
class compute_node:
    """A physical compute host: capacity, usage, VMs and attached switches."""

    def __init__(self, zone, name, type, state, cpu_intotal, mem_intotal,
                 disk_intotal, bandwith, switch):
        self.zone = zone
        self.name = name
        self.type = type
        self.state = state
        # Total capacity.
        self.cpu_intotal = cpu_intotal
        self.mem_intotal = mem_intotal
        self.disk_intotal = disk_intotal
        # Current usage.
        self.vcpu_in_use = 0
        self.mem_in_use = 0
        self.disk_in_use = 0
        self.bandwith_intotal = bandwith
        # Hosted VMs.
        self.vm_nums = 0
        self.vm_list = []
        # Connectivity: linked switches and per-switch-name link bandwidth.
        self.linked_switch_list = [switch]
        # Fixed: ``bandwith_dic`` was indexed before being initialized
        # (AttributeError), and the value was wrapped in a set ({bandwith})
        # unlike every other bandwith_dic assignment in this module.
        self.bandwith_dic = {switch.name: bandwith}

    def add_linked_switch_list(self, switch, bandwith):
        """Attach an additional switch with the given link bandwidth."""
        self.linked_switch_list.append(switch)
        self.bandwith_dic[switch.name] = bandwith

    def add_server(self, server):
        """Host a VM on this node and account for its resource usage."""
        self.vm_nums = self.vm_nums + 1
        self.vm_list.append(server)
        # Fixed: the ``server`` class in this module defines ``cpu``,
        # not ``vcpu``; the original attribute read would raise.
        self.vcpu_in_use = self.vcpu_in_use + server.cpu
        self.mem_in_use = self.mem_in_use + server.mem
        self.disk_in_use = self.disk_in_use + server.disk
class server:
def __init__(self,name,flavor,image,net):
self.name=name
self.cpu=flavor.cpu
self.mem=flavor.mem
self.disk=flavor.disk
self.image=image
self.net_list=[net]
self.ip_list=[]
class switch:
def __init__(self,level,name):
self.level=level
self.name=name
self.compute_node_list=[]
self.lower_level_switch_list = []
self.upper_level_switch_list = []
self.bandwith_dic = {}
def add_compute_node(self,compute_node):
self.compute_node_list.append(compute_node)
self.bandwith_dic[compute_node.name]=compute_node.bandwith_intotal
def add_lower_level_switch_list(self,switch,bandwith):
self.lower_level_switch_list.append(switch)
self.bandwith_dic[switch.name] = bandwith
def add_upper_compute_node(self,switch,bandwith):
self.upper_level_switch_list.append(switch)
self.bandwith_dic[switch.name] = bandwith
def del_compute_node(self,compute_node):
for tmp in self.compute_node_list:
if tmp.name ==compute_node.name:
self.compute_node_list.remove(tmp)
if compute_node.name in self.bandwith_dic:
self.bandwith_dic.pop(compute_node.name)
def del_lower_level_switch_list(self,switch):
for tmp in self.lower_level_switch_list:
if tmp.name ==switch.name:
self.lower_level_switch_list.remove(tmp)
if switch.name in self.bandwith_dic:
self.bandwith_dic.pop(switch.name)
def del_upper_compute_node(self,switch):
for tmp in self.lower_level_switch_list:
if tmp.name ==switch.name:
self.lower_level_switch_list.remove(tmp)
if switch.name in self.bandwith_dic:
self.bandwith_dic.pop(switch.name)
| [
"448668358@qq.com"
] | 448668358@qq.com |
d94b9afeadd0c2d77bcbc6be4a418c8c0ec38a8b | ff642c107f68787cae4fb3cf95e428e7e0856228 | /FAQ/nlp_engine/extractors/getNo_people/word2number/w2n.py | 769bff7ae037034ad47c11cf1676d038966ab209 | [] | no_license | riteshsharthi/botx | 62d0d86fdcde58e4222cbf6655e1315175d2d3d3 | dc6031ac1bb788bae6697eacbd5c2d942d7f9f95 | refs/heads/master | 2022-12-08T09:46:10.626671 | 2019-01-03T12:03:05 | 2019-01-03T12:03:05 | 163,968,682 | 0 | 0 | null | 2022-11-22T03:11:05 | 2019-01-03T12:12:58 | HTML | UTF-8 | Python | false | false | 7,104 | py | from __future__ import print_function
american_number_system = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9,
'ten': 10,
'eleven': 11,
'twelve': 12,
'thirteen': 13,
'fourteen': 14,
'fifteen': 15,
'sixteen': 16,
'seventeen': 17,
'eighteen': 18,
'nineteen': 19,
'twenty': 20,
'thirty': 30,
'forty': 40,
'fifty': 50,
'sixty': 60,
'seventy': 70,
'eighty': 80,
'ninety': 90,
'hundred': 100,
'thousand': 1000,
'million': 1000000,
'billion': 1000000000,
'point': '.'
}
decimal_words = ['zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine']
"""
#TODO
indian_number_system = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9,
'ten': 10,
'eleven': 11,
'twelve': 12,
'thirteen': 13,
'fourteen': 14,
'fifteen': 15,
'sixteen': 16,
'seventeen': 17,
'eighteen': 18,
'nineteen': 19,
'twenty': 20,
'thirty': 30,
'forty': 40,
'fifty': 50,
'sixty': 60,
'seventy': 70,
'eighty': 80,
'ninety': 90,
'hundred': 100,
'thousand': 1000,
'lac': 100000,
'lakh': 100000,
'crore': 10000000
}
"""
"""
function to form numeric multipliers for million, billion, thousand etc.
input: list of strings
return value: integer
"""
def number_formation(number_words):
numbers = []
for number_word in number_words:
numbers.append(american_number_system[number_word])
if len(numbers) == 4:
return (numbers[0] * numbers[1]) + numbers[2] + numbers[3]
elif len(numbers) == 3:
return numbers[0] * numbers[1] + numbers[2]
elif len(numbers) == 2:
if 100 in numbers:
return numbers[0] * numbers[1]
else:
return numbers[0] + numbers[1]
else:
return numbers[0]
"""
function to convert post decimal digit words to numerial digits
input: list of strings
output: double
"""
def get_decimal_sum(decimal_digit_words):
decimal_number_str = []
for dec_word in decimal_digit_words:
if(dec_word not in decimal_words):
return 0
else:
decimal_number_str.append(american_number_system[dec_word])
final_decimal_string = '0.' + ''.join(map(str,decimal_number_str))
return float(final_decimal_string)
"""
function to return integer for an input `number_sentence` string
input: string
output: int or double or None
"""
def word_to_num(number_sentence):
if type(number_sentence) is not str:
return ""
raise ValueError("Type of input is not string! Please enter a valid number word (eg. \'two million twenty three thousand and forty nine\')")
number_sentence = number_sentence.replace('-', ' ')
number_sentence = number_sentence.lower() # converting input to lowercase
if(number_sentence.isdigit()): # return the number if user enters a number string
return int(number_sentence)
split_words = number_sentence.strip().split() # strip extra spaces and split sentence into words
clean_numbers = []
clean_decimal_numbers = []
# removing and, & etc.
for word in split_words:
if word in american_number_system:
clean_numbers.append(word)
# Error message if the user enters invalid input!
if len(clean_numbers) == 0:
return ""
raise ValueError("No valid number words found! Please enter a valid number word (eg. two million twenty three thousand and forty nine)")
# Error if user enters million,billion, thousand or decimal point twice
if clean_numbers.count('thousand') > 1 or clean_numbers.count('million') > 1 or clean_numbers.count('billion') > 1 or clean_numbers.count('point')> 1:
return ""
raise ValueError("Redundant number word! Please enter a valid number word (eg. two million twenty three thousand and forty nine)")
# separate decimal part of number (if exists)
if clean_numbers.count('point') == 1:
clean_decimal_numbers = clean_numbers[clean_numbers.index('point')+1:]
clean_numbers = clean_numbers[:clean_numbers.index('point')]
billion_index = clean_numbers.index('billion') if 'billion' in clean_numbers else -1
million_index = clean_numbers.index('million') if 'million' in clean_numbers else -1
thousand_index = clean_numbers.index('thousand') if 'thousand' in clean_numbers else -1
if (thousand_index > -1 and (thousand_index < million_index or thousand_index < billion_index)) or (million_index>-1 and million_index < billion_index):
return ""
raise ValueError("Malformed number! Please enter a valid number word (eg. two million twenty three thousand and forty nine)")
total_sum = 0 # storing the number to be returned
if len(clean_numbers) > 0:
# hack for now, better way TODO
if len(clean_numbers) == 1:
total_sum += american_number_system[clean_numbers[0]]
else:
if billion_index > -1:
billion_multiplier = number_formation(clean_numbers[0:billion_index])
total_sum += billion_multiplier * 1000000000
if million_index > -1:
if billion_index > -1:
million_multiplier = number_formation(clean_numbers[billion_index+1:million_index])
else:
million_multiplier = number_formation(clean_numbers[0:million_index])
total_sum += million_multiplier * 1000000
if thousand_index > -1:
if million_index > -1:
thousand_multiplier = number_formation(clean_numbers[million_index+1:thousand_index])
elif billion_index > -1 and million_index == -1:
thousand_multiplier = number_formation(clean_numbers[billion_index+1:thousand_index])
else:
thousand_multiplier = number_formation(clean_numbers[0:thousand_index])
total_sum += thousand_multiplier * 1000
if thousand_index > -1 and thousand_index != len(clean_numbers)-1:
hundreds = number_formation(clean_numbers[thousand_index+1:])
elif million_index > -1 and million_index != len(clean_numbers)-1:
hundreds = number_formation(clean_numbers[million_index+1:])
elif billion_index > -1 and billion_index != len(clean_numbers)-1:
hundreds = number_formation(clean_numbers[billion_index+1:])
elif thousand_index == -1 and million_index == -1 and billion_index == -1:
hundreds = number_formation(clean_numbers)
else:
hundreds = 0
total_sum += hundreds
# adding decimal part to total_sum (if exists)
if len(clean_decimal_numbers) > 0:
decimal_sum = get_decimal_sum(clean_decimal_numbers)
total_sum += decimal_sum
return total_sum | [
"ritesh@gmail.com"
] | ritesh@gmail.com |
be9c9aac045bc76f90341b2f5900954261aed765 | 9e01ec604f1aa1c61a47f30ab2f8a91d27848223 | /retro_games/helper_functions.py | 5522093a76edf8719989d981869ebe9997ee824b | [] | no_license | chrismomdjian/cis4100-project | f5097242d01f2c5a4a57629d05c5c92cab81ffe5 | 806b8172883c1fc6916c3d0881ab668668d89baf | refs/heads/main | 2023-04-18T07:57:56.486959 | 2021-04-24T23:07:01 | 2021-04-24T23:07:01 | 339,591,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 607 | py | # IGDB api requests
import requests
import json
from igdb.wrapper import IGDBWrapper
url = 'https://id.twitch.tv/oauth2/token'
client_id = 'bbi2z3lufqh6td2cfi5tkorsc4h1se'
client_secret = '2drg2w2smx3693nmxnbjbyqxvyxy1h'
data = {
'client_id': client_id,
'client_secret': client_secret,
'grant_type': 'client_credentials'
}
response = requests.post(url, data=data).json()
def api_call(endpoint, fields_string):
wrapper = IGDBWrapper(client_id, response['access_token'])
res = wrapper.api_request(endpoint, fields_string)
raw_json = res.decode('utf8').replace("'", '"')
return json.loads(raw_json) | [
"chrismomdjian@gmail.com"
] | chrismomdjian@gmail.com |
fa9f0975c26693b85b3639243ae4bfb63982d35c | 2388e81d802af1d3436325316e7c9e41b406b854 | /easyNav_pi_dispatcher/__init__.py | dca6e07a8b452077972cd2868b35e4313bcbbf86 | [] | no_license | easyNav/easyNav-pi-dispatcher | 83d37f6a587ba3a09c25defa8f269814b349a34c | ea73cadca980453f3dab51474bfacd6469519ce9 | refs/heads/master | 2021-01-23T13:30:31.119082 | 2014-11-15T05:48:59 | 2014-11-15T05:48:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 602 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of easyNav-pi-dispatcher.
# https://github.com/easyNav/easyNav-pi-dispatcher
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT-license
# Copyright (c) 2014 Joel Tong me@joeltong.org
import logging
logging.getLogger('').handlers = []
logging.basicConfig(
# filename = "a.log",
# filemode="w",
level = logging.INFO)
from easyNav_pi_dispatcher.version import __version__
from easyNav_pi_dispatcher.dispatcherDaemon import DispatcherDaemon
from easyNav_pi_dispatcher.dispatcherClient import DispatcherClient
| [
"me@joeltong.org"
] | me@joeltong.org |
c65f854a44397409a217f336217cd698923b646c | 20a358db6e9e9872453a7fb36ef21268054b241d | /pyml/jobs/utils.py | 84372d99ac44a52cc4345cd8cd530cab80f3d152 | [] | no_license | fengkaicnic/pyml | ee654cdef2ba107e1c1e8d598691af3accb96b3c | a19865cdb9eb69517258416a2b08b86f9d43a023 | refs/heads/master | 2021-01-21T04:40:44.659607 | 2016-07-29T08:33:07 | 2016-07-29T08:33:07 | 44,159,061 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,209 | py | #coding:utf8
import pickle
import sys
reload(sys)
import pdb
sys.setdefaultencoding('utf8')
def store_rst(decesion_tree,filename):
writer = open(filename,'w')
pickle.dump(decesion_tree,writer)
writer.close()
def read_rst(filename):
reader = open(filename,'rU')
return pickle.load(reader)
def get_key_positionsingle(postdct, position_dct, position):
if position_dct.has_key(position):
return position
for key in postdct.keys():
if key in position:
if u'总监' in position or u'主管' in position:
return postdct[key][2]
elif u'经理' in position or u'主任' in position:
return postdct[key][1]
else:
return postdct[key][0]
return 'None'
def get_key_position(postdct, positions):
for key in postdct.keys():
if key in positions[0][0]:
if u'总监' in positions[0][0] or u'主管' in positions[0][0]:
return postdct[key][2]
elif u'经理' in positions[0][0] or u'主任' in positions[0][0]:
return postdct[key][1]
else:
return postdct[key][0]
elif key in positions[1][0]:
if u'总监' in positions[1][0] or u'主管' in positions[1][0]:
return postdct[key][2]
elif u'经理' in positions[1][0] or u'主任' in positions[1][0]:
return postdct[key][1]
else:
return postdct[key][0]
return None
def get_key_position_old(postdct, positions):
for key in postdct.keys():
if key in positions[0]:
if u'总监' in positions[0] or u'主管' in positions[0]:
return postdct[key][2]
elif u'经理' in positions[0] or u'主任' in positions[0]:
return postdct[key][1]
else:
return postdct[key][0]
elif key in positions[1]:
if u'总监' in positions[1] or u'主管' in positions[1]:
return postdct[key][2]
elif u'经理' in positions[1] or u'主任' in positions[1]:
return postdct[key][1]
else:
return postdct[key][0]
return None
def get_position(major_dusdct, positions, position_dct, postdct):
if major_dusdct.has_key(positions[0][2]):
if major_dusdct[positions[0][2]].has_key(positions[0][1]):
for position_name in major_dusdct[positions[0][2]][positions[0][1]]:
if position_dct.has_key(position_name):
return position_name
for position_name in major_dusdct[positions[0][2]][positions[0][1]]:
for key in postdct.keys():
if key in positions[0]:
if u'总监' in positions[0] or u'主管' in positions[0]:
return postdct[key][2]
elif u'经理' in positions[0] or u'主任' in positions[0]:
return postdct[key][1]
else:
return postdct[key][0]
elif key in positions[1]:
if u'总监' in positions[0] or u'主管' in positions[0]:
return postdct[key][2]
elif u'经理' in positions[0] or u'主任' in positions[0]:
return postdct[key][1]
else:
return postdct[key][0]
return u'销售经理'
def get_industry_position(industryr, industrys, position_dct, postdct):
if industryr.has_key(industrys[0]):
if position_dct.has_key(industryr[industrys[0]][0]):
return industryr[industrys[0]][0]
else:
position = get_key_positionsingle(postdct, industryr[industrys[0]][0])
if position:
return position
if industryr.has_key(industrys[1]):
if position_dct.has_key(industryr[industrys[1]][0]):
return industryr[industrys[1]][0]
else:
position = get_key_positionsingle(postdct, industryr[industrys[1]][0])
if position:
return position
return 'test'
def get_labels(train_file, ind):
labels = []
for index,line in enumerate(open(train_file,'rU').readlines()):
print index
label = line.strip().split(',')[ind]
labels.append(label)
return labels
def format_data(dataset_file):
dataset = []
for index,line in enumerate(open(dataset_file,'rU').readlines()):
line = line.strip()
fea_and_label = line.split(',')
dataset.append(fea_and_label)
#features = [dataset[0][i] for i in range(len(dataset[0])-1)]
#sepal length������ȣ���sepal width�������ȣ���petal length�����곤�ȣ���petal width�������ȣ�
features = ['degree', 'age','start_age','bstart_year','gender','start_salary','start_size','major']
return dataset,features | [
"fkdhy@163.com"
] | fkdhy@163.com |
8123446b4176de628fdc5ac24079f01b204edebe | 9a5e36d07382aaa3c98e5050e525917c984986fa | /KNN.py | facf01b9c9a5be366f06ee1bbbc0a1be11622813 | [
"MIT"
] | permissive | waihoyu/MachineLearning | 2ec412464f63e5d8fa9c0e59f02bde7ccb69ebe0 | 47578079f1bfdb4867a5ade051dad317922ef4e0 | refs/heads/master | 2020-03-29T19:24:20.607173 | 2018-09-25T16:09:30 | 2018-09-25T16:09:30 | 150,261,251 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 45 | py | print("Hello World!")
print("Hello Python!") | [
"13701343809@qq.com"
] | 13701343809@qq.com |
fa77616b1e7b82ae745b8706f59544f48ca65e46 | 2f400a7888857ae557b894231fd5f13f668dce1b | /readme.py | b6d8d82626f78fa0ef45e29abef57302b428768f | [] | no_license | tonyfast/whereami | ea39e8b4ee96f6a5d8fb8a0db90331450bdea036 | 1ade40cb26b5a36b1f34b44bc2a54b1f954d8078 | refs/heads/master | 2021-05-06T09:00:32.871551 | 2017-12-14T23:37:59 | 2017-12-14T23:37:59 | 114,041,277 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,840 | py |
# coding: utf-8
# # [whereami](https://github.com/tonyfast/whereami)
#
# <code>[pip install git+https://github.com/tonyfast/whereami](https://github.com/tonyfast/whereami)</code>
#
# Logic circuits to identify the context a notebook's derived source is executing in.
#
# * Is Jupyter running this?
# * Is source in an Interactive session?
# * Is this a command line tool?
#
#
# > [Presentation](http://nbviewer.jupyter.org/format/slides/github/tonyfast/whereami/blob/master/whereami.ipynb#/) | [Source](whereami.ipynb) | [`readme`](readme.ipynb)
#
# [](https://mybinder.org/v2/gh/tonyfast/whereami/master?filepath=index.ipynb)
#
# ---
#
# ipython setup.py develop
# ## Basic usage
# In[1]:
huh = __import__('whereami').huh(globals())
huh
# ## Advanced
#
# `whereami` contains an object `state` that contains
# In[2]:
import whereami
whereami.state
# ## IPython magic
# In[3]:
get_ipython().magic('reload_ext whereami')
get_ipython().magic('run whereami.ipynb')
get_ipython().magic('run whereami.py')
# # Developer
#
# `whereami` contains it's own build steps. Run that notebook in `--execute` mode by checking for `huh.JUPYTER`.
# In[5]:
huh = __import__('whereami').huh(globals())
if huh.JUPYTER:
get_ipython().system('jupyter nbconvert --to markdown --execute whereami.ipynb')
get_ipython().system('python -m doctest whereami.py')
get_ipython().system('python -m pydoc -w whereami')
get_ipython().system('jupyter nbconvert --to python readme.ipynb')
get_ipython().system('jupyter nbconvert --to markdown readme.ipynb')
get_ipython().system('jupyter nbconvert index.ipynb')
# `whereami` as a package.
# In[6]:
if huh.MAIN and not huh.JUPYTER:
__import__('setuptools').setup(name="whereami", py_modules=['whereami'])
| [
"tony.fast@gmail.com"
] | tony.fast@gmail.com |
91a29245b0b4bd452310cbac2deb6210b14f9069 | 863509e794b069c9688f6263454c06d2c48868b2 | /backend/backend/serializers.py | 44a1dac69ad9d3cf60ada54dba83a4fa368adac5 | [
"MIT"
] | permissive | TheDuckWhisperer/tournesol | c6df38185963bbecc6109dac275075a3ceca857a | 0fde7587e91a42e5a2218f2ffb70d4fc8cff7f73 | refs/heads/master | 2023-04-18T15:31:20.627952 | 2021-05-01T19:59:07 | 2021-05-01T19:59:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 213 | py | from backend.models import Video
from rest_framework import serializers
class VideoSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Video
fields = ['video_id']
| [
"sergei.volodin.ch@gmail.com"
] | sergei.volodin.ch@gmail.com |
ac7f4af3d97b64a71fed6f342aa0ebb4858a79af | eb9110e91fdc78acbe4e34fb2966523de65536b4 | /store/migrations/0002_variation.py | c7b8c5ffb97d71a658910f5a492a9f828f4b0ebf | [] | no_license | NaveenMulaga/HealthyHarvest-sdp-2 | 050b7694f9345449c0a10a993560936504ff49b1 | a785e29b979c6abda4fb2b30843134f765b53129 | refs/heads/main | 2023-04-24T07:15:40.781190 | 2021-05-18T12:46:52 | 2021-05-18T12:46:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 916 | py | # Generated by Django 3.1 on 2021-05-18 06:19
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('store', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Variation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('variation_category', models.CharField(choices=[('type', 'type'), ('size', 'size')], max_length=150)),
('variation_value', models.CharField(max_length=150)),
('is_active', models.BooleanField(default=True)),
('created_date', models.DateTimeField(auto_now=True)),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.product')),
],
),
]
| [
"mnaveeb1105@gmail.com"
] | mnaveeb1105@gmail.com |
895ebfce2c1523989ede3eed768142c7b89b798e | ed58239ecfeab4bcb9d7e86ebfd27a696b4d4735 | /FinalAssessment_ID/Question1/FA_TreeTest.py | 50e373ea91f939fd008378ef9efe7038ab8d8418 | [] | no_license | connorkuljis/Data-Structures-and-Algorithms | eca225665b280023ccf510087624a34154868bd1 | e15d3b2c4170db94ec68336d741bd0ebab68edac | refs/heads/master | 2023-01-14T14:49:16.716643 | 2020-11-17T15:58:24 | 2020-11-17T15:58:24 | 285,579,328 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 277 | py | #
# DSA Final Assessment Question 1 - FA_TreeTest.py
#
# Name :
# ID :
#
#
from FA_BinarySearchTree import *
print(“\n**** Question 1: Testing Trees ****\n”)
# Put your code here
t = FA_BinarySearchTree()
print(“\n**** Tests Complete ****\n”)
| [
"conkuljis@gmail.com"
] | conkuljis@gmail.com |
385884e95833990887aef6c6cf967707a572234b | 91a8117a4da296d4c0d317b3b2c79e4df49ff14d | /services/recommender/api/models.py | 7c5924ff7c3952302d2fa1ea6d8328d15d33a601 | [
"MIT"
] | permissive | IMDB-2-0/CS497-B | b3650af48301420a365ffaefd3902f4f34ccde2b | 0b94bb71ff216cef437cdcbbe897c2ae66623a14 | refs/heads/master | 2023-05-01T00:44:59.098971 | 2021-05-07T22:26:21 | 2021-05-07T22:26:21 | 341,971,017 | 2 | 0 | MIT | 2021-05-07T22:26:22 | 2021-02-24T16:58:03 | JavaScript | UTF-8 | Python | false | false | 339 | py | from typing import List, Optional
from pydantic import BaseModel
class RatingsIn(BaseModel):
userid: int
movieid: int
rating: str # TODO (Incorrect types)
class RatingsOut(BaseModel):
movieid: int
title: str
imdbid: int
tmdbid: int
class RecommenderOut(BaseModel):
message: str
data: List[RatingsOut] | [
"hansquiogue@gmail.com"
] | hansquiogue@gmail.com |
706c1daf836beebc3e6578a37fe6eeb5aa6b2d24 | ed0c14eee26ed0288783d95c7ccbf382a8cf3bb2 | /.config/conkykeep/conkyKeep/__main__.py | 695b261089d406344be2d8efd2a68d995ea56178 | [
"MIT"
] | permissive | epityathens/ArchSystemConky | a1a68c49314496effff9a305996a624f5f6fa4e8 | dcdf56c719c4934874a56a6b2cfcb4936c8c5165 | refs/heads/master | 2021-08-24T03:12:13.874508 | 2017-12-07T20:30:50 | 2017-12-07T20:30:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | #!/usr/bin/env python3
# encoding: utf-8
from conkyKeep.conky_keep import main
main()
| [
"mr.mustafa.ozhan@gmail.com"
] | mr.mustafa.ozhan@gmail.com |
48d0991c3a40847c562778fb746ab28c7e4596a1 | 9a59d08730c65585ac7db21d527dae858b2234a7 | /LPTHW/ex13+.py | 577c8bf6365f0089b6bab67dae4218e233281fbf | [] | no_license | cpiccirilli1/LPTHW | 8345e305e7531b608c781a40f5896b920985ee4c | 5e8b1504fa5b1143c84c6b07065a2ea5e7453743 | refs/heads/master | 2020-05-03T15:03:29.221996 | 2019-03-31T14:04:42 | 2019-03-31T14:04:42 | 178,695,911 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 248 | py | from sys import argv
script, banana, orange, apple, grapes = argv
print script
print banana
b = raw_input("Yes or No?")
print orange
c = raw_input("Yes or No?")
print apple
d = raw_input("Yes or No?")
print grapes
e = raw_input("Yes or No?")
| [
"noreply@github.com"
] | cpiccirilli1.noreply@github.com |
63f2f9d35ae8e52ff01b811ab0e2e6e6cb475b85 | 35bfdecf361d5beb5c531db41df4d0fb54059122 | /sentiment_analyser/settings.py | 04d159849512571c590c05ceb9d7e1364d011b22 | [
"MIT"
] | permissive | gorkemyontem/SWE-573-2020 | 5d940ff06d6f57ffa23dd9634da97def6f7c77e7 | 6a9ca57d294066fcc0db640f45d38d7341754a68 | refs/heads/main | 2023-03-02T20:52:42.360556 | 2021-02-13T12:27:05 | 2021-02-13T12:27:05 | 309,190,078 | 1 | 1 | MIT | 2020-12-22T19:19:10 | 2020-11-01T21:17:24 | Python | UTF-8 | Python | false | false | 6,649 | py | """
Django settings for sentiment_analyser project.
Generated by 'django-admin startproject' using Django 3.1.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.dALLOWED_HOSTSjangoproject.com/en/3.1/ref/settings/
"""
import os
import environ
from pathlib import Path
env = environ.Env()
ENV_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), '.env')
environ.Env.read_env(ENV_DIR)
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str('SECRET_KEY', 'sample_unsafe_secret')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env('DEBUG')
# ALLOWED_HOSTS = []
ALLOWED_HOSTS = ["*", "localhost", "0.0.0.0", "127.0.0.1"]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
# https://github.com/evansd/whitenoise
'whitenoise.runserver_nostatic',
'django.contrib.staticfiles',
'django.contrib.sites',
'django.contrib.humanize',
# Third-party
'allauth',
'allauth.account',
'crispy_forms',
'debug_toolbar',
'django_q',
# Local
'accounts',
'pages',
'scraper',
'analyser'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'sentiment_analyser.urls'
WSGI_APPLICATION = 'sentiment_analyser.wsgi.application'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'], # Changing default behavior
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': env('DATABASE_NAME'),
'USER': env('DATABASE_USER'),
'PASSWORD': env('DATABASE_PASSWORD'),
'HOST': env('DATABASE_HOST'),
'PORT': env('DATABASE_PORT'),
"OPTIONS": {
"sslmode": "verify-ca",
"sslrootcert": os.path.join(BASE_DIR, "amazon-rds-ca-cert.pem")
}
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
# TIME_ZONE = 'UTC'
TIME_ZONE = 'Europe/Istanbul'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# STATIC
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(BASE_DIR.joinpath('staticfiles'))
# https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = [str(BASE_DIR.joinpath('static'))]
# http://whitenoise.evans.io/en/stable/django.html#add-compression-and-caching-support
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# DJANGO-CRISPY-FORMS CONFIGS
# ------------------------------------------------------------------------------
# https://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = "bootstrap4"
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# DJANGO-DEBUG-TOOLBAR CONFIGS
# ------------------------------------------------------------------------------
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html
# https://docs.djangoproject.com/en/dev/ref/settings/#internal-ips
INTERNAL_IPS = ['127.0.0.1']
# CUSTOM USER MODEL CONFIGS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/topics/auth/customizing/#substituting-a-custom-user-model
AUTH_USER_MODEL = 'accounts.CustomUser'
# DJANGO-ALLAUTH CONFIGS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# https://docs.djangoproject.com/en/dev/ref/settings/#login-redirect-url
LOGIN_REDIRECT_URL = 'home'
# https://django-allauth.readthedocs.io/en/latest/views.html#logout-account-logout
ACCOUNT_LOGOUT_REDIRECT_URL = 'home'
# https://django-allauth.readthedocs.io/en/latest/installation.html?highlight=backends
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_SESSION_REMEMBER = True
ACCOUNT_SIGNUP_PASSWORD_ENTER_TWICE = False
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_UNIQUE_EMAIL = True
Q_CLUSTER = {
'name': 'DjangORM',
'workers': 1,
'timeout': 4800,
'retry': 4801,
'queue_limit': 500,
'orm': 'default'
}
| [
"gorkem@tiko.es"
] | gorkem@tiko.es |
a38a931ae0f6d4c485ff80a901bbc8b65e7e767d | f241df59f8e6c13cab13ec3b5d5d9ade89c419f7 | /leo/modes/phpsection.py | 8102358d1ee25398a514e4922a490b90b915f295 | [
"BSD-3-Clause",
"MIT"
] | permissive | leo-editor/leo-editor | 6c6e09c1ae89cb9b1952c9f5b0c3a6c76ae9e625 | a3f6c3ebda805dc40cd93123948f153a26eccee5 | refs/heads/devel | 2023-08-28T08:57:01.365701 | 2023-08-23T10:21:57 | 2023-08-23T10:21:57 | 16,728,437 | 1,671 | 219 | NOASSERTION | 2023-09-14T19:39:01 | 2014-02-11T11:14:41 | Python | UTF-8 | Python | false | false | 102,224 | py | # Leo colorizer control file for phpsection mode.
# This file is in the public domain.
# Properties for phpsection mode.
properties = {
"commentEnd": "-->",
"commentStart": "<!--",
"indentCloseBrackets": "}",
"indentOpenBrackets": "{",
"initialModeDelegate": "phpsection::PHP",
"lineUpClosingBracket": "true",
}
# Attributes dict for phpsection_main ruleset.
phpsection_main_attributes_dict = {
"default": "null",
"digit_re": "",
"escape": "",
"highlight_digits": "true",
"ignore_case": "true",
"no_word_sep": "",
}
# Attributes dict for phpsection_tags ruleset.
phpsection_tags_attributes_dict = {
"default": "MARKUP",
"digit_re": "",
"escape": "",
"highlight_digits": "true",
"ignore_case": "true",
"no_word_sep": "",
}
# Attributes dict for phpsection_tags_literal ruleset.
phpsection_tags_literal_attributes_dict = {
"default": "LITERAL1",
"digit_re": "",
"escape": "",
"highlight_digits": "true",
"ignore_case": "true",
"no_word_sep": "",
}
# Attributes dict for phpsection_php ruleset.
phpsection_php_attributes_dict = {
"default": "LITERAL1",
"digit_re": "",
"escape": "\\",
"highlight_digits": "true",
"ignore_case": "true",
"no_word_sep": "",
}
# Attributes dict for phpsection_php_literal ruleset.
phpsection_php_literal_attributes_dict = {
"default": "LITERAL1",
"digit_re": "",
"escape": "\\",
"highlight_digits": "true",
"ignore_case": "true",
"no_word_sep": "",
}
# Colorizer attributes for the phpsection_javascript ruleset: default
# token type MARKUP, with backslash as the escape character.
phpsection_javascript_attributes_dict = dict(
    default="MARKUP",
    digit_re="",
    escape="\\",
    highlight_digits="true",
    ignore_case="true",
    no_word_sep="",
)
# Colorizer attributes for the phpsection_javascript_php ruleset:
# default token type MARKUP, with backslash as the escape character.
phpsection_javascript_php_attributes_dict = dict(
    default="MARKUP",
    digit_re="",
    escape="\\",
    highlight_digits="true",
    ignore_case="true",
    no_word_sep="",
)
# Colorizer attributes for the phpsection_phpdoc ruleset: default token
# type COMMENT3, with backslash as the escape character.
phpsection_phpdoc_attributes_dict = dict(
    default="COMMENT3",
    digit_re="",
    escape="\\",
    highlight_digits="true",
    ignore_case="true",
    no_word_sep="",
)
# Master table for phpsection mode: maps each ruleset name to its
# attributes dict defined above.
attributesDictDict = dict(
    phpsection_javascript=phpsection_javascript_attributes_dict,
    phpsection_javascript_php=phpsection_javascript_php_attributes_dict,
    phpsection_main=phpsection_main_attributes_dict,
    phpsection_php=phpsection_php_attributes_dict,
    phpsection_php_literal=phpsection_php_literal_attributes_dict,
    phpsection_phpdoc=phpsection_phpdoc_attributes_dict,
    phpsection_tags=phpsection_tags_attributes_dict,
    phpsection_tags_literal=phpsection_tags_literal_attributes_dict,
)
# Keywords dicts for the phpsection_main, phpsection_tags and
# phpsection_tags_literal rulesets: these rulesets define no keywords,
# so each table is empty.
phpsection_main_keywords_dict = dict()
phpsection_tags_keywords_dict = dict()
phpsection_tags_literal_keywords_dict = dict()
# Keywords dict for phpsection_php ruleset.
phpsection_php_keywords_dict = {
"__class__": "keyword3",
"__file__": "keyword3",
"__function__": "keyword3",
"__line__": "keyword3",
"__method__": "keyword3",
"abs": "keyword2",
"abstract": "keyword1",
"accept_connect": "keyword2",
"acos": "keyword2",
"add": "keyword2",
"add_iovec": "keyword2",
"addaction": "keyword2",
"addcolor": "keyword2",
"addcslashes": "keyword2",
"addentry": "keyword2",
"addfill": "keyword2",
"addshape": "keyword2",
"addslashes": "keyword2",
"addstring": "keyword2",
"align": "keyword2",
"and": "operator",
"apache_child_terminate": "keyword2",
"apache_lookup_uri": "keyword2",
"apache_note": "keyword2",
"apache_sub_req": "keyword2",
"array": "keyword1",
"array_combine": "keyword2",
"array_count_values": "keyword2",
"array_diff": "keyword2",
"array_diff_assoc": "keyword2",
"array_diff_uassoc": "keyword2",
"array_filter": "keyword2",
"array_flip": "keyword2",
"array_intersect": "keyword2",
"array_intersect_assoc": "keyword2",
"array_keys": "keyword2",
"array_map": "keyword2",
"array_merge": "keyword2",
"array_merge_recursive": "keyword2",
"array_multisort": "keyword2",
"array_pad": "keyword2",
"array_pop": "keyword2",
"array_push": "keyword2",
"array_rand": "keyword2",
"array_reduce": "keyword2",
"array_reverse": "keyword2",
"array_search": "keyword2",
"array_shift": "keyword2",
"array_slice": "keyword2",
"array_splice": "keyword2",
"array_sum": "keyword2",
"array_udiff": "keyword2",
"array_udiff_assoc": "keyword2",
"array_udiff_uassoc": "keyword2",
"array_unique": "keyword2",
"array_unshift": "keyword2",
"array_values": "keyword2",
"array_walk": "keyword2",
"array_walk_recursive": "keyword2",
"arsort": "keyword2",
"as": "keyword1",
"asin": "keyword2",
"asort": "keyword2",
"aspell_check": "keyword2",
"aspell_check_raw": "keyword2",
"aspell_new": "keyword2",
"aspell_suggest": "keyword2",
"assert": "keyword2",
"assert_options": "keyword2",
"atan": "keyword2",
"atan2": "keyword2",
"base64_decode": "keyword2",
"base64_encode": "keyword2",
"base_convert": "keyword2",
"basename": "keyword2",
"bcadd": "keyword2",
"bccomp": "keyword2",
"bcdiv": "keyword2",
"bcmod": "keyword2",
"bcmul": "keyword2",
"bcpow": "keyword2",
"bcscale": "keyword2",
"bcsqrt": "keyword2",
"bcsub": "keyword2",
"bin2hex": "keyword2",
"bind": "keyword2",
"bindec": "keyword2",
"bindtextdomain": "keyword2",
"break": "keyword1",
"build_iovec": "keyword2",
"bzclose": "keyword2",
"bzcompress": "keyword2",
"bzdecompress": "keyword2",
"bzerrno": "keyword2",
"bzerror": "keyword2",
"bzerrstr": "keyword2",
"bzflush": "keyword2",
"bzopen": "keyword2",
"bzread": "keyword2",
"bzwrite": "keyword2",
"call_user_func": "keyword2",
"call_user_func_array": "keyword2",
"call_user_method": "keyword2",
"call_user_method_array": "keyword2",
"case": "keyword1",
"catch": "keyword1",
"ccvs_add": "keyword2",
"ccvs_auth": "keyword2",
"ccvs_command": "keyword2",
"ccvs_count": "keyword2",
"ccvs_delete": "keyword2",
"ccvs_done": "keyword2",
"ccvs_init": "keyword2",
"ccvs_lookup": "keyword2",
"ccvs_new": "keyword2",
"ccvs_report": "keyword2",
"ccvs_return": "keyword2",
"ccvs_reverse": "keyword2",
"ccvs_sale": "keyword2",
"ccvs_status": "keyword2",
"ccvs_textvalue": "keyword2",
"ccvs_void": "keyword2",
"ceil": "keyword2",
"chdir": "keyword2",
"checkdate": "keyword2",
"checkdnsrr": "keyword2",
"chgrp": "keyword2",
"chmod": "keyword2",
"chop": "keyword2",
"chown": "keyword2",
"chr": "keyword2",
"chroot": "keyword2",
"chunk_split": "keyword2",
"class": "keyword1",
"class_exists": "keyword2",
"clearstatcache": "keyword2",
"clone": "keyword1",
"close": "keyword2",
"closedir": "keyword2",
"closelog": "keyword2",
"com_get": "keyword2",
"com_invoke": "keyword2",
"com_load": "keyword2",
"com_propget": "keyword2",
"com_propput": "keyword2",
"com_propset": "keyword2",
"com_set": "keyword2",
"compact": "keyword2",
"confirm_cybermut_compiled": "keyword2",
"confirm_extname_compiled": "keyword2",
"connect": "keyword2",
"connection_aborted": "keyword2",
"connection_status": "keyword2",
"const": "keyword1",
"constant": "keyword2",
"continue": "keyword1",
"convert_cyr_string": "keyword2",
"convert_uudecode": "keyword2",
"convert_uuencode": "keyword2",
"copy": "keyword2",
"cos": "keyword2",
"count": "keyword2",
"count_chars": "keyword2",
"cpdf_add_annotation": "keyword2",
"cpdf_add_outline": "keyword2",
"cpdf_arc": "keyword2",
"cpdf_begin_text": "keyword2",
"cpdf_circle": "keyword2",
"cpdf_clip": "keyword2",
"cpdf_close": "keyword2",
"cpdf_closepath": "keyword2",
"cpdf_closepath_fill_stroke": "keyword2",
"cpdf_closepath_stroke": "keyword2",
"cpdf_continue_text": "keyword2",
"cpdf_curveto": "keyword2",
"cpdf_end_text": "keyword2",
"cpdf_fill": "keyword2",
"cpdf_fill_stroke": "keyword2",
"cpdf_finalize": "keyword2",
"cpdf_finalize_page": "keyword2",
"cpdf_global_set_document_limits": "keyword2",
"cpdf_import_jpeg": "keyword2",
"cpdf_lineto": "keyword2",
"cpdf_moveto": "keyword2",
"cpdf_newpath": "keyword2",
"cpdf_open": "keyword2",
"cpdf_output_buffer": "keyword2",
"cpdf_page_init": "keyword2",
"cpdf_place_inline_image": "keyword2",
"cpdf_rect": "keyword2",
"cpdf_restore": "keyword2",
"cpdf_rlineto": "keyword2",
"cpdf_rmoveto": "keyword2",
"cpdf_rotate": "keyword2",
"cpdf_rotate_text": "keyword2",
"cpdf_save": "keyword2",
"cpdf_save_to_file": "keyword2",
"cpdf_scale": "keyword2",
"cpdf_set_action_url": "keyword2",
"cpdf_set_char_spacing": "keyword2",
"cpdf_set_creator": "keyword2",
"cpdf_set_current_page": "keyword2",
"cpdf_set_font": "keyword2",
"cpdf_set_font_directories": "keyword2",
"cpdf_set_font_map_file": "keyword2",
"cpdf_set_horiz_scaling": "keyword2",
"cpdf_set_keywords": "keyword2",
"cpdf_set_leading": "keyword2",
"cpdf_set_page_animation": "keyword2",
"cpdf_set_subject": "keyword2",
"cpdf_set_text_matrix": "keyword2",
"cpdf_set_text_pos": "keyword2",
"cpdf_set_text_rendering": "keyword2",
"cpdf_set_text_rise": "keyword2",
"cpdf_set_title": "keyword2",
"cpdf_set_viewer_preferences": "keyword2",
"cpdf_set_word_spacing": "keyword2",
"cpdf_setdash": "keyword2",
"cpdf_setflat": "keyword2",
"cpdf_setgray": "keyword2",
"cpdf_setgray_fill": "keyword2",
"cpdf_setgray_stroke": "keyword2",
"cpdf_setlinecap": "keyword2",
"cpdf_setlinejoin": "keyword2",
"cpdf_setlinewidth": "keyword2",
"cpdf_setmiterlimit": "keyword2",
"cpdf_setrgbcolor": "keyword2",
"cpdf_setrgbcolor_fill": "keyword2",
"cpdf_setrgbcolor_stroke": "keyword2",
"cpdf_show": "keyword2",
"cpdf_show_xy": "keyword2",
"cpdf_stringwidth": "keyword2",
"cpdf_stroke": "keyword2",
"cpdf_text": "keyword2",
"cpdf_translate": "keyword2",
"crack_check": "keyword2",
"crack_closedict": "keyword2",
"crack_getlastmessage": "keyword2",
"crack_opendict": "keyword2",
"crash": "keyword2",
"crc32": "keyword2",
"create_function": "keyword2",
"crypt": "keyword2",
"ctype_alnum": "keyword2",
"ctype_alpha": "keyword2",
"ctype_cntrl": "keyword2",
"ctype_digit": "keyword2",
"ctype_graph": "keyword2",
"ctype_lower": "keyword2",
"ctype_print": "keyword2",
"ctype_punct": "keyword2",
"ctype_space": "keyword2",
"ctype_upper": "keyword2",
"ctype_xdigit": "keyword2",
"curl_close": "keyword2",
"curl_errno": "keyword2",
"curl_error": "keyword2",
"curl_exec": "keyword2",
"curl_getinfo": "keyword2",
"curl_init": "keyword2",
"curl_setopt": "keyword2",
"curl_version": "keyword2",
"current": "keyword2",
"cv_add": "keyword2",
"cv_auth": "keyword2",
"cv_command": "keyword2",
"cv_count": "keyword2",
"cv_delete": "keyword2",
"cv_done": "keyword2",
"cv_init": "keyword2",
"cv_lookup": "keyword2",
"cv_new": "keyword2",
"cv_report": "keyword2",
"cv_return": "keyword2",
"cv_reverse": "keyword2",
"cv_sale": "keyword2",
"cv_status": "keyword2",
"cv_textvalue": "keyword2",
"cv_void": "keyword2",
"cybercash_base64_decode": "keyword2",
"cybercash_base64_encode": "keyword2",
"cybercash_decr": "keyword2",
"cybercash_encr": "keyword2",
"cybermut_creerformulairecm": "keyword2",
"cybermut_creerreponsecm": "keyword2",
"cybermut_testmac": "keyword2",
"date": "keyword2",
"dba_close": "keyword2",
"dba_delete": "keyword2",
"dba_exists": "keyword2",
"dba_fetch": "keyword2",
"dba_firstkey": "keyword2",
"dba_insert": "keyword2",
"dba_nextkey": "keyword2",
"dba_open": "keyword2",
"dba_optimize": "keyword2",
"dba_popen": "keyword2",
"dba_replace": "keyword2",
"dba_sync": "keyword2",
"dbase_add_record": "keyword2",
"dbase_close": "keyword2",
"dbase_create": "keyword2",
"dbase_delete_record": "keyword2",
"dbase_get_record": "keyword2",
"dbase_get_record_with_names": "keyword2",
"dbase_numfields": "keyword2",
"dbase_numrecords": "keyword2",
"dbase_open": "keyword2",
"dbase_pack": "keyword2",
"dbase_replace_record": "keyword2",
"dblist": "keyword2",
"dbmclose": "keyword2",
"dbmdelete": "keyword2",
"dbmexists": "keyword2",
"dbmfetch": "keyword2",
"dbmfirstkey": "keyword2",
"dbminsert": "keyword2",
"dbmnextkey": "keyword2",
"dbmopen": "keyword2",
"dbmreplace": "keyword2",
"dbx_close": "keyword2",
"dbx_cmp_asc": "keyword2",
"dbx_cmp_desc": "keyword2",
"dbx_connect": "keyword2",
"dbx_error": "keyword2",
"dbx_query": "keyword2",
"dbx_sort": "keyword2",
"dcgettext": "keyword2",
"debug_backtrace": "keyword2",
"debug_print_backtrace": "keyword2",
"decbin": "keyword2",
"dechex": "keyword2",
"declare": "keyword1",
"decoct": "keyword2",
"default": "keyword1",
"define": "keyword2",
"define_syslog_variables": "keyword2",
"defined": "keyword2",
"deg2rad": "keyword2",
"delete_iovec": "keyword2",
"dgettext": "keyword2",
"die": "keyword2",
"dir": "keyword2",
"dirname": "keyword2",
"diskfreespace": "keyword2",
"display_disabled_function": "keyword2",
"dl": "keyword2",
"do": "keyword1",
"domxml_add_root": "keyword2",
"domxml_attributes": "keyword2",
"domxml_children": "keyword2",
"domxml_dumpmem": "keyword2",
"domxml_elem_get_attribute": "keyword2",
"domxml_elem_set_attribute": "keyword2",
"domxml_get_attribute": "keyword2",
"domxml_getattr": "keyword2",
"domxml_new_child": "keyword2",
"domxml_new_xmldoc": "keyword2",
"domxml_node": "keyword2",
"domxml_node_attributes": "keyword2",
"domxml_node_children": "keyword2",
"domxml_node_new_child": "keyword2",
"domxml_node_set_content": "keyword2",
"domxml_node_unlink_node": "keyword2",
"domxml_root": "keyword2",
"domxml_set_attribute": "keyword2",
"domxml_setattr": "keyword2",
"domxml_unlink_node": "keyword2",
"domxml_version": "keyword2",
"doubleval": "keyword2",
"drawarc": "keyword2",
"drawcircle": "keyword2",
"drawcubic": "keyword2",
"drawcubicto": "keyword2",
"drawcurve": "keyword2",
"drawcurveto": "keyword2",
"drawglyph": "keyword2",
"drawline": "keyword2",
"drawlineto": "keyword2",
"each": "keyword2",
"easter_date": "keyword2",
"easter_days": "keyword2",
"echo": "keyword1",
"else": "keyword1",
"elseif": "keyword1",
"empty": "keyword1",
"end": "keyword2",
"endfor": "keyword1",
"endforeach": "keyword1",
"endif": "keyword1",
"endswitch": "keyword1",
"endwhile": "keyword1",
"ereg": "keyword2",
"ereg_replace": "keyword2",
"eregi": "keyword2",
"eregi_replace": "keyword2",
"error_log": "keyword2",
"error_reporting": "keyword2",
"escapeshellarg": "keyword2",
"escapeshellcmd": "keyword2",
"exec": "keyword2",
"exit": "keyword2",
"exp": "keyword2",
"explode": "keyword2",
"extends": "keyword1",
"extension_loaded": "keyword2",
"extract": "keyword2",
"ezmlm_hash": "keyword2",
"false": "keyword3",
"fbsql": "keyword2",
"fbsql_affected_rows": "keyword2",
"fbsql_autocommit": "keyword2",
"fbsql_close": "keyword2",
"fbsql_commit": "keyword2",
"fbsql_connect": "keyword2",
"fbsql_create_db": "keyword2",
"fbsql_data_seek": "keyword2",
"fbsql_database": "keyword2",
"fbsql_database_password": "keyword2",
"fbsql_db_query": "keyword2",
"fbsql_drop_db": "keyword2",
"fbsql_errno": "keyword2",
"fbsql_error": "keyword2",
"fbsql_fetch_array": "keyword2",
"fbsql_fetch_assoc": "keyword2",
"fbsql_fetch_field": "keyword2",
"fbsql_fetch_lengths": "keyword2",
"fbsql_fetch_object": "keyword2",
"fbsql_fetch_row": "keyword2",
"fbsql_field_flags": "keyword2",
"fbsql_field_len": "keyword2",
"fbsql_field_name": "keyword2",
"fbsql_field_seek": "keyword2",
"fbsql_field_table": "keyword2",
"fbsql_field_type": "keyword2",
"fbsql_free_result": "keyword2",
"fbsql_hostname": "keyword2",
"fbsql_insert_id": "keyword2",
"fbsql_list_dbs": "keyword2",
"fbsql_list_fields": "keyword2",
"fbsql_list_tables": "keyword2",
"fbsql_next_result": "keyword2",
"fbsql_num_fields": "keyword2",
"fbsql_num_rows": "keyword2",
"fbsql_password": "keyword2",
"fbsql_pconnect": "keyword2",
"fbsql_query": "keyword2",
"fbsql_result": "keyword2",
"fbsql_rollback": "keyword2",
"fbsql_select_db": "keyword2",
"fbsql_start_db": "keyword2",
"fbsql_stop_db": "keyword2",
"fbsql_username": "keyword2",
"fbsql_warnings": "keyword2",
"fclose": "keyword2",
"fd_alloc": "keyword2",
"fd_clear": "keyword2",
"fd_dealloc": "keyword2",
"fd_isset": "keyword2",
"fd_set": "keyword2",
"fd_zero": "keyword2",
"fdf_add_template": "keyword2",
"fdf_close": "keyword2",
"fdf_create": "keyword2",
"fdf_get_file": "keyword2",
"fdf_get_status": "keyword2",
"fdf_get_value": "keyword2",
"fdf_next_field_name": "keyword2",
"fdf_open": "keyword2",
"fdf_save": "keyword2",
"fdf_set_ap": "keyword2",
"fdf_set_file": "keyword2",
"fdf_set_flags": "keyword2",
"fdf_set_javascript_action": "keyword2",
"fdf_set_opt": "keyword2",
"fdf_set_status": "keyword2",
"fdf_set_submit_form_action": "keyword2",
"fdf_set_value": "keyword2",
"feof": "keyword2",
"fetch_iovec": "keyword2",
"fflush": "keyword2",
"fgetc": "keyword2",
"fgetcsv": "keyword2",
"fgets": "keyword2",
"fgetss": "keyword2",
"file": "keyword2",
"file_exists": "keyword2",
"file_get_contents": "keyword2",
"file_put_contents": "keyword2",
"fileatime": "keyword2",
"filectime": "keyword2",
"filegroup": "keyword2",
"fileinode": "keyword2",
"filemtime": "keyword2",
"fileowner": "keyword2",
"fileperms": "keyword2",
"filepro": "keyword2",
"filepro_fieldcount": "keyword2",
"filepro_fieldname": "keyword2",
"filepro_fieldtype": "keyword2",
"filepro_fieldwidth": "keyword2",
"filepro_retrieve": "keyword2",
"filepro_rowcount": "keyword2",
"filesize": "keyword2",
"filetype": "keyword2",
"final": "keyword1",
"floatval": "keyword2",
"flock": "keyword2",
"floor": "keyword2",
"flush": "keyword2",
"fopen": "keyword2",
"fopenstream": "keyword2",
"for": "keyword1",
"foreach": "keyword1",
"fpassthru": "keyword2",
"fputs": "keyword2",
"fread": "keyword2",
"free_iovec": "keyword2",
"frenchtojd": "keyword2",
"fribidi_log2vis": "keyword2",
"fscanf": "keyword2",
"fseek": "keyword2",
"fsockopen": "keyword2",
"fstat": "keyword2",
"ftell": "keyword2",
"ftp_alloc": "keyword2",
"ftp_cdup": "keyword2",
"ftp_chdir": "keyword2",
"ftp_connect": "keyword2",
"ftp_delete": "keyword2",
"ftp_exec": "keyword2",
"ftp_fget": "keyword2",
"ftp_fput": "keyword2",
"ftp_get": "keyword2",
"ftp_login": "keyword2",
"ftp_mdtm": "keyword2",
"ftp_mkdir": "keyword2",
"ftp_nlist": "keyword2",
"ftp_pasv": "keyword2",
"ftp_put": "keyword2",
"ftp_pwd": "keyword2",
"ftp_quit": "keyword2",
"ftp_rawlist": "keyword2",
"ftp_rename": "keyword2",
"ftp_rmdir": "keyword2",
"ftp_site": "keyword2",
"ftp_size": "keyword2",
"ftp_ssl_connect": "keyword2",
"ftp_systype": "keyword2",
"ftruncate": "keyword2",
"func_get_arg": "keyword2",
"func_get_args": "keyword2",
"func_num_args": "keyword2",
"function": "keyword1",
"function_exists": "keyword2",
"fwrite": "keyword2",
"gd_info": "keyword2",
"get_all_headers": "keyword2",
"get_browser": "keyword2",
"get_cfg_var": "keyword2",
"get_class": "keyword2",
"get_class_methods": "keyword2",
"get_class_vars": "keyword2",
"get_current_user": "keyword2",
"get_declared_classes": "keyword2",
"get_declared_interfaces": "keyword2",
"get_defined_functions": "keyword2",
"get_defined_vars": "keyword2",
"get_extension_funcs": "keyword2",
"get_headers": "keyword2",
"get_html_translation_table": "keyword2",
"get_included_files": "keyword2",
"get_loaded_extensions": "keyword2",
"get_magic_quotes_gpc": "keyword2",
"get_magic_quotes_runtime": "keyword2",
"get_meta_tags": "keyword2",
"get_object_vars": "keyword2",
"get_parent_class": "keyword2",
"get_required_files": "keyword2",
"get_resource_type": "keyword2",
"getallheaders": "keyword2",
"getascent": "keyword2",
"getcwd": "keyword2",
"getdate": "keyword2",
"getdescent": "keyword2",
"getenv": "keyword2",
"getheight": "keyword2",
"gethostbyaddr": "keyword2",
"gethostbyname": "keyword2",
"gethostbynamel": "keyword2",
"getimagesize": "keyword2",
"getlastmod": "keyword2",
"getleading": "keyword2",
"getmxrr": "keyword2",
"getmyinode": "keyword2",
"getmypid": "keyword2",
"getmyuid": "keyword2",
"getopt": "keyword2",
"getpeername": "keyword2",
"getprotobyname": "keyword2",
"getprotobynumber": "keyword2",
"getrandmax": "keyword2",
"getrusage": "keyword2",
"getservbyname": "keyword2",
"getservbyport": "keyword2",
"getshape1": "keyword2",
"getshape2": "keyword2",
"getsockname": "keyword2",
"getsockopt": "keyword2",
"gettext": "keyword2",
"gettimeofday": "keyword2",
"gettype": "keyword2",
"getwidth": "keyword2",
"global": "keyword1",
"gmdate": "keyword2",
"gmmktime": "keyword2",
"gmp_abs": "keyword2",
"gmp_add": "keyword2",
"gmp_and": "keyword2",
"gmp_clrbit": "keyword2",
"gmp_cmp": "keyword2",
"gmp_com": "keyword2",
"gmp_div": "keyword2",
"gmp_div_q": "keyword2",
"gmp_div_qr": "keyword2",
"gmp_div_r": "keyword2",
"gmp_divexact": "keyword2",
"gmp_fact": "keyword2",
"gmp_gcd": "keyword2",
"gmp_gcdext": "keyword2",
"gmp_hamdist": "keyword2",
"gmp_init": "keyword2",
"gmp_intval": "keyword2",
"gmp_invert": "keyword2",
"gmp_jacobi": "keyword2",
"gmp_legendre": "keyword2",
"gmp_mod": "keyword2",
"gmp_mul": "keyword2",
"gmp_neg": "keyword2",
"gmp_or": "keyword2",
"gmp_perfect_square": "keyword2",
"gmp_popcount": "keyword2",
"gmp_pow": "keyword2",
"gmp_powm": "keyword2",
"gmp_prob_prime": "keyword2",
"gmp_random": "keyword2",
"gmp_scan0": "keyword2",
"gmp_scan1": "keyword2",
"gmp_setbit": "keyword2",
"gmp_sign": "keyword2",
"gmp_sqrt": "keyword2",
"gmp_sqrtrem": "keyword2",
"gmp_strval": "keyword2",
"gmp_sub": "keyword2",
"gmp_xor": "keyword2",
"gmstrftime": "keyword2",
"gregoriantojd": "keyword2",
"gzclose": "keyword2",
"gzcompress": "keyword2",
"gzdeflate": "keyword2",
"gzencode": "keyword2",
"gzeof": "keyword2",
"gzfile": "keyword2",
"gzgetc": "keyword2",
"gzgets": "keyword2",
"gzgetss": "keyword2",
"gzinflate": "keyword2",
"gzopen": "keyword2",
"gzpassthru": "keyword2",
"gzputs": "keyword2",
"gzread": "keyword2",
"gzrewind": "keyword2",
"gzseek": "keyword2",
"gztell": "keyword2",
"gzuncompress": "keyword2",
"gzwrite": "keyword2",
"header": "keyword2",
"headers_list": "keyword2",
"headers_sent": "keyword2",
"hebrev": "keyword2",
"hebrevc": "keyword2",
"hexdec": "keyword2",
"highlight_file": "keyword2",
"highlight_string": "keyword2",
"htmlentities": "keyword2",
"htmlspecialchars": "keyword2",
"http_build_query": "keyword2",
"hw_array2objrec": "keyword2",
"hw_changeobject": "keyword2",
"hw_children": "keyword2",
"hw_childrenobj": "keyword2",
"hw_close": "keyword2",
"hw_connect": "keyword2",
"hw_connection_info": "keyword2",
"hw_cp": "keyword2",
"hw_deleteobject": "keyword2",
"hw_docbyanchor": "keyword2",
"hw_docbyanchorobj": "keyword2",
"hw_document_attributes": "keyword2",
"hw_document_bodytag": "keyword2",
"hw_document_content": "keyword2",
"hw_document_setcontent": "keyword2",
"hw_document_size": "keyword2",
"hw_dummy": "keyword2",
"hw_edittext": "keyword2",
"hw_error": "keyword2",
"hw_errormsg": "keyword2",
"hw_free_document": "keyword2",
"hw_getanchors": "keyword2",
"hw_getanchorsobj": "keyword2",
"hw_getandlock": "keyword2",
"hw_getcgi": "keyword2",
"hw_getchildcoll": "keyword2",
"hw_getchildcollobj": "keyword2",
"hw_getchilddoccoll": "keyword2",
"hw_getchilddoccollobj": "keyword2",
"hw_getobject": "keyword2",
"hw_getobjectbyftquery": "keyword2",
"hw_getobjectbyftquerycoll": "keyword2",
"hw_getobjectbyftquerycollobj": "keyword2",
"hw_getobjectbyftqueryobj": "keyword2",
"hw_getobjectbyquery": "keyword2",
"hw_getobjectbyquerycoll": "keyword2",
"hw_getobjectbyquerycollobj": "keyword2",
"hw_getobjectbyqueryobj": "keyword2",
"hw_getparents": "keyword2",
"hw_getparentsobj": "keyword2",
"hw_getrellink": "keyword2",
"hw_getremote": "keyword2",
"hw_getremotechildren": "keyword2",
"hw_getsrcbydestobj": "keyword2",
"hw_gettext": "keyword2",
"hw_getusername": "keyword2",
"hw_identify": "keyword2",
"hw_incollections": "keyword2",
"hw_info": "keyword2",
"hw_inscoll": "keyword2",
"hw_insdoc": "keyword2",
"hw_insertanchors": "keyword2",
"hw_insertdocument": "keyword2",
"hw_insertobject": "keyword2",
"hw_mapid": "keyword2",
"hw_modifyobject": "keyword2",
"hw_mv": "keyword2",
"hw_new_document": "keyword2",
"hw_new_document_from_file": "keyword2",
"hw_objrec2array": "keyword2",
"hw_output_document": "keyword2",
"hw_pconnect": "keyword2",
"hw_pipecgi": "keyword2",
"hw_pipedocument": "keyword2",
"hw_root": "keyword2",
"hw_setlinkroot": "keyword2",
"hw_stat": "keyword2",
"hw_unlock": "keyword2",
"hw_who": "keyword2",
"ibase_blob_add": "keyword2",
"ibase_blob_cancel": "keyword2",
"ibase_blob_close": "keyword2",
"ibase_blob_create": "keyword2",
"ibase_blob_echo": "keyword2",
"ibase_blob_get": "keyword2",
"ibase_blob_import": "keyword2",
"ibase_blob_info": "keyword2",
"ibase_blob_open": "keyword2",
"ibase_close": "keyword2",
"ibase_commit": "keyword2",
"ibase_connect": "keyword2",
"ibase_errmsg": "keyword2",
"ibase_execute": "keyword2",
"ibase_fetch_object": "keyword2",
"ibase_fetch_row": "keyword2",
"ibase_field_info": "keyword2",
"ibase_free_query": "keyword2",
"ibase_free_result": "keyword2",
"ibase_num_fields": "keyword2",
"ibase_pconnect": "keyword2",
"ibase_prepare": "keyword2",
"ibase_query": "keyword2",
"ibase_rollback": "keyword2",
"ibase_timefmt": "keyword2",
"ibase_trans": "keyword2",
"icap_create_calendar": "keyword2",
"icap_delete_calendar": "keyword2",
"icap_delete_event": "keyword2",
"icap_fetch_event": "keyword2",
"icap_list_alarms": "keyword2",
"icap_list_events": "keyword2",
"icap_open": "keyword2",
"icap_popen": "keyword2",
"icap_rename_calendar": "keyword2",
"icap_reopen": "keyword2",
"icap_snooze": "keyword2",
"icap_store_event": "keyword2",
"iconv": "keyword2",
"iconv_get_encoding": "keyword2",
"iconv_set_encoding": "keyword2",
"idate": "keyword2",
"if": "keyword1",
"ifx_affected_rows": "keyword2",
"ifx_blobinfile_mode": "keyword2",
"ifx_byteasvarchar": "keyword2",
"ifx_close": "keyword2",
"ifx_connect": "keyword2",
"ifx_copy_blob": "keyword2",
"ifx_create_blob": "keyword2",
"ifx_create_char": "keyword2",
"ifx_do": "keyword2",
"ifx_error": "keyword2",
"ifx_errormsg": "keyword2",
"ifx_fetch_row": "keyword2",
"ifx_fieldproperties": "keyword2",
"ifx_fieldtypes": "keyword2",
"ifx_free_blob": "keyword2",
"ifx_free_char": "keyword2",
"ifx_free_result": "keyword2",
"ifx_get_blob": "keyword2",
"ifx_get_char": "keyword2",
"ifx_getsqlca": "keyword2",
"ifx_htmltbl_result": "keyword2",
"ifx_nullformat": "keyword2",
"ifx_num_fields": "keyword2",
"ifx_num_rows": "keyword2",
"ifx_pconnect": "keyword2",
"ifx_prepare": "keyword2",
"ifx_query": "keyword2",
"ifx_textasvarchar": "keyword2",
"ifx_update_blob": "keyword2",
"ifx_update_char": "keyword2",
"ifxus_close_slob": "keyword2",
"ifxus_create_slob": "keyword2",
"ifxus_free_slob": "keyword2",
"ifxus_open_slob": "keyword2",
"ifxus_read_slob": "keyword2",
"ifxus_seek_slob": "keyword2",
"ifxus_tell_slob": "keyword2",
"ifxus_write_slob": "keyword2",
"ignore_user_abort": "keyword2",
"iis_addserver": "keyword2",
"iis_getdirsecurity": "keyword2",
"iis_getscriptmap": "keyword2",
"iis_getserverbycomment": "keyword2",
"iis_getserverbypath": "keyword2",
"iis_getserverright": "keyword2",
"iis_getservicestate": "keyword2",
"iis_removeserver": "keyword2",
"iis_setappsettings": "keyword2",
"iis_setdirsecurity": "keyword2",
"iis_setscriptmap": "keyword2",
"iis_setserverright": "keyword2",
"iis_startserver": "keyword2",
"iis_startservice": "keyword2",
"iis_stopserver": "keyword2",
"iis_stopservice": "keyword2",
"image2wbmp": "keyword2",
"image_type_to_extension": "keyword2",
"imagealphablending": "keyword2",
"imagearc": "keyword2",
"imagechar": "keyword2",
"imagecharup": "keyword2",
"imagecolorallocate": "keyword2",
"imagecolorat": "keyword2",
"imagecolorclosest": "keyword2",
"imagecolorclosestalpha": "keyword2",
"imagecolorclosesthwb": "keyword2",
"imagecolordeallocate": "keyword2",
"imagecolorexact": "keyword2",
"imagecolorexactalpha": "keyword2",
"imagecolormatch": "keyword2",
"imagecolorresolve": "keyword2",
"imagecolorresolvealpha": "keyword2",
"imagecolorset": "keyword2",
"imagecolorsforindex": "keyword2",
"imagecolorstotal": "keyword2",
"imagecolortransparent": "keyword2",
"imagecopy": "keyword2",
"imagecopymerge": "keyword2",
"imagecopymergegray": "keyword2",
"imagecopyresampled": "keyword2",
"imagecopyresized": "keyword2",
"imagecreate": "keyword2",
"imagecreatefromgif": "keyword2",
"imagecreatefromjpeg": "keyword2",
"imagecreatefrompng": "keyword2",
"imagecreatefromstring": "keyword2",
"imagecreatefromwbmp": "keyword2",
"imagecreatefromxbm": "keyword2",
"imagecreatefromxpm": "keyword2",
"imagecreatetruecolor": "keyword2",
"imagedashedline": "keyword2",
"imagedestroy": "keyword2",
"imageellipse": "keyword2",
"imagefill": "keyword2",
"imagefilledarc": "keyword2",
"imagefilledellipse": "keyword2",
"imagefilledpolygon": "keyword2",
"imagefilledrectangle": "keyword2",
"imagefilltoborder": "keyword2",
"imagefilter": "keyword2",
"imagefontheight": "keyword2",
"imagefontwidth": "keyword2",
"imagegammacorrect": "keyword2",
"imagegif": "keyword2",
"imageinterlace": "keyword2",
"imagejpeg": "keyword2",
"imagelayereffect": "keyword2",
"imageline": "keyword2",
"imageloadfont": "keyword2",
"imagepalettecopy": "keyword2",
"imagepng": "keyword2",
"imagepolygon": "keyword2",
"imagepsbbox": "keyword2",
"imagepscopyfont": "keyword2",
"imagepsencodefont": "keyword2",
"imagepsextendfont": "keyword2",
"imagepsfreefont": "keyword2",
"imagepsloadfont": "keyword2",
"imagepsslantfont": "keyword2",
"imagepstext": "keyword2",
"imagerectangle": "keyword2",
"imagerotate": "keyword2",
"imagesetbrush": "keyword2",
"imagesetpixel": "keyword2",
"imagesetstyle": "keyword2",
"imagesetthickness": "keyword2",
"imagesettile": "keyword2",
"imagestring": "keyword2",
"imagestringup": "keyword2",
"imagesx": "keyword2",
"imagesy": "keyword2",
"imagetruecolortopalette": "keyword2",
"imagettfbbox": "keyword2",
"imagettftext": "keyword2",
"imagetypes": "keyword2",
"imagewbmp": "keyword2",
"imap_8bit": "keyword2",
"imap_alerts": "keyword2",
"imap_append": "keyword2",
"imap_base64": "keyword2",
"imap_binary": "keyword2",
"imap_body": "keyword2",
"imap_bodystruct": "keyword2",
"imap_check": "keyword2",
"imap_clearflag_full": "keyword2",
"imap_close": "keyword2",
"imap_create": "keyword2",
"imap_createmailbox": "keyword2",
"imap_delete": "keyword2",
"imap_deletemailbox": "keyword2",
"imap_errors": "keyword2",
"imap_expunge": "keyword2",
"imap_fetch_overview": "keyword2",
"imap_fetchbody": "keyword2",
"imap_fetchheader": "keyword2",
"imap_fetchstructure": "keyword2",
"imap_fetchtext": "keyword2",
"imap_get_quota": "keyword2",
"imap_getmailboxes": "keyword2",
"imap_getsubscribed": "keyword2",
"imap_header": "keyword2",
"imap_headerinfo": "keyword2",
"imap_headers": "keyword2",
"imap_last_error": "keyword2",
"imap_list": "keyword2",
"imap_listmailbox": "keyword2",
"imap_listsubscribed": "keyword2",
"imap_lsub": "keyword2",
"imap_mail": "keyword2",
"imap_mail_compose": "keyword2",
"imap_mail_copy": "keyword2",
"imap_mail_move": "keyword2",
"imap_mailboxmsginfo": "keyword2",
"imap_mime_header_decode": "keyword2",
"imap_msgno": "keyword2",
"imap_num_msg": "keyword2",
"imap_num_recent": "keyword2",
"imap_open": "keyword2",
"imap_ping": "keyword2",
"imap_popen": "keyword2",
"imap_qprint": "keyword2",
"imap_rename": "keyword2",
"imap_renamemailbox": "keyword2",
"imap_reopen": "keyword2",
"imap_rfc822_parse_adrlist": "keyword2",
"imap_rfc822_parse_headers": "keyword2",
"imap_rfc822_write_address": "keyword2",
"imap_scan": "keyword2",
"imap_scanmailbox": "keyword2",
"imap_search": "keyword2",
"imap_set_quota": "keyword2",
"imap_setflag_full": "keyword2",
"imap_sort": "keyword2",
"imap_status": "keyword2",
"imap_subscribe": "keyword2",
"imap_uid": "keyword2",
"imap_undelete": "keyword2",
"imap_unsubscribe": "keyword2",
"imap_utf7_decode": "keyword2",
"imap_utf7_encode": "keyword2",
"imap_utf8": "keyword2",
"implements": "keyword1",
"implode": "keyword2",
"in_array": "keyword2",
"include": "keyword1",
"include_once": "keyword1",
"ingres_autocommit": "keyword2",
"ingres_close": "keyword2",
"ingres_commit": "keyword2",
"ingres_connect": "keyword2",
"ingres_fetch_array": "keyword2",
"ingres_fetch_object": "keyword2",
"ingres_fetch_row": "keyword2",
"ingres_field_length": "keyword2",
"ingres_field_name": "keyword2",
"ingres_field_nullable": "keyword2",
"ingres_field_precision": "keyword2",
"ingres_field_scale": "keyword2",
"ingres_field_type": "keyword2",
"ingres_num_fields": "keyword2",
"ingres_num_rows": "keyword2",
"ingres_pconnect": "keyword2",
"ingres_query": "keyword2",
"ingres_rollback": "keyword2",
"ini_alter": "keyword2",
"ini_get": "keyword2",
"ini_restore": "keyword2",
"ini_set": "keyword2",
"instanceof": "operator",
"interface": "keyword1",
"intval": "keyword2",
"ip2long": "keyword2",
"iptcembed": "keyword2",
"iptcparse": "keyword2",
"ircg_channel_mode": "keyword2",
"ircg_disconnect": "keyword2",
"ircg_html_encode": "keyword2",
"ircg_ignore_add": "keyword2",
"ircg_ignore_del": "keyword2",
"ircg_is_conn_alive": "keyword2",
"ircg_join": "keyword2",
"ircg_kick": "keyword2",
"ircg_lookup_format_messages": "keyword2",
"ircg_msg": "keyword2",
"ircg_nick": "keyword2",
"ircg_nickname_escape": "keyword2",
"ircg_nickname_unescape": "keyword2",
"ircg_notice": "keyword2",
"ircg_part": "keyword2",
"ircg_pconnect": "keyword2",
"ircg_register_format_messages": "keyword2",
"ircg_set_current": "keyword2",
"ircg_topic": "keyword2",
"ircg_whois": "keyword2",
"is_array": "keyword2",
"is_bool": "keyword2",
"is_dir": "keyword2",
"is_double": "keyword2",
"is_executable": "keyword2",
"is_file": "keyword2",
"is_float": "keyword2",
"is_int": "keyword2",
"is_integer": "keyword2",
"is_link": "keyword2",
"is_long": "keyword2",
"is_null": "keyword2",
"is_numeric": "keyword2",
"is_object": "keyword2",
"is_readable": "keyword2",
"is_real": "keyword2",
"is_resource": "keyword2",
"is_scalar": "keyword2",
"is_string": "keyword2",
"is_subclass_of": "keyword2",
"is_uploaded_file": "keyword2",
"is_writable": "keyword2",
"is_writeable": "keyword2",
"isset": "keyword1",
"java_last_exception_clear": "keyword2",
"java_last_exception_get": "keyword2",
"jddayofweek": "keyword2",
"jdmonthname": "keyword2",
"jdtofrench": "keyword2",
"jdtogregorian": "keyword2",
"jdtojewish": "keyword2",
"jdtojulian": "keyword2",
"jdtounix": "keyword2",
"jewishtojd": "keyword2",
"join": "keyword2",
"jpeg2wbmp": "keyword2",
"juliantojd": "keyword2",
"key": "keyword2",
"krsort": "keyword2",
"ksort": "keyword2",
"labelframe": "keyword2",
"lcg_value": "keyword2",
"ldap_8859_to_t61": "keyword2",
"ldap_add": "keyword2",
"ldap_bind": "keyword2",
"ldap_close": "keyword2",
"ldap_compare": "keyword2",
"ldap_connect": "keyword2",
"ldap_count_entries": "keyword2",
"ldap_delete": "keyword2",
"ldap_dn2ufn": "keyword2",
"ldap_err2str": "keyword2",
"ldap_errno": "keyword2",
"ldap_error": "keyword2",
"ldap_explode_dn": "keyword2",
"ldap_first_attribute": "keyword2",
"ldap_first_entry": "keyword2",
"ldap_first_reference": "keyword2",
"ldap_free_result": "keyword2",
"ldap_get_attributes": "keyword2",
"ldap_get_dn": "keyword2",
"ldap_get_entries": "keyword2",
"ldap_get_option": "keyword2",
"ldap_get_values": "keyword2",
"ldap_get_values_len": "keyword2",
"ldap_list": "keyword2",
"ldap_mod_add": "keyword2",
"ldap_mod_del": "keyword2",
"ldap_mod_replace": "keyword2",
"ldap_modify": "keyword2",
"ldap_next_attribute": "keyword2",
"ldap_next_entry": "keyword2",
"ldap_next_reference": "keyword2",
"ldap_parse_reference": "keyword2",
"ldap_parse_result": "keyword2",
"ldap_read": "keyword2",
"ldap_rename": "keyword2",
"ldap_search": "keyword2",
"ldap_set_option": "keyword2",
"ldap_t61_to_8859": "keyword2",
"ldap_unbind": "keyword2",
"leak": "keyword2",
"levenshtein": "keyword2",
"link": "keyword2",
"linkinfo": "keyword2",
"list": "keyword1",
"listen": "keyword2",
"localeconv": "keyword2",
"localtime": "keyword2",
"log": "keyword2",
"log10": "keyword2",
"long2ip": "keyword2",
"lstat": "keyword2",
"ltrim": "keyword2",
"magic_quotes_runtime": "keyword2",
"mail": "keyword2",
"max": "keyword2",
"mb_convert_case": "keyword2",
"mb_strtolower": "keyword2",
"mb_strtoupper": "keyword2",
"mcal_append_event": "keyword2",
"mcal_close": "keyword2",
"mcal_create_calendar": "keyword2",
"mcal_date_compare": "keyword2",
"mcal_date_valid": "keyword2",
"mcal_day_of_week": "keyword2",
"mcal_day_of_year": "keyword2",
"mcal_days_in_month": "keyword2",
"mcal_delete_calendar": "keyword2",
"mcal_delete_event": "keyword2",
"mcal_event_add_attribute": "keyword2",
"mcal_event_init": "keyword2",
"mcal_event_set_alarm": "keyword2",
"mcal_event_set_category": "keyword2",
"mcal_event_set_class": "keyword2",
"mcal_event_set_description": "keyword2",
"mcal_event_set_end": "keyword2",
"mcal_event_set_recur_daily": "keyword2",
"mcal_event_set_recur_monthly_mday": "keyword2",
"mcal_event_set_recur_monthly_wday": "keyword2",
"mcal_event_set_recur_none": "keyword2",
"mcal_event_set_recur_weekly": "keyword2",
"mcal_event_set_recur_yearly": "keyword2",
"mcal_event_set_start": "keyword2",
"mcal_event_set_title": "keyword2",
"mcal_fetch_current_stream_event": "keyword2",
"mcal_fetch_event": "keyword2",
"mcal_is_leap_year": "keyword2",
"mcal_list_alarms": "keyword2",
"mcal_list_events": "keyword2",
"mcal_next_recurrence": "keyword2",
"mcal_open": "keyword2",
"mcal_popen": "keyword2",
"mcal_rename_calendar": "keyword2",
"mcal_reopen": "keyword2",
"mcal_snooze": "keyword2",
"mcal_store_event": "keyword2",
"mcal_time_valid": "keyword2",
"mcal_week_of_year": "keyword2",
"mcrypt_cbc": "keyword2",
"mcrypt_cfb": "keyword2",
"mcrypt_create_iv": "keyword2",
"mcrypt_decrypt": "keyword2",
"mcrypt_ecb": "keyword2",
"mcrypt_enc_get_algorithms_name": "keyword2",
"mcrypt_enc_get_block_size": "keyword2",
"mcrypt_enc_get_iv_size": "keyword2",
"mcrypt_enc_get_key_size": "keyword2",
"mcrypt_enc_get_modes_name": "keyword2",
"mcrypt_enc_get_supported_key_sizes": "keyword2",
"mcrypt_enc_is_block_algorithm": "keyword2",
"mcrypt_enc_is_block_algorithm_mode": "keyword2",
"mcrypt_enc_is_block_mode": "keyword2",
"mcrypt_enc_self_test": "keyword2",
"mcrypt_encrypt": "keyword2",
"mcrypt_generic": "keyword2",
"mcrypt_generic_deinit": "keyword2",
"mcrypt_generic_end": "keyword2",
"mcrypt_generic_init": "keyword2",
"mcrypt_get_block_size": "keyword2",
"mcrypt_get_cipher_name": "keyword2",
"mcrypt_get_iv_size": "keyword2",
"mcrypt_get_key_size": "keyword2",
"mcrypt_list_algorithms": "keyword2",
"mcrypt_list_modes": "keyword2",
"mcrypt_module_close": "keyword2",
"mcrypt_module_get_algo_block_size": "keyword2",
"mcrypt_module_get_algo_key_size": "keyword2",
"mcrypt_module_get_supported_key_sizes": "keyword2",
"mcrypt_module_is_block_algorithm": "keyword2",
"mcrypt_module_is_block_algorithm_mode": "keyword2",
"mcrypt_module_is_block_mode": "keyword2",
"mcrypt_module_open": "keyword2",
"mcrypt_module_self_test": "keyword2",
"mcrypt_ofb": "keyword2",
"md5": "keyword2",
"md5_file": "keyword2",
"mdecrypt_generic": "keyword2",
"metaphone": "keyword2",
"method_exists": "keyword2",
"mhash": "keyword2",
"mhash_count": "keyword2",
"mhash_get_block_size": "keyword2",
"mhash_get_hash_name": "keyword2",
"mhash_keygen_s2k": "keyword2",
"microtime": "keyword2",
"min": "keyword2",
"ming_setcubicthreshold": "keyword2",
"ming_setscale": "keyword2",
"mkdir": "keyword2",
"mktime": "keyword2",
"move": "keyword2",
"move_uploaded_file": "keyword2",
"movepen": "keyword2",
"movepento": "keyword2",
"moveto": "keyword2",
"msql": "keyword2",
"msql_affected_rows": "keyword2",
"msql_close": "keyword2",
"msql_connect": "keyword2",
"msql_create_db": "keyword2",
"msql_createdb": "keyword2",
"msql_data_seek": "keyword2",
"msql_db_query": "keyword2",
"msql_dbname": "keyword2",
"msql_drop_db": "keyword2",
"msql_dropdb": "keyword2",
"msql_error": "keyword2",
"msql_fetch_array": "keyword2",
"msql_fetch_field": "keyword2",
"msql_fetch_object": "keyword2",
"msql_fetch_row": "keyword2",
"msql_field_flags": "keyword2",
"msql_field_len": "keyword2",
"msql_field_name": "keyword2",
"msql_field_seek": "keyword2",
"msql_field_table": "keyword2",
"msql_field_type": "keyword2",
"msql_fieldflags": "keyword2",
"msql_fieldlen": "keyword2",
"msql_fieldname": "keyword2",
"msql_fieldtable": "keyword2",
"msql_fieldtype": "keyword2",
"msql_free_result": "keyword2",
"msql_freeresult": "keyword2",
"msql_list_dbs": "keyword2",
"msql_list_fields": "keyword2",
"msql_list_tables": "keyword2",
"msql_listdbs": "keyword2",
"msql_listfields": "keyword2",
"msql_listtables": "keyword2",
"msql_num_fields": "keyword2",
"msql_num_rows": "keyword2",
"msql_numfields": "keyword2",
"msql_numrows": "keyword2",
"msql_pconnect": "keyword2",
"msql_query": "keyword2",
"msql_regcase": "keyword2",
"msql_result": "keyword2",
"msql_select_db": "keyword2",
"msql_selectdb": "keyword2",
"msql_tablename": "keyword2",
"mssql_affected_rows": "keyword2",
"mssql_close": "keyword2",
"mssql_connect": "keyword2",
"mssql_data_seek": "keyword2",
"mssql_fetch_array": "keyword2",
"mssql_fetch_batch": "keyword2",
"mssql_fetch_field": "keyword2",
"mssql_fetch_object": "keyword2",
"mssql_fetch_row": "keyword2",
"mssql_field_length": "keyword2",
"mssql_field_name": "keyword2",
"mssql_field_seek": "keyword2",
"mssql_field_type": "keyword2",
"mssql_free_result": "keyword2",
"mssql_get_last_message": "keyword2",
"mssql_min_client_severity": "keyword2",
"mssql_min_error_severity": "keyword2",
"mssql_min_message_severity": "keyword2",
"mssql_min_server_severity": "keyword2",
"mssql_next_result": "keyword2",
"mssql_num_fields": "keyword2",
"mssql_num_rows": "keyword2",
"mssql_pconnect": "keyword2",
"mssql_query": "keyword2",
"mssql_result": "keyword2",
"mssql_rows_affected": "keyword2",
"mssql_select_db": "keyword2",
"mt_getrandmax": "keyword2",
"mt_rand": "keyword2",
"mt_srand": "keyword2",
"multcolor": "keyword2",
"muscat_close": "keyword2",
"muscat_get": "keyword2",
"muscat_give": "keyword2",
"muscat_setup": "keyword2",
"muscat_setup_net": "keyword2",
"mysql": "keyword2",
"mysql_affected_rows": "keyword2",
"mysql_close": "keyword2",
"mysql_connect": "keyword2",
"mysql_create_db": "keyword2",
"mysql_createdb": "keyword2",
"mysql_data_seek": "keyword2",
"mysql_db_name": "keyword2",
"mysql_db_query": "keyword2",
"mysql_dbname": "keyword2",
"mysql_drop_db": "keyword2",
"mysql_dropdb": "keyword2",
"mysql_errno": "keyword2",
"mysql_error": "keyword2",
"mysql_escape_string": "keyword2",
"mysql_fetch_array": "keyword2",
"mysql_fetch_assoc": "keyword2",
"mysql_fetch_field": "keyword2",
"mysql_fetch_lengths": "keyword2",
"mysql_fetch_object": "keyword2",
"mysql_fetch_row": "keyword2",
"mysql_field_flags": "keyword2",
"mysql_field_len": "keyword2",
"mysql_field_name": "keyword2",
"mysql_field_seek": "keyword2",
"mysql_field_table": "keyword2",
"mysql_field_type": "keyword2",
"mysql_fieldflags": "keyword2",
"mysql_fieldlen": "keyword2",
"mysql_fieldname": "keyword2",
"mysql_fieldtable": "keyword2",
"mysql_fieldtype": "keyword2",
"mysql_free_result": "keyword2",
"mysql_freeresult": "keyword2",
"mysql_get_client_info": "keyword2",
"mysql_get_host_info": "keyword2",
"mysql_get_proto_info": "keyword2",
"mysql_get_server_info": "keyword2",
"mysql_insert_id": "keyword2",
"mysql_list_dbs": "keyword2",
"mysql_list_fields": "keyword2",
"mysql_list_tables": "keyword2",
"mysql_listdbs": "keyword2",
"mysql_listfields": "keyword2",
"mysql_listtables": "keyword2",
"mysql_num_fields": "keyword2",
"mysql_num_rows": "keyword2",
"mysql_numfields": "keyword2",
"mysql_numrows": "keyword2",
"mysql_pconnect": "keyword2",
"mysql_query": "keyword2",
"mysql_result": "keyword2",
"mysql_select_db": "keyword2",
"mysql_selectdb": "keyword2",
"mysql_tablename": "keyword2",
"mysql_unbuffered_query": "keyword2",
"natcasesort": "keyword2",
"natsort": "keyword2",
"new": "keyword1",
"new_xmldoc": "keyword2",
"next": "keyword2",
"nextframe": "keyword2",
"nl2br": "keyword2",
"notes_body": "keyword2",
"notes_copy_db": "keyword2",
"notes_create_db": "keyword2",
"notes_create_note": "keyword2",
"notes_drop_db": "keyword2",
"notes_find_note": "keyword2",
"notes_header_info": "keyword2",
"notes_list_msgs": "keyword2",
"notes_mark_read": "keyword2",
"notes_mark_unread": "keyword2",
"notes_nav_create": "keyword2",
"notes_search": "keyword2",
"notes_unread": "keyword2",
"notes_version": "keyword2",
"null": "keyword3",
"number_format": "keyword2",
"ob_end_clean": "keyword2",
"ob_end_flush": "keyword2",
"ob_get_clean": "keyword2",
"ob_get_contents": "keyword2",
"ob_get_flush": "keyword2",
"ob_get_length": "keyword2",
"ob_gzhandler": "keyword2",
"ob_iconv_handler": "keyword2",
"ob_implicit_flush": "keyword2",
"ob_list_handlers": "keyword2",
"ob_start": "keyword2",
"ocibindbyname": "keyword2",
"ocicancel": "keyword2",
"ocicollappend": "keyword2",
"ocicollassign": "keyword2",
"ocicollassignelem": "keyword2",
"ocicolldateappendelem": "keyword2",
"ocicolldateassignelem": "keyword2",
"ocicolldategetelem": "keyword2",
"ocicollgetelem": "keyword2",
"ocicollmax": "keyword2",
"ocicollsize": "keyword2",
"ocicolltrim": "keyword2",
"ocicolumnisnull": "keyword2",
"ocicolumnname": "keyword2",
"ocicolumnprecision": "keyword2",
"ocicolumnscale": "keyword2",
"ocicolumnsize": "keyword2",
"ocicolumntype": "keyword2",
"ocicolumntyperaw": "keyword2",
"ocicommit": "keyword2",
"ocidefinebyname": "keyword2",
"ocierror": "keyword2",
"ociexecute": "keyword2",
"ocifetch": "keyword2",
"ocifetchinto": "keyword2",
"ocifetchstatement": "keyword2",
"ocifreecoll": "keyword2",
"ocifreecursor": "keyword2",
"ocifreedesc": "keyword2",
"ocifreestatement": "keyword2",
"ociinternaldebug": "keyword2",
"ociloadlob": "keyword2",
"ocilogoff": "keyword2",
"ocilogon": "keyword2",
"ocinewcollection": "keyword2",
"ocinewcursor": "keyword2",
"ocinewdescriptor": "keyword2",
"ocinlogon": "keyword2",
"ocinumcols": "keyword2",
"ociparse": "keyword2",
"ociplogon": "keyword2",
"ociresult": "keyword2",
"ocirollback": "keyword2",
"ocirowcount": "keyword2",
"ocisavelob": "keyword2",
"ocisavelobfile": "keyword2",
"ociserverversion": "keyword2",
"ocisetprefetch": "keyword2",
"ocistatementtype": "keyword2",
"ociwritelobtofile": "keyword2",
"octdec": "keyword2",
"odbc_autocommit": "keyword2",
"odbc_binmode": "keyword2",
"odbc_close": "keyword2",
"odbc_close_all": "keyword2",
"odbc_columnprivileges": "keyword2",
"odbc_columns": "keyword2",
"odbc_commit": "keyword2",
"odbc_connect": "keyword2",
"odbc_cursor": "keyword2",
"odbc_data_source": "keyword2",
"odbc_do": "keyword2",
"odbc_error": "keyword2",
"odbc_errormsg": "keyword2",
"odbc_exec": "keyword2",
"odbc_execute": "keyword2",
"odbc_fetch_array": "keyword2",
"odbc_fetch_into": "keyword2",
"odbc_fetch_object": "keyword2",
"odbc_fetch_row": "keyword2",
"odbc_field_len": "keyword2",
"odbc_field_name": "keyword2",
"odbc_field_num": "keyword2",
"odbc_field_precision": "keyword2",
"odbc_field_scale": "keyword2",
"odbc_field_type": "keyword2",
"odbc_foreignkeys": "keyword2",
"odbc_free_result": "keyword2",
"odbc_gettypeinfo": "keyword2",
"odbc_longreadlen": "keyword2",
"odbc_next_result": "keyword2",
"odbc_num_fields": "keyword2",
"odbc_num_rows": "keyword2",
"odbc_pconnect": "keyword2",
"odbc_prepare": "keyword2",
"odbc_primarykeys": "keyword2",
"odbc_procedurecolumns": "keyword2",
"odbc_procedures": "keyword2",
"odbc_result": "keyword2",
"odbc_result_all": "keyword2",
"odbc_rollback": "keyword2",
"odbc_setoption": "keyword2",
"odbc_specialcolumns": "keyword2",
"odbc_statistics": "keyword2",
"odbc_tableprivileges": "keyword2",
"odbc_tables": "keyword2",
"old_function": "keyword1",
"open": "keyword2",
"open_listen_sock": "keyword2",
"opendir": "keyword2",
"openlog": "keyword2",
"openssl_error_string": "keyword2",
"openssl_free_key": "keyword2",
"openssl_get_privatekey": "keyword2",
"openssl_get_publickey": "keyword2",
"openssl_pkcs7_decrypt": "keyword2",
"openssl_pkcs7_encrypt": "keyword2",
"openssl_pkcs7_sign": "keyword2",
"openssl_pkcs7_verify": "keyword2",
"openssl_seal": "keyword2",
"openssl_sign": "keyword2",
"openssl_verify": "keyword2",
"openssl_x509_checkpurpose": "keyword2",
"openssl_x509_free": "keyword2",
"openssl_x509_parse": "keyword2",
"openssl_x509_read": "keyword2",
"or": "operator",
"ora_bind": "keyword2",
"ora_close": "keyword2",
"ora_columnname": "keyword2",
"ora_columnsize": "keyword2",
"ora_columntype": "keyword2",
"ora_commit": "keyword2",
"ora_commitoff": "keyword2",
"ora_commiton": "keyword2",
"ora_do": "keyword2",
"ora_error": "keyword2",
"ora_errorcode": "keyword2",
"ora_exec": "keyword2",
"ora_fetch": "keyword2",
"ora_fetch_into": "keyword2",
"ora_getcolumn": "keyword2",
"ora_logoff": "keyword2",
"ora_logon": "keyword2",
"ora_numcols": "keyword2",
"ora_numrows": "keyword2",
"ora_open": "keyword2",
"ora_parse": "keyword2",
"ora_plogon": "keyword2",
"ora_rollback": "keyword2",
"orbit_caught_exception": "keyword2",
"orbit_exception_id": "keyword2",
"orbit_exception_value": "keyword2",
"orbit_get_repository_id": "keyword2",
"orbit_load_idl": "keyword2",
"ord": "keyword2",
"output": "keyword2",
"ovrimos_close": "keyword2",
"ovrimos_close_all": "keyword2",
"ovrimos_commit": "keyword2",
"ovrimos_connect": "keyword2",
"ovrimos_cursor": "keyword2",
"ovrimos_exec": "keyword2",
"ovrimos_execute": "keyword2",
"ovrimos_fetch_into": "keyword2",
"ovrimos_fetch_row": "keyword2",
"ovrimos_field_len": "keyword2",
"ovrimos_field_name": "keyword2",
"ovrimos_field_num": "keyword2",
"ovrimos_field_type": "keyword2",
"ovrimos_free_result": "keyword2",
"ovrimos_longreadlen": "keyword2",
"ovrimos_num_fields": "keyword2",
"ovrimos_num_rows": "keyword2",
"ovrimos_prepare": "keyword2",
"ovrimos_result": "keyword2",
"ovrimos_result_all": "keyword2",
"ovrimos_rollback": "keyword2",
"pack": "keyword2",
"parse_ini_file": "keyword2",
"parse_str": "keyword2",
"parse_url": "keyword2",
"passthru": "keyword2",
"pathinfo": "keyword2",
"pclose": "keyword2",
"pdf_add_annotation": "keyword2",
"pdf_add_bookmark": "keyword2",
"pdf_add_launchlink": "keyword2",
"pdf_add_locallink": "keyword2",
"pdf_add_note": "keyword2",
"pdf_add_outline": "keyword2",
"pdf_add_pdflink": "keyword2",
"pdf_add_thumbnail": "keyword2",
"pdf_add_weblink": "keyword2",
"pdf_arc": "keyword2",
"pdf_arcn": "keyword2",
"pdf_attach_file": "keyword2",
"pdf_begin_page": "keyword2",
"pdf_begin_pattern": "keyword2",
"pdf_begin_template": "keyword2",
"pdf_circle": "keyword2",
"pdf_clip": "keyword2",
"pdf_close": "keyword2",
"pdf_close_image": "keyword2",
"pdf_close_pdi": "keyword2",
"pdf_close_pdi_page": "keyword2",
"pdf_closepath": "keyword2",
"pdf_closepath_fill_stroke": "keyword2",
"pdf_closepath_stroke": "keyword2",
"pdf_concat": "keyword2",
"pdf_continue_text": "keyword2",
"pdf_curveto": "keyword2",
"pdf_delete": "keyword2",
"pdf_end_page": "keyword2",
"pdf_end_pattern": "keyword2",
"pdf_end_template": "keyword2",
"pdf_endpath": "keyword2",
"pdf_fill": "keyword2",
"pdf_fill_stroke": "keyword2",
"pdf_findfont": "keyword2",
"pdf_get_buffer": "keyword2",
"pdf_get_font": "keyword2",
"pdf_get_fontname": "keyword2",
"pdf_get_fontsize": "keyword2",
"pdf_get_image_height": "keyword2",
"pdf_get_image_width": "keyword2",
"pdf_get_parameter": "keyword2",
"pdf_get_pdi_parameter": "keyword2",
"pdf_get_pdi_value": "keyword2",
"pdf_get_value": "keyword2",
"pdf_initgraphics": "keyword2",
"pdf_lineto": "keyword2",
"pdf_makespotcolor": "keyword2",
"pdf_moveto": "keyword2",
"pdf_new": "keyword2",
"pdf_open": "keyword2",
"pdf_open_ccitt": "keyword2",
"pdf_open_file": "keyword2",
"pdf_open_gif": "keyword2",
"pdf_open_image": "keyword2",
"pdf_open_image_file": "keyword2",
"pdf_open_jpeg": "keyword2",
"pdf_open_memory_image": "keyword2",
"pdf_open_pdi": "keyword2",
"pdf_open_pdi_page": "keyword2",
"pdf_open_png": "keyword2",
"pdf_open_tiff": "keyword2",
"pdf_place_image": "keyword2",
"pdf_place_pdi_page": "keyword2",
"pdf_rect": "keyword2",
"pdf_restore": "keyword2",
"pdf_rotate": "keyword2",
"pdf_save": "keyword2",
"pdf_scale": "keyword2",
"pdf_set_border_color": "keyword2",
"pdf_set_border_dash": "keyword2",
"pdf_set_border_style": "keyword2",
"pdf_set_char_spacing": "keyword2",
"pdf_set_duration": "keyword2",
"pdf_set_font": "keyword2",
"pdf_set_horiz_scaling": "keyword2",
"pdf_set_info": "keyword2",
"pdf_set_info_author": "keyword2",
"pdf_set_info_creator": "keyword2",
"pdf_set_info_keywords": "keyword2",
"pdf_set_info_subject": "keyword2",
"pdf_set_info_title": "keyword2",
"pdf_set_leading": "keyword2",
"pdf_set_parameter": "keyword2",
"pdf_set_text_pos": "keyword2",
"pdf_set_text_rendering": "keyword2",
"pdf_set_text_rise": "keyword2",
"pdf_set_transition": "keyword2",
"pdf_set_value": "keyword2",
"pdf_set_word_spacing": "keyword2",
"pdf_setcolor": "keyword2",
"pdf_setdash": "keyword2",
"pdf_setflat": "keyword2",
"pdf_setfont": "keyword2",
"pdf_setgray": "keyword2",
"pdf_setgray_fill": "keyword2",
"pdf_setgray_stroke": "keyword2",
"pdf_setlinecap": "keyword2",
"pdf_setlinejoin": "keyword2",
"pdf_setlinewidth": "keyword2",
"pdf_setmatrix": "keyword2",
"pdf_setmiterlimit": "keyword2",
"pdf_setpolydash": "keyword2",
"pdf_setrgbcolor": "keyword2",
"pdf_setrgbcolor_fill": "keyword2",
"pdf_setrgbcolor_stroke": "keyword2",
"pdf_show": "keyword2",
"pdf_show_boxed": "keyword2",
"pdf_show_xy": "keyword2",
"pdf_skew": "keyword2",
"pdf_stringwidth": "keyword2",
"pdf_stroke": "keyword2",
"pdf_translate": "keyword2",
"pfpro_cleanup": "keyword2",
"pfpro_init": "keyword2",
"pfpro_process": "keyword2",
"pfpro_process_raw": "keyword2",
"pfpro_version": "keyword2",
"pfsockopen": "keyword2",
"pg_client_encoding": "keyword2",
"pg_clientencoding": "keyword2",
"pg_close": "keyword2",
"pg_cmdtuples": "keyword2",
"pg_connect": "keyword2",
"pg_convert": "keyword2",
"pg_dbname": "keyword2",
"pg_delete": "keyword2",
"pg_end_copy": "keyword2",
"pg_errormessage": "keyword2",
"pg_exec": "keyword2",
"pg_fetch_all": "keyword2",
"pg_fetch_array": "keyword2",
"pg_fetch_assoc": "keyword2",
"pg_fetch_object": "keyword2",
"pg_fetch_row": "keyword2",
"pg_fieldisnull": "keyword2",
"pg_fieldname": "keyword2",
"pg_fieldnum": "keyword2",
"pg_fieldprtlen": "keyword2",
"pg_fieldsize": "keyword2",
"pg_fieldtype": "keyword2",
"pg_freeresult": "keyword2",
"pg_get_notify": "keyword2",
"pg_get_pid": "keyword2",
"pg_getlastoid": "keyword2",
"pg_host": "keyword2",
"pg_insert": "keyword2",
"pg_loclose": "keyword2",
"pg_locreate": "keyword2",
"pg_loexport": "keyword2",
"pg_loimport": "keyword2",
"pg_loopen": "keyword2",
"pg_loread": "keyword2",
"pg_loreadall": "keyword2",
"pg_lounlink": "keyword2",
"pg_lowrite": "keyword2",
"pg_meta_data": "keyword2",
"pg_numfields": "keyword2",
"pg_numrows": "keyword2",
"pg_options": "keyword2",
"pg_pconnect": "keyword2",
"pg_ping": "keyword2",
"pg_port": "keyword2",
"pg_put_line": "keyword2",
"pg_result": "keyword2",
"pg_result_seek": "keyword2",
"pg_select": "keyword2",
"pg_set_client_encoding": "keyword2",
"pg_setclientencoding": "keyword2",
"pg_trace": "keyword2",
"pg_tty": "keyword2",
"pg_unescape_bytea": "keyword2",
"pg_untrace": "keyword2",
"pg_update": "keyword2",
"php_logo_guid": "keyword2",
"php_sapi_name": "keyword2",
"php_uname": "keyword2",
"phpcredits": "keyword2",
"phpinfo": "keyword2",
"phpversion": "keyword2",
"pi": "keyword2",
"png2wbmp": "keyword2",
"popen": "keyword2",
"pos": "keyword2",
"posix_ctermid": "keyword2",
"posix_getcwd": "keyword2",
"posix_getegid": "keyword2",
"posix_geteuid": "keyword2",
"posix_getgid": "keyword2",
"posix_getgrgid": "keyword2",
"posix_getgrnam": "keyword2",
"posix_getgroups": "keyword2",
"posix_getlogin": "keyword2",
"posix_getpgid": "keyword2",
"posix_getpgrp": "keyword2",
"posix_getpid": "keyword2",
"posix_getppid": "keyword2",
"posix_getpwnam": "keyword2",
"posix_getpwuid": "keyword2",
"posix_getrlimit": "keyword2",
"posix_getsid": "keyword2",
"posix_getuid": "keyword2",
"posix_isatty": "keyword2",
"posix_kill": "keyword2",
"posix_mkfifo": "keyword2",
"posix_setegid": "keyword2",
"posix_seteuid": "keyword2",
"posix_setgid": "keyword2",
"posix_setpgid": "keyword2",
"posix_setsid": "keyword2",
"posix_setuid": "keyword2",
"posix_times": "keyword2",
"posix_ttyname": "keyword2",
"posix_uname": "keyword2",
"pow": "keyword2",
"preg_grep": "keyword2",
"preg_match": "keyword2",
"preg_match_all": "keyword2",
"preg_quote": "keyword2",
"preg_replace": "keyword2",
"preg_replace_callback": "keyword2",
"preg_split": "keyword2",
"prev": "keyword2",
"print_r": "keyword2",
"printer_abort": "keyword2",
"printer_close": "keyword2",
"printer_create_brush": "keyword2",
"printer_create_dc": "keyword2",
"printer_create_font": "keyword2",
"printer_create_pen": "keyword2",
"printer_delete_brush": "keyword2",
"printer_delete_dc": "keyword2",
"printer_delete_font": "keyword2",
"printer_delete_pen": "keyword2",
"printer_draw_bmp": "keyword2",
"printer_draw_chord": "keyword2",
"printer_draw_elipse": "keyword2",
"printer_draw_line": "keyword2",
"printer_draw_pie": "keyword2",
"printer_draw_rectangle": "keyword2",
"printer_draw_roundrect": "keyword2",
"printer_draw_text": "keyword2",
"printer_end_doc": "keyword2",
"printer_end_page": "keyword2",
"printer_get_option": "keyword2",
"printer_list": "keyword2",
"printer_logical_fontheight": "keyword2",
"printer_open": "keyword2",
"printer_select_brush": "keyword2",
"printer_select_font": "keyword2",
"printer_select_pen": "keyword2",
"printer_set_option": "keyword2",
"printer_start_doc": "keyword2",
"printer_start_page": "keyword2",
"printer_write": "keyword2",
"printf": "keyword2",
"private": "keyword1",
"protected": "keyword1",
"pspell_add_to_personal": "keyword2",
"pspell_add_to_session": "keyword2",
"pspell_check": "keyword2",
"pspell_clear_session": "keyword2",
"pspell_config_create": "keyword2",
"pspell_config_ignore": "keyword2",
"pspell_config_mode": "keyword2",
"pspell_config_personal": "keyword2",
"pspell_config_repl": "keyword2",
"pspell_config_runtogether": "keyword2",
"pspell_config_save_repl": "keyword2",
"pspell_new": "keyword2",
"pspell_new_config": "keyword2",
"pspell_new_personal": "keyword2",
"pspell_save_wordlist": "keyword2",
"pspell_store_replacement": "keyword2",
"pspell_suggest": "keyword2",
"public": "keyword1",
"putenv": "keyword2",
"qdom_error": "keyword2",
"qdom_tree": "keyword2",
"quoted_printable_decode": "keyword2",
"quotemeta": "keyword2",
"rad2deg": "keyword2",
"rand": "keyword2",
"range": "keyword2",
"rawurldecode": "keyword2",
"rawurlencode": "keyword2",
"read": "keyword2",
"read_exif_data": "keyword2",
"readdir": "keyword2",
"readfile": "keyword2",
"readgzfile": "keyword2",
"readline": "keyword2",
"readline_add_history": "keyword2",
"readline_clear_history": "keyword2",
"readline_completion_function": "keyword2",
"readline_info": "keyword2",
"readline_list_history": "keyword2",
"readline_read_history": "keyword2",
"readline_write_history": "keyword2",
"readlink": "keyword2",
"readv": "keyword2",
"realpath": "keyword2",
"recode": "keyword2",
"recode_file": "keyword2",
"recode_string": "keyword2",
"recv": "keyword2",
"recvfrom": "keyword2",
"recvmsg": "keyword2",
"register_shutdown_function": "keyword2",
"register_tick_function": "keyword2",
"remove": "keyword2",
"rename": "keyword2",
"require": "keyword1",
"require_once": "keyword1",
"reset": "keyword2",
"restore_error_handler": "keyword2",
"return": "keyword1",
"rewind": "keyword2",
"rewinddir": "keyword2",
"rmdir": "keyword2",
"rotate": "keyword2",
"rotateto": "keyword2",
"round": "keyword2",
"rsort": "keyword2",
"rtrim": "keyword2",
"satellite_caught_exception": "keyword2",
"satellite_exception_id": "keyword2",
"satellite_exception_value": "keyword2",
"satellite_get_repository_id": "keyword2",
"satellite_load_idl": "keyword2",
"save": "keyword2",
"savetofile": "keyword2",
"scale": "keyword2",
"scaleto": "keyword2",
"scandir": "keyword2",
"select": "keyword2",
"sem_acquire": "keyword2",
"sem_get": "keyword2",
"sem_release": "keyword2",
"send": "keyword2",
"sendmsg": "keyword2",
"sendto": "keyword2",
"serialize": "keyword2",
"session_cache_limiter": "keyword2",
"session_decode": "keyword2",
"session_destroy": "keyword2",
"session_encode": "keyword2",
"session_get_cookie_params": "keyword2",
"session_id": "keyword2",
"session_is_registered": "keyword2",
"session_module_name": "keyword2",
"session_name": "keyword2",
"session_register": "keyword2",
"session_save_path": "keyword2",
"session_set_cookie_params": "keyword2",
"session_set_save_handler": "keyword2",
"session_start": "keyword2",
"session_unregister": "keyword2",
"session_unset": "keyword2",
"session_write_close": "keyword2",
"set_content": "keyword2",
"set_error_handler": "keyword2",
"set_file_buffer": "keyword2",
"set_iovec": "keyword2",
"set_magic_quotes_runtime": "keyword2",
"set_nonblock": "keyword2",
"set_socket_blocking": "keyword2",
"set_time_limit": "keyword2",
"setaction": "keyword2",
"setbackground": "keyword2",
"setbounds": "keyword2",
"setcolor": "keyword2",
"setcookie": "keyword2",
"setdepth": "keyword2",
"setdimension": "keyword2",
"setdown": "keyword2",
"setfont": "keyword2",
"setframes": "keyword2",
"setheight": "keyword2",
"sethit": "keyword2",
"setindentation": "keyword2",
"setleftfill": "keyword2",
"setleftmargin": "keyword2",
"setline": "keyword2",
"setlinespacing": "keyword2",
"setlocale": "keyword2",
"setmargins": "keyword2",
"setmatrix": "keyword2",
"setname": "keyword2",
"setover": "keyword2",
"setrate": "keyword2",
"setratio": "keyword2",
"setrightfill": "keyword2",
"setrightmargin": "keyword2",
"setsockopt": "keyword2",
"setspacing": "keyword2",
"settype": "keyword2",
"setup": "keyword2",
"sha1": "keyword2",
"sha1_file": "keyword2",
"shell_exec": "keyword2",
"shm_attach": "keyword2",
"shm_detach": "keyword2",
"shm_get_var": "keyword2",
"shm_put_var": "keyword2",
"shm_remove": "keyword2",
"shm_remove_var": "keyword2",
"shmop_close": "keyword2",
"shmop_delete": "keyword2",
"shmop_open": "keyword2",
"shmop_read": "keyword2",
"shmop_size": "keyword2",
"shmop_write": "keyword2",
"show_source": "keyword2",
"shuffle": "keyword2",
"shutdown": "keyword2",
"signal": "keyword2",
"similar_text": "keyword2",
"sin": "keyword2",
"sizeof": "keyword2",
"skewx": "keyword2",
"skewxto": "keyword2",
"skewy": "keyword2",
"skewyto": "keyword2",
"sleep": "keyword2",
"snmp_get_quick_print": "keyword2",
"snmp_set_quick_print": "keyword2",
"snmpget": "keyword2",
"snmprealwalk": "keyword2",
"snmpset": "keyword2",
"snmpwalk": "keyword2",
"snmpwalkoid": "keyword2",
"socket": "keyword2",
"socket_get_status": "keyword2",
"socket_set_blocking": "keyword2",
"socket_set_timeout": "keyword2",
"socketpair": "keyword2",
"sort": "keyword2",
"soundex": "keyword2",
"split": "keyword2",
"spliti": "keyword2",
"sprintf": "keyword2",
"sql_regcase": "keyword2",
"sqrt": "keyword2",
"srand": "keyword2",
"sscanf": "keyword2",
"ssl_open": "keyword2",
"stat": "keyword2",
"static": "keyword1",
"str_pad": "keyword2",
"str_repeat": "keyword2",
"str_replace": "keyword2",
"str_rot13": "keyword2",
"str_split": "keyword2",
"str_word_count": "keyword2",
"strcasecmp": "keyword2",
"strchr": "keyword2",
"strcmp": "keyword2",
"strcoll": "keyword2",
"strcspn": "keyword2",
"stream_context_create": "keyword2",
"stream_context_set_option": "keyword2",
"stream_context_set_params": "keyword2",
"stream_filter_append": "keyword2",
"stream_filter_prepend": "keyword2",
"stream_get_status": "keyword2",
"stream_select": "keyword2",
"stream_set_blocking": "keyword2",
"stream_set_timeout": "keyword2",
"streammp3": "keyword2",
"strerror": "keyword2",
"strftime": "keyword2",
"strip_tags": "keyword2",
"stripcslashes": "keyword2",
"stripos": "keyword2",
"stripslashes": "keyword2",
"stristr": "keyword2",
"strlen": "keyword2",
"strnatcasecmp": "keyword2",
"strnatcmp": "keyword2",
"strncasecmp": "keyword2",
"strncmp": "keyword2",
"strpbrk": "keyword2",
"strpos": "keyword2",
"strrchr": "keyword2",
"strrev": "keyword2",
"strrpos": "keyword2",
"strspn": "keyword2",
"strstr": "keyword2",
"strtok": "keyword2",
"strtolower": "keyword2",
"strtotime": "keyword2",
"strtoupper": "keyword2",
"strtr": "keyword2",
"strval": "keyword2",
"substr": "keyword2",
"substr_compare": "keyword2",
"substr_count": "keyword2",
"substr_replace": "keyword2",
"swf_actiongeturl": "keyword2",
"swf_actiongotoframe": "keyword2",
"swf_actiongotolabel": "keyword2",
"swf_actionnextframe": "keyword2",
"swf_actionplay": "keyword2",
"swf_actionprevframe": "keyword2",
"swf_actionsettarget": "keyword2",
"swf_actionstop": "keyword2",
"swf_actiontogglequality": "keyword2",
"swf_actionwaitforframe": "keyword2",
"swf_addbuttonrecord": "keyword2",
"swf_addcolor": "keyword2",
"swf_closefile": "keyword2",
"swf_definebitmap": "keyword2",
"swf_definefont": "keyword2",
"swf_defineline": "keyword2",
"swf_definepoly": "keyword2",
"swf_definerect": "keyword2",
"swf_definetext": "keyword2",
"swf_endbutton": "keyword2",
"swf_enddoaction": "keyword2",
"swf_endshape": "keyword2",
"swf_endsymbol": "keyword2",
"swf_fontsize": "keyword2",
"swf_fontslant": "keyword2",
"swf_fonttracking": "keyword2",
"swf_getbitmapinfo": "keyword2",
"swf_getfontinfo": "keyword2",
"swf_getframe": "keyword2",
"swf_labelframe": "keyword2",
"swf_lookat": "keyword2",
"swf_modifyobject": "keyword2",
"swf_mulcolor": "keyword2",
"swf_nextid": "keyword2",
"swf_oncondition": "keyword2",
"swf_openfile": "keyword2",
"swf_ortho": "keyword2",
"swf_ortho2": "keyword2",
"swf_perspective": "keyword2",
"swf_placeobject": "keyword2",
"swf_polarview": "keyword2",
"swf_popmatrix": "keyword2",
"swf_posround": "keyword2",
"swf_pushmatrix": "keyword2",
"swf_removeobject": "keyword2",
"swf_rotate": "keyword2",
"swf_scale": "keyword2",
"swf_setfont": "keyword2",
"swf_setframe": "keyword2",
"swf_shapearc": "keyword2",
"swf_shapecurveto": "keyword2",
"swf_shapecurveto3": "keyword2",
"swf_shapefillbitmapclip": "keyword2",
"swf_shapefillbitmaptile": "keyword2",
"swf_shapefilloff": "keyword2",
"swf_shapefillsolid": "keyword2",
"swf_shapelinesolid": "keyword2",
"swf_shapelineto": "keyword2",
"swf_shapemoveto": "keyword2",
"swf_showframe": "keyword2",
"swf_startbutton": "keyword2",
"swf_startdoaction": "keyword2",
"swf_startshape": "keyword2",
"swf_startsymbol": "keyword2",
"swf_textwidth": "keyword2",
"swf_translate": "keyword2",
"swf_viewport": "keyword2",
"swfaction": "keyword2",
"swfbitmap": "keyword2",
"swfbutton": "keyword2",
"swfbutton_keypress": "keyword2",
"swffill": "keyword2",
"swffont": "keyword2",
"swfgradient": "keyword2",
"swfmorph": "keyword2",
"swfmovie": "keyword2",
"swfshape": "keyword2",
"swfsprite": "keyword2",
"swftext": "keyword2",
"swftextfield": "keyword2",
"switch": "keyword1",
"sybase_affected_rows": "keyword2",
"sybase_close": "keyword2",
"sybase_connect": "keyword2",
"sybase_data_seek": "keyword2",
"sybase_fetch_array": "keyword2",
"sybase_fetch_field": "keyword2",
"sybase_fetch_object": "keyword2",
"sybase_fetch_row": "keyword2",
"sybase_field_seek": "keyword2",
"sybase_free_result": "keyword2",
"sybase_get_last_message": "keyword2",
"sybase_min_client_severity": "keyword2",
"sybase_min_error_severity": "keyword2",
"sybase_min_message_severity": "keyword2",
"sybase_min_server_severity": "keyword2",
"sybase_num_fields": "keyword2",
"sybase_num_rows": "keyword2",
"sybase_pconnect": "keyword2",
"sybase_query": "keyword2",
"sybase_result": "keyword2",
"sybase_select_db": "keyword2",
"symlink": "keyword2",
"syslog": "keyword2",
"system": "keyword2",
"tan": "keyword2",
"tempnam": "keyword2",
"textdomain": "keyword2",
"throw": "keyword1",
"time": "keyword2",
"time_nanosleep": "keyword2",
"tmpfile": "keyword2",
"touch": "keyword2",
"trigger_error": "keyword2",
"trim": "keyword2",
"true": "keyword3",
"try": "keyword1",
"uasort": "keyword2",
"ucfirst": "keyword2",
"ucwords": "keyword2",
"udm_add_search_limit": "keyword2",
"udm_alloc_agent": "keyword2",
"udm_api_version": "keyword2",
"udm_clear_search_limits": "keyword2",
"udm_errno": "keyword2",
"udm_error": "keyword2",
"udm_find": "keyword2",
"udm_free_agent": "keyword2",
"udm_free_ispell_data": "keyword2",
"udm_free_res": "keyword2",
"udm_get_doc_count": "keyword2",
"udm_get_res_field": "keyword2",
"udm_get_res_param": "keyword2",
"udm_load_ispell_data": "keyword2",
"udm_set_agent_param": "keyword2",
"uksort": "keyword2",
"umask": "keyword2",
"uniqid": "keyword2",
"unixtojd": "keyword2",
"unlink": "keyword2",
"unpack": "keyword2",
"unregister_tick_function": "keyword2",
"unserialize": "keyword2",
"unset": "keyword2",
"urldecode": "keyword2",
"urlencode": "keyword2",
"user_error": "keyword2",
"usleep": "keyword2",
"usort": "keyword2",
"utf8_decode": "keyword2",
"utf8_encode": "keyword2",
"var": "keyword1",
"var_dump": "keyword2",
"velocis_autocommit": "keyword2",
"velocis_close": "keyword2",
"velocis_commit": "keyword2",
"velocis_connect": "keyword2",
"velocis_exec": "keyword2",
"velocis_fetch": "keyword2",
"velocis_fieldname": "keyword2",
"velocis_fieldnum": "keyword2",
"velocis_freeresult": "keyword2",
"velocis_off_autocommit": "keyword2",
"velocis_result": "keyword2",
"velocis_rollback": "keyword2",
"virtual": "keyword2",
"vpopmail_add_alias_domain": "keyword2",
"vpopmail_add_alias_domain_ex": "keyword2",
"vpopmail_add_domain": "keyword2",
"vpopmail_add_domain_ex": "keyword2",
"vpopmail_add_user": "keyword2",
"vpopmail_auth_user": "keyword2",
"vpopmail_del_domain": "keyword2",
"vpopmail_del_domain_ex": "keyword2",
"vpopmail_del_user": "keyword2",
"vpopmail_error": "keyword2",
"vpopmail_passwd": "keyword2",
"vpopmail_set_user_quota": "keyword2",
"wddx_add_vars": "keyword2",
"wddx_deserialize": "keyword2",
"wddx_packet_end": "keyword2",
"wddx_packet_start": "keyword2",
"wddx_serialize_value": "keyword2",
"wddx_serialize_vars": "keyword2",
"while": "keyword1",
"wordwrap": "keyword2",
"write": "keyword2",
"writev": "keyword2",
"xml_error_string": "keyword2",
"xml_get_current_byte_index": "keyword2",
"xml_get_current_column_number": "keyword2",
"xml_get_current_line_number": "keyword2",
"xml_get_error_code": "keyword2",
"xml_parse": "keyword2",
"xml_parse_into_struct": "keyword2",
"xml_parser_create": "keyword2",
"xml_parser_create_ns": "keyword2",
"xml_parser_free": "keyword2",
"xml_parser_get_option": "keyword2",
"xml_parser_set_option": "keyword2",
"xml_set_character_data_handler": "keyword2",
"xml_set_default_handler": "keyword2",
"xml_set_element_handler": "keyword2",
"xml_set_end_namespace_decl_handler": "keyword2",
"xml_set_external_entity_ref_handler": "keyword2",
"xml_set_notation_decl_handler": "keyword2",
"xml_set_object": "keyword2",
"xml_set_processing_instruction_handler": "keyword2",
"xml_set_start_namespace_decl_handler": "keyword2",
"xml_set_unparsed_entity_decl_handler": "keyword2",
"xmldoc": "keyword2",
"xmldocfile": "keyword2",
"xmltree": "keyword2",
"xpath_eval": "keyword2",
"xpath_eval_expression": "keyword2",
"xptr_eval": "keyword2",
"xslt_closelog": "keyword2",
"xslt_create": "keyword2",
"xslt_errno": "keyword2",
"xslt_error": "keyword2",
"xslt_fetch_result": "keyword2",
"xslt_free": "keyword2",
"xslt_openlog": "keyword2",
"xslt_output_begintransform": "keyword2",
"xslt_output_endtransform": "keyword2",
"xslt_process": "keyword2",
"xslt_run": "keyword2",
"xslt_set_base": "keyword2",
"xslt_set_encoding": "keyword2",
"xslt_set_error_handler": "keyword2",
"xslt_set_sax_handler": "keyword2",
"xslt_set_scheme_handler": "keyword2",
"xslt_transform": "keyword2",
"yaz_addinfo": "keyword2",
"yaz_ccl_conf": "keyword2",
"yaz_ccl_parse": "keyword2",
"yaz_close": "keyword2",
"yaz_connect": "keyword2",
"yaz_database": "keyword2",
"yaz_element": "keyword2",
"yaz_errno": "keyword2",
"yaz_error": "keyword2",
"yaz_hits": "keyword2",
"yaz_itemorder": "keyword2",
"yaz_present": "keyword2",
"yaz_range": "keyword2",
"yaz_record": "keyword2",
"yaz_scan": "keyword2",
"yaz_scan_result": "keyword2",
"yaz_search": "keyword2",
"yaz_syntax": "keyword2",
"yaz_wait": "keyword2",
"yp_all": "keyword2",
"yp_cat": "keyword2",
"yp_err_string": "keyword2",
"yp_errno": "keyword2",
"yp_first": "keyword2",
"yp_get_default_domain": "keyword2",
"yp_master": "keyword2",
"yp_match": "keyword2",
"yp_next": "keyword2",
"yp_order": "keyword2",
"zend_logo_guid": "keyword2",
"zend_test_func": "keyword2",
"zend_version": "keyword2",
"zzip_close": "keyword2",
"zzip_closedir": "keyword2",
"zzip_entry_compressedsize": "keyword2",
"zzip_entry_compressionmethod": "keyword2",
"zzip_entry_filesize": "keyword2",
"zzip_entry_name": "keyword2",
"zzip_open": "keyword2",
"zzip_opendir": "keyword2",
"zzip_read": "keyword2",
"zzip_readdir": "keyword2",
}
# Keywords dict for phpsection_php_literal ruleset.
phpsection_php_literal_keywords_dict = {}
# Keywords dict for phpsection_javascript ruleset.
phpsection_javascript_keywords_dict = {}
# Keywords dict for phpsection_javascript_php ruleset.
phpsection_javascript_php_keywords_dict = {}
# Keywords dict for phpsection_phpdoc ruleset.
phpsection_phpdoc_keywords_dict = {
"@abstract": "label",
"@access": "label",
"@author": "label",
"@category": "label",
"@copyright": "label",
"@deprecated": "label",
"@example": "label",
"@filesource": "label",
"@final": "label",
"@global": "label",
"@id": "label",
"@ignore": "label",
"@inheritdoc": "label",
"@internal": "label",
"@license": "label",
"@link": "label",
"@name": "label",
"@package": "label",
"@param": "label",
"@return": "label",
"@see": "label",
"@since": "label",
"@source": "label",
"@static": "label",
"@staticvar": "label",
"@subpackage": "label",
"@toc": "label",
"@todo": "label",
"@tutorial": "label",
"@uses": "label",
"@var": "label",
"@version": "label",
}
# Dictionary of keywords dictionaries for phpsection mode.
# Maps each ruleset name to its keyword-coloring table; an empty table means
# that ruleset highlights no keywords of its own.
keywordsDictDict = {
    "phpsection_javascript": phpsection_javascript_keywords_dict,
    "phpsection_javascript_php": phpsection_javascript_php_keywords_dict,
    "phpsection_main": phpsection_main_keywords_dict,
    "phpsection_php": phpsection_php_keywords_dict,
    "phpsection_php_literal": phpsection_php_literal_keywords_dict,
    "phpsection_phpdoc": phpsection_phpdoc_keywords_dict,
    "phpsection_tags": phpsection_tags_keywords_dict,
    "phpsection_tags_literal": phpsection_tags_literal_keywords_dict,
}
# Rules for phpsection_main ruleset.
# Each rule tries to match at position i of line s and returns the number of
# characters consumed (0 on no match); rulesDict1 below maps a character to
# the candidate rules tried in order.

# PHP escapes: <?php ... ?>, <? ... ?> and <%= ... %> delegate to the
# "phpsection::php" ruleset.
def phpsection_rule0(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<?php", end="?>",
        delegate="phpsection::php")
def phpsection_rule1(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<?", end="?>",
        delegate="phpsection::php")
def phpsection_rule2(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<%=", end="%>",
        delegate="phpsection::php")
# HTML comment.
def phpsection_rule3(colorer, s, i):
    return colorer.match_span(s, i, kind="comment1", begin="<!--", end="-->")
# <SCRIPT LANGUAGE="PHP"> blocks are PHP, other <SCRIPT> blocks JavaScript.
def phpsection_rule4(colorer, s, i):
    return colorer.match_span_regexp(s, i, kind="markup", begin="<SCRIPT\\s+LANGUAGE=\"?PHP\"?>", end="</SCRIPT>",
        delegate="phpsection::php")
def phpsection_rule5(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<SCRIPT", end="</SCRIPT>",
        delegate="phpsection::javascript")
def phpsection_rule6(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<STYLE", end="</STYLE>",
        delegate="html::css")
# DOCTYPE and other <!...> declarations.
def phpsection_rule7(colorer, s, i):
    return colorer.match_span(s, i, kind="keyword2", begin="<!", end=">",
        delegate="xml::dtd-tags")
# Generic HTML tag.
def phpsection_rule8(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<", end=">",
        delegate="phpsection::tags")
# HTML entity, e.g. &amp;
def phpsection_rule9(colorer, s, i):
    return colorer.match_span(s, i, kind="literal2", begin="&", end=";",
        no_word_break=True)
# Rules dict for phpsection_main ruleset.
# NOTE(review): the "< " key routes the regexp SCRIPT rule — presumably a
# generator convention for regexp-begin rules; confirm against the colorer.
rulesDict1 = {
    "&": [phpsection_rule9,],
    "<": [phpsection_rule0, phpsection_rule1, phpsection_rule2, phpsection_rule3, phpsection_rule5, phpsection_rule6, phpsection_rule7, phpsection_rule8,],
    "< ": [phpsection_rule4,],
}
# Rules for phpsection_tags ruleset.
# Active inside an HTML tag: still honors PHP escapes, plus quoted attribute
# values and the = operator.
def phpsection_rule10(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<?php", end="?>",
        delegate="phpsection::php")
def phpsection_rule11(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<?", end="?>",
        delegate="phpsection::php")
def phpsection_rule12(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<%=", end="%>",
        delegate="phpsection::php")
# Double- and single-quoted attribute values; delegated so PHP escapes
# inside them are still recognized.
def phpsection_rule13(colorer, s, i):
    return colorer.match_span(s, i, kind="literal1", begin="\"", end="\"",
        delegate="phpsection::tags_literal")
def phpsection_rule14(colorer, s, i):
    return colorer.match_span(s, i, kind="literal1", begin="'", end="'",
        delegate="phpsection::tags_literal")
def phpsection_rule15(colorer, s, i):
    return colorer.match_plain_seq(s, i, kind="operator", seq="=")
# Rules dict for phpsection_tags ruleset.
rulesDict2 = {
    "\"": [phpsection_rule13,],
    "'": [phpsection_rule14,],
    "<": [phpsection_rule10, phpsection_rule11, phpsection_rule12,],
    "=": [phpsection_rule15,],
}
# Rules for phpsection_tags_literal ruleset.
# Active inside a quoted attribute value: only PHP escapes are recognized.
def phpsection_rule16(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<?php", end="?>",
        delegate="phpsection::php")
def phpsection_rule17(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<?", end="?>",
        delegate="phpsection::php")
def phpsection_rule18(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<%=", end="%>",
        delegate="phpsection::php")
# Rules dict for phpsection_tags_literal ruleset.
rulesDict3 = {
    "<": [phpsection_rule16, phpsection_rule17, phpsection_rule18,],
}
# Rules for phpsection_php ruleset.
def phpsection_rule19(colorer, s, i):
return colorer.match_span(s, i, kind="comment3", begin="/**", end="*/",
delegate="phpsection::phpdoc")
def phpsection_rule20(colorer, s, i):
return colorer.match_span(s, i, kind="comment1", begin="/*", end="*/")
def phpsection_rule21(colorer, s, i):
return colorer.match_span(s, i, kind="literal1", begin="\"", end="\"",
delegate="phpsection::php_literal")
def phpsection_rule22(colorer, s, i):
return colorer.match_span(s, i, kind="literal1", begin="'", end="'")
def phpsection_rule23(colorer, s, i):
return colorer.match_span(s, i, kind="literal1", begin="`", end="`",
delegate="phpsection::php_literal")
def phpsection_rule24(colorer, s, i):
return colorer.match_eol_span(s, i, kind="comment2", seq="//")
def phpsection_rule25(colorer, s, i):
return colorer.match_eol_span(s, i, kind="comment1", seq="#")
def phpsection_rule26(colorer, s, i):
return colorer.match_span_regexp(s, i, kind="literal1", begin="<<<[[:space:]'\"]*([[:alnum:]_]+)[[:space:]'\"]*", end="$1",
delegate="phpsection::php_literal")
def phpsection_rule27(colorer, s, i):
return colorer.match_mark_following(s, i, kind="keyword3", pattern="$")
def phpsection_rule28(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="=")
def phpsection_rule29(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="->")
def phpsection_rule30(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="!")
def phpsection_rule31(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq=">=")
def phpsection_rule32(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="<=")
def phpsection_rule33(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="=")
def phpsection_rule34(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="+")
def phpsection_rule35(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="-")
def phpsection_rule36(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="/")
def phpsection_rule37(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="*")
def phpsection_rule38(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq=">")
def phpsection_rule39(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="<")
def phpsection_rule40(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="%")
def phpsection_rule41(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="&")
def phpsection_rule42(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="|")
def phpsection_rule43(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="^")
def phpsection_rule44(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="~")
def phpsection_rule45(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq=".")
def phpsection_rule46(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="}")
def phpsection_rule47(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="{")
def phpsection_rule48(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq=",")
def phpsection_rule49(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq=";")
def phpsection_rule50(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="]")
def phpsection_rule51(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="[")
def phpsection_rule52(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="?")
def phpsection_rule53(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq="@")
def phpsection_rule54(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="operator", seq=":")
def phpsection_rule55(colorer, s, i):
return colorer.match_mark_previous(s, i, kind="function", pattern="(",
exclude_match=True)
def phpsection_rule56(colorer, s, i):
return colorer.match_keywords(s, i)
# Rules dict for phpsection_php ruleset.
rulesDict4 = {
"!": [phpsection_rule30,],
"\"": [phpsection_rule21,],
"#": [phpsection_rule25,],
"$": [phpsection_rule27,],
"%": [phpsection_rule40,],
"&": [phpsection_rule41,],
"'": [phpsection_rule22,],
"(": [phpsection_rule55,],
"*": [phpsection_rule37,],
"+": [phpsection_rule34,],
",": [phpsection_rule48,],
"-": [phpsection_rule29, phpsection_rule35,],
".": [phpsection_rule45,],
"/": [phpsection_rule19, phpsection_rule20, phpsection_rule24, phpsection_rule36,],
"0": [phpsection_rule56,],
"1": [phpsection_rule56,],
"2": [phpsection_rule56,],
"3": [phpsection_rule56,],
"4": [phpsection_rule56,],
"5": [phpsection_rule56,],
"6": [phpsection_rule56,],
"7": [phpsection_rule56,],
"8": [phpsection_rule56,],
"9": [phpsection_rule56,],
":": [phpsection_rule54,],
";": [phpsection_rule49,],
"<": [phpsection_rule32, phpsection_rule39,],
"< ": [phpsection_rule26,],
"=": [phpsection_rule28, phpsection_rule33,],
">": [phpsection_rule31, phpsection_rule38,],
"?": [phpsection_rule52,],
"@": [phpsection_rule53, phpsection_rule56,],
"A": [phpsection_rule56,],
"B": [phpsection_rule56,],
"C": [phpsection_rule56,],
"D": [phpsection_rule56,],
"E": [phpsection_rule56,],
"F": [phpsection_rule56,],
"G": [phpsection_rule56,],
"H": [phpsection_rule56,],
"I": [phpsection_rule56,],
"J": [phpsection_rule56,],
"K": [phpsection_rule56,],
"L": [phpsection_rule56,],
"M": [phpsection_rule56,],
"N": [phpsection_rule56,],
"O": [phpsection_rule56,],
"P": [phpsection_rule56,],
"Q": [phpsection_rule56,],
"R": [phpsection_rule56,],
"S": [phpsection_rule56,],
"T": [phpsection_rule56,],
"U": [phpsection_rule56,],
"V": [phpsection_rule56,],
"W": [phpsection_rule56,],
"X": [phpsection_rule56,],
"Y": [phpsection_rule56,],
"Z": [phpsection_rule56,],
"[": [phpsection_rule51,],
"]": [phpsection_rule50,],
"^": [phpsection_rule43,],
"_": [phpsection_rule56,],
"`": [phpsection_rule23,],
"a": [phpsection_rule56,],
"b": [phpsection_rule56,],
"c": [phpsection_rule56,],
"d": [phpsection_rule56,],
"e": [phpsection_rule56,],
"f": [phpsection_rule56,],
"g": [phpsection_rule56,],
"h": [phpsection_rule56,],
"i": [phpsection_rule56,],
"j": [phpsection_rule56,],
"k": [phpsection_rule56,],
"l": [phpsection_rule56,],
"m": [phpsection_rule56,],
"n": [phpsection_rule56,],
"o": [phpsection_rule56,],
"p": [phpsection_rule56,],
"q": [phpsection_rule56,],
"r": [phpsection_rule56,],
"s": [phpsection_rule56,],
"t": [phpsection_rule56,],
"u": [phpsection_rule56,],
"v": [phpsection_rule56,],
"w": [phpsection_rule56,],
"x": [phpsection_rule56,],
"y": [phpsection_rule56,],
"z": [phpsection_rule56,],
"{": [phpsection_rule47,],
"|": [phpsection_rule42,],
"}": [phpsection_rule46,],
"~": [phpsection_rule44,],
}
# Rules for phpsection_php_literal ruleset.
# Active inside double-quoted / backtick / heredoc PHP strings: only
# $variable interpolation is highlighted.
def phpsection_rule57(colorer, s, i):
    return colorer.match_mark_following(s, i, kind="keyword3", pattern="$")
# Rules dict for phpsection_php_literal ruleset.
rulesDict5 = {
    "$": [phpsection_rule57,],
}
# Rules for phpsection_javascript ruleset.
# Active just after "<SCRIPT": ">" ends the opening tag and switches to the
# javascript+php ruleset for the script body.
def phpsection_rule58(colorer, s, i):
    return colorer.match_seq(s, i, kind="markup", seq=">",
        delegate="phpsection::javascript+php")
# SRC= means an external script, so fall back to plain HTML coloring.
# NOTE(review): neither "javascript+php" nor "back_to_html" has an entry in
# rulesDictDict below — presumably resolved specially by the colorer; confirm.
def phpsection_rule59(colorer, s, i):
    return colorer.match_seq(s, i, kind="markup", seq="SRC=",
        delegate="phpsection::back_to_html")
# Rules dict for phpsection_javascript ruleset.
rulesDict6 = {
    ">": [phpsection_rule58,],
    "S": [phpsection_rule59,],
}
# Rules for phpsection_javascript_php ruleset.
# PHP escapes inside a <SCRIPT> body.
# NOTE(review): these delegate to "php::php" (the external php mode) while
# the analogous rules above delegate to "phpsection::php" — inconsistent,
# but faithful to the generated mode file; confirm before changing.
def phpsection_rule60(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<?php", end="?>",
        delegate="php::php")
def phpsection_rule61(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<?", end="?>",
        delegate="php::php")
def phpsection_rule62(colorer, s, i):
    return colorer.match_span(s, i, kind="markup", begin="<%=", end="%>",
        delegate="php::php")
# Rules dict for phpsection_javascript_php ruleset.
rulesDict7 = {
    "<": [phpsection_rule60, phpsection_rule61, phpsection_rule62,],
}
# Rules for phpsection_phpdoc ruleset.
def phpsection_rule63(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="comment3", seq="{")
def phpsection_rule64(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="comment3", seq="*")
def phpsection_rule65(colorer, s, i):
return colorer.match_span(s, i, kind="comment2", begin="<!--", end="-->")
def phpsection_rule66(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="comment3", seq="<<")
def phpsection_rule67(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="comment3", seq="<=")
def phpsection_rule68(colorer, s, i):
return colorer.match_plain_seq(s, i, kind="comment3", seq="< ")
def phpsection_rule69(colorer, s, i):
return colorer.match_span(s, i, kind="markup", begin="<", end=">",
delegate="xml::tags",
no_line_break=True)
def phpsection_rule70(colorer, s, i):
return colorer.match_keywords(s, i)
# Rules dict for phpsection_phpdoc ruleset.
rulesDict8 = {
"*": [phpsection_rule64,],
"0": [phpsection_rule70,],
"1": [phpsection_rule70,],
"2": [phpsection_rule70,],
"3": [phpsection_rule70,],
"4": [phpsection_rule70,],
"5": [phpsection_rule70,],
"6": [phpsection_rule70,],
"7": [phpsection_rule70,],
"8": [phpsection_rule70,],
"9": [phpsection_rule70,],
"<": [phpsection_rule65, phpsection_rule66, phpsection_rule67, phpsection_rule68, phpsection_rule69,],
"@": [phpsection_rule70,],
"A": [phpsection_rule70,],
"B": [phpsection_rule70,],
"C": [phpsection_rule70,],
"D": [phpsection_rule70,],
"E": [phpsection_rule70,],
"F": [phpsection_rule70,],
"G": [phpsection_rule70,],
"H": [phpsection_rule70,],
"I": [phpsection_rule70,],
"J": [phpsection_rule70,],
"K": [phpsection_rule70,],
"L": [phpsection_rule70,],
"M": [phpsection_rule70,],
"N": [phpsection_rule70,],
"O": [phpsection_rule70,],
"P": [phpsection_rule70,],
"Q": [phpsection_rule70,],
"R": [phpsection_rule70,],
"S": [phpsection_rule70,],
"T": [phpsection_rule70,],
"U": [phpsection_rule70,],
"V": [phpsection_rule70,],
"W": [phpsection_rule70,],
"X": [phpsection_rule70,],
"Y": [phpsection_rule70,],
"Z": [phpsection_rule70,],
"_": [phpsection_rule70,],
"a": [phpsection_rule70,],
"b": [phpsection_rule70,],
"c": [phpsection_rule70,],
"d": [phpsection_rule70,],
"e": [phpsection_rule70,],
"f": [phpsection_rule70,],
"g": [phpsection_rule70,],
"h": [phpsection_rule70,],
"i": [phpsection_rule70,],
"j": [phpsection_rule70,],
"k": [phpsection_rule70,],
"l": [phpsection_rule70,],
"m": [phpsection_rule70,],
"n": [phpsection_rule70,],
"o": [phpsection_rule70,],
"p": [phpsection_rule70,],
"q": [phpsection_rule70,],
"r": [phpsection_rule70,],
"s": [phpsection_rule70,],
"t": [phpsection_rule70,],
"u": [phpsection_rule70,],
"v": [phpsection_rule70,],
"w": [phpsection_rule70,],
"x": [phpsection_rule70,],
"y": [phpsection_rule70,],
"z": [phpsection_rule70,],
"{": [phpsection_rule63,],
}
# x.rulesDictDict for phpsection mode.
# Maps each ruleset name to its first-character dispatch table.
rulesDictDict = {
    "phpsection_javascript": rulesDict6,
    "phpsection_javascript_php": rulesDict7,
    "phpsection_main": rulesDict1,
    "phpsection_php": rulesDict4,
    "phpsection_php_literal": rulesDict5,
    "phpsection_phpdoc": rulesDict8,
    "phpsection_tags": rulesDict2,
    "phpsection_tags_literal": rulesDict3,
}
# Import dict for phpsection mode.
# Rulesets listed here pull in rules from other modes' rulesets.
importDict = {
    "phpsection_javascript_php": ["javascript::main",],
}
| [
"edreamleo@gmail.com"
] | edreamleo@gmail.com |
8c93ac27c41b2a3efd82d096114bdca0d51162be | e1a3c091bff1c0bb53c11098213ac6bda81929ae | /PBproject/wsgi.py | 94058416a6b14251e4adf39b616155cb558704b0 | [] | no_license | meet19435/Skin-Cancer-Detection | b7e5087a9714d8599e32e166d2f7252a26fa51ac | c8f8d6786e7225af141fa780f5d259886d4fc52f | refs/heads/master | 2023-04-25T13:47:06.619744 | 2021-05-11T14:32:57 | 2021-05-11T14:32:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 411 | py | """
WSGI config for PBproject project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's settings module before the application
# object is built (setdefault keeps any value already set in the env).
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'PBproject.settings')
# Module-level WSGI callable that servers (gunicorn, mod_wsgi, ...) look up.
application = get_wsgi_application()
| [
"67797541+HardikDudeja@users.noreply.github.com"
] | 67797541+HardikDudeja@users.noreply.github.com |
3ce35769eccbbe6a1cdc19716e182f123a530e48 | 2f04ca4624680bf8d43c2c341be3380f4ebad56c | /Ahmet Ergani 161044011/CSE321 Introduction to Algorithm Design/HW4/part1.py | e8641410b74c608bfac4bff8bd5035251b81e58a | [] | no_license | aErgani/Homeworks | af545caed2a7602f80cdfacb99985e3ce546c346 | a58a285552d3337a58cf45b15e8d0587077100da | refs/heads/master | 2022-12-05T11:43:34.750496 | 2020-08-20T12:07:16 | 2020-08-20T12:07:16 | 198,170,771 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,429 | py | def penalty(new_stop,last_stop): #Calculates the penalty between 2 stops
A = 200 - (new_stop - last_stop)
return A ** 2
def longTrip(last_stop,path):
A = [190,220,410,580,640,770,950,1100,1350]
if last_stop == 8: #means we reached the end
return
if last_stop == -1: #means this is the initial call
new_stop = 0
check = penalty(A[new_stop],0)
else:
new_stop = last_stop + 1
check = penalty(A[new_stop],A[last_stop])
if check == 0 : #means the path is perfect
path.append(new_stop)
longTrip(new_stop,path)
else:
while 1: #calculates other paths and compares it to previous path's penalty
print(last_stop," ",new_stop)
if new_stop == 8:
break
if last_stop == -1:
check2 = penalty(A[new_stop + 1],0)
else:
check2 = penalty(A[new_stop + 1],A[last_stop])
print(check," ",check2)
if check2 == 0:
check = check2
new_stop = new_stop + 1
break
if check2 <= check:
check = check2
new_stop = new_stop + 1
elif check2 > check:
break
path.append(new_stop)
longTrip(new_stop,path)
# Driver: compute the stop sequence for the whole trip starting from the
# virtual position -1 (mile 0) and print the chosen stop indices.
B = []
longTrip(-1,B)
print(B)
"noreply@github.com"
] | aErgani.noreply@github.com |
3fec723ba32b0afdc039f5ea66e8ebc970a3dc23 | 4a85fd5c3513edf10b37465a7f50dd5da77167af | /lenet_tensorboard_fixedpoint2.py | 68d786212f7777989ca47f411984b72de1bbecc6 | [] | no_license | Alireza1997/Summer17Research | 5a5bc5f44e8e99e73d16defc7a41a776be32d038 | 3a41193b6afbecce76025860ef59efc719b27bd8 | refs/heads/master | 2021-01-21T11:53:27.606741 | 2017-08-31T19:05:26 | 2017-08-31T19:05:26 | 102,030,502 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,591 | py | #=================Imports==================#
#get the mnist data
import input_data
mnist = input_data.read_data_sets('MNIST_data', one_hot=True) #one hot means it labels the classes by one hot, so 1 for the correct class, 0 for the rest
import tensorflow as tf
rnd = tf.load_op_library('fix_round_2.so')
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import sparse_ops
@ops.RegisterGradient("FixRound")
def _fix_round_grad(op, grad):
    """Gradient for the custom `fix_round` op (loaded from fix_round_2.so).

    Args:
      op: the forward `fix_round` Operation; inputs[0] is the tensor that was
        rounded and inputs[1] is the [IL, FL] fixed-point format tensor.
      grad: incoming gradient with respect to `fix_round`'s output.

    Returns:
      The gradient computed by the companion `fix_round_grad` kernel.
      NOTE(review): only one gradient is returned although the op has two
      inputs — confirm the format input intentionally receives no gradient.
    """
    return rnd.fix_round_grad(grad,op.inputs[0],op.inputs[1])
#=================Tensorboard setup=================#
#=================Function Decleration=================#
#creates weight variables with some noise
def weight_variable(shape, _name):
    """Create a weight Variable of `shape`, initialised with truncated-normal
    noise (stddev 0.1) to break symmetry.

    Fix: `_name` is now attached to the tf.Variable itself; previously it
    named only the truncated_normal initialiser op, so the variable appeared
    anonymously (as "Variable") in TensorBoard and checkpoints.
    """
    initial = tf.truncated_normal(shape, stddev=0.1)
    return tf.Variable(initial, name=_name)
#creates bias with a value slightly above 0
def bias_variable(shape, _name):
    """Create a bias Variable of `shape`, initialised slightly above zero
    (0.1) so ReLU units start active.

    Fix: `_name` is now attached to the tf.Variable itself; previously it
    named only the constant initialiser op, leaving the variable anonymous
    in TensorBoard and checkpoints.
    """
    initial = tf.constant(0.1, shape=shape)
    return tf.Variable(initial, name=_name)
#convolution with stride of 1 and no padding
def conv2d(x, W):
    """2-D convolution of input `x` with filter `W`: unit stride in every
    dimension and no padding (VALID)."""
    return tf.nn.conv2d(x, W, padding='VALID', strides=[1, 1, 1, 1])
#maxpooling with stride of 2 using a 2x2 filter and no padding
def max_pool_2x2(x):
    """2x2 max-pooling with stride 2 and no padding — halves each spatial
    dimension of `x`."""
    return tf.nn.max_pool(x, padding='VALID',
                          ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1])
def floatToFixPoint(tensor_, int_bits=5, frac_bits=15):
    """Quantise a float tensor to a signed fixed-point grid (straight
    rounding, values stay float).

    Generalised from the original hard-coded Q5.15: the defaults reproduce
    the old behaviour exactly — clip to [-32, 32] (2**5) and round to
    multiples of 1/32768 (2**-15).
    """
    limit = float(2 ** int_bits)   # saturation bound for the integer part
    scale = float(2 ** frac_bits)  # number of fractional steps per unit
    clipped = tf.clip_by_value(tensor_, -limit, limit)
    return tf.scalar_mul(1.0 / scale, tf.round(tf.scalar_mul(scale, clipped)))
#====================putting layers together====================#
#placeholders
x = tf.placeholder(tf.float32, shape=[None, 784], name = 'input') #flattened input image
y_ = tf.placeholder(tf.float32, shape= [None, 10], name = 'labels') #one-hot labels
# NOTE(review): y_conv is rebound to the readout tensor below, so this
# Variable is dead after graph construction — confirm it can be removed.
y_conv = tf.Variable(tf.zeros([1,10]))
# Fixed-point format fed to the custom fix_round op — presumably
# [integer-length, fraction-length] bits; verify against fix_round_2.so.
ILFL = tf.constant([5.,15.])
#convolution and pooling 1
with tf.name_scope("ConvPool1"):
    with tf.name_scope("Weights"):
        W_conv1 = weight_variable([5,5,1,20], 'W1') #5x5 filter, 1 input channel, 20 output channels
    with tf.name_scope("biases"):
        b_conv1 = bias_variable([20],'B1')
    with tf.name_scope("input"):
        # Inputs are quantised to fixed point before the first convolution.
        x_image = rnd.fix_round(tf.reshape(x, [-1,28,28,1]),ILFL) #28x28 input image with 1 color channel, the -1 just makes sure that the total size is kept constant during reshaping
    h_conv1 = tf.nn.relu(conv2d(x_image, W_conv1) + b_conv1) #convolution 1 (includes relu)
    h_pool1 = rnd.fix_round(max_pool_2x2(h_conv1),ILFL) #maxpool 1, quantised
#convolution and pooling 2
W_conv2 = weight_variable([5,5,20,40],'W2') #5x5 filter, 20 input channels, 40 output channels
b_conv2 = bias_variable([40],'B2')
h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2) #convolution 2 (includes relu)
h_pool2 = rnd.fix_round((max_pool_2x2(h_conv2)),ILFL) #maxpool 2, quantised
#fully connected
W_fc1 = weight_variable([4*4*40, 1000],'W3')#fully connected, 4x4x40 inputs to 1000 units
b_fc1 = bias_variable([1000],'B3')
h_pool2_flat = tf.reshape(h_pool2, [-1, 4*4*40]) #flatten the output of last layer so it is compatible for matrix multiplication
h_fc1 = rnd.fix_round(tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1),ILFL)
W_fc2 = weight_variable([1000, 500], 'W4')#fully connected, 1000 inputs to 500 units
b_fc2 = bias_variable([500], 'B4')
h_fc1_flat = tf.reshape(h_fc1, [-1, 1000]) #flatten the output of last layer so it is compatible for matrix multiplication
h_fc2 = rnd.fix_round(tf.nn.relu(tf.matmul(h_fc1_flat, W_fc2) + b_fc2),ILFL)
#dropout
#keep_prob = tf.placeholder(tf.float32) #probability of keeping a neuron (not dropping it out)
#h_fc1_drop = tf.nn.dropout(h_fc1, keep_prob) #dropout some of the neurons to prevent overfitting
#readout layer (logits; softmax is applied inside the loss below)
W_fc3 = weight_variable([500,10],'W5') #reduce the total outputs to 10 different classes
b_fc3 = bias_variable([10],'B5')
y_conv = rnd.fix_round((tf.matmul(h_fc2, W_fc3) + b_fc3),ILFL)
#=================delta setup==================#
W_old_c2 = tf.Variable(W_conv2.initialized_value(), name="old_W_conv2")
save_old_c2 = tf.assign(W_old_c2,W_conv2)
W_deltas_c2 = tf.Variable(tf.zeros([5,5,20,40]), name="deltas_c2")
save_delta_c2 = tf.assign(W_deltas_c2, tf.subtract(W_old_c2,W_conv2))
tf.summary.histogram("deltas_c2", W_deltas_c2)
W_deltas_abs_c2 = tf.Variable(tf.zeros([5,5,20,40]), name="deltas_c2_abs")
save_abs_delta_c2 = tf.assign(W_deltas_abs_c2, tf.abs(W_deltas_c2))
tf.summary.histogram("deltas_abs_c2", W_deltas_abs_c2)
tf.summary.scalar("deltas_abs_min_c2", tf.reduce_min(W_deltas_abs_c2))
tf.summary.scalar("deltas_abs_max_c2", tf.reduce_max(W_deltas_abs_c2))
tf.summary.scalar("deltas_mean_c2", tf.reduce_mean(W_deltas_abs_c2))
W_old_c1 = tf.Variable(W_conv1.initialized_value(), name="old_W_conv1")
save_old_c1 = tf.assign(W_old_c1,W_conv1)
W_deltas_c1 = tf.Variable(tf.zeros([5,5,1,20]), name="deltas_c1")
save_delta_c1 = tf.assign(W_deltas_c1, tf.subtract(W_old_c1,W_conv1))
tf.summary.histogram("deltas_c1", W_deltas_c1)
W_deltas_abs_c1 = tf.Variable(tf.zeros([5,5,1,20]), name="deltas_c1_abs")
save_abs_delta_c1 = tf.assign(W_deltas_abs_c1, tf.abs(W_deltas_c1))
tf.summary.histogram("deltas_abs_c1", W_deltas_abs_c1)
tf.summary.scalar("deltas_abs_min_c1", tf.reduce_min(W_deltas_abs_c1))
tf.summary.scalar("deltas_abs_max_c1", tf.reduce_max(W_deltas_abs_c1))
tf.summary.scalar("deltas_mean_c1", tf.reduce_mean(W_deltas_abs_c1))
W_old = tf.Variable(W_fc1.initialized_value(), name="old_W")
save_old = tf.assign(W_old,W_fc1)
W_deltas = tf.Variable(tf.zeros([4*4*40, 1000]), name="deltas_fc1")
save_delta = tf.assign(W_deltas, tf.subtract(W_old,W_fc1))
tf.summary.histogram("deltas_fc1", W_deltas)
W_deltas_abs = tf.Variable(tf.zeros([4*4*40, 1000]), name="deltas_fc1_abs")
save_abs_delta = tf.assign(W_deltas_abs, tf.abs(W_deltas))
tf.summary.histogram("deltas_abs_fc1", W_deltas_abs)
tf.summary.scalar("deltas_abs_min_fc1", tf.reduce_min(W_deltas_abs))
tf.summary.scalar("deltas_abs_max_fc1", tf.reduce_max(W_deltas_abs))
tf.summary.scalar("deltas_mean_fc1", tf.reduce_mean(W_deltas_abs))
W_old2 = tf.Variable(W_fc2.initialized_value(), name="old_W2")
save_old2 = tf.assign(W_old2,W_fc2)
W_deltas2 = tf.Variable(tf.zeros([1000, 500]), name="deltas_fc2")
save_delta2 = tf.assign(W_deltas2, tf.subtract(W_old2,W_fc2))
tf.summary.histogram("deltas_fc2", W_deltas2)
W_deltas2_abs = tf.Variable(tf.zeros([1000, 500]), name="deltas_fc2_abs")
save_abs_delta2 = tf.assign(W_deltas2_abs, tf.abs(W_deltas2))
tf.summary.histogram("deltas_abs_fc2", W_deltas2_abs)
tf.summary.scalar("deltas_abs_min_fc2", tf.reduce_min(W_deltas2_abs))
tf.summary.scalar("deltas_abs_max_fc2", tf.reduce_max(W_deltas2_abs))
tf.summary.scalar("deltas_mean_fc2", tf.reduce_mean(W_deltas2_abs))
W_old3 = tf.Variable(W_fc3.initialized_value(), name="old_W3")
save_old3 = tf.assign(W_old3,W_fc3)
W_deltas3 = tf.Variable(tf.zeros([500,10]), name="deltas_fc3")
save_delta3 = tf.assign(W_deltas3, tf.subtract(W_old3,W_fc3))
tf.summary.histogram("deltas_fc3", W_deltas3)
W_deltas3_abs = tf.Variable(tf.zeros([500, 10]), name="deltas_fc3_abs")
save_abs_delta3 = tf.assign(W_deltas3_abs, tf.abs(W_deltas3))
tf.summary.histogram("deltas_abs_fc3", W_deltas3_abs)
tf.summary.scalar("deltas_abs_min_fc3", tf.reduce_min(W_deltas3_abs))
tf.summary.scalar("deltas_abs_max_fc3", tf.reduce_max(W_deltas3_abs))
tf.summary.scalar("deltas_mean_fc3", tf.reduce_mean(W_deltas3_abs))
W_init = tf.Variable(W_fc2.initialized_value(), name="init_Wfc2")
save_W_init = tf.assign(W_init,tf.subtract(W_init,W_fc2))
tf.summary.histogram("W_c1", W_conv1)
tf.summary.histogram("W_c2", W_conv2)
tf.summary.histogram("W_fc1", W_fc1)
tf.summary.histogram("W_fc2", W_fc2)
tf.summary.histogram("W_fc3", W_fc3)
#===================training====================#
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y_conv))
train_step = tf.train.AdamOptimizer(1e-4).minimize(cross_entropy) #learning rate of 1e-4 using ADAM optimizer
correct_prediction = tf.equal(tf.argmax(y_conv,1), tf.argmax(y_,1)) #check if the predicted class matches labled class
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
tf.summary.scalar("accuracy", accuracy)
sess = tf.InteractiveSession() #an interactive session connects to the backend to use the highly efficient C++ computations
sess.run(tf.global_variables_initializer())
file_writer = tf.summary.FileWriter('/home/alireza/tensorflow/tutorials/logs', sess.graph)#setup the path for tensorflow logs
tf.summary.scalar("accuracy", accuracy)
merged = tf.summary.merge_all()
for i in range(2000):
batch = mnist.train.next_batch(50) #load a batch from the training set
if i%10 == 0:
train_accuracy = accuracy.eval(feed_dict={ x:batch[0], y_: batch[1]})
print ("step %d, training accuracy %g" %(i, train_accuracy))
# sess.run(save_old_c1)
# sess.run(save_old_c2)
# sess.run(save_old) #save W before it changes
# sess.run(save_old2)
# sess.run(save_old3)
#print sess.run(W_conv2)
train_step.run(feed_dict={x:batch[0], y_: batch[1]}) #run the training step using new values for x and y_, update weights
# sess.run(save_delta_c1)
# sess.run(save_abs_delta_c1)
# sess.run(save_delta_c2)
# sess.run(save_abs_delta_c2)
# sess.run(save_delta) #save the delta value after weights change
# sess.run(save_abs_delta)
# sess.run(save_delta2)
# sess.run(save_abs_delta2)
# sess.run(save_delta3)
# sess.run(save_abs_delta3)
#==========converting all weights and biases to fixed point========#
W_conv1 = rnd.fix_round(W_conv1,[1,1])
sess.run(W_conv1)
b_conv1 = rnd.fix_round(b_conv1,[1,1])
sess.run(b_conv1)
# sess.run(tf.assign(W_conv1,floatToFixPoint(W_conv1)))
# sess.run(tf.assign(b_conv1,floatToFixPoint(b_conv1)))
#h_conv1 = floatToFixPoint(h_conv1)
#h_pool1 = floatToFixPoint(h_pool1)
# print sess.run(W_conv2)
W_conv2 = rnd.fix_round(W_conv2,[1,1])
sess.run(W_conv2)
b_conv2 = rnd.fix_round(b_conv2,[1,1])
sess.run(b_conv2)
# print sess.run(W_conv2)
# sess.run(tf.assign(W_conv2 ,floatToFixPoint(W_conv2)))
# #print sess.run(W_conv2)
# sess.run(tf.assign(b_conv2 ,floatToFixPoint(b_conv2)))
#h_conv2 = floatToFixPoint(h_conv2)
#h_pool2 = floatToFixPoint(h_pool2)
W_fc1 = rnd.fix_round(W_fc1,[1,1])
b_fc1 = rnd.fix_round(b_fc1,[1,1])
# sess.run(tf.assign(W_fc1 ,floatToFixPoint(W_fc1)))
# sess.run(tf.assign(b_fc1, floatToFixPoint(b_fc1)))
#h_pool2_flat = floatToFixPoint(h_pool2_flat)
#h_fc1 = floatToFixPoint(h_fc1)
W_fc2 = rnd.fix_round(W_fc2,[1,1])
b_fc2 = rnd.fix_round(b_fc2,[1,1])
# sess.run(tf.assign(W_fc2 ,floatToFixPoint(W_fc2)))
# sess.run(tf.assign(b_fc2, floatToFixPoint(b_fc2)))
#h_fc1_flat = floatToFixPoint(h_fc1_flat)
#h_fc2 = floatToFixPoint(h_fc2)
W_fc3 = rnd.fix_round(W_fc3,[1,1])
b_fc3 = rnd.fix_round(b_fc3,[1,1])
y_conv = rnd.fix_round(y_conv,[1,1])
# sess.run(tf.assign(W_fc3 ,floatToFixPoint(W_fc3)))
# sess.run(tf.assign(b_fc3,floatToFixPoint(b_fc3)))
# y_conv = floatToFixPoint(y_conv)
# W_conv1 = floatToFixPoint(W_conv1)
# b_conv1 = floatToFixPoint(b_conv1)
# h_conv1 = floatToFixPoint(h_conv1)
# h_pool1 = floatToFixPoint(h_pool1)
# #print sess.run(W_conv2)
# W_conv2 = floatToFixPoint(W_conv2)
# #print sess.run(W_conv2)
# b_conv2 = floatToFixPoint(b_conv2)
# h_conv2 = floatToFixPoint(h_conv2)
# h_pool2 = floatToFixPoint(h_pool2)
# W_fc1 = floatToFixPoint(W_fc1)
# b_fc1 = floatToFixPoint(b_fc1)
# h_pool2_flat = floatToFixPoint(h_pool2_flat)
# h_fc1 = floatToFixPoint(h_fc1)
# W_fc2 = floatToFixPoint(W_fc2)
# b_fc2 = floatToFixPoint(b_fc2)
# h_fc1_flat = floatToFixPoint(h_fc1_flat)
# h_fc2 = floatToFixPoint(h_fc2)
# W_fc3 = floatToFixPoint(W_fc3)
# b_fc3 = floatToFixPoint(b_fc3)
# y_conv = floatToFixPoint(y_conv)
if i < 2000:
result = sess.run(merged, feed_dict={ x:batch[0], y_: batch[1]})
file_writer.add_summary(result, i)
#print (sess.run(tf.nn.softmax(y_conv), feed_dict={ x:batch[0], y_:batch[1]}))
#print (batch[1])
if i%100 == 0:
print sess.run(W_fc2)
print sess.run(tf.subtract(W_init,W_fc2))
#print (sess.run(W_fc1)) #print out the deltas every 100 steps
#print (sess.run(W_deltas_abs))
print ("test accuracy %g" %accuracy.eval(feed_dict={ x: mnist.test.images, y_: mnist.test.labels}))
print sess.run(W_fc2)
print(sess.run(save_W_init))
print(sess.run(tf.divide(W_init,2000)))
| [
"alireza.nik1997@gmail.com"
] | alireza.nik1997@gmail.com |
68d6ace739c23eac0a7a49732010295ed54812da | e3cd5c6dd66471c2eaabd0bba27b101b89b26bed | /sparx/tornado/testing.py | 74d04b6000b8f2ac61a3bf2793e1cfe27cc3098a | [
"MIT"
] | permissive | CleverInsight/sparx-core | 6902a1dfccda6befba6426d096a043d9d7e99ee1 | b0f1c73df67921408ef63ec7ac017a476d5feceb | refs/heads/master | 2023-08-24T01:01:22.865172 | 2019-06-16T13:52:07 | 2019-06-16T13:52:07 | 189,747,326 | 2 | 0 | MIT | 2023-08-14T21:35:48 | 2019-06-01T15:16:24 | Python | UTF-8 | Python | false | false | 28,046 | py | #!/usr/bin/env python
"""Support classes for automated testing.
* `AsyncTestCase` and `AsyncHTTPTestCase`: Subclasses of unittest.TestCase
with additional support for testing asynchronous (`.IOLoop`-based) code.
* `ExpectLog` and `LogTrapTestCase`: Make test logs less spammy.
* `main()`: A simple test runner (wrapper around unittest.main()) with support
for the tornado.autoreload module to rerun the tests when code changes.
"""
from __future__ import absolute_import, division, print_function
try:
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
from tornado.httpserver import HTTPServer
from tornado.simple_httpclient import SimpleAsyncHTTPClient
from tornado.ioloop import IOLoop, TimeoutError
from tornado import netutil
from tornado.process import Subprocess
except ImportError:
# These modules are not importable on app engine. Parts of this module
# won't work, but e.g. LogTrapTestCase and main() will.
AsyncHTTPClient = None # type: ignore
gen = None # type: ignore
HTTPServer = None # type: ignore
IOLoop = None # type: ignore
netutil = None # type: ignore
SimpleAsyncHTTPClient = None # type: ignore
Subprocess = None # type: ignore
from tornado.log import gen_log, app_log
from tornado.stack_context import ExceptionStackContext
from tornado.util import raise_exc_info, basestring_type, PY3
import functools
import inspect
import logging
import os
import re
import signal
import socket
import sys
if PY3:
from io import StringIO
else:
from cStringIO import StringIO
try:
from collections.abc import Generator as GeneratorType # type: ignore
except ImportError:
from types import GeneratorType # type: ignore
if sys.version_info >= (3, 5):
iscoroutine = inspect.iscoroutine # type: ignore
iscoroutinefunction = inspect.iscoroutinefunction # type: ignore
else:
iscoroutine = iscoroutinefunction = lambda f: False
# Tornado's own test suite requires the updated unittest module
# (either py27+ or unittest2) so tornado.test.util enforces
# this requirement, but for other users of tornado.testing we want
# to allow the older version if unitest2 is not available.
if PY3:
# On python 3, mixing unittest2 and unittest (including doctest)
# doesn't seem to work, so always use unittest.
import unittest
else:
# On python 2, prefer unittest2 when available.
try:
import unittest2 as unittest # type: ignore
except ImportError:
import unittest # type: ignore
_next_port = 10000
def get_unused_port():
"""Returns a (hopefully) unused port number.
This function does not guarantee that the port it returns is available,
only that a series of get_unused_port calls in a single process return
distinct ports.
.. deprecated::
Use bind_unused_port instead, which is guaranteed to find an unused port.
"""
global _next_port
port = _next_port
_next_port = _next_port + 1
return port
def bind_unused_port(reuse_port=False):
"""Binds a server socket to an available port on localhost.
Returns a tuple (socket, port).
.. versionchanged:: 4.4
Always binds to ``127.0.0.1`` without resolving the name
``localhost``.
"""
sock = netutil.bind_sockets(None, '127.0.0.1', family=socket.AF_INET,
reuse_port=reuse_port)[0]
port = sock.getsockname()[1]
return sock, port
def get_async_test_timeout():
"""Get the global timeout setting for async tests.
Returns a float, the timeout in seconds.
.. versionadded:: 3.1
"""
try:
return float(os.environ.get('ASYNC_TEST_TIMEOUT'))
except (ValueError, TypeError):
return 5
class _TestMethodWrapper(object):
"""Wraps a test method to raise an error if it returns a value.
This is mainly used to detect undecorated generators (if a test
method yields it must use a decorator to consume the generator),
but will also detect other kinds of return values (these are not
necessarily errors, but we alert anyway since there is no good
reason to return a value from a test).
"""
def __init__(self, orig_method):
self.orig_method = orig_method
def __call__(self, *args, **kwargs):
result = self.orig_method(*args, **kwargs)
if isinstance(result, GeneratorType) or iscoroutine(result):
raise TypeError("Generator and coroutine test methods should be"
" decorated with tornado.testing.gen_test")
elif result is not None:
raise ValueError("Return value from test method ignored: %r" %
result)
def __getattr__(self, name):
"""Proxy all unknown attributes to the original method.
This is important for some of the decorators in the `unittest`
module, such as `unittest.skipIf`.
"""
return getattr(self.orig_method, name)
class AsyncTestCase(unittest.TestCase):
"""`~unittest.TestCase` subclass for testing `.IOLoop`-based
asynchronous code.
The unittest framework is synchronous, so the test must be
complete by the time the test method returns. This means that
asynchronous code cannot be used in quite the same way as usual.
To write test functions that use the same ``yield``-based patterns
used with the `tornado.gen` module, decorate your test methods
with `tornado.testing.gen_test` instead of
`tornado.gen.coroutine`. This class also provides the `stop()`
and `wait()` methods for a more manual style of testing. The test
method itself must call ``self.wait()``, and asynchronous
callbacks should call ``self.stop()`` to signal completion.
By default, a new `.IOLoop` is constructed for each test and is available
as ``self.io_loop``. This `.IOLoop` should be used in the construction of
HTTP clients/servers, etc. If the code being tested requires a
global `.IOLoop`, subclasses should override `get_new_ioloop` to return it.
The `.IOLoop`'s ``start`` and ``stop`` methods should not be
called directly. Instead, use `self.stop <stop>` and `self.wait
<wait>`. Arguments passed to ``self.stop`` are returned from
``self.wait``. It is possible to have multiple ``wait``/``stop``
cycles in the same test.
Example::
# This test uses coroutine style.
class MyTestCase(AsyncTestCase):
@tornado.testing.gen_test
def test_http_fetch(self):
client = AsyncHTTPClient(self.io_loop)
response = yield client.fetch("http://www.tornadoweb.org")
# Test contents of response
self.assertIn("FriendFeed", response.body)
# This test uses argument passing between self.stop and self.wait.
class MyTestCase2(AsyncTestCase):
def test_http_fetch(self):
client = AsyncHTTPClient(self.io_loop)
client.fetch("http://www.tornadoweb.org/", self.stop)
response = self.wait()
# Test contents of response
self.assertIn("FriendFeed", response.body)
# This test uses an explicit callback-based style.
class MyTestCase3(AsyncTestCase):
def test_http_fetch(self):
client = AsyncHTTPClient(self.io_loop)
client.fetch("http://www.tornadoweb.org/", self.handle_fetch)
self.wait()
def handle_fetch(self, response):
# Test contents of response (failures and exceptions here
# will cause self.wait() to throw an exception and end the
# test).
# Exceptions thrown here are magically propagated to
# self.wait() in test_http_fetch() via stack_context.
self.assertIn("FriendFeed", response.body)
self.stop()
"""
def __init__(self, methodName='runTest'):
super(AsyncTestCase, self).__init__(methodName)
self.__stopped = False
self.__running = False
self.__failure = None
self.__stop_args = None
self.__timeout = None
# It's easy to forget the @gen_test decorator, but if you do
# the test will silently be ignored because nothing will consume
# the generator. Replace the test method with a wrapper that will
# make sure it's not an undecorated generator.
setattr(self, methodName, _TestMethodWrapper(getattr(self, methodName)))
def setUp(self):
super(AsyncTestCase, self).setUp()
self.io_loop = self.get_new_ioloop()
self.io_loop.make_current()
def tearDown(self):
# Clean up Subprocess, so it can be used again with a new ioloop.
Subprocess.uninitialize()
self.io_loop.clear_current()
if (not IOLoop.initialized() or
self.io_loop is not IOLoop.instance()):
# Try to clean up any file descriptors left open in the ioloop.
# This avoids leaks, especially when tests are run repeatedly
# in the same process with autoreload (because curl does not
# set FD_CLOEXEC on its file descriptors)
self.io_loop.close(all_fds=True)
super(AsyncTestCase, self).tearDown()
# In case an exception escaped or the StackContext caught an exception
# when there wasn't a wait() to re-raise it, do so here.
# This is our last chance to raise an exception in a way that the
# unittest machinery understands.
self.__rethrow()
def get_new_ioloop(self):
"""Creates a new `.IOLoop` for this test. May be overridden in
subclasses for tests that require a specific `.IOLoop` (usually
the singleton `.IOLoop.instance()`).
"""
return IOLoop()
def _handle_exception(self, typ, value, tb):
if self.__failure is None:
self.__failure = (typ, value, tb)
else:
app_log.error("multiple unhandled exceptions in test",
exc_info=(typ, value, tb))
self.stop()
return True
def __rethrow(self):
if self.__failure is not None:
failure = self.__failure
self.__failure = None
raise_exc_info(failure)
def run(self, result=None):
with ExceptionStackContext(self._handle_exception):
super(AsyncTestCase, self).run(result)
# As a last resort, if an exception escaped super.run() and wasn't
# re-raised in tearDown, raise it here. This will cause the
# unittest run to fail messily, but that's better than silently
# ignoring an error.
self.__rethrow()
def stop(self, _arg=None, **kwargs):
"""Stops the `.IOLoop`, causing one pending (or future) call to `wait()`
to return.
Keyword arguments or a single positional argument passed to `stop()` are
saved and will be returned by `wait()`.
"""
assert _arg is None or not kwargs
self.__stop_args = kwargs or _arg
if self.__running:
self.io_loop.stop()
self.__running = False
self.__stopped = True
def wait(self, condition=None, timeout=None):
"""Runs the `.IOLoop` until stop is called or timeout has passed.
In the event of a timeout, an exception will be thrown. The
default timeout is 5 seconds; it may be overridden with a
``timeout`` keyword argument or globally with the
``ASYNC_TEST_TIMEOUT`` environment variable.
If ``condition`` is not None, the `.IOLoop` will be restarted
after `stop()` until ``condition()`` returns true.
.. versionchanged:: 3.1
Added the ``ASYNC_TEST_TIMEOUT`` environment variable.
"""
if timeout is None:
timeout = get_async_test_timeout()
if not self.__stopped:
if timeout:
def timeout_func():
try:
raise self.failureException(
'Async operation timed out after %s seconds' %
timeout)
except Exception:
self.__failure = sys.exc_info()
self.stop()
self.__timeout = self.io_loop.add_timeout(self.io_loop.time() + timeout, timeout_func)
while True:
self.__running = True
self.io_loop.start()
if (self.__failure is not None or
condition is None or condition()):
break
if self.__timeout is not None:
self.io_loop.remove_timeout(self.__timeout)
self.__timeout = None
assert self.__stopped
self.__stopped = False
self.__rethrow()
result = self.__stop_args
self.__stop_args = None
return result
class AsyncHTTPTestCase(AsyncTestCase):
"""A test case that starts up an HTTP server.
Subclasses must override `get_app()`, which returns the
`tornado.web.Application` (or other `.HTTPServer` callback) to be tested.
Tests will typically use the provided ``self.http_client`` to fetch
URLs from this server.
Example, assuming the "Hello, world" example from the user guide is in
``hello.py``::
import hello
class TestHelloApp(AsyncHTTPTestCase):
def get_app(self):
return hello.make_app()
def test_homepage(self):
response = self.fetch('/')
self.assertEqual(response.code, 200)
self.assertEqual(response.body, 'Hello, world')
That call to ``self.fetch()`` is equivalent to ::
self.http_client.fetch(self.get_url('/'), self.stop)
response = self.wait()
which illustrates how AsyncTestCase can turn an asynchronous operation,
like ``http_client.fetch()``, into a synchronous operation. If you need
to do other asynchronous operations in tests, you'll probably need to use
``stop()`` and ``wait()`` yourself.
"""
def setUp(self):
super(AsyncHTTPTestCase, self).setUp()
sock, port = bind_unused_port()
self.__port = port
self.http_client = self.get_http_client()
self._app = self.get_app()
self.http_server = self.get_http_server()
self.http_server.add_sockets([sock])
def get_http_client(self):
return AsyncHTTPClient(io_loop=self.io_loop)
def get_http_server(self):
return HTTPServer(self._app, io_loop=self.io_loop,
**self.get_httpserver_options())
def get_app(self):
"""Should be overridden by subclasses to return a
`tornado.web.Application` or other `.HTTPServer` callback.
"""
raise NotImplementedError()
def fetch(self, path, **kwargs):
"""Convenience method to synchronously fetch a url.
The given path will be appended to the local server's host and
port. Any additional kwargs will be passed directly to
`.AsyncHTTPClient.fetch` (and so could be used to pass
``method="POST"``, ``body="..."``, etc).
"""
self.http_client.fetch(self.get_url(path), self.stop, **kwargs)
return self.wait()
def get_httpserver_options(self):
"""May be overridden by subclasses to return additional
keyword arguments for the server.
"""
return {}
def get_http_port(self):
"""Returns the port used by the server.
A new port is chosen for each test.
"""
return self.__port
def get_protocol(self):
return 'http'
def get_url(self, path):
"""Returns an absolute url for the given path on the test server."""
return '%s://localhost:%s%s' % (self.get_protocol(),
self.get_http_port(), path)
def tearDown(self):
self.http_server.stop()
self.io_loop.run_sync(self.http_server.close_all_connections,
timeout=get_async_test_timeout())
if (not IOLoop.initialized() or
self.http_client.io_loop is not IOLoop.instance()):
self.http_client.close()
super(AsyncHTTPTestCase, self).tearDown()
class AsyncHTTPSTestCase(AsyncHTTPTestCase):
"""A test case that starts an HTTPS server.
Interface is generally the same as `AsyncHTTPTestCase`.
"""
def get_http_client(self):
return AsyncHTTPClient(io_loop=self.io_loop, force_instance=True,
defaults=dict(validate_cert=False))
def get_httpserver_options(self):
return dict(ssl_options=self.get_ssl_options())
def get_ssl_options(self):
"""May be overridden by subclasses to select SSL options.
By default includes a self-signed testing certificate.
"""
# Testing keys were generated with:
# openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
module_dir = os.path.dirname(__file__)
return dict(
certfile=os.path.join(module_dir, 'test', 'test.crt'),
keyfile=os.path.join(module_dir, 'test', 'test.key'))
def get_protocol(self):
return 'https'
def gen_test(func=None, timeout=None):
"""Testing equivalent of ``@gen.coroutine``, to be applied to test methods.
``@gen.coroutine`` cannot be used on tests because the `.IOLoop` is not
already running. ``@gen_test`` should be applied to test methods
on subclasses of `AsyncTestCase`.
Example::
class MyTest(AsyncHTTPTestCase):
@gen_test
def test_something(self):
response = yield gen.Task(self.fetch('/'))
By default, ``@gen_test`` times out after 5 seconds. The timeout may be
overridden globally with the ``ASYNC_TEST_TIMEOUT`` environment variable,
or for each test with the ``timeout`` keyword argument::
class MyTest(AsyncHTTPTestCase):
@gen_test(timeout=10)
def test_something_slow(self):
response = yield gen.Task(self.fetch('/'))
.. versionadded:: 3.1
The ``timeout`` argument and ``ASYNC_TEST_TIMEOUT`` environment
variable.
.. versionchanged:: 4.0
The wrapper now passes along ``*args, **kwargs`` so it can be used
on functions with arguments.
"""
if timeout is None:
timeout = get_async_test_timeout()
def wrap(f):
# Stack up several decorators to allow us to access the generator
# object itself. In the innermost wrapper, we capture the generator
# and save it in an attribute of self. Next, we run the wrapped
# function through @gen.coroutine. Finally, the coroutine is
# wrapped again to make it synchronous with run_sync.
#
# This is a good case study arguing for either some sort of
# extensibility in the gen decorators or cancellation support.
@functools.wraps(f)
def pre_coroutine(self, *args, **kwargs):
result = f(self, *args, **kwargs)
if isinstance(result, GeneratorType) or iscoroutine(result):
self._test_generator = result
else:
self._test_generator = None
return result
if iscoroutinefunction(f):
coro = pre_coroutine
else:
coro = gen.coroutine(pre_coroutine)
@functools.wraps(coro)
def post_coroutine(self, *args, **kwargs):
try:
return self.io_loop.run_sync(
functools.partial(coro, self, *args, **kwargs),
timeout=timeout)
except TimeoutError as e:
# run_sync raises an error with an unhelpful traceback.
# Throw it back into the generator or coroutine so the stack
# trace is replaced by the point where the test is stopped.
self._test_generator.throw(e)
# In case the test contains an overly broad except clause,
# we may get back here. In this case re-raise the original
# exception, which is better than nothing.
raise
return post_coroutine
if func is not None:
# Used like:
# @gen_test
# def f(self):
# pass
return wrap(func)
else:
# Used like @gen_test(timeout=10)
return wrap
# Without this attribute, nosetests will try to run gen_test as a test
# anywhere it is imported.
gen_test.__test__ = False # type: ignore
class LogTrapTestCase(unittest.TestCase):
"""A test case that captures and discards all logging output
if the test passes.
Some libraries can produce a lot of logging output even when
the test succeeds, so this class can be useful to minimize the noise.
Simply use it as a base class for your test case. It is safe to combine
with AsyncTestCase via multiple inheritance
(``class MyTestCase(AsyncHTTPTestCase, LogTrapTestCase):``)
This class assumes that only one log handler is configured and
that it is a `~logging.StreamHandler`. This is true for both
`logging.basicConfig` and the "pretty logging" configured by
`tornado.options`. It is not compatible with other log buffering
mechanisms, such as those provided by some test runners.
.. deprecated:: 4.1
Use the unittest module's ``--buffer`` option instead, or `.ExpectLog`.
"""
def run(self, result=None):
logger = logging.getLogger()
if not logger.handlers:
logging.basicConfig()
handler = logger.handlers[0]
if (len(logger.handlers) > 1 or
not isinstance(handler, logging.StreamHandler)):
# Logging has been configured in a way we don't recognize,
# so just leave it alone.
super(LogTrapTestCase, self).run(result)
return
old_stream = handler.stream
try:
handler.stream = StringIO()
gen_log.info("RUNNING TEST: " + str(self))
old_error_count = len(result.failures) + len(result.errors)
super(LogTrapTestCase, self).run(result)
new_error_count = len(result.failures) + len(result.errors)
if new_error_count != old_error_count:
old_stream.write(handler.stream.getvalue())
finally:
handler.stream = old_stream
class ExpectLog(logging.Filter):
"""Context manager to capture and suppress expected log output.
Useful to make tests of error conditions less noisy, while still
leaving unexpected log entries visible. *Not thread safe.*
The attribute ``logged_stack`` is set to true if any exception
stack trace was logged.
Usage::
with ExpectLog('tornado.application', "Uncaught exception"):
error_response = self.fetch("/some_page")
.. versionchanged:: 4.3
Added the ``logged_stack`` attribute.
"""
def __init__(self, logger, regex, required=True):
"""Constructs an ExpectLog context manager.
:param logger: Logger object (or name of logger) to watch. Pass
an empty string to watch the root logger.
:param regex: Regular expression to match. Any log entries on
the specified logger that match this regex will be suppressed.
:param required: If true, an exception will be raised if the end of
the ``with`` statement is reached without matching any log entries.
"""
if isinstance(logger, basestring_type):
logger = logging.getLogger(logger)
self.logger = logger
self.regex = re.compile(regex)
self.required = required
self.matched = False
self.logged_stack = False
def filter(self, record):
if record.exc_info:
self.logged_stack = True
message = record.getMessage()
if self.regex.match(message):
self.matched = True
return False
return True
def __enter__(self):
self.logger.addFilter(self)
return self
def __exit__(self, typ, value, tb):
self.logger.removeFilter(self)
if not typ and self.required and not self.matched:
raise Exception("did not get expected log message")
def main(**kwargs):
"""A simple test runner.
This test runner is essentially equivalent to `unittest.main` from
the standard library, but adds support for tornado-style option
parsing and log formatting. It is *not* necessary to use this
`main` function to run tests using `AsyncTestCase`; these tests
are self-contained and can run with any test runner.
The easiest way to run a test is via the command line::
python -m tornado.testing tornado.test.stack_context_test
See the standard library unittest module for ways in which tests can
be specified.
Projects with many tests may wish to define a test script like
``tornado/test/runtests.py``. This script should define a method
``all()`` which returns a test suite and then call
`tornado.testing.main()`. Note that even when a test script is
used, the ``all()`` test suite may be overridden by naming a
single test on the command line::
# Runs all tests
python -m tornado.test.runtests
# Runs one test
python -m tornado.test.runtests tornado.test.stack_context_test
Additional keyword arguments passed through to ``unittest.main()``.
For example, use ``tornado.testing.main(verbosity=2)``
to show many test details as they are run.
See http://docs.python.org/library/unittest.html#unittest.main
for full argument list.
"""
from tornado.options import define, options, parse_command_line
define('exception_on_interrupt', type=bool, default=True,
help=("If true (default), ctrl-c raises a KeyboardInterrupt "
"exception. This prints a stack trace but cannot interrupt "
"certain operations. If false, the process is more reliably "
"killed, but does not print a stack trace."))
# support the same options as unittest's command-line interface
define('verbose', type=bool)
define('quiet', type=bool)
define('failfast', type=bool)
define('catch', type=bool)
define('buffer', type=bool)
argv = [sys.argv[0]] + parse_command_line(sys.argv)
if not options.exception_on_interrupt:
signal.signal(signal.SIGINT, signal.SIG_DFL)
if options.verbose is not None:
kwargs['verbosity'] = 2
if options.quiet is not None:
kwargs['verbosity'] = 0
if options.failfast is not None:
kwargs['failfast'] = True
if options.catch is not None:
kwargs['catchbreak'] = True
if options.buffer is not None:
kwargs['buffer'] = True
if __name__ == '__main__' and len(argv) == 1:
print("No tests specified", file=sys.stderr)
sys.exit(1)
try:
# In order to be able to run tests by their fully-qualified name
# on the command line without importing all tests here,
# module must be set to None. Python 3.2's unittest.main ignores
# defaultTest if no module is given (it tries to do its own
# test discovery, which is incompatible with auto2to3), so don't
# set module if we're not asking for a specific test.
if len(argv) > 1:
unittest.main(module=None, argv=argv, **kwargs)
else:
unittest.main(defaultTest="all", argv=argv, **kwargs)
except SystemExit as e:
if e.code == 0:
gen_log.info('PASS')
else:
gen_log.error('FAIL')
raise
if __name__ == '__main__':
main()
| [
"bastinrobins@gmail.com"
] | bastinrobins@gmail.com |
473194a3599f62fa5eb651c59a77943ef082fb55 | f7fd58c36d8c56f2e7fe5a0b3e1f6e86b06512b9 | /app/schemas/payments.py | d1d6cacfb50419d5391eee2742097319efeb3962 | [] | no_license | Anturion/globalbit_technical_test_back | 6f8759e74995af3b1e093c8b72df38f1ca86b0c2 | 47de3717fe1cc55853888a69ee2a3b08913a54e1 | refs/heads/main | 2023-08-03T01:10:56.435481 | 2021-09-08T15:15:04 | 2021-09-08T15:15:04 | 366,434,547 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 304 | py | from datetime import datetime
from pydantic import BaseModel
from enum import Enum
class StatePaymentEnum(str, Enum):
rechazada = u'rechazada',
en_proceso = u'en-proceso',
aprobada = u'aprobada'
class PaymentCreate(BaseModel):
email: str
amount: float
state: StatePaymentEnum | [
"alucardcampillo@gmail.com"
] | alucardcampillo@gmail.com |
0667e42e73b0c4eb8a2f2e8752573ce40543cfb2 | 4aa6b7c3a5ae3817007e09ad1289c1e9f7a355c0 | /greedy/lemonade-change.py | cc8c4bdbd758354e0499b280a640a88fc2082bd8 | [] | no_license | liuhuipy/Algorithm-python | 8f5143e06cf5fa2de2c178e3ba9e5fd12b9bcdf7 | 4e92a0b874f956d1df84d1493f870a5d1f06cde2 | refs/heads/master | 2021-06-03T04:19:01.946149 | 2021-01-08T07:44:40 | 2021-01-08T07:44:40 | 99,838,105 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,146 | py | """
柠檬水找零:
在柠檬水摊上,每一杯柠檬水的售价为 5 美元。
顾客排队购买你的产品,(按账单 bills 支付的顺序)一次购买一杯。
每位顾客只买一杯柠檬水,然后向你付 5 美元、10 美元或 20 美元。你必须给每个顾客正确找零,也就是说净交易是每位顾客向你支付 5 美元。
注意,一开始你手头没有任何零钱。
如果你能给每位顾客正确找零,返回 true ,否则返回 false 。
示例 1:
输入:[5,5,5,10,20]
输出:true
解释:
前 3 位顾客那里,我们按顺序收取 3 张 5 美元的钞票。
第 4 位顾客那里,我们收取一张 10 美元的钞票,并返还 5 美元。
第 5 位顾客那里,我们找还一张 10 美元的钞票和一张 5 美元的钞票。
由于所有客户都得到了正确的找零,所以我们输出 true。
示例 2:
输入:[5,5,10]
输出:true
示例 3:
输入:[10,10]
输出:false
示例 4:
输入:[5,5,10,10,20]
输出:false
解释:
前 2 位顾客那里,我们按顺序收取 2 张 5 美元的钞票。
对于接下来的 2 位顾客,我们收取一张 10 美元的钞票,然后返还 5 美元。
对于最后一位顾客,我们无法退回 15 美元,因为我们现在只有两张 10 美元的钞票。
由于不是每位顾客都得到了正确的找零,所以答案是 false。
"""
from typing import List
class Solution:
def lemonadeChange(self, bills: List[int]) -> bool:
m, n = 0, 0
for bill in bills:
need_p = bill - 5
if need_p >= 15 and n > 0:
need_p = 5
n -= 1
if m * 5 >= need_p >= 5:
m -= need_p // 5
need_p = 0
if need_p != 0:
return False
if bill == 5:
m += 1
elif bill == 10:
n += 1
return True
if __name__ == '__main__':
print(Solution().lemonadeChange([5,5,10,20,5,5,5,5,5,5,5,5,5,10,5,5,20,5,20,5]))
| [
"liuhui_py@163.com"
] | liuhui_py@163.com |
ff786a6cb5798541371c0211866f0d38a9927df7 | 54b50cac45ceb865623c0efac03a39ace131e45c | /PVFFlash.py | b5a2a7803274be85a84c05216c483c3a3caab2f3 | [] | no_license | RahulJain7/DTLServers | 76786d8fda7aac01dcfaa5dbc94704cbe0d3e3a9 | b4d0fd21b6d4b72c0c13609119582411e1acc339 | refs/heads/master | 2020-04-01T08:27:43.121130 | 2015-10-01T00:33:23 | 2015-10-01T00:33:23 | 42,653,318 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,435 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Sep 07 17:05:31 2015
@author: RAHUL JAIN
"""
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 19 03:13:38 2015
@author: RAHUL JAIN
"""
import socket
import win32com.client
def Main():
HOST = ''
PORT = 7000
dtl = win32com.client.Dispatch("DTL.Thermodynamics.Calculator")
dtl.Initialize()
serversocket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
serversocket.bind((HOST,PORT))
serversocket.listen(2)
print('Server Listening.....')
while True:
connsocket, addr = serversocket.accept()
print('Connection from',addr)
if True:
data = connsocket.recv(4096)
if not data: break
strdata = data.decode()
splitdata = strdata.split(',')
Nc = int(splitdata[3])
No = 4+Nc
P = float(splitdata[1])
VF = float(splitdata[2])
Comp = splitdata[4:No]
Xstr = splitdata[No:len(splitdata)]
X = [float(i) for i in Xstr]
PVFlash = dtl.PVFFlash(splitdata[0],0,P,VF,Comp,X)
ptfl = " " + str(PVFlash[2][0]) + " "
if Nc>2:
for j in range(3,Nc+1):
ptfl = ptfl + str(PVFlash[j][0]) + " "
ptfl = ptfl + PVFlash[Nc+2][0]
connsocket.send(ptfl)
else:
connsocket.close()
serversocket.close()
if __name__ == '__main__':
Main() | [
"rahjain1@gmail.com"
] | rahjain1@gmail.com |
84506bf4331ef72cb58b1d8d88354756d76b2da5 | b3f0b07c9bae9414bd7c91d0841ae75636f4dc9f | /koala/urls.py | fbd9125fd707d964e9fc20b9c881f9ade0cac893 | [] | no_license | tjworks/koala | 544f975b43cee44f66ce3fb34d13ebaa4904af4e | 36e02f7b48fa70f2df4d7a371cd611f32827d2b2 | refs/heads/master | 2020-04-26T13:12:03.857119 | 2012-11-08T15:05:16 | 2012-11-08T15:05:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,114 | py | from django.conf.urls import patterns, include, url
from django.contrib import admin
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
url('^docs/(?P<path>.*)$', 'django.views.static.serve', {'document_root': 'docs/_build/html'}),
url('^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': 'static'}),
url(r'^pages/(?P<pagename>.*)$', 'koala.views.main.serve'),
# search/proxy
url(r'^(?P<post_id>.*)/edit/?$', 'koala.views.adview.edit'),
url(r'^(?P<post_id>.*?)/?activate/?$', 'koala.views.adview.activate'),
url(r'^(?P<post_id>.*?)/?view/?$', 'koala.views.adview.view'),
url(r'^(?P<item_id>.*)/update/?$', 'koala.views.adview.update'),
#url(r'^activate/(?P<post_id>.*)$', 'koala.views.adview.activate'),
#url(r'^view/(?P<post_id>.*)$', 'koala.views.adview.view'),
#url(r'^assets/(?P<path>.*)$', 'django.views.static.serve', {'document_root': 'static'}),
#url(r'^(?P<path>.*.(css|js|png|gif|swf|jpg|html|htm|pdf|csv|json))$', 'django.views.static.serve', {'document_root': 'static'}),
# url(r'^koala/', include('koala.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
# Home page
url(r'^$', 'koala.views.main.home'),
url(r'^howto$', 'koala.views.home'),
)
"""
url(r'^(?P<path>.*.js)$', 'django.views.static.serve', {'document_root': 'static'}),
url(r'^(?P<path>.*.png)$', 'django.views.static.serve', {'document_root': 'static'}),
url(r'^(?P<path>.*.gif)$', 'django.views.static.serve', {'document_root': 'static'}),
url(r'^(?P<path>.*.swf)$', 'django.views.static.serve', {'document_root': 'static'}),
url(r'^(?P<path>.*.jpg)$', 'django.views.static.serve', {'document_root': 'static'}),
url(r'^(?P<path>.*.html)$', 'django.views.static.serve', {'document_root': 'static'}),
"""
| [
"evertang@gmail.com"
] | evertang@gmail.com |
1645c21e379fec3a3406db819f99146db61ac417 | 298383e1591f4d27284fa2a094c967a2846d91f8 | /sumocharge/rdm880/rdm880.py | fb67fda3e6939eed06f8c8f7fccbaba0ef73ee8f | [
"BSD-2-Clause"
] | permissive | lorenhsu1128/sumo-charge | 47586a3ea2801da599a4254e6b0ba813aa7f3221 | 7a2a746542a7a76fd89a0681598307698375c6ad | refs/heads/master | 2021-01-23T13:09:33.086001 | 2015-11-02T07:28:57 | 2015-11-02T07:28:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,088 | py | class Packet(object):
def __init__(self, cmd=None, data=[], stationid=0x00):
self.stationid = stationid
self.cmd = cmd
self.data = data
self.length = None
self.status = None
self.bcc = None
def build(self, debug=False):
self.msgtype = 'command'
pack = {
'stationid': chr(self.stationid),
'cmd': chr(self.cmd),
'data': ''.join([chr(x) for x in self.data]),
}
pack['length'] = chr(len(pack['data']) + 1)
pack['bcc'] = self.stationid ^ ord(pack['length']) ^ self.cmd
for c in self.data:
pack['bcc'] ^= c
pack['bcc'] = chr(pack['bcc'])
raw = '\xaa%(stationid)s%(length)s%(cmd)s%(data)s%(bcc)s\xbb' % pack
if debug:
for k, v in pack.items():
print k.ljust(20, ' '), ' '.join(['%02x' % ord(x) for x in v])
print 'raw'.ljust(20, ' '), ' '.join(['%02X' % ord(x) for x in raw])
return raw
def parse(self, raw):
self.msgtype = 'reply'
raw = [ord(x) for x in raw]
if raw[0] != 0xAA or raw[-1] != 0xBB:
print 'Missing STX or ETX'
return None
raw = raw[1:-1]
self.stationid, self.length, self.status = raw[:3]
raw = raw[4:]
self.data = raw[:self.length - 2]
raw = raw[self.length - 2:]
self.bcc = raw[0]
if len(raw) > 1:
print 'Length did not match packet size!'
return None
def __str__(self):
ret = 'msgtype %s\n' % self.msgtype
if self.msgtype == 'command':
ret += 'stationid %02X\n' % self.stationid
ret += 'cmd %02X\n' % self.cmd
ret += 'data %s\n' % ' '.join(['%02X' % x for x in self.data])
else:
ret += 'stationid %02X\n' % self.stationid
ret += 'length %02X\n' % self.length
ret += 'status %02X (%s)\n' % (self.status, Status.get(self.status, 'Unknown status code'))
ret += 'data %s\n' % ' '.join(['%02X' % x for x in self.data])
ret += 'bcc %02X\n' % self.bcc
return ret.rstrip('\n')
def execute(self, io):
raw = self.build()
io.write(raw)
raw = io.read(3)
length = ord(raw[2])
raw += io.read(length + 2)
p = Packet()
p.parse(raw)
return p
class CommandSet(object):
def __init__(self, name, data):
self.name = name
self.data = data
def __getattr__(self, key):
return self.data[key]
Mifare = CommandSet('Mifare', {
'Read': 0x20,
'Write': 0x21,
'InitVal': 0x22,
'Decrement': 0x23,
'Increment': 0x24,
'GetSNR': 0x25,
})
System = CommandSet('System', {
'SetAddress': 0x80,
'SetBaudrate': 0x81,
'SetSerialNumber': 0x82,
'GetSerialNumber': 0x83,
'Write_UserInfo': 0x84,
'Read_UserInfo': 0x85,
'Get_VersionNum': 0x86,
'Control_Led1': 0x87,
'Control_Led2': 0x88,
'Control_Buzzer': 0x89,
})
Status = {
0x00: 'Command OK',
0x01: 'Command failed',
0x80: 'Set OK',
0x81: 'Set failed',
0x82: 'Reader reply timeout',
0x83: 'Card does not exist',
0x84: 'The data response from the card is error',
0x85: 'Invalid command parameter',
0x87: 'Unknown internal error',
0x8f: 'Reader received unknown command',
0x8a: 'ISO14443: Error in InitVal process',
0x8b: 'ISO14443: Wrong SNR during anticollision loop',
0x8c: 'ISO14443: Authentication failure',
0x90: 'ISO15693: The card does not support this command',
0x91: 'ISO15693: Invalid command format',
0x92: 'ISO15693: Do not support option mode',
0x93: 'ISO15693: Block does not exist',
0x94: 'ISO15693: The object has been locked',
0x95: 'ISO15693: Lock operation failed',
0x96: 'ISO15693: Operation failed',
}
| [
"rwallhead@gmail.com"
] | rwallhead@gmail.com |
f2fb15dbed9b7d1a0b312c5336559cc001864e21 | 5e6519e1ed37d463c4740b65dc4030c883d2df64 | /jobsite/employee/models.py | 9f29e9a043e0615463669ff1fc9ff05aa28e37d2 | [] | no_license | shohag000/jobsite | ed4811d778ed1d58a0d10930174a1fd3e549e630 | 25025e9ae4c41362fb81ece7747617d7837e209c | refs/heads/master | 2020-05-18T18:03:56.496353 | 2019-05-21T11:03:28 | 2019-05-21T11:03:28 | 184,574,525 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,104 | py | from sqlalchemy import Column, desc,ForeignKey
from utils import get_current_time,STRING_LEN
from extensions import db
from constants import JOB_TYPE
class Employee(db.Model):
__tablename__ = 'employee'
id = db.Column(db.Integer, primary_key=True)
full_name = db.Column(db.String(STRING_LEN), nullable=False)
academy = db.Column(db.String(STRING_LEN), nullable=False)
created_at = db.Column(db.DateTime, nullable=False, default=get_current_time)
user = db.Column(db.Integer,db.ForeignKey("user.id"),nullable=False)
def __repr__(self):
return '<Employee %r>' % self.full_name
@property
def serialize(self):
"""Return object data in easily serializable format"""
return {
'id' : self.id,
'full_name': self.full_name,
'created_at': self.created_at,
'academy': self.academy,
'employee_user' : {
"username" : self.employee_user.username,
"email" : self.employee_user.email,
"employer" : self.employee_user.employer,
}
} | [
"shohagbiswas000@gmail.com"
] | shohagbiswas000@gmail.com |
4edd8ee7de5778dab0422d9f5f6e2c73198cb473 | 5308d3624036fb27ca158b520d2c59b643f8bf32 | /tests/test_fouriersqrt.py | fac7cd6b5cbe2b3290a902027595fb01a6bd3223 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | kernsuite-debian/galsim | bfacc7f665e35595189c03b164e61809c2943cc5 | 1515537b429fb3337d5c1090f9161f0fb223f2a0 | refs/heads/master | 2022-11-06T21:22:51.794046 | 2018-08-29T18:51:56 | 2018-08-29T18:51:56 | 82,295,722 | 0 | 1 | NOASSERTION | 2022-10-19T06:05:05 | 2017-02-17T12:33:53 | Python | UTF-8 | Python | false | false | 4,376 | py | # Copyright (c) 2012-2018 by the GalSim developers team on GitHub
# https://github.com/GalSim-developers
#
# This file is part of GalSim: The modular galaxy image simulation toolkit.
# https://github.com/GalSim-developers/GalSim
#
# GalSim is free software: redistribution and use in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions, and the disclaimer given in the accompanying LICENSE
# file.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the disclaimer given in the documentation
# and/or other materials provided with the distribution.
#
from __future__ import print_function
import numpy as np
import os
import sys
import galsim
from galsim_test_helpers import *
imgdir = os.path.join(".", "SBProfile_comparison_images") # Directory containing the reference
# images.
@timer
def test_fourier_sqrt():
"""Test that the FourierSqrt operator is the inverse of auto-convolution.
"""
dx = 0.4
myImg1 = galsim.ImageF(80,80, scale=dx)
myImg1.setCenter(0,0)
myImg2 = galsim.ImageF(80,80, scale=dx)
myImg2.setCenter(0,0)
# Test trivial case, where we could (but don't) analytically collapse the
# chain of profiles by recognizing that FourierSqrt is the inverse of
# AutoConvolve.
psf = galsim.Moffat(beta=3.8, fwhm=1.3, flux=5)
psf.drawImage(myImg1, method='no_pixel')
sqrt1 = galsim.FourierSqrt(psf)
psf2 = galsim.AutoConvolve(sqrt1)
np.testing.assert_almost_equal(psf.stepk, psf2.stepk)
psf2.drawImage(myImg2, method='no_pixel')
printval(myImg1, myImg2)
np.testing.assert_array_almost_equal(
myImg1.array, myImg2.array, 4,
err_msg="Moffat sqrt convolved with self disagrees with original")
check_basic(sqrt1, "FourierSqrt", do_x=False)
# Test non-trivial case where we compare (in Fourier space) sqrt(a*a + b*b + 2*a*b) against (a + b)
a = galsim.Moffat(beta=3.8, fwhm=1.3, flux=5)
a.shift(dx=0.5, dy=-0.3) # need nonzero centroid to test
b = galsim.Moffat(beta=2.5, fwhm=1.6, flux=3)
check = galsim.Sum([a, b])
sqrt = galsim.FourierSqrt(
galsim.Sum([
galsim.AutoConvolve(a),
galsim.AutoConvolve(b),
2*galsim.Convolve([a, b])
])
)
np.testing.assert_almost_equal(check.stepk, sqrt.stepk)
check.drawImage(myImg1, method='no_pixel')
sqrt.drawImage(myImg2, method='no_pixel')
np.testing.assert_almost_equal(check.centroid.x, sqrt.centroid.x)
np.testing.assert_almost_equal(check.centroid.y, sqrt.centroid.y)
np.testing.assert_almost_equal(check.flux, sqrt.flux)
np.testing.assert_almost_equal(check.xValue(check.centroid), check.max_sb)
print('check.max_sb = ',check.max_sb)
print('sqrt.max_sb = ',sqrt.max_sb)
# This isn't super accurate...
np.testing.assert_allclose(check.max_sb, sqrt.max_sb, rtol=0.1)
printval(myImg1, myImg2)
np.testing.assert_array_almost_equal(
myImg1.array, myImg2.array, 4,
err_msg="Fourier square root of expanded square disagrees with original")
# Check picklability
do_pickle(sqrt1, lambda x: x.drawImage(method='no_pixel'))
do_pickle(sqrt1)
# Should raise an exception for invalid arguments
assert_raises(TypeError, galsim.FourierSqrt)
assert_raises(TypeError, galsim.FourierSqrt, myImg1)
assert_raises(TypeError, galsim.FourierSqrt, [psf])
assert_raises(TypeError, galsim.FourierSqrt, psf, psf)
assert_raises(TypeError, galsim.FourierSqrt, psf, real_space=False)
assert_raises(TypeError, galsim.FourierSqrtProfile)
assert_raises(TypeError, galsim.FourierSqrtProfile, myImg1)
assert_raises(TypeError, galsim.FourierSqrtProfile, [psf])
assert_raises(TypeError, galsim.FourierSqrtProfile, psf, psf)
assert_raises(TypeError, galsim.FourierSqrtProfile, psf, real_space=False)
assert_raises(NotImplementedError, sqrt1.xValue, galsim.PositionD(0,0))
assert_raises(NotImplementedError, sqrt1.drawReal, myImg1)
assert_raises(NotImplementedError, sqrt1.shoot, 1)
if __name__ == "__main__":
test_fourier_sqrt()
| [
"gijs@pythonic.nl"
] | gijs@pythonic.nl |
db31896453b0d9c35209f306d8b832fdf80c2ebe | bc167f434158921bcf2c678155c5cdfec1c9b0c9 | /PI_code/simulator/behaviourGeneration/group/behav26.py | ac01b00523c06c1efa233dc581d1ac85d476ea99 | [] | no_license | s0217391/DifferentProjects | 6450efc89c64ecd21b86c705737e89e5c69433a6 | 7f4da153660817b6cbf72d2e823aa29c0c2f95a9 | refs/heads/master | 2021-01-17T02:58:46.219240 | 2015-05-26T22:45:46 | 2015-05-26T22:45:46 | 34,995,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 715 | py | #!/usr/bin/python
import sys
def compute(prey, otherHunter, dist):
temp0 = -1 * prey[1]
if prey[0] > prey[1] :
temp1 = dist * otherHunter[1]
else:
if dist > dist :
temp1 = temp0 - otherHunter[0]
else:
if otherHunter[1] != 0:
temp1 = prey[1] / otherHunter[1]
else:
temp1 = otherHunter[1]
temp0 = max( otherHunter[1] , prey[0] )
temp2 = otherHunter[0] + temp0
temp2 = min( dist , prey[0] )
temp3 = temp0 * otherHunter[1]
temp2 = max( temp0 , prey[0] )
temp0 = prey[1] + temp2
temp0 = -1 * prey[1]
if dist != 0:
temp2 = dist / dist
else:
temp2 = dist
if otherHunter[1] != 0:
temp2 = prey[0] % otherHunter[1]
else:
temp2 = otherHunter[1]
return [ otherHunter[1] , temp1 ]
| [
"i7674211@bournemouth.ac.uk"
] | i7674211@bournemouth.ac.uk |
1a5c13ab2556ac746a4a14bd970965e9877cd1ac | 771ce92216b373f26063e79bc982268131eba9cf | /forest.py | a94769856d56a3a466c55ffca8694119235d2c2a | [] | no_license | amovsheva/Forest | c56a9ba2ab5ca8a214cdb18a5dd1c3e99b041dd6 | fcb4137e5d39367a1420679d450d2432afb81f9b | refs/heads/master | 2021-05-03T21:19:14.096197 | 2018-02-06T00:59:26 | 2018-02-06T00:59:26 | 120,381,725 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,636 | py | import random as rnd
import numpy as np
from Tree import *
##############################################################################
##############################################################################
############################## MULTITREE OBJECT ##############################
##############################################################################
##############################################################################
class MultiTree:
##############################################################################
############################### INITIALIZATION ###############################
##############################################################################
def __init__(self, trees):
self.__trees = trees # list of Trees
##############################################################################
############################ ATTRIBUTE PROPERTIES ############################
##############################################################################
@property
def trees(self):
return self.__trees
##############################################################################
############################### STATIC METHODS ###############################
##############################################################################
@staticmethod
def tree_tie(leaf, root):
"""
Acts on: a MultiTree class or object
Input: - leaf, a MetricTree which is a leaf
- root, a MetricTree which is a root
Output: modifies tree that the leaf belongs to by linking all
the children of the root to the leaf
Type: staticmethod
"""
while len(root.children) > 0:
child = root.children[0]
root._unlink(child)
leaf._link(child)
##############################################################################
################################### METHODS ##################################
##############################################################################
def __str__(self):
string = ''
for tree in self.trees:
string += 'Tree: ' + tree.__str__() + ' Parent: ' + tree.name + '\n'
for child in tree.children:
string += ' Child: ' + child.name + '\n'
for child2 in child.children:
string += ' Child: ' + child2.name + '\n'
return string
def copy(self):
"""
Acts on: a MultiTree object
Input: none
Output: a MultiTree object equivalent (but not same) as the
original MultiTree
"""
trees = []
for tree in self.trees:
trees.append(tree.copy())
return MultiTree(trees)
def delete(self):
"""
Acts on: a MultiTree object
Input: none
Output: modifies the object by deleting all the trees within it
"""
for tree in self.trees:
tree.delete()
def __repr__(self):
string = 'MultiTree(['
for tree in self.trees:
string += tree.__repr__() + ','
string = string[:-1]
string += '])'
return string
def is_name_tree_in_mult(self, name):
"""
Acts on: a MultiTree object
Input: name, a string
Output: True if a tree exists inside the MultiTree with the input
name; False if such tree does not exist inside the
MultiTree
"""
for tree in self.trees:
if tree.name == name:
return True
return False
def name_tree_in_mult(self, name):
"""
Acts on: a MultiTree object
Input: name, a string
Output: a MetricTree if a MetricTree with the input name as its
name exists within the MultiTree. None is no such
MultiTree exist (we are talking about name of root)
"""
for tree in self.trees:
if tree.name == name:
return tree
return None
def add_to_trees(self, tree):
"""
Acts on: MutliTree object
Input: tree, a MetricTree
Output: modifies the MultiTree by adding the tree to its list of
trees
"""
self.__trees.append(tree)
def __iadd__(self, other):
"""
Acts on: MultiTree object, bottom MultiTree (can be
multigenerational)
Input: - other, a MultiTree object, with trees of height 1, that
will be added on top
Output: MultiTree that is combination of self and other where
other is added on top of self, by placing roots of self on
corresponding leaves of other MultiTree
Short form: self += other
"""
if len(self.trees) == 1:
return self
top_mult = other
bottom_mult = self
for tree_top in top_mult.trees[:]:
for child in tree_top.children[:]:
name = child.name
root = bottom_mult.name_tree_in_mult(name)
if root is None:
if len(tree_top.children) == 1:
top_mult.__trees.remove(tree_top)
tree_top.delete()
else:
child.delete()
else:
self.tree_tie(child, root)
top_mult.trees[:] = [x for x in top_mult.trees if len(x.children) > 0]
for tree in top_mult.trees:
for child in tree.children[:]:
if len(child.children) == 1:
child_of_child = child.children[0]
tree._unlink(child)
tree._link(child_of_child)
return top_mult
def __eq__(self, other):
if len(self.trees) != len(other.trees):
return False
for tree in self.trees:
if tree not in other.trees:
return False
return True
##############################################################################
##############################################################################
################################ FOREST OBJECT ###############################
##############################################################################
##############################################################################
class Forest:
##############################################################################
############################### INITIALIZATION ###############################
##############################################################################
def __init__(self, multitrees, recombinations):
self.__multitrees = multitrees
self.__recombinations = recombinations
##############################################################################
############################ ATTRIBUTE PROPERTIES ############################
##############################################################################
@property
def multitrees(self):
return self.__multitrees
@property
def recombinations(self):
return self.__recombinations
##############################################################################
############################## OTHER PROPERTIES ##############################
##############################################################################
@property
def number_trees(self):
N = 0
for multitree in self.multitrees:
N += len(multitree.trees)
return N
##############################################################################
############################### STATIC METHODS ###############################
##############################################################################
@staticmethod
def iteration(m, N0, rho):
"""
Input: - m, positive integer, number of children
- N0, positive integer, number of parents to choose from for
children
- rho, positive float, between 0 and 1
Output: a list of 4-tuples, which represents finding the parents and
recombination locations of genomes of each child in children
list. First and second elements of tuple are indeces of first
and second parents chosen out of N0/2 pairs, third element is
which parent (0th or 1st) is used first to create child's
genome, fourth element is a list of floats between 0 and 1
which represent all the recombination locations.
"""
data_list = []
parent_list = []
for i in range(m):
recombination = []
t = 0
while t < 1:
if rnd.random() < rho * np.exp(-rho * t):
recombination.append(t)
t += 0.01
recombination.append(1.)
first_parent = rnd.randrange(0, 1)
ind = rnd.randrange(0, N0//2)
if 2 * ind not in parent_list:
parent_list.append(2 * ind)
parent_list.append(2 * ind + 1)
parent_one = 2 * ind
parent_two = 2 * ind + 1
if rnd.randrange(0,1) == 0:
tpl = (parent_one, parent_two, recombination)
else:
tpl = (parent_two, parent_one, recombination)
data_list.append(tpl)
return data_list, parent_list
@staticmethod
def set_of_recombination_sites(data_list):
"""
Input: data_list, a list of 4 tuples, result of iteration
Output: a list of floats between 0 and 1 in increasing order, which is
a union of all the recombination sites in the genomes of this
generation
"""
total_set = set()
for datum in data_list:
total_set.update(datum[2])
return sorted(list(total_set))
@staticmethod
def parent_in_interval(interval_start, interval_end, datum):
"""
Input: - interval_start, a float between 0 and 1, where interval in
genome starts
- interval_end, a float between 0 and 1, larger than
inteval_start, where the interval in genome ends
- datum, a 4-tuple just like an element of list output by
iteration, which holds in itself information of where all
parts of this particular genome came from (which parent)
Output: parent from which the interval signified by interval_start and
interval_end in the genome came from
"""
if interval_end <= datum[2][0]:
return datum[0]
for i in range(1, len(datum[2])):
if (interval_start >= datum[2][i - 1]
and interval_end <= datum[2][i]):
return datum[i%2]
return None
##############################################################################
################################## METHODS ###################################
##############################################################################
def delete(self):
for mult in self.multitrees:
mult.delete()
def __str__(self):
string = ''
for i in range(len(self.multitrees)):
string += 'Interval: ' + str(self.recombinations[i]) + '\n'
for tree in self.multitrees[i].trees:
string += ' Tree: ' + tree.__str__()
string += ' Parent: ' + tree.name + '\n'
return string
def forest_subdivide(self, new_recomb_list):
"""
Acts on: a Forest object
Input: new_recomb_list, a list of floats from 0 to 1 in
increasing order, which contains the set of recombinations
of the Forest
Output: modifies the Forest to have multitrees for the new list of
recombination sites (copies multitrees if an preexisting
interval is being subdivided) and updates the list of
recombination sites as well
"""
ind = 0
new_multitrees = []
for new_recomb in new_recomb_list:
new_multitrees.append(self.multitrees[ind].copy())
if new_recomb == self.recombinations[ind]:
ind += 1
self.delete()
self.__multitrees = new_multitrees
self.__recombinations = new_recomb_list
def __repr__(self):
string = 'Forest(['
for multitree in self.multitrees:
string += multitree.__repr__() + ','
string = string[:-1] + '],'
string += str(self.recombinations)
string +=')'
return string
##############################################################################
############################### CLASS METHODS ################################
##############################################################################
@classmethod
def forest_one_iteration(cls, children, N0, rho, t):
"""
Input: - children, list of integers
- N0, positive integer, number of parents to choose from
- rho, float between 0 and 1, rate of recombination
- t, non-negative float, height of leaves
Output: a Forest of height one (all multitrees in it have tress of
height one) grown for the input children where parents were
chosen by pairs out of a pool of N0 parents (N0/2 pairs)
and recombination sites were found using poisson process with
rate constant rho, and list of parents' indices (names) that
were found for the input children
(t is not very important. It is the number of iteration which
is used to find the height of the trees in the Forest to later
on add on to the base of the Forest)
"""
m = len(children)
data_list, parent_list = cls.iteration(m, N0, rho)
recombination_sites = cls.set_of_recombination_sites(data_list)
forest = cls([], recombination_sites)
# this is loop for multitree
interval_start = 0.
for recombination in recombination_sites:
interval_end = recombination
new_mult = MultiTree([])
# this is loop for trees within multitree
for i in range(m):
newtree = MetricTree([], None, float(t), str(children[i]))
parent_name = cls.parent_in_interval(interval_start,
interval_end,
data_list[i])
parent_tree = new_mult.name_tree_in_mult(str(parent_name))
if parent_tree is None:
parent_tree = MetricTree([], None, t + 1.,
str(parent_name))
new_mult.add_to_trees(parent_tree)
parent_tree._link(newtree)
forest.__multitrees.append(new_mult)
interval_start = interval_end
return forest, parent_list
@classmethod
def forest_n_iterations(cls, m, N0, rho):
"""
Acts on: Forest class
Input: - m, positive integer, number of individuals in first
generation
- N0, positive integer, number of parents to choose from
- rho, float between 0 and 1, constant in poisson process
for recombination
Output: a Forest of Trees on genome intervals defined by
recombination sites grown until common ancestor is found
for all individuals in the first generation on that
interval in the genome.
Type: classmethod
"""
t = 0
base_f, parent_u_1 = cls.forest_one_iteration(list(range(m)), N0, rho,
float(t))
t += 1
while base_f.number_trees > len(base_f.multitrees):
top_f, parent_u_2 = cls.forest_one_iteration(parent_u_1, N0, rho,
float(t))
total_set_recombinations = set(base_f.recombinations)
total_set_recombinations.update(top_f.recombinations)
recombination_union = sorted(list(total_set_recombinations))
base_f.forest_subdivide(recombination_union)
top_f.forest_subdivide(recombination_union)
for k in range(len(recombination_union)):
base_f.__multitrees[k] += top_f.__multitrees[k]
ind = 0
while ind < len(base_f.multitrees) - 1:
if base_f.multitrees[ind] == base_f.multitrees[ind + 1]:
base_f.__recombinations.pop(ind)
base_f.__multitrees.pop(ind)
else:
ind +=1
parent_u_1 = parent_u_2
t += 1
return base_f | [
"noreply@github.com"
] | amovsheva.noreply@github.com |
6b7255841f8f59a233e48e60b2a3ae013f03ae5b | fb36ea3a7c2b06f49dce19a7e08e784732830431 | /.vscode/calu.py | fc9af7910aec6f194a9a11a27f42c096bd3d4ada | [] | no_license | michaelsas4/python-training | 22f240553d32e3a16b7b6b70e702d11d55373274 | a53910f7ed915b9f3cf5ef146546c412a463381e | refs/heads/master | 2022-11-26T15:29:48.169574 | 2020-08-07T16:03:44 | 2020-08-07T16:03:44 | 257,241,970 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,803 | py | # ---------- main.py ----------
import kivy
kivy.require("1.10.1")
from kivy.app import App
from kivy.uix.gridlayout import GridLayout
class CalcGridLayout(GridLayout):
# Function called when equals is pressed
def calculate(self, calculation):
if calculation:
try:
# Solve formula and display it in entry
# which is pointed at by display
self.display.text = str(eval(calculation))
except Exception:
self.display.text = "Error"
class CalculatorApp(App):
def build(self):
return CalcGridLayout()
calcApp = CalculatorApp()
calcApp.run()
# ---------- calculator.kv ----------
# Custom button
<CustButton@Button>:
font_size: 32
# Define id so I can refer to the CalcGridLayout
# class functions
# Display points to the entry widget
<CalcGridLayout>:
id: calculator
display: entry
rows: 5
padding: 10
spacing: 10
# Where input is displayed
BoxLayout:
TextInput:
id: entry
font_size: 32
multiline: False
# When buttons are pressed update the entry
BoxLayout:
spacing: 10
CustButton:
text: "7"
on_press: entry.text += self.text
CustButton:
text: "8"
on_press: entry.text += self.text
CustButton:
text: "9"
on_press: entry.text += self.text
CustButton:
text: "+"
on_press: entry.text += self.text
BoxLayout:
spacing: 10
CustButton:
text: "4"
on_press: entry.text += self.text
CustButton:
text: "5"
on_press: entry.text += self.text
CustButton:
text: "6"
on_press: entry.text += self.text
CustButton:
text: "-"
on_press: entry.text += self.text
BoxLayout:
spacing: 10
CustButton:
text: "1"
on_press: entry.text += self.text
CustButton:
text: "2"
on_press: entry.text += self.text
CustButton:
text: "3"
on_press: entry.text += self.text
CustButton:
text: "*"
on_press: entry.text += self.text
# When equals is pressed pass text in the entry
# to the calculate function
BoxLayout:
spacing: 10
CustButton:
text: "AC"
on_press: entry.text = ""
CustButton:
text: "0"
on_press: entry.text += self.text
CustButton:
text: "="
on_press: calculator.calculate(entry.text)
CustButton:
text: "/"
on_press: entry.text += self.text | [
"adeyeriayodele19@gmail.com"
] | adeyeriayodele19@gmail.com |
b1445ad8bbec7a296ccdb501f4220c7435f47527 | 0c84b33ce263877078b55ea6a51a5c7f30468378 | /project/settings.py | add689ad41500e65f6e35ba6b70231b3d2ea9eac | [] | no_license | Kruhy/hagakure | b723f6ef82187def9aca7ad21039417e8c9ad3db | 8c92223bc7dae78e014fb8e9676b13739f94dd41 | refs/heads/main | 2023-04-07T13:26:25.278730 | 2021-03-31T12:02:44 | 2021-03-31T12:02:44 | 327,093,238 | 0 | 0 | null | 2021-03-31T12:02:45 | 2021-01-05T19:04:45 | JavaScript | UTF-8 | Python | false | false | 7,268 | py | """
Django settings for hagakure project.
Generated by 'django-admin startproject' using Django 2.2.17.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os

# Deployment secrets (secret key, database credentials, mail credentials)
# live in the untracked prod_settings module so they stay out of version
# control; they are re-exported here under LOCAL_* aliases.
from .prod_settings import SECRET_KEY as LOCAL_KEY, DATABASES as LOCAL_DATABASES, EMAIL_HOST_PASSWORD as LOCAL_EMAIL_HOST_PASSWORD
# NOTE(review): EMAIL_HOST_PASSWORD is imported twice — LOCAL_EMAIL_HOST_PASSWORD
# above is the one used; LOCAL_HOST_PASSWORD below appears unused. Verify and
# drop the duplicate in a follow-up.
from .prod_settings import EMAIL_HOST_PASSWORD as LOCAL_HOST_PASSWORD, EMAIL_HOST as LOCAL_EMAIL_HOST, EMAIL_HOST_USER as LOCAL_EMAIL_HOST_USER
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# The real key is read from the untracked prod_settings module.
SECRET_KEY = LOCAL_KEY

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False

# Hosts this deployment may serve; the leading dot matches hagakure.pl
# and all of its subdomains.
ALLOWED_HOSTS = ['54.38.53.152', '.hagakure.pl',]
# Application definition

INSTALLED_APPS = [
    # Django contrib apps.
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project and third-party apps (alphabetical).
    'articles',
    'auth_ex',            # hosts the custom user model (see AUTH_USER_MODEL)
    'ckeditor',           # django-ckeditor rich-text editor (see CKEDITOR_CONFIGS)
    'ckeditor_uploader',  # CKEditor file uploads (see CKEDITOR_UPLOAD_PATH)
    'gallery',
    'hagakure',
    'messaging',
    'news',
    'registration',
    'trainings',
]
# Standard Django middleware stack (order matters: security first,
# then session, common, CSRF, auth, messages, clickjacking protection).
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'project.urls'

# Template engine: app-dir template loading only (DIRS is empty), with the
# default set of context processors.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'project.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
# Connection details come from the untracked prod_settings module.
DATABASES = LOCAL_DATABASES

# User substitution
# https://docs.djangoproject.com/en/1.11/topics/auth/customizing/#auth-custom-user
AUTH_USER_MODEL = 'auth_ex.User'
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# NOTE(review): LOGIN_URL has no leading slash, so login redirects resolve
# relative to the current URL rather than the site root — probably intended
# to be '/user/login/'. Confirm against the project urlconf before changing.
LOGIN_URL = 'user/login/'
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/

STATIC_URL = '/static/'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'project/static/'),
]
STATIC_ROOT = os.path.join(BASE_DIR, 'static/')

# User-uploaded media (also used by ckeditor_uploader).
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')
MEDIA_URL = '/media/'
# Group-writable uploads (rw-rw-r--) so the web-server group can manage them.
FILE_UPLOAD_PERMISSIONS = 0o664
# Email Server Setup
# File-based backend kept commented out for local debugging:
# EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
# EMAIL_FILE_PATH = BASE_DIR
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
MAILER_EMAIL_BACKEND = EMAIL_BACKEND
# Host, user and password come from the untracked prod_settings module.
EMAIL_HOST = LOCAL_EMAIL_HOST
EMAIL_HOST_PASSWORD = LOCAL_EMAIL_HOST_PASSWORD
EMAIL_HOST_USER = LOCAL_EMAIL_HOST_USER
EMAIL_PORT = 465  # SMTP over implicit TLS, matching EMAIL_USE_SSL below
EMAIL_USE_SSL = True
DEFAULT_FROM_EMAIL = EMAIL_HOST_USER
# Deployment security hardening.
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
X_FRAME_OPTIONS = 'DENY'
# To be enabled when SSL is set up.
SECURE_SSL_REDIRECT = True
# NOTE(review): 1-hour HSTS window is short; consider raising once the
# HTTPS setup has proven stable.
SECURE_HSTS_SECONDS = 3600
CSRF_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True
# CKEditor configuration (django-ckeditor).
# Uploaded files land under MEDIA_ROOT/uploads/.
CKEDITOR_UPLOAD_PATH = 'uploads/'
CKEDITOR_CONFIGS = {
    'default': {
        'skin': 'moono-lisa',
        'toolbar_Basic': [
            ['Source', '-', 'Bold', 'Italic']
        ],
        # enterMode 2 = CKEDITOR.ENTER_BR: Enter inserts <br> instead of <p>.
        'enterMode': 2,
        'toolbar_YourCustomToolbarConfig': [
            {'name': 'clipboard', 'items': ['Cut', 'Copy', 'Paste', 'PasteText', 'PasteFromWord', '-', 'Undo', 'Redo']},
            {'name': 'editing', 'items': ['Find', 'Replace', '-', 'SelectAll']},
            {'name': 'basicstyles',
             'items': ['Bold', 'Italic', 'Underline', 'Strike', 'Subscript', 'Superscript', '-', 'RemoveFormat']},
            '/',
            {'name': 'paragraph',
             'items': ['NumberedList', 'BulletedList', '-', 'Outdent', 'Indent', '-', 'Blockquote', 'CreateDiv', '-',
                       'JustifyLeft', 'JustifyCenter', 'JustifyRight', 'JustifyBlock', '-', 'BidiLtr', 'BidiRtl',
                       'Language']},
            {'name': 'links', 'items': ['Link', 'Unlink', 'Anchor']},
            {'name': 'insert',
             'items': ['Image', 'Table', 'HorizontalRule', 'Smiley', 'SpecialChar', 'PageBreak', 'Iframe']},
            '/',
            {'name': 'styles', 'items': ['Styles', 'Format', 'Font', 'FontSize']},
            {'name': 'colors', 'items': ['TextColor', 'BGColor']},
            {'name': 'tools', 'items': ['Maximize', 'ShowBlocks']},
            {'name': 'about', 'items': ['About']},
            '/',  # put this to force next toolbar on new line
            {'name': 'yourcustomtools', 'items': [
                # put the name of your editor.ui.addButton here
                'Preview',
                'Maximize',
            ]},
        ],
        'toolbar': 'YourCustomToolbarConfig',  # put selected toolbar config here
        # 'toolbarGroups': [{ 'name': 'document', 'groups': [ 'mode', 'document', 'doctools' ] }],
        # 'height': 400,
        # 'width': '100%',
        # 'filebrowserWindowHeight': 725,
        # 'filebrowserWindowWidth': 940,
        # 'toolbarCanCollapse': True,
        # 'mathJaxLib': '//cdn.mathjax.org/mathjax/2.2-latest/MathJax.js?config=TeX-AMS_HTML',
        'language': 'pl',
        'tabSpaces': 4,
        'language_list': ['pl:Polski', 'en:English'],
        # extraPlugins expects a comma-separated string of plugin names.
        'extraPlugins': ','.join([
            'uploadimage',  # the upload image feature
            'uploadwidget',
            # your extra plugins here
            'div',
            'autolink',
            'autoembed',
            'embedsemantic',
            'autogrow',
            # 'devtools',
            'widget',
            'lineutils',
            'clipboard',
            'dialog',
            'dialogui',
            'elementspath',
            'language',
            'wsc'
        ]),
    }
}
"piotrszczygielski@gmail.com"
] | piotrszczygielski@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.