code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
@inside_glslc_testsuite('SpirvAssembly')
class TestEmptyAssemblyFile(expect.ValidObjectFile):
    """Tests that glslc accepts an empty assembly file and emits a valid object file."""
    # An empty SPIR-V assembly (.spvasm) input file.
    shader = FileShader('', '.spvasm')
    # Compile-only (-c): assemble without linking.
    glslc_args = ['-c', shader]
Tests that glslc accepts an empty assembly file.
62598faceab8aa0e5d30bd50
class UdimIntAdapter(object):
    """A class that will make dealing with 1D/2D UDIM indexes less painful.

    Wraps an integer UDIM index; in-place addition accepts either a plain
    int or a 2D coordinate, which is flattened via ``convert_2d_to_index``.
    """

    def __init__(self, value, width=10):
        """
        :param value: initial 1D UDIM index (int).
        :param width: tile-row width used when flattening 2D coordinates.
        """
        super(UdimIntAdapter, self).__init__()
        self.value = value
        self.width = width

    def __iadd__(self, value):
        if not isinstance(value, int):
            # Non-int operands are treated as 2D coordinates and flattened.
            value = convert_2d_to_index(value, self.width)
        self.value += value
        # BUG FIX: __iadd__ must return the result object. The original fell
        # off the end and returned None, so ``a += 5`` rebound ``a`` to None.
        return self
A class that will make dealing with 1D/2D UDIM indexes less painful.
62598fac498bea3a75a57ae2
class PostgreSQLDatabase(Database):
    """PostgreSQL client that uses the psycopg module."""

    # Query-builder class (pypika-style) used for SQL generation.
    query_cls = PostgreSQLQuery

    def __init__(self, host="localhost", port=5432, database=None, user=None, password=None, **kwags):
        # NOTE(review): "kwags" looks like a typo for "kwargs"; left unchanged
        # since the name is part of the pass-through to the base class.
        super(PostgreSQLDatabase, self).__init__(host, port, database, **kwags)
        self.user = user
        self.password = password

    def connect(self):
        """Open and return a new psycopg2 connection with the stored credentials."""
        # Imported lazily so the driver is only required when connecting.
        import psycopg2
        return psycopg2.connect(
            host=self.host, port=self.port, dbname=self.database, user=self.user, password=self.password,
        )

    def trunc_date(self, field, interval):
        """Truncate *field* to the precision named by *interval* (e.g. 'day')."""
        return DateTrunc(field, str(interval))

    def date_add(self, field, date_part, interval):
        """Add *interval* units of *date_part* to *field*."""
        return fn.DateAdd(str(date_part), interval, field)

    def get_column_definitions(self, schema, table, connection=None):
        """Fetch (column_name, data_type) rows for *table* from INFORMATION_SCHEMA."""
        columns = Table("columns", schema="INFORMATION_SCHEMA")
        columns_query = (
            PostgreSQLQuery.from_(columns, immutable=False)
            .select(columns.column_name, columns.data_type)
            .where(columns.table_schema == schema)
            .where(columns.field("table_name") == table)
            .distinct()
            .orderby(columns.column_name)
        )
        return self.fetch(str(columns_query), connection=connection)
PostgreSQL client that uses the psycopg module.
62598fac32920d7e50bc6019
class StudentsFetch(Resource):
    """Fetches all students' details; intended for admin use only."""

    @jwt_required
    def get(self, username):
        try:
            student_details = fetch_student.find_all_student(username)
            # NOTE(review): 201 (Created) is unusual for a read endpoint; 200
            # would be conventional — confirm clients don't rely on 201.
            return get_response(data=student_details, code=201)
        except Exception as e:
            logger.error(e)
            # NOTE(review): no explicit HTTP status is attached to this error
            # body, so the framework default applies.
            return {"error": str(e)}
Fetches all students' details; accessible only by an admin.
62598fac4527f215b58e9ea5
class OntologyWordListView(ListAPIView):
    """Returns a collection of ontology word instances."""
    permission_classes = [AllowAny]  # public, unauthenticated endpoint
    queryset = OntologyWord.objects.all()
    serializer_class = OntologyWordSerializer
    pagination_class = None  # always return the full, unpaginated collection
Returns a collection of ontology words instances.
62598faca17c0f6771d5c1fa
class LegalHoldProperties(Model):
    """The LegalHold property of a blob container.

    ``has_legal_hold`` is populated by the server only and will be ignored
    when sending a request.

    :ivar has_legal_hold: True if at least one legal-hold tag exists.
    :vartype has_legal_hold: bool
    :param tags: The list of LegalHold tags of a blob container.
    """

    _validation = {
        'has_legal_hold': {'readonly': True},
    }

    _attribute_map = {
        'has_legal_hold': {'key': 'hasLegalHold', 'type': 'bool'},
        'tags': {'key': 'tags', 'type': '[TagProperty]'},
    }

    def __init__(self, *, tags=None, **kwargs) -> None:
        super(LegalHoldProperties, self).__init__(**kwargs)
        self.has_legal_hold = None  # server-populated; never sent by the client
        self.tags = tags
The LegalHold property of a blob container. Variables are only populated by the server, and will be ignored when sending a request. :ivar has_legal_hold: The hasLegalHold public property is set to true by SRP if there are at least one existing tag. The hasLegalHold public property is set to false by SRP if all existing legal hold tags are cleared out. There can be a maximum of 1000 blob containers with hasLegalHold=true for a given account. :vartype has_legal_hold: bool :param tags: The list of LegalHold tags of a blob container. :type tags: list[~azure.mgmt.storage.v2018_07_01.models.TagProperty]
62598fac3d592f4c4edbae90
class SheetItemTimeline(generics.ListAPIView):
    """API endpoint that retrieves a timeline of sheet items.

    The timeline is created for the given entity and, optionally, a
    comma-separated list of node ids supplied via the ``nodes`` query
    parameter; when absent, ``nodes`` stays None (all nodes).
    """

    def get(self, request, entity_pk, *args, **kwargs):
        nodes = self.request.QUERY_PARAMS.get('nodes', None)
        if nodes:
            # "1,2,3" -> [1, 2, 3]
            nodes = [int(node_id) for node_id in nodes.split(',')]
        # (Removed the original dead ``else: pass`` branch.)
        items = models.SheetItem.objects.timeline(nodes, entity_pk)
        serialized_timeline = SheetTimeline(items, many=True).data
        return Response(serialized_timeline)
API endpoint that retrieves a timeline of sheet items. The timeline is created according to the given entity, node(s)
62598fac6aa9bd52df0d4e8c
class OMAttribution(OMCompoundElement, CompoundAttributes):
    """An OpenMath Attribution Object."""
    # (attribute, value) pairs, the attributed object, plus standard id/cdbase.
    _fields = ['pairs', 'obj', 'id', 'cdbase']
An OpenMath Attribution Object.
62598fac3539df3088ecc277
class __Proxy(cls, metaclass=Meta, class_name=cls.__name__, logger=logger):
    """Proxy class required for proper interception.

    NOTE(review): subclasses the runtime-provided ``cls`` under a custom
    metaclass ``Meta``; the extra class keywords (``class_name``, ``logger``)
    are consumed by the metaclass. This definition only makes sense inside
    its enclosing factory scope where ``cls``/``Meta``/``logger`` are bound.
    """
    pass
Proxy class required for proper interception.
62598fac38b623060ffa905e
class Site(object):
    """Site in a basis.

    Contains:
      occupant  -- CASM specie name, empty string by default
      occ_alias -- alias (atom file) name, empty string by default
      position  -- triplet castable to float
      charge    -- charge at this coordinate (optional)
    """

    def __init__(self, position, occupant="", occ_alias="", charge=None):
        self.occupant = occupant
        self.occ_alias = occ_alias
        self.charge = charge
        self.position = position
        try:
            # Validate (but do not store) the float triplet.
            _, _, _ = [float(x) for x in self.position]
        except (TypeError, ValueError):
            # BUG FIX: the original passed ``self.position`` as a second
            # constructor argument, leaving "%s" unexpanded in the message,
            # and used a bare ``except:`` that also swallowed SystemExit /
            # KeyboardInterrupt. Narrow the catch and format the message.
            raise SiteError("Position %s could not be cast to a triplet of floats" % (self.position,))

    def write(self, stream=None, index=None):
        """Format the site as a text line; write to *stream* if given.

        With *index*: "<occupant>_<index> <alias> x y z"; otherwise "x y z <occupant>".
        Returns the formatted line (including trailing newline).
        """
        arg_string = ""
        if index is not None:
            arg_string += (self.occupant + "_" + str(index) + " " + self.occ_alias + " " +
                           (" ").join([str(x) for x in self.position]))
        else:
            arg_string += ((" ").join([str(x) for x in self.position]) + " " + self.occupant)
        arg_string += "\n"
        if stream is not None:
            stream.write(arg_string)
        return arg_string

    def write_charge(self, stream, total_charge=None):
        """Write the charge block to *stream*, defaulting to this site's charge.

        Raises GeomError when neither a site charge nor *total_charge* exists.
        """
        if total_charge is None:
            if self.charge is not None:
                total_charge = float(self.charge)
            else:
                raise GeomError("No charge on site and no charge supplied, can't print charge!")
        else:
            total_charge = float(total_charge)
        stream.write("charge\n")
        stream.write("%g\n" % total_charge)
        stream.write("location of charge\n")
        stream.write((" ").join([str(x) for x in self.position]) + " " + self.occupant)
Site in a basis. Contains: self.occupant = CASM specie name, empty string by default self.occ_alias = alias (atom file) name, empty string by default self.position = vec of float self.charge = charge at this coordinate
62598fac99fddb7c1ca62dcb
class Soubor(object):
    """Writes CSV rows to a generated per-symbol/per-timeframe file.

    Usable as a context manager; rows are written via ``řádek``.
    """
    # Base directory under which per-symbol/per-timeframe folders are created.
    csv_adresář = os.path.join(os.path.dirname(__file__), 'experts/files/talasnica/python')

    def __init__(self, jméno):
        # NOTE(review): ``encoding``, ``symbol``, ``časový_rámec`` and
        # ``JMÉNO_GRAFU`` are not defined in this scope — presumably module
        # globals; confirm they exist at call time.
        self.encoding = encoding
        csv_adresář = self.csv_adresář
        # Create <base>/<symbol with '.' -> '_'>/<chart-name> as needed.
        for adresář in symbol.replace('.', '_'), JMÉNO_GRAFU[časový_rámec]:
            csv_adresář = os.path.join(csv_adresář, adresář)
            if not os.path.isdir(csv_adresář):
                print('vytvářím adresář {}'.format(csv_adresář))
                os.mkdir(csv_adresář)
        csv_adresář = csv_adresář  # NOTE(review): no-op self-assignment
        self.cesta = os.path.join(csv_adresář, '{}.csv'.format(jméno))
        print('uložím do souboru {}'.format(self.cesta))

    def __enter__(self):
        # Open for writing with the configured encoding.
        print('otevírám pro zápis soubor {}'.format(self.cesta))
        self.soubor = open(self.cesta, mode = "w", encoding = self.encoding)
        return self

    def řádek(self, *sloupce):
        # Write one semicolon-separated CSV row.
        csv_řádek = ';'.join(map(str, sloupce))
        print(csv_řádek, file = self.soubor)

    def __exit__(self, *args):
        print('zapsáno do souboru {}'.format(self.cesta))
        self.soubor.close()
zapíše do souboru
62598fac0c0af96317c56347
class Multiply(object):
    """Multiply all pixel intensities by the given value.

    Can be used to make video frames lighter or darker. Accepts a clip as a
    list of PIL images or numpy arrays; returns the same kind it received.

    Args:
        value (float): factor applied to pixel intensities; must be >= 0.
    """

    def __init__(self, value=1.0):
        if value < 0.0:
            # NOTE(review): ValueError would be the conventional type here,
            # kept as TypeError so existing callers' handlers still match.
            raise TypeError('The video is blacked out since for value < 0.0')
        self.value = value

    def __call__(self, clip):
        is_PIL = isinstance(clip[0], PIL.Image.Image)
        if is_PIL:
            clip = [np.asarray(img) for img in clip]
        data_final = []
        for frame in clip:
            scaled = frame.astype(np.float64) * self.value
            # Clamp to the valid 8-bit range in one pass (replaces the two
            # np.where calls and a redundant double astype in the original).
            data_final.append(np.clip(scaled, 0, 255).astype(np.uint8))
        if is_PIL:
            return [PIL.Image.fromarray(img) for img in data_final]
        return data_final
Multiply all pixel intensities with given value. This augmenter can be used to make images lighter or darker. Args: value (float): The value with which to multiply the pixel intensities of video.
62598fac71ff763f4b5e7734
class SugarActivityCheckBase(CheckBase):
    """Common base class for sugar checks."""

    def __init__(self, base):
        # __file__ identifies this plugin module to the check framework.
        CheckBase.__init__(self, base, __file__)
Common base class for sugar checks.
62598fac56ac1b37e63021b0
class User(AbstractUser):
    """Custom user carrying a role; non-admin users get a matching profile on save.

    A parent profile has a foreign key relationship to a list of students.
    """
    USER_TYPE_CHOICES = (
        ('admin', 'Administrator'),
        ('parent', 'Parent'),
        ('student', 'Student'),
    )
    user_type = models.CharField('User Type', max_length=16, editable=False,
                                 choices=USER_TYPE_CHOICES, default='admin')

    def save(self, force_insert=False, force_update=False, *args, **kwargs):
        # BUG FIX: the original called super(AbstractUser, self).save(...),
        # which skips AbstractUser itself in the MRO. *args/**kwargs keep
        # compatibility with Django's save(using=..., update_fields=...).
        super(User, self).save(force_insert, force_update, *args, **kwargs)
        if self.user_type == 'admin':
            return
        # Resolve e.g. 'student' -> StudentProfile defined in this module
        # (the original used an undefined free function ``capitalize``).
        profileType = globals()[self.user_type.capitalize() + 'Profile']
        myProfile = profileType.objects.filter(user_id=self.id)
        if not len(myProfile):
            # BUG FIX: create with user_id=..., matching the filter above;
            # the original passed a raw pk to the FK *object* field.
            myProfile.create(user_id=self.id)
A parent profile has a foreign key relationship to: * a list of students
62598fac63d6d428bbee2770
class Content(urwid.ListBox):
    """Container to hold header, keywords, and body widgets."""

    def __init__(self):
        self.header = HeaderWidget()
        self.keywords = KeywordsWidget()
        self.record_body = BodyWidget()
        # The keywords widget is NOT in the initial walker; it is inserted /
        # removed by show_keywords()/hide_keywords() based on interaction mode.
        super(Content, self).__init__(
            body=urwid.SimpleFocusListWalker([
                self.header,
                urwid.Divider('-'),
                self.record_body
            ]))

    def keyword_widget_handler(self):
        """Show or hide the keywords widget based on the header's mode text."""
        interaction_mode = self.header.interaction.text
        switch = {
            'INSERT': self.show_keywords,
            'COMMAND': self.hide_keywords
        }
        # NOTE(review): any mode other than INSERT/COMMAND raises KeyError here.
        switch[interaction_mode]()

    def show_keywords(self):
        # Insert just below the header (position 1).
        self.base_widget.body.insert(1, self.keywords)

    def hide_keywords(self):
        # Remove only when actually shown (walker then holds 4 items).
        if len(self.base_widget.body) == 4:
            self.keywords = self.base_widget.body.pop(1)
Container to hold header, keywords, and body widgets
62598fac10dbd63aa1c70b78
class PushConf(object):
    """Configuration of remote registries: docker-registry or pulp."""

    def __init__(self):
        # "docker" holds a list of DockerRegistry entries;
        # "pulp" maps registry name -> PulpRegistry.
        self._registries = {"docker": [], "pulp": {}}

    def add_docker_registry(self, registry_uri, insecure=False):
        """Register one docker registry and return the created entry."""
        if registry_uri is None:
            raise RuntimeError("registry URI cannot be None")
        registry = DockerRegistry(registry_uri, insecure=insecure)
        self._registries["docker"].append(registry)
        return registry

    def add_docker_registries(self, registry_uris, insecure=False):
        """Register several docker registries at once."""
        for uri in registry_uris:
            self.add_docker_registry(uri, insecure=insecure)

    def add_pulp_registry(self, name, crane_uri):
        """Register a pulp registry under *name* and return the created entry."""
        if crane_uri is None:
            raise RuntimeError("registry URI cannot be None")
        registry = PulpRegistry(name, crane_uri)
        self._registries["pulp"][name] = registry
        return registry

    @property
    def has_some_docker_registry(self):
        """True when at least one docker registry is configured."""
        return bool(self.docker_registries)

    @property
    def docker_registries(self):
        return self._registries["docker"]

    @property
    def pulp_registries(self):
        return list(self._registries["pulp"].values())

    @property
    def all_registries(self):
        """Docker registries followed by pulp registries."""
        return self.docker_registries + self.pulp_registries
configuration of remote registries: docker-registry or pulp
62598fac7d847024c075c388
class TestTask(unittest.TestCase):
    """Task unit test stubs (generated for the airflow client Task model)."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def make_instance(self, include_optional):
        """Build a Task; populate every optional field when requested."""
        if include_optional:
            return Task(
                class_ref=airflow_client.models.class_reference.ClassReference(
                    module_path='0',
                    class_name='0', ),
                task_id='0',
                owner='0',
                start_date=datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                end_date=datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                trigger_rule='all_success',
                extra_links=[
                    airflow_client.models.task_extra_links.Task_extra_links(
                        class_ref=airflow_client.models.class_reference.ClassReference(
                            module_path='0',
                            class_name='0', ), )
                ],
                depends_on_past=True,
                wait_for_downstream=True,
                retries=1.337,
                queue='0',
                pool='0',
                pool_slots=1.337,
                execution_timeout=airflow_client.models.time_delta.TimeDelta(
                    __type='0',
                    days=56,
                    seconds=56,
                    microsecond=56, ),
                retry_delay=airflow_client.models.time_delta.TimeDelta(
                    __type='0',
                    days=56,
                    seconds=56,
                    microsecond=56, ),
                retry_exponential_backoff=True,
                priority_weight=1.337,
                weight_rule='downstream',
                ui_color='a',
                ui_fgcolor='a',
                template_fields=['0'],
                sub_dag=airflow_client.models.dag.DAG(
                    dag_id='0',
                    root_dag_id='0',
                    is_paused=True,
                    is_subdag=True,
                    fileloc='0',
                    file_token='0',
                    owners=['0'],
                    description='0',
                    # BUG FIX: the generator emitted the JSON literal ``null``,
                    # which is a NameError in Python; use None.
                    schedule_interval=None,
                    tags=[airflow_client.models.tag.Tag(name='0', )], ),
                downstream_task_ids=['0']
            )
        else:
            return Task()

    def testTask(self):
        """Smoke-test construction with required-only and all-optional fields."""
        inst_req_only = self.make_instance(include_optional=False)
        inst_req_and_optional = self.make_instance(include_optional=True)
Task unit test stubs
62598fac56b00c62f0fb287a
class Board(object):
    """One configuration (state) of the 8-puzzle board.

    The board is stored linearly, e.g. [1, 2, 3, 4, 5, 6, 7, 8, "x"],
    where "x" marks the empty tile:

        1 2 3
        4 5 6
        7 8 x
    """

    def __init__(self, tiles):
        self.goal = [1, 2, 3, 4, 5, 6, 7, 8, "x"]
        self.tiles = tiles

    def is_goal(self):
        """Return True when the tiles match the solved configuration."""
        return self.tiles == self.goal

    def heuristic(self):
        """Hamming distance: count of positions (blank included) out of place."""
        target = [1, 2, 3, 4, 5, 6, 7, 8, "x"]
        return sum(1 for mine, want in zip(self.tiles, target) if mine != want)

    def get_neighbors(self):
        """Return the Boards reachable by sliding one tile into the blank.

        Order is deterministic: blank moves right, left, down, then up,
        whenever the move stays on the 3x3 grid.
        """
        blank = max(i for i, tile in enumerate(self.tiles) if tile == 'x')
        moves = (
            (1, {0, 1, 3, 4, 6, 7}),    # blank slides right
            (-1, {1, 2, 4, 5, 7, 8}),   # blank slides left
            (3, {0, 1, 2, 3, 4, 5}),    # blank slides down
            (-3, {3, 4, 5, 6, 7, 8}),   # blank slides up
        )
        neighbors = []
        for offset, legal_from in moves:
            if blank in legal_from:
                swapped = self.tiles.copy()
                swapped[blank], swapped[blank + offset] = swapped[blank + offset], 'x'
                neighbors.append(Board(swapped))
        return neighbors

    def __eq__(self, other):
        return self.tiles == other.tiles

    def __hash__(self):
        return hash(tuple(self.tiles))

    def __str__(self):
        return str(self.tiles)

    def __repr__(self):
        return str(self.tiles)

    def print_board(self):
        """Print the board as three rows of three."""
        for row_start in (0, 3, 6):
            print(self.tiles[row_start:row_start + 3])
Esta classe representa uma configuração do tabuleiro do quebra-cabeça. O tabuleiro é um estado no problema de busca. O tabuleiro tem 9 posições (em inglês tiles), sendo 8 posições dedicadas aos números de 1 até 8 e uma posição especial "x" que representa a posição vazia. O tabuleiro é representado de forma linear, por exemplo, [1, 2, 3, 4, 5, 6, 7, 8, "x"], que visualmente representa o tabuleiro: 1 2 3 4 5 6 7 8 x
62598fac4a966d76dd5eeea6
class RetailerBusinessCreateSerializer(serializers.ModelSerializer):
    """Serializer to create a RetailerBusiness record."""
    # Nested bank details are read-only here; they are managed separately.
    bank_details = RetailerBankDetailsSerializer(read_only=True)

    class Meta:
        model = RetailerBusiness
        fields = ('id', 'retailer_id', 'name', 'address1', 'address2', 'postcode',
                  'town', 'unique_string', 'county', 'country', 'company_number',
                  'vat_number', 'notes', 'bank_details', 'is_limited', 'is_active')
serializer to create Retailer business
62598fac283ffb24f3cf3852
class Solution:
    """
    @param: root: the root of the binary tree
    @return: all root-to-leaf paths
    """

    def binaryTreePaths(self, root):
        # Empty tree: no paths at all.
        if not root:
            return []
        # Leaf: the only path is this node's own value.
        if not root.left and not root.right:
            return [str(root.val)]
        # Prefix this node's value onto every subtree path (left subtree first).
        child_paths = self.binaryTreePaths(root.left) + self.binaryTreePaths(root.right)
        return ['{}->{}'.format(root.val, tail) for tail in child_paths]
@param: root: the root of the binary tree @return: all root-to-leaf paths
62598fac3317a56b869be52d
class Exciter(GroupBase):
    """Exciter group for synchronous generators."""

    def __init__(self):
        super().__init__()
        # Parameters/variables shared by every exciter model in this group.
        self.common_params.extend(('syn',))
        self.common_vars.extend(('vout', 'vi',))
        # Back-references filled in by linked voltage compensators and PSS models.
        self.VoltComp = BackRef()
        self.PSS = BackRef()
Exciter group for synchronous generators.
62598fac5fcc89381b26612f
class NoAliasingCompensation(AliasingCompensation):
    """A pass-through aliasing compensation: signals are forwarded unchanged."""

    def __init__(self, input_signal=None, maximum_harmonics=1):
        AliasingCompensation.__init__(self,
                                      input_signal=input_signal,
                                      maximum_harmonics=maximum_harmonics)

    def CreateModified(self, input_signal=None, maximum_harmonics=None):
        """Return a copy of this instance; None arguments keep current values."""
        signal = self._input_signal if input_signal is None else input_signal
        harmonics = (self._maximum_harmonics if maximum_harmonics is None
                     else maximum_harmonics)
        return self.__class__(input_signal=signal, maximum_harmonics=harmonics)
A class which acts as a pass through of signals.
62598face5267d203ee6b8d0
class OpenProducer(Open):
    """Open producer spec: an Open frame flagged as a producer endpoint."""

    def __init__(self, name):
        Open.__init__(self, name, constants.FLAG_OPEN_PRODUCER)
Open producer spec
62598facac7a0e7691f724cf
class File(db.Model):
    """Model for file storage paths, each linked to an incident."""
    __tablename__ = 'files'
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    path = db.Column(db.String(200))
    uploaded_on = db.Column(db.DateTime)
    incident_id = db.Column(db.Integer, db.ForeignKey('incidents.id'))

    def __init__(self, path="", incident_id=None):
        self.path = path
        # NOTE(review): naive local time — confirm UTC is not expected here.
        self.uploaded_on = datetime.now()
        self.incident_id = incident_id

    def __repr__(self):
        return '<File object. Path: {}>'.format(self.path)
Model for files storage paths
62598fac460517430c432040
class LoopGC(VM):
    """Unconditional start-to-finish program execution in Python.

    Garbage collection is possible on intermediate results: after each thunk
    runs, the storage cells it no longer needs downstream are cleared.
    """

    def __init__(self, nodes, thunks, pre_call_clear, post_thunk_clear):
        super(LoopGC, self).__init__(nodes, thunks, pre_call_clear)
        # post_thunk_clear[i] lists the storage cells to free after thunks[i].
        self.post_thunk_clear = post_thunk_clear
        self.allow_gc = True
        if not (len(nodes) == len(thunks) == len(post_thunk_clear)):
            raise ValueError()

    def __call__(self):
        if self.time_thunks:
            # Timed path: also record per-thunk call counts and durations.
            for cont in self.pre_call_clear:
                cont[0] = None
            try:
                i = 0
                for thunk, node, old_storage in zip(self.thunks, self.nodes,
                                                    self.post_thunk_clear):
                    t0 = time.time()
                    thunk()
                    t1 = time.time()
                    self.call_counts[i] += 1
                    self.call_times[i] += t1 - t0
                    # Free intermediate results no longer needed downstream.
                    for old_s in old_storage:
                        old_s[0] = None
                    i += 1
            except Exception:
                # NOTE(review): relies on the loop variables ``node``/``thunk``
                # still holding the failing iteration's values.
                link.raise_with_op(node, thunk)
        else:
            for cont in self.pre_call_clear:
                cont[0] = None
            try:
                for thunk, node, old_storage in zip(self.thunks, self.nodes,
                                                    self.post_thunk_clear):
                    thunk()
                    for old_s in old_storage:
                        old_s[0] = None
            except Exception:
                link.raise_with_op(node, thunk)
Unconditional start-to-finish program execution in Python. Garbage collection is possible on intermediate results.
62598fac9c8ee82313040154
class CharacterExtraction:
    """Splits a binarized text-line image into per-character sub-images.

    Columns are scanned left to right; a column containing any pixel value
    below 100 counts as "black". Each maximal run of black columns delimits
    one character, which is cut out (with a one-column margin where the
    image allows it) and wrapped in a new ImageObject.
    """

    def __init__(self):
        self.__image_matrix = 0   # source pixel matrix, set by split_image()
        self.__image_name = ""
        self.__image_height = 0
        self.__image_width = 0
        self.__image_list = []    # extracted character ImageObjects

    def split_image(self, image_object):
        """Extract characters from *image_object*, appending each to the list."""
        self.__image_matrix = image_object.get_image_matrix()
        self.__image_name = image_object.get_image_name()
        self.__image_height = image_object.get_image_height()
        self.__image_width = image_object.get_image_width()
        has_black = False       # current column contains a black pixel
        pre_col_black = False   # some earlier column of this run was black
        left_limit = 0
        right_limit = 0
        no = 0                  # running character counter, used in file names
        for col in range(self.__image_width):
            if has_black:
                pre_col_black = True
            has_black = False
            for row in range(self.__image_height):
                if self.__image_matrix[row, col] < 100:
                    has_black = True
                    if not pre_col_black:
                        left_limit = col  # first black column of a new run
            if pre_col_black and not has_black:
                # A black run just ended: cut out the character.
                no += 1
                right_limit = col
                image_width = right_limit - left_limit
                # BUG FIX: clamp the left margin at 0 — the original used
                # ``left_limit - 1`` unconditionally, which for a character
                # starting at column 0 produced a negative index and hence a
                # wrong (near-empty) slice.
                char_img = self.__image_matrix[:, max(left_limit - 1, 0):right_limit + 1]
                image_name = "{}char.bmp".format(no)
                image_object = ImageObject()
                image_object.set_image_matrix(char_img)
                image_object.set_image_name(image_name)
                image_object.set_image_width(image_width)
                image_object.set_image_height(self.__image_height)
                self.__image_list.append(image_object)
                pre_col_black = False

    def get_image_list(self):
        """Return the list of extracted character ImageObjects."""
        return self.__image_list
Splits a binarized text-line image into individual character images by scanning for runs of columns containing black pixels.
62598fac7b25080760ed7475
class add_header_redefinition(Plugin):
    """Detects nested "add_header" directives that silently drop parent headers.

    Insecure example::

        server {
            add_header X-Content-Type-Options nosniff;
            location / {
                add_header X-Frame-Options DENY;  # drops nosniff!
            }
        }
    """
    summary = 'Nested "add_header" drops parent headers.'
    severity = gixy.severity.MEDIUM
    description = ('"add_header" replaces ALL parent headers. '
                   'See documentation: http://nginx.org/en/docs/http/ngx_http_headers_module.html#add_header')
    help_url = 'https://github.com/yandex/gixy/blob/master/docs/en/plugins/addheaderredefinition.md'
    directives = ['server', 'location', 'if']
    # Security-relevant headers whose silent loss is worth reporting.
    options = {'headers': set(['x-frame-options',
                               'x-content-type-options',
                               'x-xss-protection',
                               'content-security-policy',
                               'cache-control'])}

    def __init__(self, config):
        super(add_header_redefinition, self).__init__(config)
        self.interesting_headers = self.config.get('headers')

    def audit(self, directive):
        """Report interesting headers set in an ancestor block but missing here."""
        if not directive.is_block:
            return
        actual_headers = get_headers(directive)
        if not actual_headers:
            return
        for parent in directive.parents:
            parent_headers = get_headers(parent)
            if not parent_headers:
                continue
            # Headers present in the ancestor but absent at this level.
            diff = (parent_headers - actual_headers) & self.interesting_headers
            if len(diff):
                self._report_issue(directive, parent, diff)
            # Only the nearest header-setting ancestor matters for inheritance.
            break

    def _report_issue(self, current, parent, diff):
        directives = []
        directives.extend(parent.find('add_header'))
        directives.extend(current.find('add_header'))
        reason = 'Parent headers "{headers}" was dropped in current level'.format(headers='", "'.join(diff))
        self.add_issue(directive=directives, reason=reason)
Insecure example: server { add_header X-Content-Type-Options nosniff; location / { add_header X-Frame-Options DENY; } }
62598fac2c8b7c6e89bd378b
class activity(AbstractedFileStructureElement):
    """Activity management class.

    NOTE(review): the class-level names below are bare annotations (no values
    assigned), so instances do not actually carry these attributes unless set
    elsewhere — ``name()`` raises AttributeError when ``title`` is unset.
    """
    campus: "wuecampus"   # owning campus (forward reference by name)
    course_: "course"     # owning course
    section_: "section"   # owning section
    title: str            # raw activity title

    def name(self) -> str:
        # Normalized title used as this element's file-system name.
        return normalized(self.title)
Activity management class.
62598fac67a9b606de545f93
class Add1(AvocadoTest):
    """
    :avocado: enable
    """
    # Minimal avocado test stub: setup, teardown and test are all no-ops.

    def setUp(self):
        pass

    def tearDown(self, *args, **kwargs):
        pass

    def test(self):
        pass
:avocado: enable
62598facfff4ab517ebcd7ab
class XTCReader(XDRBaseReader):
    """Reader for XTC, the compressed trajectory format from Gromacs.

    Coordinates are saved with reduced precision (3 decimal places) compared
    to lossless formats like TRR and DCD; XTC files are much smaller and the
    loss of precision is usually not a problem.
    """
    format = 'XTC'
    units = {'time': 'ps', 'length': 'nm'}  # native Gromacs units
    _writer = XTCWriter
    _file = XTCFile

    def _frame_to_ts(self, frame, ts):
        """Copy a raw XTC *frame* into the Timestep *ts* and return it."""
        ts.frame = self._frame
        ts.time = frame.time
        ts.data['step'] = frame.step
        ts.dimensions = triclinic_box(*frame.box)
        # Optional atom-subset selection.
        if self._sub is not None:
            ts.positions = frame.x[self._sub]
        else:
            ts.positions = frame.x
        if self.convert_units:
            # Convert positions and box lengths from nm in place.
            self.convert_pos_from_native(ts.positions)
            self.convert_pos_from_native(ts.dimensions[:3])
        return ts
XTC is a compressed trajectory format from Gromacs. The trajectory is saved with reduced precision (3 decimal places) compared to other lossless formarts like TRR and DCD. The main advantage of XTC files is that they require significantly less disk space and the loss of precision is usually not a problem. Notes ----- See :ref:`Notes on offsets <offsets-label>` for more information about offsets.
62598facbaa26c4b54d4f279
class ApplicationGatewayListResult(msrest.serialization.Model):
    """Response for the ListApplicationGateways API service call.

    :param value: List of application gateways in a resource group.
    :param next_link: URL to get the next set of results.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[ApplicationGateway]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(ApplicationGatewayListResult, self).__init__(**kwargs)
        self.value = kwargs.get('value', None)
        self.next_link = kwargs.get('next_link', None)
Response for ListApplicationGateways API service call. :param value: List of an application gateways in a resource group. :type value: list[~azure.mgmt.network.v2020_08_01.models.ApplicationGateway] :param next_link: URL to get the next set of results. :type next_link: str
62598faca8370b77170f03a2
class ProgressBar(RemoteProgress):
    """Nice looking progress bar for long running git commands."""

    class Action(Enum):
        PULL = 1
        PUSH = 2

    def setup(self, repo_name, action=Action.PULL):
        """Create the bar with a message describing the transfer direction."""
        if action == ProgressBar.Action.PULL:
            message = 'Pulling from {}'.format(repo_name)
        elif action == ProgressBar.Action.PUSH:
            message = 'Pushing to {}'.format(repo_name)
        # NOTE(review): any other ``action`` leaves ``message`` unbound and
        # raises UnboundLocalError below — confirm only PULL/PUSH are passed.
        self.bar = Bar(message=message, suffix='')

    def update(self, op_code, cur_count, max_count=100, message=''):
        """GitPython progress callback: advance the bar to ``cur_count``."""
        # git may report max_count as None or '' — fall back to 100.
        max_count = int(max_count or 100)
        if max_count != self.bar.max:
            self.bar.max = max_count
        self.bar.goto(int(cur_count))
Nice looking progress bar for long running commands
62598fac21bff66bcd722c2d
class User(Base):
    """Describes the structure of the ``user`` table, which stores user data."""
    __tablename__ = 'user'
    # UUID string primary key.
    id = sa.Column(sa.String(36), primary_key=True)
    first_name = sa.Column(sa.Text)
    last_name = sa.Column(sa.Text)
    gender = sa.Column(sa.Text)
    email = sa.Column(sa.Text)
    # Stored as text, not a date type.
    birthdate = sa.Column(sa.Text)
    height = sa.Column(sa.Float)
Описывает структуру таблицы user, содержащую данные о пользователях
62598fac01c39578d7f12d46
class TestMasterUI(RWTestCase):
    """Exercise the MasterUI model class."""

    def setUp(self):
        # BUG FIX: the original called super(type(self), TestMasterUI).setUp(self),
        # which breaks under subclassing (type(self) would be the subclass while
        # the second argument stays the class TestMasterUI). Use the standard form.
        super(TestMasterUI, self).setUp()
        self.masterui = mommy.make('rw.MasterUI')
        self.other_uimode = mommy.make('rw.UIMode')
        self.project = self.masterui.project

    @use_locmemcache(models, 'cache')
    def test_tag_category_cache_invalidation_post_save(self):
        """Saving a changed ui_mode must invalidate the cached tag categories."""
        ui_mode_name = self.masterui.ui_mode.name
        get_orig_tag_cats = self.project.get_tag_cats_by_ui_mode(ui_mode_name)
        self.assertIn(self.masterui.tag_category, get_orig_tag_cats)
        # Mutating without saving still serves the cached (stale) value.
        self.masterui.ui_mode = self.other_uimode
        get_tag_cats = self.project.get_tag_cats_by_ui_mode(ui_mode_name)
        self.assertIn(self.masterui.tag_category, get_tag_cats)
        # Saving triggers the post-save cache invalidation.
        self.masterui.save()
        get_tag_cats = self.project.get_tag_cats_by_ui_mode(ui_mode_name)
        self.assertNotIn(self.masterui.tag_category, get_tag_cats)
exercise MasterUI model class
62598fac8e7ae83300ee9069
class SpellingPropertiesDialog(QDialog, Ui_SpellingPropertiesDialog): <NEW_LINE> <INDENT> def __init__(self, project, new, parent): <NEW_LINE> <INDENT> QDialog.__init__(self, parent) <NEW_LINE> self.setupUi(self) <NEW_LINE> self.project = project <NEW_LINE> self.parent = parent <NEW_LINE> self.pwlCompleter = E4FileCompleter(self.pwlEdit) <NEW_LINE> self.pelCompleter = E4FileCompleter(self.pelEdit) <NEW_LINE> projectSpellings = QStringList(self.trUtf8("<default>")) <NEW_LINE> for language in sorted(SpellChecker.getAvailableLanguages()): <NEW_LINE> <INDENT> projectSpellings.append(language) <NEW_LINE> <DEDENT> self.spellingComboBox.addItems(projectSpellings) <NEW_LINE> if not new: <NEW_LINE> <INDENT> self.initDialog() <NEW_LINE> <DEDENT> <DEDENT> def initDialog(self): <NEW_LINE> <INDENT> index = self.spellingComboBox.findText(self.project.pdata["SPELLLANGUAGE"][0]) <NEW_LINE> if index == -1: <NEW_LINE> <INDENT> index = 0 <NEW_LINE> <DEDENT> self.spellingComboBox.setCurrentIndex(index) <NEW_LINE> if self.project.pdata["SPELLWORDS"][0]: <NEW_LINE> <INDENT> self.pwlEdit.setText( os.path.join(self.project.ppath, self.project.pdata["SPELLWORDS"][0])) <NEW_LINE> <DEDENT> if self.project.pdata["SPELLEXCLUDES"][0]: <NEW_LINE> <INDENT> self.pelEdit.setText( os.path.join(self.project.ppath, self.project.pdata["SPELLEXCLUDES"][0])) <NEW_LINE> <DEDENT> <DEDENT> @pyqtSignature("") <NEW_LINE> def on_pwlButton_clicked(self): <NEW_LINE> <INDENT> pwl = self.pwlEdit.text() <NEW_LINE> if pwl.isEmpty(): <NEW_LINE> <INDENT> pwl = self.project.ppath <NEW_LINE> <DEDENT> file = KQFileDialog.getOpenFileName( self, self.trUtf8("Select project word list"), pwl, self.trUtf8("Dictionary File (*.dic);;All Files (*)")) <NEW_LINE> if not file.isEmpty(): <NEW_LINE> <INDENT> self.pwlEdit.setText(Utilities.toNativeSeparators(file)) <NEW_LINE> <DEDENT> <DEDENT> @pyqtSignature("") <NEW_LINE> def on_pelButton_clicked(self): <NEW_LINE> <INDENT> pel = self.pelEdit.text() <NEW_LINE> if pel.isEmpty(): 
<NEW_LINE> <INDENT> pel = self.project.ppath <NEW_LINE> <DEDENT> file = KQFileDialog.getOpenFileName( self, self.trUtf8("Select project exclude list"), pel, self.trUtf8("Dictionary File (*.dic);;All Files (*)")) <NEW_LINE> if not file.isEmpty(): <NEW_LINE> <INDENT> self.pelEdit.setText(Utilities.toNativeSeparators(file)) <NEW_LINE> <DEDENT> <DEDENT> def storeData(self): <NEW_LINE> <INDENT> if self.spellingComboBox.currentIndex() == 0: <NEW_LINE> <INDENT> self.project.pdata["SPELLLANGUAGE"] = [Preferences.getEditor("SpellCheckingDefaultLanguage")] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.project.pdata["SPELLLANGUAGE"] = [unicode(self.spellingComboBox.currentText())] <NEW_LINE> <DEDENT> self.project.pdata["SPELLWORDS"] = [self.project.getRelativePath(unicode(self.pwlEdit.text()))] <NEW_LINE> self.project.pdata["SPELLEXCLUDES"] = [self.project.getRelativePath(unicode(self.pelEdit.text()))]
Class implementing the Spelling Properties dialog.
62598fac3346ee7daa33762c
class _CompleterModel(QAbstractItemModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> QAbstractItemModel.__init__(self) <NEW_LINE> self.completer = None <NEW_LINE> <DEDENT> def index(self, row, column, parent): <NEW_LINE> <INDENT> return self.createIndex(row, column) <NEW_LINE> <DEDENT> def parent(self, index): <NEW_LINE> <INDENT> return QModelIndex() <NEW_LINE> <DEDENT> def rowCount(self, index): <NEW_LINE> <INDENT> if index.isValid(): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> if self.completer is None: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return self.completer.rowCount() <NEW_LINE> <DEDENT> def columnCount(self, index): <NEW_LINE> <INDENT> if self.completer is None: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return self.completer.columnCount() <NEW_LINE> <DEDENT> def data(self, index, role): <NEW_LINE> <INDENT> if self.completer is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if role == Qt.DisplayRole: <NEW_LINE> <INDENT> return self.completer.text(index.row(), index.column()) <NEW_LINE> <DEDENT> elif role == Qt.DecorationRole: <NEW_LINE> <INDENT> return self.completer.icon(index.row(), index.column()) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def setCompleter(self, completer): <NEW_LINE> <INDENT> self.completer = completer <NEW_LINE> self.modelReset.emit()
QAbstractItemModel implementation. Adapter between complex and not intuitive QAbstractItemModel interface and simple AbstractCompleter interface. Provides data for TreeView with completions and information
62598fac0c0af96317c56349
class TestDataIntegrationsApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = openlattice.api.data_integrations_api.DataIntegrationsApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_entity_key_ids(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_integrate_entity_and_association_data(self): <NEW_LINE> <INDENT> pass
DataIntegrationsApi unit test stubs
62598fac10dbd63aa1c70b7a
class SearchFacultyBusiness(webapp2.RequestHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> user = users.get_current_user() <NEW_LINE> query = db.GqlQuery("SELECT * from CountReviews") <NEW_LINE> if user: <NEW_LINE> <INDENT> template_values = { 'user_mail': users.get_current_user().email(), 'logout': users.create_logout_url(self.request.host_url), 'query': query, } <NEW_LINE> template = jinja_environment.get_template('business.html') <NEW_LINE> self.response.out.write(template.render(template_values)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.redirect(self.request.host_url)
Display search page
62598fac7d847024c075c38a
class Comment(models.Model): <NEW_LINE> <INDENT> author = models.CharField(max_length=80) <NEW_LINE> email = models.EmailField() <NEW_LINE> text = models.CharField(max_length=160) <NEW_LINE> commented = models.DateTimeField(default=timezone.now) <NEW_LINE> event = models.ForeignKey(Event, on_delete=models.CASCADE, related_name='comment_event') <NEW_LINE> """Retorna a partir do endereço de email, um avatar configurado no Gravatar""" <NEW_LINE> def avatar(self): <NEW_LINE> <INDENT> g = Gravatar(self.email) <NEW_LINE> return g.get_image(default='identicon') <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{} comentou em {:%c}".format(self.author, self.commented)
"Comentário efetuados em um determinado evento.
62598faca79ad1619776a02d
class Br(SelfClosingTag): <NEW_LINE> <INDENT> tag = "br"
Class that changes the closing tag to breaks
62598fac66673b3332c30393
class ModelFactory(object): <NEW_LINE> <INDENT> models = dict() <NEW_LINE> loaders = dict() <NEW_LINE> @staticmethod <NEW_LINE> def get_model(name, params): <NEW_LINE> <INDENT> return ModelFactory.models[name](params) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def load_model(name, files, params): <NEW_LINE> <INDENT> return ModelFactory.loaders[name](files, params) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def register_model(name,mod): <NEW_LINE> <INDENT> ModelFactory.models[name] = mod <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def register_loader(name, loader): <NEW_LINE> <INDENT> ModelFactory.loaders[name] = loader
A factory class for managing the model loaders and builder. Loaders and builders can be registered and then they can get used by the with_model_loader or with_model_builder decorators.
62598face1aae11d1e7ce807
class CompatLogCaptureFixture(LogCaptureFixture): <NEW_LINE> <INDENT> def _warn_compat(self, old, new): <NEW_LINE> <INDENT> self._item.warn(code='L1', message=("{0} is deprecated, use {1} instead" .format(old, new))) <NEW_LINE> <DEDENT> @CallableStr.compat_property <NEW_LINE> def text(self): <NEW_LINE> <INDENT> return super(CompatLogCaptureFixture, self).text <NEW_LINE> <DEDENT> @CallableList.compat_property <NEW_LINE> def records(self): <NEW_LINE> <INDENT> return super(CompatLogCaptureFixture, self).records <NEW_LINE> <DEDENT> @CallableList.compat_property <NEW_LINE> def record_tuples(self): <NEW_LINE> <INDENT> return super(CompatLogCaptureFixture, self).record_tuples <NEW_LINE> <DEDENT> def setLevel(self, level, logger=None): <NEW_LINE> <INDENT> self._warn_compat(old="'caplog.setLevel()'", new="'caplog.set_level()'") <NEW_LINE> return self.set_level(level, logger) <NEW_LINE> <DEDENT> def atLevel(self, level, logger=None): <NEW_LINE> <INDENT> self._warn_compat(old="'caplog.atLevel()'", new="'caplog.at_level()'") <NEW_LINE> return self.at_level(level, logger)
Backward compatibility with pytest-capturelog.
62598fac4a966d76dd5eeea8
class Catalog(Updateable, Pretty): <NEW_LINE> <INDENT> pretty_attrs = ['name', 'items'] <NEW_LINE> def __init__(self, name=None, description=None, items=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.description = description <NEW_LINE> self.items = items <NEW_LINE> <DEDENT> def create(self): <NEW_LINE> <INDENT> sel.force_navigate('catalog_new') <NEW_LINE> sel.wait_for_element(form.name_text) <NEW_LINE> web_ui.fill(form, {'name_text': self.name, 'description_text': self.description, 'button_multiselect': self.items}, action=form.add_button) <NEW_LINE> flash_str = 'Catalog "{}" was saved' <NEW_LINE> flash.assert_success_message(flash_str.format(self.name)) <NEW_LINE> <DEDENT> def update(self, updates): <NEW_LINE> <INDENT> sel.force_navigate('catalog_edit', context={'catalog': self}) <NEW_LINE> web_ui.fill(form, {'name_text': updates.get('name', None), 'description_text': updates.get('description', None), 'button_multiselect': updates.get('items', None)}, action=form.save_button) <NEW_LINE> flash.assert_success_message('Catalog "{}" was saved'.format(self.name)) <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> sel.force_navigate('catalog', context={'catalog': self}) <NEW_LINE> cfg_btn("Remove Item from the VMDB", invokes_alert=True) <NEW_LINE> sel.handle_alert() <NEW_LINE> flash.assert_success_message( 'Catalog "{}": Delete successful'.format(self.description or self.name))
Represents a Catalog
62598fac3317a56b869be52e
class Resize(object): <NEW_LINE> <INDENT> def __init__(self, target_shape, correct_box = False): <NEW_LINE> <INDENT> self.h_target, self.w_target = target_shape <NEW_LINE> self.correct_box = correct_box <NEW_LINE> <DEDENT> def __call__(self, img, bboxes = None): <NEW_LINE> <INDENT> h_org, w_org, _= img.shape <NEW_LINE> img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB).astype(np.float32) <NEW_LINE> resize_ratio = min(1.0 * self.w_target / w_org, 1.0 * self.h_target / h_org) <NEW_LINE> resize_w = max(int(resize_ratio * w_org), 1) <NEW_LINE> resize_h = max(int(resize_ratio * h_org), 1) <NEW_LINE> image_resized = cv2.resize(img, (resize_w, resize_h)) <NEW_LINE> image_paded = np.full((self.h_target, self.w_target, 3), 128.0) <NEW_LINE> dw = int((self.w_target - resize_w) / 2) <NEW_LINE> dh = int((self.h_target - resize_h) / 2) <NEW_LINE> image_paded[dh:resize_h + dh, dw:resize_w + dw, :] = image_resized <NEW_LINE> image = image_paded / 255.0 <NEW_LINE> if self.correct_box: <NEW_LINE> <INDENT> bboxes[:, [0, 2]] = bboxes[:, [0, 2]] * resize_ratio + dw <NEW_LINE> bboxes[:, [1, 3]] = bboxes[:, [1, 3]] * resize_ratio + dh <NEW_LINE> return image, bboxes <NEW_LINE> <DEDENT> return image
调整图片大小 __init__ args: target_shape: (h_target, w_target),调整后的图片大小 correct_box: bool = False,对框也进行对应调整 __call__ args: img: 待调整的图片 bboxes: default = None,待调整的框,实际值 returns: image: 调整后的图片 bboxes: 调整后的框(如果correct_box == True),实际值 notes: 将图片转为目标大小,BGR转换为RGB,归一化到[0, 1]上 bboxes依然是以图片大小为参考的实际值
62598fac8e7ae83300ee906a
class IPListMixin(object): <NEW_LINE> <INDENT> def __iter__(self): <NEW_LINE> <INDENT> start_ip = IPAddress(self.first, self.version) <NEW_LINE> end_ip = IPAddress(self.last, self.version) <NEW_LINE> return iter_iprange(start_ip, end_ip) <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return int(self.last - self.first + 1) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> size = self.size <NEW_LINE> if size > _sys_maxint: <NEW_LINE> <INDENT> raise IndexError(("range contains more than %d (index size max) " "IP addresses! Use the .size property instead." % _sys_maxint)) <NEW_LINE> <DEDENT> return size <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> item = None <NEW_LINE> if hasattr(index, 'indices'): <NEW_LINE> <INDENT> if self._module.version == 6: <NEW_LINE> <INDENT> raise TypeError('IPv6 slices are not supported!') <NEW_LINE> <DEDENT> (start, stop, step) = index.indices(self.size) <NEW_LINE> if (start + step < 0) or (step > stop): <NEW_LINE> <INDENT> item = iter([IPAddress(self.first, self.version)]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start_ip = IPAddress(self.first + start, self.version) <NEW_LINE> end_ip = IPAddress(self.first + stop - step, self.version) <NEW_LINE> item = iter_iprange(start_ip, end_ip, step) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> index = int(index) <NEW_LINE> if (- self.size) <= index < 0: <NEW_LINE> <INDENT> item = IPAddress(self.last + index + 1, self.version) <NEW_LINE> <DEDENT> elif 0 <= index <= (self.size - 1): <NEW_LINE> <INDENT> item = IPAddress(self.first + index, self.version) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise IndexError('index out range for address range size!') <NEW_LINE> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise TypeError('unsupported index type %r!' 
% index) <NEW_LINE> <DEDENT> <DEDENT> return item <NEW_LINE> <DEDENT> def __contains__(self, other): <NEW_LINE> <INDENT> if self.version != other.version: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if hasattr(other, '_value') and not hasattr(other, '_prefixlen'): <NEW_LINE> <INDENT> return other._value >= self.first and other._value <= self.last <NEW_LINE> <DEDENT> return other.first >= self.first and other.last <= self.last <NEW_LINE> <DEDENT> def __nonzero__(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> __bool__ = __nonzero__
A mixin class providing shared list-like functionality to classes representing groups of IP addresses.
62598fac091ae35668704be6
class DataSelectionPreferencesManager(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def serialize(self, filename): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def deserialize(self, filename): <NEW_LINE> <INDENT> pass
Stores GUI preference info, e.g. the most-recently uses directory for browsing data files.
62598facac7a0e7691f724d1
class Option: <NEW_LINE> <INDENT> option = None <NEW_LINE> is_Flag = False <NEW_LINE> requires = [] <NEW_LINE> excludes = [] <NEW_LINE> after = [] <NEW_LINE> before = [] <NEW_LINE> @classmethod <NEW_LINE> def default(cls): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def preprocess(cls, option): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def postprocess(cls, options): <NEW_LINE> <INDENT> pass
Base class for all kinds of options.
62598facf7d966606f747fad
class BidResponse(Object): <NEW_LINE> <INDENT> id = Field(String, required=True) <NEW_LINE> seatbid = Field(Array(SeatBid), required=True) <NEW_LINE> bidid = Field(String) <NEW_LINE> cur = Field(String) <NEW_LINE> customdata = Field(String) <NEW_LINE> nbr = Field(constants.NoBidReason) <NEW_LINE> ext = Field(Object) <NEW_LINE> @classmethod <NEW_LINE> def minimal(cls, id, bid_id, bid_impid, bid_price): <NEW_LINE> <INDENT> return cls(id=id, seatbid=[ SeatBid(bid=[ Bid(id=bid_id, impid=bid_impid, price=bid_price) ]) ]) <NEW_LINE> <DEDENT> def first_bid(self): <NEW_LINE> <INDENT> return self.seatbid[0].bid[0] <NEW_LINE> <DEDENT> def get_bid_id(self): <NEW_LINE> <INDENT> return self.first_bid().id <NEW_LINE> <DEDENT> def get_imp_id(self): <NEW_LINE> <INDENT> return self.first_bid().impid <NEW_LINE> <DEDENT> def get_ad_id(self): <NEW_LINE> <INDENT> return self.first_bid().adid <NEW_LINE> <DEDENT> def get_first_price(self): <NEW_LINE> <INDENT> return self.first_bid().price
The top-level bid response object. The “id” attribute is a reflection of the bid request ID for logging purposes. Similarly, “bidid” is an optional response tracking ID for bidders. If specified, it can be included in the subsequent win notice call if the bidder wins. At least one “seatbid” object is required, which contains a bid on at least one impression. Other attributes are optional since an exchange may establish default values.
62598fac9c8ee82313040155
@fixed_state <NEW_LINE> class Protein(IonComplex): <NEW_LINE> <INDENT> _state = {'name': 'Protein name.', 'members': 'Name of the peptide members.' } <NEW_LINE> sequences = tuple() <NEW_LINE> def __init__(self, name=None, ids=None, sequences=None, members=None): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> if members is not None: <NEW_LINE> <INDENT> self._members = tuple(members) <NEW_LINE> return <NEW_LINE> <DEDENT> if sequences is None: <NEW_LINE> <INDENT> ids, sequences = self._from_pdb() <NEW_LINE> <DEDENT> elif ids is None: <NEW_LINE> <INDENT> ids = tuple(['{}:{}'.format(self.name, ascii_uppercase[idx]) for idx in range(len(sequences))]) <NEW_LINE> <DEDENT> self._members = tuple([Peptide(name=id, sequence=sequence) for id, sequence in zip(ids, sequences)]) <NEW_LINE> <DEDENT> def _from_pdb(self): <NEW_LINE> <INDENT> temploc = tempfile.mkdtemp() <NEW_LINE> try: <NEW_LINE> <INDENT> file_ = lister.retrieve_pdb_file(self.name, pdir=temploc) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise RuntimeError( 'Could not download {} from the PDB.'.format(self.name)) <NEW_LINE> <DEDENT> structure = parser.get_structure(self.name, file_) <NEW_LINE> ids = [] <NEW_LINE> sequences = [] <NEW_LINE> for chain in structure.get_chains(): <NEW_LINE> <INDENT> ids.append('{}:{}'.format(self.name, chain.id)) <NEW_LINE> sequences.append( str(builder.build_peptides(chain)[0].get_sequence())) <NEW_LINE> <DEDENT> return tuple(ids), tuple(sequences)
Protein represents an ion composed of a complex of peptides. :param name: Name of the protein. :param ids: Names of the peptide members. :param sequences: Sequences of the peptide members. :param members: An iterable of the peptide members. If members and sequences are not provided, the name will be searched in the Protein DataBase (PDB). If a protein of the same name is available, the sequences of the peptides will be gathered from the PDB.
62598fac435de62698e9bdbc
class GridFS(object): <NEW_LINE> <INDENT> def __init__(self, database, collection="fs"): <NEW_LINE> <INDENT> if not isinstance(database, Database): <NEW_LINE> <INDENT> raise TypeError("database must be an instance of Database") <NEW_LINE> <DEDENT> self.__database = database <NEW_LINE> self.__collection = database[collection] <NEW_LINE> self.__files = self.__collection.files <NEW_LINE> self.__chunks = self.__collection.chunks <NEW_LINE> if not database.slave_okay and not database.read_preference: <NEW_LINE> <INDENT> self.__chunks.ensure_index([("files_id", ASCENDING), ("n", ASCENDING)], unique=True) <NEW_LINE> <DEDENT> <DEDENT> def new_file(self, **kwargs): <NEW_LINE> <INDENT> return GridIn(self.__collection, **kwargs) <NEW_LINE> <DEDENT> def put(self, data, **kwargs): <NEW_LINE> <INDENT> grid_file = GridIn(self.__collection, **kwargs) <NEW_LINE> try: <NEW_LINE> <INDENT> grid_file.write(data) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> grid_file.close() <NEW_LINE> <DEDENT> return grid_file._id <NEW_LINE> <DEDENT> def get(self, file_id): <NEW_LINE> <INDENT> return GridOut(self.__collection, file_id) <NEW_LINE> <DEDENT> def get_version(self, filename=None, version=-1, **kwargs): <NEW_LINE> <INDENT> database = self.__database <NEW_LINE> if not database.slave_okay and not database.read_preference: <NEW_LINE> <INDENT> self.__files.ensure_index([("filename", ASCENDING), ("uploadDate", DESCENDING)]) <NEW_LINE> <DEDENT> query = kwargs <NEW_LINE> if filename is not None: <NEW_LINE> <INDENT> query["filename"] = filename <NEW_LINE> <DEDENT> cursor = self.__files.find(query) <NEW_LINE> if version < 0: <NEW_LINE> <INDENT> skip = abs(version) - 1 <NEW_LINE> cursor.limit(-1).skip(skip).sort("uploadDate", DESCENDING) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cursor.limit(-1).skip(version).sort("uploadDate", ASCENDING) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> grid_file = cursor.next() <NEW_LINE> return GridOut(self.__collection, file_document=grid_file) <NEW_LINE> 
<DEDENT> except StopIteration: <NEW_LINE> <INDENT> raise NoFile("no version %d for filename %r" % (version, filename)) <NEW_LINE> <DEDENT> <DEDENT> def get_last_version(self, filename=None, **kwargs): <NEW_LINE> <INDENT> return self.get_version(filename=filename, **kwargs) <NEW_LINE> <DEDENT> def delete(self, file_id): <NEW_LINE> <INDENT> self.__files.remove({"_id": file_id}, safe=True) <NEW_LINE> self.__chunks.remove({"files_id": file_id}) <NEW_LINE> <DEDENT> def list(self): <NEW_LINE> <INDENT> return self.__files.distinct("filename") <NEW_LINE> <DEDENT> def exists(self, document_or_id=None, **kwargs): <NEW_LINE> <INDENT> if kwargs: <NEW_LINE> <INDENT> return self.__files.find_one(kwargs, ["_id"]) is not None <NEW_LINE> <DEDENT> return self.__files.find_one(document_or_id, ["_id"]) is not None <NEW_LINE> <DEDENT> def open(self, *args, **kwargs): <NEW_LINE> <INDENT> raise UnsupportedAPI("The open method is no longer supported.") <NEW_LINE> <DEDENT> def remove(self, *args, **kwargs): <NEW_LINE> <INDENT> raise UnsupportedAPI("The remove method is no longer supported. " "Please use the delete method instead.")
An instance of GridFS on top of a single Database.
62598fac7d43ff24874273e6
class hr_recruitment_stage(osv.osv): <NEW_LINE> <INDENT> _name = "hr.recruitment.stage" <NEW_LINE> _description = "Stage of Recruitment" <NEW_LINE> _order = 'sequence' <NEW_LINE> _columns = { 'name': fields.char('Name', required=True, translate=True), 'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of stages."), 'department_id':fields.many2one('hr.department', 'Specific to a Department', help="Stages of the recruitment process may be different per department. If this stage is common to all departments, keep this field empty."), 'requirements': fields.text('Requirements'), 'template_id': fields.many2one('email.template', 'Use template', help="If set, a message is posted on the applicant using the template when the applicant is set to the stage."), 'fold': fields.boolean('Folded in Kanban View', help='This stage is folded in the kanban view when' 'there are no records in that stage to display.'), } <NEW_LINE> _defaults = { 'sequence': 1, }
Stage of HR Recruitment
62598fac7b25080760ed7477
@override_settings( DOVECOT_LOOKUP_PATH=["{}/dovecot".format(os.path.dirname(__file__))]) <NEW_LINE> class MailboxOperationTestCase(ModoTestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpTestData(cls): <NEW_LINE> <INDENT> super(MailboxOperationTestCase, cls).setUpTestData() <NEW_LINE> factories.populate_database() <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> super(MailboxOperationTestCase, self).setUp() <NEW_LINE> self.workdir = tempfile.mkdtemp() <NEW_LINE> path = "{}/test.com/admin".format(self.workdir) <NEW_LINE> os.makedirs(path) <NEW_LINE> self.set_global_parameter("handle_mailboxes", True) <NEW_LINE> self.set_global_parameter("enable_admin_limits", False, app="limits") <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> shutil.rmtree(self.workdir) <NEW_LINE> <DEDENT> @mock.patch("modoboa.admin.models.Mailbox.mail_home") <NEW_LINE> def test_delete_account(self, mail_home_mock): <NEW_LINE> <INDENT> path = "{}/test.com/admin".format(self.workdir) <NEW_LINE> mail_home_mock.__get__ = mock.Mock(return_value=path) <NEW_LINE> mb = models.Mailbox.objects.select_related("user").get( address="admin", domain__name="test.com") <NEW_LINE> self.ajax_post( reverse("admin:account_delete", args=[mb.user.pk]), {} ) <NEW_LINE> call_command("handle_mailbox_operations") <NEW_LINE> self.assertFalse(models.MailboxOperation.objects.exists()) <NEW_LINE> self.assertFalse(os.path.exists(mb.mail_home)) <NEW_LINE> <DEDENT> @mock.patch("modoboa.admin.models.Mailbox.mail_home") <NEW_LINE> def test_rename_account(self, mail_home_mock): <NEW_LINE> <INDENT> path = "{}/test.com/admin".format(self.workdir) <NEW_LINE> mail_home_mock.__get__ = mock.Mock(return_value=path) <NEW_LINE> mb = models.Mailbox.objects.select_related("user").get( address="admin", domain__name="test.com") <NEW_LINE> values = { "username": "admin2@test.com", "role": "DomainAdmins", "is_active": True, "email": "admin2@test.com" } <NEW_LINE> self.ajax_post( reverse("admin:account_change", 
args=[mb.user.pk]), values ) <NEW_LINE> path = "{}/test.com/admin2".format(self.workdir) <NEW_LINE> mail_home_mock.__get__ = mock.Mock(return_value=path) <NEW_LINE> call_command("handle_mailbox_operations") <NEW_LINE> self.assertFalse(models.MailboxOperation.objects.exists()) <NEW_LINE> self.assertTrue(os.path.exists(mb.mail_home)) <NEW_LINE> <DEDENT> @mock.patch("modoboa.admin.models.Mailbox.mail_home") <NEW_LINE> def test_delete_domain(self, mail_home_mock): <NEW_LINE> <INDENT> path = "{}/test.com/admin".format(self.workdir) <NEW_LINE> mail_home_mock.__get__ = mock.Mock(return_value=path) <NEW_LINE> domain = models.Domain.objects.get(name="test.com") <NEW_LINE> self.ajax_post(reverse("admin:domain_delete", args=[domain.pk])) <NEW_LINE> call_command("handle_mailbox_operations") <NEW_LINE> self.assertFalse(models.MailboxOperation.objects.exists()) <NEW_LINE> self.assertFalse(os.path.exists(path))
Test management command.
62598fac3539df3088ecc27a
class BugNotFound(BugWatchUpdateWarning): <NEW_LINE> <INDENT> pass
The bug was not found in the external bug tracker.
62598fac4e4d5625663723ee
class Commune: <NEW_LINE> <INDENT> def __init__(self, code_commune, nom_commune, pop_totale, code_dept): <NEW_LINE> <INDENT> self._code_commune = code_commune <NEW_LINE> self._nom_commune = nom_commune <NEW_LINE> self._pop_totale = pop_totale <NEW_LINE> self._code_dept = code_dept <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "#" + self._code_commune + " " + self._nom_commune + " : " + self._pop_totale
Cette classe représente une commune Attr: _code_commune (int): l'identifiant commune (PK) _nom_commune (str): le nom de la commune _pop_totale (int): le nombre d'habitants _code_dept (int): pseudo clé étrangère sur les département
62598fac498bea3a75a57ae6
class IMeasureGroup(form.Schema, IAttributeUUID, IMeasureFormDefinition, IMeasureSourceType, ): <NEW_LINE> <INDENT> pass
Measure group (folderish) content interface. Measure groups contain both measure and common topic/collection/dataset items used by all measures contained within.
62598fac2c8b7c6e89bd378e
class EventHandler(pyinotify.ProcessEvent): <NEW_LINE> <INDENT> def _get_profile_ids(self, event): <NEW_LINE> <INDENT> path = os.path.basename(event.pathname) <NEW_LINE> device_uid = os.path.basename(os.path.dirname(event.pathname)) <NEW_LINE> if path.endswith(".macros") and not path.startswith("."): <NEW_LINE> <INDENT> id_no = path.split(".")[0] <NEW_LINE> return ( id_no, device_uid ) <NEW_LINE> <DEDENT> <DEDENT> def _notify(self, event): <NEW_LINE> <INDENT> ids = self._get_profile_ids(event) <NEW_LINE> if ids: <NEW_LINE> <INDENT> for profile_listener in profile_listeners: <NEW_LINE> <INDENT> profile_listener(ids[0], ids[1]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def process_IN_MODIFY(self, event): <NEW_LINE> <INDENT> self._notify(event) <NEW_LINE> <DEDENT> def process_IN_CREATE(self, event): <NEW_LINE> <INDENT> self._notify(event) <NEW_LINE> <DEDENT> def process_IN_ATTRIB(self, event): <NEW_LINE> <INDENT> self._notify(event) <NEW_LINE> <DEDENT> def process_IN_DELETE(self, event): <NEW_LINE> <INDENT> self._notify(event)
Event handle the listens for the inotify events and informs all callbacks that are registered in the profile_listeners variable
62598fac3d592f4c4edbae94
class LaunchFlows(renderers.AngularDirectiveRenderer): <NEW_LINE> <INDENT> description = "Start new flows" <NEW_LINE> behaviours = frozenset(["Host"]) <NEW_LINE> order = 10 <NEW_LINE> directive = "grr-start-flow-view" <NEW_LINE> def Layout(self, request, response): <NEW_LINE> <INDENT> self.directive_args = {} <NEW_LINE> self.directive_args["client-id"] = request.REQ.get("client_id") <NEW_LINE> return super(LaunchFlows, self).Layout(request, response)
Launches a new flow.
62598fac6aa9bd52df0d4e90
class Fellow(Person): <NEW_LINE> <INDENT> designation = 'FELLOW' <NEW_LINE> office = None <NEW_LINE> living_space = None <NEW_LINE> def __init__(self, name, wants_accommodation): <NEW_LINE> <INDENT> super(Person, self).__init__() <NEW_LINE> self.name = name <NEW_LINE> self.wants_accommodation = wants_accommodation
docstring for Fellow
62598fac91f36d47f2230e8a
class PageCleaner(CleanupModelLearner): <NEW_LINE> <INDENT> def __init__(self, cleanup_model=None, cleanup_threshold=0.1, **kwargs): <NEW_LINE> <INDENT> Extractor.__init__(self, **kwargs) <NEW_LINE> self.cleanup_model = cleanup_model <NEW_LINE> self.cleanup_threshold = cleanup_threshold <NEW_LINE> assert self.cleanup_model, "PageCleaner extractor requires a cleanup model" <NEW_LINE> self.load_model(self.cleanup_model) <NEW_LINE> <DEDENT> def remove_empty_elements(self, root): <NEW_LINE> <INDENT> nodes = copy.copy(root.contents) <NEW_LINE> for node in nodes: <NEW_LINE> <INDENT> if isinstance(node, NavigableString): <NEW_LINE> <INDENT> if not self.clean_string(node.string): <NEW_LINE> <INDENT> node.extract() <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(node, Tag): <NEW_LINE> <INDENT> self.remove_empty_elements(node) <NEW_LINE> if node.name not in ('br', 'hr'): <NEW_LINE> <INDENT> has_child_with_content = False <NEW_LINE> for child in node.contents: <NEW_LINE> <INDENT> if isinstance(child, NavigableString) or (isinstance(child, Tag) and child.name not in ('br', 'hr')): <NEW_LINE> <INDENT> has_child_with_content = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if not has_child_with_content: <NEW_LINE> <INDENT> node.extract() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def extract_from_soup(self, soup, filename, relative_filename): <NEW_LINE> <INDENT> self.nodes_to_remove = [] <NEW_LINE> self.walk_elements(soup, '/') <NEW_LINE> for node in self.nodes_to_remove: <NEW_LINE> <INDENT> node.extract() <NEW_LINE> <DEDENT> self.remove_empty_elements(soup) <NEW_LINE> return soup <NEW_LINE> <DEDENT> def visit_leaf_element(self, node, path): <NEW_LINE> <INDENT> node_string = self.clean_string(node.string) <NEW_LINE> if node_string: <NEW_LINE> <INDENT> signature = self.signature(path) <NEW_LINE> if signature in self.skip_paths and self.skip_paths[signature] > self.cleanup_threshold: <NEW_LINE> <INDENT> self.nodes_to_remove.append(node)
Clean web pages based on a previously learned model
62598facaad79263cf42e79c
class InstanceViewStatus(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'level': {'key': 'level', 'type': 'str'}, 'display_status': {'key': 'displayStatus', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'time': {'key': 'time', 'type': 'iso-8601'}, } <NEW_LINE> def __init__( self, *, code: Optional[str] = None, level: Optional[Union[str, "StatusLevelTypes"]] = None, display_status: Optional[str] = None, message: Optional[str] = None, time: Optional[datetime.datetime] = None, **kwargs ): <NEW_LINE> <INDENT> super(InstanceViewStatus, self).__init__(**kwargs) <NEW_LINE> self.code = code <NEW_LINE> self.level = level <NEW_LINE> self.display_status = display_status <NEW_LINE> self.message = message <NEW_LINE> self.time = time
Instance view status. :ivar code: The status code. :vartype code: str :ivar level: The level code. Possible values include: "Info", "Warning", "Error". :vartype level: str or ~azure.mgmt.compute.v2017_03_30.models.StatusLevelTypes :ivar display_status: The short localizable label for the status. :vartype display_status: str :ivar message: The detailed status message, including for alerts and error messages. :vartype message: str :ivar time: The time of the status. :vartype time: ~datetime.datetime
62598facfff4ab517ebcd7ae
class FileReader: <NEW_LINE> <INDENT> def __init__(self, fname): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(fname, 'r') as f: <NEW_LINE> <INDENT> self.file_lines_list = f.readlines() <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> error("Error opening file " + fname) <NEW_LINE> self.file_lines_list = None <NEW_LINE> self.success = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.file_pointer = 0 <NEW_LINE> self.success = True <NEW_LINE> <DEDENT> <DEDENT> def readline(self): <NEW_LINE> <INDENT> if self.file_pointer < len(self.file_lines_list): <NEW_LINE> <INDENT> self.file_pointer += 1 <NEW_LINE> return self.file_lines_list[self.file_pointer-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> <DEDENT> def join_lines(self, line): <NEW_LINE> <INDENT> while '&' in line.split('!')[0]: <NEW_LINE> <INDENT> line1 = line.split('&')[0] <NEW_LINE> line2 = self.readline() <NEW_LINE> line = line1 + line2.strip() <NEW_LINE> <DEDENT> return line <NEW_LINE> <DEDENT> def parse_comments(self,line): <NEW_LINE> <INDENT> com = [] <NEW_LINE> read_count = 0 <NEW_LINE> if fort_dox_comments.match(line): <NEW_LINE> <INDENT> com.append(line.split('!>')[1].strip()) <NEW_LINE> <DEDENT> elif fort_dox_inline.match(line): <NEW_LINE> <INDENT> com.append(line.split('!<')[1].strip()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> warning("Bad line in parse_comments: " + line) <NEW_LINE> <DEDENT> while True: <NEW_LINE> <INDENT> line = self.readline().strip() <NEW_LINE> if line.startswith('!!'): <NEW_LINE> <INDENT> text = line.split('!!')[1] <NEW_LINE> if text.startswith(' '): <NEW_LINE> <INDENT> text = text[1:] <NEW_LINE> <DEDENT> com.append(text) <NEW_LINE> read_count+=1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if fort_dox_inline.match(line): <NEW_LINE> <INDENT> for i in range(read_count): <NEW_LINE> <INDENT> self.file_pointer -= 1 <NEW_LINE> <DEDENT> <DEDENT> self.file_pointer -= 1 <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> 
return com
Wrapper around file that provides facilities for backing up
62598fac21bff66bcd722c2f
class TestPostureManagementV1(): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setup_class(cls): <NEW_LINE> <INDENT> if os.path.exists(config_file): <NEW_LINE> <INDENT> os.environ['IBM_CREDENTIALS_FILE'] = config_file <NEW_LINE> cls.posture_management_service = PostureManagementV1.new_instance( ) <NEW_LINE> assert cls.posture_management_service is not None <NEW_LINE> cls.config = read_external_sources( PostureManagementV1.DEFAULT_SERVICE_NAME) <NEW_LINE> assert cls.config is not None <NEW_LINE> cls.account_id = cls.config['ACCOUNT_ID'] <NEW_LINE> cls.profile_name = cls.config['PROFILE_NAME'] <NEW_LINE> cls.scopes_name = cls.config['SCOPES_NAME'] <NEW_LINE> assert cls.account_id is not None <NEW_LINE> assert cls.profile_name is not None <NEW_LINE> assert cls.scopes_name is not None <NEW_LINE> <DEDENT> print('Setup complete.') <NEW_LINE> <DEDENT> needscredentials = pytest.mark.skipif( not os.path.exists(config_file), reason="External configuration not available, skipping..." ) <NEW_LINE> @needscredentials <NEW_LINE> def test_list_profiles(self): <NEW_LINE> <INDENT> list_profiles_response = self.posture_management_service.list_profiles( account_id=self.account_id, name=self.profile_name, ) <NEW_LINE> assert list_profiles_response.get_status_code() == 200 <NEW_LINE> profiles_list = list_profiles_response.get_result() <NEW_LINE> assert profiles_list is not None <NEW_LINE> global profile_id <NEW_LINE> profile_id = profiles_list['profiles'][0]['profile_id'] <NEW_LINE> <DEDENT> @needscredentials <NEW_LINE> def test_list_scopes(self): <NEW_LINE> <INDENT> list_scopes_response = self.posture_management_service.list_scopes( account_id=self.account_id, name=self.scopes_name, ) <NEW_LINE> assert list_scopes_response.get_status_code() == 200 <NEW_LINE> scopes_list = list_scopes_response.get_result() <NEW_LINE> assert scopes_list is not None <NEW_LINE> global scope_id <NEW_LINE> scope_id = scopes_list['scopes'][0]['scope_id'] <NEW_LINE> <DEDENT> @needscredentials <NEW_LINE> def 
test_create_validation(self): <NEW_LINE> <INDENT> assert profile_id is not None <NEW_LINE> assert scope_id is not None <NEW_LINE> create_validation_response = self.posture_management_service.create_validation( account_id=self.account_id, scope_id=scope_id, profile_id=profile_id, group_profile_id=group_profile_id, ) <NEW_LINE> assert create_validation_response.get_status_code() == 202 <NEW_LINE> result = create_validation_response.get_result() <NEW_LINE> assert result is not None
Integration Test Class for PostureManagementV1
62598facf9cc0f698b1c52ad
class NetworkInterfaceLoadBalancersOperations(object): <NEW_LINE> <INDENT> models = models <NEW_LINE> def __init__(self, client, config, serializer, deserializer): <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self.api_version = "2017-06-01" <NEW_LINE> self.config = config <NEW_LINE> <DEDENT> def list( self, resource_group_name, network_interface_name, custom_headers=None, raw=False, **operation_config): <NEW_LINE> <INDENT> def internal_paging(next_link=None, raw=False): <NEW_LINE> <INDENT> if not next_link: <NEW_LINE> <INDENT> url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/loadBalancers' <NEW_LINE> path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> url = next_link <NEW_LINE> query_parameters = {} <NEW_LINE> <DEDENT> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = 'application/json; charset=utf-8' <NEW_LINE> if self.config.generate_client_request_id: <NEW_LINE> <INDENT> header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) <NEW_LINE> <DEDENT> if custom_headers: <NEW_LINE> <INDENT> header_parameters.update(custom_headers) <NEW_LINE> <DEDENT> if self.config.accept_language is not None: <NEW_LINE> <INDENT> header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", 
self.config.accept_language, 'str') <NEW_LINE> <DEDENT> request = self._client.get(url, query_parameters) <NEW_LINE> response = self._client.send( request, header_parameters, stream=False, **operation_config) <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> exp = CloudError(response) <NEW_LINE> exp.request_id = response.headers.get('x-ms-request-id') <NEW_LINE> raise exp <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> deserialized = models.LoadBalancerPaged(internal_paging, self._deserialize.dependencies) <NEW_LINE> if raw: <NEW_LINE> <INDENT> header_dict = {} <NEW_LINE> client_raw_response = models.LoadBalancerPaged(internal_paging, self._deserialize.dependencies, header_dict) <NEW_LINE> return client_raw_response <NEW_LINE> <DEDENT> return deserialized
NetworkInterfaceLoadBalancersOperations operations. :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An objec model deserializer. :ivar api_version: Client API version. Constant value: "2017-06-01".
62598fac1f037a2d8b9e40b7
class TLSSNI01Test(unittest.TestCase): <NEW_LINE> <INDENT> auth_key = jose.JWKRSA.load(test_util.load_vector("rsa512_key.pem")) <NEW_LINE> achalls = [ achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( challenges.TLSSNI01(token=b'token1'), "pending"), domain="encryption-example.demo", account_key=auth_key), achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( challenges.TLSSNI01(token=b'token2'), "pending"), domain="certbot.demo", account_key=auth_key), ] <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.tempdir = tempfile.mkdtemp() <NEW_LINE> configurator = mock.MagicMock() <NEW_LINE> configurator.config.config_dir = os.path.join(self.tempdir, "config") <NEW_LINE> configurator.config.work_dir = os.path.join(self.tempdir, "work") <NEW_LINE> from certbot.plugins.common import TLSSNI01 <NEW_LINE> self.sni = TLSSNI01(configurator=configurator) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> shutil.rmtree(self.tempdir) <NEW_LINE> <DEDENT> def test_add_chall(self): <NEW_LINE> <INDENT> self.sni.add_chall(self.achalls[0], 0) <NEW_LINE> self.assertEqual(1, len(self.sni.achalls)) <NEW_LINE> self.assertEqual([0], self.sni.indices) <NEW_LINE> <DEDENT> def test_setup_challenge_cert(self): <NEW_LINE> <INDENT> mock_open, mock_safe_open = mock.mock_open(), mock.mock_open() <NEW_LINE> response = challenges.TLSSNI01Response() <NEW_LINE> achall = mock.MagicMock() <NEW_LINE> achall.chall.encode.return_value = "token" <NEW_LINE> key = test_util.load_pyopenssl_private_key("rsa512_key.pem") <NEW_LINE> achall.response_and_validation.return_value = ( response, (test_util.load_cert("cert_512.pem"), key)) <NEW_LINE> with mock.patch("certbot.plugins.common.open", mock_open, create=True): <NEW_LINE> <INDENT> with mock.patch("certbot.plugins.common.util.safe_open", mock_safe_open): <NEW_LINE> <INDENT> self.assertEqual(response, self.sni._setup_challenge_cert( achall, "randomS1")) <NEW_LINE> <DEDENT> <DEDENT> 
mock_open.assert_called_once_with(self.sni.get_cert_path(achall), "wb") <NEW_LINE> mock_open.return_value.write.assert_called_once_with( test_util.load_vector("cert_512.pem")) <NEW_LINE> mock_safe_open.assert_called_once_with( self.sni.get_key_path(achall), "wb", chmod=0o400) <NEW_LINE> mock_safe_open.return_value.write.assert_called_once_with( OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key)) <NEW_LINE> <DEDENT> def test_get_z_domain(self): <NEW_LINE> <INDENT> achall = self.achalls[0] <NEW_LINE> self.assertEqual(self.sni.get_z_domain(achall), achall.response(achall.account_key).z_domain.decode("utf-8"))
Tests for certbot.plugins.common.TLSSNI01.
62598fac379a373c97d98fdc
class stadium(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> print("cheese")
This is the class that stores stadium information
62598fadd58c6744b42dc2bb
@PatternPlayer.subcommand("text") <NEW_LINE> class TextCLI(cli.Application, PatternPlayerMixin): <NEW_LINE> <INDENT> def main(self): <NEW_LINE> <INDENT> self.main_from_renderer(text)
An experiment with drawing text to the SkyScreen.
62598fad56ac1b37e63021b4
class Echo(protocol.Protocol): <NEW_LINE> <INDENT> def writeToTransport(self, response): <NEW_LINE> <INDENT> self.transport.write(response.encode("ascii")) <NEW_LINE> <DEDENT> def dataReceived(self, data): <NEW_LINE> <INDENT> message = data.decode("ascii") <NEW_LINE> response = 'OK ... ' + message <NEW_LINE> self.writeToTransport(response) <NEW_LINE> self.factory.app.updateFields(message,response) <NEW_LINE> <DEDENT> def connectionMade(self): <NEW_LINE> <INDENT> response = "OK ... Hello there ...\n" <NEW_LINE> data = "Connection from " + str(self.transport.getPeer()) <NEW_LINE> self.writeToTransport(response) <NEW_LINE> self.factory.app.updateFields(data,response)
This is just about the simplest possible protocol
62598fadf548e778e596b56d
class RegisterForm(forms.Form): <NEW_LINE> <INDENT> email = forms.EmailField(required=True) <NEW_LINE> password = forms.CharField(required=True, min_length=5) <NEW_LINE> captcha = CaptchaField(error_messages={'invalid': '验证码错误!'})
注册信息验证
62598fad97e22403b383aed6
class JsonResource(HttpServer, resource.Resource): <NEW_LINE> <INDENT> isLeaf = True <NEW_LINE> _PathEntry = collections.namedtuple("_PathEntry", ["pattern", "callback"]) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> resource.Resource.__init__(self) <NEW_LINE> self.path_regexs = {} <NEW_LINE> <DEDENT> def register_path(self, method, path_pattern, callback): <NEW_LINE> <INDENT> self.path_regexs.setdefault(method, []).append( self._PathEntry(path_pattern, callback) ) <NEW_LINE> <DEDENT> def start_listening(self, port): <NEW_LINE> <INDENT> reactor.listenTCP(port, server.Site(self)) <NEW_LINE> <DEDENT> def render(self, request): <NEW_LINE> <INDENT> self._async_render_with_logging_context(request) <NEW_LINE> return server.NOT_DONE_YET <NEW_LINE> <DEDENT> _request_id = 0 <NEW_LINE> @defer.inlineCallbacks <NEW_LINE> def _async_render_with_logging_context(self, request): <NEW_LINE> <INDENT> request_id = "%s-%s" % (request.method, JsonResource._request_id) <NEW_LINE> JsonResource._request_id += 1 <NEW_LINE> with LoggingContext(request_id) as request_context: <NEW_LINE> <INDENT> request_context.request = request_id <NEW_LINE> yield self._async_render(request) <NEW_LINE> <DEDENT> <DEDENT> @defer.inlineCallbacks <NEW_LINE> def _async_render(self, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if request.method == "OPTIONS": <NEW_LINE> <INDENT> self._send_response(request, 200, {}) <NEW_LINE> return <NEW_LINE> <DEDENT> for path_entry in self.path_regexs.get(request.method, []): <NEW_LINE> <INDENT> m = path_entry.pattern.match(request.path) <NEW_LINE> if m: <NEW_LINE> <INDENT> code, response = yield path_entry.callback( request, *m.groups() ) <NEW_LINE> self._send_response(request, code, response) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> self._send_response( request, 400, {"error": "Unrecognized request"} ) <NEW_LINE> <DEDENT> except CodeMessageException as e: <NEW_LINE> <INDENT> if isinstance(e, SynapseError): <NEW_LINE> <INDENT> logger.info("%s SynapseError: 
%s - %s", request, e.code, e.msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.exception(e) <NEW_LINE> <DEDENT> self._send_response( request, e.code, cs_exception(e), response_code_message=e.response_code_message ) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.exception(e) <NEW_LINE> self._send_response( request, 500, {"error": "Internal server error"} ) <NEW_LINE> <DEDENT> <DEDENT> def _send_response(self, request, code, response_json_object, response_code_message=None): <NEW_LINE> <INDENT> if request._disconnected: <NEW_LINE> <INDENT> logger.warn( "Not sending response to request %s, already disconnected.", request) <NEW_LINE> return <NEW_LINE> <DEDENT> if not self._request_user_agent_is_curl(request): <NEW_LINE> <INDENT> json_bytes = encode_canonical_json(response_json_object) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> json_bytes = encode_pretty_printed_json(response_json_object) <NEW_LINE> <DEDENT> respond_with_json_bytes(request, code, json_bytes, send_cors=True, response_code_message=response_code_message) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _request_user_agent_is_curl(request): <NEW_LINE> <INDENT> user_agents = request.requestHeaders.getRawHeaders( "User-Agent", default=[] ) <NEW_LINE> for user_agent in user_agents: <NEW_LINE> <INDENT> if "curl" in user_agent: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False
This implements the HttpServer interface and provides JSON support for Resources. Register callbacks via register_path()
62598fad63d6d428bbee2774
class IScopePrioritySetter(object): <NEW_LINE> <INDENT> pass
description of class
62598fad10dbd63aa1c70b7c
class ProvisaoBase(object): <NEW_LINE> <INDENT> def get_produto(self, ncm, ncm_ex): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def get_servico(self, nbs): <NEW_LINE> <INDENT> raise NotImplementedError()
Classe base para o provisionamento da consulta, tornando possível o armazenamento das consultas dos valores aproximados dos tributos em cache, acelerando a consulta para produtos e serviços recém consultados. Esta classe em particular não faz esse provisionamento, apenas fornece uma base para os métodos básicos que darão suporte ao provisionamento. Uma implementação de provisionamento mínima deverá sobrescrever os métodos :meth:`get_produto` e :meth:`get_servico`. .. versionadded:: 0.3
62598fad8c0ade5d55dc3676
class QHead(nn.Module): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.d_classes = 200 <NEW_LINE> self.d_rest_code = 2 <NEW_LINE> self.d_code = self.d_classes + 2*self.d_rest_code <NEW_LINE> self.define_module() <NEW_LINE> <DEDENT> def define_module(self): <NEW_LINE> <INDENT> self.fc_q = nn.Sequential( nn.Linear(1024, 128), nn.BatchNorm1d(128), nn.LeakyReLU(0.1), nn.Linear(128, self.d_code) ) <NEW_LINE> <DEDENT> def forward(self, features): <NEW_LINE> <INDENT> d_classes = self.d_classes <NEW_LINE> d_rest_code = self.d_rest_code <NEW_LINE> code = self.fc_q(features) <NEW_LINE> label_logits = code[:, :d_classes] <NEW_LINE> rest_means = code[:, d_classes:d_classes+d_rest_code] <NEW_LINE> rest_vars = code[:, d_classes+d_rest_code:].exp() <NEW_LINE> return label_logits, rest_means, rest_vars
Discriminator head for predicting the latent code.
62598fad66656f66f7d5a3b9
class UtilsTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_parameters_counter(self): <NEW_LINE> <INDENT> class ParamsHolder(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self, n_params): <NEW_LINE> <INDENT> super(ParamsHolder, self).__init__() <NEW_LINE> self.p1 = torch.nn.Parameter(torch.Tensor(n_params // 2)) <NEW_LINE> self.p2 = torch.nn.Parameter(torch.Tensor(n_params // 2)) <NEW_LINE> self.dummy = -1 <NEW_LINE> <DEDENT> <DEDENT> params_num = 1000 <NEW_LINE> module = ParamsHolder(params_num) <NEW_LINE> estimated_params = get_model_parameters_number(module, as_string=False) <NEW_LINE> self.assertEqual(estimated_params, params_num)
Tests for utils
62598fad7d847024c075c38c
class FreezeTime(object): <NEW_LINE> <INDENT> def __init__( self, dt, tz=datetime.timezone.utc, fold=0, tick=False, extra_patch_datetime=(), extra_patch_time=(), ): <NEW_LINE> <INDENT> datetime_targets = ('datetime.datetime',) + tuple(extra_patch_datetime) <NEW_LINE> time_targets = ('time.time',) + tuple(extra_patch_time) <NEW_LINE> self.patches = [ patch(target, FakeDateTime) for target in datetime_targets ] + [patch(target, fake_time) for target in time_targets] <NEW_LINE> self._dt = dt <NEW_LINE> self._tz = tz <NEW_LINE> self._fold = fold <NEW_LINE> self._tick = tick <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> FakeDateTime._initialize( self._dt, self._tz, fold=self._fold, tick=self._tick ) <NEW_LINE> for p in self.patches: <NEW_LINE> <INDENT> p.__enter__() <NEW_LINE> <DEDENT> <DEDENT> def __exit__(self, *args): <NEW_LINE> <INDENT> for p in reversed(self.patches): <NEW_LINE> <INDENT> p.__exit__(*args)
A context manager that freezes the datetime to the given datetime object. It simulates that the system timezone is the passed timezone. If `tick=True` is passed, the clock will tick, otherwise the clock will remain at the given datetime. Additional patch targets can be passed via `extra_patch_datetime` and `extra_patch_time` to patch the `datetime` class or `time` function if it was already imported in a different module. For example, if module `x` contains `from datetime import datetime` (as opposed to `import datetime`), it needs to be patched separately (`extra_patch_datetime=['x.datetime']`).
62598fad7d847024c075c38d
class DiceToken: <NEW_LINE> <INDENT> __slots__ = ["__sequence"] <NEW_LINE> def __init__(self, statement): <NEW_LINE> <INDENT> self.__sequence = [] <NEW_LINE> for token in tokenize( statement.lower(), specifications=[("DICE", r"\d*[d]\d*")] ): <NEW_LINE> <INDENT> if token._type == "DICE": <NEW_LINE> <INDENT> dice_split = token._value.split("d") <NEW_LINE> if not dice_split[0]: <NEW_LINE> <INDENT> dice_split[0] = 1 <NEW_LINE> <DEDENT> if not dice_split[1]: <NEW_LINE> <INDENT> dice_split[1] = 2 <NEW_LINE> <DEDENT> if len(self.__sequence) and type(self.__sequence[len(self.__sequence) - 1]) in (int, tuple): <NEW_LINE> <INDENT> self.__sequence.append(operator.add) <NEW_LINE> <DEDENT> self.__sequence.append(Dice(int(dice_split[1]), int(dice_split[0]))) <NEW_LINE> <DEDENT> elif token._type == "OP": <NEW_LINE> <INDENT> if token._value == "+": <NEW_LINE> <INDENT> self.__sequence.append(operator.add) <NEW_LINE> <DEDENT> elif token._value == "-": <NEW_LINE> <INDENT> self.__sequence.append(operator.sub) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeError(f"Unexpected OP Token of value {token._value}") <NEW_LINE> <DEDENT> <DEDENT> elif token._type == "NUMBER": <NEW_LINE> <INDENT> self.__sequence.append(token._value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeError(f"Unexpected {token}") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def roll(self, average = False): <NEW_LINE> <INDENT> sequence = self.__sequence.copy() <NEW_LINE> total = 0 <NEW_LINE> for i in range(0, len(sequence)): <NEW_LINE> <INDENT> if isinstance(sequence[i], Dice): <NEW_LINE> <INDENT> if not average: <NEW_LINE> <INDENT> total += sum(sequence[i].roll()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> total += sequence[i].get_average() <NEW_LINE> <DEDENT> <DEDENT> elif type(sequence[i]) != int and i != len(sequence) - 1: <NEW_LINE> <INDENT> total = sequence[i](total, sequence[i + 1]) <NEW_LINE> <DEDENT> <DEDENT> return total
A sequence of dice rolls and numbers. Parameter: str A dice statement, in the format of #d#+#+d# or the such that is parsed for rolling.
62598faddd821e528d6d8efe
class TargetTemp(Resource): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.parser = reqparse.RequestParser() <NEW_LINE> self.parser.add_argument( 'target_temp_c', type=int, choices=range(150, 551), help='target_temp can be 150..550 deg F. Note that ' 'hardware seems to limit this to 520F.' ) <NEW_LINE> self.parser.add_argument( 'target_temp_f', type=int, choices=range(150, 551), help='target_temp can be 150..550 deg F. Note that ' 'hardware seems to limit this to 520F.' ) <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> if device_sr700.connected: <NEW_LINE> <INDENT> return { 'target_temp_f': round(device_sr700.target_temp, 0), 'target_temp_c': round( utils.f_to_c(device_sr700.target_temp), 0) } <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ({ 'error': 'Hardware not connected.' }, 503 ) <NEW_LINE> <DEDENT> <DEDENT> def put(self): <NEW_LINE> <INDENT> if device_sr700.connected: <NEW_LINE> <INDENT> kwargs = self.parser.parse_args() <NEW_LINE> value_c = kwargs['target_temp_c'] <NEW_LINE> value_f = kwargs['target_temp_f'] <NEW_LINE> if value_f is None: <NEW_LINE> <INDENT> if value_c is not None: <NEW_LINE> <INDENT> value_f = int(round(utils.c_to_f(value_c), 0)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ( { 'error': 'Must supply target_temp_c or target_temp_f.', }, 400 ) <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> device_sr700.target_temp = value_f <NEW_LINE> return { 'target_temp_f': device_sr700.target_temp, 'target_temp_c': int( round(utils.f_to_c(device_sr700.target_temp), 0)) } <NEW_LINE> <DEDENT> except freshroastsr700.exceptions.RoasterValueError: <NEW_LINE> <INDENT> return ( { 'error': 'Could not set requested value. Out of range?', }, 400 )
When freshroastsr700 is in thermostat mode, this is the set point value for the chamber temperature.
62598fad30bbd7224646995d
class SqliteDBError(sqlite3.OperationalError): <NEW_LINE> <INDENT> pass
General error exception encountered during database operations.
62598fad0c0af96317c5634c
class ConfigurationError(Exception): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__()
Exception to be raised when a configuration error occurs.
62598fad63b5f9789fe85130
class ServerAnnounceObserver(Observer): <NEW_LINE> <INDENT> def __init__(self, target='/dev/null', pct_interval=10): <NEW_LINE> <INDENT> self.pct_interval = pct_interval <NEW_LINE> self.target_handle = open(target, 'w') <NEW_LINE> self.last_update = 0 <NEW_LINE> super(ServerAnnounceObserver, self).__init__() <NEW_LINE> <DEDENT> def start(self, max_value): <NEW_LINE> <INDENT> self._send_output('Starting render of %d total tiles' % max_value) <NEW_LINE> super(ServerAnnounceObserver, self).start(max_value) <NEW_LINE> <DEDENT> def finish(self): <NEW_LINE> <INDENT> self._send_output('Render complete!') <NEW_LINE> super(ServerAnnounceObserver, self).finish() <NEW_LINE> self.target_handle.close() <NEW_LINE> <DEDENT> def update(self, current_value): <NEW_LINE> <INDENT> super(ServerAnnounceObserver, self).update(current_value) <NEW_LINE> if self._need_update(): <NEW_LINE> <INDENT> self._send_output('Rendered %d of %d tiles, %d%% complete' % (self.get_current_value(), self.get_max_value(), self.get_percentage())) <NEW_LINE> self.last_update = current_value <NEW_LINE> <DEDENT> <DEDENT> def _need_update(self): <NEW_LINE> <INDENT> return(self.get_percentage() - (self.last_update * 100.0 / self.get_max_value()) >= self.pct_interval) <NEW_LINE> <DEDENT> def _send_output(self, output): <NEW_LINE> <INDENT> self.target_handle.write('say %s\n' % output) <NEW_LINE> self.target_handle.flush()
Send the output to a Minecraft server via FIFO or stdin
62598fadbd1bec0571e150a8
class Category(models.Model): <NEW_LINE> <INDENT> category_id = models.AutoField(serialize=False, primary_key=True) <NEW_LINE> name = models.CharField(max_length=100, unique = True) <NEW_LINE> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> self.name = self.name.lower() <NEW_LINE> super(Category, self).save(*args, **kwargs)
Model for Category
62598fad5166f23b2e2433a2
class FidoClientMockError(FidoClientMock): <NEW_LINE> <INDENT> async def fetch_data(self): <NEW_LINE> <INDENT> raise PyFidoErrorMock("Fake Error")
Fake Fido client error.
62598fad236d856c2adc9422
class Container(Object): <NEW_LINE> <INDENT> def __init__(self, name, location=None, owner=None): <NEW_LINE> <INDENT> super(Container, self).__init__(name, location, owner) <NEW_LINE> self.locks.insert = locks.Pass() <NEW_LINE> self.locks.remove = locks.Pass()
An otherwise-default Object whose insert and remove locks are Pass().
62598fad57b8e32f52508100
class SlugRedirect(ModelBase): <NEW_LINE> <INDENT> content_type = models.ForeignKey(ContentType) <NEW_LINE> old_object_slug = models.CharField(max_length=200) <NEW_LINE> new_object_id = models.PositiveIntegerField() <NEW_LINE> new_object = generic.GenericForeignKey('content_type', 'new_object_id') <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return u'slug "%s" -> %s' % (self.old_object_slug, self.new_object) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> unique_together = ("content_type", "old_object_slug")
A model to represent a redirect from an old slug This is particular useful when we merge two candidates, but don't want the old URL to break
62598fad67a9b606de545f96
class LookupDimension(Dimension): <NEW_LINE> <INDENT> def __init__(self, expression, lookup, **kwargs): <NEW_LINE> <INDENT> if "default" in kwargs: <NEW_LINE> <INDENT> kwargs["lookup_default"] = kwargs.pop("default") <NEW_LINE> <DEDENT> kwargs["lookup"] = lookup <NEW_LINE> super(LookupDimension, self).__init__(expression, **kwargs)
DEPRECATED Returns the expression value looked up in a lookup dictionary
62598fade5267d203ee6b8d3
class ShowSnapshot3Test(BaseTest): <NEW_LINE> <INDENT> fixtureDB = True <NEW_LINE> fixtureCmds = ["aptly snapshot create snap1 from mirror wheezy-non-free"] <NEW_LINE> runCmd = "aptly snapshot show snap1" <NEW_LINE> outputMatchPrepare = lambda _, s: re.sub(r"Created At: [0-9:A-Za-z -]+\n", "", s)
show snapshot: from mirror w/o packages
62598fad76e4537e8c3ef577
class DeleteKnowledgeBaseRequest(proto.Message): <NEW_LINE> <INDENT> name = proto.Field(proto.STRING, number=1,) <NEW_LINE> force = proto.Field(proto.BOOL, number=2,)
Request message for [KnowledgeBases.DeleteKnowledgeBase][google.cloud.dialogflow.v2beta1.KnowledgeBases.DeleteKnowledgeBase]. Attributes: name (str): Required. The name of the knowledge base to delete. Format: ``projects/<Project ID>/locations/<Location ID>/knowledgeBases/<Knowledge Base ID>``. force (bool): Optional. Force deletes the knowledge base. When set to true, any documents in the knowledge base are also deleted.
62598fadac7a0e7691f724d3
class PortStateWrite(FeedbackCommand): <NEW_LINE> <INDENT> def __init__(self, State, WriteMask = [0xff, 0xff, 0xff]): <NEW_LINE> <INDENT> self.state = State <NEW_LINE> self.writeMask = WriteMask <NEW_LINE> self.cmdBytes = [ 27 ] + WriteMask + State <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<u3.PortStateWrite( State = %s, WriteMask = %s )>" % (self.state, self.writeMask)
PortStateWrite Feedback command State: A list of 3 bytes representing FIO, EIO, CIO WriteMask: A list of 3 bytes, representing which to update. The Default is all ones. >>> import u3 >>> d = u3.U3() >>> d.debug = True >>> d.getFeedback(u3.PortStateWrite(State = [0xab, 0xcd, 0xef], WriteMask = [0xff, 0xff, 0xff])) Sent: [0x81, 0xf8, 0x4, 0x0, 0x7f, 0x5, 0x0, 0x1b, 0xff, 0xff, 0xff, 0xab, 0xcd, 0xef] Response: [0xfa, 0xf8, 0x2, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0] [None]
62598fad4f88993c371f04ef
class StuckDoorDriver(PerfectDoorDriver): <NEW_LINE> <INDENT> def __init__(self, transit_time, accelerate_time): <NEW_LINE> <INDENT> super(StuckDoorDriver, self).__init__(transit_time, accelerate_time) <NEW_LINE> self.stuck_count = 0 <NEW_LINE> self.instance = self <NEW_LINE> <DEDENT> def start_door_signal(self): <NEW_LINE> <INDENT> self.stuck_count += 1 <NEW_LINE> super(StuckDoorDriver, self).start_door_signal() <NEW_LINE> <DEDENT> def _accelerate_timer_timeout(self): <NEW_LINE> <INDENT> self.accelerate_timer = False <NEW_LINE> if self.stuck_count >= 2: <NEW_LINE> <INDENT> if self.lower_limit_switch: <NEW_LINE> <INDENT> self.lower_limit_switch = False <NEW_LINE> signal(SIGNAL_LOWER_SWITCH_CHANGED).send(self.instance) <NEW_LINE> <DEDENT> elif self.upper_limit_switch: <NEW_LINE> <INDENT> self.upper_limit_switch = False <NEW_LINE> signal(SIGNAL_UPPER_SWITCH_CHANGED).send(self.instance) <NEW_LINE> <DEDENT> <DEDENT> elif self.transit_timer: <NEW_LINE> <INDENT> self.transit_timer.cancel() <NEW_LINE> self.transit_timer = False <NEW_LINE> <DEDENT> <DEDENT> def _transit_timer_timeout(self): <NEW_LINE> <INDENT> self.stuck_count = 0 <NEW_LINE> super(StuckDoorDriver, self)._transit_timer_timeout()
This driver emulates a door wich stucks on first trigger.
62598fad7d43ff24874273e7
class so(LieAlgebra): <NEW_LINE> <INDENT> abelian = False <NEW_LINE> def get_dimension(self): <NEW_LINE> <INDENT> n = self.get_shape() <NEW_LINE> return int(n*(n-1)/2) <NEW_LINE> <DEDENT> def get_vector(self): <NEW_LINE> <INDENT> n = self.get_shape() <NEW_LINE> vlen = int(n*(n-1)/2) <NEW_LINE> vector = np.zeros(vlen) <NEW_LINE> k = 0 <NEW_LINE> for i in range(n-1, 0, -1): <NEW_LINE> <INDENT> for j in range(n, i, -1): <NEW_LINE> <INDENT> vector[k] = (self[i-1, j-1])/(-1)**(i+j) <NEW_LINE> k += 1 <NEW_LINE> <DEDENT> <DEDENT> return vector <NEW_LINE> <DEDENT> def set_vector(self, vector): <NEW_LINE> <INDENT> vector = np.array(vector, dtype=np.float64) <NEW_LINE> n = self.shape[0] <NEW_LINE> vlen = int(n*(n-1)/2) <NEW_LINE> if vlen != len(vector): <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> mat = np.zeros((n, n)) <NEW_LINE> k = 0 <NEW_LINE> for i in range(n-1, 0, -1): <NEW_LINE> <INDENT> for j in range(n, i, -1): <NEW_LINE> <INDENT> mat[i-1, j-1] = (-1)**(i+j)*vector[k] <NEW_LINE> k += 1 <NEW_LINE> <DEDENT> <DEDENT> np.copyto(self, mat - mat.T)
Lie algebra :math:`so(n)`. For a Lie algebra element of the form :math:`(x, y, z)`, the matrix representation is of the form: .. math:: \begin{bmatrix} 0 & -z & y \\ z & 0 & -x \\ -y & x & 0 \end{bmatrix}
62598fad7b180e01f3e49035
class ParsableErrorMiddleware(object): <NEW_LINE> <INDENT> def __init__(self, app): <NEW_LINE> <INDENT> self.app = app <NEW_LINE> <DEDENT> def __call__(self, environ, start_response): <NEW_LINE> <INDENT> state = {} <NEW_LINE> def replacement_start_response(status, headers, exc_info=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> status_code = int(status.split(' ')[0]) <NEW_LINE> state['status_code'] = status_code <NEW_LINE> <DEDENT> except (ValueError, TypeError): <NEW_LINE> <INDENT> raise Exception(_( 'ErrorDocumentMiddleware received an invalid ' 'status %s') % status) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if (state['status_code'] // 100) not in (2, 3): <NEW_LINE> <INDENT> headers = [(h, v) for (h, v) in headers if h not in ('Content-Length', 'Content-Type') ] <NEW_LINE> <DEDENT> state['headers'] = headers <NEW_LINE> return start_response(status, headers, exc_info) <NEW_LINE> <DEDENT> <DEDENT> if 'HTTP_ACCEPT' not in environ or environ['HTTP_ACCEPT'] == '*/*': <NEW_LINE> <INDENT> environ['HTTP_ACCEPT'] = 'application/json' <NEW_LINE> <DEDENT> app_iter = self.app(environ, replacement_start_response) <NEW_LINE> if (state['status_code'] // 100) not in (2, 3): <NEW_LINE> <INDENT> req = webob.Request(environ) <NEW_LINE> if (req.accept.best_match(['application/json', 'application/xml']) == 'application/xml'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> body = [et.ElementTree.tostring( et.ElementTree.fromstring('<error_message>' + '\n'.join(app_iter) + '</error_message>'))] <NEW_LINE> <DEDENT> except et.ElementTree.ParseError as err: <NEW_LINE> <INDENT> LOG.error('Error parsing HTTP response: %s', err) <NEW_LINE> body = ['<error_message>%s' % state['status_code'] + '</error_message>'] <NEW_LINE> <DEDENT> state['headers'].append(('Content-Type', 'application/xml')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if six.PY3: <NEW_LINE> <INDENT> app_iter = [i.decode('utf-8') for i in app_iter] <NEW_LINE> <DEDENT> body = [json.dumps({'error_message': 
'\n'.join(app_iter)})] <NEW_LINE> if six.PY3: <NEW_LINE> <INDENT> body = [item.encode('utf-8') for item in body] <NEW_LINE> <DEDENT> state['headers'].append(('Content-Type', 'application/json')) <NEW_LINE> <DEDENT> state['headers'].append(('Content-Length', str(len(body[0])))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> body = app_iter <NEW_LINE> <DEDENT> return body
Replace error body with something the client can parse.
62598fad3539df3088ecc27c
class MZD(object): <NEW_LINE> <INDENT> def __init__(self,iterable=None): <NEW_LINE> <INDENT> self.d = dict() <NEW_LINE> if iterable: <NEW_LINE> <INDENT> for key,val in iterable: <NEW_LINE> <INDENT> self[key]=val <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __contains__(self,other): <NEW_LINE> <INDENT> for key in self.d.keys(): <NEW_LINE> <INDENT> if key == other: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def __getitem__(self,key): <NEW_LINE> <INDENT> for skey in self.d.keys(): <NEW_LINE> <INDENT> if key == skey: <NEW_LINE> <INDENT> return self.d[skey] <NEW_LINE> <DEDENT> <DEDENT> raise KeyError(str(key)) <NEW_LINE> <DEDENT> def __setitem__(self,key,val): <NEW_LINE> <INDENT> for skey in self.d.keys(): <NEW_LINE> <INDENT> if key == skey: <NEW_LINE> <INDENT> nkey = (key+skey) / 2 <NEW_LINE> nval = (self.d[skey] + val) / 2 <NEW_LINE> del self.d[skey] <NEW_LINE> self.d[nkey] = nval <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> self.d[key] = val <NEW_LINE> <DEDENT> def update(self,other_dict): <NEW_LINE> <INDENT> for key,val in other_dict.items(): <NEW_LINE> <INDENT> self[key] = val <NEW_LINE> <DEDENT> <DEDENT> def items(self): <NEW_LINE> <INDENT> for key,val in self.d.items(): <NEW_LINE> <INDENT> yield key,val <NEW_LINE> <DEDENT> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return self.d.keys() <NEW_LINE> <DEDENT> def values(self): <NEW_LINE> <INDENT> return self.d.values() <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self.d.__iter__() <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.d) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.d.__repr__() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.d)
Dict-like object for storing {MZ: intensity} data. Overrides __contains__ in order to use the MZ __eq__ method. Note: MZ objects are mutable, and weird things can happen when making dicts from mutable objects... might re-implement in Cython to solve this problem. See below for an example: https://stackoverflow.com/questions/4828080/how-to-make-an-immutable-object-in-python
62598fad99cbb53fe6830ea2
class SQARequirements(MooseMarkdownCommon, Pattern): <NEW_LINE> <INDENT> RE = r'(?<!`)!sqa requirements' <NEW_LINE> @staticmethod <NEW_LINE> def defaultSettings(): <NEW_LINE> <INDENT> settings = MooseMarkdownCommon.defaultSettings() <NEW_LINE> return settings <NEW_LINE> <DEDENT> def __init__(self, markdown_instance=None, repo=None, **kwargs): <NEW_LINE> <INDENT> MooseMarkdownCommon.__init__(self, **kwargs) <NEW_LINE> Pattern.__init__(self, self.RE, markdown_instance) <NEW_LINE> self._repo = repo <NEW_LINE> <DEDENT> def handleMatch(self, match): <NEW_LINE> <INDENT> repo_issue = "https://github.com/idaholab/moose/issues" <NEW_LINE> ol = etree.Element('ol') <NEW_LINE> ol.set('class', 'collection browser-default') <NEW_LINE> for req in get_requirements(): <NEW_LINE> <INDENT> li = etree.SubElement(ol, 'li') <NEW_LINE> li.set('class', 'collection-item') <NEW_LINE> p = etree.SubElement(li, 'p') <NEW_LINE> p.text = req.requirement <NEW_LINE> p = etree.SubElement(li, 'p') <NEW_LINE> p.text = 'Specification: ' <NEW_LINE> a = etree.SubElement(p, 'a') <NEW_LINE> a.set('href', '{}/{}'.format(self._repo, req.path)) <NEW_LINE> a.text = '{}:{}'.format(req.path, req.name) <NEW_LINE> if req.design: <NEW_LINE> <INDENT> p = etree.SubElement(li, 'p') <NEW_LINE> p.text = 'Design: ' <NEW_LINE> for design in req.design.split(): <NEW_LINE> <INDENT> node = self.getFilename(design) <NEW_LINE> a = etree.SubElement(p, 'a') <NEW_LINE> a.set("href", '/' + node[1].destination) <NEW_LINE> a.text = node[1].name + ' ' <NEW_LINE> <DEDENT> <DEDENT> if req.issues: <NEW_LINE> <INDENT> p = etree.SubElement(li, 'p') <NEW_LINE> p.text = 'Issues: ' <NEW_LINE> for issue in req.issues.split(): <NEW_LINE> <INDENT> a = etree.SubElement(p, 'a') <NEW_LINE> a.set("href", "{}/{}".format(repo_issue, issue[1:])) <NEW_LINE> a.text = issue + ' ' <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return ol
Builds SQA requirement list from test specification files.
62598fad7047854f4633f3a4
class ElevatorDoor(mc.RoomExit): <NEW_LINE> <INDENT> def __init__(self, *arg, **kwarg): <NEW_LINE> <INDENT> mc.RoomExit.__init__(self, *arg, **kwarg) <NEW_LINE> self.isNoisey = True <NEW_LINE> self.isOpen = False <NEW_LINE> <DEDENT> def open_state(self, isOpen): <NEW_LINE> <INDENT> if isOpen == self.isOpen: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> mc.RoomExit.open_state(self, isOpen) <NEW_LINE> if self.isNoisey and self.isOpen: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if self.isOpen: <NEW_LINE> <INDENT> self.emote(text="slides open.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.emote(text="slides closed.")
This special door class cannot be opened by players but is never locked.
62598fad91f36d47f2230e8b
class Decoder(nn.Module): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(Decoder, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def init_state(self, enc_outputs, *args): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def forward(self, X, state): <NEW_LINE> <INDENT> raise NotImplementedError
The base decoder interface for the encoder-decoder architecture.
62598fad5fc7496912d48267
class FileRelationshipFactory(factory.django.DjangoModelFactory): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = "tool_manager.FileRelationship"
Minimal representation of a FileRelationship
62598fada8370b77170f03a7
class MainViewTestCase(untitled.ClickAppTestCase): <NEW_LINE> <INDENT> def test_initial_label(self): <NEW_LINE> <INDENT> app = self.launch_application() <NEW_LINE> label = app.main_view.select_single(objectName='label') <NEW_LINE> self.assertThat(label.text, Equals('Hello..')) <NEW_LINE> <DEDENT> def test_click_button_should_update_label(self): <NEW_LINE> <INDENT> app = self.launch_application() <NEW_LINE> button = app.main_view.select_single(objectName='button') <NEW_LINE> app.pointing_device.click_object(button) <NEW_LINE> label = app.main_view.select_single(objectName='label') <NEW_LINE> self.assertThat(label.text, Eventually(Equals('..world!')))
Generic tests for the Hello World app.
62598fad8a43f66fc4bf2147
class Job(object): <NEW_LINE> <INDENT> _ident = 0 <NEW_LINE> _lock = threading.Lock() <NEW_LINE> def __init__(self, func, job_props, interval, when=None, job_id=None): <NEW_LINE> <INDENT> self._props = job_props <NEW_LINE> self._func = func <NEW_LINE> if when is None: <NEW_LINE> <INDENT> self._when = time.time() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._when = when <NEW_LINE> <DEDENT> self._interval = interval <NEW_LINE> if job_id is not None: <NEW_LINE> <INDENT> self._id = job_id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> with Job._lock: <NEW_LINE> <INDENT> self._id = Job._ident + 1 <NEW_LINE> Job._ident = Job._ident + 1 <NEW_LINE> <DEDENT> <DEDENT> self._stopped = False <NEW_LINE> <DEDENT> def ident(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> def get_interval(self): <NEW_LINE> <INDENT> return self._interval <NEW_LINE> <DEDENT> def set_interval(self, interval): <NEW_LINE> <INDENT> self._interval = interval <NEW_LINE> <DEDENT> def get_expiration(self): <NEW_LINE> <INDENT> return self._when <NEW_LINE> <DEDENT> def set_initial_due_time(self, when): <NEW_LINE> <INDENT> if self._when is None: <NEW_LINE> <INDENT> self._when = when <NEW_LINE> <DEDENT> <DEDENT> def update_expiration(self): <NEW_LINE> <INDENT> self._when += self._interval <NEW_LINE> <DEDENT> def get(self, key, default): <NEW_LINE> <INDENT> return self._props.get(key, default) <NEW_LINE> <DEDENT> def get_props(self): <NEW_LINE> <INDENT> return self._props <NEW_LINE> <DEDENT> def set_props(self, props): <NEW_LINE> <INDENT> self._props = props <NEW_LINE> <DEDENT> def __cmp__(self, other): <NEW_LINE> <INDENT> if other is None: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> self_k = (self.get_expiration(), self.ident()) <NEW_LINE> other_k = (other.get_expiration(), other.ident()) <NEW_LINE> if self_k == other_k: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> elif self_k < other_k: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 1 <NEW_LINE> 
<DEDENT> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, Job) and (self.ident() == other.ident()) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.ident()) <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return self.__cmp__(other) != 0 <NEW_LINE> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> return self.__cmp__(other) > 0 <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.__cmp__(other) < 0 <NEW_LINE> <DEDENT> def __ge__(self, other): <NEW_LINE> <INDENT> return self.__cmp__(other) >= 0 <NEW_LINE> <DEDENT> def __le__(self, other): <NEW_LINE> <INDENT> return self.__cmp__(other) <= 0 <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> self._func(self) <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self._stopped = True <NEW_LINE> <DEDENT> def stopped(self): <NEW_LINE> <INDENT> return self._stopped
A Job wraps a callback together with its scheduling timestamp and repeat interval.
62598fad7047854f4633f3a5
class StockCode(scrapy.Item): <NEW_LINE> <INDENT> name = scrapy.Field() <NEW_LINE> code = scrapy.Field()
stock code and its name
62598fad63d6d428bbee2776
class peakFilter(): <NEW_LINE> <INDENT> def __init__(self,c,xmin,xmax): <NEW_LINE> <INDENT> self.c = c <NEW_LINE> self.xmin = xmin <NEW_LINE> self.xmax = xmax <NEW_LINE> <DEDENT> def peakPos(self,x,y): <NEW_LINE> <INDENT> allpeaks = [] <NEW_LINE> pieces_x, pieces_y, pieces_id = scissor(self.c,x,y) <NEW_LINE> for px, py, pid in zip(pieces_x, pieces_y, pieces_id): <NEW_LINE> <INDENT> peak_x, peak_y, peak_id = peakIden(px,py,pid) <NEW_LINE> peaks = [{'xv':xv, 'yv':yv, 'pid':pid} for xv, yv,pid in zip(peak_x, peak_y, peak_id) if xv>self.xmin and xv<self.xmax] <NEW_LINE> [allpeaks.append(peak) for peak in peaks] <NEW_LINE> <DEDENT> return pd.DataFrame(allpeaks) <NEW_LINE> <DEDENT> def markPeak(self,ax,x,y,s,color='k'): <NEW_LINE> <INDENT> allpeaks = self.peakPos(x,y) <NEW_LINE> if not allpeaks.empty: <NEW_LINE> <INDENT> ax.scatter(allpeaks.xv,allpeaks.yv,s=s,marker='v',color=color) <NEW_LINE> <DEDENT> return None
Call function peakIden to do the job within a range of (xmin,xmax) Arguments: c: a critical value below which the input data is ignored xmin: lower bound of the range xmax: upper bound of the range Methods: peakPos: call peakIden to find peaks within (xmin,xmax) markPeak: mark the peak position (x,y) on existing axis handle (ax)
62598fad55399d3f056264ef
class RubyArtifact: <NEW_LINE> <INDENT> def __init__(self, platform, arch): <NEW_LINE> <INDENT> self.name = 'ruby_native_gem_%s_%s' % (platform, arch) <NEW_LINE> self.platform = platform <NEW_LINE> self.arch = arch <NEW_LINE> self.labels = ['artifact', 'ruby', platform, arch] <NEW_LINE> <DEDENT> def pre_build_jobspecs(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def build_jobspec(self): <NEW_LINE> <INDENT> if self.platform == 'windows': <NEW_LINE> <INDENT> raise Exception("Not supported yet") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.platform == 'linux': <NEW_LINE> <INDENT> environ = {} <NEW_LINE> if self.arch == 'x86': <NEW_LINE> <INDENT> environ['SETARCH_CMD'] = 'linux32' <NEW_LINE> <DEDENT> return create_docker_jobspec(self.name, 'tools/dockerfile/grpc_artifact_linux_%s' % self.arch, 'tools/run_tests/build_artifact_ruby.sh', environ=environ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return create_jobspec(self.name, ['tools/run_tests/build_artifact_ruby.sh'])
Builds ruby native gem.
62598fad66656f66f7d5a3bb