content
stringlengths
7
1.05M
fixed_cases
stringlengths
1
1.28M
#! /usr/bin/python3
#! @author: @ruhend (Mudigonda Himansh)
#! CRC Encoder


def list2int(mylist):
    """Convert a list of bits (0/1) into the integer they encode, MSB first."""
    return int("".join(str(bit) for bit in mylist), 2)


def DecimalToBinary(num):
    """Return the binary digits of num as a list of ints, MSB first.

    Fixed: the original discarded the result of its recursive call and
    returned only the final remainder digit.
    """
    if num == 0:
        return [0]
    answer = []
    while num >= 1:
        answer.append(num % 2)
        num //= 2
    answer.reverse()
    return answer


def perform_XOR(divisor, divider):
    """XOR two bit-lists and return the result as a bit-list.

    Fixed: the original printed the result but returned None, which broke
    the caller that assigned the return value to ``remainder``.
    """
    xor = list2int(divider) ^ list2int(divisor)
    xor_bits = DecimalToBinary(xor)
    print("xor ", xor_bits)
    return xor_bits


def crc_remainder(dataword, divisor):
    """Return the CRC remainder (bit-list of width len(divisor)-1).

    Performs standard modulo-2 long division of dataword padded with
    len(divisor)-1 zero bits.
    """
    extra_zeros = len(divisor) - 1
    value = list2int(dataword + [0] * extra_zeros)
    div = list2int(divisor)
    # Walk the dividend from its most significant bit downward.
    for shift in range(len(dataword) - 1, -1, -1):
        if (value >> (shift + extra_zeros)) & 1:
            value ^= div << shift
    bits = DecimalToBinary(value)
    # Left-pad so the remainder always has len(divisor)-1 bits.
    return [0] * (extra_zeros - len(bits)) + bits


def main():
    """Read a dataword and a divisor (space-separated bits) and print the CRC remainder."""
    dataword = [int(i) for i in input().split(' ')]
    divisor = [int(i) for i in input().split(' ')]
    print("remainder:", crc_remainder(dataword, divisor))


if __name__ == "__main__":
    main()
def list2int(mylist):
    """Convert a list of bits (0/1) into the integer they encode, MSB first."""
    return int(''.join(str(bit) for bit in mylist), 2)


def decimal_to_binary(num):
    """Return the binary digits of num as a list of ints, MSB first.

    Fixed: the original discarded the result of its recursive call and
    returned only the final remainder digit.
    """
    if num == 0:
        return [0]
    answer = []
    while num >= 1:
        answer.append(num % 2)
        num //= 2
    answer.reverse()
    return answer


def perform_xor(divisor, divider):
    """XOR two bit-lists and return the result as a bit-list.

    Fixed: the original printed the result but returned None, which broke
    the caller that assigned the return value to ``remainder``.
    """
    xor = list2int(divider) ^ list2int(divisor)
    xor_bits = decimal_to_binary(xor)
    print('xor ', xor_bits)
    return xor_bits


def crc_remainder(dataword, divisor):
    """Return the CRC remainder (bit-list of width len(divisor)-1) via modulo-2 division."""
    extra_zeros = len(divisor) - 1
    value = list2int(dataword + [0] * extra_zeros)
    div = list2int(divisor)
    # Walk the dividend from its most significant bit downward.
    for shift in range(len(dataword) - 1, -1, -1):
        if (value >> (shift + extra_zeros)) & 1:
            value ^= div << shift
    bits = decimal_to_binary(value)
    # Left-pad so the remainder always has len(divisor)-1 bits.
    return [0] * (extra_zeros - len(bits)) + bits


def main():
    """Read a dataword and a divisor (space-separated bits) and print the CRC remainder."""
    dataword = [int(i) for i in input().split(' ')]
    divisor = [int(i) for i in input().split(' ')]
    print('remainder:', crc_remainder(dataword, divisor))


if __name__ == '__main__':
    main()
class PexOSDeprovisionHandler(object):
    """No-op deprovision handler for PexOS: every operation is a stub."""

    def __init__(self):
        pass

    def run(self, force=False, deluser=False):
        # Deprovisioning is intentionally a no-op on this platform.
        return

    def run_changed_unique_id(self):
        # Unique-id changes likewise require no action here.
        return
class Pexosdeprovisionhandler(object):
    """No-op deprovision handler for PexOS: every operation is a stub."""

    def __init__(self):
        pass

    def run(self, force=False, deluser=False):
        # Deprovisioning is intentionally a no-op on this platform.
        return

    def run_changed_unique_id(self):
        # Unique-id changes likewise require no action here.
        return
class TPDO:
    """Transmit process-data-object placeholder; the index is currently unused."""

    def __init__(self, index):
        # Stub constructor: the object carries no state yet.
        pass


class RPDO:
    """Receive process-data-object placeholder; the index is currently unused."""

    def __init__(self, index):
        # Stub constructor: the object carries no state yet.
        pass
class Tpdo:
    """Transmit process-data-object placeholder; the index is currently unused."""

    def __init__(self, index):
        # Stub constructor: the object carries no state yet.
        pass


class Rpdo:
    """Receive process-data-object placeholder; the index is currently unused."""

    def __init__(self, index):
        # Stub constructor: the object carries no state yet.
        pass
class Field(object):
    """Descriptor that stores its value on the owning instance as '_<name>'."""

    def __init__(self, name):
        self.name = name
        self.internal_name = '_' + name

    def __get__(self, instance, instance_type):
        # Class-level access returns the descriptor itself.
        if instance is None:
            return self
        return getattr(instance, self.internal_name, '')

    def __set__(self, instance, value):
        setattr(instance, self.internal_name, value)


class Customer(object):
    """
    >>> foo = Customer()
    >>> print('Before:', repr(foo.first_name), foo.__dict__)
    Before: '' {}
    >>> foo.first_name = 'Euclid'
    >>> print('After: ', repr(foo.first_name), foo.__dict__)
    After: 'Euclid' {'_first_name': 'Euclid'}
    """

    # Class attributes: each field name must be repeated by hand here.
    first_name = Field('first_name')
    last_name = Field('last_name')
    prefix = Field('prefix')
    suffix = Field('suffix')


class GoodField(object):
    """Like Field, but its name is filled in by the Meta metaclass."""

    def __init__(self):
        # Both attributes are assigned by the metaclass, not the caller.
        self.name = None
        self.internal_name = None

    def __get__(self, instance, instance_type):
        if instance is None:
            return self
        return getattr(instance, self.internal_name, '')

    def __set__(self, instance, value):
        setattr(instance, self.internal_name, value)


class Meta(type):
    """Metaclass that wires each GoodField's name from its attribute name."""

    def __new__(meta, name, bases, class_dict):
        for key, value in class_dict.items():
            if isinstance(value, GoodField):
                value.name = key
                value.internal_name = '_' + key
        return type.__new__(meta, name, bases, class_dict)


class DatabaseRow(object, metaclass=Meta):
    """Base class: subclasses get their GoodField descriptors auto-named."""
    pass


class BetterCustomer(DatabaseRow):
    """
    By using the metaclass, the new DatabaseRow base class, and the new
    GoodField descriptor, the class defination for a database row no longer
    has the redundancy from before
    """

    first_name = GoodField()
    last_name = GoodField()
    prefix = GoodField()
    suffix = GoodField()


def main():
    foo = BetterCustomer()
    print('Before:', repr(foo.first_name), foo.__dict__)
    foo.first_name = 'Euler'
    print('After: ', repr(foo.first_name), foo.__dict__)


if __name__ == '__main__':
    main()
class Field(object):
    """Descriptor that stores its value on the owning instance as '_<name>'."""

    def __init__(self, name):
        self.name = name
        self.internal_name = '_' + self.name

    def __get__(self, instance, instance_type):
        # Class-level access returns the descriptor itself.
        if instance is None:
            return self
        return getattr(instance, self.internal_name, '')

    def __set__(self, instance, value):
        setattr(instance, self.internal_name, value)


class Customer(object):
    """
    >>> foo = Customer()
    >>> print('Before:', repr(foo.first_name), foo.__dict__)
    Before: '' {}
    >>> foo.first_name = 'Euclid'
    >>> print('After: ', repr(foo.first_name), foo.__dict__)
    After: 'Euclid' {'_first_name': 'Euclid'}
    """

    # Fixed: the fields referenced an undefined lowercase `field` name;
    # the descriptor class defined above is `Field`.
    first_name = Field('first_name')
    last_name = Field('last_name')
    prefix = Field('prefix')
    suffix = Field('suffix')


class Goodfield(object):
    """Like Field, but its name is filled in by the Meta metaclass."""

    def __init__(self):
        # Both attributes are assigned by the metaclass, not the caller.
        self.name = None
        self.internal_name = None

    def __get__(self, instance, instance_type):
        if instance is None:
            return self
        return getattr(instance, self.internal_name, '')

    def __set__(self, instance, value):
        setattr(instance, self.internal_name, value)


class Meta(type):
    """Metaclass that wires each Goodfield's name from its attribute name."""

    def __new__(meta, name, bases, class_dict):
        for (key, value) in class_dict.items():
            # Fixed: the isinstance check used undefined `GoodField`.
            if isinstance(value, Goodfield):
                value.name = key
                value.internal_name = '_' + key
        cls = type.__new__(meta, name, bases, class_dict)
        return cls


class Databaserow(object, metaclass=Meta):
    """Base class: subclasses get their Goodfield descriptors auto-named."""
    pass


class Bettercustomer(Databaserow):
    """
    By using the metaclass, the new DatabaseRow base class, and the new
    GoodField descriptor, the class defination for a database row no longer
    has the redundancy from before
    """

    # Fixed: base class and field factory referenced undefined names
    # (`DatabaseRow`, `good_field`); use the classes defined in this module.
    first_name = Goodfield()
    last_name = Goodfield()
    prefix = Goodfield()
    suffix = Goodfield()


def main():
    # Fixed: `better_customer` was undefined; the class is Bettercustomer.
    foo = Bettercustomer()
    print('Before:', repr(foo.first_name), foo.__dict__)
    foo.first_name = 'Euler'
    print('After: ', repr(foo.first_name), foo.__dict__)


if __name__ == '__main__':
    main()
def merge_notch_list(notch_list_1, notch_list_2):
    """Merge two notches list

    Parameters
    ----------
    notch_list_1 : list
        First notch list to merge
    notch_list_2 : list
        Second notch list to merge

    Returns
    -------
    notch_list : list
        list of dictionary with key: "begin_angle", "end_angle", "obj"

    Raises
    ------
    NotchError
        If two notches overlap angularly.
    """
    N1 = len(notch_list_1)
    N2 = len(notch_list_2)
    merged = []
    ii, jj = 0, 0  # Index to go thought the lists
    while ii < N1 and jj < N2:
        if (
            notch_list_1[ii]["begin_angle"] < notch_list_2[jj]["begin_angle"]
            and notch_list_1[ii]["end_angle"] <= notch_list_2[jj]["begin_angle"]
        ):
            # Add a notch from notch_list_1
            merged.append(notch_list_1[ii])
            ii += 1
        elif (
            notch_list_2[jj]["begin_angle"] < notch_list_1[ii]["begin_angle"]
            and notch_list_2[jj]["end_angle"] <= notch_list_1[ii]["begin_angle"]
        ):
            # Add a notch from notch_list_2
            merged.append(notch_list_2[jj])
            jj += 1
        else:
            # Fixed: the colliding element of notch_list_2 is at index jj,
            # not ii (ii may be out of range for notch_list_2).
            raise NotchError(
                "Notches and/or Slots are coliding:\n"
                + str(notch_list_1[ii])
                + "\n"
                + str(notch_list_2[jj])
            )
    # One of the list is not "finished"
    merged = merged + notch_list_1[ii:] + notch_list_2[jj:]
    return merged


class NotchError(Exception):
    """Raised when notch are coliding """

    pass
def merge_notch_list(notch_list_1, notch_list_2):
    """Merge two notches list

    Parameters
    ----------
    notch_list_1 : list
        First notch list to merge
    notch_list_2 : list
        Second notch list to merge

    Returns
    -------
    notch_list : list
        list of dictionary with key: "begin_angle", "end_angle", "obj"

    Raises
    ------
    Notcherror
        If two notches overlap angularly.
    """
    n1 = len(notch_list_1)
    n2 = len(notch_list_2)
    merged = []
    (ii, jj) = (0, 0)
    # Fixed: the loop condition used undefined uppercase N1/N2.
    while ii < n1 and jj < n2:
        if notch_list_1[ii]['begin_angle'] < notch_list_2[jj]['begin_angle'] and notch_list_1[ii]['end_angle'] <= notch_list_2[jj]['begin_angle']:
            merged.append(notch_list_1[ii])
            ii += 1
        elif notch_list_2[jj]['begin_angle'] < notch_list_1[ii]['begin_angle'] and notch_list_2[jj]['end_angle'] <= notch_list_1[ii]['begin_angle']:
            merged.append(notch_list_2[jj])
            jj += 1
        else:
            # Fixed: raise the Notcherror class defined below (`notch_error`
            # was undefined) and report element jj of list 2, not element ii.
            raise Notcherror('Notches and/or Slots are coliding:\n' + str(notch_list_1[ii]) + '\n' + str(notch_list_2[jj]))
    merged = merged + notch_list_1[ii:] + notch_list_2[jj:]
    return merged


class Notcherror(Exception):
    """Raised when notch are coliding """
    pass
"""pywxclient package module.""" __version__ = '0.1.2'
"""pywxclient package module.""" __version__ = '0.1.2'
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes for cloud/file references yielded by gsutil iterators."""


class BucketListingRefType(object):
  """Enum class for describing BucketListingRefs."""
  BUCKET = 'bucket'  # Cloud bucket
  OBJECT = 'object'  # Cloud object or filesystem file
  PREFIX = 'prefix'  # Cloud bucket subdir or filesystem directory


class BucketListingRef(object):
  """A reference to one fully expanded iterator result.

  This allows polymorphic iteration over wildcard-iterated URLs.  The
  reference contains a fully expanded URL string containing no wildcards and
  referring to exactly one entity (if a wildcard is contained, it is assumed
  this is part of the raw string and should never be treated as a wildcard).

  Each reference represents a Bucket, Object, or Prefix.  For filesystem URLs,
  Objects represent files and Prefixes represent directories.

  The root_object member contains the underlying object as it was retrieved.
  It is populated by the calling iterator, which may only request certain
  fields to reduce the number of server requests.

  For filesystem URLs, root_object is not populated.
  """

  def __init__(self, url_string, ref_type, root_object=None):
    """Instantiates a BucketListingRef from the URL string and object metadata.

    Args:
      url_string: String describing the referenced object.
      ref_type: BucketListingRefType for the underlying object.
      root_object: Underlying object metadata, if available.

    Raises:
      BucketListingRefException: If reference type is invalid.
    """
    if ref_type not in (BucketListingRefType.BUCKET,
                        BucketListingRefType.OBJECT,
                        BucketListingRefType.PREFIX):
      raise BucketListingRefException('Invalid ref_type %s' % ref_type)
    self.url_string = url_string
    self.ref_type = ref_type
    self.root_object = root_object

  def GetUrlString(self):
    return self.url_string

  def __str__(self):
    return self.url_string


class BucketListingRefException(Exception):
  """Exception raised for invalid BucketListingRef requests.

  Fixed: originally derived from Python 2's StandardError, which was removed
  in Python 3; Exception is the direct replacement.
  """

  def __init__(self, reason):
    Exception.__init__(self)
    self.reason = reason

  def __repr__(self):
    return 'BucketListingRefException: %s' % self.reason

  def __str__(self):
    return 'BucketListingRefException: %s' % self.reason
"""Classes for cloud/file references yielded by gsutil iterators.""" class Bucketlistingreftype(object): """Enum class for describing BucketListingRefs.""" bucket = 'bucket' object = 'object' prefix = 'prefix' class Bucketlistingref(object): """A reference to one fully expanded iterator result. This allows polymorphic iteration over wildcard-iterated URLs. The reference contains a fully expanded URL string containing no wildcards and referring to exactly one entity (if a wildcard is contained, it is assumed this is part of the raw string and should never be treated as a wildcard). Each reference represents a Bucket, Object, or Prefix. For filesystem URLs, Objects represent files and Prefixes represent directories. The root_object member contains the underlying object as it was retrieved. It is populated by the calling iterator, which may only request certain fields to reduce the number of server requests. For filesystem URLs, root_object is not populated. """ def __init__(self, url_string, ref_type, root_object=None): """Instantiates a BucketListingRef from the URL string and object metadata. Args: url_string: String describing the referenced object. ref_type: BucketListingRefType for the underlying object. root_object: Underlying object metadata, if available. Raises: BucketListingRefException: If reference type is invalid. 
""" if ref_type not in (BucketListingRefType.BUCKET, BucketListingRefType.OBJECT, BucketListingRefType.PREFIX): raise bucket_listing_ref_exception('Invalid ref_type %s' % ref_type) self.url_string = url_string self.ref_type = ref_type self.root_object = root_object def get_url_string(self): return self.url_string def __str__(self): return self.url_string class Bucketlistingrefexception(StandardError): """Exception raised for invalid BucketListingRef requests.""" def __init__(self, reason): StandardError.__init__(self) self.reason = reason def __repr__(self): return 'BucketListingRefException: %s' % self.reason def __str__(self): return 'BucketListingRefException: %s' % self.reason
'''THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.'''
# Bitcoin Cash (BCH)   qpz32c4lg7x7lnk9jg6qg7s4uavdce89myax5v5nuk
# Ether (ETH) -        0x843d3DEC2A4705BD4f45F674F641cE2D0022c9FB
# Litecoin (LTC) -     Lfk5y4F7KZa9oRxpazETwjQnHszEPvqPvu
# Bitcoin (BTC) -      34L8qWiQyKr8k4TnHDacfjbaSqQASbBtTd
# contact :- github@jamessawyer.co.uk


def median(nums):
    """
    Find median of a list of numbers.
    >>> median([0])
    0
    >>> median([4,1,3,2])
    2.5

    Args:
        nums: List of nums

    Returns:
        Median.
    """
    ordered = sorted(nums)
    count = len(ordered)
    mid = count // 2
    if count % 2 == 0:
        # Even count: average the two middle elements (float result).
        return (ordered[mid] + ordered[mid - 1]) / 2.0
    # Odd count: the single middle element.
    return ordered[mid]


def main():
    print("Odd number of numbers:")
    print(median([2, 4, 6, 8, 20, 50, 70]))
    print("Even number of numbers:")
    print(median([2, 4, 6, 8, 20, 50]))


if __name__ == "__main__":
    main()
"""THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.""" def median(nums): """ Find median of a list of numbers. >>> median([0]) 0 >>> median([4,1,3,2]) 2.5 Args: nums: List of nums Returns: Median. """ sorted_list = sorted(nums) med = None if len(sorted_list) % 2 == 0: mid_index_1 = len(sorted_list) // 2 mid_index_2 = len(sorted_list) // 2 - 1 med = (sorted_list[mid_index_1] + sorted_list[mid_index_2]) / float(2) else: mid_index = (len(sorted_list) - 1) // 2 med = sorted_list[mid_index] return med def main(): print('Odd number of numbers:') print(median([2, 4, 6, 8, 20, 50, 70])) print('Even number of numbers:') print(median([2, 4, 6, 8, 20, 50])) if __name__ == '__main__': main()
def main():
    """Prompt for a filename and print the third word of every 'From' line.

    Fixes over the original: catches OSError instead of a bare except (which
    also swallowed KeyboardInterrupt/SystemExit), and closes the file via a
    context manager instead of leaking the handle.
    """
    fname = input('Enter a filename:')
    try:
        fhand = open(fname)
    except OSError:
        print('File cannot be opened:', fname)
        return
    with fhand:
        for line in fhand:
            words = line.split()
            # Only mailbox 'From ' header lines with at least 3 fields qualify.
            if len(words) >= 3 and words[0] == 'From':
                print(words[2])


if __name__ == '__main__':
    main()
def main():
    """Prompt for a filename and print the third word of every 'From' line.

    Fixes over the original: catches OSError instead of a bare except (which
    also swallowed KeyboardInterrupt/SystemExit), and closes the file via a
    context manager instead of leaking the handle.
    """
    fname = input('Enter a filename:')
    try:
        fhand = open(fname)
    except OSError:
        print('File cannot be opened:', fname)
        return
    with fhand:
        for line in fhand:
            words = line.split()
            # Only mailbox 'From ' header lines with at least 3 fields qualify.
            if len(words) >= 3 and words[0] == 'From':
                print(words[2])


if __name__ == '__main__':
    main()
# Test suite for Maven POM merging (//maven:poms.bzl): verifies property,
# dependency, and multi-level parent/child merge behaviour.
load(
    ":poms_for_testing.bzl",
    "COMPLEX_POM",
    "GRANDPARENT_POM",
    "MERGED_EXPECTED_POM",
    "PARENT_POM",
)
load(":testing.bzl", "asserts", "test_suite")
load("//maven:poms.bzl", "poms")
load("//maven:xml.bzl", "xml")

# Child properties override same-named parent properties; others are unioned.
def merge_properties_test(env):
    parent = """<project><properties>
 <foo>foo</foo>
 <bar>bar</bar>
 </properties></project>"""
    child = """<project><properties>
 <baz>baz</baz>
 <bar>blah</bar>
 </properties></project>"""
    merged = poms.merge_parent(parent = poms.parse(parent), child = poms.parse(child))
    properties = poms.extract_properties(merged)
    asserts.equals(env, 3, len(properties), "number of properties")
    asserts.equals(env, "foo", properties["foo"], "property foo")
    asserts.equals(env, "blah", properties["bar"], "property bar")
    asserts.equals(env, "baz", properties["baz"], "property baz")

# A child dependency overrides the version but inherits scope from the parent.
def merge_dependency_test(env):
    parent = """<project><dependencies><dependency>
 <groupId>foo</groupId><artifactId>bar</artifactId><version>1.0</version><scope>test</scope>
 </dependency></dependencies></project>"""
    child = """<project><dependencies><dependency>
 <groupId>foo</groupId><artifactId>bar</artifactId><version>2.0</version>
 </dependency></dependencies></project>"""
    merged = poms.merge_parent(parent = poms.parse(parent), child = poms.parse(child))
    dependencies = poms.extract_dependencies(merged)
    asserts.equals(env, 1, len(dependencies), "number of dependencies")
    dependency = dependencies[0]
    asserts.equals(env, "foo", dependency.group_id, "groupId")
    asserts.equals(env, "bar", dependency.artifact_id, "artifactId")
    asserts.equals(env, "2.0", dependency.version, "version")
    asserts.equals(env, "test", dependency.scope, "scope")

# Two-level merge: grandparent values flow into the parent.
def merge_simpler_grandparent_parent_test(env):
    grandparent = poms.parse(GRANDPARENT_POM)
    parent = poms.parse(PARENT_POM)

    # verify precondition
    asserts.equals(env, "foo", poms.extract_properties(grandparent).get("foo", None), "original value of 'foo'")
    merged = poms.merge_parent(parent = grandparent, child = parent)
    asserts.equals(env, "test.group", xml.find_first(merged, "groupId").content, "merged groupId")
    asserts.equals(env, "parent", xml.find_first(merged, "artifactId").content, "merged artifactId")
    asserts.equals(env, "1.0", xml.find_first(merged, "version").content, "merged version")
    asserts.equals(env, "pom", xml.find_first(merged, "packaging").content, "merged packaging")
    properties = poms.extract_properties(merged)
    asserts.equals(env, 6, len(properties), "number of properties")
    asserts.equals(env, "bar", properties["foo"], "merged value of 'foo'")
    asserts.equals(env, "1.0", properties["findbugs.jsr305"], "merged value of 'findbugs.jsr305'")
    asserts.equals(env, "blah", properties["baz"], "merged value of 'baz'")
    asserts.equals(env, "test.group", properties["project.groupId"], "merged value of 'project.groupId'")
    asserts.equals(env, "parent", properties["project.artifactId"], "merged value of 'project.artifactId'")
    asserts.equals(env, "1.0", properties["project.version"], "merged value of 'project.version'")
    dependencies = poms.extract_dependencies(merged)
    asserts.equals(env, 1, len(dependencies), "number of dependencies")
    deps_mgt = poms.extract_dependency_management(merged)
    asserts.equals(env, 1, len(deps_mgt), "number of dependency management")
    asserts.equals(env, "test", deps_mgt[0].scope)

# Full grandparent -> parent -> child chain, checked against a golden POM.
def merge_full_chain_test(env):
    grandparent = poms.parse(GRANDPARENT_POM)
    parent = poms.parse(PARENT_POM)
    child = poms.parse(COMPLEX_POM)
    expected = poms.parse(MERGED_EXPECTED_POM)
    merged_parents = poms.merge_parent(parent = grandparent, child = parent)
    merged = poms.merge_parent(parent = merged_parents, child = child)
    asserts.equals(env, "test.group", xml.find_first(merged, "groupId").content, "merged groupId")
    asserts.equals(env, "child", xml.find_first(merged, "artifactId").content, "merged artifactId")
    asserts.equals(env, "1.0", xml.find_first(merged, "version").content, "merged version")
    asserts.equals(env, "jar", xml.find_first(merged, "packaging").content, "merged packaging")
    properties = poms.extract_properties(merged)
    asserts.equals(env, 7, len(properties), "number of properties")
    asserts.equals(env, "bar", properties["foo"], "merged value of 'foo'")
    asserts.equals(env, "1.0", properties["findbugs.jsr305"], "merged value of 'findbugs.jsr305'")
    asserts.equals(env, "blah", properties["baz"], "merged value of 'baz'")
    asserts.equals(env, "5.0", properties["animal.sniffer.version"], "merged value of 'animal.sniffer.version'")
    asserts.equals(env, "test.group", properties["project.groupId"], "merged value of 'project.groupId'")
    asserts.equals(env, "child", properties["project.artifactId"], "merged value of 'project.artifactId'")
    asserts.equals(env, "1.0", properties["project.version"], "merged value of 'project.version'")
    dependencies = poms.extract_dependencies(merged)
    asserts.equals(env, 5, len(dependencies), "number of dependencies")
    # Values from extract_dependencies may include inferred values - that's separately tested.
    deps_mgt = poms.extract_dependency_management(merged)
    asserts.equals(env, 1, len(deps_mgt), "number of dependency management")
    asserts.equals(env, "test", deps_mgt[0].scope)

    # A bit of a brittle "Golden" file test, but order does end up being deterministic based on xml content
    # Spits out JSON if not matching which can be cut-and-pasted into a file, formatted, and compared.
    # The core functionality should be asserted about more precisely in the above assertions.
    asserts.equals(env, expected.to_json(), merged.to_json(), "merged pom node tree")

# Property placeholders (e.g. ${findbugs.jsr305}) are substituted on extraction.
def inferred_values_in_dependencies_test(env):
    dependencies = poms.extract_dependencies(poms.parse(MERGED_EXPECTED_POM))
    indexed_dependencies = {}
    for dep in dependencies:
        indexed_dependencies[dep.coordinates] = dep
    asserts.equals(env, 5, len(dependencies), "number of dependencies")
    asserts.true(env, indexed_dependencies.get("com.google.code.findbugs:jsr305", None), "has jsr305")
    asserts.equals(
        env = env,
        expected = "1.0",
        actual = indexed_dependencies.get("com.google.code.findbugs:jsr305", None).version,
        message = "jsr305 sould have version 1.0 substituted for ${findbugs.jsr305}",
    )
    asserts.true(env, indexed_dependencies.get("junit:junit", None), "has junit:junit")
    asserts.equals(env, "test", indexed_dependencies.get("junit:junit", None).scope, "junit is test scoped")

TESTS = [
    merge_properties_test,
    merge_dependency_test,
    merge_simpler_grandparent_parent_test,
    merge_full_chain_test,
    inferred_values_in_dependencies_test,
]

# Roll-up function.
def suite():
    return test_suite("pom merging", tests = TESTS)
# Test suite for Maven POM merging (//maven:poms.bzl): verifies property,
# dependency, and multi-level parent/child merge behaviour.
load(':poms_for_testing.bzl', 'COMPLEX_POM', 'GRANDPARENT_POM', 'MERGED_EXPECTED_POM', 'PARENT_POM')
load(':testing.bzl', 'asserts', 'test_suite')
load('//maven:poms.bzl', 'poms')
load('//maven:xml.bzl', 'xml')

# Child properties override same-named parent properties; others are unioned.
def merge_properties_test(env):
    parent = '<project><properties>\n <foo>foo</foo>\n <bar>bar</bar>\n </properties></project>'
    child = '<project><properties>\n <baz>baz</baz>\n <bar>blah</bar>\n </properties></project>'
    merged = poms.merge_parent(parent=poms.parse(parent), child=poms.parse(child))
    properties = poms.extract_properties(merged)
    asserts.equals(env, 3, len(properties), 'number of properties')
    asserts.equals(env, 'foo', properties['foo'], 'property foo')
    asserts.equals(env, 'blah', properties['bar'], 'property bar')
    asserts.equals(env, 'baz', properties['baz'], 'property baz')

# A child dependency overrides the version but inherits scope from the parent.
def merge_dependency_test(env):
    parent = '<project><dependencies><dependency>\n <groupId>foo</groupId><artifactId>bar</artifactId><version>1.0</version><scope>test</scope>\n </dependency></dependencies></project>'
    child = '<project><dependencies><dependency>\n <groupId>foo</groupId><artifactId>bar</artifactId><version>2.0</version>\n </dependency></dependencies></project>'
    merged = poms.merge_parent(parent=poms.parse(parent), child=poms.parse(child))
    dependencies = poms.extract_dependencies(merged)
    asserts.equals(env, 1, len(dependencies), 'number of dependencies')
    dependency = dependencies[0]
    asserts.equals(env, 'foo', dependency.group_id, 'groupId')
    asserts.equals(env, 'bar', dependency.artifact_id, 'artifactId')
    asserts.equals(env, '2.0', dependency.version, 'version')
    asserts.equals(env, 'test', dependency.scope, 'scope')

# Two-level merge: grandparent values flow into the parent.
def merge_simpler_grandparent_parent_test(env):
    grandparent = poms.parse(GRANDPARENT_POM)
    parent = poms.parse(PARENT_POM)
    asserts.equals(env, 'foo', poms.extract_properties(grandparent).get('foo', None), "original value of 'foo'")
    merged = poms.merge_parent(parent=grandparent, child=parent)
    asserts.equals(env, 'test.group', xml.find_first(merged, 'groupId').content, 'merged groupId')
    asserts.equals(env, 'parent', xml.find_first(merged, 'artifactId').content, 'merged artifactId')
    asserts.equals(env, '1.0', xml.find_first(merged, 'version').content, 'merged version')
    asserts.equals(env, 'pom', xml.find_first(merged, 'packaging').content, 'merged packaging')
    properties = poms.extract_properties(merged)
    asserts.equals(env, 6, len(properties), 'number of properties')
    asserts.equals(env, 'bar', properties['foo'], "merged value of 'foo'")
    asserts.equals(env, '1.0', properties['findbugs.jsr305'], "merged value of 'findbugs.jsr305'")
    asserts.equals(env, 'blah', properties['baz'], "merged value of 'baz'")
    asserts.equals(env, 'test.group', properties['project.groupId'], "merged value of 'project.groupId'")
    asserts.equals(env, 'parent', properties['project.artifactId'], "merged value of 'project.artifactId'")
    asserts.equals(env, '1.0', properties['project.version'], "merged value of 'project.version'")
    dependencies = poms.extract_dependencies(merged)
    asserts.equals(env, 1, len(dependencies), 'number of dependencies')
    deps_mgt = poms.extract_dependency_management(merged)
    asserts.equals(env, 1, len(deps_mgt), 'number of dependency management')
    asserts.equals(env, 'test', deps_mgt[0].scope)

# Full grandparent -> parent -> child chain, checked against a golden POM.
def merge_full_chain_test(env):
    grandparent = poms.parse(GRANDPARENT_POM)
    parent = poms.parse(PARENT_POM)
    child = poms.parse(COMPLEX_POM)
    expected = poms.parse(MERGED_EXPECTED_POM)
    merged_parents = poms.merge_parent(parent=grandparent, child=parent)
    merged = poms.merge_parent(parent=merged_parents, child=child)
    asserts.equals(env, 'test.group', xml.find_first(merged, 'groupId').content, 'merged groupId')
    asserts.equals(env, 'child', xml.find_first(merged, 'artifactId').content, 'merged artifactId')
    asserts.equals(env, '1.0', xml.find_first(merged, 'version').content, 'merged version')
    asserts.equals(env, 'jar', xml.find_first(merged, 'packaging').content, 'merged packaging')
    properties = poms.extract_properties(merged)
    asserts.equals(env, 7, len(properties), 'number of properties')
    asserts.equals(env, 'bar', properties['foo'], "merged value of 'foo'")
    asserts.equals(env, '1.0', properties['findbugs.jsr305'], "merged value of 'findbugs.jsr305'")
    asserts.equals(env, 'blah', properties['baz'], "merged value of 'baz'")
    asserts.equals(env, '5.0', properties['animal.sniffer.version'], "merged value of 'animal.sniffer.version'")
    asserts.equals(env, 'test.group', properties['project.groupId'], "merged value of 'project.groupId'")
    asserts.equals(env, 'child', properties['project.artifactId'], "merged value of 'project.artifactId'")
    asserts.equals(env, '1.0', properties['project.version'], "merged value of 'project.version'")
    dependencies = poms.extract_dependencies(merged)
    asserts.equals(env, 5, len(dependencies), 'number of dependencies')
    deps_mgt = poms.extract_dependency_management(merged)
    asserts.equals(env, 1, len(deps_mgt), 'number of dependency management')
    asserts.equals(env, 'test', deps_mgt[0].scope)
    asserts.equals(env, expected.to_json(), merged.to_json(), 'merged pom node tree')

# Property placeholders (e.g. ${findbugs.jsr305}) are substituted on extraction.
def inferred_values_in_dependencies_test(env):
    dependencies = poms.extract_dependencies(poms.parse(MERGED_EXPECTED_POM))
    indexed_dependencies = {}
    for dep in dependencies:
        indexed_dependencies[dep.coordinates] = dep
    asserts.equals(env, 5, len(dependencies), 'number of dependencies')
    asserts.true(env, indexed_dependencies.get('com.google.code.findbugs:jsr305', None), 'has jsr305')
    asserts.equals(env=env, expected='1.0', actual=indexed_dependencies.get('com.google.code.findbugs:jsr305', None).version, message='jsr305 sould have version 1.0 substituted for ${findbugs.jsr305}')
    asserts.true(env, indexed_dependencies.get('junit:junit', None), 'has junit:junit')
    asserts.equals(env, 'test', indexed_dependencies.get('junit:junit', None).scope, 'junit is test scoped')

# Fixed: the test list was assigned to lowercase `tests` while suite()
# referenced undefined `TESTS`; use one consistent constant name.
TESTS = [merge_properties_test, merge_dependency_test, merge_simpler_grandparent_parent_test, merge_full_chain_test, inferred_values_in_dependencies_test]

def suite():
    return test_suite('pom merging', tests=TESTS)
""" Classes to represent error states """ class Error(Exception): """Base exception class for NPC""" pass class FormatError(Error): """ Raised when trying to use a malformed file Attributes: strerror (str): Error message describing what happened """ def __init__(self, strerror): self.strerror = strerror class ParseError(Error): """ Raised when a parsing operation fails Attributes: strerror (str): Error message describing what happened path (PathLike): Path to the offending file lineno (int): Line number of the failure, if known. Defaults to 0. colno (int): Column of the failure, if known. Defaults to 0. """ def __init__(self, strerror, path, lineno=0, colno=0): self.strerror = strerror self.path = path self.lineno = lineno self.colno = colno
""" Classes to represent error states """ class Error(Exception): """Base exception class for NPC""" pass class Formaterror(Error): """ Raised when trying to use a malformed file Attributes: strerror (str): Error message describing what happened """ def __init__(self, strerror): self.strerror = strerror class Parseerror(Error): """ Raised when a parsing operation fails Attributes: strerror (str): Error message describing what happened path (PathLike): Path to the offending file lineno (int): Line number of the failure, if known. Defaults to 0. colno (int): Column of the failure, if known. Defaults to 0. """ def __init__(self, strerror, path, lineno=0, colno=0): self.strerror = strerror self.path = path self.lineno = lineno self.colno = colno
def main():
    """Demonstrate doubling and even-number filtering of a list."""
    def double(arg):
        # PEP 8 (E731): a named def replaces the lambda assigned to a variable.
        return arg * 2

    print(double(5))   # 10
    print(double(0))   # 0
    print(double(10))  # 20

    my_list = [1, 5, 4, 6, 8, 11, 3, 12]
    # Keep only the even numbers (same result as filter(lambda...)).
    new_list = [x for x in my_list if x % 2 == 0]
    print(new_list)


if __name__ == "__main__":
    main()
def main(): l_m = lambda arg: arg * 2 print(l_m(5)) print(l_m(0)) print(l_m(10)) my_list = [1, 5, 4, 6, 8, 11, 3, 12] new_list = list(filter(lambda x: x % 2 == 0, my_list)) print(new_list) pass if __name__ == '__main__': main()
input = """ n(1). n(2). n(3). n(4). a(1,2). a(1,3). a(4,3). a(3,1). a(2,3). a(1,4). b(1,2,1). b(1,1,1). b(2,1,2). b(3,1,1). b(2,2,2). c(1,2,3,4). c(1,2,3,5). c(4,3,2,1). c(1,2,4,3). c(2,1,3,4). d(1,2,3,4,5). d(5,4,3,2,1). d(1,2,3,5,1). %p(X,Y) v q(X,Z) v r(X,Y,Z,U,W) :- n(X), a(X,Y), b(X,Y,Z), c(X,Y,Z,U), %d(X,Y,Z,U,W). p(X,Y) :- n(X), n(Y), not a(X,Y), not b(1,X,Y). """ output = """ {a(1,2), a(1,3), a(1,4), a(2,3), a(3,1), a(4,3), b(1,1,1), b(1,2,1), b(2,1,2), b(2,2,2), b(3,1,1), c(1,2,3,4), c(1,2,3,5), c(1,2,4,3), c(2,1,3,4), c(4,3,2,1), d(1,2,3,4,5), d(1,2,3,5,1), d(5,4,3,2,1), n(1), n(2), n(3), n(4), p(2,2), p(2,4), p(3,2), p(3,3), p(3,4), p(4,1), p(4,2), p(4,4)} """
# Test fixture: the same ASP/Datalog program and expected answer set as the
# sibling snippet, stored as single-line strings with escaped newlines.
# NOTE(review): `input` and `output` shadow the builtins of the same name --
# presumably deliberate fixture names here; confirm before renaming.
input = '\n\nn(1).\nn(2).\nn(3).\nn(4).\n\na(1,2).\na(1,3).\na(4,3).\na(3,1).\na(2,3).\na(1,4).\n\nb(1,2,1).\nb(1,1,1).\nb(2,1,2).\nb(3,1,1).\nb(2,2,2).\n\nc(1,2,3,4).\nc(1,2,3,5).\nc(4,3,2,1).\nc(1,2,4,3).\nc(2,1,3,4).\n\nd(1,2,3,4,5).\nd(5,4,3,2,1).\nd(1,2,3,5,1).\n\n%p(X,Y) v q(X,Z) v r(X,Y,Z,U,W) :- n(X), a(X,Y), b(X,Y,Z), c(X,Y,Z,U),\n%d(X,Y,Z,U,W).\n\np(X,Y) :- n(X), n(Y), not a(X,Y), not b(1,X,Y).\n'
# Expected answer set: all facts plus the derived p/2 atoms.
output = '\n{a(1,2), a(1,3), a(1,4), a(2,3), a(3,1), a(4,3), b(1,1,1), b(1,2,1), b(2,1,2), b(2,2,2), b(3,1,1), c(1,2,3,4), c(1,2,3,5), c(1,2,4,3), c(2,1,3,4), c(4,3,2,1), d(1,2,3,4,5), d(1,2,3,5,1), d(5,4,3,2,1), n(1), n(2), n(3), n(4), p(2,2), p(2,4), p(3,2), p(3,3), p(3,4), p(4,1), p(4,2), p(4,4)}\n'
# Emit every (x, y) pair in an N-by-N grid together with the product x*y.
N = 10

for x in range(N):
    for y in range(N):
        product = x * y
        print(x, y, product)
# Emit every (x, y) pair in an n-by-n grid together with the product x*y.
n = 10
# Bug fix: the loops read an undefined name `N` (NameError) -- the constant
# assigned above is lowercase `n`.
for x in range(n):
    for y in range(n):
        print(x, y, x * y)
'''
Exercise 2:
Write in pseudo code a function merge(listA: List, listB: List) that returns
a sorted list containing the elements of both list where listA and listB are
two sorted lists of integers.
If an element exists in both lists, it must appear multiple times in the
returned list.
For example:
>>> merge([1,3,4,7],[2,3,5])
[1,2,3,3,4,5,7]
'''


def merge(listA, listB):
    """Merge two already-sorted integer lists into one sorted list.

    Duplicates are kept (an element present in both inputs appears twice).

    Bug fix: the original consumed its arguments with list.pop(0), which
    both mutated the caller's lists and cost O(n^2); walking two indices is
    non-destructive and linear.
    """
    merged = []
    i = j = 0
    while i < len(listA) and j < len(listB):
        if listA[i] <= listB[j]:
            merged.append(listA[i])
            i += 1
        else:
            merged.append(listB[j])
            j += 1
    # At most one of these tails is non-empty.
    merged.extend(listA[i:])
    merged.extend(listB[j:])
    return merged


print(merge(list(map(int, input('Enter an Array seperated by space: ').split())), list(map(int, input('Enter an Array seperated by space: ').split()))))
""" Exercise 2: Write in pseudo code a function merge(listA: List, listB: List) that returns a sorted list containing the elements of both list where listA and listB are two sorted lists of integers. If an element exists in both lists, it must appear multiple times in the returned list. For example: >>> merge([1,3,4,7],[2,3,5]) [1,2,3,3,4,5,7] """ def merge(listA, listB): an_b = [] while listA != [] and listB != []: if listA[0] <= listB[0]: AnB.append(listA[0]) listA.pop(0) else: AnB.append(listB[0]) listB.pop(0) if listA == []: an_b += listB else: an_b += listA return AnB print(merge(list(map(int, input('Enter an Array seperated by space: ').split())), list(map(int, input('Enter an Array seperated by space: ').split()))))
""" PSET-4 Word Game Part 6: Playing a Game A game consists of playing multiple hands. We need to implement one final function to complete our word-game program. Write the code that implements the playGame function. You should remove the code that is currently uncommented in the playGame body. Read through the specification and make sure you understand what this function accomplishes. For the game, you should use the HAND_SIZE constant to determine the number of cards in a hand. """ def playGame(wordList): """ Allow the user to play an arbitrary number of hands. 1) Asks the user to input 'n' or 'r' or 'e'. * If the user inputs 'n', let the user play a new (random) hand. * If the user inputs 'r', let the user play the last hand again. * If the user inputs 'e', exit the game. * If the user inputs anything else, tell them their input was invalid. 2) When done playing the hand, repeat from step 1 """ PROMPT_STR = "Enter n to deal a new hand, r to replay the last hand, or e to end game: " NO_REPL_AVAIL_STR = "You have not played a hand yet. Please play a new hand first!" INVALID_CMD = "Invalid command." firstGame = True lastHand = {} while True: userInput = raw_input(PROMPT_STR) if userInput == 'n': hand = dealHand(HAND_SIZE) lastHand = hand.copy() playHand(hand, wordList, HAND_SIZE) elif userInput == 'r': if len(lastHand) == 0: print(NO_REPL_AVAIL_STR) else: playHand(lastHand, wordList, HAND_SIZE) elif userInput == 'e': break else: print(INVALID_CMD) print
""" PSET-4 Word Game Part 6: Playing a Game A game consists of playing multiple hands. We need to implement one final function to complete our word-game program. Write the code that implements the playGame function. You should remove the code that is currently uncommented in the playGame body. Read through the specification and make sure you understand what this function accomplishes. For the game, you should use the HAND_SIZE constant to determine the number of cards in a hand. """ def play_game(wordList): """ Allow the user to play an arbitrary number of hands. 1) Asks the user to input 'n' or 'r' or 'e'. * If the user inputs 'n', let the user play a new (random) hand. * If the user inputs 'r', let the user play the last hand again. * If the user inputs 'e', exit the game. * If the user inputs anything else, tell them their input was invalid. 2) When done playing the hand, repeat from step 1 """ prompt_str = 'Enter n to deal a new hand, r to replay the last hand, or e to end game: ' no_repl_avail_str = 'You have not played a hand yet. Please play a new hand first!' invalid_cmd = 'Invalid command.' first_game = True last_hand = {} while True: user_input = raw_input(PROMPT_STR) if userInput == 'n': hand = deal_hand(HAND_SIZE) last_hand = hand.copy() play_hand(hand, wordList, HAND_SIZE) elif userInput == 'r': if len(lastHand) == 0: print(NO_REPL_AVAIL_STR) else: play_hand(lastHand, wordList, HAND_SIZE) elif userInput == 'e': break else: print(INVALID_CMD) print
""" Space : O(n) Time : O(n) """ class Solution: def reverseStr(self, s: str, k: int) -> str: ans = '' n = len(s) rev = True for i in range(0, n, k): chunk = s[i:i+k] if rev: ans += "".join(chunk[::-1]) rev = False else: ans += "".join(chunk) rev = True return ans
""" Space : O(n) Time : O(n) """ class Solution: def reverse_str(self, s: str, k: int) -> str: ans = '' n = len(s) rev = True for i in range(0, n, k): chunk = s[i:i + k] if rev: ans += ''.join(chunk[::-1]) rev = False else: ans += ''.join(chunk) rev = True return ans
# Licensed to Modin Development Team under one or more contributor license agreements.
# See the NOTICE file distributed with this work for additional information regarding
# copyright ownership. The Modin Development Team licenses this file to you under the
# Apache License, Version 2.0 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.


def unwrap_partitions(api_layer_object, axis=None, bind_ip=False):
    """
    Unwrap partitions of the `api_layer_object`.

    Parameters
    ----------
    api_layer_object : DataFrame or Series
        The API layer object.
    axis : None, 0 or 1. Default is None
        The axis to unwrap partitions for (0 - row partitions, 1 - column partitions).
        If axis is None, all the partitions of the API layer object are unwrapped.
    bind_ip : boolean. Default is False
        Whether to bind node ip address to each partition or not.

    Returns
    -------
    list
        A list of Ray.ObjectRef/Dask.Future to partitions of the `api_layer_object`
        if Ray/Dask is used as an engine.

    Notes
    -----
    In case bind_ip=True, a list containing tuples of Ray.ObjectRef/Dask.Future
    to node ip addresses and partitions of the `api_layer_object`, respectively,
    is returned if Ray/Dask is used as an engine.
    """
    if not hasattr(api_layer_object, "_query_compiler"):
        raise ValueError(
            f"Only API Layer objects may be passed in here, got {type(api_layer_object)} instead."
        )
    if axis is None:

        def _unwrap_partitions(oid):
            # Flatten the 2-D partition grid; `oid` names the attribute that
            # holds the engine reference ("oid" for Ray, "future" for Dask --
            # see the dispatch below).
            if bind_ip:
                return [
                    (partition.ip, getattr(partition, oid))
                    for row in api_layer_object._query_compiler._modin_frame._partitions
                    for partition in row
                ]
            else:
                return [
                    getattr(partition, oid)
                    for row in api_layer_object._query_compiler._modin_frame._partitions
                    for partition in row
                ]

        # Dispatch on the concrete partition class name of the first block.
        actual_engine = type(
            api_layer_object._query_compiler._modin_frame._partitions[0][0]
        ).__name__
        if actual_engine in ("PandasOnRayFramePartition",):
            return _unwrap_partitions("oid")
        elif actual_engine in ("PandasOnDaskFramePartition",):
            return _unwrap_partitions("future")
        raise ValueError(
            f"Do not know how to unwrap '{actual_engine}' underlying partitions"
        )
    else:
        # Build axis partitions along the opposite axis, then unwrap each one.
        partitions = (
            api_layer_object._query_compiler._modin_frame._frame_mgr_cls.axis_partition(
                api_layer_object._query_compiler._modin_frame._partitions, axis ^ 1
            )
        )
        return [
            part.coalesce(bind_ip=bind_ip).unwrap(squeeze=True, bind_ip=bind_ip)
            for part in partitions
        ]
def unwrap_partitions(api_layer_object, axis=None, bind_ip=False):
    """
    Unwrap partitions of the `api_layer_object`.

    Parameters
    ----------
    api_layer_object : DataFrame or Series
        The API layer object.
    axis : None, 0 or 1. Default is None
        The axis to unwrap partitions for (0 - row partitions, 1 - column partitions).
        If axis is None, all the partitions of the API layer object are unwrapped.
    bind_ip : boolean. Default is False
        Whether to bind node ip address to each partition or not.

    Returns
    -------
    list
        A list of Ray.ObjectRef/Dask.Future to partitions of the `api_layer_object`
        if Ray/Dask is used as an engine.

    Notes
    -----
    In case bind_ip=True, a list containing tuples of Ray.ObjectRef/Dask.Future
    to node ip addresses and partitions of the `api_layer_object`, respectively,
    is returned if Ray/Dask is used as an engine.
    """
    if not hasattr(api_layer_object, '_query_compiler'):
        # Bug fix: the original raised `value_error(...)`, an undefined name,
        # which turned this validation path into a NameError.
        raise ValueError(f'Only API Layer objects may be passed in here, got {type(api_layer_object)} instead.')
    if axis is None:

        def _unwrap_partitions(oid):
            # Flatten the 2-D partition grid; `oid` names the attribute that
            # holds the engine reference ("oid" for Ray, "future" for Dask).
            if bind_ip:
                return [(partition.ip, getattr(partition, oid)) for row in api_layer_object._query_compiler._modin_frame._partitions for partition in row]
            return [getattr(partition, oid) for row in api_layer_object._query_compiler._modin_frame._partitions for partition in row]

        # Dispatch on the concrete partition class name of the first block.
        actual_engine = type(api_layer_object._query_compiler._modin_frame._partitions[0][0]).__name__
        if actual_engine in ('PandasOnRayFramePartition',):
            return _unwrap_partitions('oid')
        elif actual_engine in ('PandasOnDaskFramePartition',):
            return _unwrap_partitions('future')
        raise ValueError(f"Do not know how to unwrap '{actual_engine}' underlying partitions")
    else:
        # Build axis partitions along the opposite axis, then unwrap each one.
        partitions = api_layer_object._query_compiler._modin_frame._frame_mgr_cls.axis_partition(api_layer_object._query_compiler._modin_frame._partitions, axis ^ 1)
        return [part.coalesce(bind_ip=bind_ip).unwrap(squeeze=True, bind_ip=bind_ip) for part in partitions]
def unique_paths_with_obstacles(obstacle_grid):
    """Count monotone (right/down) lattice paths from the top-left to the
    bottom-right corner of a grid, where cells equal to 1 are blocked.

    Uses a rolling 1-D DP over rows; returns 0 for an empty grid.
    """
    if not obstacle_grid or not obstacle_grid[0]:
        return 0
    rows = len(obstacle_grid)
    cols = len(obstacle_grid[0])

    # Seed the DP with the top row: reachable until the first obstacle.
    dp = [0] * cols
    for c in range(cols):
        if obstacle_grid[0][c] != 0:
            break
        dp[c] = 1

    # Reachability of each cell in the first column, same rule going down.
    left_edge = [0] * rows
    for r in range(rows):
        if obstacle_grid[r][0] != 0:
            break
        left_edge[r] = 1

    for r in range(1, rows):
        dp[0] = left_edge[r]
        for c in range(1, cols):
            dp[c] = 0 if obstacle_grid[r][c] == 1 else dp[c] + dp[c - 1]
    return dp[-1]


if __name__ == '__main__':
    demo_grid = [
        [0, 0, 0],
        [0, 1, 0],
        [0, 0, 0],
    ]
    print(unique_paths_with_obstacles(demo_grid))
def unique_paths_with_obstacles(obstacle_grid):
    """Dynamic-programming count of right/down paths that avoid obstacle
    cells (marked 1) in a rectangular grid; 0 for an empty grid."""
    if obstacle_grid is None or not obstacle_grid or obstacle_grid[0] is None or not obstacle_grid[0]:
        return 0
    height = len(obstacle_grid)
    width = len(obstacle_grid[0])

    # First row: cells are reachable up to (not including) the first obstacle.
    row_counts = [0] * width
    for col in range(width):
        if obstacle_grid[0][col] != 0:
            break
        row_counts[col] = 1

    # First column reachability, computed the same way going down.
    first_col = [0] * height
    for row in range(height):
        if obstacle_grid[row][0] != 0:
            break
        first_col[row] = 1

    for row in range(1, height):
        row_counts[0] = first_col[row]
        for col in range(1, width):
            if obstacle_grid[row][col] == 1:
                row_counts[col] = 0
            else:
                row_counts[col] += row_counts[col - 1]
    return row_counts[width - 1]


if __name__ == '__main__':
    obstacle_grid_ = [[0, 0, 0], [0, 1, 0], [0, 0, 0]]
    print(unique_paths_with_obstacles(obstacle_grid_))
def undocumented(f):
    """Mark a handler method so it is omitted from the API help.

    Used as a decorator; returns the handler unchanged apart from the
    marker attribute.
    """
    setattr(f, 'undocumented', True)
    return f
def undocumented(f):
    """Decorator that flags a handler method as hidden from the API help."""
    # Consumers of the help system check for this marker attribute.
    f.undocumented = True
    return f
########################################################################
#
# License: BSD
# Created: February 25, 2005
# Author: Ivan Vilata - reverse:net.selidor@ivan
#
# $Id$
#
########################################################################

"""
Parameters for PyTables.

Misc variables:

`__docformat__`
    The format of documentation strings in this module.
`__version__`
    Repository version of this file.
"""

# NOTE: the bare string literals after each assignment are PyTables'
# attribute-documentation convention (picked up by its doc tooling);
# they are inert at runtime.

__docformat__ = 'reStructuredText'
"""The format of documentation strings in this module."""

__version__ = '$Revision$'
"""Repository version of this file."""

_KB = 1024
"""The size of a Kilobyte in bytes"""

_MB = 1024*_KB
"""The size of a Megabyte in bytes"""

# Tunable parameters
# ==================
# Be careful when touching these!

# Parameters for different internal caches
# ----------------------------------------

BOUNDS_MAX_SIZE = 1*_MB
"""The maximum size for bounds values cached during index lookups."""

BOUNDS_MAX_SLOTS = 4*1024
"""The maximum number of slots for the BOUNDS cache."""

ITERSEQ_MAX_ELEMENTS = 1024
"""The maximum number of iterator elements cached in data lookups."""

ITERSEQ_MAX_SIZE = 1*_MB
"""The maximum space that will take ITERSEQ cache (in bytes)."""

ITERSEQ_MAX_SLOTS = 128
"""The maximum number of slots in ITERSEQ cache."""

LIMBOUNDS_MAX_SIZE = 256*_KB
"""The maximum size for the query limits (for example, ``(lim1, lim2)``
in conditions like ``lim1 <= col < lim2``) cached during index lookups
(in bytes)."""

LIMBOUNDS_MAX_SLOTS = 128
"""The maximum number of slots for LIMBOUNDS cache."""

TABLE_MAX_SIZE = 1*_MB
"""The maximum size for table chunks cached during index queries."""

SORTED_MAX_SIZE = 1*_MB
"""The maximum size for sorted values cached during index lookups."""

SORTEDLR_MAX_SIZE = 8*_MB
"""The maximum size for chunks in last row cached in index lookups (in
bytes)."""

SORTEDLR_MAX_SLOTS = 1024
"""The maximum number of chunks for SORTEDLR cache."""

# Parameters for general cache behaviour
# --------------------------------------
#
# The next parameters will not be effective if passed to the
# `openFile()` function (so, they can only be changed in a *global*
# way).  You can change them in the file, but this is strongly
# discouraged unless you know well what you are doing.

DISABLE_EVERY_CYCLES = 10
"""The number of cycles in which a cache will be forced to be disabled
if the hit ratio is lower than the LOWEST_HIT_RATIO (see below).  This
value should provide time enough to check whether the cache is being
efficient or not."""

ENABLE_EVERY_CYCLES = 50
"""The number of cycles in which a cache will be forced to be
(re-)enabled, irregardingly of the hit ratio. This will provide a chance
for checking if we are in a better scenario for doing caching again."""

LOWEST_HIT_RATIO = 0.6
"""The minimum acceptable hit ratio for a cache to avoid disabling (and
freeing) it."""

# Tunable parameters
# ==================
# Be careful when touching these!

# Recommended maximum values
# --------------------------
# Following are the recommended values for several limits.  However,
# these limits are somewhat arbitrary and can be increased if you have
# enough resources.

MAX_COLUMNS = 512
"""Maximum number of columns in ``Table`` objects before a
``PerformanceWarning`` is issued.  This limit is somewhat arbitrary and
can be increased.
"""

MAX_NODE_ATTRS = 4096
"""Maximum allowed number of attributes in a node."""

MAX_GROUP_WIDTH = 16384
"""Maximum allowed number of children hanging from a group."""

MAX_TREE_DEPTH = 2048
"""Maximum depth in object tree allowed."""

MAX_UNDO_PATH_LENGTH = 10240
"""Maximum length of paths allowed in undo/redo operations."""

# Cache limits
# ------------

COND_CACHE_SLOTS = 128
"""Maximum number of conditions for table queries to be kept in memory.
"""

CHUNK_CACHE_NELMTS = 521
"""Number of elements for HDF5 chunk cache."""

CHUNK_CACHE_PREEMPT = 0.0
"""Chunk preemption policy.  This value should be between 0 and 1
inclusive and indicates how much chunks that have been fully read are
favored for preemption.  A value of zero means fully read chunks are
treated no differently than other chunks (the preemption is strictly
LRU) while a value of one means fully read chunks are always preempted
before other chunks."""

CHUNK_CACHE_SIZE = 2*_MB
"""Size (in bytes) for HDF5 chunk cache."""

# Size for new metadata cache system in HDF5 1.8.x
METADATA_CACHE_SIZE = 1*_MB  # 1 MB is the default for HDF5
"""Size (in bytes) of the HDF5 metadata cache.  This only takes effect
if using HDF5 1.8.x series."""

# NODE_CACHE_SLOTS tells the number of nodes that fits in the cache.
#
# There are several forces driving the election of this number:
# 1.- As more nodes, better chances to re-use nodes
#     --> better performance
# 2.- As more nodes, the re-ordering of the LRU cache takes more time
#     --> less performance
# 3.- As more nodes, the memory needs for PyTables grows, specially for table
#     writings (that could take double of memory than table reads!).
#
# The default value here is quite conservative.  If you have a system
# with tons of memory, and if you are touching regularly a very large
# number of leaves, try increasing this value and see if it fits better
# for you.  Please report back your feedback.
NODE_CACHE_SLOTS = 64
"""Maximum number of unreferenced nodes to be kept in memory.

If positive, this is the number of *unreferenced* nodes to be kept in
the metadata cache.  Least recently used nodes are unloaded from memory
when this number of loaded nodes is reached.  To load a node again,
simply access it as usual.  Nodes referenced by user variables are not
taken into account nor unloaded.

Negative value means that all the touched nodes will be kept in an
internal dictionary.  This is the faster way to load/retrieve nodes.
However, and in order to avoid a large memory comsumption, the user will
be warned when the number of loaded nodes will reach the
``-NODE_CACHE_SLOTS`` value.

Finally, a value of zero means that any cache mechanism is disabled.
"""

# Parameters for the I/O buffer in `Leaf` objects
# -----------------------------------------------

IO_BUFFER_SIZE = 1*_MB
"""The PyTables internal buffer size for I/O purposes.  Should not
exceed the amount of highest level cache size in your CPU."""

BUFFER_TIMES = 100
"""The maximum buffersize/rowsize ratio before issuing a
``PerformanceWarning``."""

# Miscellaneous
# -------------

EXPECTED_ROWS_EARRAY = 1000
"""Default expected number of rows for ``EArray`` objects."""

EXPECTED_ROWS_TABLE = 10000
"""Default expected number of rows for ``Table`` objects."""

PYTABLES_SYS_ATTRS = True
"""Set this to ``False`` if you don't want to create PyTables system
attributes in datasets.  Also, if set to ``False`` the possible existing
system attributes are not considered for guessing the class of the node
during its loading from disk (this work is delegated to the PyTables'
class discoverer function for general HDF5 files)."""

MAX_THREADS = None
"""The maximum number of threads that PyTables should use internally
(mainly in Blosc and Numexpr currently).  If `None`, it is automatically
set to the number of cores in your machine. In general, it is a good
idea to set this to the number of cores in your machine or, when your
machine has many of them (e.g. > 4), perhaps one less than this."""


## Local Variables:
## mode: python
## py-indent-offset: 4
## tab-width: 4
## fill-column: 72
## End:
""" Parameters for PyTables. Misc variables: `__docformat__` The format of documentation strings in this module. `__version__` Repository version of this file. """ __docformat__ = 'reStructuredText' 'The format of documentation strings in this module.' __version__ = '$Revision$' 'Repository version of this file.' _kb = 1024 'The size of a Kilobyte in bytes' _mb = 1024 * _KB 'The size of a Megabyte in bytes' bounds_max_size = 1 * _MB 'The maximum size for bounds values cached during index lookups.' bounds_max_slots = 4 * 1024 'The maximum number of slots for the BOUNDS cache.' iterseq_max_elements = 1024 'The maximum number of iterator elements cached in data lookups.' iterseq_max_size = 1 * _MB 'The maximum space that will take ITERSEQ cache (in bytes).' iterseq_max_slots = 128 'The maximum number of slots in ITERSEQ cache.' limbounds_max_size = 256 * _KB 'The maximum size for the query limits (for example, ``(lim1, lim2)``\nin conditions like ``lim1 <= col < lim2``) cached during index lookups\n(in bytes).' limbounds_max_slots = 128 'The maximum number of slots for LIMBOUNDS cache.' table_max_size = 1 * _MB 'The maximum size for table chunks cached during index queries.' sorted_max_size = 1 * _MB 'The maximum size for sorted values cached during index lookups.' sortedlr_max_size = 8 * _MB 'The maximum size for chunks in last row cached in index lookups (in\nbytes).' sortedlr_max_slots = 1024 'The maximum number of chunks for SORTEDLR cache.' disable_every_cycles = 10 'The number of cycles in which a cache will be forced to be disabled\nif the hit ratio is lower than the LOWEST_HIT_RATIO (see below). This\nvalue should provide time enough to check whether the cache is being\nefficient or not.' enable_every_cycles = 50 'The number of cycles in which a cache will be forced to be\n(re-)enabled, irregardingly of the hit ratio. This will provide a chance\nfor checking if we are in a better scenario for doing caching again.' 
lowest_hit_ratio = 0.6 'The minimum acceptable hit ratio for a cache to avoid disabling (and\nfreeing) it.' max_columns = 512 'Maximum number of columns in ``Table`` objects before a\n``PerformanceWarning`` is issued. This limit is somewhat arbitrary and\ncan be increased.\n' max_node_attrs = 4096 'Maximum allowed number of attributes in a node.' max_group_width = 16384 'Maximum allowed number of children hanging from a group.' max_tree_depth = 2048 'Maximum depth in object tree allowed.' max_undo_path_length = 10240 'Maximum length of paths allowed in undo/redo operations.' cond_cache_slots = 128 'Maximum number of conditions for table queries to be kept in memory.\n' chunk_cache_nelmts = 521 'Number of elements for HDF5 chunk cache.' chunk_cache_preempt = 0.0 'Chunk preemption policy. This value should be between 0 and 1\ninclusive and indicates how much chunks that have been fully read are\nfavored for preemption. A value of zero means fully read chunks are\ntreated no differently than other chunks (the preemption is strictly\nLRU) while a value of one means fully read chunks are always preempted\nbefore other chunks.' chunk_cache_size = 2 * _MB 'Size (in bytes) for HDF5 chunk cache.' metadata_cache_size = 1 * _MB 'Size (in bytes) of the HDF5 metadata cache. This only takes effect\nif using HDF5 1.8.x series.' node_cache_slots = 64 'Maximum number of unreferenced nodes to be kept in memory.\n\nIf positive, this is the number of *unreferenced* nodes to be kept in\nthe metadata cache. Least recently used nodes are unloaded from memory\nwhen this number of loaded nodes is reached. To load a node again,\nsimply access it as usual. Nodes referenced by user variables are not\ntaken into account nor unloaded.\n\nNegative value means that all the touched nodes will be kept in an\ninternal dictionary. 
This is the faster way to load/retrieve nodes.\nHowever, and in order to avoid a large memory comsumption, the user will\nbe warned when the number of loaded nodes will reach the\n``-NODE_CACHE_SLOTS`` value.\n\nFinally, a value of zero means that any cache mechanism is disabled.\n' io_buffer_size = 1 * _MB 'The PyTables internal buffer size for I/O purposes. Should not\nexceed the amount of highest level cache size in your CPU.' buffer_times = 100 'The maximum buffersize/rowsize ratio before issuing a\n``PerformanceWarning``.' expected_rows_earray = 1000 'Default expected number of rows for ``EArray`` objects.' expected_rows_table = 10000 'Default expected number of rows for ``Table`` objects.' pytables_sys_attrs = True "Set this to ``False`` if you don't want to create PyTables system\nattributes in datasets. Also, if set to ``False`` the possible existing\nsystem attributes are not considered for guessing the class of the node\nduring its loading from disk (this work is delegated to the PyTables'\nclass discoverer function for general HDF5 files)." max_threads = None 'The maximum number of threads that PyTables should use internally\n(mainly in Blosc and Numexpr currently). If `None`, it is automatically\nset to the number of cores in your machine. In general, it is a good\nidea to set this to the number of cores in your machine or, when your\nmachine has many of them (e.g. > 4), perhaps one less than this.'
# Read six real numbers; report how many are non-negative and their mean.
# (Raises ZeroDivisionError if none are non-negative, as before.)
values = [float(input()) for _ in range(6)]

positives = 0
total = 0
for value in values:
    if value >= 0:
        positives += 1
        total += value

media = total / positives
print("%d valores positivos" % positives)
print("%.1f" % media)
# Read six real numbers and report the count and average of those >= 0.
# (Raises ZeroDivisionError if none are non-negative, as before.)
readings = [float(input()) for _ in range(6)]
non_negative = [r for r in readings if r >= 0]
count = len(non_negative)
media = sum(non_negative) / count
print('%d valores positivos' % count)
print('%.1f' % media)
"""Constants for HACS""" VERSION = "0.12.1" NAME_LONG = "HACS (Home Assistant Community Store)" NAME_SHORT = "HACS" DOMAIN = "hacs" PROJECT_URL = "https://github.com/custom-components/hacs/" CUSTOM_UPDATER_LOCATIONS = [ "{}/custom_components/custom_updater.py", "{}/custom_components/custom_updater/__init__.py", ] ISSUE_URL = "{}issues".format(PROJECT_URL) DOMAIN_DATA = "{}_data".format(NAME_SHORT.lower()) ELEMENT_TYPES = ["integration", "plugin"] IFRAME = { "title": "Community", "icon": "mdi:alpha-c-box", "url": "/community_overview", "path": "community", "require_admin": True, } # Messages CUSTOM_UPDATER_WARNING = """ This cannot be used with custom_updater. To use this you need to remove custom_updater form {} """ DEV_MODE = "You have 'dev' enabled for HACS, this is not intended for regular use, no support will be given if you break something." STARTUP = """ ------------------------------------------------------------------- {} Version: {} This is a custom integration If you have any issues with this you need to open an issue here: {} ------------------------------------------------------------------- """.format( NAME_LONG, VERSION, ISSUE_URL )
"""Constants for HACS""" version = '0.12.1' name_long = 'HACS (Home Assistant Community Store)' name_short = 'HACS' domain = 'hacs' project_url = 'https://github.com/custom-components/hacs/' custom_updater_locations = ['{}/custom_components/custom_updater.py', '{}/custom_components/custom_updater/__init__.py'] issue_url = '{}issues'.format(PROJECT_URL) domain_data = '{}_data'.format(NAME_SHORT.lower()) element_types = ['integration', 'plugin'] iframe = {'title': 'Community', 'icon': 'mdi:alpha-c-box', 'url': '/community_overview', 'path': 'community', 'require_admin': True} custom_updater_warning = '\nThis cannot be used with custom_updater.\nTo use this you need to remove custom_updater form {}\n' dev_mode = "You have 'dev' enabled for HACS, this is not intended for regular use, no support will be given if you break something." startup = '\n-------------------------------------------------------------------\n{}\nVersion: {}\nThis is a custom integration\nIf you have any issues with this you need to open an issue here:\n{}\n-------------------------------------------------------------------\n'.format(NAME_LONG, VERSION, ISSUE_URL)
def main():
    # Read the four integers SX SY GX GY from one line and print the value
    # of (SY*GX + SX*GY) / (SY + GY).
    sx, sy, gx, gy = map(int, input().split())
    numerator = sy * gx + sx * gy
    denominator = sy + gy
    print(numerator / denominator)


if __name__ == "__main__":
    main()
def main():
    # Bug fix: the unpacking bound lowercase names (sx, sy, gx, gy) but the
    # print expression read the old UPPER_CASE ones, raising NameError on
    # every run.  The expression now uses the names actually bound.
    (sx, sy, gx, gy) = map(int, input().split())
    print((sy * gx + sx * gy) / (sy + gy))


if __name__ == '__main__':
    main()
'''Implement a program to calculate sum of odd digits present in the given number

Input Format

a number from the user

Constraints

n>0

Output Format

print sum of odd digits

Sample Input 0

123

Sample Output 0

4

Sample Input 1

101

Sample Output 1

2'''
# solution
n = input()
# Accumulator renamed from `sum`, which shadowed the builtin of that name.
digit_sum = 0
for digit in n:
    if int(digit) % 2 == 1:
        digit_sum = digit_sum + int(digit)
print(digit_sum)
"""Implement a program to calculate sum of odd digits present in the given number Input Format a number from the user Constraints n>0 Output Format print sum of odd digits Sample Input 0 123 Sample Output 0 4 Sample Input 1 101 Sample Output 1 2""" n = input() sum = 0 for i in n: if int(i) % 2 == 1: sum = sum + int(i) print(sum)
def GetCardType(models):
    """Return the card-type name this plugin handles."""
    return "Japanese (recognition&recall)"


def MakeCard(data):
    """Build a card dict (Expression/Meaning/Reading) from `data`.

    Returns an empty dict when the required keys are missing.
    NOTE(review): the guard keeps the original operator precedence --
    (front AND back both missing) OR reading missing; confirm `and` rather
    than `or` is really intended before changing it.
    """
    card = {}
    front_and_back_missing = 'front_word' not in data and 'back_word' not in data
    if front_and_back_missing or 'read_word' not in data:
        return card
    card['Expression'] = data['front_word']
    card['Meaning'] = data['back_word']
    card['Reading'] = data['read_word']
    return card
def get_card_type(models):
    """Name of the card type produced by this module."""
    return 'Japanese (recognition&recall)'


def make_card(data):
    """Map `data` fields onto a card dict; empty dict if fields are absent.

    NOTE(review): the guard precedence is (`front_word` missing AND
    `back_word` missing) OR `read_word` missing, exactly as the original --
    verify the `and` is intended.
    """
    if ('front_word' not in data and 'back_word' not in data) or 'read_word' not in data:
        return {}
    return {
        'Expression': data['front_word'],
        'Meaning': data['back_word'],
        'Reading': data['read_word'],
    }
class Solution:
    def containsNearbyAlmostDuplicate(self, nums: 'List[int]', k: int, t: int) -> bool:
        """Return True if some pair i != j has |i - j| <= k and
        |nums[i] - nums[j]| <= t.

        Bucket technique: each bucket spans w = t + 1 consecutive values, so
        any two numbers landing in the same bucket differ by at most t;
        neighbouring buckets are checked explicitly.  A sliding window keeps
        only the buckets of the last k elements.

        Bug fix: the annotations referenced `List` without importing it from
        `typing`, so *defining* the method raised NameError; the annotations
        are now string literals, which are never evaluated.
        """
        if t < 0 or k <= 0:
            return False
        table = {}
        w = 1 + t
        for i, num in enumerate(nums):
            curr = num // w
            if curr in table:
                return True
            if curr - 1 in table and num - table[curr - 1] <= t:
                return True
            if curr + 1 in table and table[curr + 1] - num <= t:
                return True
            table[curr] = num
            if i >= k:
                # Drop the bucket of the element falling out of the window.
                del table[nums[i - k] // w]
        return False
class Solution:
    def contains_nearby_almost_duplicate(self, nums: 'List[int]', k: int, t: int) -> bool:
        """Return True if some pair i != j has |i - j| <= k and
        |nums[i] - nums[j]| <= t.

        Bucket technique: each bucket spans w = t + 1 consecutive values, so
        any two numbers landing in the same bucket differ by at most t;
        neighbouring buckets are checked explicitly.  A sliding window keeps
        only the buckets of the last k elements.

        Bug fix: the annotations referenced `List` without importing it from
        `typing`, so *defining* the method raised NameError; the annotations
        are now string literals, which are never evaluated.
        """
        if t < 0 or k <= 0:
            return False
        table = {}
        w = 1 + t
        for (i, num) in enumerate(nums):
            curr = num // w
            if curr in table:
                return True
            if curr - 1 in table and num - table[curr - 1] <= t:
                return True
            if curr + 1 in table and table[curr + 1] - num <= t:
                return True
            table[curr] = num
            if i >= k:
                # Drop the bucket of the element falling out of the window.
                del table[nums[i - k] // w]
        return False
def make_devision(n: float):
    """Closure factory: the returned function computes x / n.

    NOTE(review): "devision" looks like a typo for "division", but the name
    is kept -- renaming it would break existing callers.
    """

    def division(x: float) -> float:
        return x / n

    return division


def run():
    """Interactively read n and x, then print x / n."""
    n = float(input('Insert n: '))
    divide_by_n = make_devision(n)
    x = float(input('Insert x: '))
    print(divide_by_n(x))


if __name__ == '__main__':
    run()
def make_devision(n: float):
    """Return a one-argument function that divides its input by `n`.

    NOTE(review): "devision" is presumably a typo for "division"; the public
    name is preserved for compatibility.
    """

    def division(x: float) -> float:
        return x / n
    return division


def run():
    """Prompt for n and x on stdin and print x / n."""
    divisor = float(input('Insert n: '))
    divide = make_devision(divisor)
    numerator = float(input('Insert x: '))
    print(divide(numerator))


if __name__ == '__main__':
    run()
# Echo the bitwise XOR of each pair of integers read from stdin until EOF.
while True:
    try:
        a, b = map(int, input().split())
    except EOFError:
        break
    print(a ^ b)
# Read pairs of integers until EOF; print the bitwise XOR of each pair.
while True:
    try:
        left, right = (int(tok) for tok in input().split())
        print(left ^ right)
    except EOFError:
        break
def subsets(arr): return return_subsets(arr, 0) def return_subsets(arr, index): # [5,7] # [7] # [] """ :param: arr - input integer array Return - list of lists (two dimensional array) where each list represents a subset TODO: complete this method to return subsets of an array """ if len(arr) <= index: return [[]] subsets_out = return_subsets(arr, index + 1) # [], [15] output = [] for el in subsets_out: output.append(el) for el in subsets_out: current = [] current.append(arr[index]) current.extend(el) output.append(current) return output arr = [9, 12, 15] solution = [[], [15], [12], [12, 15], [9], [9, 15], [9, 12], [9, 12, 15]] print(subsets(arr))
def subsets(arr): return return_subsets(arr, 0) def return_subsets(arr, index): """ :param: arr - input integer array Return - list of lists (two dimensional array) where each list represents a subset TODO: complete this method to return subsets of an array """ if len(arr) <= index: return [[]] subsets_out = return_subsets(arr, index + 1) output = [] for el in subsets_out: output.append(el) for el in subsets_out: current = [] current.append(arr[index]) current.extend(el) output.append(current) return output arr = [9, 12, 15] solution = [[], [15], [12], [12, 15], [9], [9, 15], [9, 12], [9, 12, 15]] print(subsets(arr))
catlog = ['Show Filter On Binary', 'Show Smooth The Rec', 'Show Smooth The Gear', 'Show Dog The Gear', 'Show QRCode Art', '-', 'Show Dilation', 'Show Erosion', 'Show Open And Close', '-', 'Show Fill Holes', 'Show Outline', 'Show ConvexHull', 'Show Skeleton', '-', 'Show Distance', 'Show Max Circle', 'Show Medial Axis', 'Show Skeleton And MidAix', 'Show Voronoi', 'Show Binary Watershed', 'Show Repair Lines', 'Show Buffer By Distance', 'Show Make Parabola', '-', 'Show Base Analysis', 'Show Holes Count', 'Show Region Solidity', 'Show Circle And Ellipse', 'Show Region Analysis', 'Show Region Filter', '-', 'Show Global Statistic', 'Show Mask', 'Show Label Mask', 'Show Intensity Analysis', 'Show How To Get Mask', 'Intensity Filter', 'Show The Lighter One', 'Show The Pure One', '-', 'Show Cell Analysis', 'Show Cell Report', 'Show Cell Report']
catlog = ['Show Filter On Binary', 'Show Smooth The Rec', 'Show Smooth The Gear', 'Show Dog The Gear', 'Show QRCode Art', '-', 'Show Dilation', 'Show Erosion', 'Show Open And Close', '-', 'Show Fill Holes', 'Show Outline', 'Show ConvexHull', 'Show Skeleton', '-', 'Show Distance', 'Show Max Circle', 'Show Medial Axis', 'Show Skeleton And MidAix', 'Show Voronoi', 'Show Binary Watershed', 'Show Repair Lines', 'Show Buffer By Distance', 'Show Make Parabola', '-', 'Show Base Analysis', 'Show Holes Count', 'Show Region Solidity', 'Show Circle And Ellipse', 'Show Region Analysis', 'Show Region Filter', '-', 'Show Global Statistic', 'Show Mask', 'Show Label Mask', 'Show Intensity Analysis', 'Show How To Get Mask', 'Intensity Filter', 'Show The Lighter One', 'Show The Pure One', '-', 'Show Cell Analysis', 'Show Cell Report', 'Show Cell Report']
limit = 25 subreddit = 'cscareerquestions' client_id = '???' client_secret = '???' user_agent = 'python:X.Y.Z:v1.0.0 (by /u/???)' gmail_user = '???@???.???' gmail_password = '???' sender = '???@???.???' recipient = '???@???.???' subject = 'Top Trending on /r/%s' % (subreddit)
limit = 25 subreddit = 'cscareerquestions' client_id = '???' client_secret = '???' user_agent = 'python:X.Y.Z:v1.0.0 (by /u/???)' gmail_user = '???@???.???' gmail_password = '???' sender = '???@???.???' recipient = '???@???.???' subject = 'Top Trending on /r/%s' % subreddit
NOT_ALLOWED = ["", None, True, False, [], {}] def arrayLength(arr, length): if len(arr) == length: return True else: return False def checkFields(staticFields, dataFields): requiredfields = list(set(staticFields) - set(dataFields)) extraFields = list(set(dataFields) - set(staticFields)) if len(requiredfields) == 0: return (extraFields[0] + " is not alllowed") else: return (requiredfields[0] + " is required") def notEmpty(data): resp = {"status": True} for k in data: if data[k] in NOT_ALLOWED: resp = {"status": False, "message": k + " should not be Empty"} break return resp def strLength(data, keysList): for key in keysList: if data.get(key) != None: if len(data.get(key)) < keysList[key]["min"]: return {"status": False, "message": key + " should be greater than " + str(keysList[key]["min"]-1) + " in length"} elif len(data.get(key)) > keysList[key]["max"]: return {"status": False, "message": key + " should be lesser than " + str(keysList[key]["max"]+1) + " in length"} else: return {"status": True} def count_range_in_list(li, min, max): count = 0 for x in li: if min < x <= max: count += 1 return count
not_allowed = ['', None, True, False, [], {}] def array_length(arr, length): if len(arr) == length: return True else: return False def check_fields(staticFields, dataFields): requiredfields = list(set(staticFields) - set(dataFields)) extra_fields = list(set(dataFields) - set(staticFields)) if len(requiredfields) == 0: return extraFields[0] + ' is not alllowed' else: return requiredfields[0] + ' is required' def not_empty(data): resp = {'status': True} for k in data: if data[k] in NOT_ALLOWED: resp = {'status': False, 'message': k + ' should not be Empty'} break return resp def str_length(data, keysList): for key in keysList: if data.get(key) != None: if len(data.get(key)) < keysList[key]['min']: return {'status': False, 'message': key + ' should be greater than ' + str(keysList[key]['min'] - 1) + ' in length'} elif len(data.get(key)) > keysList[key]['max']: return {'status': False, 'message': key + ' should be lesser than ' + str(keysList[key]['max'] + 1) + ' in length'} else: return {'status': True} def count_range_in_list(li, min, max): count = 0 for x in li: if min < x <= max: count += 1 return count
# TODO get wall-tower headings for Transdec regions # PINGER_FREQUENCY = 30000 # Region A # PINGER_FREQUENCY = 40000 # Region B # PINGER_FREQUENCY = 25000 # Region C # PINGER_FREQUENCY = 35000 # Region D PINGER_FREQUENCY = 30000 TRACK_MAG_THRESH = 10.8 TRACK_COOLDOWN_SAMPLES = 188000 RIGHT_HANDED = False PATH_1_BEND_RIGHT = RIGHT_HANDED PATH_2_BEND_RIGHT = RIGHT_HANDED
pinger_frequency = 30000 track_mag_thresh = 10.8 track_cooldown_samples = 188000 right_handed = False path_1_bend_right = RIGHT_HANDED path_2_bend_right = RIGHT_HANDED
""" Circular Queue Problem Statement: Circular Queue is a linear data structure which follows FIFO principle The difference is that the last position is connected back to the first position to make a circle. Perform insertion, deletion and traversal operations on Circular Queue. """ class CircularQueue: # Defining and initializing the class def __init__(self, quesize): # initializing start and end of queue with -1 self.quehead = -1 # initializing queue with zero value self.queue = [0 for k in range(quesize)] self.quetail = -1 # size of queue as quesize self.size = quesize def traverse(self): # checking if the queue is Underflow if(self.quehead == -1): print ("Underflow") #else printing the elements of queue else: print("Elements in Queue: ") for k in range(self.quehead, self.quetail + 1): print(self.queue[k], end = " ") def enqueue(self, val): # checking if the queue is Overflow if (self.quetail + 1 == self.quehead): print("Overflow") # checking if the queue is Underflow elif (self.quehead == -1): self.quetail = self.quehead = 0 self.queue[self.quetail] = val # else incrementing and adding the next element in queue else: self.quetail += 1 self.queue[self.quetail] = val def dequeue(self): # checking if the queue is empty if (self.quehead == -1): print ("Underflow") # checking if the queue have one element elif (self.quetail == self.quehead): print(self.queue[self.quehead]) self.quehead = self.quetail = -1 # deleting for the rest of condition else: print(self.queue[self.quehead]) self.quehead += 1 # input the size of the queue size = int(input("Enter the size of queue: ")) # input the number of elements to be deleted delete = int(input("Enter the number of elements to be deleted: ")) # creating object pf queue to access the elements and perform operations queobj = CircularQueue(int(size)) # taking the input the elements from user for i in range(0, size): key = int(input()) queobj.enqueue(key) # adding them in queue # printing the elements inside the queue 
queobj.traverse() print("\nDeleted values: ") # deleting the elements of queue for k in range(0, delete): queobj.dequeue() # printing the elements inside the queue now queobj.traverse() """ Test case 1 input - Enter the size of queue: 5 Enter the number of elements to be deleted: 2 1 2 3 4 5 output - Elements in Queue: 1 2 3 4 5 Deleted values: 1 2 Elements in Queue: 3 4 5 Test case 2 input - Enter the size of queue: 10 Enter the number of elements to be deleted: 4 33 44 22 55 99 11 22 77 45 63 output - Elements in Queue: 33 44 22 55 99 11 22 77 45 63 Deleted values: 33 44 22 55 Elements in Queue: 99 11 22 77 45 63 Time Complexity: O(size) since 'size' number of elements are inserted in Circular Queue Space Complexity: O(size) since creating a list and explicitly allocating memory """
""" Circular Queue Problem Statement: Circular Queue is a linear data structure which follows FIFO principle The difference is that the last position is connected back to the first position to make a circle. Perform insertion, deletion and traversal operations on Circular Queue. """ class Circularqueue: def __init__(self, quesize): self.quehead = -1 self.queue = [0 for k in range(quesize)] self.quetail = -1 self.size = quesize def traverse(self): if self.quehead == -1: print('Underflow') else: print('Elements in Queue: ') for k in range(self.quehead, self.quetail + 1): print(self.queue[k], end=' ') def enqueue(self, val): if self.quetail + 1 == self.quehead: print('Overflow') elif self.quehead == -1: self.quetail = self.quehead = 0 self.queue[self.quetail] = val else: self.quetail += 1 self.queue[self.quetail] = val def dequeue(self): if self.quehead == -1: print('Underflow') elif self.quetail == self.quehead: print(self.queue[self.quehead]) self.quehead = self.quetail = -1 else: print(self.queue[self.quehead]) self.quehead += 1 size = int(input('Enter the size of queue: ')) delete = int(input('Enter the number of elements to be deleted: ')) queobj = circular_queue(int(size)) for i in range(0, size): key = int(input()) queobj.enqueue(key) queobj.traverse() print('\nDeleted values: ') for k in range(0, delete): queobj.dequeue() queobj.traverse() "\nTest case 1 \ninput -\nEnter the size of queue: 5\nEnter the number of elements to be deleted: 2\n1\n2\n3\n4\n5\noutput -\nElements in Queue: \n1 2 3 4 5 \nDeleted values: \n1\n2\nElements in Queue: \n3 4 5 \n\nTest case 2\ninput -\nEnter the size of queue: 10\nEnter the number of elements to be deleted: 4\n33\n44\n22\n55\n99\n11\n22\n77\n45\n63\noutput -\nElements in Queue: \n33 44 22 55 99 11 22 77 45 63 \nDeleted values: \n33\n44\n22\n55\nElements in Queue: \n99 11 22 77 45 63 \n\nTime Complexity: O(size) \n since 'size' number of elements are inserted in Circular Queue\nSpace Complexity: O(size)\n since creating a 
list and explicitly allocating memory\n"
manager22222 ssssss
manager22222 ssssss
dtype = object transformers = [ lambda _: _.transpose(), lambda _: _.rename_axis( mapper='variable', axis='index' ), ]
dtype = object transformers = [lambda _: _.transpose(), lambda _: _.rename_axis(mapper='variable', axis='index')]
# Created by MechAviv # Map ID :: 807000000 # Momijigaoka : Momijigaoka sm.setTemporarySkillSet(0) sm.setInGameDirectionMode(False, True, False, False)
sm.setTemporarySkillSet(0) sm.setInGameDirectionMode(False, True, False, False)
class Solution: def wordBreak(self, s: str, wordDict: List[str]) -> bool: #base case if(len(s) == 1): if(s in wordDict): return True; else: return False; pyt queue = [0]; visited = [0] * len(s) while(len(queue) != 0): start = queue.pop(0); if(visited[start] == 0): for end in range(start, len(s)): word = s[start:end + 1] if(word in wordDict): if(end == len(s) - 1): return True; queue.append(end + 1); visited[start] = 1; return False;
class Solution: def word_break(self, s: str, wordDict: List[str]) -> bool: if len(s) == 1: if s in wordDict: return True else: return False pyt queue = [0] visited = [0] * len(s) while len(queue) != 0: start = queue.pop(0) if visited[start] == 0: for end in range(start, len(s)): word = s[start:end + 1] if word in wordDict: if end == len(s) - 1: return True queue.append(end + 1) visited[start] = 1 return False
class Canister: def __init__(self, agent, canister_id, candid): self.agent = agent self.canister_id = canister_id self.candid = candid
class Canister: def __init__(self, agent, canister_id, candid): self.agent = agent self.canister_id = canister_id self.candid = candid
expected_output = { "configuration": { "vpg_name": "VirtualPortGroup2", "vpg_ip_addr": "192.168.2.1", "vpg_ip_mask": "255.255.255.0", "sng_name": "SNG-APPQOE", "sng_ip_addr": "192.168.2.2", }, "status": {"operational_state": "RUNNING"}, }
expected_output = {'configuration': {'vpg_name': 'VirtualPortGroup2', 'vpg_ip_addr': '192.168.2.1', 'vpg_ip_mask': '255.255.255.0', 'sng_name': 'SNG-APPQOE', 'sng_ip_addr': '192.168.2.2'}, 'status': {'operational_state': 'RUNNING'}}
# Python3 Program to decompose # a matrix into lower and # upper traingular matrix MAX = 100; def luDecomposition(mat, n): lower = [[0 for x in range(n)] for y in range(n)] upper = [[0 for x in range(n)] for y in range(n)] # Decomposing matrix into Upper # and Lower triangular matrix for i in range(n): # Upper Triangular for k in range(i, n): # Summation of L(i, j) * U(j, k) sum = 0 for j in range(i): sum += (lower[i][j] * upper[j][k]) # Evaluating U(i, k) upper[i][k] = mat[i][k] - sum # Lower Triangular for k in range(i, n): if (i == k): lower[i][i] = 1 # Diagonal as 1 else: # Summation of L(k, j) * U(j, i) sum = 0 for j in range(i): sum += (lower[k][j] * upper[j][i]) # Evaluating L(k, i) lower[k][i] = int((mat[k][i] - sum) / upper[i][i]) # setw is for displaying nicely print("Lower Triangular\t\tUpper Triangular") # Displaying the result : for i in range(n): # Lower for j in range(n): print(lower[i][j], end="\t") print("", end="\t") # Upper for j in range(n): print(upper[i][j], end="\t") print("") # Driver code def mult_matrix(M, N): """Multiply square matrices of same dimension M and N""" # Converts N into a list of tuples of columns tuple_N = zip(*N) # Nested list comprehension to calculate matrix multiplication return [[sum(el_m * el_n for el_m, el_n in zip(row_m, col_n)) for col_n in tuple_N] for row_m in M] def pivot_matrix(M): """Returns the pivoting matrix for M, used in Doolittle's method.""" m = len(M) # Create an identity matrix, with floating point values id_mat = [[float(i ==j) for i in range(m)] for j in range(m)] # Rearrange the identity matrix such that the largest element of # each column of M is placed on the diagonal of of M for j in range(m): row = max(range(j, m), key=lambda i: abs(M[i][j])) if j != row: # Swap the rows id_mat[j], id_mat[row] = id_mat[row], id_mat[j] return id_mat def lu_decomposition(A): """Performs an LU Decomposition of A (which must be square) into PA = LU. 
The function returns P, L and U.""" n = len(A) # Create zero matrices for L and U L = [[0.0] * n for i in range(n)] U = [[0.0] * n for i in range(n)] # Create the pivot matrix P and the multipled matrix PA P = pivot_matrix(A) PA = mult_matrix(P, A) # Perform the LU Decomposition for j in range(n): # All diagonal entries of L are set to unity L[j][j] = 1.0 # LaTeX: u_{ij} = a_{ij} - \sum_{k=1}^{i-1} u_{kj} l_{ik} for i in range(j+1): s1 = sum(U[k][j] * L[i][k] for k in range(i)) U[i][j] = PA[i][j] - s1 # LaTeX: l_{ij} = \frac{1}{u_{jj}} (a_{ij} - \sum_{k=1}^{j-1} u_{kj} l_{ik} ) for i in range(j, n): s2 = sum(U[k][j] * L[i][k] for k in range(j)) L[i][j] = (PA[i][j] - s2) / U[j][j] return (P, L, U) A = [[1, 8, 2, 3], [-6, -3, 8, 1], [2, 4, 4, 2], [10, 5, -5, 6]] P = pivot_matrix(A) PA = mult_matrix(P, A) luDecomposition(PA, 4)
max = 100 def lu_decomposition(mat, n): lower = [[0 for x in range(n)] for y in range(n)] upper = [[0 for x in range(n)] for y in range(n)] for i in range(n): for k in range(i, n): sum = 0 for j in range(i): sum += lower[i][j] * upper[j][k] upper[i][k] = mat[i][k] - sum for k in range(i, n): if i == k: lower[i][i] = 1 else: sum = 0 for j in range(i): sum += lower[k][j] * upper[j][i] lower[k][i] = int((mat[k][i] - sum) / upper[i][i]) print('Lower Triangular\t\tUpper Triangular') for i in range(n): for j in range(n): print(lower[i][j], end='\t') print('', end='\t') for j in range(n): print(upper[i][j], end='\t') print('') def mult_matrix(M, N): """Multiply square matrices of same dimension M and N""" tuple_n = zip(*N) return [[sum((el_m * el_n for (el_m, el_n) in zip(row_m, col_n))) for col_n in tuple_N] for row_m in M] def pivot_matrix(M): """Returns the pivoting matrix for M, used in Doolittle's method.""" m = len(M) id_mat = [[float(i == j) for i in range(m)] for j in range(m)] for j in range(m): row = max(range(j, m), key=lambda i: abs(M[i][j])) if j != row: (id_mat[j], id_mat[row]) = (id_mat[row], id_mat[j]) return id_mat def lu_decomposition(A): """Performs an LU Decomposition of A (which must be square) into PA = LU. The function returns P, L and U.""" n = len(A) l = [[0.0] * n for i in range(n)] u = [[0.0] * n for i in range(n)] p = pivot_matrix(A) pa = mult_matrix(P, A) for j in range(n): L[j][j] = 1.0 for i in range(j + 1): s1 = sum((U[k][j] * L[i][k] for k in range(i))) U[i][j] = PA[i][j] - s1 for i in range(j, n): s2 = sum((U[k][j] * L[i][k] for k in range(j))) L[i][j] = (PA[i][j] - s2) / U[j][j] return (P, L, U) a = [[1, 8, 2, 3], [-6, -3, 8, 1], [2, 4, 4, 2], [10, 5, -5, 6]] p = pivot_matrix(A) pa = mult_matrix(P, A) lu_decomposition(PA, 4)
# Based on the user's input print("Give me any 2 numbers! I'll find how many y is in x percent!") x = input("Enter the first number: "); y = input("Enter the second nubmer: ") def percentage(x, y): return (float(x) * float(y)) / 100.0 print("{0}% of {1} is equal to {2}".format(x, y, percentage(x, y)));
print("Give me any 2 numbers! I'll find how many y is in x percent!") x = input('Enter the first number: ') y = input('Enter the second nubmer: ') def percentage(x, y): return float(x) * float(y) / 100.0 print('{0}% of {1} is equal to {2}'.format(x, y, percentage(x, y)))
def print_msg(number): def printer(): "Here we are using the nonlocal keyword" nonlocal number number=3 print(number) printer() print(number) print_msg(9) def transmit_to_space(message): "This is the enclosing function" def data_transmitter(): "The nested function" print(message) return data_transmitter fun2 = transmit_to_space("Burn the Sun!") fun2() def multiplier_of(n): def multiplier(number): return number*n return multiplier multiplywith5 = multiplier_of(5) print(multiplywith5(9))
def print_msg(number): def printer(): """Here we are using the nonlocal keyword""" nonlocal number number = 3 print(number) printer() print(number) print_msg(9) def transmit_to_space(message): """This is the enclosing function""" def data_transmitter(): """The nested function""" print(message) return data_transmitter fun2 = transmit_to_space('Burn the Sun!') fun2() def multiplier_of(n): def multiplier(number): return number * n return multiplier multiplywith5 = multiplier_of(5) print(multiplywith5(9))
a,b = input().split(" ") x = float(a) y = float(b) if x > 0.00 and y > 0.00: print("Q1") elif x > 0.00 and y < 0.00: print("Q4") elif x < 0.00 and y > 0.00: print("Q2") elif x < 0.00 and y < 0.00: print("Q3") elif x == 0.00 and y ==0.00: print("Origem") elif x == 0.00 and y > 0.0: print("Eixo Y") elif x == 0.00 and y < 0.00: print("Eixo Y") elif x > 0.00 and y == 0.00: print("Eixo X") elif x < 0.00 and y == 0.00: print("Eixo X")
(a, b) = input().split(' ') x = float(a) y = float(b) if x > 0.0 and y > 0.0: print('Q1') elif x > 0.0 and y < 0.0: print('Q4') elif x < 0.0 and y > 0.0: print('Q2') elif x < 0.0 and y < 0.0: print('Q3') elif x == 0.0 and y == 0.0: print('Origem') elif x == 0.0 and y > 0.0: print('Eixo Y') elif x == 0.0 and y < 0.0: print('Eixo Y') elif x > 0.0 and y == 0.0: print('Eixo X') elif x < 0.0 and y == 0.0: print('Eixo X')
class Event: """ Superclass representing any sort of event that can be managed by the EventManager """ def __init__(self): self.name = "Generic Event" class TickEvent(Event): """ An event to tick the game state """ def __init__(self): self.name = "Tick Event"
class Event: """ Superclass representing any sort of event that can be managed by the EventManager """ def __init__(self): self.name = 'Generic Event' class Tickevent(Event): """ An event to tick the game state """ def __init__(self): self.name = 'Tick Event'
#!/usr/bin/env python """ The complete code of Ch. 4, Recipe 7 -- Extending built-in types: Enforcing member-type on collections """ class TypedList(list): """ Provides a list-based sequence that only allows certain member-types """ # - Keep track of allowed member-types as a read-only property @property def member_types(self): try: return self._member_types except AttributeError: return tuple() def __init__(self, values, **kwargs): # - Make sure that member_types is a sequence of values if type(values) not in (list, tuple): raise TypeError( '%s expects a list or tuple of values, but ' 'was passed "%s" (%s)' % ( self.__class__.__name__, str(values), type(values).__name__ ) ) member_types = kwargs.get('member_types') if not member_types: raise ValueError( '%s expects a list or tuple of allowed ' 'types to be specified as a member_types ' 'keyword argument, but none were supplied' % ( self.__class__.__name__, str(values), type(values).__name__ ) ) bad_types = [v for v in member_types if type(v) != type] if bad_types: raise ValueError( '%s expects a list or tuple of allowed ' 'types to be specified as a member_types ' 'keyword argument, but was passed "%s" (%s), ' 'which contained invalid value-types (%s)' % ( self.__class__.__name__, str(values), type(values).__name__, ', '.join(bad_types) ) ) # - Set the allowed member-types self._member_types = tuple(member_types) # - Check the provided values for member in values: self._type_check(member) # - If everything checks as valid, then call the parent # object-initializer (list.__init__) list.__init__(self, values) # - Create a type-checking helper-method def _type_check(self, member): # - Using isinstance instead of a straight type- # comparison, so that extensions of types will be # accepted too if not isinstance(member, self.member_types): raise TypeError( 'This instance of %s only accepts %s values: ' '%s (%s) is not allowed' % ( self.__class__.__name__, '|'.join( [t.__name__ for t in self.member_types] ), 
str(member), type(member).__name__ ) ) # - Wrap all of the list methods that involve adding a member # with type-checking def __add__(self, other): # - Called when <list> + <list2> is executed for member in other: self._type_check(member) # - list.__add__ returns a new list with the new members return TypedList( list.__add__(self, other), member_types=self.member_types ) def __iadd__(self, other): # - Called when <list> += <list2> is executed for member in other: self._type_check(member) # - list.__iadd__ returns the instance after it's # been modified return list.__iadd__(self, other) def __mul__(self, other): # - Called when <list> * <int> is executed # - list.__mul__ returns a new list with the new members return TypedList( list.__mul__(self, other), member_types=self.member_types ) def append(self, member): self._type_check(member) return list.append(self, member) def extend(self, other): for member in other: self._type_check(member) return list.extend(self, other) def insert(self, index, member): self._type_check(member) return list.insert(self, index, member) number_list = TypedList([1,], member_types=(float,int)) print(number_list) print(type(number_list)) try: number_list = TypedList(['not-a-number',], member_types=(float,int)) print(number_list) print(type(number_list)) except Exception as error: print('%s: %s' % (error.__class__.__name__, error)) number_list = TypedList([1,], member_types=(float,int)) number_list = number_list + [3.14] print(number_list) print(type(number_list)) number_list = number_list * 2 print(number_list) print(type(number_list)) # ~ number_list = TypedList([1,], member_types=(float,int)) # ~ number_list += [2.3] # ~ print(number_list) # ~ print(type(number_list)) # ~ number_list.append(4.5) # ~ print(number_list) # ~ print(type(number_list)) number_list = TypedList([1,], member_types=(float,int)) number_list *= 2 print(number_list) print(type(number_list)) number_list = TypedList([1,], member_types=(float,int)) # ~ 
number_list.insert(0, 0) # ~ print(number_list) # ~ print(type(number_list)) number_list = TypedList([1,], member_types=(float,int)) # ~ number_list.extend([7,8.9]) # ~ print(number_list) # ~ print(type(number_list))
""" The complete code of Ch. 4, Recipe 7 -- Extending built-in types: Enforcing member-type on collections """ class Typedlist(list): """ Provides a list-based sequence that only allows certain member-types """ @property def member_types(self): try: return self._member_types except AttributeError: return tuple() def __init__(self, values, **kwargs): if type(values) not in (list, tuple): raise type_error('%s expects a list or tuple of values, but was passed "%s" (%s)' % (self.__class__.__name__, str(values), type(values).__name__)) member_types = kwargs.get('member_types') if not member_types: raise value_error('%s expects a list or tuple of allowed types to be specified as a member_types keyword argument, but none were supplied' % (self.__class__.__name__, str(values), type(values).__name__)) bad_types = [v for v in member_types if type(v) != type] if bad_types: raise value_error('%s expects a list or tuple of allowed types to be specified as a member_types keyword argument, but was passed "%s" (%s), which contained invalid value-types (%s)' % (self.__class__.__name__, str(values), type(values).__name__, ', '.join(bad_types))) self._member_types = tuple(member_types) for member in values: self._type_check(member) list.__init__(self, values) def _type_check(self, member): if not isinstance(member, self.member_types): raise type_error('This instance of %s only accepts %s values: %s (%s) is not allowed' % (self.__class__.__name__, '|'.join([t.__name__ for t in self.member_types]), str(member), type(member).__name__)) def __add__(self, other): for member in other: self._type_check(member) return typed_list(list.__add__(self, other), member_types=self.member_types) def __iadd__(self, other): for member in other: self._type_check(member) return list.__iadd__(self, other) def __mul__(self, other): return typed_list(list.__mul__(self, other), member_types=self.member_types) def append(self, member): self._type_check(member) return list.append(self, member) def extend(self, 
other): for member in other: self._type_check(member) return list.extend(self, other) def insert(self, index, member): self._type_check(member) return list.insert(self, index, member) number_list = typed_list([1], member_types=(float, int)) print(number_list) print(type(number_list)) try: number_list = typed_list(['not-a-number'], member_types=(float, int)) print(number_list) print(type(number_list)) except Exception as error: print('%s: %s' % (error.__class__.__name__, error)) number_list = typed_list([1], member_types=(float, int)) number_list = number_list + [3.14] print(number_list) print(type(number_list)) number_list = number_list * 2 print(number_list) print(type(number_list)) number_list = typed_list([1], member_types=(float, int)) number_list *= 2 print(number_list) print(type(number_list)) number_list = typed_list([1], member_types=(float, int)) number_list = typed_list([1], member_types=(float, int))
class Singleton: __instance = None @staticmethod def getinstance(): if Singleton.__instance is None: raise Exception("Singleton does not exist!") return Singleton.__instance def __init__(self, name): if Singleton.__instance is not None: raise Exception("This class is a username!") else: self.name = name Singleton.__instance = self.name @staticmethod def reset(): Singleton.__instance = None
class Singleton: __instance = None @staticmethod def getinstance(): if Singleton.__instance is None: raise exception('Singleton does not exist!') return Singleton.__instance def __init__(self, name): if Singleton.__instance is not None: raise exception('This class is a username!') else: self.name = name Singleton.__instance = self.name @staticmethod def reset(): Singleton.__instance = None
def set(bit): """Set the specifeid bit (1-indexed) eg. set(8) == 0x80""" return 1 << (bit - 1) def setN(n): """Set the first n specified bits eg. setN(7) == 0x7F""" return set(n + 1) - 1 def reverse(value, length): """Reverse an integer value with length bits eg. reverse(0b10,2) == 0b01""" output = 0 for bit in range(length): if value & set(bit + 1) != 0: output |= set(length - bit) return output def find_last_set(value): """ Returns the position of the last set bit (1-indexed) eg. find_last_set(0x8012) == 16 """ output = 0 while value > 0: value >>= 1 output += 1 return output
def set(bit): """Set the specifeid bit (1-indexed) eg. set(8) == 0x80""" return 1 << bit - 1 def set_n(n): """Set the first n specified bits eg. setN(7) == 0x7F""" return set(n + 1) - 1 def reverse(value, length): """Reverse an integer value with length bits eg. reverse(0b10,2) == 0b01""" output = 0 for bit in range(length): if value & set(bit + 1) != 0: output |= set(length - bit) return output def find_last_set(value): """ Returns the position of the last set bit (1-indexed) eg. find_last_set(0x8012) == 16 """ output = 0 while value > 0: value >>= 1 output += 1 return output
class Solution: """ @param num1: An integer @param num2: An integer @param num3: An integer @return: an interger """ def maxOfThreeNumbers(self, num1, num2, num3): # write your code here return max(max(num1, num2), max(num2, num3))
class Solution: """ @param num1: An integer @param num2: An integer @param num3: An integer @return: an interger """ def max_of_three_numbers(self, num1, num2, num3): return max(max(num1, num2), max(num2, num3))
class Row(list): def __init__(self, values=None): self.labels = [] self.values = [] if values: for idx in values: self.__setitem__(idx, values[idx]) def __setitem__(self, idx, value): if type(idx) is str: if idx in self.labels: self.values[self.labels.index(idx)] = value else: self.labels.append(idx) self.values.append(value) else: self.values[idx] = value def __getitem__(self, idx): if type(idx) is str: return self.values[self.labels.index(idx)] else: return self.values[idx] def __iter__(self): return self.values.__iter__()
class Row(list): def __init__(self, values=None): self.labels = [] self.values = [] if values: for idx in values: self.__setitem__(idx, values[idx]) def __setitem__(self, idx, value): if type(idx) is str: if idx in self.labels: self.values[self.labels.index(idx)] = value else: self.labels.append(idx) self.values.append(value) else: self.values[idx] = value def __getitem__(self, idx): if type(idx) is str: return self.values[self.labels.index(idx)] else: return self.values[idx] def __iter__(self): return self.values.__iter__()
# Public API of this package: names re-exported by `from package import *`.
# (Removed a dead, commented-out `from subprocess import call` line.)
__all__ = [
    'check_numbers',
    'command_line',
    'match_spaces',
    'split_conflict',
    'wrap_sentences',
]
# Public API of this module: names exported by `from module import *`.
__all__ = ['check_numbers', 'command_line', 'match_spaces', 'split_conflict', 'wrap_sentences']
# Read a comma-separated list, drop duplicates (keeping first occurrences),
# and print the resulting list.
raw = str(input("Input a list of coma-separated numbers: "))
raw = ''.join(raw.split())  # strip every whitespace character
# dict.fromkeys preserves insertion order, so this is an order-stable dedupe.
unique_numbers = list(dict.fromkeys(raw.split(",")))
print(f"\n{unique_numbers}")
# Read a comma-separated list, drop duplicates (order-preserving), print it.
numbers = str(input('Input a list of coma-separated numbers: '))
numbers = ''.join(numbers.split())  # strip every whitespace character
num_list = numbers.split(',')
# Fix: the two lines below referenced the pre-rename camelCase names
# (`numList`, `numListClean`), which raised NameError at runtime.
num_list_clean = list(dict.fromkeys(num_list))
print(f'\n{num_list_clean}')
# Primitive Gaussian basis-set tables, one per element (H, C, F, Cl).
# Each row looks like [l, ?, [exponent, coefficient]] where l = 0/1/2 is
# presumably the s/p/d angular momentum.
# TODO(review): confirm the meaning of the second field (-1 vs 0) against
# the code that consumes these tables.
H = [
    [0, -1, [68.16,1]], [0, -1, [10.2465,1]], [0, -1, [2.34648,1]],
    [0, -1, [0.67332,1]], [0, -1, [0.22466,1]], [0, -1, [0.082217,1]],
    [1, 0, [1.3,1]], [1, 0, [0.33,1]],
    [2, 0, [1.0,1]],
]

C = [
    [0, -1, [16371.074,1]], [0, -1, [2426.9925,1]], [0, -1, [544.54418,1]],
    [0, -1, [150.80487,1]], [0, -1, [47.708143,1]], [0, -1, [16.457241,1]],
    [0, -1, [6.0845578,1]], [0, -1, [2.3824631,1]], [0, -1, [0.6619866,1]],
    [0, -1, [0.24698997,1]], [0, -1, [0.0949873,1]],
    [1, 0, [40.790423,1]], [1, 0, [9.5034633,1]], [1, 0, [2.9408357,1]],
    [1, 0, [1.0751115,1]], [1, 0, [0.4267024,1]], [1, 0, [0.17481926,1]],
    [1, 0, [0.07113054,1]],
    [2, 0, [0.35,1]], [2, 0, [1.4,1]],
]

F = [
    [0, -1, [37736.0,1]], [0, -1, [5867.0791,1]], [0, -1, [1332.4679,1]],
    [0, -1, [369.4406,1]], [0, -1, [116.843,1]], [0, -1, [40.34877,1]],
    [0, -1, [14.96627,1]], [0, -1, [5.8759295,1]], [0, -1, [1.6533352,1]],
    [0, -1, [0.61083583,1]], [0, -1, [0.23328922,1]],
    [1, 0, [102.26192,1]], [1, 0, [23.938381,1]], [1, 0, [7.5205914,1]],
    [1, 0, [2.7724566,1]], [1, 0, [1.1000514,1]], [1, 0, [0.44677512,1]],
    [1, 0, [0.17187009,1]],
    [2, 0, [1.4,1]], [2, 0, [0.35,1]],
]

Cl = [
    [0, -1, [105818.82,1]], [0, -1, [15872.006,1]], [0, -1, [3619.6548,1]],
    [0, -1, [1030.8038,1]], [0, -1, [339.90788,1]], [0, -1, [124.5381,1]],
    [0, -1, [49.513502,1]], [0, -1, [20.805604,1]], [0, -1, [6.4648238,1]],
    [0, -1, [2.5254537,1]], [0, -1, [1.16544849,1]], [0, -1, [0.53783215,1]],
    [0, -1, [0.19349716,1]],
    [1, 0, [622.02736,1]], [1, 0, [145.49719,1]], [1, 0, [45.008659,1]],
    [1, 0, [15.900889,1]], [1, 0, [5.9259437,1]], [1, 0, [2.2943822,1]],
    [1, 0, [0.6280655,1]], [1, 0, [0.18123318,1]],
    [2, 0, [2.5,1]], [2, 0, [0.8,1]], [2, 0, [0.25,1]],
]
# Primitive Gaussian basis-set tables, one per element (h, c, f, cl).
# Each row looks like [l, ?, [exponent, coefficient]] where l = 0/1/2 is
# presumably the s/p/d angular momentum.
# TODO(review): confirm the meaning of the second field (-1 vs 0) against
# the consumer of these tables.
h = [[0, -1, [68.16, 1]], [0, -1, [10.2465, 1]], [0, -1, [2.34648, 1]], [0, -1, [0.67332, 1]], [0, -1, [0.22466, 1]], [0, -1, [0.082217, 1]], [1, 0, [1.3, 1]], [1, 0, [0.33, 1]], [2, 0, [1.0, 1]]]
c = [[0, -1, [16371.074, 1]], [0, -1, [2426.9925, 1]], [0, -1, [544.54418, 1]], [0, -1, [150.80487, 1]], [0, -1, [47.708143, 1]], [0, -1, [16.457241, 1]], [0, -1, [6.0845578, 1]], [0, -1, [2.3824631, 1]], [0, -1, [0.6619866, 1]], [0, -1, [0.24698997, 1]], [0, -1, [0.0949873, 1]], [1, 0, [40.790423, 1]], [1, 0, [9.5034633, 1]], [1, 0, [2.9408357, 1]], [1, 0, [1.0751115, 1]], [1, 0, [0.4267024, 1]], [1, 0, [0.17481926, 1]], [1, 0, [0.07113054, 1]], [2, 0, [0.35, 1]], [2, 0, [1.4, 1]]]
f = [[0, -1, [37736.0, 1]], [0, -1, [5867.0791, 1]], [0, -1, [1332.4679, 1]], [0, -1, [369.4406, 1]], [0, -1, [116.843, 1]], [0, -1, [40.34877, 1]], [0, -1, [14.96627, 1]], [0, -1, [5.8759295, 1]], [0, -1, [1.6533352, 1]], [0, -1, [0.61083583, 1]], [0, -1, [0.23328922, 1]], [1, 0, [102.26192, 1]], [1, 0, [23.938381, 1]], [1, 0, [7.5205914, 1]], [1, 0, [2.7724566, 1]], [1, 0, [1.1000514, 1]], [1, 0, [0.44677512, 1]], [1, 0, [0.17187009, 1]], [2, 0, [1.4, 1]], [2, 0, [0.35, 1]]]
cl = [[0, -1, [105818.82, 1]], [0, -1, [15872.006, 1]], [0, -1, [3619.6548, 1]], [0, -1, [1030.8038, 1]], [0, -1, [339.90788, 1]], [0, -1, [124.5381, 1]], [0, -1, [49.513502, 1]], [0, -1, [20.805604, 1]], [0, -1, [6.4648238, 1]], [0, -1, [2.5254537, 1]], [0, -1, [1.16544849, 1]], [0, -1, [0.53783215, 1]], [0, -1, [0.19349716, 1]], [1, 0, [622.02736, 1]], [1, 0, [145.49719, 1]], [1, 0, [45.008659, 1]], [1, 0, [15.900889, 1]], [1, 0, [5.9259437, 1]], [1, 0, [2.2943822, 1]], [1, 0, [0.6280655, 1]], [1, 0, [0.18123318, 1]], [2, 0, [2.5, 1]], [2, 0, [0.8, 1]], [2, 0, [0.25, 1]]]
# Register people as [name, weight] pairs until the user answers 'N'/'n',
# then report the heaviest and lightest weights and every person matching them.
pessoas = []  # list of [name, weight] records
dados = []    # scratch buffer for the record currently being typed in
while True:
    dados.append(input('Digite seu nome: '))
    dados.append(float(input('Digite seu peso: ')))
    pessoas.append(dados[:])  # copy, since `dados` is cleared next
    dados.clear()
    r = input('Quer continuar? [S/N]: ')
    if r in 'Nn':
        break
print(f'{len(pessoas)} pessoas foram cadastradas!')
pesado = 0
leve = 0
nome_pesado = nome_leve = ''
# First record initializes both extremes; later records update one of them.
# NOTE(review): ties with the current max take the `>=` branch, so a record
# equal to both extremes only refreshes the max -- the printing loops below
# list all matches anyway.
for c in pessoas:
    if pesado == 0 and leve == 0:
        leve = c[1]
        pesado = c[1]
        nome_pesado = c[0]
        nome_leve = c[0]
    elif c[1] >= pesado:
        pesado = c[1]
        nome_pesado = c[0]
    elif c[1] <= leve:
        leve = c[1]
        nome_leve = c[0]
print(f'O maior peso foi de {pesado}kg. Peso de ', end='')
for cont in pessoas:
    if cont[1] == pesado:
        print(f'[{cont[0]}] ', end='')
print(f'\nO menor peso foi de {leve}kg. Peso de ', end='')
for p in pessoas:
    if p[1] == leve:
        print(f'[{p[0] }] ', end='')
# Register people as [name, weight] pairs until the user answers 'N'/'n',
# then report the heaviest and lightest weights and every person matching them.
pessoas = []  # list of [name, weight] records
dados = []    # scratch buffer for the record currently being typed in
while True:
    dados.append(input('Digite seu nome: '))
    dados.append(float(input('Digite seu peso: ')))
    pessoas.append(dados[:])  # copy, since `dados` is cleared next
    dados.clear()
    r = input('Quer continuar? [S/N]: ')
    if r in 'Nn':
        break
print(f'{len(pessoas)} pessoas foram cadastradas!')
pesado = 0
leve = 0
nome_pesado = nome_leve = ''
# First record initializes both extremes; later records update one of them.
for c in pessoas:
    if pesado == 0 and leve == 0:
        leve = c[1]
        pesado = c[1]
        nome_pesado = c[0]
        nome_leve = c[0]
    elif c[1] >= pesado:
        pesado = c[1]
        nome_pesado = c[0]
    elif c[1] <= leve:
        leve = c[1]
        nome_leve = c[0]
print(f'O maior peso foi de {pesado}kg. Peso de ', end='')
for cont in pessoas:
    if cont[1] == pesado:
        print(f'[{cont[0]}] ', end='')
print(f'\nO menor peso foi de {leve}kg. Peso de ', end='')
for p in pessoas:
    if p[1] == leve:
        print(f'[{p[0]}] ', end='')
ITP_TEMPLATE = """\ ;----------------------------TITLE ----------------------------------------------------------------------------------------- ; None ; ; This file was generated at {time} on {date} using the ATB API, using information generated by ; ; Automatic Topology Builder ; ; REVISION {revision} ;--------------------------------------------------------------------------------------------------------------------------- ; Authors : Martin Stroet, Bertrand Caron, Alpeshkumar K. Malde, Thomas Lee, Alan E. Mark ; ; Institute : Molecular Dynamics group, ; School of Chemistry and Molecular Biosciences (SCMB), ; The University of Queensland, QLD 4072, Australia ; URL : https://atb.uq.edu.au ; Citations : 1. Malde AK, Zuo L, Breeze M, Stroet M, Poger D, Nair PC, Oostenbrink C, Mark AE. ; An Automated force field Topology Builder (ATB) and repository: version 1.0. ; Journal of Chemical Theory and Computation, 2011, 7, 4026-4037. ; 2. Stroet M, Caron B, Visscher K, Geerke D, Malde AK, Mark AE. ; Automated Topology Builder version 3.0: Prediction of solvation free enthalpies in water and hexane. ; DOI:10.1021/acs.jctc.8b00768 ; ; Disclaimer : ; While every effort has been made to ensure the accuracy and validity of parameters provided below ; the assignment of parameters is being based on an automated procedure combining data provided by a ; given user as well as calculations performed using third party software. They are provided as a guide. ; The authors of the ATB cannot guarantee that the parameters are complete or that the parameters provided ; are appropriate for use in any specific application. Users are advised to treat these parameters with discretion ; and to perform additional validation tests for their specific application if required. Neither the authors ; of the ATB or The University of Queensland except any responsibly for how the parameters may be used. 
; ; Release notes and warnings: ; (1) The topology is based on a set of atomic coordinates and other data provided by the user after ; after quantum mechanical optimization of the structure using different levels of theory depending on ; the nature of the molecule. ; (2) In some cases the automatic bond, bond angle and dihedral type assignment is ambiguous. ; In these cases alternative type codes are provided at the end of the line. ; (3) While bonded parameters are taken where possible from the nominated force field non-standard bond, angle and dihedral ; type code may be incorporated in cases where an exact match could not be found. These are marked as "non-standard" ; or "uncertain" in comments. ; (4) In some cases it is not possible to assign an appropriate parameter automatically. "%%" is used as a place holder ; for those fields that could not be determined automatically. The parameters in these fields must be assigned manually ; before the file can be used. ;--------------------------------------------------------------------------------------------------------------------------- ; Input Structure : {residue_name} ; Output : {resolution_upper} topology ; Use in conjunction with the corresponding {resolution} PDB file. ;--------------------------------------------------------------------------------------------------------------------------- ; Citing this topology file ; ATB molid: {molecule_molid} ; ATB Topology Hash: {molecule_hash} ;--------------------------------------------------------------------------------------------------------------------------- ; Intermediate Topology Generation was performed using: ; A B3LYP/6-31G* optimized geometry. ; Bonded and van der Waals parameters were taken from the GROMOS 54A7 parameter set. ; Initial charges were estimated using the ESP method of Merz-Kollman. ; Final charges and charge groups were generated by method described in the ATB paper. 
;--------------------------------------------------------------------------------------------------------------------------- ; ; [ moleculetype ] ; Name nrexcl {residue_name} 3 [ atoms ] ; nr type resnr resid atom cgnr charge mass {atoms} ; total charge of the molecule: {total_charge:.3f} [ bonds ] ; ai aj funct c0 c1 {bonds} [ pairs ] ; ai aj funct ; all 1-4 pairs but the ones excluded in GROMOS itp {pairs} [ angles ] ; ai aj ak funct angle fc {angles} [ dihedrals ] ; GROMOS improper dihedrals ; ai aj ak al funct angle fc {impropers} [ dihedrals ] ; ai aj ak al funct ph0 cp mult {dihedrals} [ exclusions ] ; ai aj funct ; GROMOS 1-4 exclusions {exclusions}"""
# str.format() template for a GROMACS .itp molecular-topology file built from
# ATB (Automatic Topology Builder) data, stored with escaped newlines.
# Placeholders: {time} {date} {revision} {residue_name} {resolution}
# {resolution_upper} {molecule_molid} {molecule_hash} {atoms} {total_charge}
# {bonds} {pairs} {angles} {impropers} {dihedrals} {exclusions}.
# NOTE(review): the literal text (typos included) is emitted verbatim into
# generated files -- do not edit it casually.
itp_template = ';----------------------------TITLE -----------------------------------------------------------------------------------------\n; None\n;\n; This file was generated at {time} on {date} using the ATB API, using information generated by\n;\n; Automatic Topology Builder \n;\n; REVISION {revision}\n;---------------------------------------------------------------------------------------------------------------------------\n; Authors : Martin Stroet, Bertrand Caron, Alpeshkumar K. Malde, Thomas Lee, Alan E. Mark\n;\n; Institute : Molecular Dynamics group, \n; School of Chemistry and Molecular Biosciences (SCMB),\n; The University of Queensland, QLD 4072, Australia\n; URL : https://atb.uq.edu.au\n; Citations : 1. Malde AK, Zuo L, Breeze M, Stroet M, Poger D, Nair PC, Oostenbrink C, Mark AE.\n; An Automated force field Topology Builder (ATB) and repository: version 1.0.\n; Journal of Chemical Theory and Computation, 2011, 7, 4026-4037.\n; 2. Stroet M, Caron B, Visscher K, Geerke D, Malde AK, Mark AE.\n; Automated Topology Builder version 3.0: Prediction of solvation free enthalpies in water and hexane.\n; DOI:10.1021/acs.jctc.8b00768\n;\n; Disclaimer : \n; While every effort has been made to ensure the accuracy and validity of parameters provided below\n; the assignment of parameters is being based on an automated procedure combining data provided by a\n; given user as well as calculations performed using third party software. They are provided as a guide.\n; The authors of the ATB cannot guarantee that the parameters are complete or that the parameters provided\n; are appropriate for use in any specific application. Users are advised to treat these parameters with discretion\n; and to perform additional validation tests for their specific application if required. Neither the authors\n; of the ATB or The University of Queensland except any responsibly for how the parameters may be used.\n;\n; Release notes and warnings: \n; (1) The topology is based on a set of atomic coordinates and other data provided by the user after\n; after quantum mechanical optimization of the structure using different levels of theory depending on\n; the nature of the molecule.\n; (2) In some cases the automatic bond, bond angle and dihedral type assignment is ambiguous.\n; In these cases alternative type codes are provided at the end of the line.\n; (3) While bonded parameters are taken where possible from the nominated force field non-standard bond, angle and dihedral\n; type code may be incorporated in cases where an exact match could not be found. These are marked as "non-standard"\n; or "uncertain" in comments.\n; (4) In some cases it is not possible to assign an appropriate parameter automatically. "%%" is used as a place holder\n; for those fields that could not be determined automatically. The parameters in these fields must be assigned manually\n; before the file can be used.\n;---------------------------------------------------------------------------------------------------------------------------\n; Input Structure : {residue_name}\n; Output : {resolution_upper} topology\n;\tUse in conjunction with the corresponding {resolution} PDB file.\n;---------------------------------------------------------------------------------------------------------------------------\n; Citing this topology file\n; ATB molid: {molecule_molid}\n; ATB Topology Hash: {molecule_hash}\n;---------------------------------------------------------------------------------------------------------------------------\n; Intermediate Topology Generation was performed using:\n; A B3LYP/6-31G* optimized geometry.\n; Bonded and van der Waals parameters were taken from the GROMOS 54A7 parameter set.\n; Initial charges were estimated using the ESP method of Merz-Kollman.\n; Final charges and charge groups were generated by method described in the ATB paper.\n;---------------------------------------------------------------------------------------------------------------------------\n;\n;\n[ moleculetype ]\n; Name nrexcl\n{residue_name} 3\n[ atoms ]\n; nr type resnr resid atom cgnr charge mass\n{atoms}\n; total charge of the molecule: {total_charge:.3f}\n[ bonds ]\n; ai aj funct c0 c1\n{bonds}\n[ pairs ]\n; ai aj funct ; all 1-4 pairs but the ones excluded in GROMOS itp\n{pairs}\n[ angles ]\n; ai aj ak funct angle fc\n{angles}\n[ dihedrals ]\n; GROMOS improper dihedrals\n; ai aj ak al funct angle fc\n{impropers}\n[ dihedrals ]\n; ai aj ak al funct ph0 cp mult\n{dihedrals}\n[ exclusions ]\n; ai aj funct ; GROMOS 1-4 exclusions\n{exclusions}'
a = [1, 2, 3, 4, 5]


def getMinList(numbers):
    """Return the smallest element of a non-empty sequence (linear scan)."""
    smallest = numbers[0]
    for candidate in numbers[1:]:
        if candidate < smallest:
            smallest = candidate
    return smallest


print(getMinList(a))
print(getMinList([9, 3, 8, 9]))
a = [1, 2, 3, 4, 5]


def get_min_list(numbers):
    """Return the smallest element of a non-empty list.

    Fix: the loop body and return statement still referenced the old
    camelCase name `minNumber` after a snake_case rename, raising
    NameError on every call.
    """
    min_number = numbers[0]
    for i in numbers:
        if i < min_number:
            min_number = i
    return min_number


print(get_min_list(a))
print(get_min_list([9, 3, 8, 9]))
# NOTE(review): this looks like an auto-generated .NET interop stub
# (IronPython-style) describing System.Net.DnsEndPoint; every body is
# intentionally `pass` -- the real implementation is supplied by the CLR.
# Do not add logic here.
class DnsEndPoint(EndPoint):
    """
    Represents a network endpoint as a host name or a string representation of an IP address and a port number.

    DnsEndPoint(host: str,port: int)
    DnsEndPoint(host: str,port: int,addressFamily: AddressFamily)
    """

    def Equals(self, comparand):
        """
        Equals(self: DnsEndPoint,comparand: object) -> bool

        Compares two System.Net.DnsEndPoint objects.

        comparand: A System.Net.DnsEndPoint instance to compare to the current instance.

        Returns: true if the two System.Net.DnsEndPoint instances are equal; otherwise,false.
        """
        pass

    def GetHashCode(self):
        """
        GetHashCode(self: DnsEndPoint) -> int

        Returns a hash value for a System.Net.DnsEndPoint.

        Returns: An integer hash value for the System.Net.DnsEndPoint.
        """
        pass

    def ToString(self):
        """
        ToString(self: DnsEndPoint) -> str

        Returns the host name or string representation of the IP address and port number of the System.Net.DnsEndPoint.

        Returns: A string containing the address family,host name or IP address string,and the port number of the specified System.Net.DnsEndPoint.
        """
        pass

    def __eq__(self, *args):
        """ x.__eq__(y) <==> x==y """
        pass

    @staticmethod
    def __new__(self, host, port, addressFamily=None):
        """
        __new__(cls: type,host: str,port: int)
        __new__(cls: type,host: str,port: int,addressFamily: AddressFamily)
        """
        pass

    def __ne__(self, *args):
        pass

    AddressFamily = property(
        lambda self: object(), lambda self, v: None, lambda self: None
    )
    """Gets the Internet Protocol (IP) address family.

Get: AddressFamily(self: DnsEndPoint) -> AddressFamily
"""

    Host = property(lambda self: object(), lambda self, v: None, lambda self: None)
    """Gets the host name or string representation of the Internet Protocol (IP) address of the host.

Get: Host(self: DnsEndPoint) -> str
"""

    Port = property(lambda self: object(), lambda self, v: None, lambda self: None)
    """Gets the port number of the System.Net.DnsEndPoint.

Get: Port(self: DnsEndPoint) -> int
"""
# NOTE(review): lower-cased twin of the DnsEndPoint .NET interop stub; every
# body is intentionally `pass`.  The bare strings after each property are
# expression statements (not attribute docs) and are preserved byte-for-byte.
class Dnsendpoint(EndPoint):
    """
    Represents a network endpoint as a host name or a string representation of an IP address and a port number.

    DnsEndPoint(host: str,port: int)
    DnsEndPoint(host: str,port: int,addressFamily: AddressFamily)
    """

    def equals(self, comparand):
        """
        Equals(self: DnsEndPoint,comparand: object) -> bool

        Compares two System.Net.DnsEndPoint objects.

        comparand: A System.Net.DnsEndPoint instance to compare to the current instance.

        Returns: true if the two System.Net.DnsEndPoint instances are equal; otherwise,false.
        """
        pass

    def get_hash_code(self):
        """
        GetHashCode(self: DnsEndPoint) -> int

        Returns a hash value for a System.Net.DnsEndPoint.

        Returns: An integer hash value for the System.Net.DnsEndPoint.
        """
        pass

    def to_string(self):
        """
        ToString(self: DnsEndPoint) -> str

        Returns the host name or string representation of the IP address and port number of the System.Net.DnsEndPoint.

        Returns: A string containing the address family,host name or IP address string,and the port number of the specified System.Net.DnsEndPoint.
        """
        pass

    def __eq__(self, *args):
        """ x.__eq__(y) <==> x==y """
        pass

    @staticmethod
    def __new__(self, host, port, addressFamily=None):
        """
        __new__(cls: type,host: str,port: int)
        __new__(cls: type,host: str,port: int,addressFamily: AddressFamily)
        """
        pass

    def __ne__(self, *args):
        pass

    address_family = property(lambda self: object(), lambda self, v: None, lambda self: None)
    'Gets the Internet Protocol (IP) address family.\n\n\n\nGet: AddressFamily(self: DnsEndPoint) -> AddressFamily\n\n\n\n'

    host = property(lambda self: object(), lambda self, v: None, lambda self: None)
    'Gets the host name or string representation of the Internet Protocol (IP) address of the host.\n\n\n\nGet: Host(self: DnsEndPoint) -> str\n\n\n\n'

    port = property(lambda self: object(), lambda self, v: None, lambda self: None)
    'Gets the port number of the System.Net.DnsEndPoint.\n\n\n\nGet: Port(self: DnsEndPoint) -> int\n\n\n\n'
from typing import List  # fix: `List` was referenced in the annotation but never imported -> NameError at def time


def q3(datas: List[int]) -> int:
    '''find the best time point to buy and sell stocks(at most one transaction)

    Single pass: track the lowest price seen so far and the best
    sell-minus-buy difference; returns 0 when no profit is possible.
    '''
    max_profit = 0
    min_price = None
    for item in datas:
        if min_price is None:
            # First price initializes the running minimum.
            min_price = item
            continue
        if item > min_price:
            if item - min_price > max_profit:
                max_profit = item - min_price
        elif item < min_price:
            min_price = item
    return max_profit


test_list1 = [7, 1, 5, 3, 6, 4]
print(q3(test_list1))

test_list2 = [7, 6, 4, 3, 1]
print(q3(test_list2))
from typing import List  # fix: `List` annotation was used without an import, raising NameError when the def is executed


def q3(datas: List[int]) -> int:
    """find the best time point to buy and sell stocks(at most one transaction)

    Scans once, remembering the cheapest price so far; the answer is the
    best (price - cheapest_so_far), or 0 for empty/decreasing input.
    """
    max_profit = 0
    min_price = None
    for item in datas:
        if min_price is None:
            min_price = item  # first price seeds the running minimum
            continue
        if item > min_price:
            if item - min_price > max_profit:
                max_profit = item - min_price
        elif item < min_price:
            min_price = item
    return max_profit


test_list1 = [7, 1, 5, 3, 6, 4]
print(q3(test_list1))

test_list2 = [7, 6, 4, 3, 1]
print(q3(test_list2))
'''
Created on Jul 10, 2012

@author: Chris
'''


class Node(object):
    """Doubly-linked node; `element` stays None on the tail sentinel."""

    def __init__(self):
        self.next = None
        self.previous = None
        self.element = None


class LinkedList(object):
    """Doubly-linked list with O(1) append and pops from either end."""

    def __init__(self):
        self.n = 0
        # `last` always points at an empty sentinel node; `first` at the
        # oldest stored element (or the sentinel when empty).
        self.last = Node()
        self.first = self.last

    def append(self, element):
        """Store `element` at the back of the list."""
        self.last.element = element
        self.last.next = Node()
        tmp = self.last
        self.last = self.last.next
        self.last.previous = tmp
        self.n += 1

    def front(self):
        """Remove and return the front element, or None when empty."""
        if self.n == 0:
            return None
        e = self.first.element
        self.first = self.first.next
        self.n -= 1
        return e

    def back(self):
        """Remove and return the back element, or None when empty."""
        if self.n == 0:
            return None
        e = self.last.previous.element
        self.last = self.last.previous
        self.last.next = Node()
        self.n -= 1
        return e

    def size(self):
        """Return the number of stored elements."""
        return self.n

    def elements(self):
        """Yield the stored elements front-to-back.

        Fix: the old version looped `while i.element:`, so any falsy
        element (0, '', None, ...) silently truncated the iteration.
        Walking exactly `n` nodes yields every stored element.
        """
        node = self.first
        for _ in range(self.n):
            yield node.element
            node = node.next


class LinkedQueue(object):
    """FIFO queue backed by LinkedList."""

    def __init__(self):
        self.l = LinkedList()

    def clear(self):
        """Discard every queued element."""
        while not self.empty():
            self.l.front()

    def enqueue(self, element):
        self.l.append(element)

    def dequeue(self):
        """Remove and return the oldest element, or None when empty."""
        return self.l.front()

    def empty(self):
        return self.l.size() == 0

    def size(self):
        return self.l.size()

    def elements(self):
        """Return a list snapshot of the queued elements, oldest first."""
        return [x for x in self.l.elements()]
"""
Created on Jul 10, 2012

@author: Chris
"""

class Node(object):
    """Doubly-linked node; `element` stays None on the tail sentinel."""

    def __init__(self):
        self.next = None
        self.previous = None
        self.element = None

class Linkedlist(object):
    """Doubly-linked list with O(1) append and pops from either end."""

    def __init__(self):
        self.n = 0
        # Fix: constructor called undefined `node()`; the class defined in
        # this module is `Node`.
        self.last = Node()
        self.first = self.last

    def append(self, element):
        """Store `element` at the back of the list."""
        self.last.element = element
        self.last.next = Node()  # fix: was undefined `node()`
        tmp = self.last
        self.last = self.last.next
        self.last.previous = tmp
        self.n += 1

    def front(self):
        """Remove and return the front element, or None when empty."""
        if self.n == 0:
            return None
        e = self.first.element
        self.first = self.first.next
        self.n -= 1
        return e

    def back(self):
        """Remove and return the back element, or None when empty."""
        if self.n == 0:
            return None
        e = self.last.previous.element
        self.last = self.last.previous
        self.last.next = Node()  # fix: was undefined `node()`
        self.n -= 1
        return e

    def size(self):
        """Return the number of stored elements."""
        return self.n

    def elements(self):
        """Yield the stored elements front-to-back.

        Fix: walking exactly `n` nodes instead of testing `i.element` for
        truthiness, so falsy elements (0, '', None) no longer truncate
        the iteration early.
        """
        node = self.first
        for _ in range(self.n):
            yield node.element
            node = node.next

class Linkedqueue(object):
    """FIFO queue backed by Linkedlist."""

    def __init__(self):
        # Fix: constructor called undefined `linked_list()`; the backing
        # class defined in this module is `Linkedlist`.
        self.l = Linkedlist()

    def clear(self):
        """Discard every queued element."""
        while not self.empty():
            self.l.front()

    def enqueue(self, element):
        self.l.append(element)

    def dequeue(self):
        """Remove and return the oldest element, or None when empty."""
        return self.l.front()

    def empty(self):
        return self.l.size() == 0

    def size(self):
        return self.l.size()

    def elements(self):
        """Return a list snapshot of the queued elements, oldest first."""
        return [x for x in self.l.elements()]
class Member:
    """A member with a name, an age, and an id (0 until assigned externally)."""

    def __init__(self, name, age):
        self.name = name
        self.age = age
        self.id = 0

    def __str__(self):
        return f"name:{self.name} ,age: {self.age},id:{self.id}"


class Post:
    """A post with a title, content, and an id (0 until assigned externally)."""

    def __init__(self, title, content):
        self.title = title
        self.content = content
        self.id = 0

    def __str__(self):
        return f"post title:{self.title},\n post content:{self.content} "
class Member:
    """A member record: name, age, and an id that starts at 0."""

    def __init__(self, name, age):
        self.name, self.age, self.id = name, age, 0

    def __str__(self):
        # Spacing inside the string mirrors the original display format.
        return f'name:{self.name} ,age: {self.age},id:{self.id}'


class Post:
    """A post record: title, content, and an id that starts at 0."""

    def __init__(self, title, content):
        self.title, self.content, self.id = title, content, 0

    def __str__(self):
        return f'post title:{self.title},\n post content:{self.content} '
class Solution:
    def solve(self, heights):
        """Return the indices left on a strictly decreasing height stack.

        Each new height pops every earlier index whose height is <= it,
        so the result holds indices with no taller-or-equal element to
        their right.
        """
        keep = []
        for idx, height in enumerate(heights):
            while keep and heights[keep[-1]] <= height:
                keep.pop()
            keep.append(idx)
        return keep
class Solution:
    def solve(self, heights):
        """Monotonic (strictly decreasing) index stack over `heights`."""
        stack = []
        for i in range(len(heights)):
            current = heights[i]
            # Discard earlier indices whose height does not exceed the
            # current one; what survives is strictly decreasing.
            while stack:
                if heights[stack[-1]] > current:
                    break
                stack.pop()
            stack.append(i)
        return stack
''' Given the head of a linked list, remove the nth node from the end of the list and return its head. Eg. Input: head = [1,2,3,4,5], n = 2 Output: [1,2,3,5] ''' # Definition for singly-linked list. # class ListNode: # def __init__(self, val=0, next=None): # self.val = val # self.next = next class Solution: def removeNthFromEnd(self, head: ListNode, n: int) -> ListNode: if not head.next: return None length = 0 cur = head while cur: length += 1 cur = cur.next index = length - n if index == 0: head = head.next return head cur = head prev = None while index != 0 and cur: prev = cur cur = cur.next index -= 1 prev.next = cur.next return head
""" Given the head of a linked list, remove the nth node from the end of the list and return its head. Eg. Input: head = [1,2,3,4,5], n = 2 Output: [1,2,3,5] """ class Solution: def remove_nth_from_end(self, head: ListNode, n: int) -> ListNode: if not head.next: return None length = 0 cur = head while cur: length += 1 cur = cur.next index = length - n if index == 0: head = head.next return head cur = head prev = None while index != 0 and cur: prev = cur cur = cur.next index -= 1 prev.next = cur.next return head
#!/usr/bin/env python3
'''
Write a function called gcd that takes parameters a and b
and returns their greatest common divisor
'''


def gcd(a, b):
    """Euclid's algorithm: gcd(a, b) == gcd(b, a % b), with gcd(a, 0) == a."""
    return a if b == 0 else gcd(b, a % b)


print(gcd(12, 8))
""" Write a function called gcd that takes parameters a and b and returns their greatest common divisor """ def gcd(a, b): if b == 0: return a else: return gcd(b, a % b) print(gcd(12, 8))
# For each of t test cases: given n block sizes and a target m, greedily
# count blocks, always taking the largest block that still fits in the
# remaining amount.
# NOTE(review): assumes the input guarantees a fit is always found (e.g. a
# size-1 block exists); otherwise the inner loop would underflow the index.
t = int(input())  # number of test cases
while t > 0:
    t -= 1
    n, m = input().split()
    blocos = [int(x) for x in input().split()]
    blocos.sort()  # ascending, so the largest block is at the end
    qt = 0  # blocks used
    n = int(n)-1  # index of the largest block after sorting
    m = int(m)  # remaining amount to cover
    while m > 0:
        # Walk down past blocks too large for the remaining amount.
        while m < blocos[n]:
            n -= 1
        m -= blocos[n]
        qt += 1
    print(qt)
# For each of t test cases: given n block sizes and a target m, greedily
# count blocks, always taking the largest block that still fits in the
# remaining amount.
# NOTE(review): assumes the input guarantees a fit is always found (e.g. a
# size-1 block exists); otherwise the inner loop would underflow the index.
t = int(input())  # number of test cases
while t > 0:
    t -= 1
    (n, m) = input().split()
    blocos = [int(x) for x in input().split()]
    blocos.sort()  # ascending, so the largest block is at the end
    qt = 0  # blocks used
    n = int(n) - 1  # index of the largest block after sorting
    m = int(m)  # remaining amount to cover
    while m > 0:
        # Walk down past blocks too large for the remaining amount.
        while m < blocos[n]:
            n -= 1
        m -= blocos[n]
        qt += 1
    print(qt)
# -*- coding: utf-8 -*- """General methods to show data like structures. Maybe redundant (BERT). no mpl specific stuff. """
"""General methods to show data like structures. Maybe redundant (BERT). no mpl specific stuff. """
""" Given an alphanumeric string s, return the second largest numerical digit that appears in s, or -1 if it does not exist. An alphanumeric string is a string consisting of lowercase English letters and digits. Example 1: Input: s = "dfa12321afd" Output: 2 Explanation: The digits that appear in s are [1, 2, 3]. The second largest digit is 2. Example 2: Input: s = "abc1111" Output: -1 Explanation: The digits that appear in s are [1]. There is no second largest digit. """ class Solution: def secondHighest(self, s: str) -> int: s = sorted(set([int(c) for c in s if c.isdigit()]), reverse=True) if s and len(s) >= 2: return s[1] return -1
""" Given an alphanumeric string s, return the second largest numerical digit that appears in s, or -1 if it does not exist. An alphanumeric string is a string consisting of lowercase English letters and digits. Example 1: Input: s = "dfa12321afd" Output: 2 Explanation: The digits that appear in s are [1, 2, 3]. The second largest digit is 2. Example 2: Input: s = "abc1111" Output: -1 Explanation: The digits that appear in s are [1]. There is no second largest digit. """ class Solution: def second_highest(self, s: str) -> int: s = sorted(set([int(c) for c in s if c.isdigit()]), reverse=True) if s and len(s) >= 2: return s[1] return -1
def grade(arg, key):
    """Case-insensitively check the submitted key against the expected flag.

    Returns a (passed, feedback) tuple; `arg` is unused but part of the
    grader's call signature.
    """
    if key.lower() == "flag{crash}".lower():
        return True, "Denial of service attacks are boring."
    return False, "Try crashing the program."
def grade(arg, key):
    """Return (passed, feedback) for the submitted flag, case-insensitive.

    `arg` is unused but required by the grader's call signature.
    """
    expected = 'flag{crash}'
    if expected.lower() == key.lower():
        return (True, 'Denial of service attacks are boring.')
    else:
        return (False, 'Try crashing the program.')
# Copyright: Copyright (c) 2020., Adam Jakab # # Author: Adam Jakab <adam at jakab dot pro> # Created: 2/24/20, 12:09 AM # License: See LICENSE.txt __version__ = '1.3.4'
# Package version string (PEP 440-style).
__version__ = '1.3.4'
# START LAB EXERCISE 06 print('Lab Exercise 06 \n') #SETUP chinese_desserts = [ ["Wheat Flour Cake", 190], ["Egg Yolk", 260], ["Green Bean Cake", 100], ["Taro Pastry", 227], ["Durian Cake", 360], ["Flower Pastry", 130], ["Sun Cake", 172] ] # END SETUP # PROBLEM 1 (3 points) # TODO Implement function name = None # call function print(f"\n1. First dessert item: {name}") # PROBLEM 2 (3 points) # TODO Implement function calories = None # call function print(f"\n2A. Calories of the second dessert item: {calories}") # PROBLEM 3 (5 points) # TODO Implement function # TODO call function print(f"\n3. {chinese_desserts}") # Problem 4 (6 points) # TODO Create variable # TODO call function print(f"\n4. {chinese_desserts}") # Problem 5 (3 points) # TODO Implement function # TODO call function print(f"\n5. {chinese_desserts}") # END LAB EXERCISE
# Lab Exercise 06 scaffold (comment-stripped form): the `None` placeholders
# stand in for functions the student has not implemented yet, so the prints
# currently show None / the unmodified dessert list.
print('Lab Exercise 06 \n')
# Each entry is a [dessert name, calories] pair.
chinese_desserts = [['Wheat Flour Cake', 190], ['Egg Yolk', 260], ['Green Bean Cake', 100], ['Taro Pastry', 227], ['Durian Cake', 360], ['Flower Pastry', 130], ['Sun Cake', 172]]
name = None  # Problem 1 placeholder
print(f'\n1. First dessert item: {name}')
calories = None  # Problem 2 placeholder
print(f'\n2A. Calories of the second dessert item: {calories}')
print(f'\n3. {chinese_desserts}')
print(f'\n4. {chinese_desserts}')
print(f'\n5. {chinese_desserts}')
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

load("//antlir/bzl:query.bzl", "query")

# Find the feature JSON belonging to this layer.
def layer_features_json_query(layer):
    return "$(query_targets_and_outputs '{query}')".format(
        query = query.attrfilter(
            label = "type",
            value = "image_feature",
            expr = query.deps(
                expr = query.set(layer),
                # Limit depth to 1 to get just the `__layer-feature` target.
                # All other features are at distance 2+.
                depth = 1,
            ),
        ),
    )

# Find features JSONs and fetched package targets/outputs for the transitive
# deps of `layer`. We need this to construct the full set of features for
# the layer and its parent layers.
def layer_included_features_query(layer):
    return "$(query_targets_and_outputs '{query}')".format(
        query = query.attrregexfilter(
            label = "type",
            pattern = "|".join([
                "image_layer",
                "image_feature",
                "image_layer_from_package",
                "fetched_package_with_nondeterministic_fs_metadata",
            ]),
            expr = query.deps(
                expr = query.set(layer),
                depth = query.UNBOUNDED,
            ),
        ),
    )

# Any "layer package builder" implementations need to tag themselves with
# this label to be included when packaging a layer for replay deployment.
ANTLIR_BUILD_PKG_LABEL = "antlir_build_pkg"

# Find all package builders for any mounted packages in `layer` (and its
# parents). We use these to package the mounts when we package the layer.
def layer_included_builders_query(layer):
    return "$(query_targets_and_outputs '{query}')".format(
        query = query.attrfilter(
            label = "labels",
            value = ANTLIR_BUILD_PKG_LABEL,
            expr = query.deps(
                expr = query.set(layer),
                depth = query.UNBOUNDED,
            ),
        ),
    )

# Expands to the on-disk output path of `target` via Buck's $(location) macro.
def _location(target):
    return "$(location {})".format(target)

# A convenient way to access the results of the above queries in Python
# unit tests.  Use the Python function `build_env_map` to deserialize.
# Produces one env var per (infix, query) pair, named
# `antlir_test__<infix>__<env_name>`.
def test_env_map(infix_to_layer):
    return {
        "antlir_test__{}__{}".format(infix, env_name): query_fn(target)
        for infix, target in infix_to_layer
        for env_name, query_fn in [
            ("builders", layer_included_builders_query),
            ("layer_feature_json", layer_features_json_query),
            ("layer_output", _location),
            ("target_path_pairs", layer_included_features_query),
        ]
    }
load("//antlir/bzl:query.bzl", "query")

# Find the feature JSON belonging to this layer (depth-1 deps only).
def layer_features_json_query(layer):
    return "$(query_targets_and_outputs '{query}')".format(
        query = query.attrfilter(
            label = "type",
            value = "image_feature",
            expr = query.deps(expr = query.set(layer), depth = 1),
        ),
    )

# Find feature JSONs and fetched package targets/outputs for the transitive
# deps of `layer`.
def layer_included_features_query(layer):
    return "$(query_targets_and_outputs '{query}')".format(
        query = query.attrregexfilter(
            label = "type",
            pattern = "|".join([
                "image_layer",
                "image_feature",
                "image_layer_from_package",
                "fetched_package_with_nondeterministic_fs_metadata",
            ]),
            expr = query.deps(expr = query.set(layer), depth = query.UNBOUNDED),
        ),
    )

# Bug fix: this constant had been lowercased to `antlir_build_pkg_label`,
# but `layer_included_builders_query` below still references
# `ANTLIR_BUILD_PKG_LABEL`, so the function failed with an undefined name.
ANTLIR_BUILD_PKG_LABEL = "antlir_build_pkg"

# Backward-compatible alias for anything that picked up the lowercased name.
antlir_build_pkg_label = ANTLIR_BUILD_PKG_LABEL

# Find all package builders tagged with ANTLIR_BUILD_PKG_LABEL in `layer`
# (and its parents).
def layer_included_builders_query(layer):
    return "$(query_targets_and_outputs '{query}')".format(
        query = query.attrfilter(
            label = "labels",
            value = ANTLIR_BUILD_PKG_LABEL,
            expr = query.deps(expr = query.set(layer), depth = query.UNBOUNDED),
        ),
    )

# Expand a target to the location of its output artifact.
def _location(target):
    return "$(location {})".format(target)

# Map env-var names to query strings for each (infix, layer) pair; used by
# Python unit tests.
def test_env_map(infix_to_layer):
    return {
        "antlir_test__{}__{}".format(infix, env_name): query_fn(target)
        for infix, target in infix_to_layer
        for env_name, query_fn in [
            ("builders", layer_included_builders_query),
            ("layer_feature_json", layer_features_json_query),
            ("layer_output", _location),
            ("target_path_pairs", layer_included_features_query),
        ]
    }
# -------------------------------------------------
# Routines to work with Genesis lattices.
# -------------------------------------------------

# ----- Helper functions -----


def ele_types(eles):
    """Return the unique 'type' values present in eles (order unspecified)."""
    return list(set([e['type'] for e in eles]))


def eles_by_type(eles):
    """Separate eles by type; returns a dict of <type>: [<eles>]."""
    tlat = {t: [] for t in ele_types(eles)}
    for e in eles:
        tlat[e['type']].append(e)
    return tlat


def s0(ele):
    """Return the beginning s-position of an element ('s' marks its end)."""
    return ele['s'] - ele['L']


def zsort(eles):
    """Return eles sorted by end position 's'."""
    return sorted(eles, key=lambda e: e['s'])


# ----- Standard lattice -----


def standard_eles_from_eles(eles, remove_zero_strengths=True):
    """Convert raw ele dicts to an ordered list of elements with absolute
    positions 's' ('s' is at the END of the element).

    Input elements carry a per-type offset 'd' (gap before the element);
    it is consumed and replaced by the absolute 's'.  Comments are dropped;
    zero-strength elements are dropped when remove_zero_strengths is True.
    """
    lat = []
    z0 = {t: 0 for t in ele_types(eles)}  # running end position per type
    for ele in eles:
        e = ele.copy()
        t = e['type']
        if t == 'comment':
            continue
        zbeg = z0[t] + e['d']
        zend = e['L'] + zbeg
        z0[t] = zend
        e['s'] = zend
        e.pop('d')  # 'd' is now encoded in 's'
        if remove_zero_strengths and e['strength'] == 0.0:
            continue
        lat.append(e)
    return zsort(lat)


# ----- Utilities -----


def create_names(eles):
    """Invent names for elements: <type>_<per-type running count>."""
    counter = {t: 0 for t in ele_types(eles)}
    for ele in eles:
        t = ele['type']
        counter[t] += 1
        ele['name'] = t + '_' + str(counter[t])


def make_dummies_for_single_type(eles, smax):
    """Find the gaps in a single-type lattice and return zero-strength dummy
    elements filling them up to smax."""
    types = ele_types(eles)
    assert len(types) == 1, 'Only one type of element allowed'
    my_type = types[0]
    lat = zsort(eles)
    ref = lat[0]  # end of the previously processed ele
    dummies = []
    for ele in lat[1:]:
        # Gap from the previous ele's end to this ele's beginning.
        gap = s0(ele) - ref['s']
        assert gap >= 0, 'Overlapping eles!'
        dummies.append({'type': my_type, 'strength': 0, 'L': gap, 's': s0(ele)})
        ref = ele
    # Bug fix: the original tested `ele['s'] < smax` here, but `ele` is only
    # bound inside the loop, so a single-element lattice raised NameError.
    # `ref` is always bound and equals the last element after the loop.
    if ref['s'] < smax:
        gap = smax - ref['s']
        dummies.append({'type': my_type, 'strength': 0, 'L': gap, 's': smax})
    return dummies


def lattice_dummies(eles):
    """Make zero-strength dummy elements to fill every gap, per type."""
    tlat = eles_by_type(eles)
    smax = max([e['s'] for e in eles if e['type'] not in ['comment']])
    dummies = []
    for t in tlat:
        dummies.extend(make_dummies_for_single_type(tlat[t], smax))
    return dummies


# ----- Export genesis lattice -----


def genesis_lattice_from_standard_lattice(standard_lattice, include_name=False, include_comment=False):
    """Form the lines of a Genesis lattice file from a standard lattice.

    Pads all types with zero-strength dummy elements so each per-type
    sequence is gapless.
    """
    unitlength = standard_lattice['param']['unitlength']
    version = standard_lattice['param']['version']
    # Work on copies; dummies are appended and the list re-sorted.
    eles = [e.copy() for e in standard_lattice['eles']]
    tlist = ele_types(eles)
    eles = zsort(eles + lattice_dummies(eles))
    glat = {t: [] for t in tlist}  # output lines per type
    z = {t: 0 for t in tlist}      # z at end of each type
    for ele in eles:
        t = ele['type']
        if t in ['comment', 'drift']:  # skip these
            continue
        d = s0(ele) - z[t]  # distance from previous element of the same type
        z[t] = ele['s']
        line = str(ele['type']) + ' ' + str(ele['strength']) + ' ' + str(ele['L']) + ' ' + str(d)
        if include_name and 'name' in ele:
            line += ' #' + ele['name']
        if include_comment and 'comment' in ele:
            line += ' # ' + ele['comment']
        glat[t].append(line)
    # Header, then one section per type.
    outlines = ['? VERSION = ' + str(version), '? UNITLENGTH = ' + str(unitlength) + ' # meters']
    for t in tlist:
        if t in ['comment', 'drift']:  # skip these
            continue
        outlines.append('')
        outlines.append('#------------')
        outlines.append('# ' + t)
        outlines.extend(glat[t])
    return outlines


def write_lattice(filePath, standard_lattice):
    """Write standard_lattice to filePath in Genesis lattice format."""
    lines = genesis_lattice_from_standard_lattice(standard_lattice)
    with open(filePath, 'w') as f:
        for l in lines:
            f.write(l + '\n')


# ----- Print -----


def print_ele(e):
    """Pretty-print a single element; 'comment' elements print their text."""
    if e['type'] == 'comment':
        c = e['comment']
        print('' if c == '!' else c)
        return
    # Bug fix: the original did `name = e` (binding the whole dict), which
    # raised TypeError on the string concatenation below.
    name = e['name'] if 'name' in e else ''
    line = name + ': ' + e['type']
    l = len(line)
    for key in e:
        if key in ['s', 'name', 'type', 'original']:
            continue
        s = key + '=' + str(e[key])
        l += len(s)
        if l > 100:  # wrap long lines
            append = ',\n      ' + s
            l = len(append)
        else:
            append = ', ' + s
        line = line + append
    print(line)


def join_eles(eles1, eles2):
    """Concatenate two lattices, shifting eles2 to start after eles1 ends.

    NOTE: mutates the elements of eles2 in place.
    """
    zmax = max([e['s'] for e in eles1])
    for ele in eles2:
        ele['s'] += zmax
    return eles1 + eles2
def ele_types(eles):
    """Return the unique 'type' values present in eles (order unspecified)."""
    return list(set([e['type'] for e in eles]))


def eles_by_type(eles):
    """Separate eles by type; returns a dict of <type>: [<eles>]."""
    tlat = {t: [] for t in ele_types(eles)}
    for e in eles:
        tlat[e['type']].append(e)
    return tlat


def s0(ele):
    """Return the beginning s-position of an element ('s' marks its end)."""
    return ele['s'] - ele['L']


def zsort(eles):
    """Return eles sorted by end position 's'."""
    return sorted(eles, key=lambda e: e['s'])


def standard_eles_from_eles(eles, remove_zero_strengths=True):
    """Convert raw ele dicts to an ordered list of elements with absolute
    positions 's' ('s' is at the END of the element).

    Input elements carry a per-type offset 'd' (gap before the element);
    it is consumed and replaced by the absolute 's'.  Comments are dropped;
    zero-strength elements are dropped when remove_zero_strengths is True.
    """
    lat = []
    z0 = {t: 0 for t in ele_types(eles)}  # running end position per type
    for ele in eles:
        e = ele.copy()
        t = e['type']
        if t == 'comment':
            continue
        zbeg = z0[t] + e['d']
        zend = e['L'] + zbeg
        z0[t] = zend
        e['s'] = zend
        e.pop('d')  # 'd' is now encoded in 's'
        if remove_zero_strengths and e['strength'] == 0.0:
            continue
        lat.append(e)
    return zsort(lat)


def create_names(eles):
    """Invent names for elements: <type>_<per-type running count>."""
    counter = {t: 0 for t in ele_types(eles)}
    for ele in eles:
        t = ele['type']
        counter[t] += 1
        ele['name'] = t + '_' + str(counter[t])


def make_dummies_for_single_type(eles, smax):
    """Find the gaps in a single-type lattice and return zero-strength dummy
    elements filling them up to smax."""
    types = ele_types(eles)
    assert len(types) == 1, 'Only one type of element allowed'
    my_type = types[0]
    lat = zsort(eles)
    ref = lat[0]  # end of the previously processed ele
    dummies = []
    for ele in lat[1:]:
        # Bug fix: the gap was assigned to lowercase `l`, while the assert
        # and the dummy dict read uppercase `L` — an unconditional NameError.
        # Use one consistent name.
        gap = s0(ele) - ref['s']
        assert gap >= 0, 'Overlapping eles!'
        dummies.append({'type': my_type, 'strength': 0, 'L': gap, 's': s0(ele)})
        ref = ele
    # Bug fix: the final-dummy branch read loop-local `ele` (NameError for a
    # single-element lattice) and the undefined `L` again; `ref` is always
    # bound and equals the last element after the loop.
    if ref['s'] < smax:
        gap = smax - ref['s']
        dummies.append({'type': my_type, 'strength': 0, 'L': gap, 's': smax})
    return dummies


def lattice_dummies(eles):
    """Make zero-strength dummy elements to fill every gap, per type."""
    tlat = eles_by_type(eles)
    smax = max([e['s'] for e in eles if e['type'] not in ['comment']])
    dummies = []
    for t in tlat:
        dummies.extend(make_dummies_for_single_type(tlat[t], smax))
    return dummies


def genesis_lattice_from_standard_lattice(standard_lattice, include_name=False, include_comment=False):
    """Form the lines of a Genesis lattice file from a standard lattice.

    Pads all types with zero-strength dummy elements so each per-type
    sequence is gapless.
    """
    unitlength = standard_lattice['param']['unitlength']
    version = standard_lattice['param']['version']
    eles = [e.copy() for e in standard_lattice['eles']]
    tlist = ele_types(eles)
    eles = zsort(eles + lattice_dummies(eles))
    glat = {t: [] for t in tlist}  # output lines per type
    z = {t: 0 for t in tlist}      # z at end of each type
    for ele in eles:
        t = ele['type']
        if t in ['comment', 'drift']:  # skip these
            continue
        d = s0(ele) - z[t]  # distance from previous element of the same type
        z[t] = ele['s']
        line = str(ele['type']) + ' ' + str(ele['strength']) + ' ' + str(ele['L']) + ' ' + str(d)
        if include_name and 'name' in ele:
            line += ' #' + ele['name']
        if include_comment and 'comment' in ele:
            line += ' # ' + ele['comment']
        glat[t].append(line)
    outlines = ['? VERSION = ' + str(version), '? UNITLENGTH = ' + str(unitlength) + ' # meters']
    for t in tlist:
        if t in ['comment', 'drift']:  # skip these
            continue
        outlines.append('')
        outlines.append('#------------')
        outlines.append('# ' + t)
        outlines.extend(glat[t])
    return outlines


def write_lattice(filePath, standard_lattice):
    """Write standard_lattice to filePath in Genesis lattice format."""
    lines = genesis_lattice_from_standard_lattice(standard_lattice)
    with open(filePath, 'w') as f:
        for l in lines:
            f.write(l + '\n')


def print_ele(e):
    """Pretty-print a single element; 'comment' elements print their text."""
    if e['type'] == 'comment':
        c = e['comment']
        print('' if c == '!' else c)
        return
    # Bug fix: `name = e` bound the whole dict and raised TypeError on the
    # string concatenation below.
    name = e['name'] if 'name' in e else ''
    line = name + ': ' + e['type']
    l = len(line)
    for key in e:
        if key in ['s', 'name', 'type', 'original']:
            continue
        s = key + '=' + str(e[key])
        l += len(s)
        if l > 100:  # wrap long lines
            append = ',\n      ' + s
            l = len(append)
        else:
            append = ', ' + s
        line = line + append
    print(line)


def join_eles(eles1, eles2):
    """Concatenate two lattices, shifting eles2 to start after eles1 ends.

    NOTE: mutates the elements of eles2 in place.
    """
    zmax = max([e['s'] for e in eles1])
    for ele in eles2:
        ele['s'] += zmax
    return eles1 + eles2
# Package version string.
VERSION = "0.11.11"

# Dotted path to the app's AppConfig (Django's `default_app_config` hook).
default_app_config = "swingtime.apps.SwingtimeConfig"
# Package version string.
version = '0.11.11'
# Dotted path to the app's AppConfig (Django's `default_app_config` hook).
default_app_config = 'swingtime.apps.SwingtimeConfig'
# Definition for a binary tree node.
class TreeNode:
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


class Solution:
    def maxPathSum(self, root: TreeNode) -> int:
        """Return the maximum sum over all paths in the tree.

        A path may start and end at any nodes; node values may be negative.
        For each subtree we track two quantities: the best path found
        anywhere inside it, and the best downward chain starting at its
        root (the only shape a parent may extend).
        """
        NEG_INF = float('-inf')

        def walk(node):
            # Returns (best path sum in subtree, best root-anchored chain).
            if node is None:
                return NEG_INF, NEG_INF
            best_left, chain_left = walk(node.left)
            best_right, chain_right = walk(node.right)
            # Extend the better child chain, or take the node alone.
            chain = node.val + max(chain_left, chain_right, 0)
            # A "triangle" path through this node using both children.
            through = node.val + chain_left + chain_right
            best = max(chain, through, best_left, best_right)
            return best, chain

        best_sum, _ = walk(root)
        return best_sum
class TreeNode:
    """Plain binary-tree node.

    Name restored to `TreeNode`: the method annotations below reference
    `TreeNode` and are evaluated when the class body executes, so the
    lowercased `Treenode` class name raised NameError immediately.
    """

    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


# Backward-compatible alias for code that imported the lowercased name.
Treenode = TreeNode


class Solution:

    def max_path_sum(self, root: TreeNode) -> int:
        """Return the maximum path sum over all paths in the tree.

        Either traversal is fine - preorder (left, right, node) or postorder
        (right, left, node).  The maximum triangle path sum may be formed
        either from a triangle (left, node, right), from a single
        combination of right/left child, or from a single node.
        """

        def mps(node: TreeNode) -> (int, int):
            if not node:
                # -inf lets negative values still win the max() below.
                return (float('-inf'), float('-inf'))
            # Triangle sum / chain sum for each child.
            lts, lns = mps(node.left)
            rts, rns = mps(node.right)
            val = node.val
            sc = max(lns, rns)               # better single-child chain
            ns = max(val, val + sc)          # best chain anchored at node
            ts = max(ns, val + lns + rns)    # best path through node
            mts = max(ts, lts, rts)          # best path in this subtree
            # Only the chain may be extended by the parent.
            return mts, ns

        best, _ = mps(root)
        return best
"""Constants for the Eaton xComfort Bridge integration."""

# Integration domain identifier.
DOMAIN = "xcomfort_bridge"

# Configuration option keys.
CONF_AUTH_KEY = "auth_key"
CONF_IDENTIFIER = "identifier"
CONF_DIMMING = "dimming"
CONF_GATEWAYS = "gateways"

# Entity platforms this integration provides.
PLATFORMS = ["light"]

# Extra-verbose logging switch.
VERBOSE = True
"""Constants for the Eaton xComfort Bridge integration."""
# Integration domain identifier.
domain = 'xcomfort_bridge'
# Configuration option keys.
conf_auth_key = 'auth_key'
conf_identifier = 'identifier'
conf_dimming = 'dimming'
conf_gateways = 'gateways'
# Entity platforms this integration provides.
platforms = ['light']
# Extra-verbose logging switch.
verbose = True
#!/usr/bin/env python
"""
https://leetcode.com/problems/merge-sorted-array/description/

Created on 2018-11-16

@author: 'Jiezhi.G@gmail.com'

Reference:
"""


class Solution:
    def merge(self, nums1, m, nums2, n):
        """Merge sorted nums2 (n values) into sorted nums1 in place.

        nums1 holds m real values followed by n filler slots.

        :type nums1: List[int]
        :type m: int
        :type nums2: List[int]
        :type n: int
        :rtype: void Do not return anything, modify nums1 in-place instead.
        """
        # Fill from the back: the largest remaining value of either array
        # goes into the highest unwritten slot, so nothing is overwritten.
        i, j, k = m - 1, n - 1, m + n - 1
        while j >= 0:
            if i >= 0 and nums1[i] > nums2[j]:
                nums1[k] = nums1[i]
                i -= 1
            else:
                nums1[k] = nums2[j]
                j -= 1
            k -= 1


def test():
    nums1 = [1, 2, 3, 0, 0, 0]
    nums2 = [2, 5, 6]
    Solution().merge(nums1, 3, nums2, 3)
    assert nums1 == [1, 2, 2, 3, 5, 6]
"""
https://leetcode.com/problems/merge-sorted-array/description/

Created on 2018-11-16

@author: 'Jiezhi.G@gmail.com'

Reference:
"""


class Solution:

    def merge(self, nums1, m, nums2, n):
        """Merge sorted nums2 (n values) into sorted nums1 in place.

        nums1 holds m real values followed by n filler slots.

        :type nums1: List[int]
        :type m: int
        :type nums2: List[int]
        :type n: int
        :rtype: void Do not return anything, modify nums1 in-place instead.
        """
        n1 = 0  # insertion cursor in nums1
        n2 = 0  # read cursor in nums2
        while n2 < n:
            # nums1 ran out of its own values: copy the rest of nums2.
            if n1 >= m + n2:
                for i in range(n2, n):
                    nums1[n1 + i - n2] = nums2[i]
                return
            if nums1[n1] > nums2[n2]:
                # Shift nums1 right by one slot and insert nums2[n2] at n1.
                for i in range(m + n2, n1, -1):
                    nums1[i] = nums1[i - 1]
                nums1[n1] = nums2[n2]
                n2 += 1
            n1 += 1


def test():
    nums1 = [1, 2, 3, 0, 0, 0]
    nums2 = [2, 5, 6]
    # Bug fix: `solution()` (lowercase) raised NameError; the class is
    # `Solution`.
    Solution().merge(nums1, 3, nums2, 3)
    assert nums1 == [1, 2, 2, 3, 5, 6]
class Card:
    """A single game card.

    team:   team id :: black=0, team1=1, team2=2, grey=3
    active: known=False, unknown=True
    """

    def __init__(self, name, team, active):
        self.name = name
        self.team = team
        self.active = active
        self.toggle = True  # NOTE(review): meaning not visible here — confirm at call sites

    def __str__(self):
        # Fixed-width rendering: name centered in 15, team in 3, active in 6.
        return "[{:^15}{:3}{:6}]".format(self.name, str(self.team), str(self.active))
class Card:
    """A single game card: display name, owning team, and revealed state."""

    def __init__(self, name, team, active):
        self.name = name
        self.team = team  # team id :: black=0, team1=1, team2=2, grey=3
        self.active = active  # known=False, unknown=True
        self.toggle = True  # NOTE(review): meaning not visible here — confirm at call sites

    def __str__(self):
        # Fixed-width rendering: name centered in 15, team in 3, active in 6.
        return '[' + '{:^15}'.format(self.name) + '{:3}'.format(str(self.team)) + '{:6}'.format(str(self.active)) + ']'
# TODO: #54: DOC
# TODO: provide context managers!


class NetworkBackendNotifications:
    """
    This interface describes the interaction between the :py:class:`.SimulationManager`,
    the :py:class:`.NetworkManager` and the :py:class:`.NetworkBackend`.

    All hooks are no-op stubs here; concrete backends override the ones
    they care about.
    """

    #########################################
    # Per step
    #########################################

    def before_simulation_step(self, simulation_manager, step_cnt, network_backend, emulation_nodes, **kwargs):
        """
        Called before the next simulation step is about to be performed.
        Called every step.

        Parameters
        ----------
        simulation_manager : SimulationManager
        step_cnt : int
        network_backend : NetworkBackend
        emulation_nodes : EmulationNodes
        """
        pass

    def after_simulation_step(self, simulation_manager, step_cnt, network_backend, emulation_nodes, **kwargs):
        """
        Called after a simulation step is over.
        Called every step.

        Parameters
        ----------
        simulation_manager
        emulation_nodes
        network_backend
        step_cnt
        """
        pass

    def before_distance_matrix_changed(self, simulation_manager, network_backend, changed_distance_matrix, full_distance_matrix, **kwargs):
        """
        Called only if the distance matrix changed.

        Parameters
        ----------
        changed_distance_matrix
        simulation_manager
        network_backend
        full_distance_matrix
        kwargs

        Returns
        -------
        """
        pass

    def after_distance_matrix_changed(self, simulation_manager, network_backend, changed_distance_matrix, full_distance_matrix, **kwargs):
        """
        Called only if the distance matrix changed.

        Parameters
        ----------
        changed_distance_matrix
        simulation_manager
        network_backend
        full_distance_matrix
        kwargs

        Returns
        -------
        """
        pass

    #########################################
    # Per node
    #########################################

    def before_link_initial_start(self, network_backend, emulation_node_x, emulation_node_y, interface_x, interface_y, connection_info, start_activated=False, **kwargs):
        """
        Called before a link between the two nodes on the supplied interfaces is going to be created.
        Let the NetworkBackend decide whether the links really shall be connected.

        1) SimulationManager->NetworkManager->NetworkBackend-|
           SimulationManager<-NetworkManager-| <- connected, connection
           ...
        2) after_link_initial_start() -> ...

        Parameters
        ----------
        network_backend
        emulation_node_x
        emulation_node_y
        interface_x
        interface_y
        connection_info : ConnectionInfo
        start_activated

        Returns
        -------
        Bool, AbstractSwitch, AbstractConnection
            Whether the nodes are connected and the appropriate connection
        """
        pass

    # TODO: REMOVE?
    def after_link_initial_start(self, network_backend_connected, switch, connection, network_backend, emulation_node_x, emulation_node_y, interface_x, interface_y, connection_info, start_activated=False, **kwargs):
        """
        Parameters
        ----------
        switch
        network_backend_connected : Bool
            Return value from :py:meth:`.before_link_initial_start`
        connection : AbstractConnection
            Return value from :py:meth:`.before_link_initial_start`
        network_backend
        emulation_node_x
        emulation_node_y
        interface_x
        interface_y
        connection_info : ConnectionInfo
        start_activated

        Returns
        -------
        """
        pass

    def before_link_quality_adjustment(self, connection, link_quality_still_connected, link_quality_dict, network_backend, emulation_node_x, emulation_node_y, interface_x, interface_y, connection_info, **kwargs):
        """
        Called only for connected nodes. There the :py:class:`.LinkQualityModel` and
        the :py:class:`.NetworkBackend` agreed on a connection.

        Parameters
        ----------
        connection
        link_quality_still_connected
        link_quality_dict
        network_backend
        emulation_node_x
        emulation_node_y
        interface_x
        interface_y
        connection_info : ConnectionInfo
        kwargs
        """
        pass

    def after_link_quality_adjustment(self, connection, link_quality_still_connected, link_quality_dict, network_backend, emulation_node_x, emulation_node_y, interface_x, interface_y, connection_info, **kwargs):
        """
        Called only for connected nodes. There the :py:class:`.LinkQualityModel` and
        the :py:class:`.NetworkBackend` agreed on a connection.

        Parameters
        ----------
        connection
        link_quality_still_connected
        link_quality_dict
        network_backend
        emulation_node_x
        emulation_node_y
        interface_x
        interface_y
        connection_info : ConnectionInfo
        kwargs

        Returns
        -------
        """
        pass

    def link_up(self, connection, link_quality_dict, network_backend, emulation_node_x, emulation_node_y, interface_x, interface_y, connection_info, **kwargs):
        """Notification that the given connection transitioned to up."""
        pass

    def link_down(self, connection, link_quality_dict, network_backend, emulation_node_x, emulation_node_y, interface_x, interface_y, connection_info, **kwargs):
        """Notification that the given connection transitioned to down."""
        pass

    #########################################
    # Distributed Mode
    #########################################

    # TODO: PASS CONNECTION OBJECTS?
    # TODO: return type? Tunnel?
    def connection_across_servers(self, network_backend, emulation_node_x, emulation_node_y, remote_ip):
        """
        Parameters
        ----------
        network_backend
        emulation_node_x
        emulation_node_y
        remote_ip

        Returns
        -------
        """
        pass


class ConnectionInfo:
    """
    Attributes
    ----------
    is_remote_conn : bool, optional (default is False)
    is_central : bool, optional (default is False)
    is_mgmt : bool, optional (default is False)
    """

    def __init__(self, is_remote_conn=False, is_central=False, is_mgmt=False):
        self.is_remote_conn = is_remote_conn
        self.is_central = is_central
        self.is_mgmt = is_mgmt
# PEP 8: class names use CapWords.  The lowercased `Networkbackendnotifications`
# and `Connectioninfo` broke the convention and diverged from the
# :py:class:`...` references in the docstrings; proper names are restored and
# lowercase aliases kept for backward compatibility.


class NetworkBackendNotifications:
    """
    This interface describes the interaction between the :py:class:`.SimulationManager`,
    the :py:class:`.NetworkManager` and the :py:class:`.NetworkBackend`.

    All hooks are no-op stubs here; concrete backends override the ones
    they care about.
    """

    # --- Per step -----------------------------------------------------------

    def before_simulation_step(self, simulation_manager, step_cnt, network_backend, emulation_nodes, **kwargs):
        """
        Called before the next simulation step is about to be performed.
        Called every step.

        Parameters
        ----------
        simulation_manager : SimulationManager
        step_cnt : int
        network_backend : NetworkBackend
        emulation_nodes : EmulationNodes
        """
        pass

    def after_simulation_step(self, simulation_manager, step_cnt, network_backend, emulation_nodes, **kwargs):
        """
        Called after a simulation step is over.
        Called every step.

        Parameters
        ----------
        simulation_manager
        emulation_nodes
        network_backend
        step_cnt
        """
        pass

    def before_distance_matrix_changed(self, simulation_manager, network_backend, changed_distance_matrix, full_distance_matrix, **kwargs):
        """
        Called only if the distance matrix changed.

        Parameters
        ----------
        changed_distance_matrix
        simulation_manager
        network_backend
        full_distance_matrix
        kwargs
        """
        pass

    def after_distance_matrix_changed(self, simulation_manager, network_backend, changed_distance_matrix, full_distance_matrix, **kwargs):
        """
        Called only if the distance matrix changed.

        Parameters
        ----------
        changed_distance_matrix
        simulation_manager
        network_backend
        full_distance_matrix
        kwargs
        """
        pass

    # --- Per node -----------------------------------------------------------

    def before_link_initial_start(self, network_backend, emulation_node_x, emulation_node_y, interface_x, interface_y, connection_info, start_activated=False, **kwargs):
        """
        Called before a link between the two nodes on the supplied interfaces
        is going to be created.  Let the NetworkBackend decide whether the
        links really shall be connected.

        Parameters
        ----------
        network_backend
        emulation_node_x
        emulation_node_y
        interface_x
        interface_y
        connection_info : ConnectionInfo
        start_activated

        Returns
        -------
        Bool, AbstractSwitch, AbstractConnection
            Whether the nodes are connected and the appropriate connection
        """
        pass

    def after_link_initial_start(self, network_backend_connected, switch, connection, network_backend, emulation_node_x, emulation_node_y, interface_x, interface_y, connection_info, start_activated=False, **kwargs):
        """
        Parameters
        ----------
        switch
        network_backend_connected : Bool
            Return value from :py:meth:`.before_link_initial_start`
        connection : AbstractConnection
            Return value from :py:meth:`.before_link_initial_start`
        network_backend
        emulation_node_x
        emulation_node_y
        interface_x
        interface_y
        connection_info : ConnectionInfo
        start_activated
        """
        pass

    def before_link_quality_adjustment(self, connection, link_quality_still_connected, link_quality_dict, network_backend, emulation_node_x, emulation_node_y, interface_x, interface_y, connection_info, **kwargs):
        """
        Called only for connected nodes. There the :py:class:`.LinkQualityModel`
        and the :py:class:`.NetworkBackend` agreed on a connection.

        Parameters
        ----------
        connection
        link_quality_still_connected
        link_quality_dict
        network_backend
        emulation_node_x
        emulation_node_y
        interface_x
        interface_y
        connection_info : ConnectionInfo
        kwargs
        """
        pass

    def after_link_quality_adjustment(self, connection, link_quality_still_connected, link_quality_dict, network_backend, emulation_node_x, emulation_node_y, interface_x, interface_y, connection_info, **kwargs):
        """
        Called only for connected nodes. There the :py:class:`.LinkQualityModel`
        and the :py:class:`.NetworkBackend` agreed on a connection.

        Parameters
        ----------
        connection
        link_quality_still_connected
        link_quality_dict
        network_backend
        emulation_node_x
        emulation_node_y
        interface_x
        interface_y
        connection_info : ConnectionInfo
        kwargs
        """
        pass

    def link_up(self, connection, link_quality_dict, network_backend, emulation_node_x, emulation_node_y, interface_x, interface_y, connection_info, **kwargs):
        """Notification that the given connection transitioned to up."""
        pass

    def link_down(self, connection, link_quality_dict, network_backend, emulation_node_x, emulation_node_y, interface_x, interface_y, connection_info, **kwargs):
        """Notification that the given connection transitioned to down."""
        pass

    # --- Distributed mode ---------------------------------------------------

    def connection_across_servers(self, network_backend, emulation_node_x, emulation_node_y, remote_ip):
        """
        Parameters
        ----------
        network_backend
        emulation_node_x
        emulation_node_y
        remote_ip
        """
        pass


class ConnectionInfo:
    """
    Attributes
    ----------
    is_remote_conn : bool, optional (default is False)
    is_central : bool, optional (default is False)
    is_mgmt : bool, optional (default is False)
    """

    def __init__(self, is_remote_conn=False, is_central=False, is_mgmt=False):
        self.is_remote_conn = is_remote_conn
        self.is_central = is_central
        self.is_mgmt = is_mgmt


# Backward-compatible aliases for the lowercased names.
Networkbackendnotifications = NetworkBackendNotifications
Connectioninfo = ConnectionInfo
# Exception types raised by the query/table layer; class names describe the
# failing condition.


class UnknownColumn(Exception):
    pass


class HasNoColumns(Exception):
    pass


class TableDoesNotExist(Exception):
    pass


class TableExists(Exception):
    pass


class EmptySet(Exception):
    pass


class AmbiguousColumn(Exception):
    pass


class UnknownOperator(Exception):
    pass


class ColumnMetadataUndefined(Exception):
    """
    When the "column_metadata" field is not implemented by a table
    """
    pass


class JoinHasNoOnClause(Exception):
    """
    Example:
        from a inner join b
    """
    pass
# PEP 8: exception classes use CapWords.  The lowercased names broke the
# convention; proper names are restored below with lowercase aliases kept
# for backward compatibility.


class UnknownColumn(Exception):
    pass


class HasNoColumns(Exception):
    pass


class TableDoesNotExist(Exception):
    pass


class TableExists(Exception):
    pass


class EmptySet(Exception):
    pass


class AmbiguousColumn(Exception):
    pass


class UnknownOperator(Exception):
    pass


class ColumnMetadataUndefined(Exception):
    """
    When the "column_metadata" field is not implemented by a table
    """
    pass


class JoinHasNoOnClause(Exception):
    """
    Example:
        from a inner join b
    """
    pass


# Backward-compatible aliases for the lowercased names.
Unknowncolumn = UnknownColumn
Hasnocolumns = HasNoColumns
Tabledoesnotexist = TableDoesNotExist
Tableexists = TableExists
Emptyset = EmptySet
Ambiguouscolumn = AmbiguousColumn
Unknownoperator = UnknownOperator
Columnmetadataundefined = ColumnMetadataUndefined
Joinhasnoonclause = JoinHasNoOnClause
# Ejercicio 1
# https://leetcode.com/problems/unique-paths-ii/


class Solution:
    def uniquePathsWithObstacles(self, obstacleGrid):
        """Count monotone (right/down) paths from the top-left to the
        bottom-right cell; a nonzero cell is an obstacle."""
        if obstacleGrid[0][0] != 0:
            return 0
        rows = len(obstacleGrid)
        cols = len(obstacleGrid[0])
        # paths[r][c] = number of ways to reach (r, c); obstacles stay 0.
        paths = [[0] * cols for _ in range(rows)]
        paths[0][0] = 1
        for r in range(rows):
            for c in range(cols):
                if obstacleGrid[r][c] != 0 or (r == 0 and c == 0):
                    continue
                from_above = paths[r - 1][c] if r > 0 else 0
                from_left = paths[r][c - 1] if c > 0 else 0
                paths[r][c] = from_above + from_left
        return paths[-1][-1]
class Solution:

    def unique_paths_with_obstacles(self, obstacleGrid):
        """Count monotone (right/down) paths from the top-left to the
        bottom-right cell of obstacleGrid; a nonzero cell is an obstacle."""
        if obstacleGrid[0][0] != 0:
            return 0
        rows = len(obstacleGrid)
        cols = len(obstacleGrid[0])
        # paths[r][c] = number of ways to reach (r, c); obstacles stay 0.
        paths = [[0] * cols for _ in range(rows)]
        paths[0][0] = 1
        for r in range(rows):
            for c in range(cols):
                if obstacleGrid[r][c] != 0 or (r == 0 and c == 0):
                    continue
                from_above = paths[r - 1][c] if r > 0 else 0
                from_left = paths[r][c - 1] if c > 0 else 0
                paths[r][c] = from_above + from_left
        return paths[-1][-1]
# Assorted demo values (the pair of duplicate rows collapses to one
# definition set; final values are identical).
num1 = 10
num2 = 20
num3 = 30
num4 = 100
num5 = 'orz'
# Redis key names used to track indexing progress.

# Chain head vs. most recently indexed DB block.
latest_block_redis_key = 'latest_block_from_chain'
latest_block_hash_redis_key = 'latest_blockhash_from_chain'
most_recent_indexed_block_redis_key = 'most_recently_indexed_block_from_db'
most_recent_indexed_block_hash_redis_key = 'most_recently_indexed_block_hash_from_db'
most_recent_indexed_ipld_block_redis_key = 'most_recent_indexed_ipld_block_redis_key'
most_recent_indexed_ipld_block_hash_redis_key = 'most_recent_indexed_ipld_block_hash_redis_key'
most_recent_indexed_aggregate_user_block_redis_key = 'most_recent_indexed_aggregate_user_block'

# Last-completion markers for periodic jobs.
index_aggregate_user_last_refresh_completion_redis_key = 'index_aggregate_user:last-refresh-completion'
trending_tracks_last_completion_redis_key = 'trending:tracks:last-completion'
trending_playlists_last_completion_redis_key = 'trending-playlists:last-completion'
challenges_last_processed_event_redis_key = 'challenges:last-processed-event'
user_balances_refresh_last_completion_redis_key = 'user_balances:last-completion'
latest_legacy_play_db_key = 'latest_legacy_play_db_key'
index_eth_last_completion_redis_key = 'index_eth:last-completion'

# Solana latest program keys.
latest_sol_play_program_tx_key = 'latest_sol_program_tx:play:chain'
latest_sol_play_db_tx_key = 'latest_sol_program_tx:play:db'
latest_sol_rewards_manager_program_tx_key = 'latest_sol_program_tx:rewards_manager:chain'
latest_sol_rewards_manager_db_tx_key = 'latest_sol_program_tx:rewards_manager:db'
latest_sol_user_bank_program_tx_key = 'latest_sol_program_tx:user_bank:chain'
latest_sol_user_bank_db_tx_key = 'latest_sol_program_tx:user_bank:db'
def check_i_islarger2(i, last_i):
    """Return (i > last_i, i), i.e. the ordering check plus the new last_i."""
    return i > last_i, i


def omp_parallel_for_ordered():
    """Simulated "omp parallel for ordered" check: sum 1..99 while verifying
    the iterations are observed in increasing order.

    The bare string statements mimic the OpenMP pragmas of the original
    benchmark and are deliberate no-ops.
    """
    total = 0  # avoids shadowing the builtin `sum`
    is_larger = True
    last_i = 0
    'omp parallel for schedule(static, 1) ordered'
    for i in range(1, 100):
        'omp ordered'
        step_larger, last_i = check_i_islarger2(i, last_i)
        is_larger = step_larger and is_larger
        total += i
    known_sum = (99 * 100) / 2
    return known_sum == total and is_larger
# Definition for an interval.
class Interval(object):
    """Closed interval [start, end]."""

    def __init__(self, s=0, e=0):
        self.start = s
        self.end = e


class Solution(object):
    def merge(self, intervals):
        """Merge overlapping intervals.

        Sorts `intervals` in place by start; surviving Interval objects are
        mutated (their `end` may grow).

        :type intervals: List[Interval]
        :rtype: List[Interval]
        """
        if len(intervals) <= 1:
            return intervals
        intervals.sort(key=lambda iv: iv.start)
        merged = [intervals[0]]
        for current in intervals[1:]:
            last = merged[-1]
            if last.end >= current.start:
                # Overlap: extend the previous interval.
                last.end = max(last.end, current.end)
            else:
                merged.append(current)
        return merged


if __name__ == "__main__":
    lists = [[1, 3], [2, 6], [8, 10], [15, 18]]
    sol = Solution()
    sol.merge([Interval(s, e) for s, e in lists])
class Interval(object):
    """A closed interval [start, end]."""

    def __init__(self, s=0, e=0):
        self.start = s
        self.end = e


class Solution(object):
    def merge(self, intervals):
        """Merge all overlapping intervals.

        :type intervals: List[Interval]
        :rtype: List[Interval]
        """
        if len(intervals) <= 1:
            return intervals
        intervals.sort(key=lambda x: x.start)
        ind = 1
        new_intervals = [intervals[0]]
        while ind < len(intervals):
            if new_intervals[-1].end >= intervals[ind].start:
                # Overlap: extend the last merged interval in place.
                new_intervals[-1].end = max([intervals[ind].end, new_intervals[-1].end])
            else:
                new_intervals.append(intervals[ind])
            ind += 1
        return new_intervals


if __name__ == '__main__':
    lists = [[1, 3], [2, 6], [8, 10], [15, 18]]
    # BUG FIX: the script referenced undefined lowercase names
    # ``solution``/``interval`` (NameError at runtime); use the classes
    # defined above.
    sol = Solution()
    sol.merge([Interval(s, e) for (s, e) in lists])
# City -> salary lookup table.
sal_info = dict()
sal_info = {'Austin': 911985, 'Dallas': 89999, 'San Jose': 100989,
            'Atlanta': 89286, 'Portland': 101367}

# Reassign the salary for Atlanta.
sal_info['Atlanta'] = 92340
print(sal_info)

# Add Seattle only when it is not already present.
if 'Seattle' not in sal_info:
    sal_info['Seattle'] = 110340
else:
    print("key exists")
print(sal_info)
# Salary table keyed by city name.
sal_info = dict()
sal_info = {'Austin': 911985, 'Dallas': 89999, 'San Jose': 100989, 'Atlanta': 89286, 'Portland': 101367}

sal_info['Atlanta'] = 92340  # corrected Atlanta salary
print(sal_info)

if 'Seattle' not in sal_info:
    # Seattle missing: seed it with a default salary.
    sal_info['Seattle'] = 110340
else:
    print('key exists')
print(sal_info)
# Exercise: functions and user input.
"""
Earlier inline version, kept for reference:

age = input("Enter your age:")
new_age = float(age) + 50
print(new_age)
"""


def age_foo(age):
    """Return ``age`` (a numeric string) plus 50, as a float."""
    return float(age) + 50


age = input("Enter your age: ")
print(age_foo(age))
"""
Multi line comment
age = input("Enter your age:")
new_age = float(age) + 50
print(new_age)
"""


def age_foo(age):
    """Add 50 years to the supplied age string and return a float."""
    new_age = float(age) + 50
    return new_age


age = input('Enter your age: ')
print(age_foo(age))
# Parser test fixture: expected structure for one VPNv4 prefix
# (10.144.0.0/24) in vrf "blue" with four BGP paths.
# NOTE(review): shape matches a pyATS/Genie-style "show bgp" parse -
# confirm against the parser under test.
expected_output = { "instance": { "default": { "vrf": { "blue": { "address_family": { "vpnv4": { "prefixes": { "10.144.0.0/24": { "table_version": "88", "available_path": "4", "best_path": "1", "paths": "4 available, best #1, table blue", "index": { 1: { "next_hop": "10.3.3.3", "gateway": "10.6.6.6", "imported_path_from": "12:23:10.144.0.0/24", "originator": "10.6.6.6", "route_info": "1", "next_hop_igp_metric": "21", "localpref": 200, "metric": 0, "mpls_labels": { "in": "nolabel", "out": "37", }, "origin_codes": "?", "status_codes": "*>", "ext_community": "RT:12:23", "update_group": 6, }, 2: { "next_hop": "10.13.13.13", "gateway": "10.13.13.13", "imported_path_from": "12:23:10.144.0.0/24", "originator": "10.0.0.2", "route_info": "1", "next_hop_via": "green", "localpref": 100, "metric": 0, "origin_codes": "?", "status_codes": "* ", "ext_community": "RT:12:23 ", "recursive_via_connected": True, "update_group": 6, }, 3: { "next_hop": "10.3.3.3", "gateway": "10.7.7.7", "imported_path_from": "12:23:10.144.0.0/24", "originator": "10.7.7.7", "next_hop_igp_metric": "21", "localpref": 200, "metric": 0, "mpls_labels": { "in": "nolabel", "out": "37", }, "origin_codes": "?", "route_info": "1", "status_codes": "* i", "ext_community": "RT:12:23", "update_group": 6, }, 4: { "next_hop": "10.11.11.11", "gateway": "10.11.11.11", "originator": "10.1.0.1", "ext_community": "RT:11:12 ", "recursive_via_connected": True, "update_group": 6, }, }, } } } } } } } } }
# Parser test fixture (normalized form): expected structure for one VPNv4
# prefix (10.144.0.0/24) in vrf 'blue' with four BGP paths.
expected_output = {'instance': {'default': {'vrf': {'blue': {'address_family': {'vpnv4': {'prefixes': {'10.144.0.0/24': {'table_version': '88', 'available_path': '4', 'best_path': '1', 'paths': '4 available, best #1, table blue', 'index': {1: {'next_hop': '10.3.3.3', 'gateway': '10.6.6.6', 'imported_path_from': '12:23:10.144.0.0/24', 'originator': '10.6.6.6', 'route_info': '1', 'next_hop_igp_metric': '21', 'localpref': 200, 'metric': 0, 'mpls_labels': {'in': 'nolabel', 'out': '37'}, 'origin_codes': '?', 'status_codes': '*>', 'ext_community': 'RT:12:23', 'update_group': 6}, 2: {'next_hop': '10.13.13.13', 'gateway': '10.13.13.13', 'imported_path_from': '12:23:10.144.0.0/24', 'originator': '10.0.0.2', 'route_info': '1', 'next_hop_via': 'green', 'localpref': 100, 'metric': 0, 'origin_codes': '?', 'status_codes': '* ', 'ext_community': 'RT:12:23 ', 'recursive_via_connected': True, 'update_group': 6}, 3: {'next_hop': '10.3.3.3', 'gateway': '10.7.7.7', 'imported_path_from': '12:23:10.144.0.0/24', 'originator': '10.7.7.7', 'next_hop_igp_metric': '21', 'localpref': 200, 'metric': 0, 'mpls_labels': {'in': 'nolabel', 'out': '37'}, 'origin_codes': '?', 'route_info': '1', 'status_codes': '* i', 'ext_community': 'RT:12:23', 'update_group': 6}, 4: {'next_hop': '10.11.11.11', 'gateway': '10.11.11.11', 'originator': '10.1.0.1', 'ext_community': 'RT:11:12 ', 'recursive_via_connected': True, 'update_group': 6}}}}}}}}}}}
def example_1():
    """Sequence repetition with ``*`` on lists and strings."""
    seq = [1, 2, 3]
    print(seq * 5)
    print(5 * 'abcd')


def example_2():
    """Build a 3x3 board from independent rows (safe to mutate one cell)."""
    board = [['_'] * 3 for _ in range(3)]
    print(board)
    board[1][2] = 'X'
    print(board)


def example_3():
    """Demonstrate the aliasing trap: three references to the SAME row."""
    weird_board = [['_'] * 3] * 3
    print(weird_board)
    weird_board[1][2] = '0'
    print(weird_board)


def example_4():
    """Contrast appending one shared row vs. a fresh row per iteration."""
    row = ['_'] * 3
    board = []
    for _ in range(3):
        board.append(row)  # every entry aliases the same list
    print(board)
    board[2][0] = 'X'
    print(board)

    board = []
    for _ in range(3):
        board.append(['_'] * 3)  # fresh row each time
    print(board)
    board[2][0] = 'X'
    print(board)


if __name__ == '__main__':
    example_1()
    example_2()
    example_3()
    example_4()
def example_1():
    """Repeat a list and a string with the ``*`` operator."""
    items = [1, 2, 3]
    print(items * 5)
    print(5 * 'abcd')


def example_2():
    """List comprehension builds three distinct rows."""
    board = [['_'] * 3 for i in range(3)]
    print(board)
    board[1][2] = 'X'
    print(board)


def example_3():
    """Outer ``*`` copies references, so one row is shared three times."""
    weird_board = [['_'] * 3] * 3
    print(weird_board)
    weird_board[1][2] = '0'
    print(weird_board)


def example_4():
    """Shared-row append vs. per-iteration fresh row."""
    shared = ['_'] * 3
    board = []
    for i in range(3):
        board.append(shared)
    print(board)
    board[2][0] = 'X'
    print(board)

    board = []
    for i in range(3):
        fresh = ['_'] * 3
        board.append(fresh)
    print(board)
    board[2][0] = 'X'
    print(board)


if __name__ == '__main__':
    example_1()
    example_2()
    example_3()
    example_4()
# Competitive-programming script: for each of t test cases, count 'A' and
# 'B' characters in a string, then extend each count across runs of '.'
# bounded by the same letter on both sides.
# NOTE(review): ``end``/``j`` can be read before assignment on some inputs,
# and the inner ``for j in range(len(s) - 1)`` scans from index 0 rather
# than from ``i`` - the collapsed formatting makes the intended nesting
# ambiguous, so the code is left byte-identical pending confirmation.
t = int(input()) for _ in range(t): s = input() a = list(s).count('A') b = list(s).count('B'); #print(a, b) i = 1 while(i < len(s) - 1): if s[i] == '.': start = i j = i + 1 for j in range(len(s) - 1): if s[j] != '.': end = j - 1 break i = j else: i += 1 print(j, start, end) if s[start - 1] == 'A' and s[end + 1] == 'A': a += end - start + 1 continue if s[start - 1] == 'B' and s[end + 1] == 'B': b += end - start + 1 print(a, b)
# Normalized copy of the A/B '.'-run counting script above.
# NOTE(review): same caveats - possible use-before-assignment of
# ``end``/``j`` and an inner scan starting at index 0; nesting is ambiguous
# in this collapsed form, so the code is left byte-identical.
t = int(input()) for _ in range(t): s = input() a = list(s).count('A') b = list(s).count('B') i = 1 while i < len(s) - 1: if s[i] == '.': start = i j = i + 1 for j in range(len(s) - 1): if s[j] != '.': end = j - 1 break i = j else: i += 1 print(j, start, end) if s[start - 1] == 'A' and s[end + 1] == 'A': a += end - start + 1 continue if s[start - 1] == 'B' and s[end + 1] == 'B': b += end - start + 1 print(a, b)
# Scraper configuration: base/archive URL, URL substring filters, test
# fixtures, and AWS resource settings.
# page on website that serves as base for archives. BASE_URL = \ 'https://awebsite.org/index' # only extract the links that have these strings # as part of the urls. BASE_MUST_CONTAIN = '/someword/' PAGE_MUST_CONTAIN = '/someword/' # urls to use for testing. TEST_SINGLE_PAGE_URL = \ 'https://awebsite.org/2019' TEST_SINGLE_PAGE_CONTENT = \ 'https://awebsite.org/2019/experience1' TEST_SINGLE_PAGE_CONTAINS = '/someword/' # AWS resource settings. AWS_REGION = 'your-region' DB_TABLE = 'your-dynamodb-table'
# Scraper configuration (normalized form): base/archive URL, URL substring
# filters, single-page test fixtures, and AWS region/table settings.
base_url = 'https://awebsite.org/index'
# Only extract links whose URLs contain these substrings.
base_must_contain = '/someword/'
page_must_contain = '/someword/'
# URLs used for testing.
test_single_page_url = 'https://awebsite.org/2019'
test_single_page_content = 'https://awebsite.org/2019/experience1'
test_single_page_contains = '/someword/'
# AWS resource settings.
aws_region = 'your-region'
db_table = 'your-dynamodb-table'
def solve(polymer, rules, values, steps=10):
    """Run ``steps`` rounds of pair-insertion polymerisation (AoC 2021 day 14).

    polymer: dict mapping a two-character pair to its multiplicity.
    rules:   dict mapping a pair to the character inserted between it.
    values:  per-character counts, updated in place.

    Order of pairs is irrelevant: if NN -> C then every NN produces one NC
    and one CN, so only pair multiplicities need tracking.

    Returns the difference between the most and least common character
    counts after ``steps`` rounds.
    """
    for _ in range(steps):
        new_polymer = {}
        for pair, count in polymer.items():
            inserted = rules[pair]
            # FIX: replaced bare ``try/except`` accumulation with dict.get;
            # the bare except also swallowed unrelated errors.
            values[inserted] = values.get(inserted, 0) + count
            left = pair[0] + inserted
            right = inserted + pair[1]
            new_polymer[left] = new_polymer.get(left, 0) + count
            new_polymer[right] = new_polymer.get(right, 0) + count
        polymer = new_polymer
    return max(values.values()) - min(values.values())


def _pair_counts(polymer):
    """Count adjacent character pairs of ``polymer`` into a dict."""
    counts = {}
    for i in range(len(polymer) - 1):
        pair = polymer[i:i + 2]
        counts[pair] = counts.get(pair, 0) + 1
    return counts


polymer = "SCSCSKKVVBKVFKSCCSOV"
values = {key: polymer.count(key) for key in set(polymer)}
polymer_dict = _pair_counts(polymer)

rules = {}
with open("Day14_input.txt", 'r') as inputfile:
    for line in inputfile:
        line = line.split(" -> ")
        rules[line[0]] = line[1][:-1]

part1_sol = solve(polymer_dict, rules, values)
print("Part 1 solution: ", part1_sol)

# Rebuild fresh counts for part 2 (``values`` was mutated by part 1).
polymer = "SCSCSKKVVBKVFKSCCSOV"
values = {key: polymer.count(key) for key in set(polymer)}
polymer_dict = _pair_counts(polymer)
part2_sol = solve(polymer_dict, rules, values, steps=40)
print("Part 2 solution: ", part2_sol)
def solve(polymer, rules, values, steps=10):
    """Run ``steps`` rounds of pair-insertion polymerisation.

    polymer: dict mapping a two-character pair to its multiplicity.
    rules:   dict mapping a pair to the character inserted between it.
    values:  per-character counts, updated in place.

    Returns max(character count) - min(character count) after ``steps``.
    """
    for _ in range(steps):
        new_polymer = {}
        for pair, count in polymer.items():
            inserted = rules[pair]
            # FIX: dict.get instead of bare ``try/except`` (which also hid
            # unrelated errors).
            values[inserted] = values.get(inserted, 0) + count
            left = pair[0] + inserted
            right = inserted + pair[1]
            new_polymer[left] = new_polymer.get(left, 0) + count
            new_polymer[right] = new_polymer.get(right, 0) + count
        polymer = new_polymer
    return max(values.values()) - min(values.values())


def _pair_counts(polymer):
    """Count adjacent character pairs of ``polymer`` into a dict."""
    counts = {}
    for i in range(len(polymer) - 1):
        pair = polymer[i:i + 2]
        counts[pair] = counts.get(pair, 0) + 1
    return counts


polymer = 'SCSCSKKVVBKVFKSCCSOV'
values = {key: polymer.count(key) for key in set(polymer)}
polymer_dict = _pair_counts(polymer)

rules = {}
with open('Day14_input.txt', 'r') as inputfile:
    for line in inputfile:
        line = line.split(' -> ')
        rules[line[0]] = line[1][:-1]

part1_sol = solve(polymer_dict, rules, values)
print('Part 1 solution: ', part1_sol)

# Rebuild fresh counts for part 2 (``values`` was mutated by part 1).
polymer = 'SCSCSKKVVBKVFKSCCSOV'
values = {key: polymer.count(key) for key in set(polymer)}
polymer_dict = _pair_counts(polymer)
part2_sol = solve(polymer_dict, rules, values, steps=40)
print('Part 2 solution: ', part2_sol)
# Prompt until the user enters a valid sex code ('M' or 'F').
sexo = str(input('digite seu sexo [M/F]: ')).strip().upper()
# BUG FIX: ``sexo not in 'MF'`` used substring matching, so an empty answer
# ('' in 'MF' is True) escaped the loop and then crashed on ``sexo[0]``.
# Test membership against an explicit tuple instead.
while sexo not in ('M', 'F'):
    sexo = str(input('\033[31mdados invalidos.\n\033[mpor favor, informe seu sexo [M/F]: ')).strip().upper()
print(f'sexo {sexo[0]} registrado com sucesso')
# Prompt until the user enters a valid sex code ('M' or 'F').
sexo = str(input('digite seu sexo [M/F]: ')).strip().upper()
# BUG FIX: ``sexo not in 'MF'`` used substring matching, so an empty answer
# ('' in 'MF' is True) escaped the loop and then crashed on ``sexo[0]``.
while sexo not in ('M', 'F'):
    sexo = str(input('\x1b[31mdados invalidos.\n\x1b[mpor favor, informe seu sexo [M/F]: ')).strip().upper()
print(f'sexo {sexo[0]} registrado com sucesso')
'''Given a collection of distinct integers, return all possible permutations.

Example:

Input: [1, 2, 3]
Output:
[
    [1, 2, 3],
    [1, 3, 2],
    [2, 1, 3],
    [2, 3, 1],
    [3, 1, 2],
    [3, 2, 1]
]
'''


class Solution(object):
    def permute(self, nums):
        """Return all permutations of ``nums`` (was an empty stub).

        :type nums: List[int]
        :rtype: List[List[int]]
        """
        result = []

        def backtrack(path, remaining):
            # No elements left to place: ``path`` is a full permutation.
            if not remaining:
                result.append(path[:])
                return
            for idx in range(len(remaining)):
                path.append(remaining[idx])
                backtrack(path, remaining[:idx] + remaining[idx + 1:])
                path.pop()

        backtrack([], list(nums))
        return result
"""Given a collection of distinct integers, return all possible permutations.

Example:

Input: [1, 2, 3]
Output:
[
    [1, 2, 3],
    [1, 3, 2],
    [2, 1, 3],
    [2, 3, 1],
    [3, 1, 2],
    [3, 2, 1]
]
"""


class Solution(object):
    def permute(self, nums):
        """Return all permutations of ``nums`` (was an empty stub).

        :type nums: List[int]
        :rtype: List[List[int]]
        """
        result = []

        def backtrack(path, remaining):
            # Nothing left to place: record a copy of the finished path.
            if not remaining:
                result.append(path[:])
                return
            for idx in range(len(remaining)):
                path.append(remaining[idx])
                backtrack(path, remaining[:idx] + remaining[idx + 1:])
                path.pop()

        backtrack([], list(nums))
        return result
# Test fixture: WOQL (TerminusDB query language) JSON-LD document for an
# ``And`` of two ``Triple`` queries.
# NOTE(review): presumably compared against client-generated query JSON -
# confirm against the test that consumes it.
WOQL_AND_JSON = { "@type": "woql:And", "woql:query_list": [ { "@type": "woql:QueryListElement", "woql:index": {"@type": "xsd:nonNegativeInteger", "@value": 0}, "woql:query": { "@type": "woql:Triple", "woql:subject": {"@type": "woql:Node", "woql:node": "doc:a"}, "woql:predicate": {"@type": "woql:Node", "woql:node": "scm:b"}, "woql:object": { "@type": "woql:Datatype", "woql:datatype": {"@type": "xsd:string", "@value": "c"}, }, }, }, { "@type": "woql:QueryListElement", "woql:index": {"@type": "xsd:nonNegativeInteger", "@value": 1}, "woql:query": { "@type": "woql:Triple", "woql:subject": {"@type": "woql:Node", "woql:node": "doc:1"}, "woql:predicate": {"@type": "woql:Node", "woql:node": "scm:2"}, "woql:object": { "@type": "woql:Datatype", "woql:datatype": {"@type": "xsd:string", "@value": "3"}, }, }, }, ], }
# Test fixture (normalized form): WOQL JSON-LD ``And`` of two ``Triple``
# queries.
woql_and_json = {'@type': 'woql:And', 'woql:query_list': [{'@type': 'woql:QueryListElement', 'woql:index': {'@type': 'xsd:nonNegativeInteger', '@value': 0}, 'woql:query': {'@type': 'woql:Triple', 'woql:subject': {'@type': 'woql:Node', 'woql:node': 'doc:a'}, 'woql:predicate': {'@type': 'woql:Node', 'woql:node': 'scm:b'}, 'woql:object': {'@type': 'woql:Datatype', 'woql:datatype': {'@type': 'xsd:string', '@value': 'c'}}}}, {'@type': 'woql:QueryListElement', 'woql:index': {'@type': 'xsd:nonNegativeInteger', '@value': 1}, 'woql:query': {'@type': 'woql:Triple', 'woql:subject': {'@type': 'woql:Node', 'woql:node': 'doc:1'}, 'woql:predicate': {'@type': 'woql:Node', 'woql:node': 'scm:2'}, 'woql:object': {'@type': 'woql:Datatype', 'woql:datatype': {'@type': 'xsd:string', '@value': '3'}}}}]}
# Switch database
SPINE_SWITCH_TYPE = ["NX3164Q", "NX9332PQ", "NX3132Q", "NX9504", "NX9508", "NX9516"]
LEAF_SWITCH_TYPE = ["NX9372PX", "NX9372TX", "NX9396PX", "NX9396TX", "NX93120TX", "NX93128TX", "NX3172Q"]


def _front_panel_ports(count):
    """Return host-facing interface names "e1/1" .. "e1/<count>"."""
    return ["e1/%d" % port for port in range(1, count + 1)]


# Leaf Switch Interfaces.
# REFACTOR: the original spelled out every 48/32-entry interface list by
# hand; generate them instead (resulting values are identical).
# NOTE(review): map key "NX93120PX" does not match "NX93120TX" in
# LEAF_SWITCH_TYPE - preserved as-is from the original; confirm which
# spelling is intended.
SWITCH_HOST_IF_MAP = {
    "NX9372PX": _front_panel_ports(48),
    "NX9372TX": _front_panel_ports(48),
    "NX9396PX": _front_panel_ports(48),
    "NX9396TX": _front_panel_ports(48),
    "NX93120PX": _front_panel_ports(48),
    "NX93128TX": _front_panel_ports(48),
    "NX3172Q": _front_panel_ports(48),
    "NX3164Q": _front_panel_ports(32),
    "NX9332PQ": _front_panel_ports(32),
    "NX3132Q": _front_panel_ports(32),
    "NX9504": _front_panel_ports(32),
    "NX9508": _front_panel_ports(32),
    "NX9516": _front_panel_ports(32),
}

# Link Types
TOPOLOGY_LINK_TYPES = ['Linkset-[1-9]+Link', 'VPC-[1-9]+Link']
# Switch hardware database (normalized naming).
spine_switch_type = ['NX3164Q', 'NX9332PQ', 'NX3132Q', 'NX9504', 'NX9508', 'NX9516']
leaf_switch_type = ['NX9372PX', 'NX9372TX', 'NX9396PX', 'NX9396TX', 'NX93120TX', 'NX93128TX', 'NX3172Q']


def _front_panel_ports(count):
    """Return host-facing interface names 'e1/1' .. 'e1/<count>'."""
    return [f'e1/{port}' for port in range(1, count + 1)]


# Host-facing interfaces per leaf/spine model.
# REFACTOR: the hand-written 48/32-entry lists are generated instead
# (resulting values are identical).
# NOTE(review): map key 'NX93120PX' does not match 'NX93120TX' in
# leaf_switch_type - preserved as-is from the original; confirm intent.
switch_host_if_map = {
    'NX9372PX': _front_panel_ports(48),
    'NX9372TX': _front_panel_ports(48),
    'NX9396PX': _front_panel_ports(48),
    'NX9396TX': _front_panel_ports(48),
    'NX93120PX': _front_panel_ports(48),
    'NX93128TX': _front_panel_ports(48),
    'NX3172Q': _front_panel_ports(48),
    'NX3164Q': _front_panel_ports(32),
    'NX9332PQ': _front_panel_ports(32),
    'NX3132Q': _front_panel_ports(32),
    'NX9504': _front_panel_ports(32),
    'NX9508': _front_panel_ports(32),
    'NX9516': _front_panel_ports(32),
}

# Regex patterns naming topology link types.
topology_link_types = ['Linkset-[1-9]+Link', 'VPC-[1-9]+Link']
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.


class Secret:
    """Defines Kubernetes Secret Volume"""

    def __init__(self, deploy_type, deploy_target, secret, key):
        """Initialize a Kubernetes Secret object.

        Used to track requested secrets from the user.

        :param deploy_type: The type of secret deploy in Kubernetes, either
            `env` or `volume`
        :type deploy_type: ``str``
        :param deploy_target: The environment variable to be created in the
            worker.
        :type deploy_target: ``str``
        :param secret: Name of the secrets object in Kubernetes
        :type secret: ``str``
        :param key: Key of the secret within the Kubernetes Secret
        :type key: ``str``
        """
        self.deploy_type = deploy_type
        # Environment-variable names are conventionally upper-case.
        self.deploy_target = deploy_target.upper()
        self.secret = secret
        self.key = key
class Secret:
    """Defines Kubernetes Secret Volume"""

    def __init__(self, deploy_type, deploy_target, secret, key):
        """Track one secret requested by the user.

        :param deploy_type: The type of secret deploy in Kubernetes, either
            `env` or `volume`
        :type deploy_type: ``str``
        :param deploy_target: The environment variable to be created in the
            worker.
        :type deploy_target: ``str``
        :param secret: Name of the secrets object in Kubernetes
        :type secret: ``str``
        :param key: Key of the secret within the Kubernetes Secret
        :type key: ``str``
        """
        self.deploy_type = deploy_type
        self.deploy_target = deploy_target.upper()  # env var names are upper-case
        self.secret = secret
        self.key = key
class Solution:
    def XXX(self, nums):
        """Return every permutation of ``nums`` via depth-first search."""
        results = []
        current = []

        def explore(candidates):
            # A full-length path is a complete permutation.
            if len(current) == len(candidates):
                results.append(current[:])
            for value in candidates:
                if value in current:
                    continue
                current.append(value)
                explore(candidates)
                current.pop()

        explore(nums)
        return results
class Solution:
    def xxx(self, nums):
        """Collect all permutations of ``nums`` by backtracking."""
        ret = []

        def dfs(prefix):
            # A prefix covering every element is a finished permutation.
            if len(prefix) == len(nums):
                ret.append(list(prefix))
            for item in nums:
                if item not in prefix:
                    dfs(prefix + [item])

        dfs([])
        return ret
# Assignment 1
def assignment_1():
    """Print the integers 1 through 10."""
    for counter in range(1, 11):
        print(counter)


# Assignment 2
def assignment_2():
    """Read whole numbers and print each one's half, until '0' is entered."""
    goOn = True
    number = 0
    half = 0
    while goOn:
        print("Please type a whole number and if you want to end please type 0:")
        number = input()  # input() method automatically assigns string in variable
        print(type(number))
        if number == '0':
            goOn = False
        else:
            # BUG FIX: ``number`` is a string, so ``number // 2`` raised
            # TypeError; convert to int before halving.
            half = int(number) // 2
            print("Number is ", number, "half is", half)


def main():
    """Run both assignments in order."""
    assignment_1()
    assignment_2()
    print("End of assignments")


if __name__ == '__main__':
    main()
def assignment_1():
    """Print the integers 1 through 10."""
    for counter in range(1, 11):
        print(counter)


def assignment_2():
    """Read whole numbers and print each one's half, until '0' is entered."""
    go_on = True
    number = 0
    half = 0
    # BUG FIX: the loop tested undefined ``goOn`` (NameError) while the
    # flag was named ``go_on``.
    while go_on:
        print('Please type a whole number and if you want to end please type 0:')
        number = input()
        print(type(number))
        if number == '0':
            go_on = False
        else:
            # BUG FIX: ``number`` is a string, so ``number // 2`` raised
            # TypeError; convert to int before halving.
            half = int(number) // 2
            print('Number is ', number, 'half is', half)


def main():
    """Run both assignments in order."""
    assignment_1()
    assignment_2()
    print('End of assignments')


if __name__ == '__main__':
    main()
# Print the sender of every 'From' line in an mbox file and count them.
cnt = 0
# FIX: use a context manager so the file handle is closed, and iterate the
# file directly instead of materialising every line with readlines().
with open("mbox.txt") as f:
    for line in f:
        # NOTE(review): startswith('From') matches both 'From ' envelope
        # lines and 'From:' headers - confirm whether both should count.
        if line.startswith('From'):
            print(line.split()[1])
            cnt += 1
print("there was {} people.".format(cnt))
# Print the sender of every 'From' line in an mbox file and count them.
cnt = 0
# FIX: context manager closes the file; iterate the handle directly rather
# than building a list with readlines().
with open('mbox.txt') as f:
    for line in f:
        # NOTE(review): startswith('From') matches both 'From ' envelope
        # lines and 'From:' headers - confirm whether both should count.
        if line.startswith('From'):
            print(line.split()[1])
            cnt += 1
print('there was {} people.'.format(cnt))
# Build-time context for the geographic_msgs ROS package: message/service
# file lists, dependencies, and generator settings. Auto-generated - do not
# edit by hand.
# generated from genmsg/cmake/pkg-genmsg.context.in messages_str = "/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/BoundingBox.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/GeographicMapChanges.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/GeographicMap.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/GeoPath.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/GeoPoint.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/GeoPointStamped.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/GeoPose.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/GeoPoseStamped.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/KeyValue.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/MapFeature.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/RouteNetwork.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/RoutePath.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/RouteSegment.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/WayPoint.msg" services_str = "/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/srv/GetGeographicMap.srv;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/srv/GetGeoPath.srv;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/srv/GetRoutePlan.srv;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/srv/UpdateGeographicMap.srv" pkg_name = "geographic_msgs" dependencies_str = "geometry_msgs;std_msgs;uuid_msgs" langs = "gencpp;geneus;genlisp;gennodejs;genpy" dep_include_paths_str = 
"geographic_msgs;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg;geometry_msgs;/opt/ros/melodic/share/geometry_msgs/cmake/../msg;std_msgs;/opt/ros/melodic/share/std_msgs/cmake/../msg;uuid_msgs;/xavier_ssd/TrekBot/TrekBot2_WS/src/unique_identifier/uuid_msgs/msg" PYTHON_EXECUTABLE = "/usr/bin/python2" package_has_static_sources = '' == 'TRUE' genmsg_check_deps_script = "/opt/ros/melodic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
# Build-time context for the geographic_msgs ROS package (normalized form):
# message/service file lists, dependencies, and generator settings.
# Auto-generated - do not edit by hand.
messages_str = '/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/BoundingBox.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/GeographicMapChanges.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/GeographicMap.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/GeoPath.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/GeoPoint.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/GeoPointStamped.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/GeoPose.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/GeoPoseStamped.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/KeyValue.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/MapFeature.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/RouteNetwork.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/RoutePath.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/RouteSegment.msg;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg/WayPoint.msg' services_str = '/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/srv/GetGeographicMap.srv;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/srv/GetGeoPath.srv;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/srv/GetRoutePlan.srv;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/srv/UpdateGeographicMap.srv' pkg_name = 'geographic_msgs' dependencies_str = 'geometry_msgs;std_msgs;uuid_msgs' langs = 'gencpp;geneus;genlisp;gennodejs;genpy' dep_include_paths_str = 
'geographic_msgs;/xavier_ssd/TrekBot/TrekBot2_WS/src/geographic_info/geographic_msgs/msg;geometry_msgs;/opt/ros/melodic/share/geometry_msgs/cmake/../msg;std_msgs;/opt/ros/melodic/share/std_msgs/cmake/../msg;uuid_msgs;/xavier_ssd/TrekBot/TrekBot2_WS/src/unique_identifier/uuid_msgs/msg' python_executable = '/usr/bin/python2' package_has_static_sources = '' == 'TRUE' genmsg_check_deps_script = '/opt/ros/melodic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py'