# # PySNMP MIB module RAPID-HA-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/RAPID-HA-MIB # Produced by pysmi-0.3.4 at Wed May 1 14:51:59 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion") rapidstream, = mibBuilder.importSymbols("RAPID-MIB", "rapidstream") NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance") MibIdentifier, Bits, Integer32, Unsigned32, NotificationType, enterprises, Counter64, iso, Counter32, ModuleIdentity, TimeTicks, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "Bits", "Integer32", "Unsigned32", "NotificationType", "enterprises", "Counter64", "iso", "Counter32", "ModuleIdentity", "TimeTicks", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "Gauge32") DateAndTime, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "DateAndTime", "TextualConvention", "DisplayString") rsInfoModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 4355, 6)) rsInfoModule.setRevisions(('2002-11-01 12:00',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: rsInfoModule.setRevisionsDescriptions(('Initial revision.',)) if mibBuilder.loadTexts: rsInfoModule.setLastUpdated('0211011200Z') if mibBuilder.loadTexts: rsInfoModule.setOrganization('WatchGuard Technologies, Inc.') if mibBuilder.loadTexts: rsInfoModule.setContactInfo(' Ella Yu WatchGuard Technologies, Inc. 1841 Zanker Road San Jose, CA 95112 USA 408-519-4888 ella.yu@watchguard.com ') if mibBuilder.loadTexts: rsInfoModule.setDescription('The MIB module describes general information of RapidStream system. 
Mainly, the information obtained from this MIB is used by rsInfoSystemMIB, rsClientMIB, rsSystemStatisticsMIB, rsIpsecTunnelMIB, rsHAMIB.') rsHAMIB = ObjectIdentity((1, 3, 6, 1, 4, 1, 4355, 6, 6)) if mibBuilder.loadTexts: rsHAMIB.setStatus('current') if mibBuilder.loadTexts: rsHAMIB.setDescription('This is the base object identifier for all HA related branches.') rsHALocal = ObjectIdentity((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1)) if mibBuilder.loadTexts: rsHALocal.setStatus('current') if mibBuilder.loadTexts: rsHALocal.setDescription('This is the base object identifier for all objects which are belong to local appliance.') rsHAPeer = ObjectIdentity((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2)) if mibBuilder.loadTexts: rsHAPeer.setStatus('current') if mibBuilder.loadTexts: rsHAPeer.setDescription('This is the base object identifier for all objects which are belong to peer appliance.') rsHAStatus = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("disabled", 0), ("unknown", 1), ("as-primary-active", 2), ("as-secondary-active", 3), ("aa-primary-ative", 4), ("aa-secondary-active", 5), ("aa-primary-takeover", 6), ("aa-secondary-takeover", 7), ("standby", 8), ("admin", 9), ("failed", 10), ("unavailable", 11)))).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAStatus.setStatus('current') if mibBuilder.loadTexts: rsHAStatus.setDescription("Indicates current status of local appliance. disabled: The local appliance of HA system is not enabled. unknown: The local appliance of HA system is in initialization as-primary-active: The local appliance that is the primary appliance of HA/AS system is in active mode. This status is also called MASTER in some systems. as-secondary-active: The local appliance that is the secondary appliance of HA/AS system is in active mode. This status is also called BACKUP in some systems. aa-primary-ative: The local appliance that is the primary appliance of HA/AA system is in active mode. aa-secondary-active: The local appliance that is the secondary appliance of HA/AA system is in active mode. aa-primary-takeover: The local appliance that is the primary appliance of HA/AA system has taken over the peer's duty. aa-secondary-takeover: The local appliance of the secondary appliance of HA/AA system has taken over the peer's duty. standby: The local appliance of HA/AS system is in standby mode. admin: The local appliance of HA system detects an mismatched configuration and waits for system administrator to reslove the conflict. failed: The local appliance of the HA system is down due to forced failover or other reasons. unavailable: It's reported when local appliance of HA system is unabled to get status information. ") rsHAPeerStatus = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("unavailable", 0), ("active", 1), ("standby", 2), ("admin", 3), ("failed", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerStatus.setStatus('current') if mibBuilder.loadTexts: rsHAPeerStatus.setDescription("Indicates current status of peer appliance. unavailable: It's reported when peer appliance of HA system is unabled to get status information. active: The peer applicance of HA system is in active mode. standby: The peer applicance of HA system is in standby mode. 
admin: The peer applicance of HA system dectects an mismatched configuration and waits for system administrator to reslove the conflict. failed: The peer appliance of HA system is down due to forced failover or other reasons. ") rsHALastDBSyncTime = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 3), DateAndTime()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHALastDBSyncTime.setStatus('current') if mibBuilder.loadTexts: rsHALastDBSyncTime.setDescription('The last DB synchronized time of local appliance.') rsHAError = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("no-error", 0), ("mismatched-ha-id", 1), ("mismatched-software", 2), ("mismatched-database", 3), ("mismatched-hardware", 4), ("forced-fail", 5), ("invalid-ha-role", 6), ("link-down", 7), ("lost-mia-heartbeat", 8), ("mia-not-responding", 9), ("admin-command-failed", 10), ("detect-ha-error", 11), ("unavailable", 12), ("hotsync-failed", 13), ("config-sync-failed", 14)))).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAError.setStatus('current') if mibBuilder.loadTexts: rsHAError.setDescription('Reports the current error that occurred in local appliance .') rsHAPeerError = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("no-error", 0), ("mismatched-ha-id", 1), ("mismatched-software", 2), ("mismatched-database", 3), ("mismatched-hardware", 4), ("forced-fail", 5), ("invalid-ha-role", 6), ("link-down", 7), ("lost-mia-heartbeat", 8), ("mia-not-responding", 9), ("admin-command-failed", 10), ("detect-ha-error", 11), ("unavailable", 12), ("hotsync-failed", 13), ("config-sync-failed", 14)))).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerError.setStatus('current') if mibBuilder.loadTexts: rsHAPeerError.setDescription('Reports the current error that occurred in peer appliance.') rsHAPeerSerialNumber = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 1), OctetString()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerSerialNumber.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSerialNumber.setDescription('The serial number of peer appliance.') rsHAPeerLastDBSyncTime = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 2), DateAndTime()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerLastDBSyncTime.setStatus('current') if mibBuilder.loadTexts: rsHAPeerLastDBSyncTime.setDescription('The last DB synchronized time of peer appliance.') rsHAPeerDevice = ObjectIdentity((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3)) if mibBuilder.loadTexts: rsHAPeerDevice.setStatus('current') if mibBuilder.loadTexts: rsHAPeerDevice.setDescription('This is the base object for parameters and configuration data of devices in this entity.') rsHAPeerCounters = ObjectIdentity((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4)) if mibBuilder.loadTexts: rsHAPeerCounters.setStatus('current') if mibBuilder.loadTexts: rsHAPeerCounters.setDescription('This is the base object for parameters and configuration data of devices in this entity.') rsHAPeerIfNumber = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 1), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerIfNumber.setStatus('current') if mibBuilder.loadTexts: rsHAPeerIfNumber.setDescription('The number of RapidCard installed in this entity.') rsHAPeerIfTable = MibTable((1, 3, 6, 1, 4, 1, 
4355, 6, 6, 2, 3, 2), ) if mibBuilder.loadTexts: rsHAPeerIfTable.setStatus('current') if mibBuilder.loadTexts: rsHAPeerIfTable.setDescription('A list of RapidCard entries. The number of entries is given by the value of rsHAPeerDeviceNumber.') rsHAPeerIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 2, 1), ).setIndexNames((0, "RAPID-HA-MIB", "rsHAPeerIfIndex")) if mibBuilder.loadTexts: rsHAPeerIfEntry.setStatus('current') if mibBuilder.loadTexts: rsHAPeerIfEntry.setDescription('A RapidCard entry containing objects for a particular RapidCard.') rsHAPeerIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 2, 1, 1), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerIfIndex.setStatus('current') if mibBuilder.loadTexts: rsHAPeerIfIndex.setDescription('The unique value for each interface.') rsHAPeerIfIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 2, 1, 4), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerIfIpAddr.setStatus('current') if mibBuilder.loadTexts: rsHAPeerIfIpAddr.setDescription('The ip address of the interface.') rsHAPeerIfLinkStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("down", 0), ("up", 1), ("other", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerIfLinkStatus.setStatus('current') if mibBuilder.loadTexts: rsHAPeerIfLinkStatus.setDescription('The current state of the interface.') rsHAPeerSystemCpuUtil = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 1), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil.setDescription('The CPU utilization of the peer system in last 5 seconds.') rsHAPeerSystemTotalSendBytes = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 2), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerSystemTotalSendBytes.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemTotalSendBytes.setDescription('The total number of bytes sent since peer system is up.') rsHAPeerSystemTotalRecvBytes = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 3), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerSystemTotalRecvBytes.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemTotalRecvBytes.setDescription('The total number of bytes received since peer system is up.') rsHAPeerSystemTotalSendPackets = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 4), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerSystemTotalSendPackets.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemTotalSendPackets.setDescription('The total number of packets sent since peer system is up.') rsHAPeerSystemTotalRecvPackets = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 5), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerSystemTotalRecvPackets.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemTotalRecvPackets.setDescription('The total number of packets received since peer system is up.') rsHAPeerSystemStreamReqTotal = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 6), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerSystemStreamReqTotal.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemStreamReqTotal.setDescription('The total number of the connection requests since system is up.') rsHAPeerSystemStreamReqDrop = MibScalar((1, 3, 6, 1, 4, 1, 4355, 
6, 6, 2, 4, 7), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerSystemStreamReqDrop.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemStreamReqDrop.setDescription('The total number of the connection requests being dropped since system is up.') rsHAPeerSystemCurrIpsecTunnels = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 8), Counter64()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerSystemCurrIpsecTunnels.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemCurrIpsecTunnels.setDescription('The number of ipsec tunnels in the peer system currently.') rsHAPeerSystemCpuUtil1 = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 9), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil1.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil1.setDescription('The CPU utilization of the peer system in last 1 minute.') rsHAPeerSystemCpuUtil5 = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 10), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil5.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil5.setDescription('The CPU utilization of the peer system in last 5 minutes.') rsHAPeerSystemCpuUtil15 = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 11), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil15.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil15.setDescription('The CPU utilization of the peer system in last 15 minutes.') mibBuilder.exportSymbols("RAPID-HA-MIB", rsHAPeerIfNumber=rsHAPeerIfNumber, rsHALocal=rsHALocal, rsHAPeerIfIndex=rsHAPeerIfIndex, rsHAError=rsHAError, rsHAPeerLastDBSyncTime=rsHAPeerLastDBSyncTime, rsInfoModule=rsInfoModule, rsHALastDBSyncTime=rsHALastDBSyncTime, rsHAPeerDevice=rsHAPeerDevice, rsHAPeerSystemStreamReqTotal=rsHAPeerSystemStreamReqTotal, rsHAPeerSystemCpuUtil1=rsHAPeerSystemCpuUtil1, rsHAPeerSystemCpuUtil5=rsHAPeerSystemCpuUtil5, rsHAPeerSystemCpuUtil=rsHAPeerSystemCpuUtil, rsHAPeerStatus=rsHAPeerStatus, rsHAPeer=rsHAPeer, rsHAPeerSystemCurrIpsecTunnels=rsHAPeerSystemCurrIpsecTunnels, rsHAMIB=rsHAMIB, rsHAPeerSystemTotalSendBytes=rsHAPeerSystemTotalSendBytes, rsHAPeerCounters=rsHAPeerCounters, rsHAPeerIfIpAddr=rsHAPeerIfIpAddr, rsHAPeerIfEntry=rsHAPeerIfEntry, rsHAStatus=rsHAStatus, rsHAPeerError=rsHAPeerError, rsHAPeerIfLinkStatus=rsHAPeerIfLinkStatus, PYSNMP_MODULE_ID=rsInfoModule, rsHAPeerSystemCpuUtil15=rsHAPeerSystemCpuUtil15, rsHAPeerSystemTotalRecvBytes=rsHAPeerSystemTotalRecvBytes, rsHAPeerSystemStreamReqDrop=rsHAPeerSystemStreamReqDrop, rsHAPeerSerialNumber=rsHAPeerSerialNumber, rsHAPeerSystemTotalSendPackets=rsHAPeerSystemTotalSendPackets, rsHAPeerSystemTotalRecvPackets=rsHAPeerSystemTotalRecvPackets, rsHAPeerIfTable=rsHAPeerIfTable)
(object_identifier, integer, octet_string) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'Integer', 'OctetString') (named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues') (value_range_constraint, constraints_intersection, single_value_constraint, value_size_constraint, constraints_union) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueRangeConstraint', 'ConstraintsIntersection', 'SingleValueConstraint', 'ValueSizeConstraint', 'ConstraintsUnion') (rapidstream,) = mibBuilder.importSymbols('RAPID-MIB', 'rapidstream') (notification_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance') (mib_identifier, bits, integer32, unsigned32, notification_type, enterprises, counter64, iso, counter32, module_identity, time_ticks, ip_address, mib_scalar, mib_table, mib_table_row, mib_table_column, object_identity, gauge32) = mibBuilder.importSymbols('SNMPv2-SMI', 'MibIdentifier', 'Bits', 'Integer32', 'Unsigned32', 'NotificationType', 'enterprises', 'Counter64', 'iso', 'Counter32', 'ModuleIdentity', 'TimeTicks', 'IpAddress', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'ObjectIdentity', 'Gauge32') (date_and_time, textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'DateAndTime', 'TextualConvention', 'DisplayString') rsInfoModule = module_identity((1, 3, 6, 1, 4, 1, 4355, 6)) rsInfoModule.setRevisions(('2002-11-01 12:00',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: rsInfoModule.setRevisionsDescriptions(('Initial revision.',)) if mibBuilder.loadTexts: rsInfoModule.setLastUpdated('0211011200Z') if mibBuilder.loadTexts: rsInfoModule.setOrganization('WatchGuard Technologies, Inc.') if mibBuilder.loadTexts: rsInfoModule.setContactInfo(' Ella Yu WatchGuard Technologies, Inc. 1841 Zanker Road San Jose, CA 95112 USA 408-519-4888 ella.yu@watchguard.com ') if mibBuilder.loadTexts: rsInfoModule.setDescription('The MIB module describes general information of RapidStream system. 
Mainly, the information obtained from this MIB is used by rsInfoSystemMIB, rsClientMIB, rsSystemStatisticsMIB, rsIpsecTunnelMIB, rsHAMIB.') rsHAMIB = object_identity((1, 3, 6, 1, 4, 1, 4355, 6, 6)) if mibBuilder.loadTexts: rsHAMIB.setStatus('current') if mibBuilder.loadTexts: rsHAMIB.setDescription('This is the base object identifier for all HA related branches.') rsHALocal = object_identity((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1)) if mibBuilder.loadTexts: rsHALocal.setStatus('current') if mibBuilder.loadTexts: rsHALocal.setDescription('This is the base object identifier for all objects which are belong to local appliance.') rsHAPeer = object_identity((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2)) if mibBuilder.loadTexts: rsHAPeer.setStatus('current') if mibBuilder.loadTexts: rsHAPeer.setDescription('This is the base object identifier for all objects which are belong to peer appliance.') rsHAStatus = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 1), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=named_values(('disabled', 0), ('unknown', 1), ('as-primary-active', 2), ('as-secondary-active', 3), ('aa-primary-ative', 4), ('aa-secondary-active', 5), ('aa-primary-takeover', 6), ('aa-secondary-takeover', 7), ('standby', 8), ('admin', 9), ('failed', 10), ('unavailable', 11)))).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAStatus.setStatus('current') if mibBuilder.loadTexts: rsHAStatus.setDescription("Indicates current status of local appliance. disabled: The local appliance of HA system is not enabled. unknown: The local appliance of HA system is in initialization as-primary-active: The local appliance that is the primary appliance of HA/AS system is in active mode. This status is also called MASTER in some systems. as-secondary-active: The local appliance that is the secondary appliance of HA/AS system is in active mode. This status is also called BACKUP in some systems. aa-primary-ative: The local appliance that is the primary appliance of HA/AA system is in active mode. aa-secondary-active: The local appliance that is the secondary appliance of HA/AA system is in active mode. aa-primary-takeover: The local appliance that is the primary appliance of HA/AA system has taken over the peer's duty. aa-secondary-takeover: The local appliance of the secondary appliance of HA/AA system has taken over the peer's duty. standby: The local appliance of HA/AS system is in standby mode. admin: The local appliance of HA system detects an mismatched configuration and waits for system administrator to reslove the conflict. failed: The local appliance of the HA system is down due to forced failover or other reasons. unavailable: It's reported when local appliance of HA system is unabled to get status information. ") rsHAPeerStatus = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2, 3, 4))).clone(namedValues=named_values(('unavailable', 0), ('active', 1), ('standby', 2), ('admin', 3), ('failed', 4)))).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerStatus.setStatus('current') if mibBuilder.loadTexts: rsHAPeerStatus.setDescription("Indicates current status of peer appliance. unavailable: It's reported when peer appliance of HA system is unabled to get status information. active: The peer applicance of HA system is in active mode. standby: The peer applicance of HA system is in standby mode. 
admin: The peer applicance of HA system dectects an mismatched configuration and waits for system administrator to reslove the conflict. failed: The peer appliance of HA system is down due to forced failover or other reasons. ") rsHALastDBSyncTime = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 3), date_and_time()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHALastDBSyncTime.setStatus('current') if mibBuilder.loadTexts: rsHALastDBSyncTime.setDescription('The last DB synchronized time of local appliance.') rsHAError = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 4), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=named_values(('no-error', 0), ('mismatched-ha-id', 1), ('mismatched-software', 2), ('mismatched-database', 3), ('mismatched-hardware', 4), ('forced-fail', 5), ('invalid-ha-role', 6), ('link-down', 7), ('lost-mia-heartbeat', 8), ('mia-not-responding', 9), ('admin-command-failed', 10), ('detect-ha-error', 11), ('unavailable', 12), ('hotsync-failed', 13), ('config-sync-failed', 14)))).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAError.setStatus('current') if mibBuilder.loadTexts: rsHAError.setDescription('Reports the current error that occurred in local appliance .') rsHAPeerError = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 5), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=named_values(('no-error', 0), ('mismatched-ha-id', 1), ('mismatched-software', 2), ('mismatched-database', 3), ('mismatched-hardware', 4), ('forced-fail', 5), ('invalid-ha-role', 6), ('link-down', 7), ('lost-mia-heartbeat', 8), ('mia-not-responding', 9), ('admin-command-failed', 10), ('detect-ha-error', 11), ('unavailable', 12), ('hotsync-failed', 13), ('config-sync-failed', 14)))).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerError.setStatus('current') if mibBuilder.loadTexts: rsHAPeerError.setDescription('Reports the current error that occurred in peer appliance.') rsHAPeerSerialNumber = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 1), octet_string()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerSerialNumber.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSerialNumber.setDescription('The serial number of peer appliance.') rsHAPeerLastDBSyncTime = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 2), date_and_time()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerLastDBSyncTime.setStatus('current') if mibBuilder.loadTexts: rsHAPeerLastDBSyncTime.setDescription('The last DB synchronized time of peer appliance.') rsHAPeerDevice = object_identity((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3)) if mibBuilder.loadTexts: rsHAPeerDevice.setStatus('current') if mibBuilder.loadTexts: rsHAPeerDevice.setDescription('This is the base object for parameters and configuration data of devices in this entity.') rsHAPeerCounters = object_identity((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4)) if mibBuilder.loadTexts: rsHAPeerCounters.setStatus('current') if mibBuilder.loadTexts: rsHAPeerCounters.setDescription('This is the base object for parameters and configuration data of devices in this entity.') rsHAPeerIfNumber = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 1), unsigned32()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerIfNumber.setStatus('current') if mibBuilder.loadTexts: rsHAPeerIfNumber.setDescription('The number of RapidCard installed in this 
entity.') rsHAPeerIfTable = mib_table((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 2)) if mibBuilder.loadTexts: rsHAPeerIfTable.setStatus('current') if mibBuilder.loadTexts: rsHAPeerIfTable.setDescription('A list of RapidCard entries. The number of entries is given by the value of rsHAPeerDeviceNumber.') rsHAPeerIfEntry = mib_table_row((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 2, 1)).setIndexNames((0, 'RAPID-HA-MIB', 'rsHAPeerIfIndex')) if mibBuilder.loadTexts: rsHAPeerIfEntry.setStatus('current') if mibBuilder.loadTexts: rsHAPeerIfEntry.setDescription('A RapidCard entry containing objects for a particular RapidCard.') rsHAPeerIfIndex = mib_table_column((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 2, 1, 1), unsigned32()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerIfIndex.setStatus('current') if mibBuilder.loadTexts: rsHAPeerIfIndex.setDescription('The unique value for each interface.') rsHAPeerIfIpAddr = mib_table_column((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 2, 1, 4), ip_address()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerIfIpAddr.setStatus('current') if mibBuilder.loadTexts: rsHAPeerIfIpAddr.setDescription('The ip address of the interface.') rsHAPeerIfLinkStatus = mib_table_column((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 2, 1, 9), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2))).clone(namedValues=named_values(('down', 0), ('up', 1), ('other', 2)))).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerIfLinkStatus.setStatus('current') if mibBuilder.loadTexts: rsHAPeerIfLinkStatus.setDescription('The current state of the interface.') rsHAPeerSystemCpuUtil = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 1), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil.setDescription('The CPU utilization of the peer system in last 5 seconds.') rsHAPeerSystemTotalSendBytes = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 2), counter64()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerSystemTotalSendBytes.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemTotalSendBytes.setDescription('The total number of bytes sent since peer system is up.') rsHAPeerSystemTotalRecvBytes = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 3), counter64()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerSystemTotalRecvBytes.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemTotalRecvBytes.setDescription('The total number of bytes received since peer system is up.') rsHAPeerSystemTotalSendPackets = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 4), counter64()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerSystemTotalSendPackets.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemTotalSendPackets.setDescription('The total number of packets sent since peer system is up.') rsHAPeerSystemTotalRecvPackets = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 5), counter64()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerSystemTotalRecvPackets.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemTotalRecvPackets.setDescription('The total number of packets received since peer system is up.') rsHAPeerSystemStreamReqTotal = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 6), counter64()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerSystemStreamReqTotal.setStatus('current') if mibBuilder.loadTexts: 
rsHAPeerSystemStreamReqTotal.setDescription('The total number of the connection requests since system is up.') rsHAPeerSystemStreamReqDrop = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 7), counter64()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerSystemStreamReqDrop.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemStreamReqDrop.setDescription('The total number of the connection requests being dropped since system is up.') rsHAPeerSystemCurrIpsecTunnels = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 8), counter64()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerSystemCurrIpsecTunnels.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemCurrIpsecTunnels.setDescription('The number of ipsec tunnels in the peer system currently.') rsHAPeerSystemCpuUtil1 = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 9), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil1.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil1.setDescription('The CPU utilization of the peer system in last 1 minute.') rsHAPeerSystemCpuUtil5 = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 10), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil5.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil5.setDescription('The CPU utilization of the peer system in last 5 minutes.') rsHAPeerSystemCpuUtil15 = mib_scalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 11), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil15.setStatus('current') if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil15.setDescription('The CPU utilization of the peer system in last 15 minutes.') mibBuilder.exportSymbols('RAPID-HA-MIB', rsHAPeerIfNumber=rsHAPeerIfNumber, rsHALocal=rsHALocal, rsHAPeerIfIndex=rsHAPeerIfIndex, rsHAError=rsHAError, rsHAPeerLastDBSyncTime=rsHAPeerLastDBSyncTime, rsInfoModule=rsInfoModule, rsHALastDBSyncTime=rsHALastDBSyncTime, rsHAPeerDevice=rsHAPeerDevice, rsHAPeerSystemStreamReqTotal=rsHAPeerSystemStreamReqTotal, rsHAPeerSystemCpuUtil1=rsHAPeerSystemCpuUtil1, rsHAPeerSystemCpuUtil5=rsHAPeerSystemCpuUtil5, rsHAPeerSystemCpuUtil=rsHAPeerSystemCpuUtil, rsHAPeerStatus=rsHAPeerStatus, rsHAPeer=rsHAPeer, rsHAPeerSystemCurrIpsecTunnels=rsHAPeerSystemCurrIpsecTunnels, rsHAMIB=rsHAMIB, rsHAPeerSystemTotalSendBytes=rsHAPeerSystemTotalSendBytes, rsHAPeerCounters=rsHAPeerCounters, rsHAPeerIfIpAddr=rsHAPeerIfIpAddr, rsHAPeerIfEntry=rsHAPeerIfEntry, rsHAStatus=rsHAStatus, rsHAPeerError=rsHAPeerError, rsHAPeerIfLinkStatus=rsHAPeerIfLinkStatus, PYSNMP_MODULE_ID=rsInfoModule, rsHAPeerSystemCpuUtil15=rsHAPeerSystemCpuUtil15, rsHAPeerSystemTotalRecvBytes=rsHAPeerSystemTotalRecvBytes, rsHAPeerSystemStreamReqDrop=rsHAPeerSystemStreamReqDrop, rsHAPeerSerialNumber=rsHAPeerSerialNumber, rsHAPeerSystemTotalSendPackets=rsHAPeerSystemTotalSendPackets, rsHAPeerSystemTotalRecvPackets=rsHAPeerSystemTotalRecvPackets, rsHAPeerIfTable=rsHAPeerIfTable)
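Both renderings above only define the managed objects; a minimal manager-side sketch for reading rsHAStatus (OID 1.3.6.1.4.1.4355.6.6.1.1) with pysnmp's high-level API follows. The agent address 'demo-host' and community string 'public' are assumptions for illustration.

# Minimal sketch: query rsHAStatus from a hypothetical agent ('demo-host',
# community 'public'); real deployments will use their own target and auth.
from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                          ContextData, ObjectType, ObjectIdentity, getCmd)

error_indication, error_status, error_index, var_binds = next(
    getCmd(SnmpEngine(),
           CommunityData('public'),
           UdpTransportTarget(('demo-host', 161)),
           ContextData(),
           # rsHAStatus: HA status of the local appliance (see MIB above)
           ObjectType(ObjectIdentity('1.3.6.1.4.1.4355.6.6.1.1'))))

if error_indication:
    print(error_indication)
else:
    for name, value in var_binds:
        print(name, '=', value)  # e.g. 2 maps to 'as-primary-active'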
test = { 'name': 'q1c', 'points': 3, 'suites': [ { 'cases': [ { 'code': r""" >>> manhattan_taxi.shape (82800, 9) """, 'hidden': False, 'locked': False }, { 'code': r""" >>> sum(manhattan_taxi['duration']) 54551565 """, 'hidden': False, 'locked': False }, { 'code': r""" >>> manhattan_taxi.iloc[0,:]['duration'] 981 """, 'hidden': False, 'locked': False } ], 'scored': True, 'setup': '', 'teardown': '', 'type': 'doctest' } ] }
test = {'name': 'q1c', 'points': 3, 'suites': [{'cases': [{'code': '\n >>> manhattan_taxi.shape\n (82800, 9)\n ', 'hidden': False, 'locked': False}, {'code': "\n >>> sum(manhattan_taxi['duration'])\n 54551565\n ", 'hidden': False, 'locked': False}, {'code': "\n >>> manhattan_taxi.iloc[0,:]['duration']\n 981\n ", 'hidden': False, 'locked': False}], 'scored': True, 'setup': '', 'teardown': '', 'type': 'doctest'}]}
# Ex056.2 """Develop a program that reads the name, age and sex of four people. At the end of the program, show: The average age of the group, What's the name of the oldest man, How many women are under 20""" total_age = 0 older_man = 0 name_older_man = '' women20_cont = 0 for n in range(1, 4 + 1): print(f'\033[32mPerson number {n}\033[m') name = str(input('What is the name?: ')).title() age = int(input('What is the age?: ')) sex = str(input('What is the sex? [M /F]: ')).upper() total_age += age if sex == 'M' and age > older_man: older_man = age name_older_man = name if sex == 'F' and age < 20: women20_cont += 1 print('\033[31m-\033[m' * 30) average_age = total_age / 4 print(f'The average age of the group is \033[32m{average_age}\033[m') print(f'The name of the oldest man is \033[32m{name_older_man}\033[m with \033[32m{older_man}\033[m years old') print(f'Women under 20: \033[32m{women20_cont}\033[m')
"""Develop a program that reads the name, age and sex of for people. At the end of the program, show: The average age of the group, What's the name of the older man, How many women are under 20""" total_age = 0 older_man = 0 name_over_man = '' women20_cont = 0 for n in range(1, 4 + 1): print(f'\x1b[32mPerson number {n}\x1b[m') name = str(input('What is the name?: ')).title() age = int(input('What is the age?: ')) sex = str(input('What is the sex? [M /F]: ')).upper() total_age += age if sex == 'M' and age > older_man: older_man = age name_older_man = name if sex == 'F' and age < 20: women20_cont += 1 print('\x1b[31m-\x1b[m' * 30) average_age = total_age / 4 print(f'The average age of the group is \x1b[32m{average_age}\x1b[m') print(f'The name of the older man is \x1b[32m{name_older_man}\x1b[m with \x1b[32m{older_man}\x1b[m years old') print(f'Women under 20: \x1b[32m{women20_cont}\x1b[m')
from typing import Tuple def get_date_from_zip(zip_name: str) -> str: """ Helper function to parse a date from a ROM zip's name """ return zip_name.split("-")[-1].split(".")[0] def get_metadata_from_zip(zip_name: str) -> Tuple[str, str, str, str]: """ Helper function to parse some data from ROM zip's name """ data = zip_name.replace(".zip", "").split("-") return data[1], data[2], data[3], data[4]
from typing import Tuple def get_date_from_zip(zip_name: str) -> str: """ Helper function to parse a date from a ROM zip's name """ return zip_name.split('-')[-1].split('.')[0] def get_metadata_from_zip(zip_name: str) -> Tuple[str, str, str, str]: """ Helper function to parse some data from ROM zip's name """ data = zip_name.replace('.zip', '').split('-') return (data[1], data[2], data[3], data[4])
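For illustration, a hypothetical zip name with an assumed rom-device-version-buildtype-date layout shows what each helper returns (the name itself is invented, not from the original):

# Invented example name; the segment layout is an assumption.
name = 'AwesomeROM-bullhead-v11-nightly-20210630.zip'
print(get_date_from_zip(name))      # -> '20210630'
print(get_metadata_from_zip(name))  # -> ('bullhead', 'v11', 'nightly', '20210630')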
filt_dict = { 'db': [['Emissions', '(Kyoto Gases|co2)$', '(|Energy and Industrial Processes|AFOLU)$', '', '(world|r5.*)'], ['Policy cost', '(Additional Total Energy System Cost|consumption Loss|gdp Loss)', '', '', '(world|r5.*)'], ['Price', 'Carbon', '$', '', '(world|r5.*)'], ], }
filt_dict = {'db': [['Emissions', '(Kyoto Gases|co2)$', '(|Energy and Industrial Processes|AFOLU)$', '', '(world|r5.*)'], ['Policy cost', '(Additional Total Energy System Cost|consumption Loss|gdp Loss)', '', '', '(world|r5.*)'], ['Price', 'Carbon', '$', '', '(world|r5.*)']]}
# https://leetcode.com/problems/insert-interval/ # Given a set of non-overlapping intervals, insert a new interval into the # intervals (merge if necessary). # You may assume that the intervals were initially sorted according to their start # times. ################################################################################ # loop over intervals -> merge with newInterval if overlap # append to ans if no overlap from typing import List class Solution: def insert(self, intervals: List[List[int]], newInterval: List[int]) -> List[List[int]]: if not intervals: return [newInterval] ans = [] inserted = False for interval in intervals: if interval[1] < newInterval[0]: # no overlap ans.append(interval) elif interval[0] > newInterval[1]: # no overlap if not inserted: ans.append(newInterval) inserted = True ans.append(interval) else: # overlap, merge intervals newInterval[0] = min(newInterval[0], interval[0]) newInterval[1] = max(newInterval[1], interval[1]) if not inserted: ans.append(newInterval) return ans
from typing import List class Solution: def insert(self, intervals: List[List[int]], newInterval: List[int]) -> List[List[int]]: if not intervals: return [newInterval] ans = [] inserted = False for interval in intervals: if interval[1] < newInterval[0]: ans.append(interval) elif interval[0] > newInterval[1]: if not inserted: ans.append(newInterval) inserted = True ans.append(interval) else: newInterval[0] = min(newInterval[0], interval[0]) newInterval[1] = max(newInterval[1], interval[1]) if not inserted: ans.append(newInterval) return ans
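A quick sanity check of the merge logic on the classic case where the new interval bridges several existing ones (the inputs are illustrative):

# Illustrative check: [4, 8] overlaps [3, 5], [6, 7] and [8, 10].
solution = Solution()
print(solution.insert([[1, 2], [3, 5], [6, 7], [8, 10], [12, 16]], [4, 8]))
# -> [[1, 2], [3, 10], [12, 16]]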
#!/usr/bin/env python3 def merge(L1, L2): L3 = L1+L2 for j in range(len(L3)): for i in range(0, len(L3)-j-1): if L3[i]> L3[i+1]: L3[i],L3[i+1] = L3[i+1] , L3[i] return (L3) def main(): print((merge([1,2,3],[1,6,7]))) pass if __name__ == "__main__": main()
def merge(L1, L2): l3 = L1 + L2 for j in range(len(l3)): for i in range(0, len(l3) - j - 1): if l3[i] > l3[i + 1]: (l3[i], l3[i + 1]) = (l3[i + 1], l3[i]) return l3 def main(): print(merge([1, 2, 3], [1, 6, 7])) pass if __name__ == '__main__': main()
class ControllerBase: @staticmethod def base(): return True
class ControllerBase: @staticmethod def base(): return True
__author__ = 'fatih' class SQL: """ This class holds all of the database commands in use, such as insert, remove and select. All you need to do is write your SQL command and format it into a running command with the given values. """ #insert commands SQL_INSERT_CONFIG = "INSERT INTO apc_config(name, description, ip, radius_config_id, ssid, vlan_id, channel, channel_freq, date_added, date_modified) "\ "VALUES('{0}', '{1}', '{2}', '{3}', '{4}', '{5}', '{6}', '{7}', '{8}', '{9}') RETURNING id;" #(config_name, description, ip, radius_config_id, ssid, vlan_id, channel, channel_freq, date_added, date_modified) SQL_INSERT_DEVICE = "INSERT INTO apc_device(name, ip, config_id, username, password, date_added, date_modified) "\ "VALUES('{0}', '{1}', '{2}', '{3}', '{4}', '{5}', '{6}') RETURNING id;" #(nick, ip, config_id, username, password, date_added, date_modified) SQL_INSERT_GROUP = "INSERT INTO apc_groups(name, config_id) VALUES('{0}','{1}') RETURNING id;" #insert group to the database SQL_INSERT_VLAN_CONFIG = "INSERT INTO apc_vlan(name, ip, subnet, number, interface) " \ "VALUES('{0}','{1}','{2}','{3}','{4}')" #insert vlan config values to the database #remove commands SQL_REMOVE_CONFIG = "DELETE FROM apc_config WHERE name = '{0}';" SQL_REMOVE_DEVICE = "DELETE FROM apc_device WHERE name = '{0}' AND id = {1};" SQL_REMOVE_GROUP = "DELETE FROM apc_groups WHERE name = '{0}' AND id = {1};" SQL_REMOVE_VLAN = "DELETE FROM apc_vlan WHERE name = '{0}' AND id = {1};" SQL_REMOVE_DEVICE_FROM_GROUP = "DELETE FROM apc_device_group WHERE device_id = {0} AND group_id = {1};" #select queries #select device records SQL_SELECT_DEVICE = "SELECT * FROM apc_device d WHERE d.name IS NOT NULL AND d.id = {0};" SQL_SELECT_DEVICE_CONFIG = "SELECT * FROM apc_device d LEFT JOIN apc_config c ON d.config_id = c.id WHERE d.id = {0};" SQL_SELECT_DEVICE_ALL = "SELECT * FROM apc_device AS d WHERE d.name IS NOT NULL ORDER BY DATE(date_added) ASC;" #select config records SQL_SELECT_CONFIG = "SELECT * FROM apc_config c WHERE c.name IS NOT NULL ORDER BY date_added ASC;" SQL_SELECT_CONFIG_DETAIL = "SELECT * FROM apc_config AS c WHERE c.name IS NOT NULL AND c.name IS '{0}';" SQL_SELECT_GROUP_DETAIL = "SELECT * FROM apc_groups g WHERE g.name IS NOT NULL AND g.id = {0};" SQL_SELECT_GROUP_ALL = "SELECT * FROM apc_groups d WHERE d.name IS NOT NULL ORDER BY date_added ASC;" SQL_SELECT_GROUP_DEVICE = "SELECT * FROM apc_device d LEFT JOIN apc_group g ON d.config_id = g.id WHERE d.id = {0};" SQL_SELECT_VLAN = "SELECT * FROM apc_vlan v WHERE v.id IS NOT NULL ORDER BY date_added ASC;" SQL_SELECT_VLAN_DETAIL = "SELECT * FROM apc_config AS c WHERE c.name IS NOT NULL AND c.name IS '{0}';"
__author__ = 'fatih' class Sql: """ This class holds all of the database commands in use, such as insert, remove and select. All you need to do is write your SQL command and format it into a running command with the given values. """ sql_insert_config = "INSERT INTO apc_config(name, description, ip, radius_config_id, ssid, vlan_id, channel, channel_freq, date_added, date_modified) VALUES('{0}', '{1}', '{2}', '{3}', '{4}', '{5}', '{6}', '{7}', '{8}', '{9}') RETURNING id;" sql_insert_device = "INSERT INTO apc_device(name, ip, config_id, username, password, date_added, date_modified) VALUES('{0}', '{1}', '{2}', '{3}', '{4}', '{5}', '{6}') RETURNING id;" sql_insert_group = "INSERT INTO apc_groups(name, config_id) VALUES('{0}','{1}') RETURNING id;" sql_insert_vlan_config = "INSERT INTO apc_vlan(name, ip, subnet, number, interface) VALUES('{0}','{1}','{2}','{3}','{4}')" sql_remove_config = "DELETE FROM apc_config WHERE name = '{0}';" sql_remove_device = "DELETE FROM apc_device WHERE name = '{0}' AND id = {1};" sql_remove_group = "DELETE FROM apc_groups WHERE name = '{0}' AND id = {1};" sql_remove_vlan = "DELETE FROM apc_vlan WHERE name = '{0}' AND id = {1};" sql_remove_device_from_group = 'DELETE FROM apc_device_group WHERE device_id = {0} AND group_id = {1};' sql_select_device = 'SELECT * FROM apc_device d WHERE d.name IS NOT NULL AND d.id = {0};' sql_select_device_config = 'SELECT * FROM apc_device d LEFT JOIN apc_config c ON d.config_id = c.id WHERE d.id = {0};' sql_select_device_all = 'SELECT * FROM apc_device AS d WHERE d.name IS NOT NULL ORDER BY DATE(date_added) ASC;' sql_select_config = 'SELECT * FROM apc_config c WHERE c.name IS NOT NULL ORDER BY date_added ASC;' sql_select_config_detail = "SELECT * FROM apc_config AS c WHERE c.name IS NOT NULL AND c.name IS '{0}';" sql_select_group_detail = 'SELECT * FROM apc_groups g WHERE g.name IS NOT NULL AND g.id = {0};' sql_select_group_all = 'SELECT * FROM apc_groups d WHERE d.name IS NOT NULL ORDER BY date_added ASC;' sql_select_group_device = 'SELECT * FROM apc_device d LEFT JOIN apc_group g ON d.config_id = g.id WHERE d.id = {0};' sql_select_vlan = 'SELECT * FROM apc_vlan v WHERE v.id IS NOT NULL ORDER BY date_added ASC;' sql_select_vlan_detail = "SELECT * FROM apc_config AS c WHERE c.name IS NOT NULL AND c.name IS '{0}';"
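As the docstring says, each command is a template that is turned into a runnable statement with str.format; a minimal sketch with a made-up device id:

# Sketch only: 42 is a made-up device id.
query = Sql.sql_select_device.format(42)
print(query)  # SELECT * FROM apc_device d WHERE d.name IS NOT NULL AND d.id = 42;
# str.format performs no escaping, so real code should prefer the DB driver's
# parameter binding over string formatting to avoid SQL injection.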
''' You are given an array of length n which only contains the elements 0,1 and 2. You are supposed to sort the array in ascending order without the use of any sorting algorithms. Minimize time complexity. Input Format: The first line of input contains the value of n i.e. size of array. The next line contains n space separated integers, each representing an element of the array (0/1/2) Output Format: Print the sorted array as n space separated integers. Constraints: All array elements and n are within integer value limits. Example input: 8 1 0 2 1 2 1 0 0 Example output: [0, 0, 0, 1, 1, 1, 2, 2] Explanation: There are 3 zeroes, 3 ones and 2 twos. Sorted array is printed as space separated integers. ********************************************************************************************************************* ''' #Solution presented here has a time complexity of O(n) and uses a dictionary def arraySort(n, array): mappings = {0:0, 1:0, 2:0} for element in array: count = mappings.get(element) count = count+1 mappings.update({element:count}) arrayIndex = 0 count = mappings.get(0) for i in range(0, count): array[arrayIndex]=0 arrayIndex = arrayIndex+1 count = mappings.get(1) for i in range(0, count): array[arrayIndex]=1 arrayIndex = arrayIndex+1 count = mappings.get(2) for i in range(0, count): array[arrayIndex]=2 arrayIndex = arrayIndex+1 return array #Take input and call the function n = int(input()) array = [int(item) for item in input().split(' ')] array = arraySort(n, array) print(array)
""" You are given an array of length n which only contains the elements 0,1 and 2. You are supposed to sort the array in ascending order without the use of any sorting algorithms. Minimize time complexity. Input Format: The first line of input contains the value of n i.e. size of array. The next line contains n space separated integers, each representing an element of the array (0/1/2) Output Format: Print the sorted array as n space separated integers. Constraints: All array elements and n are within integer value limits. Example input: 8 1 0 2 1 2 1 0 0 Example output: [0, 0, 0, 1, 1, 1, 2, 2] Explanation: There are 3 zeroes, 3 ones and 2 twos. Sorted array is printed as space separated integers. ********************************************************************************************************************* """ def array_sort(n, array): mappings = {0: 0, 1: 0, 2: 0} for element in array: count = mappings.get(element) count = count + 1 mappings.update({element: count}) array_index = 0 count = mappings.get(0) for i in range(0, count): array[arrayIndex] = 0 array_index = arrayIndex + 1 count = mappings.get(1) for i in range(0, count): array[arrayIndex] = 1 array_index = arrayIndex + 1 count = mappings.get(2) for i in range(0, count): array[arrayIndex] = 2 array_index = arrayIndex + 1 return array n = int(input()) array = [int(item) for item in input().split(' ')] array = array_sort(n, array) print(array)
""" author: Akshay Chawla (https://github.com/akshaychawla) TEST:rs Test convert.py's ability to handle Deconvolution and Crop laye by converting voc-fcn8s .prototxt and .caffemodel present in the caffe/models/segmentation folder """ # import os # import inspect # import numpy as np # import keras.caffe.convert as convert # from scipy import misc # import matplotlib.pyplot as plt # from subprocess import call # check whether files are present in folder """ path = os.path.dirname(inspect.getfile(inspect.currentframe())) assert os.path.exists(path + "/deploy.prototxt"), "Err. Couldn't find the debug.prototxt file" assert os.path.exists(path + "/horse.png"), "Err. Couldn't find the horse.png image file" if not os.path.exists(path + "/fcn8s-heavy-pascal.caffemodel"): call(["wget http://dl.caffe.berkeleyvision.org/fcn8s-heavy-pascal.caffemodel -O " + "./" + path + "/fcn8s-heavy-pascal.caffemodel"], shell=True) assert os.path.exists(path + "/fcn8s-heavy-pascal.caffemodel"), "Err. Cannot find .caffemodel file. \ please download file using command : wget http://dl.caffe.berkeleyvision.org/fcn8s-heavy-pascal.caffemodel " model = convert.caffe_to_keras(path + "/deploy.prototxt", path + "/fcn8s-heavy-pascal.caffemodel", debug=1) print ("Yay!") # 1. load image img = misc.imread(path + "/horse.png") # modify it img = np.rollaxis(img, 2) img = np.expand_dims(img, 0) # 2. run forward pass op = model.predict(img) # 3. reshape output op = op[0] op = op.reshape((500, 500, 21)) op_arg = np.argmax(op, axis=2) # 4. plot output plt.imshow(op_arg) plt.show() print ("..done") """
""" author: Akshay Chawla (https://github.com/akshaychawla) TEST:rs Test convert.py's ability to handle Deconvolution and Crop laye by converting voc-fcn8s .prototxt and .caffemodel present in the caffe/models/segmentation folder """ '\npath = os.path.dirname(inspect.getfile(inspect.currentframe()))\nassert os.path.exists(path + "/deploy.prototxt"), "Err. Couldn\'t find the debug.prototxt file"\nassert os.path.exists(path + "/horse.png"), "Err. Couldn\'t find the horse.png image file"\nif not os.path.exists(path + "/fcn8s-heavy-pascal.caffemodel"):\n call(["wget http://dl.caffe.berkeleyvision.org/fcn8s-heavy-pascal.caffemodel -O "\n + "./" + path + "/fcn8s-heavy-pascal.caffemodel"],\n shell=True)\nassert os.path.exists(path + "/fcn8s-heavy-pascal.caffemodel"), "Err. Cannot find .caffemodel file.\tplease download file using command : wget http://dl.caffe.berkeleyvision.org/fcn8s-heavy-pascal.caffemodel "\n\nmodel = convert.caffe_to_keras(path + "/deploy.prototxt", path + "/fcn8s-heavy-pascal.caffemodel", debug=1)\n\nprint ("Yay!")\n\n# 1. load image\nimg = misc.imread(path + "/horse.png")\n\n# modify it\nimg = np.rollaxis(img, 2)\nimg = np.expand_dims(img, 0)\n\n# 2. run forward pass\nop = model.predict(img)\n\n# 3. reshape output\nop = op[0]\nop = op.reshape((500, 500, 21))\nop_arg = np.argmax(op, axis=2)\n\n# 4. plot output\nplt.imshow(op_arg)\nplt.show()\n\nprint ("..done")\n'
class BlocoMemoria: palavra: list endBlock: int atualizado: bool custo: int cacheHit: int ultimoUso: int def __init__(self): self.endBlock = -1 self.atualizado = False self.custo = 0 self.cacheHit = 0 self.ultimoUso = 2**31-1
class BlocoMemoria: palavra: list end_block: int atualizado: bool custo: int cache_hit: int ultimo_uso: int def __init__(self): self.end_block = -1 self.atualizado = False self.custo = 0 self.cache_hit = 0 self.ultimo_uso = 2 ** 31 - 1
AzToolchainInfo = provider( doc = "Azure toolchain rule parameters", fields = [ "az_tool_path", "az_tool_target", "azure_extension_dir", "az_extensions_installed", "jq_tool_path", ], ) AzConfigInfo = provider( fields = [ "debug", "global_args", "subscription", "verbose" ], )
az_toolchain_info = provider(doc='Azure toolchain rule parameters', fields=['az_tool_path', 'az_tool_target', 'azure_extension_dir', 'az_extensions_installed', 'jq_tool_path']) az_config_info = provider(fields=['debug', 'global_args', 'subscription', 'verbose'])
# Copyright 2018- The Pixie Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # SPDX-License-Identifier: Apache-2.0 def _fetch_licenses_impl(ctx): args = ctx.actions.args() args.add("--github_token", ctx.file.oauth_token) args.add("--modules", ctx.file.src) if ctx.attr.use_pkg_dev_go: args.add("--try_pkg_dev_go") if ctx.attr.disallow_missing: args.add("--fatal_if_missing") args.add("--json_manual_input", ctx.file.manual_licenses) args.add("--json_output", ctx.outputs.out_found) args.add("--json_missing_output", ctx.outputs.out_missing) ctx.actions.run( executable = ctx.file.fetch_tool, inputs = [ctx.file.src, ctx.file.oauth_token, ctx.file.manual_licenses], outputs = [ctx.outputs.out_found, ctx.outputs.out_missing], arguments = [args], progress_message = "Fetching licenses %s" % ctx.outputs.out_found, ) fetch_licenses = rule( implementation = _fetch_licenses_impl, attrs = dict({ "disallow_missing": attr.bool(), "fetch_tool": attr.label(mandatory = True, allow_single_file = True), "manual_licenses": attr.label(mandatory = True, allow_single_file = True), "oauth_token": attr.label(mandatory = True, allow_single_file = True), "out_found": attr.output(mandatory = True), "out_missing": attr.output(), "src": attr.label(mandatory = True, allow_single_file = True), "use_pkg_dev_go": attr.bool(), }), )
def _fetch_licenses_impl(ctx): args = ctx.actions.args() args.add('--github_token', ctx.file.oauth_token) args.add('--modules', ctx.file.src) if ctx.attr.use_pkg_dev_go: args.add('--try_pkg_dev_go') if ctx.attr.disallow_missing: args.add('--fatal_if_missing') args.add('--json_manual_input', ctx.file.manual_licenses) args.add('--json_output', ctx.outputs.out_found) args.add('--json_missing_output', ctx.outputs.out_missing) ctx.actions.run(executable=ctx.file.fetch_tool, inputs=[ctx.file.src, ctx.file.oauth_token, ctx.file.manual_licenses], outputs=[ctx.outputs.out_found, ctx.outputs.out_missing], arguments=[args], progress_message='Fetching licenses %s' % ctx.outputs.out_found) fetch_licenses = rule(implementation=_fetch_licenses_impl, attrs=dict({'disallow_missing': attr.bool(), 'fetch_tool': attr.label(mandatory=True, allow_single_file=True), 'manual_licenses': attr.label(mandatory=True, allow_single_file=True), 'oauth_token': attr.label(mandatory=True, allow_single_file=True), 'out_found': attr.output(mandatory=True), 'out_missing': attr.output(), 'src': attr.label(mandatory=True, allow_single_file=True), 'use_pkg_dev_go': attr.bool()}))
package(default_visibility = [ "//visibility:public" ]) load("@io_bazel_rules_dotnet//dotnet:defs.bzl", "net_import_library", "core_import_library") net_import_library( name = "net45", src = "lib/netstandard1.0/System.Threading.Tasks.Extensions.dll", ) core_import_library( name = "netcore", src = "lib/netstandard2.0/System.Threading.Tasks.Extensions.dll" )
package(default_visibility=['//visibility:public']) load('@io_bazel_rules_dotnet//dotnet:defs.bzl', 'net_import_library', 'core_import_library') net_import_library(name='net45', src='lib/netstandard1.0/System.Threading.Tasks.Extensions.dll') core_import_library(name='netcore', src='lib/netstandard2.0/System.Threading.Tasks.Extensions.dll')
class Solution: def repeatedStringMatch(self, A: str, B: str) -> int: if B in A: return 0 counter = 1 repeatedA = A while len(repeatedA) < len(B)*2: repeatedA += A if B in repeatedA: return counter counter += 1 return -1 a = 'abc' b = 'abcs' if 'c' in a: print('yes') else: print('no') # print(not a.index('s'))
class Solution: def repeated_string_match(self, A: str, B: str) -> int: if B in A: return 0 counter = 1 repeated_a = A while len(repeated_a) < len(B) * 2: repeated_a += A if B in repeated_a: return counter counter += 1 return -1 a = 'abc' b = 'abcs' if 'c' in a: print('yes') else: print('no')
WIDTH = 50 HEIGHT = 10 MAX_WIDTH = WIDTH - 2 MAX_HEIGHT = HEIGHT - 2
width = 50 height = 10 max_width = width - 2 max_height = height - 2
# coding: utf-8 class CorpusInterface(object): def load_corpus(self, corpus): pass def read_corpus(self, filename): pass class Corpus(object): def load_corpus(self, corpus): raise NotImplementedError() def read_corpus(self, filename): raise NotImplementedError()
class CorpusInterface(object): def load_corpus(self, corpus): pass def read_corpus(self, filename): pass class Corpus(object): def load_corpus(self, corpus): raise NotImplementedError() def read_corpus(self, filename): raise NotImplementedError()
x = int(input()) for i in range(1, 11): resultado = i * x print("{} x {} = {}".format(i, x, resultado))
x = int(input()) for i in range(1, 11): resultado = i * x print('{} x {} = {}'.format(i, x, resultado))
patches = [ # Rename AWS::Lightsail::Instance.Disk to AWS::Lightsail::Instance.DiskProperty { "op": "move", "from": "/PropertyTypes/AWS::Lightsail::Instance.Disk", "path": "/PropertyTypes/AWS::Lightsail::Instance.DiskProperty", }, { "op": "replace", "path": "/PropertyTypes/AWS::Lightsail::Instance.Hardware/Properties/Disks/ItemType", "value": "DiskProperty", }, # Remove Location and State attribute properties { "op": "remove", "path": "/PropertyTypes/AWS::Lightsail::Instance.Location", }, { "op": "remove", "path": "/PropertyTypes/AWS::Lightsail::Instance.State", }, ]
patches = [{'op': 'move', 'from': '/PropertyTypes/AWS::Lightsail::Instance.Disk', 'path': '/PropertyTypes/AWS::Lightsail::Instance.DiskProperty'}, {'op': 'replace', 'path': '/PropertyTypes/AWS::Lightsail::Instance.Hardware/Properties/Disks/ItemType', 'value': 'DiskProperty'}, {'op': 'remove', 'path': '/PropertyTypes/AWS::Lightsail::Instance.Location'}, {'op': 'remove', 'path': '/PropertyTypes/AWS::Lightsail::Instance.State'}]
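The entries follow RFC 6902 (JSON Patch); a sketch of applying them with the third-party jsonpatch package to an invented, minimal stand-in for the resource specification (the spec contents here are assumptions for illustration):

import jsonpatch  # third-party package; assumed available

# Invented minimal stand-in for the real resource specification.
spec = {'PropertyTypes': {
    'AWS::Lightsail::Instance.Disk': {'Properties': {}},
    'AWS::Lightsail::Instance.Hardware': {'Properties': {'Disks': {'ItemType': 'Disk'}}},
    'AWS::Lightsail::Instance.Location': {},
    'AWS::Lightsail::Instance.State': {},
}}
patched = jsonpatch.apply_patch(spec, patches)
print(sorted(patched['PropertyTypes']))
# -> ['AWS::Lightsail::Instance.DiskProperty', 'AWS::Lightsail::Instance.Hardware']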
maximum = float("-inf") def max_path_sum(root): helper(root) return maximum def helper(root): if not root: return 0 left = helper(root.left) right = helper(root.right) maximum = max(maximum, left+right+root.val) return root.val + max(left, right)
maximum = float('-inf') def max_path_sum(root): helper(root) return maximum def helper(root): global maximum if not root: return 0 left = helper(root.left) right = helper(root.right) maximum = max(maximum, left + right + root.val) return root.val + max(left, right)
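A usage sketch with a minimal hand-rolled tree (the TreeNode class is an assumption, not part of the original): the best path 15 -> 20 -> 7 sums to 42.

# Assumed minimal node type for demonstration purposes.
class TreeNode:
    def __init__(self, val, left=None, right=None):
        self.val, self.left, self.right = val, left, right

root = TreeNode(-10, TreeNode(9), TreeNode(20, TreeNode(15), TreeNode(7)))
print(max_path_sum(root))  # -> 42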
fruit = input() size_set = input() count_sets = float(input()) price_set = 0 if fruit == 'Watermelon': if size_set == 'small': price_set = count_sets * 56 * 2 elif size_set == 'big': price_set = count_sets * 28.7 * 5 elif fruit == 'Mango': if size_set == 'small': price_set = count_sets * 36.66 * 2 elif size_set == 'big': price_set = count_sets * 19.60 * 5 elif fruit == 'Pineapple': if size_set == 'small': price_set = count_sets * 42.1 * 2 elif size_set == 'big': price_set = count_sets * 24.8 * 5 elif fruit == 'Raspberry': if size_set == 'small': price_set = count_sets * 20 * 2 elif size_set == 'big': price_set = count_sets * 15.2 * 5 if 400 <= price_set <= 1000: price_set = price_set * 0.85 elif price_set > 1000: price_set = price_set * 0.5 print(f'{price_set:.2f} lv.')
fruit = input() size_set = input() count_sets = float(input()) price_set = 0 if fruit == 'Watermelon': if size_set == 'small': price_set = count_sets * 56 * 2 elif size_set == 'big': price_set = count_sets * 28.7 * 5 elif fruit == 'Mango': if size_set == 'small': price_set = count_sets * 36.66 * 2 elif size_set == 'big': price_set = count_sets * 19.6 * 5 elif fruit == 'Pineapple': if size_set == 'small': price_set = count_sets * 42.1 * 2 elif size_set == 'big': price_set = count_sets * 24.8 * 5 elif fruit == 'Raspberry': if size_set == 'small': price_set = count_sets * 20 * 2 elif size_set == 'big': price_set = count_sets * 15.2 * 5 if 400 <= price_set <= 1000: price_set = price_set * 0.85 elif price_set > 1000: price_set = price_set * 0.5 print(f'{price_set:.2f} lv.')
def test_socfaker_timestamp_in_the_past(socfaker_fixture): assert socfaker_fixture.timestamp.in_the_past() def test_socfaker_timestamp_in_the_future(socfaker_fixture): assert socfaker_fixture.timestamp.in_the_future() def test_socfaker_timestamp_current(socfaker_fixture): assert socfaker_fixture.timestamp.current def test_socfaker_timestamp_date_string(socfaker_fixture): assert socfaker_fixture.timestamp.date_string
def test_socfaker_timestamp_in_the_past(socfaker_fixture): assert socfaker_fixture.timestamp.in_the_past() def test_socfaker_timestamp_in_the_future(socfaker_fixture): assert socfaker_fixture.timestamp.in_the_future() def test_socfaker_timestamp_current(socfaker_fixture): assert socfaker_fixture.timestamp.current def test_socfaker_timestamp_date_string(socfaker_fixture): assert socfaker_fixture.timestamp.date_string
""" NewsTrader - a framework of news trading for individual investors This package is inspired by many other awesome Python packages """ # Shortcuts for key modules or functions __VERSION__ = "0.0.1"
""" NewsTrader - a framework of news trading for individual investors This package is inspired by many other awesome Python packages """ __version__ = '0.0.1'
releases = [ { "ocid": "A", "id": "1", "date": "2014-01-01", "tag": ["tender"], "tender": { "items": [ { "id": "1", "description": "Item 1", "quantity": 1 }, { "id": "2", "description": "Item 2", "quantity": 1 } ] } }, { "ocid": "A", "id": "2", "date": "2014-01-02", "tag": ["tender"], "tender": { "items": [ { "id": "1", "description": "Item 1", "quantity": 2 }, { "id": "3", "description": "Item 3", "quantity": 1 } ] } } ] compiledRelease = { "ocid": "A", "id": "2", "date": "2014-01-02", "tag": ["compiled"], "tender": { "items": [ { "id": "1", "description": "Item 1", "quantity": 2 }, { "id": "2", "description": "Item 2", "quantity": 1 }, { "id": "3", "description": "Item 3", "quantity": 1 } ] } } versionedRelease = { "ocid": "A", "tender": { "items": [ { "id": "1", "description": [ { "value": "Item 1", "releaseDate": "2014-01-01", "releaseTag": ["tender"], "releaseID": "1" } ], "quantity": [ { "value": 1, "releaseDate": "2014-01-01", "releaseTag": ["tender"], "releaseID": "1" }, { "value": 2, "releaseDate": "2014-01-02", "releaseTag": ["tender"], "releaseID": "2" } ] }, { "id": "2", "description": [ { "value": "Item 2", "releaseDate": "2014-01-01", "releaseTag": ["tender"], "releaseID": "1" } ], "quantity": [ { "value": 1, "releaseDate": "2014-01-01", "releaseTag": ["tender"], "releaseID": "1" }, ] }, { "id": "3", "description": [ { "value": "Item 3", "releaseDate": "2014-01-02", "releaseTag": ["tender"], "releaseID": "2" } ], "quantity": [ { "value": 1, "releaseDate": "2014-01-02", "releaseTag": ["tender"], "releaseID": "2" }, ] } ] } }
releases = [{'ocid': 'A', 'id': '1', 'date': '2014-01-01', 'tag': ['tender'], 'tender': {'items': [{'id': '1', 'description': 'Item 1', 'quantity': 1}, {'id': '2', 'description': 'Item 2', 'quantity': 1}]}}, {'ocid': 'A', 'id': '2', 'date': '2014-01-02', 'tag': ['tender'], 'tender': {'items': [{'id': '1', 'description': 'Item 1', 'quantity': 2}, {'id': '3', 'description': 'Item 3', 'quantity': 1}]}}] compiled_release = {'ocid': 'A', 'id': '2', 'date': '2014-01-02', 'tag': ['compiled'], 'tender': {'items': [{'id': '1', 'description': 'Item 1', 'quantity': 2}, {'id': '2', 'description': 'Item 2', 'quantity': 1}, {'id': '3', 'description': 'Item 3', 'quantity': 1}]}} versioned_release = {'ocid': 'A', 'tender': {'items': [{'id': '1', 'description': [{'value': 'Item 1', 'releaseDate': '2014-01-01', 'releaseTag': ['tender'], 'releaseID': '1'}], 'quantity': [{'value': 1, 'releaseDate': '2014-01-01', 'releaseTag': ['tender'], 'releaseID': '1'}, {'value': 2, 'releaseDate': '2014-01-02', 'releaseTag': ['tender'], 'releaseID': '2'}]}, {'id': '2', 'description': [{'value': 'Item 2', 'releaseDate': '2014-01-01', 'releaseTag': ['tender'], 'releaseID': '1'}], 'quantity': [{'value': 1, 'releaseDate': '2014-01-01', 'releaseTag': ['tender'], 'releaseID': '1'}]}, {'id': '3', 'description': [{'value': 'Item 3', 'releaseDate': '2014-01-02', 'releaseTag': ['tender'], 'releaseID': '2'}], 'quantity': [{'value': 1, 'releaseDate': '2014-01-02', 'releaseTag': ['tender'], 'releaseID': '2'}]}]}}
# -*- coding: utf-8 -*- """ exceptions.py Exceptions raised by the Kite Connect client. :copyright: (c) 2017 by Zerodha Technology. :license: see LICENSE for details. """ class KiteException(Exception): """ Base exception class representing a Kite client exception. Every specific Kite client exception is a subclass of this and exposes two instance variables `.code` (HTTP error code) and `.message` (error text). """ def __init__(self, message, code=500): """Initialize the exception.""" super(KiteException, self).__init__(message) self.code = code class GeneralException(KiteException): """An unclassified, general error. Default code is 500.""" def __init__(self, message, code=500): """Initialize the exception.""" super(GeneralException, self).__init__(message, code) class TokenException(KiteException): """Represents all token and authentication related errors. Default code is 403.""" def __init__(self, message, code=403): """Initialize the exception.""" super(TokenException, self).__init__(message, code) class PermissionException(KiteException): """Represents permission denied exceptions for certain calls. Default code is 403.""" def __init__(self, message, code=403): """Initialize the exception.""" super(PermissionException, self).__init__(message, code) class OrderException(KiteException): """Represents all order placement and manipulation errors. Default code is 500.""" def __init__(self, message, code=500): """Initialize the exception.""" super(OrderException, self).__init__(message, code) class InputException(KiteException): """Represents user input errors such as missing and invalid parameters. Default code is 400.""" def __init__(self, message, code=400): """Initialize the exception.""" super(InputException, self).__init__(message, code) class DataException(KiteException): """Represents a bad response from the backend Order Management System (OMS). Default code is 502.""" def __init__(self, message, code=502): """Initialize the exception.""" super(DataException, self).__init__(message, code) class NetworkException(KiteException): """Represents a network issue between Kite and the backend Order Management System (OMS). Default code is 503.""" def __init__(self, message, code=503): """Initialize the exception.""" super(NetworkException, self).__init__(message, code)
""" exceptions.py Exceptions raised by the Kite Connect client. :copyright: (c) 2017 by Zerodha Technology. :license: see LICENSE for details. """ class Kiteexception(Exception): """ Base exception class representing a Kite client exception. Every specific Kite client exception is a subclass of this and exposes two instance variables `.code` (HTTP error code) and `.message` (error text). """ def __init__(self, message, code=500): """Initialize the exception.""" super(KiteException, self).__init__(message) self.code = code class Generalexception(KiteException): """An unclassified, general error. Default code is 500.""" def __init__(self, message, code=500): """Initialize the exception.""" super(GeneralException, self).__init__(message, code) class Tokenexception(KiteException): """Represents all token and authentication related errors. Default code is 403.""" def __init__(self, message, code=403): """Initialize the exception.""" super(TokenException, self).__init__(message, code) class Permissionexception(KiteException): """Represents permission denied exceptions for certain calls. Default code is 403.""" def __init__(self, message, code=403): """Initialize the exception.""" super(PermissionException, self).__init__(message, code) class Orderexception(KiteException): """Represents all order placement and manipulation errors. Default code is 500.""" def __init__(self, message, code=500): """Initialize the exception.""" super(OrderException, self).__init__(message, code) class Inputexception(KiteException): """Represents user input errors such as missing and invalid parameters. Default code is 400.""" def __init__(self, message, code=400): """Initialize the exception.""" super(InputException, self).__init__(message, code) class Dataexception(KiteException): """Represents a bad response from the backend Order Management System (OMS). Default code is 502.""" def __init__(self, message, code=502): """Initialize the exception.""" super(DataException, self).__init__(message, code) class Networkexception(KiteException): """Represents a network issue between Kite and the backend Order Management System (OMS). Default code is 503.""" def __init__(self, message, code=503): """Initialize the exception.""" super(NetworkException, self).__init__(message, code)
__title__ = "PyMatting" __version__ = "1.1.3" __author__ = "The PyMatting Developers" __email__ = "pymatting@gmail.com" __license__ = "MIT" __uri__ = "https://pymatting.github.io" __summary__ = "Python package for alpha matting."
__title__ = 'PyMatting' __version__ = '1.1.3' __author__ = 'The PyMatting Developers' __email__ = 'pymatting@gmail.com' __license__ = 'MIT' __uri__ = 'https://pymatting.github.io' __summary__ = 'Python package for alpha matting.'
"""Chapter 12 - Be a pythonista""" # vars def dump(func): """Print input arguments and output value(s)""" def wrapped(*args, **kwargs): print("Function name: %s" % func.__name__) print("Input arguments: %s" % ' '.join(map(str, args))) print("Input keyword arguments: %s" % kwargs.items()) output = func(*args, **kwargs) print("Output:", output) return output return wrapped @dump def double(*args, **kwargs): """Double every argument""" output_list = [2 * arg for arg in args] output_dict = {k: 2*v for k, v in kwargs.items()} return output_list, output_dict # pdb def process_cities(filename): with open(filename, 'rt') as file: for line in file: line = line.strip() if 'quit' == line.lower(): return country, city = line.split(',') city = city.strip() country = country.strip() print(city.title(), country.title(), sep=',') def func(*arg, **kwargs): print('vars:', vars()) def main(): """ # working with vars() function func(1, 2, 3) func(['a', 'b', 'argh']) double(3, 5, first=100, next=98.6, last=-40) """ # working with pdb process_cities("cities.csv") if __name__ == '__main__': main()
"""Chapter 12 - Be a pythonista""" def dump(func): """Print input arguments and output value(s)""" def wrapped(*args, **kwargs): print('Function name: %s' % func.__name__) print('Input arguments: %s' % ' '.join(map(str, args))) print('Input keyword arguments: %s' % kwargs.items()) output = func(*args, **kwargs) print('Output:', output) return output return wrapped @dump def double(*args, **kwargs): """Double every argument""" output_list = [2 * arg for arg in args] output_dict = {k: 2 * v for (k, v) in kwargs.items()} return (output_list, output_dict) def process_cities(filename): with open(filename, 'rt') as file: for line in file: line = line.strip() if 'quit' == line.lower(): return (country, city) = line.split(',') city = city.strip() country = country.strip() print(city.title(), country.title(), sep=',') def func(*arg, **kwargs): print('vars:', vars()) def main(): """ # working with vars() function func(1, 2, 3) func(['a', 'b', 'argh']) double(3, 5, first=100, next=98.6, last=-40) """ process_cities('cities.csv') if __name__ == '__main__': main()
class Matrix: def __init__(self, mat): l_size = len(mat[0]) for line in mat: if l_size != len(line): raise ValueError('invalid matrix sizes') self._raw = mat @property def raw(self): return self._raw @property def trace(self): if self.size[0] == self.size[1]: return sum([ self[i][j] for j in range(self.size[0]) for i in range(self.size[1]) ]) else: print('nb lines != nb columns') @property def size(self): return self._raw.__len__(), self._raw[0].__len__() def __str__(self): s = "\n" for l in self._raw: s +='| ' for c in l: s += '{:6.2f} '.format( round(float(c), 3)) s += '|\n' return s def __call__(self, index): return self.col(self, index) @classmethod def col(cls, matrix, index, raw = False): col = [ line[index] for line in matrix._raw ] if raw: return col else: return Vector(col) def transpose(self): return Matrix([ self.col(self, i, True) for i in range(self.size[1]) ]) def __setitem__(self, key, item): if not type(item).__name__ == 'list' or len(item) != self.size[0]: print('invalid assignement') else: self._raw[key] = item def __getitem__(self, key): return Vector(self._raw[key], transpose = True) def __add__(self, other): if type(other).__name__ == 'Matrix' or type(other).__name__ == 'Vector': if self.size[0] == other.size[0] and self.size[1] == other.size[1]: return Matrix([ [ self._raw[i][j] + other._raw[i][j] for j in range(self.size[1])] for i in range(self.size[0])]) else: try: return Matrix([ [ self._raw[i][j] + other for j in range(self.size[1])] for i in range(self.size[0])]) except: print('cannot add') def __sub__(self, other): if type(other).__name__ == 'Matrix' or type(other).__name__ == 'Vector': if self.size[0] == other.size[0] and self.size[1] == other.size[1]: return Matrix([ [ self._raw[i][j] - other._raw[i][j] for j in range(self.size[1])] for i in range(self.size[0])]) else: try: return Matrix([ [ self._raw[i][j] - other for j in range(self.size[1])] for i in range(self.size[0])]) except: print('cannot substract') def __mul__(self, other): if type(other).__name__ == 'Matrix' or type(other).__name__ == 'Vector': if self.size[1] == other.size[0]: # nb c == nb l res = [] for i in range(self.size[0]): res += [[]] for j in range(other.size[1]): res[i] += [sum([m*n for m,n in zip(self._raw[i], self.col(other, j, True))])] return Matrix(res) else: try: return Matrix([ [ self[i][j] * other for j in range(self.size[1])] for i in range(self.size[0])]) except: print('cannot substract') @classmethod def gen(cls, l, c, fill = 0): mat = [[fill for j in range(c)] for i in range(l)] return Matrix(mat) class Vector(Matrix): def __init__(self, vect, transpose = False): self.transposed = transpose super().__init__([vect] if transpose else [ [elem] for elem in vect ] ) @property def raw(self): if self.transposed: return self._raw[0] else: return self.col(self, 0, True) @property def gravity(self): return sum(self.raw) / len(self.raw) def __setitem__(self, key, item): if self.transposed: self._raw[0][key] = item else: self._raw[key][0] = item def __getitem__(self, key): if self.transposed: if type(self._raw[0][key]).__name__ == 'list': return Vector(self._raw[0][key]) else: return self._raw[0][key] else: if type(self._raw[key][0]).__name__ == 'list': return Vector(self._raw[key][0]) else: return self._raw[key][0] @classmethod def gen(cls, l, fill = 0): mat = super().gen(l, 1, fill) return mat(0) # ================================================================== # m = [[1,2,3],[4,5,6],[7,8,9]] # mt = Matrix(m) # print(mt) # print(mt.transpose()) # print(mt.trace) # print(mt.size) # 
mt2 = mt.transpose() # mt3 = Matrix.gen(3,3,9) # print(mt3) # print(mt2 + mt,mt2 - mt, mt2 * mt)
class Matrix: def __init__(self, mat): l_size = len(mat[0]) for line in mat: if l_size != len(line): raise ValueError('invalid matrix sizes') self._raw = mat @property def raw(self): return self._raw @property def trace(self): if self.size[0] == self.size[1]: return sum([self[i][i] for i in range(self.size[0])]) else: print('nb lines != nb columns') @property def size(self): return (self._raw.__len__(), self._raw[0].__len__()) def __str__(self): s = '\n' for l in self._raw: s += '| ' for c in l: s += '{:6.2f} '.format(round(float(c), 3)) s += '|\n' return s def __call__(self, index): return self.col(self, index) @classmethod def col(cls, matrix, index, raw=False): col = [line[index] for line in matrix._raw] if raw: return col else: return Vector(col) def transpose(self): return Matrix([self.col(self, i, True) for i in range(self.size[1])]) def __setitem__(self, key, item): if not type(item).__name__ == 'list' or len(item) != self.size[0]: print('invalid assignment') else: self._raw[key] = item def __getitem__(self, key): return Vector(self._raw[key], transpose=True) def __add__(self, other): if type(other).__name__ == 'Matrix' or type(other).__name__ == 'Vector': if self.size[0] == other.size[0] and self.size[1] == other.size[1]: return Matrix([[self._raw[i][j] + other._raw[i][j] for j in range(self.size[1])] for i in range(self.size[0])]) else: try: return Matrix([[self._raw[i][j] + other for j in range(self.size[1])] for i in range(self.size[0])]) except: print('cannot add') def __sub__(self, other): if type(other).__name__ == 'Matrix' or type(other).__name__ == 'Vector': if self.size[0] == other.size[0] and self.size[1] == other.size[1]: return Matrix([[self._raw[i][j] - other._raw[i][j] for j in range(self.size[1])] for i in range(self.size[0])]) else: try: return Matrix([[self._raw[i][j] - other for j in range(self.size[1])] for i in range(self.size[0])]) except: print('cannot subtract') def __mul__(self, other): if type(other).__name__ == 'Matrix' or type(other).__name__ == 'Vector': if self.size[1] == other.size[0]: res = [] for i in range(self.size[0]): res += [[]] for j in range(other.size[1]): res[i] += [sum([m * n for (m, n) in zip(self._raw[i], self.col(other, j, True))])] return Matrix(res) else: try: return Matrix([[self[i][j] * other for j in range(self.size[1])] for i in range(self.size[0])]) except: print('cannot multiply') @classmethod def gen(cls, l, c, fill=0): mat = [[fill for j in range(c)] for i in range(l)] return Matrix(mat) class Vector(Matrix): def __init__(self, vect, transpose=False): self.transposed = transpose super().__init__([vect] if transpose else [[elem] for elem in vect]) @property def raw(self): if self.transposed: return self._raw[0] else: return self.col(self, 0, True) @property def gravity(self): return sum(self.raw) / len(self.raw) def __setitem__(self, key, item): if self.transposed: self._raw[0][key] = item else: self._raw[key][0] = item def __getitem__(self, key): if self.transposed: if type(self._raw[0][key]).__name__ == 'list': return Vector(self._raw[0][key]) else: return self._raw[0][key] elif type(self._raw[key][0]).__name__ == 'list': return Vector(self._raw[key][0]) else: return self._raw[key][0] @classmethod def gen(cls, l, fill=0): mat = super().gen(l, 1, fill) return mat(0)
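The commented-out demo at the end of the raw row exercises the class; a runnable version of the same checks:

mt = Matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
print(mt)            # rows pretty-printed between '|' bars
print(mt.size)       # (3, 3)
print(mt.trace)      # 15 (1 + 5 + 9)
mt2 = mt.transpose()
print(mt2 + mt)      # element-wise sum
print(mt2 * mt)      # matrix product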
# todo remove in next major release as we no longer support django < 3.2 anyway. Note this would make dj-stripe unusable for django < 3.2 # for django < 3.2 default_app_config = "djstripe.apps.DjstripeAppConfig"
default_app_config = 'djstripe.apps.DjstripeAppConfig'
with open("mynewtextfile.txt","w+") as f: f.writelines("\nOtus we are learning python\nOtus we are learning python\nOtus we are learning python") f.seek(0) print(f.readlines()) print("Is readable:", f.readable()) print("Is writeable:", f.writable()) print("File no:", f.fileno()) print("Is connected to tty-like device:", f.isatty()) f.truncate(20) f.flush() f.seek(0) print(f.readline()) print(f.readline()) print(f.readline()) print(f.readline()) print(f.readline()) f.close()
with open('mynewtextfile.txt', 'w+') as f: f.writelines('\nOtus we are learning python\nOtus we are learning python\nOtus we are learning python') f.seek(0) print(f.readlines()) print('Is readable:', f.readable()) print('Is writeable:', f.writable()) print('File no:', f.fileno()) print('Is connected to tty-like device:', f.isatty()) f.truncate(20) f.flush() f.seek(0) print(f.readline()) print(f.readline()) print(f.readline()) print(f.readline()) print(f.readline()) f.close()
class Solution: def traverse(self, node: TreeNode, deep: int): if node is None: return deep deep += 1 if node.left is None: return self.traverse(node.right, deep) elif node.right is None: return self.traverse(node.left, deep) else: left_deep = self.traverse(node.left, deep) right_deep = self.traverse(node.right, deep) return min(left_deep, right_deep) def XXX(self, root: TreeNode) -> int: return self.traverse(root, 0)
class Solution: def traverse(self, node: TreeNode, deep: int): if node is None: return deep deep += 1 if node.left is None: return self.traverse(node.right, deep) elif node.right is None: return self.traverse(node.left, deep) else: left_deep = self.traverse(node.left, deep) right_deep = self.traverse(node.right, deep) return min(left_deep, right_deep) def xxx(self, root: TreeNode) -> int: return self.traverse(root, 0)
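A quick check of the minimum-depth traversal above, again assuming the usual LeetCode-style TreeNode (hypothetical here):

class TreeNode:
    def __init__(self, val, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right

#      3
#     / \
#    9  20
#       / \
#      15  7
# Shortest root-to-leaf path is 3 -> 9, so the depth is 2.
root = TreeNode(3, TreeNode(9), TreeNode(20, TreeNode(15), TreeNode(7)))
print(Solution().xxx(root))  # 2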
def rotation_saxs(t = 1): #sample = ['Hopper2_AGIB_AuPd_top', 'Hopper2_AGIB_AuPd_mid', 'Hopper2_AGIB_AuPd_bot'] #Change filename sample = ['AGIB3N_1top', 'AGIB3N_1mid', 'AGIB3N_1cen'] #Change filename #y_list = [-6.06, -6.04, -6.02] #hexapod is in mm #y_list = [-10320, -10300, -10280] #SmarAct is um y_list = [4760, 4810, 4860]#, 5210] #SmarAct is um assert len(y_list) == len(sample), f'Number of Y coordinates ({len(y_list)}) is different from number of samples ({len(sample)})' # Detectors, motors: #dets = [pil1M, rayonix, pil300KW] dets = [pil1M, pil300KW] prs_range = [-90, 90, 91] waxs_range = [0, 26, 5] #step of 6.5 degrees det_exposure_time(t,t) #pil_pos_x = [-0.4997, -0.4997 + 4.3, -0.4997 + 4.3, -0.4997] #pil_pos_y = [-59.9987, -59.9987, -59.9987 + 4.3, -59.9987] #waxs_po = np.linspace(20.95, 2.95, 4) for sam, y in zip(sample, y_list): #yield from bps.mv(stage.y, y) #hexapod yield from bps.mv(piezo.y, y) #SmarAct name_fmt = '{sam}' sample_name = name_fmt.format(sam=sam) sample_id(user_name='MK', sample_name=sample_name) print(f'\n\t=== Sample: {sample_name} ===\n') yield from bp.grid_scan(dets, prs, *prs_range, waxs, *waxs_range, 1) sample_id(user_name='test', sample_name='test') det_exposure_time(0.3, 0.3) def rotation_saxs_fast(t = 1): sample = ['AGIB3DR_2fast_top', 'AGIB3DR_2fast_mid', 'AGIB3DR_2fast_cen'] #Change filename y_list = [5150, 5230, 5310] #SmarAct is um assert len(y_list) == len(sample), f'Number of Y coordinates ({len(y_list)}) is different from number of samples ({len(sample)})' # Detectors, motors: dets = [pil1M, pil300KW] prs_range = np.linspace(-90, 90, 91) waxs_range = np.linspace(0, 26, 5) det_exposure_time(t,t) for sam, y in zip(sample, y_list): yield from bps.mv(piezo.y, y) for wa in waxs_range: yield from bps.mv(waxs, wa) for pr in prs_range: yield from bps.mv(prs, pr) name_fmt = '{sam}_wa{waxs}deg_{prs}deg' sample_name = name_fmt.format(sam=sam, waxs='%2.1f'%wa, prs='%3.3d'%pr) sample_id(user_name='MK', sample_name=sample_name) print(f'\n\t=== Sample: {sample_name} ===\n') yield from bp.count(dets, num=1) sample_id(user_name='test', sample_name='test') det_exposure_time(0.3, 0.3) def rotation_saxs_att(t = 1): #attenuated WAXS, so SAXS recorded separately first #sample = ['Disc3_AuPd_top-3', 'Disc3_AuPd_mid-3', 'Disc3_AuPd_bot-3'] #Change filename sample = ['Hopper1_AGIB_AuPd_top','Hopper1_AGIB_AuPd_mid', 'Hopper1_AGIB_AuPd_bot'] #Change filename #y_list = [-6.06, -6.04, -6.02] #hexapod is in mm #y_list = [-10320, -10300, -10280] #SmarAct is um y_list = [-9540, -9520, -9500] #SmarAct is um assert len(y_list) == len(sample), f'Number of Y coordinates ({len(y_list)}) is different from number of samples ({len(sample)})' # Detectors, motors: #dets = [pil1M, rayonix, pil300KW] dets0 = [pil1M] dets = [pil300KW] det_exposure_time(t,t) pil_pos_x = [-0.4997, -0.4997 + 4.3, -0.4997 + 4.3, -0.4997] pil_pos_y = [-59.9987, -59.9987, -59.9987 + 4.3, -59.9987] waxs_po = np.linspace(20.95, 2.95, 4) for sam, y in zip(sample, y_list): #yield from bps.mv(stage.y, y) #hexapod yield from bps.mv(piezo.y, y) #SmarAct yield from bps.mv(waxs, 70) for angle in range(-90, 91, 1): yield from bps.mv(prs, angle) name_fmt = '{sam}_phi{angle}deg' sample_name = name_fmt.format(sam=sam, angle=angle) sample_id(user_name='MK', sample_name=sample_name) print(f'\n\t=== Sample: {sample_name} ===\n') yield from bp.count(dets0, num = 1) yield from bps.mv(att1_5, 'Insert') yield from bps.sleep(1) yield from bps.mv(att1_6, 'Insert') yield from bps.sleep(1) for sam, y in zip(sample, y_list): #yield from bps.mv(stage.y, y) #hexapod yield from bps.mv(piezo.y, y) #SmarAct for i, waxs_pos in enumerate(waxs_po): yield from bps.mv(waxs, waxs_pos) yield from bps.mv(pil1m_pos.x, pil_pos_x[i]) yield from bps.mv(pil1m_pos.y, pil_pos_y[i]) for angle in range(-90, 91, 1): yield from bps.mv(prs, angle) name_fmt = '{sam}_phi{angle}deg_{waxs_pos}deg' sample_name = name_fmt.format(sam=sam, angle=angle, waxs_pos = waxs_pos) sample_id(user_name='MK', sample_name=sample_name) print(f'\n\t=== Sample: {sample_name} ===\n') yield from bp.count(dets, num = 1) sample_id(user_name='test', sample_name='test') det_exposure_time(0.5, 0.5) yield from bps.mv(att1_5, 'Retract') yield from bps.sleep(1) yield from bps.mv(att1_6, 'Retract') yield from bps.sleep(1) yield from bps.mv(pil1m_pos.x, -0.4997) yield from bps.mv(pil1m_pos.y, -59.9987)
def rotation_saxs(t=1): sample = ['AGIB3N_1top', 'AGIB3N_1mid', 'AGIB3N_1cen'] y_list = [4760, 4810, 4860] assert len(y_list) == len(sample), f'Number of Y coordinates ({len(y_list)}) is different from number of samples ({len(sample)})' dets = [pil1M, pil300KW] prs_range = [-90, 90, 91] waxs_range = [0, 26, 5] det_exposure_time(t, t) for (sam, y) in zip(sample, y_list): yield from bps.mv(piezo.y, y) name_fmt = '{sam}' sample_name = name_fmt.format(sam=sam) sample_id(user_name='MK', sample_name=sample_name) print(f'\n\t=== Sample: {sample_name} ===\n') yield from bp.grid_scan(dets, prs, *prs_range, waxs, *waxs_range, 1) sample_id(user_name='test', sample_name='test') det_exposure_time(0.3, 0.3) def rotation_saxs_fast(t=1): sample = ['AGIB3DR_2fast_top', 'AGIB3DR_2fast_mid', 'AGIB3DR_2fast_cen'] y_list = [5150, 5230, 5310] assert len(y_list) == len(sample), f'Number of Y coordinates ({len(y_list)}) is different from number of samples ({len(sample)})' dets = [pil1M, pil300KW] prs_range = np.linspace(-90, 90, 91) waxs_range = np.linspace(0, 26, 5) det_exposure_time(t, t) for (sam, y) in zip(sample, y_list): yield from bps.mv(piezo.y, y) for wa in waxs_range: yield from bps.mv(waxs, wa) for pr in prs_range: yield from bps.mv(prs, pr) name_fmt = '{sam}_wa{waxs}deg_{prs}deg' sample_name = name_fmt.format(sam=sam, waxs='%2.1f' % wa, prs='%3.3d' % pr) sample_id(user_name='MK', sample_name=sample_name) print(f'\n\t=== Sample: {sample_name} ===\n') yield from bp.count(dets, num=1) sample_id(user_name='test', sample_name='test') det_exposure_time(0.3, 0.3) def rotation_saxs_att(t=1): sample = ['Hopper1_AGIB_AuPd_top', 'Hopper1_AGIB_AuPd_mid', 'Hopper1_AGIB_AuPd_bot'] y_list = [-9540, -9520, -9500] assert len(y_list) == len(sample), f'Number of Y coordinates ({len(y_list)}) is different from number of samples ({len(sample)})' dets0 = [pil1M] dets = [pil300KW] det_exposure_time(t, t) pil_pos_x = [-0.4997, -0.4997 + 4.3, -0.4997 + 4.3, -0.4997] pil_pos_y = [-59.9987, -59.9987, -59.9987 + 4.3, -59.9987] waxs_po = np.linspace(20.95, 2.95, 4) for (sam, y) in zip(sample, y_list): yield from bps.mv(piezo.y, y) yield from bps.mv(waxs, 70) for angle in range(-90, 91, 1): yield from bps.mv(prs, angle) name_fmt = '{sam}_phi{angle}deg' sample_name = name_fmt.format(sam=sam, angle=angle) sample_id(user_name='MK', sample_name=sample_name) print(f'\n\t=== Sample: {sample_name} ===\n') yield from bp.count(dets0, num=1) yield from bps.mv(att1_5, 'Insert') yield from bps.sleep(1) yield from bps.mv(att1_6, 'Insert') yield from bps.sleep(1) for (sam, y) in zip(sample, y_list): yield from bps.mv(piezo.y, y) for (i, waxs_pos) in enumerate(waxs_po): yield from bps.mv(waxs, waxs_pos) yield from bps.mv(pil1m_pos.x, pil_pos_x[i]) yield from bps.mv(pil1m_pos.y, pil_pos_y[i]) for angle in range(-90, 91, 1): yield from bps.mv(prs, angle) name_fmt = '{sam}_phi{angle}deg_{waxs_pos}deg' sample_name = name_fmt.format(sam=sam, angle=angle, waxs_pos=waxs_pos) sample_id(user_name='MK', sample_name=sample_name) print(f'\n\t=== Sample: {sample_name} ===\n') yield from bp.count(dets, num=1) sample_id(user_name='test', sample_name='test') det_exposure_time(0.5, 0.5) yield from bps.mv(att1_5, 'Retract') yield from bps.sleep(1) yield from bps.mv(att1_6, 'Retract') yield from bps.sleep(1) yield from bps.mv(pil1m_pos.x, -0.4997) yield from bps.mv(pil1m_pos.y, -59.9987)
def factory(classToInstantiate): def f(*arg): def g(): return classToInstantiate(*arg) return g return f
def factory(class_to_instantiate): def f(*arg): def g(): return class_to_instantiate(*arg) return g return f
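The double closure above defers both the arguments and the construction; a usage sketch with a hypothetical Point class:

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

point_factory = factory(Point)     # binds the class
make_origin = point_factory(0, 0)  # binds the arguments, builds nothing yet
p = make_origin()                  # construction happens here
print(p.x, p.y)  # 0 0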
class EmailBuilder: def __init__(self): self.message = {} def set_from_email(self, email): self.message['from'] = email return self def set_receiver_email(self, email): self.message['receiver'] = email return self def set_cc_emails(self, emails): self.message['cc'] = emails return self def set_attachments(self, attachments): self.message['attachments'] = attachments return self def set_subject(self, subject): self.message['subject'] = subject return self def set_msg(self, message): self.message['message'] = message return self def set_priority(self, priority): self.message['priority'] = priority return self def build(self): return self.message
class EmailBuilder: def __init__(self): self.message = {} def set_from_email(self, email): self.message['from'] = email return self def set_receiver_email(self, email): self.message['receiver'] = email return self def set_cc_emails(self, emails): self.message['cc'] = emails return self def set_attachments(self, attachments): self.message['attachments'] = attachments return self def set_subject(self, subject): self.message['subject'] = subject return self def set_msg(self, message): self.message['message'] = message return self def set_priority(self, priority): self.message['priority'] = priority return self def build(self): return self.message
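With every setter returning self, the builder chains; a usage sketch (the addresses are placeholders):

email = (EmailBuilder()
         .set_from_email('alice@example.com')
         .set_receiver_email('bob@example.com')
         .set_subject('Status report')
         .set_msg('All systems nominal.')
         .build())
print(email['subject'])  # Status report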
# -*- coding: utf-8 -*- # Copyright 2018 ICON Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. TEST_REQUEST_TRANSFER_ICX = { "jsonrpc": "2.0", "method": "icx_sendTransaction", "id": 1234, "params": { "version": "0x3", "from": "hxbe258ceb872e08851f1f59694dac2558708ece11", "to": "hx5bfdb090f43a808005ffc27c25b213145e80b7cd", "value": "0xde0b6b3a7640000", "stepLimit": "0x12345", "timestamp": "0x563a6cf330136", "nid": "0x3f", "nonce": "0x1", "signature": "VAia7YZ2Ji6igKWzjR2YsGa2m53nKPrfK7uXYW78QLE+ATehAVZPC40szvAiA6NEU5gCYB4c4qaQzqDh2ugcHgA=" } } TEST_REQUEST_SCORE_FUNCTION_CALL = { "jsonrpc": "2.0", "method": "icx_sendTransaction", "id": 1234, "params": { "version": "0x3", "from": "hxbe258ceb872e08851f1f59694dac2558708ece11", "to": "cxb0776ee37f5b45bfaea8cff1d8232fbb6122ec32", "stepLimit": "0x12345", "timestamp": "0x563a6cf330136", "nid": "0x3f", "nonce": "0x1", "signature": "VAia7YZ2Ji6igKWzjR2YsGa2m53nKPrfK7uXYW78QLE+ATehAVZPC40szvAiA6NEU5gCYB4c4qaQzqDh2ugcHgA=", "dataType": "call", "data": { "method": "transfer", "params": { "to": "hxab2d8215eab14bc6bdd8bfb2c8151257032ecd8b", "value": "0x1" } } } } TEST_REQUEST_SCORE_ISNTALL = { "jsonrpc": "2.0", "method": "icx_sendTransaction", "id": 1234, "params": { "version": "0x3", "from": "hxbe258ceb872e08851f1f59694dac2558708ece11", "to": "cx0000000000000000000000000000000000000000", "stepLimit": "0x12345", "timestamp": "0x563a6cf330136", "nid": "0x3f", "nonce": "0x1", "signature": "VAia7YZ2Ji6igKWzjR2YsGa2m53nKPrfK7uXYW78QLE+ATehAVZPC40szvAiA6NEU5gCYB4c4qaQzqDh2ugcHgA=", "dataType": "deploy", "data": { "contentType": "application/zip", "content": "0x1867291283973610982301923812873419826abcdef91827319263187263a7326e...", "params": { "name": "ABCToken", "symbol": "abc", "decimals": "0x12" } } } } TEST_REQUEST_SCORE_UPDATE = { "jsonrpc": "2.0", "method": "icx_sendTransaction", "id": 1234, "params": { "version": "0x3", "from": "hxbe258ceb872e08851f1f59694dac2558708ece11", "to": "cxb0776ee37f5b45bfaea8cff1d8232fbb6122ec32", "stepLimit": "0x12345", "timestamp": "0x563a6cf330136", "nid": "0x3f", "nonce": "0x1", "signature": "VAia7YZ2Ji6igKWzjR2YsGa2m53nKPrfK7uXYW78QLE+ATehAVZPC40szvAiA6NEU5gCYB4c4qaQzqDh2ugcHgA=", "dataType": "deploy", "data": { "contentType": "application/zip", "content": "0x1867291283973610982301923812873419826abcdef91827319263187263a7326e...", "params": { "amount": "0x1234" } } } } TEST_REQUEST_SEND_MESSAGE = { "jsonrpc": "2.0", "method": "icx_sendTransaction", "id": 1234, "params": { "version": "0x3", "from": "hxbe258ceb872e08851f1f59694dac2558708ece11", "to": "hxbe258ceb872e08851f1f59694dac2558708ece11", "stepLimit": "0x12345", "timestamp": "0x563a6cf330136", "nid": "0x3f", "nonce": "0x1", "signature": "VAia7YZ2Ji6igKWzjR2YsGa2m53nKPrfK7uXYW78QLE+ATehAVZPC40szvAiA6NEU5gCYB4c4qaQzqDh2ugcHgA=", "dataType": "message", "data": 
"0x4c6f72656d20697073756d20646f6c6f722073697420616d65742c20636f6e7365637465747572206164697069736963696e6720656c69742c2073656420646f20656975736d6f642074656d706f7220696e6369646964756e74207574206c61626f726520657420646f6c6f7265206d61676e6120616c697175612e20557420656e696d206164206d696e696d2076656e69616d2c2071756973206e6f737472756420657865726369746174696f6e20756c6c616d636f206c61626f726973206e69736920757420616c697175697020657820656120636f6d6d6f646f20636f6e7365717561742e2044756973206175746520697275726520646f6c6f7220696e20726570726568656e646572697420696e20766f6c7570746174652076656c697420657373652063696c6c756d20646f6c6f726520657520667567696174206e756c6c612070617269617475722e204578636570746575722073696e74206f6363616563617420637570696461746174206e6f6e2070726f6964656e742c2073756e7420696e2063756c706120717569206f666669636961206465736572756e74206d6f6c6c697420616e696d20696420657374206c61626f72756d2e" } }
test_request_transfer_icx = {'jsonrpc': '2.0', 'method': 'icx_sendTransaction', 'id': 1234, 'params': {'version': '0x3', 'from': 'hxbe258ceb872e08851f1f59694dac2558708ece11', 'to': 'hx5bfdb090f43a808005ffc27c25b213145e80b7cd', 'value': '0xde0b6b3a7640000', 'stepLimit': '0x12345', 'timestamp': '0x563a6cf330136', 'nid': '0x3f', 'nonce': '0x1', 'signature': 'VAia7YZ2Ji6igKWzjR2YsGa2m53nKPrfK7uXYW78QLE+ATehAVZPC40szvAiA6NEU5gCYB4c4qaQzqDh2ugcHgA='}} test_request_score_function_call = {'jsonrpc': '2.0', 'method': 'icx_sendTransaction', 'id': 1234, 'params': {'version': '0x3', 'from': 'hxbe258ceb872e08851f1f59694dac2558708ece11', 'to': 'cxb0776ee37f5b45bfaea8cff1d8232fbb6122ec32', 'stepLimit': '0x12345', 'timestamp': '0x563a6cf330136', 'nid': '0x3f', 'nonce': '0x1', 'signature': 'VAia7YZ2Ji6igKWzjR2YsGa2m53nKPrfK7uXYW78QLE+ATehAVZPC40szvAiA6NEU5gCYB4c4qaQzqDh2ugcHgA=', 'dataType': 'call', 'data': {'method': 'transfer', 'params': {'to': 'hxab2d8215eab14bc6bdd8bfb2c8151257032ecd8b', 'value': '0x1'}}}} test_request_score_isntall = {'jsonrpc': '2.0', 'method': 'icx_sendTransaction', 'id': 1234, 'params': {'version': '0x3', 'from': 'hxbe258ceb872e08851f1f59694dac2558708ece11', 'to': 'cx0000000000000000000000000000000000000000', 'stepLimit': '0x12345', 'timestamp': '0x563a6cf330136', 'nid': '0x3f', 'nonce': '0x1', 'signature': 'VAia7YZ2Ji6igKWzjR2YsGa2m53nKPrfK7uXYW78QLE+ATehAVZPC40szvAiA6NEU5gCYB4c4qaQzqDh2ugcHgA=', 'dataType': 'deploy', 'data': {'contentType': 'application/zip', 'content': '0x1867291283973610982301923812873419826abcdef91827319263187263a7326e...', 'params': {'name': 'ABCToken', 'symbol': 'abc', 'decimals': '0x12'}}}} test_request_score_update = {'jsonrpc': '2.0', 'method': 'icx_sendTransaction', 'id': 1234, 'params': {'version': '0x3', 'from': 'hxbe258ceb872e08851f1f59694dac2558708ece11', 'to': 'cxb0776ee37f5b45bfaea8cff1d8232fbb6122ec32', 'stepLimit': '0x12345', 'timestamp': '0x563a6cf330136', 'nid': '0x3f', 'nonce': '0x1', 'signature': 'VAia7YZ2Ji6igKWzjR2YsGa2m53nKPrfK7uXYW78QLE+ATehAVZPC40szvAiA6NEU5gCYB4c4qaQzqDh2ugcHgA=', 'dataType': 'deploy', 'data': {'contentType': 'application/zip', 'content': '0x1867291283973610982301923812873419826abcdef91827319263187263a7326e...', 'params': {'amount': '0x1234'}}}} test_request_send_message = {'jsonrpc': '2.0', 'method': 'icx_sendTransaction', 'id': 1234, 'params': {'version': '0x3', 'from': 'hxbe258ceb872e08851f1f59694dac2558708ece11', 'to': 'hxbe258ceb872e08851f1f59694dac2558708ece11', 'stepLimit': '0x12345', 'timestamp': '0x563a6cf330136', 'nid': '0x3f', 'nonce': '0x1', 'signature': 'VAia7YZ2Ji6igKWzjR2YsGa2m53nKPrfK7uXYW78QLE+ATehAVZPC40szvAiA6NEU5gCYB4c4qaQzqDh2ugcHgA=', 'dataType': 'message', 'data': 
'0x4c6f72656d20697073756d20646f6c6f722073697420616d65742c20636f6e7365637465747572206164697069736963696e6720656c69742c2073656420646f20656975736d6f642074656d706f7220696e6369646964756e74207574206c61626f726520657420646f6c6f7265206d61676e6120616c697175612e20557420656e696d206164206d696e696d2076656e69616d2c2071756973206e6f737472756420657865726369746174696f6e20756c6c616d636f206c61626f726973206e69736920757420616c697175697020657820656120636f6d6d6f646f20636f6e7365717561742e2044756973206175746520697275726520646f6c6f7220696e20726570726568656e646572697420696e20766f6c7570746174652076656c697420657373652063696c6c756d20646f6c6f726520657520667567696174206e756c6c612070617269617475722e204578636570746575722073696e74206f6363616563617420637570696461746174206e6f6e2070726f6964656e742c2073756e7420696e2063756c706120717569206f666669636961206465736572756e74206d6f6c6c697420616e696d20696420657374206c61626f72756d2e'}}
#! /usr/bin/env python3 def main(): try: name = input('\nHello! What is your name? ') if name: print(f'\nWell, {name}, it is nice to meet you!\n') except: print('\n\nSorry. Something went wrong, please try again.\n') if __name__ == '__main__': main()
def main(): try: name = input('\nHello! What is your name? ') if name: print(f'\nWell, {name}, it is nice to meet you!\n') except: print('\n\nSorry. Something went wrong, please try again.\n') if __name__ == '__main__': main()
meta_pickups={ 'aux_domains': lambda r, common, data: { 'pos': r['head_pos'], 'head': r['head'], 'head_word': r['head_word'], **common, **data}, 'root_domains': lambda r, common, data: {'pos': r['upos'], 'rel': r['rel'], **common, **data}, 'verb_domains': lambda r, common, data: {'pos': r['upos'], 'rel': r['rel'], **common, **data}, 'predicate': lambda r, common, data: { 'pos': r['pos'], 'rel': r['rel'], 'segments':r['segments'] if 'segments' in r else [], **common, **data}, 'subj_domains': lambda r, common, data: { 'pos': r['head_pos'], 'head': r['head'], 'head_word': r['head_word'], **common, **data}, } def build_meta(r, data): # from sagas.conf.conf import cf type_name = r['type'] common = {'lemma': r['lemma'], 'word': r['word'], 'index': r['index'], 'stems': r['stems'], 'domain_type': type_name, } # if 'engine' not in data: # data['engine']=cf.engine(data['lang']) if type_name in meta_pickups: return meta_pickups[type_name](r, common, data) else: return {'rel': r['rel'], **common, **data}
meta_pickups = {'aux_domains': lambda r, common, data: {'pos': r['head_pos'], 'head': r['head'], 'head_word': r['head_word'], **common, **data}, 'root_domains': lambda r, common, data: {'pos': r['upos'], 'rel': r['rel'], **common, **data}, 'verb_domains': lambda r, common, data: {'pos': r['upos'], 'rel': r['rel'], **common, **data}, 'predicate': lambda r, common, data: {'pos': r['pos'], 'rel': r['rel'], 'segments': r['segments'] if 'segments' in r else [], **common, **data}, 'subj_domains': lambda r, common, data: {'pos': r['head_pos'], 'head': r['head'], 'head_word': r['head_word'], **common, **data}} def build_meta(r, data): type_name = r['type'] common = {'lemma': r['lemma'], 'word': r['word'], 'index': r['index'], 'stems': r['stems'], 'domain_type': type_name} if type_name in meta_pickups: return meta_pickups[type_name](r, common, data) else: return {'rel': r['rel'], **common, **data}
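A sketch of build_meta on a hypothetical parse record, showing how the 'predicate' pickup shapes the output:

r = {'type': 'predicate', 'lemma': 'run', 'word': 'runs', 'index': 2,
     'stems': ['run'], 'pos': 'VERB', 'rel': 'root', 'segments': ['runs']}
meta = build_meta(r, {'lang': 'en'})
print(meta['pos'], meta['rel'], meta['domain_type'])  # VERB root predicate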
# # PySNMP MIB module CISCO-WAN-BBIF-ATM-CONN-STAT-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-WAN-BBIF-ATM-CONN-STAT-MIB # Produced by pysmi-0.3.4 at Mon Apr 29 18:03:57 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion") bbChanCntGrp, = mibBuilder.importSymbols("BASIS-MIB", "bbChanCntGrp") ciscoWan, = mibBuilder.importSymbols("CISCOWAN-SMI", "ciscoWan") ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup") MibIdentifier, Counter64, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, Counter32, NotificationType, Unsigned32, TimeTicks, Gauge32, ObjectIdentity, Bits, ModuleIdentity, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "Counter64", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "Counter32", "NotificationType", "Unsigned32", "TimeTicks", "Gauge32", "ObjectIdentity", "Bits", "ModuleIdentity", "Integer32") TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString") ciscoWanBbifAtmConnStatMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 351, 150, 36)) ciscoWanBbifAtmConnStatMIB.setRevisions(('2002-10-18 00:00',)) if mibBuilder.loadTexts: ciscoWanBbifAtmConnStatMIB.setLastUpdated('200210180000Z') if mibBuilder.loadTexts: ciscoWanBbifAtmConnStatMIB.setOrganization('Cisco Systems, Inc.') bbChanCntGrpTable = MibTable((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1), ) if mibBuilder.loadTexts: bbChanCntGrpTable.setStatus('current') bbChanCntGrpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1), ).setIndexNames((0, "CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanCntNum")) if mibBuilder.loadTexts: bbChanCntGrpEntry.setStatus('current') bbChanCntNum = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16, 4111))).setMaxAccess("readonly") if mibBuilder.loadTexts: bbChanCntNum.setStatus('current') bbChanRcvClp0Cells = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 2), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: bbChanRcvClp0Cells.setStatus('current') bbChanRcvClp1Cells = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 3), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: bbChanRcvClp1Cells.setStatus('current') bbChanNonConformCellsAtGcra1Policer = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 4), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: bbChanNonConformCellsAtGcra1Policer.setStatus('current') bbChanNonConformCellsAtGcra2Policer = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 5), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: bbChanNonConformCellsAtGcra2Policer.setStatus('current') bbChanRcvEOFCells = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 6), 
Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: bbChanRcvEOFCells.setStatus('current') bbChanDscdClp0Cells = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 7), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: bbChanDscdClp0Cells.setStatus('current') bbChanDscdClp1Cells = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 8), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: bbChanDscdClp1Cells.setStatus('current') bbChanRcvCellsSent = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 9), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: bbChanRcvCellsSent.setStatus('current') bbChanXmtClp0Cells = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 10), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: bbChanXmtClp0Cells.setStatus('current') bbChanXmtClp1Cells = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 11), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: bbChanXmtClp1Cells.setStatus('current') bbChanDscdClpZeroCellsToPort = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 12), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: bbChanDscdClpZeroCellsToPort.setStatus('current') bbChanDscdClpOneCellsToPort = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 13), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: bbChanDscdClpOneCellsToPort.setStatus('current') bbChanCntClrButton = MibTableColumn((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("noAction", 1), ("resetCounters", 2)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: bbChanCntClrButton.setStatus('current') cwbAtmConnStatMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 351, 150, 36, 2)) cwbAtmConnStatMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 1)) cwbAtmConnStatMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 2)) cwbAtmConnStatCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 2, 1)).setObjects(("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "cwbAtmConnStatsGroup")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cwbAtmConnStatCompliance = cwbAtmConnStatCompliance.setStatus('current') cwbAtmConnStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 1, 1)).setObjects(("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanCntNum"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanRcvClp0Cells"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanRcvClp1Cells"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanNonConformCellsAtGcra1Policer"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanNonConformCellsAtGcra2Policer"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanRcvEOFCells"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanDscdClp0Cells"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanDscdClp1Cells"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanRcvCellsSent"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanXmtClp0Cells"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanXmtClp1Cells"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanDscdClpZeroCellsToPort"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanDscdClpOneCellsToPort"), ("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", "bbChanCntClrButton")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cwbAtmConnStatsGroup = cwbAtmConnStatsGroup.setStatus('current') mibBuilder.exportSymbols("CISCO-WAN-BBIF-ATM-CONN-STAT-MIB", bbChanRcvCellsSent=bbChanRcvCellsSent, 
bbChanRcvClp1Cells=bbChanRcvClp1Cells, bbChanDscdClp0Cells=bbChanDscdClp0Cells, PYSNMP_MODULE_ID=ciscoWanBbifAtmConnStatMIB, bbChanDscdClpOneCellsToPort=bbChanDscdClpOneCellsToPort, bbChanNonConformCellsAtGcra1Policer=bbChanNonConformCellsAtGcra1Policer, cwbAtmConnStatMIBCompliances=cwbAtmConnStatMIBCompliances, bbChanXmtClp1Cells=bbChanXmtClp1Cells, cwbAtmConnStatMIBGroups=cwbAtmConnStatMIBGroups, bbChanRcvEOFCells=bbChanRcvEOFCells, bbChanRcvClp0Cells=bbChanRcvClp0Cells, cwbAtmConnStatsGroup=cwbAtmConnStatsGroup, cwbAtmConnStatMIBConformance=cwbAtmConnStatMIBConformance, bbChanCntClrButton=bbChanCntClrButton, bbChanXmtClp0Cells=bbChanXmtClp0Cells, bbChanCntNum=bbChanCntNum, bbChanDscdClpZeroCellsToPort=bbChanDscdClpZeroCellsToPort, bbChanDscdClp1Cells=bbChanDscdClp1Cells, bbChanCntGrpTable=bbChanCntGrpTable, ciscoWanBbifAtmConnStatMIB=ciscoWanBbifAtmConnStatMIB, bbChanCntGrpEntry=bbChanCntGrpEntry, cwbAtmConnStatCompliance=cwbAtmConnStatCompliance, bbChanNonConformCellsAtGcra2Policer=bbChanNonConformCellsAtGcra2Policer)
(octet_string, object_identifier, integer) = mibBuilder.importSymbols('ASN1', 'OctetString', 'ObjectIdentifier', 'Integer') (named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues') (single_value_constraint, value_range_constraint, value_size_constraint, constraints_intersection, constraints_union) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ValueRangeConstraint', 'ValueSizeConstraint', 'ConstraintsIntersection', 'ConstraintsUnion') (bb_chan_cnt_grp,) = mibBuilder.importSymbols('BASIS-MIB', 'bbChanCntGrp') (cisco_wan,) = mibBuilder.importSymbols('CISCOWAN-SMI', 'ciscoWan') (module_compliance, object_group, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'ObjectGroup', 'NotificationGroup') (mib_identifier, counter64, iso, mib_scalar, mib_table, mib_table_row, mib_table_column, ip_address, counter32, notification_type, unsigned32, time_ticks, gauge32, object_identity, bits, module_identity, integer32) = mibBuilder.importSymbols('SNMPv2-SMI', 'MibIdentifier', 'Counter64', 'iso', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'IpAddress', 'Counter32', 'NotificationType', 'Unsigned32', 'TimeTicks', 'Gauge32', 'ObjectIdentity', 'Bits', 'ModuleIdentity', 'Integer32') (textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString') cisco_wan_bbif_atm_conn_stat_mib = module_identity((1, 3, 6, 1, 4, 1, 351, 150, 36)) cisco_wan_bbif_atm_conn_stat_mib.setRevisions(('2002-10-18 00:00',)) if mibBuilder.loadTexts: cisco_wan_bbif_atm_conn_stat_mib.setLastUpdated('200210180000Z') if mibBuilder.loadTexts: cisco_wan_bbif_atm_conn_stat_mib.setOrganization('Cisco Systems, Inc.') bb_chan_cnt_grp_table = mib_table((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1)) if mibBuilder.loadTexts: bb_chan_cnt_grp_table.setStatus('current') bb_chan_cnt_grp_entry = mib_table_row((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1)).setIndexNames((0, 'CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanCntNum')) if mibBuilder.loadTexts: bb_chan_cnt_grp_entry.setStatus('current') bb_chan_cnt_num = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(16, 4111))).setMaxAccess('readonly') if mibBuilder.loadTexts: bb_chan_cnt_num.setStatus('current') bb_chan_rcv_clp0_cells = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 2), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: bb_chan_rcv_clp0_cells.setStatus('current') bb_chan_rcv_clp1_cells = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 3), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: bb_chan_rcv_clp1_cells.setStatus('current') bb_chan_non_conform_cells_at_gcra1_policer = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 4), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: bb_chan_non_conform_cells_at_gcra1_policer.setStatus('current') bb_chan_non_conform_cells_at_gcra2_policer = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 5), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: bb_chan_non_conform_cells_at_gcra2_policer.setStatus('current') bb_chan_rcv_eof_cells = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 6), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: bb_chan_rcv_eof_cells.setStatus('current') bb_chan_dscd_clp0_cells = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 7), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: bb_chan_dscd_clp0_cells.setStatus('current') bb_chan_dscd_clp1_cells = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 8), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: bb_chan_dscd_clp1_cells.setStatus('current') bb_chan_rcv_cells_sent = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 9), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: bb_chan_rcv_cells_sent.setStatus('current') bb_chan_xmt_clp0_cells = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 10), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: bb_chan_xmt_clp0_cells.setStatus('current') bb_chan_xmt_clp1_cells = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 11), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: bb_chan_xmt_clp1_cells.setStatus('current') bb_chan_dscd_clp_zero_cells_to_port = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 12), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: bb_chan_dscd_clp_zero_cells_to_port.setStatus('current') bb_chan_dscd_clp_one_cells_to_port = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 13), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: bb_chan_dscd_clp_one_cells_to_port.setStatus('current') bb_chan_cnt_clr_button = mib_table_column((1, 3, 6, 1, 4, 1, 351, 110, 5, 2, 7, 3, 1, 1, 14), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('noAction', 1), ('resetCounters', 2)))).setMaxAccess('readwrite') if mibBuilder.loadTexts: bb_chan_cnt_clr_button.setStatus('current') cwb_atm_conn_stat_mib_conformance = mib_identifier((1, 3, 6, 1, 4, 1, 351, 150, 36, 2)) cwb_atm_conn_stat_mib_groups = mib_identifier((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 1)) cwb_atm_conn_stat_mib_compliances = mib_identifier((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 2)) cwb_atm_conn_stat_compliance = module_compliance((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 2, 1)).setObjects(('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'cwbAtmConnStatsGroup')) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cwb_atm_conn_stat_compliance = cwb_atm_conn_stat_compliance.setStatus('current') cwb_atm_conn_stats_group = object_group((1, 3, 6, 1, 4, 1, 351, 150, 36, 2, 1, 1)).setObjects(('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanCntNum'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanRcvClp0Cells'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanRcvClp1Cells'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanNonConformCellsAtGcra1Policer'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanNonConformCellsAtGcra2Policer'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanRcvEOFCells'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanDscdClp0Cells'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanDscdClp1Cells'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanRcvCellsSent'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanXmtClp0Cells'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanXmtClp1Cells'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanDscdClpZeroCellsToPort'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanDscdClpOneCellsToPort'), ('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', 'bbChanCntClrButton')) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cwb_atm_conn_stats_group = cwb_atm_conn_stats_group.setStatus('current') mibBuilder.exportSymbols('CISCO-WAN-BBIF-ATM-CONN-STAT-MIB', bbChanRcvCellsSent=bb_chan_rcv_cells_sent, bbChanRcvClp1Cells=bb_chan_rcv_clp1_cells, bbChanDscdClp0Cells=bb_chan_dscd_clp0_cells, PYSNMP_MODULE_ID=cisco_wan_bbif_atm_conn_stat_mib, bbChanDscdClpOneCellsToPort=bb_chan_dscd_clp_one_cells_to_port, bbChanNonConformCellsAtGcra1Policer=bb_chan_non_conform_cells_at_gcra1_policer, cwbAtmConnStatMIBCompliances=cwb_atm_conn_stat_mib_compliances, bbChanXmtClp1Cells=bb_chan_xmt_clp1_cells, cwbAtmConnStatMIBGroups=cwb_atm_conn_stat_mib_groups, bbChanRcvEOFCells=bb_chan_rcv_eof_cells, bbChanRcvClp0Cells=bb_chan_rcv_clp0_cells, cwbAtmConnStatsGroup=cwb_atm_conn_stats_group, cwbAtmConnStatMIBConformance=cwb_atm_conn_stat_mib_conformance, bbChanCntClrButton=bb_chan_cnt_clr_button, bbChanXmtClp0Cells=bb_chan_xmt_clp0_cells, bbChanCntNum=bb_chan_cnt_num, bbChanDscdClpZeroCellsToPort=bb_chan_dscd_clp_zero_cells_to_port, bbChanDscdClp1Cells=bb_chan_dscd_clp1_cells, bbChanCntGrpTable=bb_chan_cnt_grp_table, ciscoWanBbifAtmConnStatMIB=cisco_wan_bbif_atm_conn_stat_mib, bbChanCntGrpEntry=bb_chan_cnt_grp_entry, cwbAtmConnStatCompliance=cwb_atm_conn_stat_compliance, bbChanNonConformCellsAtGcra2Policer=bb_chan_non_conform_cells_at_gcra2_policer)
things = ['a', 'b', 'c', 'd'] print(things) print(things[1]) things[1] = 'z' print(things[1]) print(things) things = ['a', 'b', 'c', 'd'] print("=" * 50) stuff = {'name' : 'Jinkyu', 'age' : 40, 'height' : 6 * 12 + 2} print(stuff) print(stuff['name']) print(stuff['age']) print(stuff['height']) stuff['city'] = "SF" print(stuff['city']) stuff[1] = "Wow" stuff[2] = "Neato" print(stuff) print(stuff[1]) print(stuff[2]) del stuff['city'] del stuff[1] del stuff[2] print(stuff)
things = ['a', 'b', 'c', 'd'] print(things) print(things[1]) things[1] = 'z' print(things[1]) print(things) things = ['a', 'b', 'c', 'd'] print('=' * 50) stuff = {'name': 'Jinkyu', 'age': 40, 'height': 6 * 12 + 2} print(stuff) print(stuff['name']) print(stuff['age']) print(stuff['height']) stuff['city'] = 'SF' print(stuff['city']) stuff[1] = 'Wow' stuff[2] = 'Neato' print(stuff) print(stuff[1]) print(stuff[2]) del stuff['city'] del stuff[1] del stuff[2] print(stuff)
class Solution(object): def canVisitAllRooms(self, rooms): """ :type rooms: List[List[int]] :rtype: bool """ seen = [False] * len(rooms) seen[0] = True stack = [0, ] while stack: roomIdx = stack.pop() for key in rooms[roomIdx]: if not seen[key]: seen[key] = True stack.append(key) return all(seen)
class Solution(object): def can_visit_all_rooms(self, rooms): """ :type rooms: List[List[int]] :rtype: bool """ seen = [False] * len(rooms) seen[0] = True stack = [0] while stack: room_idx = stack.pop() for key in rooms[room_idx]: if not seen[key]: seen[key] = True stack.append(key) return all(seen)
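A quick sanity check of the DFS above; the two room layouts below are illustrative inputs chosen here, not part of the original snippet.

solver = Solution()
# Room 0 holds the key to room 1, room 1 to room 2, and so on: every room is reachable.
print(solver.can_visit_all_rooms([[1], [2], [3], []]))           # True
# Room 2's only key sits inside room 2 itself, so it can never be opened.
print(solver.can_visit_all_rooms([[1, 3], [3, 0, 1], [2], [0]]))  # False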
# https://leetcode.com/problems/surface-area-of-3d-shapes class Solution: def surfaceArea(self, grid): N = len(grid) ans = 0 for i in range(N): for j in range(N): if grid[i][j] == 0: continue height = grid[i][j] ans += 2 for h in range(1, height + 1): adj = [[i + 1, j], [i - 1, j], [i, j + 1], [i, j - 1]] for a, b in adj: if a < 0 or N <= a or b < 0 or N <= b: ans += 1 else: if h <= grid[a][b]: ans += 0 else: ans += 1 return ans
class Solution: def surface_area(self, grid): n = len(grid) ans = 0 for i in range(n): for j in range(n): if grid[i][j] == 0: continue height = grid[i][j] ans += 2 for h in range(1, height + 1): adj = [[i + 1, j], [i - 1, j], [i, j + 1], [i, j - 1]] for (a, b) in adj: if a < 0 or n <= a or b < 0 or (n <= b): ans += 1 elif h <= grid[a][b]: ans += 0 else: ans += 1 return ans
USERDV = [ "USER_NAME", "USER_USERNAME", "USER_ID", "USER_PIC", "USER_BIO" ] class UserConfig(object): def UserName(self): """returns name of user""" return self.getdv("USER_NAME") or self.USER_NAME or self.name or None def UserUsername(self): """returns username of user""" return self.getdv("USER_USERNAME") or self.USER_USERNAME or self.username or None def UserMention(self): """returns mention of user""" return self.MentionMarkdown(self.UserId(), self.UserName()) if self.UserName() and self.UserId() else None def UserId(self): """returns telegram id of user""" return self.getdv("USER_ID") or self.USER_ID or self.id or None def UserDc(self): """returns telegram dc id of user""" return self.getdv("DC_ID") or self.dc_id or None def UserPic(self): """returns pic of user""" return self.getdv("USER_PIC") or self.USER_PIC or self.pic or None def UserBio(self): """returns bio of user""" return self.getdv("USER_BIO") or self.USER_BIO or self.bio or None
userdv = ['USER_NAME', 'USER_USERNAME', 'USER_ID', 'USER_PIC', 'USER_BIO'] class Userconfig(object): def user_name(self): """returns name of user""" return self.getdv('USER_NAME') or self.USER_NAME or self.name or None def user_username(self): """returns username of user""" return self.getdv('USER_USERNAME') or self.USER_USERNAME or self.username or None def user_mention(self): """returns mention of user""" return self.MentionMarkdown(self.user_id(), self.user_name()) if self.user_name() and self.user_id() else None def user_id(self): """returns telegram id of user""" return self.getdv('USER_ID') or self.USER_ID or self.id or None def user_dc(self): """returns telegram dc id of user""" return self.getdv('DC_ID') or self.dc_id or None def user_pic(self): """returns pic of user""" return self.getdv('USER_PIC') or self.USER_PIC or self.pic or None def user_bio(self): """returns bio of user""" return self.getdv('USER_BIO') or self.USER_BIO or self.bio or None
tempo = int(input()) velocida_media = int(input()) gasto_carro = 12 distancia = velocida_media * tempo print(f'{distancia / gasto_carro:.3f}')
tempo = int(input()) velocida_media = int(input()) gasto_carro = 12 distancia = velocida_media * tempo print(f'{distancia / gasto_carro:.3f}')
rows, cols = [int(n) for n in input().split(", ")] matrix = [] for _ in range(rows): matrix.append([int(n) for n in input().split(" ")]) for j in range(cols): total = 0 for row in matrix: total += row[j] print(total)
(rows, cols) = [int(n) for n in input().split(', ')] matrix = [] for _ in range(rows): matrix.append([int(n) for n in input().split(' ')]) for j in range(cols): total = 0 for row in matrix: total += row[j] print(total)
# Available methods METHODS = { # Dummy for HF "hf": ["hf"], "ricc2": ["rimp2", "rimp3", "rimp4", "ricc2"], # Hardcoded XC-functionals that can be selected from the dft submenu # of define. "dft_hardcoded": [ # Hardcoded in V7.3 "s-vwn", "s-vwn_Gaussian", "pwlda", "b-lyp", "b-vwn", "b-p", "pbe", "tpss", "bh-lyp", "b3-lyp", "b3-lyp_Gaussian", "pbe0", "tpssh", "pw6b95", "m06", "m06-l", "m06-2x", "lhf", "oep", "b97-d", "pbeh-3c", "b97-3c", "lh07t-svwn", "lh07s-svwn", "lh12ct-ssirpw92", "lh12ct-ssifpw92", "lh14t-calpbe", # Hardcoded in V7.4 "cam-b3lyp", # B2PLYP is not easily supported right now as we would need an # additional MP2 calculation from rimp2/ricc2. # "b2-plyp", ], # Shortcuts for XC functionals in V7.4 using LibXC "dft_libxc": [ "wb97", "wb97x", "sogga11", "sogga-11x", "mn12-l", "mn12-sx", "mn15", "mn15-l", "m06-libxc", "cam-b3lyp-libxc", "hse06-libxc", ], } # Available keywords KEYWORDS = { # Resolution of identity "ri": ["rijk", "ri", "marij"], # Dispersion correction "dsp": ["d3", "d3bj"], }
methods = {'hf': ['hf'], 'ricc2': ['rimp2', 'rimp3', 'rimp4', 'ricc2'], 'dft_hardcoded': ['s-vwn', 's-vwn_Gaussian', 'pwlda', 'b-lyp', 'b-vwn', 'b-p', 'pbe', 'tpss', 'bh-lyp', 'b3-lyp', 'b3-lyp_Gaussian', 'pbe0', 'tpssh', 'pw6b95', 'm06', 'm06-l', 'm06-2x', 'lhf', 'oep', 'b97-d', 'pbeh-3c', 'b97-3c', 'lh07t-svwn', 'lh07s-svwn', 'lh12ct-ssirpw92', 'lh12ct-ssifpw92', 'lh14t-calpbe', 'cam-b3lyp'], 'dft_libxc': ['wb97', 'wb97x', 'sogga11', 'sogga-11x', 'mn12-l', 'mn12-sx', 'mn15', 'mn15-l', 'm06-libxc', 'cam-b3lyp-libxc', 'hse06-libxc']} keywords = {'ri': ['rijk', 'ri', 'marij'], 'dsp': ['d3', 'd3bj']}
class Config: ''' General configuration parent class ''' NEWS_API_BASE_URL ='https://newsapi.org/v2/sources?apiKey={}' ARTICLE_API_BASE_URL = 'https://newsapi.org/v2/everything?sources={}&apiKey={}' class ProdConfig(Config): ''' Production configuration child class Args: Config: The parent configuration class with general configuration settings ''' pass class DevConfig(Config): ''' Development configuration child class Args: Config: The parent configuration class with general configuration settings ''' DEBUG = True
class Config: """ General configuration parent class """ news_api_base_url = 'https://newsapi.org/v2/sources?apiKey={}' article_api_base_url = 'https://newsapi.org/v2/everything?sources={}&apiKey={}' class Prodconfig(Config): """ Production configuration child class Args: Config: The parent configuration class with general configuration settings """ pass class Devconfig(Config): """ Development configuration child class Args: Config: The parent configuration class with general configuration settings """ debug = True
def main() -> None: K, X = map(int, input().split()) assert 1 <= K <= 100 assert 1 <= X <= 10**5 if __name__ == '__main__': main()
def main() -> None: (k, x) = map(int, input().split()) assert 1 <= k <= 100 assert 1 <= x <= 10 ** 5 if __name__ == '__main__': main()
# Copyright (c) The PyAMF Project. # See LICENSE.txt for details. """ Remoting tests. @since: 0.1.0 """
""" Remoting tests. @since: 0.1.0 """
# Definition for a binary tree node. class TreeNode: def __init__(self, x): self.val = x self.left = None self.right = None class Solution: def sumNumsR(self, root, s): if root is None: return 0 s = s * 10 + root.val if not root.left and not root.right: return s return self.sumNumsR(root.left, s) + self.sumNumsR(root.right, s) def sumNumbers0(self, root: TreeNode) -> int: if root is None: return 0 return self.sumNumsR(root, 0) def sumNumbersF(self, root: TreeNode) -> int: def sumNumsInR(root, s): if not root: return 0 s = s + root.val if not root.left and not root.right: return s return sumNumsInR(root.left, s * 10) + \ sumNumsInR(root.right, s * 10) return sumNumsInR(root, 0) def sumNumbers(self, root): """ :type root: TreeNode :rtype: int """ self.sum = 0 def dfs(root, pathsum): if root: pathsum += root.val left = dfs(root.left, pathsum * 10) right = dfs(root.right, pathsum * 10) if not left and not right: self.sum += pathsum return True dfs(root, 0) return self.sum
class Treenode: def __init__(self, x): self.val = x self.left = None self.right = None class Solution: def sum_nums_r(self, root, s): if root is None: return 0 s = s * 10 + root.val if not root.left and (not root.right): return s return self.sum_nums_r(root.left, s) + self.sum_nums_r(root.right, s) def sum_numbers0(self, root: Treenode) -> int: if root is None: return 0 return self.sum_nums_r(root, 0) def sum_numbers_f(self, root: Treenode) -> int: def sum_nums_in_r(root, s): if not root: return 0 s = s + root.val if not root.left and (not root.right): return s return sum_nums_in_r(root.left, s * 10) + sum_nums_in_r(root.right, s * 10) return sum_nums_in_r(root, 0) def sum_numbers(self, root): """ :type root: Treenode :rtype: int """ self.sum = 0 def dfs(root, pathsum): if root: pathsum += root.val left = dfs(root.left, pathsum * 10) right = dfs(root.right, pathsum * 10) if not left and (not right): self.sum += pathsum return True dfs(root, 0) return self.sum
# Copyright 2018 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ This template creates a Runtime Configurator with the associated resources. """ def generate_config(context): """ Entry point for the deployment resources. """ resources = [] properties = context.properties project_id = properties.get('projectId', context.env['project']) name = properties.get('config', context.env['name']) parent = 'projects/{}/configs/{}'.format(project_id, name) # The runtimeconfig resource. runtime_config = { 'name': name, 'type': 'runtimeconfig.v1beta1.config', 'properties': { 'config': name, 'description': properties['description'] } } resources.append(runtime_config) # The runtimeconfig variable resources. for variable in properties.get('variables', []): variable['parent'] = parent variable['config'] = name variable_res = { 'name': variable['variable'], 'type': 'variable.py', 'properties': variable } resources.append(variable_res) # The runtimeconfig waiter resources. for waiter in properties.get('waiters', []): waiter['parent'] = parent waiter['config'] = name waiter_res = { 'name': waiter['waiter'], 'type': 'waiter.py', 'properties': waiter } resources.append(waiter_res) outputs = [{'name': 'configName', 'value': '$(ref.{}.name)'.format(name)}] return {'resources': resources, 'outputs': outputs}
""" This template creates a Runtime Configurator with the associated resources. """ def generate_config(context): """ Entry point for the deployment resources. """ resources = [] properties = context.properties project_id = properties.get('projectId', context.env['project']) name = properties.get('config', context.env['name']) parent = 'projects/{}/configs/{}'.format(project_id, name) runtime_config = {'name': name, 'type': 'runtimeconfig.v1beta1.config', 'properties': {'config': name, 'description': properties['description']}} resources.append(runtime_config) for variable in properties.get('variables', []): variable['parent'] = parent variable['config'] = name variable_res = {'name': variable['variable'], 'type': 'variable.py', 'properties': variable} resources.append(variable_res) for waiter in properties.get('waiters', []): waiter['parent'] = parent waiter['config'] = name waiter_res = {'name': waiter['waiter'], 'type': 'waiter.py', 'properties': waiter} resources.append(waiter_res) outputs = [{'name': 'configName', 'value': '$(ref.{}.name)'.format(name)}] return {'resources': resources, 'outputs': outputs}
__all__ = ('ascii_art_title_4client', 'ascii_art_title_4server') ascii_art_title_4client = r""" /$$$$$$ /$$ /$$ /$$__ $$| $$ | $$ /$$$$$$ /$$$$$$$ /$$$$$$ /$$$$$$$ /$$ /$$ /$$$$$$/$$$$ | $$ \__/| $$$$$$$ /$$$$$$ /$$$$$$ |____ $$| $$__ $$ /$$__ $$| $$__ $$| $$ | $$| $$_ $$_ $$| $$ | $$__ $$ |____ $$|_ $$_/ /$$$$$$$| $$ \ $$| $$ \ $$| $$ \ $$| $$ | $$| $$ \ $$ \ $$| $$ | $$ \ $$ /$$$$$$$ | $$ /$$__ $$| $$ | $$| $$ | $$| $$ | $$| $$ | $$| $$ | $$ | $$| $$ $$| $$ | $$ /$$__ $$ | $$ /$$ | $$$$$$$| $$ | $$| $$$$$$/| $$ | $$| $$$$$$$| $$ | $$ | $$| $$$$$$/| $$ | $$| $$$$$$$ | $$$$/ \_______/|__/ |__/ \______/ |__/ |__/ \____ $$|__/ |__/ |__/ \______/ |__/ |__/ \_______/ \___/ /$$ | $$ | $$$$$$/ \______/ """ ascii_art_title_4server = r""" _____ _ _ _____ / __ \ | | | / ___| __ _ _ __ ___ _ __ _ _ _ __ ___ | / \/ |__ __ _| |_ \ `--. ___ _ ____ _____ _ __ / _` | '_ \ / _ \| '_ \| | | | '_ ` _ \| | | '_ \ / _` | __| `--. \/ _ \ '__\ \ / / _ \ '__| | (_| | | | | (_) | | | | |_| | | | | | | \__/\ | | | (_| | |_ /\__/ / __/ | \ V / __/ | \__,_|_| |_|\___/|_| |_|\__, |_| |_| |_|\____/_| |_|\__,_|\__| \____/ \___|_| \_/ \___|_| __/ | |___/ """
__all__ = ('ascii_art_title_4client', 'ascii_art_title_4server') ascii_art_title_4client = '\n /$$$$$$ /$$ /$$ \n /$$__ $$| $$ | $$ \n /$$$$$$ /$$$$$$$ /$$$$$$ /$$$$$$$ /$$ /$$ /$$$$$$/$$$$ | $$ \\__/| $$$$$$$ /$$$$$$ /$$$$$$ \n |____ $$| $$__ $$ /$$__ $$| $$__ $$| $$ | $$| $$_ $$_ $$| $$ | $$__ $$ |____ $$|_ $$_/ \n /$$$$$$$| $$ \\ $$| $$ \\ $$| $$ \\ $$| $$ | $$| $$ \\ $$ \\ $$| $$ | $$ \\ $$ /$$$$$$$ | $$ \n /$$__ $$| $$ | $$| $$ | $$| $$ | $$| $$ | $$| $$ | $$ | $$| $$ $$| $$ | $$ /$$__ $$ | $$ /$$\n| $$$$$$$| $$ | $$| $$$$$$/| $$ | $$| $$$$$$$| $$ | $$ | $$| $$$$$$/| $$ | $$| $$$$$$$ | $$$$/\n \\_______/|__/ |__/ \\______/ |__/ |__/ \\____ $$|__/ |__/ |__/ \\______/ |__/ |__/ \\_______/ \\___/ \n /$$ | $$ \n | $$$$$$/ \n \\______/ \n' ascii_art_title_4server = "\n _____ _ _ _____ \n / __ \\ | | | / ___| \n __ _ _ __ ___ _ __ _ _ _ __ ___ | / \\/ |__ __ _| |_ \\ `--. ___ _ ____ _____ _ __ \n / _` | '_ \\ / _ \\| '_ \\| | | | '_ ` _ \\| | | '_ \\ / _` | __| `--. \\/ _ \\ '__\\ \\ / / _ \\ '__|\n| (_| | | | | (_) | | | | |_| | | | | | | \\__/\\ | | | (_| | |_ /\\__/ / __/ | \\ V / __/ | \n \\__,_|_| |_|\\___/|_| |_|\\__, |_| |_| |_|\\____/_| |_|\\__,_|\\__| \\____/ \\___|_| \\_/ \\___|_| \n __/ | \n |___/\n"
# -*- coding: utf-8 -*- description = "Setup for the LakeShore 340 temperature controller" group = "optional" includes = ["alias_T"] tango_base = "tango://phys.kws3.frm2:10000/kws3" tango_ls340 = tango_base + "/ls340" devices = dict( T_ls340 = device("nicos.devices.entangle.TemperatureController", description = "Temperature regulation", tangodevice = tango_ls340 + "/t_control1", pollinterval = 2, maxage = 5, abslimits = (0, 300), precision = 0.01, ), ls340_heaterrange = device("nicos.devices.entangle.DigitalOutput", description = "Temperature regulation", tangodevice = tango_ls340 + "/t_range1", unit = '', fmtstr = '%d', ), T_ls340_A = device("nicos.devices.entangle.Sensor", description = "Sensor A", tangodevice = tango_ls340 + "/t_sensor1", pollinterval = 2, maxage = 5, ), T_ls340_B = device("nicos.devices.entangle.Sensor", description = "Sensor B", tangodevice = tango_ls340 + "/t_sensor2", pollinterval = 2, maxage = 5, ), T_ls340_C = device("nicos.devices.entangle.Sensor", description = "Sensor C", tangodevice = tango_ls340 + "/t_sensor3", pollinterval = 2, maxage = 5, ), T_ls340_D = device("nicos.devices.entangle.Sensor", description = "Sensor D", tangodevice = tango_ls340 + "/t_sensor4", pollinterval = 2, maxage = 5, ), ) alias_config = { "T": { "T_%s" % setupname: 100 }, "Ts": { "T_%s_A" % setupname: 110, "T_%s_B" % setupname: 100, "T_%s_C" % setupname: 90, "T_%s_D" % setupname: 80, "T_%s" % setupname: 120, }, }
description = 'Setup for the LakeShore 340 temperature controller' group = 'optional' includes = ['alias_T'] tango_base = 'tango://phys.kws3.frm2:10000/kws3' tango_ls340 = tango_base + '/ls340' devices = dict(T_ls340=device('nicos.devices.entangle.TemperatureController', description='Temperature regulation', tangodevice=tango_ls340 + '/t_control1', pollinterval=2, maxage=5, abslimits=(0, 300), precision=0.01), ls340_heaterrange=device('nicos.devices.entangle.DigitalOutput', description='Temperature regulation', tangodevice=tango_ls340 + '/t_range1', unit='', fmtstr='%d'), T_ls340_A=device('nicos.devices.entangle.Sensor', description='Sensor A', tangodevice=tango_ls340 + '/t_sensor1', pollinterval=2, maxage=5), T_ls340_B=device('nicos.devices.entangle.Sensor', description='Sensor B', tangodevice=tango_ls340 + '/t_sensor2', pollinterval=2, maxage=5), T_ls340_C=device('nicos.devices.entangle.Sensor', description='Sensor C', tangodevice=tango_ls340 + '/t_sensor3', pollinterval=2, maxage=5), T_ls340_D=device('nicos.devices.entangle.Sensor', description='Sensor D', tangodevice=tango_ls340 + '/t_sensor4', pollinterval=2, maxage=5)) alias_config = {'T': {'T_%s' % setupname: 100}, 'Ts': {'T_%s_A' % setupname: 110, 'T_%s_B' % setupname: 100, 'T_%s_C' % setupname: 90, 'T_%s_D' % setupname: 80, 'T_%s' % setupname: 120}}
def response(status, message, data, status_code=200): return { "status": status, "message": message, "data": data, }, status_code
def response(status, message, data, status_code=200): return ({'status': status, 'message': message, 'data': data}, status_code)
""" Blocks TODO: * Avoid newline/indent on certain tags, like Blank/Pre: self.__class__.__name__ != "Pre" (necessary?) * Fixed: () popped len(token) twice """ ## +block def indented_block(self): print(f"Indent-dependent {self.tag} block started") start_O_line = self.O.line_number block_indent = self.I.indent_count + 1 if self.offset: self.O.offset -= block_indent #Opening block self.opening_tag() #Main block loop while 1: loop_line = self.I.line_number index, token = self.next_token() if index > 0: self.O.indents(count = self.I.indent_count) self.O.write(self.I.popto(index)) self.I.popto(len(token)) if token: self.routine(token) #refill line if self.I.line == '': try: self.I.readline() except: break #check if next line is in block if loop_line != self.I.line_number: if block_indent > self.I.indent_count: break else: self.O.newline() #check if preceding line was empty: if self.I.empty_line: self.I.empty_line = False self.O.indents(count = 0) self.O.newline() #Closing block if start_O_line != self.O.line_number: self.O.newline() self.O.indents(count = block_indent - 1) self.closing_tag() if self.offset: self.O.offset += block_indent ## @wrapper: def wrapping_block(self): print(f"Wrapping {self.tag} block started") start_O_line = self.O.line_number block_indent = self.I.indent_count if self.offset: self.O.offset -= block_indent #Opening block self.opening_tag() #Main block loop FIX while 1: loop_line = self.I.line_number index, token = self.next_token() if not token and index: self.O.indents(count = self.I.indent_count) self.O.write(self.I.popto(index)) self.I.popto(len(token)) if token: self.routine(token) #refill line if self.I.line == '': try: self.I.readline() except: break #check if next line is in block if loop_line != self.I.line_number: if block_indent > self.I.indent_count: break else: self.O.newline() #check if preceding line was empty: if self.I.empty_line: self.I.empty_line = False self.O.indents(count = 0) self.O.newline() #Closing block if start_O_line != self.O.line_number and self.__class__.__name__ != "Blank": self.O.newline() self.O.indents(count = block_indent) self.closing_tag() if self.offset: self.O.offset += block_indent ## +block() def bracketed_block(self): print(f"Bracketed {self.tag} block started") start_O_line = self.O.line_number block_indent = self.I.indent_count if self.offset: self.O.offset -= block_indent #Opening block self.opening_tag() #Main block loop FIX level = 0 #number of brackets must match while 1: loop_line = self.I.line_number index, token = self.next_token('(', ')') if not token or token == '(' or token == ')': #indent will be added in the token's routine, if needed self.O.indents(count = self.I.indent_count) self.O.write(self.I.popto(index)) self.I.popto(len(token)) if token == '(': self.O.write('(') level += 1 elif token == ')': if level == 0: break #end of block self.O.write(')') level -= 1 else: if token: self.routine(token) if self.I.line.isspace() or self.I.line == '': self.I.readline() #refill line if self.I.line == '': try: self.I.readline() except: break #check if next line is in block REMOVE?! + indent_count?? if loop_line != self.I.line_number: if block_indent > self.I.indent_count: break else: self.O.newline() #check if preceding line was empty: if self.I.empty_line: self.I.empty_line = False self.O.indents(count = 0) self.O.newline() #Closing block self.closing_tag() if self.offset: self.O.offset += block_indent #Selfclosing pseudo-block def selfclosing_block(self): print(f"Selfclosing {self.tag} tag started") self.opening_tag()
""" Blocks TODO: * Avoid newline/indent on certain tags, like Blank/Pre: self.__class__.__name__ != "Pre" (necessary?) * Fixed: () popped len(token) twice """ def indented_block(self): print(f'Indent-dependent {self.tag} block started') start_o_line = self.O.line_number block_indent = self.I.indent_count + 1 if self.offset: self.O.offset -= block_indent self.opening_tag() while 1: loop_line = self.I.line_number (index, token) = self.next_token() if index > 0: self.O.indents(count=self.I.indent_count) self.O.write(self.I.popto(index)) self.I.popto(len(token)) if token: self.routine(token) if self.I.line == '': try: self.I.readline() except: break if loop_line != self.I.line_number: if block_indent > self.I.indent_count: break else: self.O.newline() if self.I.empty_line: self.I.empty_line = False self.O.indents(count=0) self.O.newline() if start_O_line != self.O.line_number: self.O.newline() self.O.indents(count=block_indent - 1) self.closing_tag() if self.offset: self.O.offset += block_indent def wrapping_block(self): print(f'Wrapping {self.tag} block started') start_o_line = self.O.line_number block_indent = self.I.indent_count if self.offset: self.O.offset -= block_indent self.opening_tag() while 1: loop_line = self.I.line_number (index, token) = self.next_token() if not token and index: self.O.indents(count=self.I.indent_count) self.O.write(self.I.popto(index)) self.I.popto(len(token)) if token: self.routine(token) if self.I.line == '': try: self.I.readline() except: break if loop_line != self.I.line_number: if block_indent > self.I.indent_count: break else: self.O.newline() if self.I.empty_line: self.I.empty_line = False self.O.indents(count=0) self.O.newline() if start_O_line != self.O.line_number and self.__class__.__name__ != 'Blank': self.O.newline() self.O.indents(count=block_indent) self.closing_tag() if self.offset: self.O.offset += block_indent def bracketed_block(self): print(f'Bracketed {self.tag} block started') start_o_line = self.O.line_number block_indent = self.I.indent_count if self.offset: self.O.offset -= block_indent self.opening_tag() level = 0 while 1: loop_line = self.I.line_number (index, token) = self.next_token('(', ')') if not token or token == '(' or token == ')': self.O.indents(count=self.I.indent_count) self.O.write(self.I.popto(index)) self.I.popto(len(token)) if token == '(': self.O.write('(') level += 1 elif token == ')': if level == 0: break self.O.write(')') level -= 1 else: if token: self.routine(token) if self.I.line.isspace() or self.I.line == '': self.I.readline() if self.I.line == '': try: self.I.readline() except: break if loop_line != self.I.line_number: if block_indent > self.I.indent_count: break else: self.O.newline() if self.I.empty_line: self.I.empty_line = False self.O.indents(count=0) self.O.newline() self.closing_tag() if self.offset: self.O.offset += block_indent def selfclosing_block(self): print(f'Selfclosing {self.tag} tag started') self.opening_tag()
x = 0 for n in range(10): x = x + 1 assert x == 10
x = 0 for n in range(10): x = x + 1 assert x == 10
#Queue.py #20 Oct 2017 #Written By Amin Dehghan #DS & Algorithms With Python class Queue: def __init__(self): self.items=[] self.frontIdx=0 def __compress(self): newlst=[] for i in range(self.frontIdx,len(self.items)): newlst.append(self.items[i]) self.items=newlst self.frontIdx=0 def dequeue(self): if self.isEmpty(): raise RuntimeError("Attempt to dequeue an empty queue") if self.frontIdx*2 > len(self.items): self.__compress() item=self.items[self.frontIdx] self.frontIdx+=1 return item def enqueue(self,val): self.items.append(val) def front(self): if self.isEmpty(): raise RuntimeError("Attempt to access front of empty queue") return self.items[self.frontIdx] def isEmpty(self): return len(self.items)==self.frontIdx
class Queue: def __init__(self): self.items = [] self.frontIdx = 0 def __compress(self): newlst = [] for i in range(self.frontIdx, len(self.items)): newlst.append(self.items[i]) self.items = newlst self.frontIdx = 0 def dequeue(self): if self.is_empty(): raise RuntimeError('Attempt to dequeue an empty queue') if self.frontIdx * 2 > len(self.items): self.__compress() item = self.items[self.frontIdx] self.frontIdx += 1 return item def enqueue(self, val): self.items.append(val) def front(self): if self.is_empty(): raise RuntimeError('Attempt to access front of empty queue') return self.items[self.frontIdx] def is_empty(self): return len(self.items) == self.frontIdx
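A short exercise of the queue above, assuming the corrected class: dequeue only advances frontIdx, and the backing list is compressed lazily once the front index passes the halfway point.

q = Queue()
for v in (1, 2, 3):
    q.enqueue(v)
print(q.dequeue())   # 1
print(q.front())     # 2
print(q.dequeue())   # 2
print(q.is_empty())  # False; 3 is still queued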
patches = [ { "op": "move", "from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType", "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType", }, { "op": "replace", "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType", "value": "String", }, { "op": "move", "from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType", "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType", }, { "op": "replace", "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType", "value": "String", }, ]
patches = [{'op': 'move', 'from': '/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType', 'path': '/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType'}, {'op': 'replace', 'path': '/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType', 'value': 'String'}, {'op': 'move', 'from': '/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType', 'path': '/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType'}, {'op': 'replace', 'path': '/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType', 'value': 'String'}]
def foo(x): if x == 0: return 1 elif x % 2 == 0: return 2 * x * foo(x - 2) else: return (x - 3) * x * foo(x + 1) print(foo(4))
def foo(x): if x == 0: return 1 elif x % 2 == 0: return 2 * x * foo(x - 2) else: return (x - 3) * x * foo(x + 1) print(foo(4))
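Unfolding the recursion by hand confirms the printed value:

# foo(0) -> 1                    (base case)
# foo(2) -> 2 * 2 * foo(0) = 4   (even branch)
# foo(4) -> 2 * 4 * foo(2) = 32  (even branch)
assert foo(4) == 32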
def diff(n, mid) : if (n > (mid * mid * mid)) : return (n - (mid * mid * mid)) else : return ((mid * mid * mid) - n) # Returns cube root of a no n def cubicRoot(n) : # Set start and end for binary # search start = 0 end = n # Set precision e = 0.0000001 while (True) : mid = (start + end) / 2 error = diff(n, mid) # If error is less than e # then mid is our answer # so return mid if (error <= e) : return mid # If mid*mid*mid is greater # than n set end = mid if ((mid * mid * mid) > n) : end = mid # If mid*mid*mid is less # than n set start = mid else : start = mid # Driver code n = 3 print("Cubic root of", n, "is", round(cubicRoot(n),6))
def diff(n, mid): if n > mid * mid * mid: return n - mid * mid * mid else: return mid * mid * mid - n def cubic_root(n): start = 0 end = n e = 1e-07 while True: mid = (start + end) / 2 error = diff(n, mid) if error <= e: return mid if mid * mid * mid > n: end = mid else: start = mid n = 3 print('Cubic root of', n, 'is', round(cubic_root(n), 6))
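Two spot checks on the bisection routine above; note the tolerance e bounds the error in n - mid**3 rather than in the root itself.

print(round(cubic_root(27), 6))  # 3.0, since 3**3 == 27
print(round(cubic_root(2), 6))   # ~1.259921, since 1.259921**3 ~ 2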
# Base Parameters assets = asset_list('FX') # Trading Parameters horizon = 'H1' pair = 0 # Mass Imports my_data = mass_import(pair, horizon) # Parameters long_ema = 26 short_ema = 12 signal_ema = 9 def ma(Data, lookback, close, where): Data = adder(Data, 1) for i in range(len(Data)): try: Data[i, where] = (Data[i - lookback + 1:i + 1, close].mean()) except IndexError: pass # Cleaning Data = jump(Data, lookback) return Data def ema(Data, alpha, lookback, what, where): alpha = alpha / (lookback + 1.0) beta = 1 - alpha # First value is a simple SMA Data = ma(Data, lookback, what, where) # Calculating first EMA Data[lookback + 1, where] = (Data[lookback + 1, what] * alpha) + (Data[lookback, where] * beta) # Calculating the rest of EMA for i in range(lookback + 2, len(Data)): try: Data[i, where] = (Data[i, what] * alpha) + (Data[i - 1, where] * beta) except IndexError: pass return Data def macd(Data, what, long_ema, short_ema, signal_ema, where): Data = adder(Data, 1) Data = ema(Data, 2, long_ema, what, where) Data = ema(Data, 2, short_ema, what, where + 1) Data[:, where + 2] = Data[:, where + 1] - Data[:, where] Data = jump(Data, long_ema) Data = ema(Data, 2, signal_ema, where + 2, where + 3) Data = deleter(Data, where, 2) Data = jump(Data, signal_ema) return Data def indicator_plot_double_macd(Data, MACD_line, MACD_signal, window = 250): fig, ax = plt.subplots(2, figsize = (8, 5)) Chosen = Data[-window:, ] for i in range(len(Chosen)): ax[0].vlines(x = i, ymin = Chosen[i, 2], ymax = Chosen[i, 1], color = 'black', linewidth = 1) ax[0].grid() for i in range(len(Chosen)): if Chosen[i, MACD_line] > 0: ax[1].vlines(x = i, ymin = 0, ymax = Chosen[i, MACD_line], color = 'green', linewidth = 1) if Chosen[i, MACD_line] < 0: ax[1].vlines(x = i, ymin = Chosen[i, MACD_line], ymax = 0, color = 'red', linewidth = 1) if Chosen[i, MACD_line] == 0: ax[1].vlines(x = i, ymin = Chosen[i, MACD_line], ymax = 0, color = 'black', linewidth = 1) ax[1].grid() ax[1].axhline(y = 0, color = 'black', linewidth = 0.5, linestyle = '--') ax[1].plot(Data[-window:, MACD_signal], color = 'blue', linewidth = 0.75, linestyle = 'dashed') my_data = macd(my_data, 3, 26, 12, 9, 4) indicator_plot_double_macd(my_data, 4, 5, window = 250)
assets = asset_list('FX') horizon = 'H1' pair = 0 my_data = mass_import(pair, horizon) long_ema = 26 short_ema = 12 signal_ema = 9 def ma(Data, lookback, close, where): Data = adder(Data, 1) for i in range(len(Data)): try: Data[i, where] = Data[i - lookback + 1:i + 1, close].mean() except IndexError: pass Data = jump(Data, lookback) return Data def ema(Data, alpha, lookback, what, where): alpha = alpha / (lookback + 1.0) beta = 1 - alpha Data = ma(Data, lookback, what, where) Data[lookback + 1, where] = Data[lookback + 1, what] * alpha + Data[lookback, where] * beta for i in range(lookback + 2, len(Data)): try: Data[i, where] = Data[i, what] * alpha + Data[i - 1, where] * beta except IndexError: pass return Data def macd(Data, what, long_ema, short_ema, signal_ema, where): Data = adder(Data, 1) Data = ema(Data, 2, long_ema, what, where) Data = ema(Data, 2, short_ema, what, where + 1) Data[:, where + 2] = Data[:, where + 1] - Data[:, where] Data = jump(Data, long_ema) Data = ema(Data, 2, signal_ema, where + 2, where + 3) Data = deleter(Data, where, 2) Data = jump(Data, signal_ema) return Data def indicator_plot_double_macd(Data, MACD_line, MACD_signal, window=250): (fig, ax) = plt.subplots(2, figsize=(8, 5)) Chosen = Data[-window:,] for i in range(len(Chosen)): ax[0].vlines(x=i, ymin=Chosen[i, 2], ymax=Chosen[i, 1], color='black', linewidth=1) ax[0].grid() for i in range(len(Chosen)): if Chosen[i, MACD_line] > 0: ax[1].vlines(x=i, ymin=0, ymax=Chosen[i, MACD_line], color='green', linewidth=1) if Chosen[i, MACD_line] < 0: ax[1].vlines(x=i, ymin=Chosen[i, MACD_line], ymax=0, color='red', linewidth=1) if Chosen[i, MACD_line] == 0: ax[1].vlines(x=i, ymin=Chosen[i, MACD_line], ymax=0, color='black', linewidth=1) ax[1].grid() ax[1].axhline(y=0, color='black', linewidth=0.5, linestyle='--') ax[1].plot(Data[-window:, MACD_signal], color='blue', linewidth=0.75, linestyle='dashed') my_data = macd(my_data, 3, 26, 12, 9, 4) indicator_plot_double_macd(my_data, 4, 5, window=250)
x = 'heLLo world' print("Swap the case: " + x.swapcase()) print("Set all case to upper: " + x.upper()) print("Set all case to lower: " + x.lower()) print("Set all case to lower aggressively: " + x.casefold()) print("Set every word\'s first letter to upper: " + x.title()) print("Set the first word\'s first letter to upper in a sentence: " + x.capitalize()) x = x.split() y = '--'.join(x) print(y)
x = 'heLLo world' print('Swap the case: ' + x.swapcase()) print('Set all case to upper: ' + x.upper()) print('Set all case to lower: ' + x.lower()) print('Set all case to lower aggressively: ' + x.casefold()) print("Set every word's first letter to upper: " + x.title()) print("Set the first word's first letter to upper in a sentence: " + x.capitalize()) x = x.split() y = '--'.join(x) print(y)
''' constants for the project ''' TRAIN_LOSS = 0 TRAIN_ACCURACY = 1 VAL_LOSS = 2 VAL_ACCURACY = 3
""" constants for the project """ train_loss = 0 train_accuracy = 1 val_loss = 2 val_accuracy = 3
# -*- coding: utf-8 -*- STATSD_ENABLED = False STATSD_HOST = "localhost" STATSD_PORT = 8125 STATSD_LOG_PERIODIC = True STATSD_LOG_EVERY = 5 STATSD_HANDLER = "scrapy_statsd_extension.handlers.StatsdBase" STATSD_PREFIX = "scrapy" STATSD_LOG_ONLY = [] STATSD_TAGGING = False STATSD_TAGS = {"spider_name": True} STATSD_IGNORE = []
statsd_enabled = False statsd_host = 'localhost' statsd_port = 8125 statsd_log_periodic = True statsd_log_every = 5 statsd_handler = 'scrapy_statsd_extension.handlers.StatsdBase' statsd_prefix = 'scrapy' statsd_log_only = [] statsd_tagging = False statsd_tags = {'spider_name': True} statsd_ignore = []
N, r = map(int, input().split()) for i in range(N): R = int(input()) if R>=r: print('Good boi') else: print('Bad boi')
(n, r) = map(int, input().split()) for i in range(n): rating = int(input()) if rating >= r: print('Good boi') else: print('Bad boi')
# WRITE YOUR SOLUTION HERE: class Employee: def __init__(self, name: str): self.name = name self.subordinates = [] def add_subordinate(self, employee: 'Employee'): self.subordinates.append(employee) def count_subordinates(employee: Employee): count = len(employee.subordinates) if len(employee.subordinates) != 0: for sub_employee in employee.subordinates: count += count_subordinates(sub_employee) return count if __name__ == "__main__": t1 = Employee("Sally") t2 = Employee("Eric") t3 = Employee("Matthew") t4 = Employee("Emily") t5 = Employee("Adele") t6 = Employee("Claire") t1.add_subordinate(t4) t1.add_subordinate(t6) t4.add_subordinate(t2) t4.add_subordinate(t3) t4.add_subordinate(t5) print(count_subordinates(t1)) print(count_subordinates(t4)) print(count_subordinates(t5))
class Employee: def __init__(self, name: str): self.name = name self.subordinates = [] def add_subordinate(self, employee: 'Employee'): self.subordinates.append(employee) def count_subordinates(employee: Employee): count = len(employee.subordinates) if len(employee.subordinates) != 0: for sub_employee in employee.subordinates: count += count_subordinates(sub_employee) return count if __name__ == '__main__': t1 = Employee('Sally') t2 = Employee('Eric') t3 = Employee('Matthew') t4 = Employee('Emily') t5 = Employee('Adele') t6 = Employee('Claire') t1.add_subordinate(t4) t1.add_subordinate(t6) t4.add_subordinate(t2) t4.add_subordinate(t3) t4.add_subordinate(t5) print(count_subordinates(t1)) print(count_subordinates(t4)) print(count_subordinates(t5))
#!/usr/bin/env python3 def palindrome(x): return str(x) == str(x)[::-1] def number_palindrome(n, base): if base == 2: binary = bin(n)[2:] return palindrome(binary) if base == 10: return palindrome(n) return False def double_base_palindrome(x): return number_palindrome(x, 10) and number_palindrome(x, 2) def sum_double_palindrome_numbers_below(limit): return sum((x for x in range(1, limit) if double_base_palindrome(x))) def solve(): return sum_double_palindrome_numbers_below(1000000) if __name__ == '__main__': result = solve() print(result)
def palindrome(x): return str(x) == str(x)[::-1] def number_palindrome(n, base): if base == 2: binary = bin(n)[2:] return palindrome(binary) if base == 10: return palindrome(n) return False def double_base_palindrome(x): return number_palindrome(x, 10) and number_palindrome(x, 2) def sum_double_palindrome_numbers_below(limit): return sum((x for x in range(1, limit) if double_base_palindrome(x))) def solve(): return sum_double_palindrome_numbers_below(1000000) if __name__ == '__main__': result = solve() print(result)
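585 is the classic double-base example for the predicate above: it reads the same both ways in decimal and in binary (0b1001001001), so it contributes to the sum.

print(double_base_palindrome(585))  # True
print(double_base_palindrome(586))  # False: 586 is not a decimal palindrome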
#for request headers headers = { 'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Methods': 'GET', 'Access-Control-Allow-Headers': 'Content-Type', 'Access-Control-Max-Age': '3600', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0' }
headers = {'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Methods': 'GET', 'Access-Control-Allow-Headers': 'Content-Type', 'Access-Control-Max-Age': '3600', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0'}
# Create an array for the points of the line line_points = [ {"x":5, "y":5}, {"x":70, "y":70}, {"x":120, "y":10}, {"x":180, "y":60}, {"x":240, "y":10}] # Create style style_line = lv.style_t() style_line.init() style_line.set_line_width(8) style_line.set_line_color(lv.palette_main(lv.PALETTE.BLUE)) style_line.set_line_rounded(True) # Create a line and apply the new style line1 = lv.line(lv.scr_act()) line1.set_points(line_points, 5) # Set the points line1.add_style(style_line, 0) line1.center()
line_points = [{'x': 5, 'y': 5}, {'x': 70, 'y': 70}, {'x': 120, 'y': 10}, {'x': 180, 'y': 60}, {'x': 240, 'y': 10}] style_line = lv.style_t() style_line.init() style_line.set_line_width(8) style_line.set_line_color(lv.palette_main(lv.PALETTE.BLUE)) style_line.set_line_rounded(True) line1 = lv.line(lv.scr_act()) line1.set_points(line_points, 5) line1.add_style(style_line, 0) line1.center()
# In case it's not obvious, a list comprehension produces a list, but # it doesn't have to be given a list to iterate over. # # You can use a list comprehension with any iterable type, so we'll # write a comprehension to convert dimensions from inches to centimetres. # # Our dimensions will be represented by a tuple, for the length, width and height. # # There are 2.54 centimetres to 1 inch. inch_measurement = (3, 8, 20) cm_measurement = [x * 2.54 for x in inch_measurement] print(cm_measurement) # Once you've got the correct values, change the code to produce a tuple, rather than a list. cm_measurement = [(x, x * 2.54) for x in inch_measurement] print(cm_measurement) cm_measurement = tuple(x * 2.54 for x in inch_measurement) print(cm_measurement)
inch_measurement = (3, 8, 20) cm_measurement = [x * 2.54 for x in inch_measurement] print(cm_measurement) cm_measurement = [(x, x * 2.54) for x in inch_measurement] print(cm_measurement) cm_measurement = tuple((x * 2.54 for x in inch_measurement)) print(cm_measurement)
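One point worth making explicit about the tuple version above: tuple(...) consumes a generator expression, so no intermediate list is built, and the generator is exhausted after one pass. A small illustration reusing inch_measurement:

gen = (x * 2.54 for x in inch_measurement)  # lazy: nothing is computed yet
print(tuple(gen))  # (7.62, 20.32, 50.8)
print(tuple(gen))  # () - the generator was already consumed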
r=s=t=1 #--- I1 print(r + s + t) r=s=t='1' #--- I2 print(r + s + t)
r = s = t = 1 print(r + s + t) r = s = t = '1' print(r + s + t)
"""Genetic Programming in Python, with a scikit-learn inspired API ``gplearn`` is a set of algorithms for learning genetic programming models. """ __version__ = '0.4.dev0' __all__ = ['genetic', 'functions', 'fitness'] print("GPLEARN MOD")
"""Genetic Programming in Python, with a scikit-learn inspired API ``gplearn`` is a set of algorithms for learning genetic programming models. """ __version__ = '0.4.dev0' __all__ = ['genetic', 'functions', 'fitness'] print('GPLEARN MOD')
_base_ = '../../base.py' # model settings model = dict( type='Classification', pretrained=None, backbone=dict( type='ResNet', depth=50, out_indices=[4], # 4: stage-4 norm_cfg=dict(type='BN')), head=dict( type='ClsHead', with_avg_pool=True, in_channels=2048, num_classes=10)) # dataset settings data_source_cfg = dict(type='Cifar10', root='/root/data/zq/data/cifar/') dataset_type = 'ClassificationDataset' img_norm_cfg = dict(mean=[0.4914, 0.4822, 0.4465], std=[0.2023, 0.1994, 0.201]) train_pipeline = [ dict(type='RandomCrop', size=32, padding=4), dict(type='RandomHorizontalFlip'), dict(type='ToTensor'), dict(type='Normalize', **img_norm_cfg), ] test_pipeline = [ dict(type='ToTensor'), dict(type='Normalize', **img_norm_cfg), ] data = dict( imgs_per_gpu=128, workers_per_gpu=2, train=dict( type=dataset_type, data_source=dict(split='train', **data_source_cfg), pipeline=train_pipeline), val=dict( type=dataset_type, data_source=dict(split='test', **data_source_cfg), pipeline=test_pipeline), test=dict( type=dataset_type, data_source=dict(split='test', **data_source_cfg), pipeline=test_pipeline)) # additional hooks custom_hooks = [ dict( type='ValidateHook', dataset=data['val'], initial=True, interval=10, imgs_per_gpu=128, workers_per_gpu=8, eval_param=dict(topk=(1, 5))) ] # optimizer optimizer = dict(type='SGD', lr=0.1, momentum=0.9, weight_decay=0.0005) # learning policy lr_config = dict(policy='step', step=[150, 250]) checkpoint_config = dict(interval=50) # runtime settings total_epochs = 350
_base_ = '../../base.py' model = dict(type='Classification', pretrained=None, backbone=dict(type='ResNet', depth=50, out_indices=[4], norm_cfg=dict(type='BN')), head=dict(type='ClsHead', with_avg_pool=True, in_channels=2048, num_classes=10)) data_source_cfg = dict(type='Cifar10', root='/root/data/zq/data/cifar/') dataset_type = 'ClassificationDataset' img_norm_cfg = dict(mean=[0.4914, 0.4822, 0.4465], std=[0.2023, 0.1994, 0.201]) train_pipeline = [dict(type='RandomCrop', size=32, padding=4), dict(type='RandomHorizontalFlip'), dict(type='ToTensor'), dict(type='Normalize', **img_norm_cfg)] test_pipeline = [dict(type='ToTensor'), dict(type='Normalize', **img_norm_cfg)] data = dict(imgs_per_gpu=128, workers_per_gpu=2, train=dict(type=dataset_type, data_source=dict(split='train', **data_source_cfg), pipeline=train_pipeline), val=dict(type=dataset_type, data_source=dict(split='test', **data_source_cfg), pipeline=test_pipeline), test=dict(type=dataset_type, data_source=dict(split='test', **data_source_cfg), pipeline=test_pipeline)) custom_hooks = [dict(type='ValidateHook', dataset=data['val'], initial=True, interval=10, imgs_per_gpu=128, workers_per_gpu=8, eval_param=dict(topk=(1, 5)))] optimizer = dict(type='SGD', lr=0.1, momentum=0.9, weight_decay=0.0005) lr_config = dict(policy='step', step=[150, 250]) checkpoint_config = dict(interval=50) total_epochs = 350
class BruteForceProtectionException(Exception): pass class BruteForceProtectionBanException(BruteForceProtectionException): pass class BruteForceProtectionCaptchaException(BruteForceProtectionException): pass
class Bruteforceprotectionexception(Exception): pass class Bruteforceprotectionbanexception(Bruteforceprotectionexception): pass class Bruteforceprotectioncaptchaexception(Bruteforceprotectionexception): pass
string = "John Doe lives at 221B Baker Street." pattern = re.compile(r""" ([a-zA-Z ]+) # Save as many letters and spaces as possible to group 1 \ lives\ at\ # Match " lives at " (?P<address>.*) # Save everything in between as a group named `address` \. # Match the period at the end """, re.VERBOSE) new_string = re.sub(pattern, r"\g<address> is occupied by \1.", string) print("New string is '{0}'".format(new_string))
import re string = 'John Doe lives at 221B Baker Street.' pattern = re.compile('\n ([a-zA-Z ]+) # Save as many letters and spaces as possible to group 1\n \\ lives\\ at\\ # Match " lives at "\n (?P<address>.*) # Save everything in between as a group named `address`\n \\. # Match the period at the end\n', re.VERBOSE) new_string = re.sub(pattern, '\\g<address> is occupied by \\1.', string) print("New string is '{0}'".format(new_string))
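For readers new to verbose patterns like the one above: re.VERBOSE lets whitespace and # comments live inside the regex (literal spaces must then be escaped), and \g<name> in the replacement refers back to a named group. A stripped-down sketch of the same idea:

import re

pat = re.compile(r"""
    (?P<first>\w+)\ (?P<last>\w+)  # two words separated by an escaped literal space
""", re.VERBOSE)
print(pat.sub(r"\g<last>, \g<first>", "John Doe"))  # Doe, John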
class LanguageModel: def infer(self, x): """Run language model on input x Args: x (str): Prompt to run inference on Returns: (str) Output of inference """ return x # stub: a real implementation would return generated text, not the prompt
class Languagemodel: def infer(self, x): """Run language model on input x Args: x (str): Prompt to run inference on Returns: (str) Output of inference """ return x # stub: a real implementation would return generated text, not the prompt
# Copyright 2016 x620 <https://github.com/x620> # Copyright 2016,2020 Ivan Yelizariev <https://it-projects.info/team/yelizariev> # Copyright 2018 Ruslan Ronzhin # Copyright 2019 Artem Rafailov <https://it-projects.info/team/Ommo73/> # License LGPL-3.0 (https://www.gnu.org/licenses/lgpl.html). { "name": """Show message recipients""", "summary": """Allows you be sure, that all discussion participants were notified""", "category": "Discuss", "images": ["images/1.png"], "version": "12.0.1.1.1", "author": "IT-Projects LLC, Pavel Romanchenko", "support": "apps@itpp.dev", "website": "https://itpp.dev", "license": "LGPL-3", "price": 40.00, "currency": "EUR", "depends": ["mail"], "external_dependencies": {"python": [], "bin": []}, "data": ["templates.xml"], "qweb": ["static/src/xml/recipient.xml"], "demo": [], "installable": True, "auto_install": False, }
{'name': 'Show message recipients', 'summary': 'Allows you be sure, that all discussion participants were notified', 'category': 'Discuss', 'images': ['images/1.png'], 'version': '12.0.1.1.1', 'author': 'IT-Projects LLC, Pavel Romanchenko', 'support': 'apps@itpp.dev', 'website': 'https://itpp.dev', 'license': 'LGPL-3', 'price': 40.0, 'currency': 'EUR', 'depends': ['mail'], 'external_dependencies': {'python': [], 'bin': []}, 'data': ['templates.xml'], 'qweb': ['static/src/xml/recipient.xml'], 'demo': [], 'installable': True, 'auto_install': False}
"""Config file tools for edx_lint.""" def merge_configs(main, tweaks): """Merge tweaks into a main config file.""" for section in tweaks.sections(): for option in tweaks.options(section): value = tweaks.get(section, option) if option.endswith("+"): option = option[:-1] value = main.get(section, option) + value main.set(section, option, value)
"""Config file tools for edx_lint.""" def merge_configs(main, tweaks): """Merge tweaks into a main config file.""" for section in tweaks.sections(): for option in tweaks.options(section): value = tweaks.get(section, option) if option.endswith('+'): option = option[:-1] value = main.get(section, option) + value main.set(section, option, value)
model = Model() i1 = Input("input", "TENSOR_FLOAT32", "{1, 2, 2, 1}") axis = Parameter("axis", "TENSOR_INT32", "{1}", [2]) keepDims = False output = Output("output", "TENSOR_FLOAT32", "{1, 2, 1}") model = model.Operation("REDUCE_MIN", i1, axis, keepDims).To(output) # Example 1. Input in operand 0, input0 = {i1: # input 0 [2.0, 1.0, 3.0, 4.0]} output0 = {output: # output 0 [1.0, 3.0]} # Instantiate an example Example((input0, output0))
model = Model() i1 = Input('input', 'TENSOR_FLOAT32', '{1, 2, 2, 1}') axis = Parameter('axis', 'TENSOR_INT32', '{1}', [2]) keep_dims = False output = Output('output', 'TENSOR_FLOAT32', '{1, 2, 1}') model = model.Operation('REDUCE_MIN', i1, axis, keep_dims).To(output) input0 = {i1: [2.0, 1.0, 3.0, 4.0]} output0 = {output: [1.0, 3.0]} Example((input0, output0))
# https://leetcode.com/problems/subrectangle-queries class SubrectangleQueries: def __init__(self, rectangle): self.rectangle = rectangle def updateSubrectangle(self, row1, col1, row2, col2, newValue): for row in range(row1, row2 + 1): for col in range(col1, col2 + 1): self.rectangle[row][col] = newValue def getValue(self, row, col): return self.rectangle[row][col]
class Subrectanglequeries: def __init__(self, rectangle): self.rectangle = rectangle def update_subrectangle(self, row1, col1, row2, col2, newValue): for row in range(row1, row2 + 1): for col in range(col1, col2 + 1): self.rectangle[row][col] = newValue def get_value(self, row, col): return self.rectangle[row][col]
entries = [ { "env-title": "atari-alien", "env-variant": "No-op start", "score": 6482.10, }, { "env-title": "atari-amidar", "env-variant": "No-op start", "score": 833, }, { "env-title": "atari-assault", "env-variant": "No-op start", "score": 11013.50, }, { "env-title": "atari-asterix", "env-variant": "No-op start", "score": 36238.50, }, { "env-title": "atari-asteroids", "env-variant": "No-op start", "score": 2780.40, }, { "env-title": "atari-atlantis", "env-variant": "No-op start", "score": 308258, }, { "env-title": "atari-bank-heist", "env-variant": "No-op start", "score": 988.70, }, { "env-title": "atari-battle-zone", "env-variant": "No-op start", "score": 61220, }, { "env-title": "atari-beam-rider", "env-variant": "No-op start", "score": 8566.50, }, { "env-title": "atari-berzerk", "env-variant": "No-op start", "score": 1641.40, }, { "env-title": "atari-bowling", "env-variant": "No-op start", "score": 75.40, }, { "env-title": "atari-boxing", "env-variant": "No-op start", "score": 99.40, }, { "env-title": "atari-breakout", "env-variant": "No-op start", "score": 518.40, }, { "env-title": "atari-centipede", "env-variant": "No-op start", "score": 3402.80, }, { "env-title": "atari-chopper-command", "env-variant": "No-op start", "score": 37568, }, { "env-title": "atari-crazy-climber", "env-variant": "No-op start", "score": 194347, }, { "env-title": "atari-defender", "env-variant": "No-op start", "score": 113128, }, { "env-title": "atari-demon-attack", "env-variant": "No-op start", "score": 100189, }, { "env-title": "atari-double-dunk", "env-variant": "No-op start", "score": 11.40, }, { "env-title": "atari-enduro", "env-variant": "No-op start", "score": 2230.10, }, { "env-title": "atari-fishing-derby", "env-variant": "No-op start", "score": 23.20, }, { "env-title": "atari-freeway", "env-variant": "No-op start", "score": 31.40, }, { "env-title": "atari-frostbite", "env-variant": "No-op start", "score": 8042.10, }, { "env-title": "atari-gopher", "env-variant": "No-op start", "score": 69135.10, }, { "env-title": "atari-gravitar", "env-variant": "No-op start", "score": 1073.80, }, { "env-title": "atari-hero", "env-variant": "No-op start", "score": 35542.20, }, { "env-title": "atari-ice-hockey", "env-variant": "No-op start", "score": 3.40, }, { "env-title": "atari-jamesbond", "env-variant": "No-op start", "score": 7869.20, }, { "env-title": "atari-kangaroo", "env-variant": "No-op start", "score": 10484.50, }, { "env-title": "atari-krull", "env-variant": "No-op start", "score": 9930.80, }, { "env-title": "atari-kung-fu-master", "env-variant": "No-op start", "score": 59799.50, }, { "env-title": "atari-montezuma-revenge", "env-variant": "No-op start", "score": 2643.50, }, { "env-title": "atari-ms-pacman", "env-variant": "No-op start", "score": 2724.30, }, { "env-title": "atari-name-this-game", "env-variant": "No-op start", "score": 9907.20, }, { "env-title": "atari-phoenix", "env-variant": "No-op start", "score": 40092.20, }, { "env-title": "atari-pitfall", "env-variant": "No-op start", "score": -3.50, }, { "env-title": "atari-pong", "env-variant": "No-op start", "score": 20.70, }, { "env-title": "atari-private-eye", "env-variant": "No-op start", "score": 15177.10, }, { "env-title": "atari-qbert", "env-variant": "No-op start", "score": 22956.50, }, { "env-title": "atari-riverraid", "env-variant": "No-op start", "score": 16608.30, }, { "env-title": "atari-road-runner", "env-variant": "No-op start", "score": 71168, }, { "env-title": "atari-robotank", "env-variant": "No-op start", "score": 68.50, }, { 
"env-title": "atari-seaquest", "env-variant": "No-op start", "score": 8425.80, }, { "env-title": "atari-skiing", "env-variant": "No-op start", "score": -10753.40, }, { "env-title": "atari-solaris", "env-variant": "No-op start", "score": 2760, }, { "env-title": "atari-space-invaders", "env-variant": "No-op start", "score": 2448.60, }, { "env-title": "atari-star-gunner", "env-variant": "No-op start", "score": 70038, }, { "env-title": "atari-surround", "env-variant": "No-op start", "score": 6.70, }, { "env-title": "atari-tennis", "env-variant": "No-op start", "score": 23.30, }, { "env-title": "atari-time-pilot", "env-variant": "No-op start", "score": 19401, }, { "env-title": "atari-tutankham", "env-variant": "No-op start", "score": 272.60, }, { "env-title": "atari-up-n-down", "env-variant": "No-op start", "score": 64354.20, }, { "env-title": "atari-venture", "env-variant": "No-op start", "score": 1597.50, }, { "env-title": "atari-video-pinball", "env-variant": "No-op start", "score": 469366, }, { "env-title": "atari-wizard-of-wor", "env-variant": "No-op start", "score": 13170.50, }, { "env-title": "atari-yars-revenge", "env-variant": "No-op start", "score": 102760, }, { "env-title": "atari-zaxxon", "env-variant": "No-op start", "score": 25215.50, }, ]
entries = [{'env-title': 'atari-alien', 'env-variant': 'No-op start', 'score': 6482.1}, {'env-title': 'atari-amidar', 'env-variant': 'No-op start', 'score': 833}, {'env-title': 'atari-assault', 'env-variant': 'No-op start', 'score': 11013.5}, {'env-title': 'atari-asterix', 'env-variant': 'No-op start', 'score': 36238.5}, {'env-title': 'atari-asteroids', 'env-variant': 'No-op start', 'score': 2780.4}, {'env-title': 'atari-atlantis', 'env-variant': 'No-op start', 'score': 308258}, {'env-title': 'atari-bank-heist', 'env-variant': 'No-op start', 'score': 988.7}, {'env-title': 'atari-battle-zone', 'env-variant': 'No-op start', 'score': 61220}, {'env-title': 'atari-beam-rider', 'env-variant': 'No-op start', 'score': 8566.5}, {'env-title': 'atari-berzerk', 'env-variant': 'No-op start', 'score': 1641.4}, {'env-title': 'atari-bowling', 'env-variant': 'No-op start', 'score': 75.4}, {'env-title': 'atari-boxing', 'env-variant': 'No-op start', 'score': 99.4}, {'env-title': 'atari-breakout', 'env-variant': 'No-op start', 'score': 518.4}, {'env-title': 'atari-centipede', 'env-variant': 'No-op start', 'score': 3402.8}, {'env-title': 'atari-chopper-command', 'env-variant': 'No-op start', 'score': 37568}, {'env-title': 'atari-crazy-climber', 'env-variant': 'No-op start', 'score': 194347}, {'env-title': 'atari-defender', 'env-variant': 'No-op start', 'score': 113128}, {'env-title': 'atari-demon-attack', 'env-variant': 'No-op start', 'score': 100189}, {'env-title': 'atari-double-dunk', 'env-variant': 'No-op start', 'score': 11.4}, {'env-title': 'atari-enduro', 'env-variant': 'No-op start', 'score': 2230.1}, {'env-title': 'atari-fishing-derby', 'env-variant': 'No-op start', 'score': 23.2}, {'env-title': 'atari-freeway', 'env-variant': 'No-op start', 'score': 31.4}, {'env-title': 'atari-frostbite', 'env-variant': 'No-op start', 'score': 8042.1}, {'env-title': 'atari-gopher', 'env-variant': 'No-op start', 'score': 69135.1}, {'env-title': 'atari-gravitar', 'env-variant': 'No-op start', 'score': 1073.8}, {'env-title': 'atari-hero', 'env-variant': 'No-op start', 'score': 35542.2}, {'env-title': 'atari-ice-hockey', 'env-variant': 'No-op start', 'score': 3.4}, {'env-title': 'atari-jamesbond', 'env-variant': 'No-op start', 'score': 7869.2}, {'env-title': 'atari-kangaroo', 'env-variant': 'No-op start', 'score': 10484.5}, {'env-title': 'atari-krull', 'env-variant': 'No-op start', 'score': 9930.8}, {'env-title': 'atari-kung-fu-master', 'env-variant': 'No-op start', 'score': 59799.5}, {'env-title': 'atari-montezuma-revenge', 'env-variant': 'No-op start', 'score': 2643.5}, {'env-title': 'atari-ms-pacman', 'env-variant': 'No-op start', 'score': 2724.3}, {'env-title': 'atari-name-this-game', 'env-variant': 'No-op start', 'score': 9907.2}, {'env-title': 'atari-phoenix', 'env-variant': 'No-op start', 'score': 40092.2}, {'env-title': 'atari-pitfall', 'env-variant': 'No-op start', 'score': -3.5}, {'env-title': 'atari-pong', 'env-variant': 'No-op start', 'score': 20.7}, {'env-title': 'atari-private-eye', 'env-variant': 'No-op start', 'score': 15177.1}, {'env-title': 'atari-qbert', 'env-variant': 'No-op start', 'score': 22956.5}, {'env-title': 'atari-riverraid', 'env-variant': 'No-op start', 'score': 16608.3}, {'env-title': 'atari-road-runner', 'env-variant': 'No-op start', 'score': 71168}, {'env-title': 'atari-robotank', 'env-variant': 'No-op start', 'score': 68.5}, {'env-title': 'atari-seaquest', 'env-variant': 'No-op start', 'score': 8425.8}, {'env-title': 'atari-skiing', 'env-variant': 'No-op start', 'score': -10753.4}, 
{'env-title': 'atari-solaris', 'env-variant': 'No-op start', 'score': 2760}, {'env-title': 'atari-space-invaders', 'env-variant': 'No-op start', 'score': 2448.6}, {'env-title': 'atari-star-gunner', 'env-variant': 'No-op start', 'score': 70038}, {'env-title': 'atari-surround', 'env-variant': 'No-op start', 'score': 6.7}, {'env-title': 'atari-tennis', 'env-variant': 'No-op start', 'score': 23.3}, {'env-title': 'atari-time-pilot', 'env-variant': 'No-op start', 'score': 19401}, {'env-title': 'atari-tutankham', 'env-variant': 'No-op start', 'score': 272.6}, {'env-title': 'atari-up-n-down', 'env-variant': 'No-op start', 'score': 64354.2}, {'env-title': 'atari-venture', 'env-variant': 'No-op start', 'score': 1597.5}, {'env-title': 'atari-video-pinball', 'env-variant': 'No-op start', 'score': 469366}, {'env-title': 'atari-wizard-of-wor', 'env-variant': 'No-op start', 'score': 13170.5}, {'env-title': 'atari-yars-revenge', 'env-variant': 'No-op start', 'score': 102760}, {'env-title': 'atari-zaxxon', 'env-variant': 'No-op start', 'score': 25215.5}]
class BaseViewTemplate():
    def get_template(self):
        if self.request.user.is_authenticated:
            template = "core/base.html"
        else:
            template = "core/base-nav.html"
        return template
class Baseviewtemplate:
    def get_template(self):
        if self.request.user.is_authenticated:
            template = 'core/base.html'
        else:
            template = 'core/base-nav.html'
        return template
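For context, a short usage sketch of this mixin pattern. The FakeView/SimpleNamespace scaffolding below is illustrative, not from the record; in a real Django class-based view, self.request is populated by the framework.

# Assumes the BaseViewTemplate class above is in scope; SimpleNamespace
# stands in for Django's request/user objects.
from types import SimpleNamespace

class FakeView(BaseViewTemplate):
    def __init__(self, authenticated):
        self.request = SimpleNamespace(
            user=SimpleNamespace(is_authenticated=authenticated))

assert FakeView(True).get_template() == "core/base.html"
assert FakeView(False).get_template() == "core/base-nav.html"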
{ "roadMapId" : "2", "mapIds" : { "1" : { "parameter" : [ "-l" ], "code" : "import json\ndef eventHandler(event, context, callback):\n\tjsonString = json.dumps(event)\n\tprint(jsonString)\n\tif event[\"present\"] == \"person\":\n\t\tprint(\"OK\")\n\telse:\n\t\tprint(\"None\")", "deviceId" : "deviceId1", "serverId" : "serverId1", "brokerId" : "brokerId1" } } }
{
    'roadMapId': '2',
    'mapIds': {
        '1': {
            'parameter': ['-l'],
            'code': 'import json\ndef eventHandler(event, context, callback):\n\tjsonString = json.dumps(event)\n\tprint(jsonString)\n\tif event["present"] == "person":\n\t\tprint("OK")\n\telse:\n\t\tprint("None")',
            'deviceId': 'deviceId1',
            'serverId': 'serverId1',
            'brokerId': 'brokerId1'
        }
    }
}
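The 'code' field stores an event handler as source text. A hedged sketch of how a consumer might materialize and invoke it; the exec/namespace plumbing and the road_map binding are assumptions, not something the record specifies:

# Illustrative only: `road_map` is assumed to be bound to the dict above.
namespace = {}
exec(road_map['mapIds']['1']['code'], namespace)

# Prints the serialized event, then "OK", because event["present"] == "person".
namespace['eventHandler']({'present': 'person'}, None, None)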
# program to convert API text file to markdown for wiki.
all_apis = []
with open("stepspy_api.txt", "rt") as fid_api:
    api = []
    line = fid_api.readline().strip()
    api.append(line)
    while True:
        line = fid_api.readline()
        if len(line) == 0:
            if len(api) != 0:
                all_apis.append(tuple(api))
            break
        else:
            line = line.strip()
            if not line.startswith("API "):
                api.append(line)
            else:
                all_apis.append(tuple(api))
                api = []
                api.append(line)

with open("stepspy_api.md", "wt") as fid_md:
    for api in all_apis:
        current_cat = "count"
        for line in api:
            if line.startswith("API "):
                #fid_md.write("# "+line+": ")
                fid_md.write("# ")
                current_cat = "format"
                continue
            if current_cat == "format":
                # NOTE: str.strip("Format:") strips the *character set*
                # {F, o, r, m, a, t, :}, not the literal prefix "Format:".
                api_name = line.strip("Format:").strip()
                api_name = api_name[:api_name.find("(")]
                fid_md.write(api_name+"\n")
                fid_md.write(line+" \n")
                current_cat = "description"
                continue
            if current_cat == "description":
                fid_md.write(line+" \n")
                if line.startswith("Args:"):
                    current_cat = "args"
                continue
            if current_cat == "args":
                if not line.startswith("Rets:"):
                    fid_md.write("> "+line+" \n")
                else:
                    fid_md.write("\n"+line+" \n")
                    current_cat = "rets"
                continue
            if current_cat == "rets":
                if not line.startswith("Example:") and not line.startswith("Tips:"):
                    fid_md.write("> "+line+" \n")
                else:
                    fid_md.write("\n"+line+" \n")
                    if line.startswith("Tips:"):
                        current_cat = "tips"
                    else:
                        fid_md.write("```python\n")
                        current_cat = "example"
                continue
            if current_cat == "tips":
                if not line.startswith("Example:"):
                    fid_md.write("> "+line+" \n")
                else:
                    fid_md.write("\n"+line+" \n")
                    fid_md.write("```python\n")
                    current_cat = "example"
                continue
            if current_cat == "example":
                if len(line) != 0:
                    fid_md.write(line+" \n")
                continue
        if current_cat == "example":
            fid_md.write("```\n\n")
        else:
            fid_md.write("\n\n")
all_apis = []
with open('stepspy_api.txt', 'rt') as fid_api:
    api = []
    line = fid_api.readline().strip()
    api.append(line)
    while True:
        line = fid_api.readline()
        if len(line) == 0:
            if len(api) != 0:
                all_apis.append(tuple(api))
            break
        else:
            line = line.strip()
            if not line.startswith('API '):
                api.append(line)
            else:
                all_apis.append(tuple(api))
                api = []
                api.append(line)

with open('stepspy_api.md', 'wt') as fid_md:
    for api in all_apis:
        current_cat = 'count'
        for line in api:
            if line.startswith('API '):
                fid_md.write('# ')
                current_cat = 'format'
                continue
            if current_cat == 'format':
                api_name = line.strip('Format:').strip()
                api_name = api_name[:api_name.find('(')]
                fid_md.write(api_name + '\n')
                fid_md.write(line + ' \n')
                current_cat = 'description'
                continue
            if current_cat == 'description':
                fid_md.write(line + ' \n')
                if line.startswith('Args:'):
                    current_cat = 'args'
                continue
            if current_cat == 'args':
                if not line.startswith('Rets:'):
                    fid_md.write('> ' + line + ' \n')
                else:
                    fid_md.write('\n' + line + ' \n')
                    current_cat = 'rets'
                continue
            if current_cat == 'rets':
                if not line.startswith('Example:') and (not line.startswith('Tips:')):
                    fid_md.write('> ' + line + ' \n')
                else:
                    fid_md.write('\n' + line + ' \n')
                    if line.startswith('Tips:'):
                        current_cat = 'tips'
                    else:
                        fid_md.write('```python\n')
                        current_cat = 'example'
                continue
            if current_cat == 'tips':
                if not line.startswith('Example:'):
                    fid_md.write('> ' + line + ' \n')
                else:
                    fid_md.write('\n' + line + ' \n')
                    fid_md.write('```python\n')
                    current_cat = 'example'
                continue
            if current_cat == 'example':
                if len(line) != 0:
                    fid_md.write(line + ' \n')
                continue
        if current_cat == 'example':
            fid_md.write('```\n\n')
        else:
            fid_md.write('\n\n')
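The converter is a small state machine keyed on the prefixes 'API ', 'Format:', 'Args:', 'Rets:', 'Tips:' and 'Example:'. A hypothetical input fragment consistent with those states; only the section prefixes are taken from the code, the rest is invented for illustration:

# Hypothetical stepspy_api.txt fragment; writing it out lets the converter
# above be exercised end to end.
sample = """API 1
Format: get_bus_count(subsystem)
Returns the number of buses in the given subsystem.
Args:
(1) subsystem: subsystem name.
Rets:
(1) n: bus count.
Example:
n = get_bus_count('transmission')
"""
with open("stepspy_api.txt", "wt") as fid:
    fid.write(sample)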
type_input = input()
symbol = input()

def int_type(num):
    number = int(num)
    result = number * 2
    print(result)

def real_type(num):
    number = float(num)
    result = number * 1.5
    print(f"{result:.2f}")

def string_type(text):
    string = "$" + text + "$"
    print(string)

if type_input == "int":
    int_type(symbol)
elif type_input == "real":
    real_type(symbol)
else:
    string_type(symbol)
type_input = input()
symbol = input()

def int_type(num):
    number = int(num)
    result = number * 2
    print(result)

def real_type(num):
    number = float(num)
    result = number * 1.5
    print(f'{result:.2f}')

def string_type(text):
    string = '$' + text + '$'
    print(string)

if type_input == 'int':
    int_type(symbol)
elif type_input == 'real':
    real_type(symbol)
else:
    string_type(symbol)
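Expected behavior of the snippet, shown by calling the helpers directly; the sample values are illustrative:

# Assumes the three functions above are in scope.
int_type("7")       # prints: 14       (int * 2)
real_type("7")      # prints: 10.50    (float * 1.5, two decimal places)
string_type("abc")  # prints: $abc$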
class Wrapper: "Wrapper to disable commit in sqla" def __init__(self, obj): self.obj = obj def __getattr__(self, attr): if attr in ["commit", "rollback"]: return lambda *args, **kwargs: None obj = getattr(self.obj, attr) if attr not in ["cursor", "execute"]: return obj if attr == "cursor": return type(self)(obj) return self.wrapper(obj) def wrapper(self, obj): "Implement if you need to make your customized wrapper" return obj def __call__(self, *args, **kwargs): self.obj = self.obj(*args, **kwargs) return self
class Wrapper: """Wrapper to disable commit in sqla""" def __init__(self, obj): self.obj = obj def __getattr__(self, attr): if attr in ['commit', 'rollback']: return lambda *args, **kwargs: None obj = getattr(self.obj, attr) if attr not in ['cursor', 'execute']: return obj if attr == 'cursor': return type(self)(obj) return self.wrapper(obj) def wrapper(self, obj): """Implement if you need to make your customized wrapper""" return obj def __call__(self, *args, **kwargs): self.obj = self.obj(*args, **kwargs) return self
load("@bazel_skylib//lib:paths.bzl", "paths") load("@bazel_skylib//lib:shell.bzl", "shell") load("@fbcode_macros//build_defs/config:read_configs.bzl", "read_choice") load("@fbcode_macros//build_defs/lib:allocators.bzl", "allocators") load("@fbcode_macros//build_defs/lib:build_info.bzl", "build_info") load("@fbcode_macros//build_defs/lib:cpp_common.bzl", "cpp_common") load("@fbcode_macros//build_defs/lib:label_utils.bzl", "label_utils") load("@fbcode_macros//build_defs/lib:python_typing.bzl", "gen_typing_config", "get_typing_config_target") load("@fbcode_macros//build_defs/lib:python_versioning.bzl", "python_versioning") load("@fbcode_macros//build_defs/lib:src_and_dep_helpers.bzl", "src_and_dep_helpers") load("@fbcode_macros//build_defs/lib:string_macros.bzl", "string_macros") load("@fbcode_macros//build_defs/lib:target_utils.bzl", "target_utils") load("@fbcode_macros//build_defs/lib:third_party.bzl", "third_party") load("@fbcode_macros//build_defs:compiler.bzl", "compiler") load("@fbcode_macros//build_defs:config.bzl", "config") load("@fbcode_macros//build_defs:coverage.bzl", "coverage") load("@fbcode_macros//build_defs:platform_utils.bzl", "platform_utils") load("@fbcode_macros//build_defs:sanitizers.bzl", "sanitizers") load("@fbsource//tools/build_defs:buckconfig.bzl", "read_bool") load("@fbsource//tools/build_defs:fb_native_wrapper.bzl", "fb_native") load("@fbsource//tools/build_defs:type_defs.bzl", "is_dict", "is_list") _INTERPRETERS = [ # name suffix, main module, dependencies ("interp", "libfb.py.python_interp", "//libfb/py:python_interp"), ("ipython", "libfb.py.ipython_interp", "//libfb/py:ipython_interp"), ("vs_debugger", "libfb.py.vs_debugger", "//libfb/py:vs_debugger"), ] _MANIFEST_TEMPLATE = """\ import sys class Manifest(object): def __init__(self): self._modules = None self.__file__ = __file__ self.__name__ = __name__ @property def modules(self): if self._modules is None: import os, sys modules = set() for root, dirs, files in os.walk(sys.path[0]): rel_root = os.path.relpath(root, sys.path[0]) if rel_root == '.': package_prefix = '' else: package_prefix = rel_root.replace(os.sep, '.') + '.' for name in files: base, ext = os.path.splitext(name) # Note that this loop includes all *.so files, regardless # of whether they are actually python modules or just # regular dynamic libraries if ext in ('.py', '.pyc', '.pyo', '.so'): if rel_root == "." and base == "__manifest__": # The manifest generation logic for normal pars # does not include the __manifest__ module itself continue modules.add(package_prefix + base) # Skip __pycache__ directories try: dirs.remove("__pycache__") except ValueError: pass self._modules = sorted(modules) return self._modules fbmake = {{ {fbmake} }} sys.modules[__name__] = Manifest() """ def _get_version_universe(python_version): """ Get the version universe for a specific python version Args: python_version: A `PythonVersion` that the universe should be fetched for Returns: The first third-party version universe string that corresponds to the python version """ return third_party.get_version_universe([("python", python_version.version_string)]) def _interpreter_binaries( name, buck_cxx_platform, python_version, python_platform, deps, platform_deps, preload_deps, visibility): """ Generate rules to build intepreter helpers. 
Args: name: The base name for the interpreter rules buck_cxx_platform: The buck-formatted cxx_platform to use for the interpreter binary python_version: A `PythonVersion` struct for the version of python to use python_platform: The python platform to pass to buck deps: The deps to pass to the binary in addition to interpeter deps platform_deps: The platform deps to pass to buck preload_deps: The preload deps to pass to buck visibility: The visibilty of the rule Returns: The list of names of all generated rules """ rule_names = [] for interp, interp_main_module, interp_dep in _INTERPRETERS: rule_name = name + "-" + interp fb_native.python_binary( name = rule_name, visibility = visibility, main_module = interp_main_module, cxx_platform = buck_cxx_platform, platform = python_platform, version_universe = _get_version_universe(python_version), deps = [interp_dep] + deps, platform_deps = platform_deps, preload_deps = preload_deps, package_style = "inplace", ) rule_names.append(rule_name) return rule_names def _get_interpreter_for_platform(python_platform): """ Get the interpreter to use for a buck-native python platform """ return native.read_config("python#" + python_platform, "interpreter") def _get_build_info( base_path, name, fbconfig_rule_type, main_module, fbcode_platform, python_platform): """ Return the build info attributes to install for python rules. Args: base_path: The package for the current build file name: The name of the rule being built fbconfig_rule_type: The name of the main rule being built; used for build_info main_module: The python main module of the binary/test fbcode_platform: The fbcode platform used for the binary/test python_platform: The buck-compatible python_platform that is being used Returns: A dictionary of key/value strings to put into a build manifest """ interpreter = _get_interpreter_for_platform(python_platform) # Iteration order is deterministic for dictionaries in buck/skylark py_build_info = { "build_tool": "buck", "main_module": main_module, "par_style": "live", "python_command": interpreter, "python_home": paths.dirname(paths.dirname(interpreter)), } # Include the standard build info, converting the keys to the names we # use for python. key_mappings = { "package_name": "package", "package_version": "version", "rule": "build_rule", "rule_type": "build_rule_type", } info = build_info.get_build_info( base_path, name, fbconfig_rule_type, fbcode_platform, ) for key in build_info.BUILD_INFO_KEYS: py_build_info[key_mappings.get(key, key)] = getattr(info, key) return py_build_info def _manifest_library( base_path, name, fbconfig_rule_type, main_module, fbcode_platform, python_platform, visibility): """ Build the rules that create the `__manifest__` module. Args: base_path: The package of this rule name: The name of the primary rule that was generated fbconfig_rule_type: The name of the main rule being built; used for build_info main_module: The main module of the python binary/test fbcode_platform: The fbcode platform to use in build info python_platform: The buck-compatible python platform to use visibility: The visiblity for the main python_library Returns: The name of a library that contains a __mainfest__.py with build information in it. 
""" build_info = _get_build_info( base_path, name, fbconfig_rule_type, main_module, fbcode_platform, python_platform, ) fbmake = "\n ".join([ "{}: {},".format(repr(k), repr(v)) for k, v in build_info.items() ]) manifest = _MANIFEST_TEMPLATE.format(fbmake = fbmake) manifest_name = name + "-manifest" manifest_lib_name = name + "-manifest-lib" fb_native.genrule( name = manifest_name, labels = ["generated"], visibility = None, out = name + "-__manifest__.py", cmd = "echo -n {} > $OUT".format(shell.quote(manifest)), ) fb_native.python_library( name = manifest_lib_name, labels = ["generated"], visibility = visibility, base_module = "", srcs = {"__manifest__.py": ":" + manifest_name}, ) return manifest_lib_name def _file_to_python_module(src, base_module): """Python implementation of Buck's toModuleName(). Original in com.facebook.buck.python.PythonUtil.toModuleName. """ src = paths.join(base_module, src) src, _ext = paths.split_extension(src) return src.replace("/", ".") # sic, not os.sep def _test_modules_library( base_path, library_name, library_srcs, library_base_module, visibility, generate_test_modules): """" Create the rule that generates a __test_modules__.py file for a library Args: base_path: The package for the current build file library_name: The name of the original library that was built library_srcs: The list of srcs (files or labels) that were given to the original library that this test_modules_library is for library_base_module: The base_module of the original library visibility: The visibility for this rule generate_test_modules: Whether to actually materialize the rule. If False, just return the name of the rule Returns: The name of the generated python library that contains __test_modules__.py """ testmodules_library_name = library_name + "-testmodules-lib" # If we don't actually want to generate the library (generate_test_modules), # at least return the name if not generate_test_modules: return testmodules_library_name lines = ["TEST_MODULES = ["] for src in sorted(library_srcs): lines.append( ' "{}",'.format( _file_to_python_module(src, library_base_module or base_path), ), ) lines.append("]") genrule_name = library_name + "-testmodules" fb_native.genrule( name = genrule_name, visibility = None, out = library_name + "-__test_modules__.py", cmd = " && ".join([ "echo {} >> $OUT".format(shell.quote(line)) for line in lines ]), ) fb_native.python_library( name = testmodules_library_name, visibility = visibility, base_module = "", deps = ["//python:fbtestmain", ":" + library_name], srcs = {"__test_modules__.py": ":" + genrule_name}, ) return testmodules_library_name def _typecheck_test( name, main_module, buck_cxx_platform, python_platform, python_version, deps, platform_deps, preload_deps, typing_options, visibility, emails, library_target, library_versioned_srcs, library_srcs, library_resources, library_base_module): """ Create a test and associated libraries for running typechecking Args: name: The name of the original binary/test to run typechecks on main_module: The main module of hte binary/test buck_cxx_platform: The buck-formatted cxx_platform to use for the interpreter binary python_version: A `PythonVersion` struct for the version of python to use python_platform: The python platform to pass to buck deps: The deps to pass to the binary in addition to interpeter deps platform_deps: The platform deps to pass to buck preload_deps: The preload deps to pass to buck typing_options: A comma delimited list of strings that configure typing for this binary/library visibility: 
The visibilty of the rule library_target: The fully qualified target for the original library used in the binary/test. This is used to determine whether the following library_* properties are used in the final test rule library_versioned_srcs: The versioned_srcs property from the library used to create the original binary/test. This should be the final value passed to buck: No intermediate representations library_srcs: The srcs property from the library used to create the original binary/test. This should be the final value passed to buck: No intermediate representations library_resources: The resources property from the library used to create the original binary/test. This should be the final value passed to buck: No intermediate representations library_base_module: The base_module property from the library used to create the original binary/test. This should be the final value passed to buck: No intermediate representations Returns: The name of the test library that was created """ typing_config = get_typing_config_target() typecheck_deps = deps[:] if ":python_typecheck-library" not in typecheck_deps: # Buck doesn't like duplicate dependencies. typecheck_deps.append("//libfb/py:python_typecheck-library") if not typing_config: typecheck_deps.append("//python/typeshed_internal:global_mypy_ini") env = {} # If the passed library is not a dependency, add its sources here. # This enables python_unittest targets to be type-checked, too. add_library_attrs = library_target not in typecheck_deps if not add_library_attrs: library_versioned_srcs = None library_srcs = None library_resources = None library_base_module = None if main_module not in ("__fb_test_main__", "libfb.py.testslide.unittest"): # Tests are properly enumerated from passed sources (see above). # For binary targets, we need this subtle hack to let # python_typecheck know where to start type checking the program. env["PYTHON_TYPECHECK_ENTRY_POINT"] = main_module typing_options_list = [ option.strip() for option in typing_options.split(",") ] if typing_options else [] use_pyre = typing_options and "pyre" in typing_options_list if use_pyre: typing_options_list.remove("pyre") typing_options = ",".join(typing_options_list) env["PYRE_ENABLED"] = "1" if typing_config: cmd = "$(exe {}) gather ".format(typing_config) if use_pyre: genrule_name = name + "-typing=pyre.json" genrule_out = "pyre.json" cmd += "--pyre=True " else: genrule_name = name + "-typing=mypy.ini" genrule_out = "mypy.ini" if typing_options: cmd += '--options="{}" '.format(typing_options) cmd += "$(location {}-typing) $OUT".format(library_target) fb_native.genrule( name = genrule_name, out = genrule_out, cmd = cmd, visibility = visibility, ) if use_pyre: typing_library_name = name + "-pyre_json" else: typing_library_name = name + "-mypy_ini" fb_native.python_library( name = typing_library_name, visibility = visibility, base_module = "", srcs = [":" + genrule_name], ) typecheck_deps.append(":" + typing_library_name) typecheck_rule_name = name + "-typecheck" fb_native.python_test( name = typecheck_rule_name, main_module = "python_typecheck", cxx_platform = buck_cxx_platform, platform = python_platform, deps = typecheck_deps, platform_deps = platform_deps, preload_deps = preload_deps, package_style = "inplace", # TODO(ambv): labels here shouldn't be hard-coded. 
labels = ["buck", "python"], version_universe = _get_version_universe(python_version), contacts = emails, visibility = visibility, env = env, versioned_srcs = library_versioned_srcs, srcs = library_srcs, resources = library_resources, base_module = library_base_module, ) return typecheck_rule_name def _monkeytype_binary( rule_type, attributes, library_name): """ Create a python binary/test that enables monkeytype but otherwise looks like another binary/test Args: rule_type: The type of rule to create (python_binary or python_test) attributes: The attributes of the original binary/test that we are enabling monkeytype for. These should be final values passed to buck, not intermediaries, as they are copied directly into a library_name: The name of the implicit library created for the binary/test """ name = attributes["name"] visibility = attributes.get("visibility") lib_main_module_attrs_name = None if "main_module" in attributes: # we need to preserve the original main_module, so we inject a # library with a module for it that the main wrapper picks up main_module_name = name + "-monkeytype_main_module" script = ( "#!/usr/bin/env python3\n\n" + "def monkeytype_main_module() -> str:\n" + " return '{}'\n".format(attributes["main_module"]) ) fb_native.genrule( name = main_module_name, visibility = visibility, out = name + "-__monkeytype_main_module__.py", cmd = "echo {} > $OUT".format(shell.quote(script)), ) lib_main_module_attrs_name = name + "-monkeytype_main_module-lib" fb_native.python_library( name = lib_main_module_attrs_name, visibility = visibility, base_module = "", deps = ["//python:fbtestmain", ":" + name], srcs = { "__monkeytype_main_module__.py": ":" + main_module_name, }, ) # Create a variant of the target that is running with monkeytype if rule_type == "python_binary": wrapper_rule_constructor = fb_native.python_binary elif rule_type == "python_test": wrapper_rule_constructor = fb_native.python_test else: fail("Invalid rule type specified: " + rule_type) wrapper_attrs = dict(attributes) wrapper_attrs["name"] = name + "-monkeytype" wrapper_attrs["visibility"] = visibility if "deps" in wrapper_attrs: wrapper_deps = list(wrapper_attrs["deps"]) else: wrapper_deps = [] library_target = ":" + library_name if library_target not in wrapper_deps: wrapper_deps.append(library_target) stub_gen_deps = list(wrapper_deps) if "//python/monkeytype:main_wrapper" not in wrapper_deps: wrapper_deps.append("//python/monkeytype/tools:main_wrapper") if lib_main_module_attrs_name != None: wrapper_deps.append(":" + lib_main_module_attrs_name) wrapper_attrs["deps"] = wrapper_deps wrapper_attrs["base_module"] = "" wrapper_attrs["main_module"] = "python.monkeytype.tools.main_wrapper" wrapper_rule_constructor(**wrapper_attrs) if "//python/monkeytype/tools:stubs_lib" not in wrapper_deps: stub_gen_deps.append("//python/monkeytype/tools:stubs_lib") # And create a target that can be used for stub creation fb_native.python_binary( name = name + "-monkeytype-gen-stubs", visibility = visibility, main_module = "python.monkeytype.tools.get_stub", cxx_platform = attributes["cxx_platform"], platform = attributes["platform"], deps = stub_gen_deps, platform_deps = attributes["platform_deps"], preload_deps = attributes["preload_deps"], package_style = "inplace", version_universe = attributes["version_universe"], ) def _analyze_import_binary( name, buck_cxx_platform, python_platform, python_version, deps, platform_deps, preload_deps, visibility): """ Generate a binary to analyze the imports of a given python library """ 
generate_imports_deps = list(deps) if ":generate_par_imports" not in generate_imports_deps: generate_imports_deps.append("//libfb/py:generate_par_imports") if ":parutil" not in generate_imports_deps: generate_imports_deps.append("//libfb/py:parutil") version_universe = _get_version_universe(python_version) generate_par_name = name + "-generate-imports" fb_native.python_binary( name = generate_par_name, main_module = "libfb.py.generate_par_imports", cxx_platform = buck_cxx_platform, platform = python_platform, deps = generate_imports_deps, platform_deps = platform_deps, preload_deps = preload_deps, # TODO(ambv): labels here shouldn't be hard-coded. labels = ["buck", "python"], version_universe = version_universe, visibility = visibility, ) genrule_name = name + "-gen-rule" fb_native.genrule( name = genrule_name, srcs = [":" + generate_par_name], out = "{}-imports_file.py".format(name), cmd = '$(exe :{}) >"$OUT"'.format(generate_par_name), ) lib_name = name + "-analyze-lib" fb_native.python_library( name = lib_name, srcs = {"imports_file.py": ":" + genrule_name}, base_module = "", deps = [":" + genrule_name], ) analyze_deps = list(deps) analyze_deps.append(":" + lib_name) if ":analyze_par_imports" not in analyze_deps: analyze_deps.append("//libfb/py:analyze_par_imports") fb_native.python_binary( name = name + "-analyze-imports", main_module = "libfb.py.analyze_par_imports", cxx_platform = buck_cxx_platform, platform = python_platform, deps = analyze_deps, platform_deps = platform_deps, preload_deps = preload_deps, # TODO(ambv): labels here shouldn't be hard-coded. labels = ["buck", "python"], version_universe = version_universe, visibility = visibility, ) _GEN_SRCS_LINK = "https://fburl.com/203312823" def _parse_srcs(base_path, param, srcs): # type: (str, str, Union[List[str], Dict[str, str]]) -> Dict[str, Union[str, RuleTarget]] """ Converts `srcs` to a `srcs` dictionary for use in python_* rule Fails if a RuleTarget object is passed in, but a source file name cannot be determined Args: base_path: The package for the rule param: The name of the parameter being parsed. Used in error messages srcs: Either a dictionary of file/target -> destination in the library, or a list of source files or RuleTarget objects that the source named can be divined from. Returns: A mapping of destination filename -> file str / RuleTarget """ # Parse sources in dict form. if is_dict(srcs) or hasattr(srcs, "items"): out_srcs = ( src_and_dep_helpers.parse_source_map( base_path, {v: k for k, v in srcs.items()}, ) ) # Parse sources in list form. else: out_srcs = {} # Format sources into a dict of logical name of value. for src in src_and_dep_helpers.parse_source_list(base_path, srcs): # Path names are the same as path values. if not target_utils.is_rule_target(src): out_srcs[src] = src continue # If the source comes from a `custom_rule`/`genrule`, and the # user used the `=` notation which encodes the source's "name", # we can extract and use that. if "=" in src.name: name = src.name.rsplit("=", 1)[1] out_srcs[name] = src continue # Otherwise, we don't have a good way of deducing the name. # This actually looks to be pretty rare, so just throw a useful # error prompting the user to use the `=` notation above, or # switch to an explicit `dict`. fail( 'parameter `{}`: cannot infer a "name" to use for ' + "`{}`. If this is an output from a `custom_rule`, " + "consider using the `<rule-name>=<out>` notation instead. 
" + "Otherwise, please specify this parameter as `dict` " + 'mapping sources to explicit "names" (see {} for details).' .format(param, target_utils.target_to_label(src), _GEN_SRCS_LINK), ) return out_srcs def _parse_gen_srcs(base_path, srcs): # type: (str, Union[List[str], Dict[str, str]]) -> Dict[str, Union[str, RuleTarget]] """ Parse the given sources as input to the `gen_srcs` parameter. """ out_srcs = _parse_srcs(base_path, "gen_srcs", srcs) # Do a final pass to verify that all sources in `gen_srcs` are rule # references. for src in out_srcs.values(): if not target_utils.is_rule_target(src): fail( "parameter `gen_srcs`: `{}` must be a reference to rule " + "that generates a source (e.g. `//foo:bar`, `:bar`) " + " (see {} for details)." .format(src, GEN_SRCS_LINK), ) return out_srcs def _get_par_build_args( base_path, name, rule_type, platform, argcomplete = None, strict_tabs = None, compile = None, par_style = None, strip_libpar = None, needed_coverage = None, python = None): """ Return the arguments we need to pass to the PAR builder wrapper. """ build_args = [] build_mode = config.get_build_mode() if config.get_use_custom_par_args(): # Arguments that we wanted directly threaded into `make_par`. passthrough_args = [] if argcomplete == True: passthrough_args.append("--argcomplete") if strict_tabs == False: passthrough_args.append("--no-strict-tabs") if compile == False: passthrough_args.append("--no-compile") passthrough_args.append("--store-source") elif compile == "with-source": passthrough_args.append("--store-source") elif compile != True and compile != None: fail( ( "Invalid value {} for `compile`, must be True, False, " + '"with-source", or None (default)' ).format(compile), ) if par_style != None: passthrough_args.append("--par-style=" + par_style) if needed_coverage != None or coverage.get_coverage(): passthrough_args.append("--store-source") if build_mode.startswith("opt"): passthrough_args.append("--optimize") # Add arguments to populate build info. mode = build_info.get_build_info_mode(base_path, name) if mode == "none": fail("Invalid build info mode specified") info = ( build_info.get_explicit_build_info( base_path, name, mode, rule_type, platform, compiler.get_compiler_for_current_buildfile(), ) ) passthrough_args.append( "--build-info-build-mode=" + info.build_mode, ) passthrough_args.append("--build-info-build-tool=buck") if info.package_name != None: passthrough_args.append( "--build-info-package-name=" + info.package_name, ) if info.package_release != None: passthrough_args.append( "--build-info-package-release=" + info.package_release, ) if info.package_version != None: passthrough_args.append( "--build-info-package-version=" + info.package_version, ) passthrough_args.append("--build-info-platform=" + info.platform) passthrough_args.append("--build-info-rule-name=" + info.rule) passthrough_args.append("--build-info-rule-type=" + info.rule_type) build_args.extend(["--passthrough=" + a for a in passthrough_args]) # Arguments for stripping libomnibus. dbg builds should never strip. if not build_mode.startswith("dbg"): if strip_libpar == True: build_args.append("--omnibus-debug-info=strip") elif strip_libpar == "extract": build_args.append("--omnibus-debug-info=extract") else: build_args.append("--omnibus-debug-info=separate") # Set an explicit python interpreter. 
if python != None: build_args.append("--python-override=" + python) return build_args def _associated_targets_library(base_path, name, deps, visibility): """ Associated Targets are buck rules that need to be built, when This target is built, but are not a code dependency. Which is why we wrap them in a cxx_library so they could never be a code dependency TODO: Python just needs the concept of runtime deps if it doesn't have it. Also, what is the actual use case for this? """ rule_name = name + "-build_also" buck_platform = platform_utils.get_buck_platform_for_base_path(base_path) fb_native.cxx_library( name = rule_name, visibility = visibility, deps = deps, default_platform = buck_platform, defaults = {"platform": buck_platform}, ) return rule_name def _jemalloc_malloc_conf_library(base_path, name, malloc_conf, deps, visibility): """ Build a rule which wraps the JEMalloc allocator and links default configuration via the `jemalloc_conf` variable. """ buck_platform = platform_utils.get_buck_platform_for_base_path(base_path) jemalloc_config_line = ",".join([ "{}:{}".format(k, v) for k, v in sorted(malloc_conf.items()) ]) src_rule_name = "__{}_jemalloc_conf_src__".format(name) fb_native.genrule( name = src_rule_name, visibility = visibility, out = "jemalloc_conf.c", cmd = 'echo \'const char* malloc_conf = "{}";\' > "$OUT"'.format(jemalloc_config_line), ) deps, platform_deps = src_and_dep_helpers.format_all_deps(deps) lib_rule_name = "__{}_jemalloc_conf_lib__".format(name) fb_native.cxx_library( name = lib_rule_name, visibility = visibility, srcs = [":" + src_rule_name], default_platform = buck_platform, defaults = {"platform": buck_platform}, deps = deps, platform_deps = platform_deps, ) return target_utils.RootRuleTarget(base_path, lib_rule_name) def _convert_needed_coverage_spec(base_path, spec): """ Converts `needed_coverage` from fbcode's spec into the buck native spec Args: base_path: The base path for this rule; used to get fully qualified targets spec: A tuple of (<needed percentage as int>, <target as a string>) Returns: A buck-compatible spec. This is a tuple of two elements if no source name is detected in the target name (with an =) or three elements if it is detected in the form of (<percentage as int>, <full target as string>, <file as string>?) """ if len(spec) != 2: fail(( "parameter `needed_coverage`: `{}` must have exactly 2 " + "elements, a ratio and a target." ).format(spec)) ratio, target = spec if "=" not in target: return ( ratio, src_and_dep_helpers.convert_build_target(base_path, target), ) target, path = target.rsplit("=", 1) return (ratio, src_and_dep_helpers.convert_build_target(base_path, target), path) def _should_generate_interp_rules(helper_deps): """ Return whether we should generate the interp helpers. This is controlled by both the mode, the property, and buckconfig settings Args: helper_deps: The value of the `helper_deps` attribute on the users rule. Should be True or False """ # We can only work in @mode/dev if not config.get_build_mode().startswith("dev"): return False # Our current implementation of the interp helpers is costly when using # omnibus linking, only generate these if explicitly set via config or TARGETS config_setting = read_bool("python", "helpers", required = False) if config_setting == None: # No CLI option is set, respect the TARGETS file option. return helper_deps return config_setting def _preload_deps(base_path, name, allocator, jemalloc_conf = None, visibility = None): """ Add C/C++ deps which need to preloaded by Python binaries. 
Returns: A list of additional dependencies (as strings) which should be added to the python binary """ deps = [] sanitizer = sanitizers.get_sanitizer() # If we're using sanitizers, add the dep on the sanitizer-specific # support library. if sanitizer != None: sanitizer = sanitizers.get_short_name(sanitizer) deps.append( target_utils.RootRuleTarget( "tools/build/sanitizers", "{}-py".format(sanitizer), ), ) # Generate sanitizer configuration even if sanitizers are not used deps.append( cpp_common.create_sanitizer_configuration( base_path, name, enable_lsan = False, ), ) # If we're using an allocator, and not a sanitizer, add the allocator- # specific deps. if allocator != None and sanitizer == None: allocator_deps = allocators.get_allocator_deps(allocator) if allocator.startswith("jemalloc") and jemalloc_conf != None: conf_dep = _jemalloc_malloc_conf_library( base_path, name, jemalloc_conf, allocator_deps, visibility, ) allocator_deps = [conf_dep] deps.extend(allocator_deps) return deps def _get_ldflags(base_path, name, fbconfig_rule_type, strip_libpar = True): """ Return ldflags to use when linking omnibus libraries in python binaries. """ # We override stripping for python binaries unless we're in debug mode # (which doesn't get stripped by default). If either `strip_libpar` # is set or any level of stripping is enabled via config, we do full # stripping. strip_mode = cpp_common.get_strip_mode(base_path, name) if (not config.get_build_mode().startswith("dbg") and (strip_mode != "none" or strip_libpar == True)): strip_mode = "full" return cpp_common.get_ldflags( base_path, name, fbconfig_rule_type, strip_mode = strip_mode, ) def _get_package_style(): """ Get the package_style to use for binary rules from the configuration See https://buckbuild.com/rule/python_binary.html#package_style """ return read_choice( "python", "package_style", ("inplace", "standalone"), "standalone", ) def _implicit_python_library( name, is_test_companion, base_module = None, srcs = (), versioned_srcs = (), gen_srcs = (), deps = (), tests = (), tags = (), external_deps = (), visibility = None, resources = (), cpp_deps = (), py_flavor = "", version_subdirs = None): # Not used for now, will be used in a subsequent diff """ Creates a python_library and all supporting libraries This library may or may not be consumed as a companion library to a python_binary, or a python_test. The attributes returned vary based on how it will be used. Args: name: The name of this library is_test_companion: Whether this library is being created and consumed directly by a test rule base_module: The basemodule for the library (https://buckbuild.com/rule/python_library.html#base_module) srcs: A sequence of sources/targets to use as srcs. Note that only files ending in .py are considered sources. All other srcs are added as resources. Note if this is a dictionary, the key and value are swapped from the official buck implementation. That is,this rule expects {<src>: <destination in the library>} versioned_srcs: If provided, a list of tuples of (<python version constraint string>, <srcs as above>) These sources are then added to the versioned_srcs attribute in the library gen_srcs: DEPRECATED A list of srcs that come from `custom_rule`s to be merged into the final srcs list. deps: A sequence of dependencies for the library. These should only be python libraries, as python's typing support assumes that dependencies also have a companion -typing rule tests: The targets that test this library tags: Arbitrary metadata to attach to this library. 
See https://buckbuild.com/rule/python_library.html#labels external_deps: A sequence of tuples of external dependencies visibility: The visibility of the library resources: A sequence of sources/targets that should be explicitly added as resoruces. Note that if a dictionary is used, the key and value are swapped from the official buck implementation. That is, this rule expects {<src>: <destination in the library>} cpp_deps: A sequence of C++ library depenencies that will be loaded at runtime py_flavor: The flavor of python to use. By default ("") this is cpython version_subdirs: A sequence of tuples of (<buck version constring>, <version subdir>). This points to the subdirectory (or "") that each version constraint uses. This helps us rewrite things like versioned_srcs for third-party2 targets. Returns: The kwargs to pass to a native.python_library rule """ base_path = native.package_name() attributes = {} attributes["name"] = name # Normalize all the sources from the various parameters. parsed_srcs = {} # type: Dict[str, Union[str, RuleTarget]] parsed_srcs.update(_parse_srcs(base_path, "srcs", srcs)) parsed_srcs.update(_parse_gen_srcs(base_path, gen_srcs)) # Parse the version constraints and normalize all source paths in # `versioned_srcs`: parsed_versioned_srcs = [ ( python_versioning.python_version_constraint(pvc), _parse_srcs(base_path, "versioned_srcs", vs), ) for pvc, vs in versioned_srcs ] # Contains a mapping of platform name to sources to use for that # platform. all_versioned_srcs = [] # If we're TP project, install all sources via the `versioned_srcs` # parameter. `py_flavor` is ignored since flavored Pythons are only # intended for use by internal projects. if third_party.is_tp2(base_path): if version_subdirs == None: fail("`version_subdirs` must be specified on third-party projects") # TP2 projects have multiple "pre-built" source dirs, so we install # them via the `versioned_srcs` parameter along with the versions # of deps that was used to build them, so that Buck can select the # correct one based on version resolution. for constraints, subdir in version_subdirs: build_srcs = [parsed_srcs] if parsed_versioned_srcs: py_vers = None for target, constraint_version in constraints.items(): if target.endswith("/python:__project__"): py_vers = python_versioning.python_version(constraint_version) # 'is None' can become == None when the custom version classes # go away if py_vers == None: fail("Could not get python version for versioned_srcs") build_srcs.extend([ dict(vs) for vc, vs in parsed_versioned_srcs if python_versioning.constraint_matches(vc, py_vers, check_minor = True) ]) vsrc = {} for build_src in build_srcs: for name, src in build_src.items(): if target_utils.is_rule_target(src): vsrc[name] = src else: vsrc[name] = paths.join(subdir, src) all_versioned_srcs.append((constraints, vsrc)) # Reset `srcs`, since we're using `versioned_srcs`. 
parsed_srcs = {} # If we're an fbcode project, and `py_flavor` is not specified, then # keep the regular sources parameter and only use the `versioned_srcs` # parameter for the input parameter of the same name; if `py_flavor` is # specified, then we have to install all sources via `versioned_srcs` else: pytarget = third_party.get_tp2_project_target("python") platforms = platform_utils.get_platforms_for_host_architecture() # Iterate over all potential Python versions and collect srcs for # each version: for pyversion in python_versioning.get_all_versions(): if not python_versioning.version_supports_flavor(pyversion, py_flavor): continue ver_srcs = {} if py_flavor: ver_srcs.update(parsed_srcs) for constraint, pvsrcs in parsed_versioned_srcs: constraint = python_versioning.normalize_constraint(constraint) if python_versioning.constraint_matches(constraint, pyversion): ver_srcs.update(pvsrcs) if ver_srcs: all_versioned_srcs.append( ( { target_utils.target_to_label(pytarget, fbcode_platform = p): pyversion.version_string for p in platforms if python_versioning.platform_has_version(p, pyversion) }, ver_srcs, ), ) if py_flavor: parsed_srcs = {} attributes["base_module"] = base_module if parsed_srcs: # Need to split the srcs into srcs & resources as Buck # expects all test srcs to be python modules. if is_test_companion: formatted_srcs = src_and_dep_helpers.format_source_map({ k: v for k, v in parsed_srcs.items() if k.endswith(".py") }) formatted_resources = src_and_dep_helpers.format_source_map({ k: v for k, v in parsed_srcs.items() if not k.endswith(".py") }) attributes["resources"] = formatted_resources.value attributes["platform_resources"] = formatted_resources.platform_value else: formatted_srcs = src_and_dep_helpers.format_source_map(parsed_srcs) attributes["srcs"] = formatted_srcs.value attributes["platform_srcs"] = formatted_srcs.platform_value # Emit platform-specific sources. We split them between the # `platform_srcs` and `platform_resources` parameter based on their # extension, so that directories with only resources don't end up # creating stray `__init__.py` files for in-place binaries. 
out_versioned_srcs = [] out_versioned_resources = [] for vcollection, ver_srcs in all_versioned_srcs: out_srcs = {} out_resources = {} non_platform_ver_srcs = src_and_dep_helpers.without_platforms( src_and_dep_helpers.format_source_map(ver_srcs), ) for dst, src in non_platform_ver_srcs.items(): if dst.endswith(".py") or dst.endswith(".so"): out_srcs[dst] = src else: out_resources[dst] = src out_versioned_srcs.append((vcollection, out_srcs)) out_versioned_resources.append((vcollection, out_resources)) if out_versioned_srcs: attributes["versioned_srcs"] = \ python_versioning.add_flavored_versions(out_versioned_srcs) if out_versioned_resources: attributes["versioned_resources"] = \ python_versioning.add_flavored_versions(out_versioned_resources) dependencies = [] if third_party.is_tp2(base_path): dependencies.append( target_utils.target_to_label( third_party.get_tp2_project_target( third_party.get_tp2_project_name(base_path), ), fbcode_platform = third_party.get_tp2_platform(base_path), ), ) for target in deps: dependencies.append( src_and_dep_helpers.convert_build_target(base_path, target), ) if cpp_deps: dependencies.extend(cpp_deps) if dependencies: attributes["deps"] = dependencies attributes["tests"] = tests if visibility != None: attributes["visibility"] = visibility if external_deps: attributes["platform_deps"] = ( src_and_dep_helpers.format_platform_deps( [ src_and_dep_helpers.normalize_external_dep( dep, lang_suffix = "-py", parse_version = True, ) for dep in external_deps ], # We support the auxiliary versions hack for neteng/Django. deprecated_auxiliary_deps = True, ) ) attributes["labels"] = tags # The above code does a magical dance to split `gen_srcs`, `srcs`, # and `versioned_srcs` into pure-Python `srcs` and "everything else" # `resources`. In practice, it drops `__init__.py` into non-Python # data included with Python libraries, whereas `resources` does not. attributes.setdefault("resources", {}).update({ # For resources of the form {":target": "dest/path"}, we have to # format the parsed `RuleTarget` struct as a string before # passing it to Buck. k: src_and_dep_helpers.format_source(v) for k, v in _parse_srcs( base_path, "resources", resources, ).items() }) return attributes def _convert_library( is_test, is_library, base_path, name, base_module, check_types, cpp_deps, deps, external_deps, gen_srcs, py_flavor, resources, runtime_deps, srcs, tags, tests, typing, typing_options, version_subdirs, versioned_srcs, visibility): """ Gathers the attributes implicit python_library and creates associated rules This is suitable for usage by either python_binary, python_unittest or python_library. 
See `implicit_python_library` for more details Returns: Attributes for a native.python_library, """ # for binary we need a separate library if is_library: library_name = name else: library_name = name + "-library" if is_library and check_types: fail( "parameter `check_types` is not supported for libraries, did you " + "mean to specify `typing`?", ) if get_typing_config_target(): gen_typing_config( library_name, base_module if base_module != None else base_path, srcs, [src_and_dep_helpers.convert_build_target(base_path, dep) for dep in deps], typing or check_types, typing_options, visibility, ) if runtime_deps: associated_targets_name = _associated_targets_library( base_path, library_name, runtime_deps, visibility, ) deps = list(deps) + [":" + associated_targets_name] extra_tags = [] if not is_library: extra_tags.append("generated") if is_test: extra_tags.append("unittest-library") return _implicit_python_library( library_name, is_test_companion = is_test, base_module = base_module, srcs = srcs, versioned_srcs = versioned_srcs, gen_srcs = gen_srcs, deps = deps, tests = tests, tags = list(tags) + extra_tags, external_deps = external_deps, visibility = visibility, resources = resources, cpp_deps = cpp_deps, py_flavor = py_flavor, version_subdirs = version_subdirs, ) def _single_binary_or_unittest( base_path, name, implicit_library_target, implicit_library_attributes, fbconfig_rule_type, buck_rule_type, is_test, tests, py_version, py_flavor, main_module, strip_libpar, tags, par_style, emails, needed_coverage, argcomplete, strict_tabs, compile, args, env, python, allocator, check_types, preload_deps, jemalloc_conf, # TODO: This does not appear to be used anywhere typing_options, helper_deps, visibility, analyze_imports, additional_coverage_targets, generate_test_modules): if is_test and par_style == None: par_style = "xar" dependencies = [] platform_deps = [] out_preload_deps = [] platform = platform_utils.get_platform_for_base_path(base_path) python_version = python_versioning.get_default_version( platform = platform, constraint = py_version, flavor = py_flavor, ) if python_version == None: fail( ( "Unable to find Python version matching constraint" + "'{}' and flavor '{}' on '{}'." ).format(py_version, py_flavor, platform), ) python_platform = platform_utils.get_buck_python_platform( platform, major_version = python_version.major, flavor = py_flavor, ) if allocator == None: allocator = allocators.normalize_allocator(allocator) attributes = {} attributes["name"] = name if is_test and additional_coverage_targets: attributes["additional_coverage_targets"] = additional_coverage_targets if visibility != None: attributes["visibility"] = visibility # If this is a test, we need to merge the library rule into this # one and inherit its deps. if is_test: for param in ("versioned_srcs", "srcs", "resources", "base_module"): val = implicit_library_attributes.get(param) if val != None: attributes[param] = val dependencies.extend(implicit_library_attributes.get("deps", [])) platform_deps.extend(implicit_library_attributes.get("platform_deps", [])) # Add the "coverage" library as a dependency for all python tests. platform_deps.extend( src_and_dep_helpers.format_platform_deps( [target_utils.ThirdPartyRuleTarget("coverage", "coverage-py")], ), ) # Otherwise, this is a binary, so just the library portion as a dep. else: dependencies.append(":" + implicit_library_attributes["name"]) # Sanitize the main module, so that it's a proper module reference. 
if main_module != None: main_module = main_module.replace("/", ".") if main_module.endswith(".py"): main_module = main_module[:-3] attributes["main_module"] = main_module elif is_test: main_module = "__fb_test_main__" attributes["main_module"] = main_module # Add in the PAR build args. if _get_package_style() == "standalone": build_args = ( _get_par_build_args( base_path, name, buck_rule_type, platform, argcomplete = argcomplete, strict_tabs = strict_tabs, compile = compile, par_style = par_style, strip_libpar = strip_libpar, needed_coverage = needed_coverage, python = python, ) ) if build_args: attributes["build_args"] = build_args # Add any special preload deps. default_preload_deps = ( _preload_deps(base_path, name, allocator, jemalloc_conf, visibility) ) out_preload_deps.extend(src_and_dep_helpers.format_deps(default_preload_deps)) # Add user-provided preloaded deps. for dep in preload_deps: out_preload_deps.append(src_and_dep_helpers.convert_build_target(base_path, dep)) # Add the C/C++ build info lib to preload deps. cxx_build_info = cpp_common.cxx_build_info_rule( base_path, name, fbconfig_rule_type, platform, static = False, visibility = visibility, ) out_preload_deps.append(target_utils.target_to_label(cxx_build_info)) # Provide a standard set of backport deps to all binaries platform_deps.extend( src_and_dep_helpers.format_platform_deps( [ target_utils.ThirdPartyRuleTarget("typing", "typing-py"), target_utils.ThirdPartyRuleTarget("python-future", "python-future-py"), ], ), ) # Provide a hook for the nuclide debugger in @mode/dev builds, so # that one can have `PYTHONBREAKPOINT=nuclide.set_trace` in their # environment (eg .bashrc) and then simply write `breakpoint()` # to launch a debugger with no fuss if _get_package_style() == "inplace": dependencies.append("//nuclide:debugger-hook") # Add in a specialized manifest when building inplace binaries. # # TODO(#11765906): We shouldn't need to create this manifest rule for # standalone binaries. However, since target determinator runs in dev # mode, we sometimes pass these manifest targets in the explicit target # list into `opt` builds, which then fails with a missing build target # error. So, for now, just always generate the manifest library, but # only use it when building inplace binaries. manifest_name = _manifest_library( base_path, name, fbconfig_rule_type, main_module, platform, python_platform, visibility, ) if _get_package_style() == "inplace": dependencies.append(":" + manifest_name) buck_cxx_platform = platform_utils.get_buck_platform_for_base_path(base_path) attributes["cxx_platform"] = buck_cxx_platform attributes["platform"] = python_platform attributes["version_universe"] = _get_version_universe(python_version) attributes["linker_flags"] = ( _get_ldflags(base_path, name, fbconfig_rule_type, strip_libpar = strip_libpar) ) attributes["labels"] = list(tags) if is_test: attributes["labels"].extend(label_utils.convert_labels(platform, "python")) attributes["tests"] = tests if args: attributes["args"] = ( string_macros.convert_args_with_macros( base_path, args, platform = platform, ) ) if env: attributes["env"] = ( string_macros.convert_env_with_macros( env, platform = platform, ) ) if emails: attributes["contacts"] = emails if out_preload_deps: attributes["preload_deps"] = out_preload_deps if needed_coverage: attributes["needed_coverage"] = [ _convert_needed_coverage_spec(base_path, s) for s in needed_coverage ] # Generate the interpreter helpers, and add them to our deps. 
Note that # we must do this last, so that the interp rules get the same deps as # the main binary which we've built up to this point. # We also do this based on an attribute so that we don't have to dedupe # rule creation. We'll revisit this in the near future. # TODO: Better way to not generate duplicates if _should_generate_interp_rules(helper_deps): interp_deps = list(dependencies) if is_test: testmodules_library_name = _test_modules_library( base_path, implicit_library_attributes["name"], implicit_library_attributes.get("srcs") or (), implicit_library_attributes.get("base_module"), visibility, generate_test_modules = generate_test_modules, ) interp_deps.append(":" + testmodules_library_name) interp_rules = _interpreter_binaries( name, buck_cxx_platform, python_version, python_platform, interp_deps, platform_deps, out_preload_deps, visibility, ) dependencies.extend([":" + interp_rule for interp_rule in interp_rules]) if check_types: if python_version.major != 3: fail("parameter `check_types` is only supported on Python 3.") typecheck_rule_name = _typecheck_test( name, main_module, buck_cxx_platform, python_platform, python_version, dependencies, platform_deps, out_preload_deps, typing_options, visibility, emails, implicit_library_target, implicit_library_attributes.get("versioned_srcs"), implicit_library_attributes.get("srcs"), implicit_library_attributes.get("resources"), implicit_library_attributes.get("base_module"), ) attributes["tests"] = ( list(attributes["tests"]) + [":" + typecheck_rule_name] ) if analyze_imports: _analyze_import_binary( name, buck_cxx_platform, python_platform, python_version, dependencies, platform_deps, out_preload_deps, visibility, ) if is_test: if not dependencies: dependencies = [] dependencies.append("//python:fbtestmain") if dependencies: attributes["deps"] = dependencies if platform_deps: attributes["platform_deps"] = platform_deps if ( read_bool("fbcode", "monkeytype", False) and python_version.major == 3 ): _monkeytype_binary(buck_rule_type, attributes, implicit_library_attributes["name"]) return attributes def _convert_binary( is_test, fbconfig_rule_type, buck_rule_type, base_path, name, py_version, py_flavor, base_module, main_module, strip_libpar, srcs, versioned_srcs, tags, gen_srcs, deps, tests, par_style, emails, external_deps, needed_coverage, argcomplete, strict_tabs, compile, args, env, python, allocator, check_types, preload_deps, visibility, resources, jemalloc_conf, typing, typing_options, check_types_options, runtime_deps, cpp_deps, helper_deps, analyze_imports, additional_coverage_targets, version_subdirs): """ Generate binary rules and library rules for a python_binary or python_unittest Returns: A list of kwargs for all unittests/binaries that need to be created """ library_attributes = _convert_library( is_test = is_test, is_library = False, base_path = base_path, name = name, base_module = base_module, check_types = check_types, cpp_deps = cpp_deps, deps = deps, external_deps = external_deps, gen_srcs = gen_srcs, py_flavor = py_flavor, resources = resources, runtime_deps = runtime_deps, srcs = srcs, tags = tags, tests = tests, typing = typing, typing_options = typing_options, version_subdirs = version_subdirs, versioned_srcs = versioned_srcs, visibility = visibility, ) # People use -library of unittests fb_native.python_library(**library_attributes) # For binary rules, create a separate library containing the sources. # This will be added as a dep for python binaries and merged in for # python tests. 
if is_list(py_version) and len(py_version) == 1: py_version = py_version[0] if not is_list(py_version): versions = {py_version: name} else: versions = {} platform = platform_utils.get_platform_for_base_path(base_path) for py_ver in py_version: python_version = python_versioning.get_default_version(platform, py_ver) new_name = name + "-" + python_version.version_string versions[py_ver] = new_name # There are some sub-libraries that get generated based on the # name of the original library, not the binary. Make sure they're only # generated once. is_first_binary = True all_binary_attributes = [] for py_ver, py_name in sorted(versions.items()): # Turn off check types for py2 targets when py3 is in versions # so we can have the py3 parts type check without a separate target if ( check_types and python_versioning.constraint_matches_major(py_ver, version = 2) and any([ python_versioning.constraint_matches_major(v, version = 3) for v in versions ]) ): _check_types = False print( base_path + ":" + py_name, "will not be typechecked because it is the python 2 part", ) else: _check_types = check_types binary_attributes = _single_binary_or_unittest( base_path, py_name, implicit_library_target = ":" + library_attributes["name"], implicit_library_attributes = library_attributes, fbconfig_rule_type = fbconfig_rule_type, buck_rule_type = buck_rule_type, is_test = is_test, tests = tests, py_version = py_ver, py_flavor = py_flavor, main_module = main_module, strip_libpar = strip_libpar, tags = tags, par_style = par_style, emails = emails, needed_coverage = needed_coverage, argcomplete = argcomplete, strict_tabs = strict_tabs, compile = compile, args = args, env = env, python = python, allocator = allocator, check_types = _check_types, preload_deps = preload_deps, jemalloc_conf = jemalloc_conf, typing_options = check_types_options, helper_deps = helper_deps, visibility = visibility, analyze_imports = analyze_imports, additional_coverage_targets = additional_coverage_targets, generate_test_modules = is_first_binary, ) is_first_binary = False all_binary_attributes.append(binary_attributes) return all_binary_attributes python_common = struct( convert_binary = _convert_binary, convert_library = _convert_library, )
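One detail worth pulling out of the Starlark above: _file_to_python_module maps a source path plus base_module to a dotted module name. A plain-Python rendering for illustration (an assumption; the original uses Skylib's paths, and posixpath mirrors its always-'/' behavior, matching the original's "sic, not os.sep" comment):

import posixpath

def file_to_python_module(src, base_module):
    # Join under the base module, drop the extension, dot the separators.
    src = posixpath.join(base_module, src)
    src, _ext = posixpath.splitext(src)
    return src.replace("/", ".")

assert file_to_python_module("foo/bar.py", "mylib") == "mylib.foo.bar"
assert file_to_python_module("baz.py", "") == "baz"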
load('@bazel_skylib//lib:paths.bzl', 'paths')
load('@bazel_skylib//lib:shell.bzl', 'shell')
load('@fbcode_macros//build_defs/config:read_configs.bzl', 'read_choice')
load('@fbcode_macros//build_defs/lib:allocators.bzl', 'allocators')
load('@fbcode_macros//build_defs/lib:build_info.bzl', 'build_info')
load('@fbcode_macros//build_defs/lib:cpp_common.bzl', 'cpp_common')
load('@fbcode_macros//build_defs/lib:label_utils.bzl', 'label_utils')
load('@fbcode_macros//build_defs/lib:python_typing.bzl', 'gen_typing_config', 'get_typing_config_target')
load('@fbcode_macros//build_defs/lib:python_versioning.bzl', 'python_versioning')
load('@fbcode_macros//build_defs/lib:src_and_dep_helpers.bzl', 'src_and_dep_helpers')
load('@fbcode_macros//build_defs/lib:string_macros.bzl', 'string_macros')
load('@fbcode_macros//build_defs/lib:target_utils.bzl', 'target_utils')
load('@fbcode_macros//build_defs/lib:third_party.bzl', 'third_party')
load('@fbcode_macros//build_defs:compiler.bzl', 'compiler')
load('@fbcode_macros//build_defs:config.bzl', 'config')
load('@fbcode_macros//build_defs:coverage.bzl', 'coverage')
load('@fbcode_macros//build_defs:platform_utils.bzl', 'platform_utils')
load('@fbcode_macros//build_defs:sanitizers.bzl', 'sanitizers')
load('@fbsource//tools/build_defs:buckconfig.bzl', 'read_bool')
load('@fbsource//tools/build_defs:fb_native_wrapper.bzl', 'fb_native')
load('@fbsource//tools/build_defs:type_defs.bzl', 'is_dict', 'is_list')

_INTERPRETERS = [('interp', 'libfb.py.python_interp', '//libfb/py:python_interp'), ('ipython', 'libfb.py.ipython_interp', '//libfb/py:ipython_interp'), ('vs_debugger', 'libfb.py.vs_debugger', '//libfb/py:vs_debugger')]

_MANIFEST_TEMPLATE = 'import sys\n\n\nclass Manifest(object):\n\n    def __init__(self):\n        self._modules = None\n        self.__file__ = __file__\n        self.__name__ = __name__\n\n    @property\n    def modules(self):\n        if self._modules is None:\n            import os, sys\n            modules = set()\n            for root, dirs, files in os.walk(sys.path[0]):\n                rel_root = os.path.relpath(root, sys.path[0])\n                if rel_root == \'.\':\n                    package_prefix = \'\'\n                else:\n                    package_prefix = rel_root.replace(os.sep, \'.\') + \'.\'\n\n                for name in files:\n                    base, ext = os.path.splitext(name)\n                    # Note that this loop includes all *.so files, regardless\n                    # of whether they are actually python modules or just\n                    # regular dynamic libraries\n                    if ext in (\'.py\', \'.pyc\', \'.pyo\', \'.so\'):\n                        if rel_root == "." and base == "__manifest__":\n                            # The manifest generation logic for normal pars\n                            # does not include the __manifest__ module itself\n                            continue\n                        modules.add(package_prefix + base)\n                # Skip __pycache__ directories\n                try:\n                    dirs.remove("__pycache__")\n                except ValueError:\n                    pass\n            self._modules = sorted(modules)\n        return self._modules\n\n    fbmake = {{\n        {fbmake}\n    }}\n\n\nsys.modules[__name__] = Manifest()\n'

def _get_version_universe(python_version):
    """ Get the version universe for a specific python version

    Args:
        python_version: A `PythonVersion` that the universe should be fetched for

    Returns:
        The first third-party version universe string that corresponds to the python version
    """
    return third_party.get_version_universe([('python', python_version.version_string)])

def _interpreter_binaries(name, buck_cxx_platform, python_version, python_platform, deps, platform_deps, preload_deps, visibility):
    """ Generate rules to build interpreter helpers.
Args: name: The base name for the interpreter rules buck_cxx_platform: The buck-formatted cxx_platform to use for the interpreter binary python_version: A `PythonVersion` struct for the version of python to use python_platform: The python platform to pass to buck deps: The deps to pass to the binary in addition to interpeter deps platform_deps: The platform deps to pass to buck preload_deps: The preload deps to pass to buck visibility: The visibilty of the rule Returns: The list of names of all generated rules """ rule_names = [] for (interp, interp_main_module, interp_dep) in _INTERPRETERS: rule_name = name + '-' + interp fb_native.python_binary(name=rule_name, visibility=visibility, main_module=interp_main_module, cxx_platform=buck_cxx_platform, platform=python_platform, version_universe=_get_version_universe(python_version), deps=[interp_dep] + deps, platform_deps=platform_deps, preload_deps=preload_deps, package_style='inplace') rule_names.append(rule_name) return rule_names def _get_interpreter_for_platform(python_platform): """ Get the interpreter to use for a buck-native python platform """ return native.read_config('python#' + python_platform, 'interpreter') def _get_build_info(base_path, name, fbconfig_rule_type, main_module, fbcode_platform, python_platform): """ Return the build info attributes to install for python rules. Args: base_path: The package for the current build file name: The name of the rule being built fbconfig_rule_type: The name of the main rule being built; used for build_info main_module: The python main module of the binary/test fbcode_platform: The fbcode platform used for the binary/test python_platform: The buck-compatible python_platform that is being used Returns: A dictionary of key/value strings to put into a build manifest """ interpreter = _get_interpreter_for_platform(python_platform) py_build_info = {'build_tool': 'buck', 'main_module': main_module, 'par_style': 'live', 'python_command': interpreter, 'python_home': paths.dirname(paths.dirname(interpreter))} key_mappings = {'package_name': 'package', 'package_version': 'version', 'rule': 'build_rule', 'rule_type': 'build_rule_type'} info = build_info.get_build_info(base_path, name, fbconfig_rule_type, fbcode_platform) for key in build_info.BUILD_INFO_KEYS: py_build_info[key_mappings.get(key, key)] = getattr(info, key) return py_build_info def _manifest_library(base_path, name, fbconfig_rule_type, main_module, fbcode_platform, python_platform, visibility): """ Build the rules that create the `__manifest__` module. Args: base_path: The package of this rule name: The name of the primary rule that was generated fbconfig_rule_type: The name of the main rule being built; used for build_info main_module: The main module of the python binary/test fbcode_platform: The fbcode platform to use in build info python_platform: The buck-compatible python platform to use visibility: The visiblity for the main python_library Returns: The name of a library that contains a __mainfest__.py with build information in it. 
""" build_info = _get_build_info(base_path, name, fbconfig_rule_type, main_module, fbcode_platform, python_platform) fbmake = '\n '.join(['{}: {},'.format(repr(k), repr(v)) for (k, v) in build_info.items()]) manifest = _MANIFEST_TEMPLATE.format(fbmake=fbmake) manifest_name = name + '-manifest' manifest_lib_name = name + '-manifest-lib' fb_native.genrule(name=manifest_name, labels=['generated'], visibility=None, out=name + '-__manifest__.py', cmd='echo -n {} > $OUT'.format(shell.quote(manifest))) fb_native.python_library(name=manifest_lib_name, labels=['generated'], visibility=visibility, base_module='', srcs={'__manifest__.py': ':' + manifest_name}) return manifest_lib_name def _file_to_python_module(src, base_module): """Python implementation of Buck's toModuleName(). Original in com.facebook.buck.python.PythonUtil.toModuleName. """ src = paths.join(base_module, src) (src, _ext) = paths.split_extension(src) return src.replace('/', '.') def _test_modules_library(base_path, library_name, library_srcs, library_base_module, visibility, generate_test_modules): """" Create the rule that generates a __test_modules__.py file for a library Args: base_path: The package for the current build file library_name: The name of the original library that was built library_srcs: The list of srcs (files or labels) that were given to the original library that this test_modules_library is for library_base_module: The base_module of the original library visibility: The visibility for this rule generate_test_modules: Whether to actually materialize the rule. If False, just return the name of the rule Returns: The name of the generated python library that contains __test_modules__.py """ testmodules_library_name = library_name + '-testmodules-lib' if not generate_test_modules: return testmodules_library_name lines = ['TEST_MODULES = ['] for src in sorted(library_srcs): lines.append(' "{}",'.format(_file_to_python_module(src, library_base_module or base_path))) lines.append(']') genrule_name = library_name + '-testmodules' fb_native.genrule(name=genrule_name, visibility=None, out=library_name + '-__test_modules__.py', cmd=' && '.join(['echo {} >> $OUT'.format(shell.quote(line)) for line in lines])) fb_native.python_library(name=testmodules_library_name, visibility=visibility, base_module='', deps=['//python:fbtestmain', ':' + library_name], srcs={'__test_modules__.py': ':' + genrule_name}) return testmodules_library_name def _typecheck_test(name, main_module, buck_cxx_platform, python_platform, python_version, deps, platform_deps, preload_deps, typing_options, visibility, emails, library_target, library_versioned_srcs, library_srcs, library_resources, library_base_module): """ Create a test and associated libraries for running typechecking Args: name: The name of the original binary/test to run typechecks on main_module: The main module of hte binary/test buck_cxx_platform: The buck-formatted cxx_platform to use for the interpreter binary python_version: A `PythonVersion` struct for the version of python to use python_platform: The python platform to pass to buck deps: The deps to pass to the binary in addition to interpeter deps platform_deps: The platform deps to pass to buck preload_deps: The preload deps to pass to buck typing_options: A comma delimited list of strings that configure typing for this binary/library visibility: The visibilty of the rule library_target: The fully qualified target for the original library used in the binary/test. 
This is used to determine whether the following library_* properties are used in the final test rule library_versioned_srcs: The versioned_srcs property from the library used to create the original binary/test. This should be the final value passed to buck: No intermediate representations library_srcs: The srcs property from the library used to create the original binary/test. This should be the final value passed to buck: No intermediate representations library_resources: The resources property from the library used to create the original binary/test. This should be the final value passed to buck: No intermediate representations library_base_module: The base_module property from the library used to create the original binary/test. This should be the final value passed to buck: No intermediate representations Returns: The name of the test library that was created """ typing_config = get_typing_config_target() typecheck_deps = deps[:] if ':python_typecheck-library' not in typecheck_deps: typecheck_deps.append('//libfb/py:python_typecheck-library') if not typing_config: typecheck_deps.append('//python/typeshed_internal:global_mypy_ini') env = {} add_library_attrs = library_target not in typecheck_deps if not add_library_attrs: library_versioned_srcs = None library_srcs = None library_resources = None library_base_module = None if main_module not in ('__fb_test_main__', 'libfb.py.testslide.unittest'): env['PYTHON_TYPECHECK_ENTRY_POINT'] = main_module typing_options_list = [option.strip() for option in typing_options.split(',')] if typing_options else [] use_pyre = typing_options and 'pyre' in typing_options_list if use_pyre: typing_options_list.remove('pyre') typing_options = ','.join(typing_options_list) env['PYRE_ENABLED'] = '1' if typing_config: cmd = '$(exe {}) gather '.format(typing_config) if use_pyre: genrule_name = name + '-typing=pyre.json' genrule_out = 'pyre.json' cmd += '--pyre=True ' else: genrule_name = name + '-typing=mypy.ini' genrule_out = 'mypy.ini' if typing_options: cmd += '--options="{}" '.format(typing_options) cmd += '$(location {}-typing) $OUT'.format(library_target) fb_native.genrule(name=genrule_name, out=genrule_out, cmd=cmd, visibility=visibility) if use_pyre: typing_library_name = name + '-pyre_json' else: typing_library_name = name + '-mypy_ini' fb_native.python_library(name=typing_library_name, visibility=visibility, base_module='', srcs=[':' + genrule_name]) typecheck_deps.append(':' + typing_library_name) typecheck_rule_name = name + '-typecheck' fb_native.python_test(name=typecheck_rule_name, main_module='python_typecheck', cxx_platform=buck_cxx_platform, platform=python_platform, deps=typecheck_deps, platform_deps=platform_deps, preload_deps=preload_deps, package_style='inplace', labels=['buck', 'python'], version_universe=_get_version_universe(python_version), contacts=emails, visibility=visibility, env=env, versioned_srcs=library_versioned_srcs, srcs=library_srcs, resources=library_resources, base_module=library_base_module) return typecheck_rule_name def _monkeytype_binary(rule_type, attributes, library_name): """ Create a python binary/test that enables monkeytype but otherwise looks like another binary/test Args: rule_type: The type of rule to create (python_binary or python_test) attributes: The attributes of the original binary/test that we are enabling monkeytype for. 
These should be final values passed to buck, not intermediaries, as they are copied directly into a library_name: The name of the implicit library created for the binary/test """ name = attributes['name'] visibility = attributes.get('visibility') lib_main_module_attrs_name = None if 'main_module' in attributes: main_module_name = name + '-monkeytype_main_module' script = '#!/usr/bin/env python3\n\n' + 'def monkeytype_main_module() -> str:\n' + " return '{}'\n".format(attributes['main_module']) fb_native.genrule(name=main_module_name, visibility=visibility, out=name + '-__monkeytype_main_module__.py', cmd='echo {} > $OUT'.format(shell.quote(script))) lib_main_module_attrs_name = name + '-monkeytype_main_module-lib' fb_native.python_library(name=lib_main_module_attrs_name, visibility=visibility, base_module='', deps=['//python:fbtestmain', ':' + name], srcs={'__monkeytype_main_module__.py': ':' + main_module_name}) if rule_type == 'python_binary': wrapper_rule_constructor = fb_native.python_binary elif rule_type == 'python_test': wrapper_rule_constructor = fb_native.python_test else: fail('Invalid rule type specified: ' + rule_type) wrapper_attrs = dict(attributes) wrapper_attrs['name'] = name + '-monkeytype' wrapper_attrs['visibility'] = visibility if 'deps' in wrapper_attrs: wrapper_deps = list(wrapper_attrs['deps']) else: wrapper_deps = [] library_target = ':' + library_name if library_target not in wrapper_deps: wrapper_deps.append(library_target) stub_gen_deps = list(wrapper_deps) if '//python/monkeytype:main_wrapper' not in wrapper_deps: wrapper_deps.append('//python/monkeytype/tools:main_wrapper') if lib_main_module_attrs_name != None: wrapper_deps.append(':' + lib_main_module_attrs_name) wrapper_attrs['deps'] = wrapper_deps wrapper_attrs['base_module'] = '' wrapper_attrs['main_module'] = 'python.monkeytype.tools.main_wrapper' wrapper_rule_constructor(**wrapper_attrs) if '//python/monkeytype/tools:stubs_lib' not in wrapper_deps: stub_gen_deps.append('//python/monkeytype/tools:stubs_lib') fb_native.python_binary(name=name + '-monkeytype-gen-stubs', visibility=visibility, main_module='python.monkeytype.tools.get_stub', cxx_platform=attributes['cxx_platform'], platform=attributes['platform'], deps=stub_gen_deps, platform_deps=attributes['platform_deps'], preload_deps=attributes['preload_deps'], package_style='inplace', version_universe=attributes['version_universe']) def _analyze_import_binary(name, buck_cxx_platform, python_platform, python_version, deps, platform_deps, preload_deps, visibility): """ Generate a binary to analyze the imports of a given python library """ generate_imports_deps = list(deps) if ':generate_par_imports' not in generate_imports_deps: generate_imports_deps.append('//libfb/py:generate_par_imports') if ':parutil' not in generate_imports_deps: generate_imports_deps.append('//libfb/py:parutil') version_universe = _get_version_universe(python_version) generate_par_name = name + '-generate-imports' fb_native.python_binary(name=generate_par_name, main_module='libfb.py.generate_par_imports', cxx_platform=buck_cxx_platform, platform=python_platform, deps=generate_imports_deps, platform_deps=platform_deps, preload_deps=preload_deps, labels=['buck', 'python'], version_universe=version_universe, visibility=visibility) genrule_name = name + '-gen-rule' fb_native.genrule(name=genrule_name, srcs=[':' + generate_par_name], out='{}-imports_file.py'.format(name), cmd='$(exe :{}) >"$OUT"'.format(generate_par_name)) lib_name = name + '-analyze-lib' 
    fb_native.python_library(name=lib_name, srcs={'imports_file.py': ':' + genrule_name}, base_module='', deps=[':' + genrule_name])
    analyze_deps = list(deps)
    analyze_deps.append(':' + lib_name)
    if ':analyze_par_imports' not in analyze_deps:
        analyze_deps.append('//libfb/py:analyze_par_imports')
    fb_native.python_binary(name=name + '-analyze-imports', main_module='libfb.py.analyze_par_imports', cxx_platform=buck_cxx_platform, platform=python_platform, deps=analyze_deps, platform_deps=platform_deps, preload_deps=preload_deps, labels=['buck', 'python'], version_universe=version_universe, visibility=visibility)

_GEN_SRCS_LINK = 'https://fburl.com/203312823'

def _parse_srcs(base_path, param, srcs):
    """ Converts `srcs` to a `srcs` dictionary for use in python_* rule

    Fails if a RuleTarget object is passed in, but a source file name cannot be determined

    Args:
        base_path: The package for the rule
        param: The name of the parameter being parsed. Used in error messages
        srcs: Either a dictionary of file/target -> destination in the library,
              or a list of source files or RuleTarget objects that the source
              name can be divined from.

    Returns:
        A mapping of destination filename -> file str / RuleTarget
    """
    if is_dict(srcs) or hasattr(srcs, 'items'):
        out_srcs = src_and_dep_helpers.parse_source_map(base_path, {v: k for (k, v) in srcs.items()})
    else:
        out_srcs = {}
        for src in src_and_dep_helpers.parse_source_list(base_path, srcs):
            if not target_utils.is_rule_target(src):
                out_srcs[src] = src
                continue
            if '=' in src.name:
                name = src.name.rsplit('=', 1)[1]
                out_srcs[name] = src
                continue
            fail(('parameter `{}`: cannot infer a "name" to use for ' + '`{}`. If this is an output from a `custom_rule`, ' + 'consider using the `<rule-name>=<out>` notation instead. ' + 'Otherwise, please specify this parameter as `dict` ' + 'mapping sources to explicit "names" (see {} for details).').format(param, target_utils.target_to_label(src), _GEN_SRCS_LINK))
    return out_srcs

def _parse_gen_srcs(base_path, srcs):
    """ Parse the given sources as input to the `gen_srcs` parameter. """
    out_srcs = _parse_srcs(base_path, 'gen_srcs', srcs)
    for src in out_srcs.values():
        if not target_utils.is_rule_target(src):
            fail(('parameter `gen_srcs`: `{}` must be a reference to rule ' + 'that generates a source (e.g. `//foo:bar`, `:bar`) ' + ' (see {} for details).').format(src, _GEN_SRCS_LINK))
    return out_srcs

def _get_par_build_args(base_path, name, rule_type, platform, argcomplete=None, strict_tabs=None, compile=None, par_style=None, strip_libpar=None, needed_coverage=None, python=None):
    """ Return the arguments we need to pass to the PAR builder wrapper.
""" build_args = [] build_mode = config.get_build_mode() if config.get_use_custom_par_args(): passthrough_args = [] if argcomplete == True: passthrough_args.append('--argcomplete') if strict_tabs == False: passthrough_args.append('--no-strict-tabs') if compile == False: passthrough_args.append('--no-compile') passthrough_args.append('--store-source') elif compile == 'with-source': passthrough_args.append('--store-source') elif compile != True and compile != None: fail(('Invalid value {} for `compile`, must be True, False, ' + '"with-source", or None (default)').format(compile)) if par_style != None: passthrough_args.append('--par-style=' + par_style) if needed_coverage != None or coverage.get_coverage(): passthrough_args.append('--store-source') if build_mode.startswith('opt'): passthrough_args.append('--optimize') mode = build_info.get_build_info_mode(base_path, name) if mode == 'none': fail('Invalid build info mode specified') info = build_info.get_explicit_build_info(base_path, name, mode, rule_type, platform, compiler.get_compiler_for_current_buildfile()) passthrough_args.append('--build-info-build-mode=' + info.build_mode) passthrough_args.append('--build-info-build-tool=buck') if info.package_name != None: passthrough_args.append('--build-info-package-name=' + info.package_name) if info.package_release != None: passthrough_args.append('--build-info-package-release=' + info.package_release) if info.package_version != None: passthrough_args.append('--build-info-package-version=' + info.package_version) passthrough_args.append('--build-info-platform=' + info.platform) passthrough_args.append('--build-info-rule-name=' + info.rule) passthrough_args.append('--build-info-rule-type=' + info.rule_type) build_args.extend(['--passthrough=' + a for a in passthrough_args]) if not build_mode.startswith('dbg'): if strip_libpar == True: build_args.append('--omnibus-debug-info=strip') elif strip_libpar == 'extract': build_args.append('--omnibus-debug-info=extract') else: build_args.append('--omnibus-debug-info=separate') if python != None: build_args.append('--python-override=' + python) return build_args def _associated_targets_library(base_path, name, deps, visibility): """ Associated Targets are buck rules that need to be built, when This target is built, but are not a code dependency. Which is why we wrap them in a cxx_library so they could never be a code dependency TODO: Python just needs the concept of runtime deps if it doesn't have it. Also, what is the actual use case for this? """ rule_name = name + '-build_also' buck_platform = platform_utils.get_buck_platform_for_base_path(base_path) fb_native.cxx_library(name=rule_name, visibility=visibility, deps=deps, default_platform=buck_platform, defaults={'platform': buck_platform}) return rule_name def _jemalloc_malloc_conf_library(base_path, name, malloc_conf, deps, visibility): """ Build a rule which wraps the JEMalloc allocator and links default configuration via the `jemalloc_conf` variable. 
""" buck_platform = platform_utils.get_buck_platform_for_base_path(base_path) jemalloc_config_line = ','.join(['{}:{}'.format(k, v) for (k, v) in sorted(malloc_conf.items())]) src_rule_name = '__{}_jemalloc_conf_src__'.format(name) fb_native.genrule(name=src_rule_name, visibility=visibility, out='jemalloc_conf.c', cmd='echo \'const char* malloc_conf = "{}";\' > "$OUT"'.format(jemalloc_config_line)) (deps, platform_deps) = src_and_dep_helpers.format_all_deps(deps) lib_rule_name = '__{}_jemalloc_conf_lib__'.format(name) fb_native.cxx_library(name=lib_rule_name, visibility=visibility, srcs=[':' + src_rule_name], default_platform=buck_platform, defaults={'platform': buck_platform}, deps=deps, platform_deps=platform_deps) return target_utils.RootRuleTarget(base_path, lib_rule_name) def _convert_needed_coverage_spec(base_path, spec): """ Converts `needed_coverage` from fbcode's spec into the buck native spec Args: base_path: The base path for this rule; used to get fully qualified targets spec: A tuple of (<needed percentage as int>, <target as a string>) Returns: A buck-compatible spec. This is a tuple of two elements if no source name is detected in the target name (with an =) or three elements if it is detected in the form of (<percentage as int>, <full target as string>, <file as string>?) """ if len(spec) != 2: fail(('parameter `needed_coverage`: `{}` must have exactly 2 ' + 'elements, a ratio and a target.').format(spec)) (ratio, target) = spec if '=' not in target: return (ratio, src_and_dep_helpers.convert_build_target(base_path, target)) (target, path) = target.rsplit('=', 1) return (ratio, src_and_dep_helpers.convert_build_target(base_path, target), path) def _should_generate_interp_rules(helper_deps): """ Return whether we should generate the interp helpers. This is controlled by both the mode, the property, and buckconfig settings Args: helper_deps: The value of the `helper_deps` attribute on the users rule. Should be True or False """ if not config.get_build_mode().startswith('dev'): return False config_setting = read_bool('python', 'helpers', required=False) if config_setting == None: return helper_deps return config_setting def _preload_deps(base_path, name, allocator, jemalloc_conf=None, visibility=None): """ Add C/C++ deps which need to preloaded by Python binaries. Returns: A list of additional dependencies (as strings) which should be added to the python binary """ deps = [] sanitizer = sanitizers.get_sanitizer() if sanitizer != None: sanitizer = sanitizers.get_short_name(sanitizer) deps.append(target_utils.RootRuleTarget('tools/build/sanitizers', '{}-py'.format(sanitizer))) deps.append(cpp_common.create_sanitizer_configuration(base_path, name, enable_lsan=False)) if allocator != None and sanitizer == None: allocator_deps = allocators.get_allocator_deps(allocator) if allocator.startswith('jemalloc') and jemalloc_conf != None: conf_dep = _jemalloc_malloc_conf_library(base_path, name, jemalloc_conf, allocator_deps, visibility) allocator_deps = [conf_dep] deps.extend(allocator_deps) return deps def _get_ldflags(base_path, name, fbconfig_rule_type, strip_libpar=True): """ Return ldflags to use when linking omnibus libraries in python binaries. 
""" strip_mode = cpp_common.get_strip_mode(base_path, name) if not config.get_build_mode().startswith('dbg') and (strip_mode != 'none' or strip_libpar == True): strip_mode = 'full' return cpp_common.get_ldflags(base_path, name, fbconfig_rule_type, strip_mode=strip_mode) def _get_package_style(): """ Get the package_style to use for binary rules from the configuration See https://buckbuild.com/rule/python_binary.html#package_style """ return read_choice('python', 'package_style', ('inplace', 'standalone'), 'standalone') def _implicit_python_library(name, is_test_companion, base_module=None, srcs=(), versioned_srcs=(), gen_srcs=(), deps=(), tests=(), tags=(), external_deps=(), visibility=None, resources=(), cpp_deps=(), py_flavor='', version_subdirs=None): """ Creates a python_library and all supporting libraries This library may or may not be consumed as a companion library to a python_binary, or a python_test. The attributes returned vary based on how it will be used. Args: name: The name of this library is_test_companion: Whether this library is being created and consumed directly by a test rule base_module: The basemodule for the library (https://buckbuild.com/rule/python_library.html#base_module) srcs: A sequence of sources/targets to use as srcs. Note that only files ending in .py are considered sources. All other srcs are added as resources. Note if this is a dictionary, the key and value are swapped from the official buck implementation. That is,this rule expects {<src>: <destination in the library>} versioned_srcs: If provided, a list of tuples of (<python version constraint string>, <srcs as above>) These sources are then added to the versioned_srcs attribute in the library gen_srcs: DEPRECATED A list of srcs that come from `custom_rule`s to be merged into the final srcs list. deps: A sequence of dependencies for the library. These should only be python libraries, as python's typing support assumes that dependencies also have a companion -typing rule tests: The targets that test this library tags: Arbitrary metadata to attach to this library. See https://buckbuild.com/rule/python_library.html#labels external_deps: A sequence of tuples of external dependencies visibility: The visibility of the library resources: A sequence of sources/targets that should be explicitly added as resoruces. Note that if a dictionary is used, the key and value are swapped from the official buck implementation. That is, this rule expects {<src>: <destination in the library>} cpp_deps: A sequence of C++ library depenencies that will be loaded at runtime py_flavor: The flavor of python to use. By default ("") this is cpython version_subdirs: A sequence of tuples of (<buck version constring>, <version subdir>). This points to the subdirectory (or "") that each version constraint uses. This helps us rewrite things like versioned_srcs for third-party2 targets. 
Returns: The kwargs to pass to a native.python_library rule """ base_path = native.package_name() attributes = {} attributes['name'] = name parsed_srcs = {} parsed_srcs.update(_parse_srcs(base_path, 'srcs', srcs)) parsed_srcs.update(_parse_gen_srcs(base_path, gen_srcs)) parsed_versioned_srcs = [(python_versioning.python_version_constraint(pvc), _parse_srcs(base_path, 'versioned_srcs', vs)) for (pvc, vs) in versioned_srcs] all_versioned_srcs = [] if third_party.is_tp2(base_path): if version_subdirs == None: fail('`version_subdirs` must be specified on third-party projects') for (constraints, subdir) in version_subdirs: build_srcs = [parsed_srcs] if parsed_versioned_srcs: py_vers = None for (target, constraint_version) in constraints.items(): if target.endswith('/python:__project__'): py_vers = python_versioning.python_version(constraint_version) if py_vers == None: fail('Could not get python version for versioned_srcs') build_srcs.extend([dict(vs) for (vc, vs) in parsed_versioned_srcs if python_versioning.constraint_matches(vc, py_vers, check_minor=True)]) vsrc = {} for build_src in build_srcs: for (name, src) in build_src.items(): if target_utils.is_rule_target(src): vsrc[name] = src else: vsrc[name] = paths.join(subdir, src) all_versioned_srcs.append((constraints, vsrc)) parsed_srcs = {} else: pytarget = third_party.get_tp2_project_target('python') platforms = platform_utils.get_platforms_for_host_architecture() for pyversion in python_versioning.get_all_versions(): if not python_versioning.version_supports_flavor(pyversion, py_flavor): continue ver_srcs = {} if py_flavor: ver_srcs.update(parsed_srcs) for (constraint, pvsrcs) in parsed_versioned_srcs: constraint = python_versioning.normalize_constraint(constraint) if python_versioning.constraint_matches(constraint, pyversion): ver_srcs.update(pvsrcs) if ver_srcs: all_versioned_srcs.append(({target_utils.target_to_label(pytarget, fbcode_platform=p): pyversion.version_string for p in platforms if python_versioning.platform_has_version(p, pyversion)}, ver_srcs)) if py_flavor: parsed_srcs = {} attributes['base_module'] = base_module if parsed_srcs: if is_test_companion: formatted_srcs = src_and_dep_helpers.format_source_map({k: v for (k, v) in parsed_srcs.items() if k.endswith('.py')}) formatted_resources = src_and_dep_helpers.format_source_map({k: v for (k, v) in parsed_srcs.items() if not k.endswith('.py')}) attributes['resources'] = formatted_resources.value attributes['platform_resources'] = formatted_resources.platform_value else: formatted_srcs = src_and_dep_helpers.format_source_map(parsed_srcs) attributes['srcs'] = formatted_srcs.value attributes['platform_srcs'] = formatted_srcs.platform_value out_versioned_srcs = [] out_versioned_resources = [] for (vcollection, ver_srcs) in all_versioned_srcs: out_srcs = {} out_resources = {} non_platform_ver_srcs = src_and_dep_helpers.without_platforms(src_and_dep_helpers.format_source_map(ver_srcs)) for (dst, src) in non_platform_ver_srcs.items(): if dst.endswith('.py') or dst.endswith('.so'): out_srcs[dst] = src else: out_resources[dst] = src out_versioned_srcs.append((vcollection, out_srcs)) out_versioned_resources.append((vcollection, out_resources)) if out_versioned_srcs: attributes['versioned_srcs'] = python_versioning.add_flavored_versions(out_versioned_srcs) if out_versioned_resources: attributes['versioned_resources'] = python_versioning.add_flavored_versions(out_versioned_resources) dependencies = [] if third_party.is_tp2(base_path): 
dependencies.append(target_utils.target_to_label(third_party.get_tp2_project_target(third_party.get_tp2_project_name(base_path)), fbcode_platform=third_party.get_tp2_platform(base_path))) for target in deps: dependencies.append(src_and_dep_helpers.convert_build_target(base_path, target)) if cpp_deps: dependencies.extend(cpp_deps) if dependencies: attributes['deps'] = dependencies attributes['tests'] = tests if visibility != None: attributes['visibility'] = visibility if external_deps: attributes['platform_deps'] = src_and_dep_helpers.format_platform_deps([src_and_dep_helpers.normalize_external_dep(dep, lang_suffix='-py', parse_version=True) for dep in external_deps], deprecated_auxiliary_deps=True) attributes['labels'] = tags attributes.setdefault('resources', {}).update({k: src_and_dep_helpers.format_source(v) for (k, v) in _parse_srcs(base_path, 'resources', resources).items()}) return attributes def _convert_library(is_test, is_library, base_path, name, base_module, check_types, cpp_deps, deps, external_deps, gen_srcs, py_flavor, resources, runtime_deps, srcs, tags, tests, typing, typing_options, version_subdirs, versioned_srcs, visibility): """ Gathers the attributes implicit python_library and creates associated rules This is suitable for usage by either python_binary, python_unittest or python_library. See `implicit_python_library` for more details Returns: Attributes for a native.python_library, """ if is_library: library_name = name else: library_name = name + '-library' if is_library and check_types: fail('parameter `check_types` is not supported for libraries, did you ' + 'mean to specify `typing`?') if get_typing_config_target(): gen_typing_config(library_name, base_module if base_module != None else base_path, srcs, [src_and_dep_helpers.convert_build_target(base_path, dep) for dep in deps], typing or check_types, typing_options, visibility) if runtime_deps: associated_targets_name = _associated_targets_library(base_path, library_name, runtime_deps, visibility) deps = list(deps) + [':' + associated_targets_name] extra_tags = [] if not is_library: extra_tags.append('generated') if is_test: extra_tags.append('unittest-library') return _implicit_python_library(library_name, is_test_companion=is_test, base_module=base_module, srcs=srcs, versioned_srcs=versioned_srcs, gen_srcs=gen_srcs, deps=deps, tests=tests, tags=list(tags) + extra_tags, external_deps=external_deps, visibility=visibility, resources=resources, cpp_deps=cpp_deps, py_flavor=py_flavor, version_subdirs=version_subdirs) def _single_binary_or_unittest(base_path, name, implicit_library_target, implicit_library_attributes, fbconfig_rule_type, buck_rule_type, is_test, tests, py_version, py_flavor, main_module, strip_libpar, tags, par_style, emails, needed_coverage, argcomplete, strict_tabs, compile, args, env, python, allocator, check_types, preload_deps, jemalloc_conf, typing_options, helper_deps, visibility, analyze_imports, additional_coverage_targets, generate_test_modules): if is_test and par_style == None: par_style = 'xar' dependencies = [] platform_deps = [] out_preload_deps = [] platform = platform_utils.get_platform_for_base_path(base_path) python_version = python_versioning.get_default_version(platform=platform, constraint=py_version, flavor=py_flavor) if python_version == None: fail(('Unable to find Python version matching constraint' + "'{}' and flavor '{}' on '{}'.").format(py_version, py_flavor, platform)) python_platform = platform_utils.get_buck_python_platform(platform, major_version=python_version.major, 
flavor=py_flavor) if allocator == None: allocator = allocators.normalize_allocator(allocator) attributes = {} attributes['name'] = name if is_test and additional_coverage_targets: attributes['additional_coverage_targets'] = additional_coverage_targets if visibility != None: attributes['visibility'] = visibility if is_test: for param in ('versioned_srcs', 'srcs', 'resources', 'base_module'): val = implicit_library_attributes.get(param) if val != None: attributes[param] = val dependencies.extend(implicit_library_attributes.get('deps', [])) platform_deps.extend(implicit_library_attributes.get('platform_deps', [])) platform_deps.extend(src_and_dep_helpers.format_platform_deps([target_utils.ThirdPartyRuleTarget('coverage', 'coverage-py')])) else: dependencies.append(':' + implicit_library_attributes['name']) if main_module != None: main_module = main_module.replace('/', '.') if main_module.endswith('.py'): main_module = main_module[:-3] attributes['main_module'] = main_module elif is_test: main_module = '__fb_test_main__' attributes['main_module'] = main_module if _get_package_style() == 'standalone': build_args = _get_par_build_args(base_path, name, buck_rule_type, platform, argcomplete=argcomplete, strict_tabs=strict_tabs, compile=compile, par_style=par_style, strip_libpar=strip_libpar, needed_coverage=needed_coverage, python=python) if build_args: attributes['build_args'] = build_args default_preload_deps = _preload_deps(base_path, name, allocator, jemalloc_conf, visibility) out_preload_deps.extend(src_and_dep_helpers.format_deps(default_preload_deps)) for dep in preload_deps: out_preload_deps.append(src_and_dep_helpers.convert_build_target(base_path, dep)) cxx_build_info = cpp_common.cxx_build_info_rule(base_path, name, fbconfig_rule_type, platform, static=False, visibility=visibility) out_preload_deps.append(target_utils.target_to_label(cxx_build_info)) platform_deps.extend(src_and_dep_helpers.format_platform_deps([target_utils.ThirdPartyRuleTarget('typing', 'typing-py'), target_utils.ThirdPartyRuleTarget('python-future', 'python-future-py')])) if _get_package_style() == 'inplace': dependencies.append('//nuclide:debugger-hook') manifest_name = _manifest_library(base_path, name, fbconfig_rule_type, main_module, platform, python_platform, visibility) if _get_package_style() == 'inplace': dependencies.append(':' + manifest_name) buck_cxx_platform = platform_utils.get_buck_platform_for_base_path(base_path) attributes['cxx_platform'] = buck_cxx_platform attributes['platform'] = python_platform attributes['version_universe'] = _get_version_universe(python_version) attributes['linker_flags'] = _get_ldflags(base_path, name, fbconfig_rule_type, strip_libpar=strip_libpar) attributes['labels'] = list(tags) if is_test: attributes['labels'].extend(label_utils.convert_labels(platform, 'python')) attributes['tests'] = tests if args: attributes['args'] = string_macros.convert_args_with_macros(base_path, args, platform=platform) if env: attributes['env'] = string_macros.convert_env_with_macros(env, platform=platform) if emails: attributes['contacts'] = emails if out_preload_deps: attributes['preload_deps'] = out_preload_deps if needed_coverage: attributes['needed_coverage'] = [_convert_needed_coverage_spec(base_path, s) for s in needed_coverage] if _should_generate_interp_rules(helper_deps): interp_deps = list(dependencies) if is_test: testmodules_library_name = _test_modules_library(base_path, implicit_library_attributes['name'], implicit_library_attributes.get('srcs') or (), 
implicit_library_attributes.get('base_module'), visibility, generate_test_modules=generate_test_modules) interp_deps.append(':' + testmodules_library_name) interp_rules = _interpreter_binaries(name, buck_cxx_platform, python_version, python_platform, interp_deps, platform_deps, out_preload_deps, visibility) dependencies.extend([':' + interp_rule for interp_rule in interp_rules]) if check_types: if python_version.major != 3: fail('parameter `check_types` is only supported on Python 3.') typecheck_rule_name = _typecheck_test(name, main_module, buck_cxx_platform, python_platform, python_version, dependencies, platform_deps, out_preload_deps, typing_options, visibility, emails, implicit_library_target, implicit_library_attributes.get('versioned_srcs'), implicit_library_attributes.get('srcs'), implicit_library_attributes.get('resources'), implicit_library_attributes.get('base_module')) attributes['tests'] = list(attributes['tests']) + [':' + typecheck_rule_name] if analyze_imports: _analyze_import_binary(name, buck_cxx_platform, python_platform, python_version, dependencies, platform_deps, out_preload_deps, visibility) if is_test: if not dependencies: dependencies = [] dependencies.append('//python:fbtestmain') if dependencies: attributes['deps'] = dependencies if platform_deps: attributes['platform_deps'] = platform_deps if read_bool('fbcode', 'monkeytype', False) and python_version.major == 3: _monkeytype_binary(buck_rule_type, attributes, implicit_library_attributes['name']) return attributes def _convert_binary(is_test, fbconfig_rule_type, buck_rule_type, base_path, name, py_version, py_flavor, base_module, main_module, strip_libpar, srcs, versioned_srcs, tags, gen_srcs, deps, tests, par_style, emails, external_deps, needed_coverage, argcomplete, strict_tabs, compile, args, env, python, allocator, check_types, preload_deps, visibility, resources, jemalloc_conf, typing, typing_options, check_types_options, runtime_deps, cpp_deps, helper_deps, analyze_imports, additional_coverage_targets, version_subdirs): """ Generate binary rules and library rules for a python_binary or python_unittest Returns: A list of kwargs for all unittests/binaries that need to be created """ library_attributes = _convert_library(is_test=is_test, is_library=False, base_path=base_path, name=name, base_module=base_module, check_types=check_types, cpp_deps=cpp_deps, deps=deps, external_deps=external_deps, gen_srcs=gen_srcs, py_flavor=py_flavor, resources=resources, runtime_deps=runtime_deps, srcs=srcs, tags=tags, tests=tests, typing=typing, typing_options=typing_options, version_subdirs=version_subdirs, versioned_srcs=versioned_srcs, visibility=visibility) fb_native.python_library(**library_attributes) if is_list(py_version) and len(py_version) == 1: py_version = py_version[0] if not is_list(py_version): versions = {py_version: name} else: versions = {} platform = platform_utils.get_platform_for_base_path(base_path) for py_ver in py_version: python_version = python_versioning.get_default_version(platform, py_ver) new_name = name + '-' + python_version.version_string versions[py_ver] = new_name is_first_binary = True all_binary_attributes = [] for (py_ver, py_name) in sorted(versions.items()): if check_types and python_versioning.constraint_matches_major(py_ver, version=2) and any([python_versioning.constraint_matches_major(v, version=3) for v in versions]): _check_types = False print(base_path + ':' + py_name, 'will not be typechecked because it is the python 2 part') else: _check_types = check_types binary_attributes = 
_single_binary_or_unittest(base_path, py_name, implicit_library_target=':' + library_attributes['name'], implicit_library_attributes=library_attributes, fbconfig_rule_type=fbconfig_rule_type, buck_rule_type=buck_rule_type, is_test=is_test, tests=tests, py_version=py_ver, py_flavor=py_flavor, main_module=main_module, strip_libpar=strip_libpar, tags=tags, par_style=par_style, emails=emails, needed_coverage=needed_coverage, argcomplete=argcomplete, strict_tabs=strict_tabs, compile=compile, args=args, env=env, python=python, allocator=allocator, check_types=_check_types, preload_deps=preload_deps, jemalloc_conf=jemalloc_conf, typing_options=check_types_options, helper_deps=helper_deps, visibility=visibility, analyze_imports=analyze_imports, additional_coverage_targets=additional_coverage_targets, generate_test_modules=is_first_binary) is_first_binary = False all_binary_attributes.append(binary_attributes) return all_binary_attributes python_common = struct(convert_binary=_convert_binary, convert_library=_convert_library)
'''
Given a list of non negative integers, arrange them such that they form the largest number.

Example 1:
Input: [10,2]
Output: "210"

Example 2:
Input: [3,30,34,5,9]
Output: "9534330"

Note: The result may be very large, so you need to return a string instead of an integer.
'''

class Solution(object):
    def largestNumber(self, nums):
        """
        :type nums: List[int]
        :rtype: str
        """
        for i in xrange(len(nums)):
            for j in xrange(i + 1, len(nums)):
                if str(nums[i]) + str(nums[j]) < str(nums[j]) + str(nums[i]):
                    nums[i], nums[j] = nums[j], nums[i]
        res = [str(x) for x in nums]
        while len(res) > 1 and res[0] == '0':
            res.pop(0)
        return ''.join(res)
""" Given a list of non negative integers, arrange them such that they form the largest number. Example 1: Input: [10,2] Output: "210" Example 2: Input: [3,30,34,5,9] Output: "9534330" Note: The result may be very large, so you need to return a string instead of an integer. """ class Solution(object): def largest_number(self, nums): """ :type nums: List[int] :rtype: str """ for i in xrange(len(nums)): for j in xrange(i + 1, len(nums)): if str(nums[i]) + str(nums[j]) < str(nums[j]) + str(nums[i]): (nums[i], nums[j]) = (nums[j], nums[i]) res = [str(x) for x in nums] while len(res) > 1 and res[0] == '0': res.pop(0) return ''.join(res)
from typing import List

class Solution:
    def findAndReplacePattern(self, words: List[str], pattern: str) -> List[str]:
        result = []
        if not words or not pattern:
            return result
        for word in words:
            mapping = {}
            isMapped = True
            for i, c in enumerate(word):
                p = pattern[i]
                if p in mapping and mapping[p] != c:
                    isMapped = False
                    break
                mapping[p] = c
            values = mapping.values()
            if len(values) != len(set(values)):
                isMapped = False
            if isMapped:
                result.append(word)
        return result
from typing import List

class Solution:
    def find_and_replace_pattern(self, words: List[str], pattern: str) -> List[str]:
        result = []
        if not words or not pattern:
            return result
        for word in words:
            mapping = {}
            is_mapped = True
            for (i, c) in enumerate(word):
                p = pattern[i]
                if p in mapping and mapping[p] != c:
                    is_mapped = False
                    break
                mapping[p] = c
            values = mapping.values()
            if len(values) != len(set(values)):
                is_mapped = False
            if is_mapped:
                result.append(word)
        return result
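# Quick check of the matcher above with the classic example for this problem:
# only words whose letters map one-to-one onto the pattern are kept.
words = ['abc', 'deq', 'mee', 'aqq', 'dkd', 'ccc']
print(Solution().find_and_replace_pattern(words, 'abb'))  # ['mee', 'aqq']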
CONSUMER_KEY = 'WVQrIJcorH11hQoP6mHKvXIZJ'
CONSUMER_SECRET = 'Ui3V1dEsa5owJnhu3nLNyqdz2hFf6HmvICPObiShmkzBszKnah'
ACCESS_TOKEN = '218405160-0iabe9XqpwAJ4z4BYsaXwH3ydKpFZhnzj5xpHxpI'
ACCESS_SECRET = 'PdPNfcgkc5x7TO54cxVjGOjSrqY2jbcaayV46ys9IkLj3'
consumer_key = 'WVQrIJcorH11hQoP6mHKvXIZJ'
consumer_secret = 'Ui3V1dEsa5owJnhu3nLNyqdz2hFf6HmvICPObiShmkzBszKnah'
access_token = '218405160-0iabe9XqpwAJ4z4BYsaXwH3ydKpFZhnzj5xpHxpI'
access_secret = 'PdPNfcgkc5x7TO54cxVjGOjSrqY2jbcaayV46ys9IkLj3'
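# Hedged sketch (not from the original file): credentials like the four above
# are typically fed to the tweepy library's OAuth 1.0a flow roughly as below.
# Checked-in secrets should be treated as compromised and rotated.
import tweepy

auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_secret)
api = tweepy.API(auth)
print(api.verify_credentials().screen_name)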
# `var`, `func`, and `Data` come from p4's namespace when this script is run
# through the p4 phylogenetics interpreter; os is imported for getpid() below.
import os

var.nexus_allowAllDigitNames = True  # put it somewhere else
var.doCheckForDuplicateSequences = False

t = var.trees[0]
a = var.alignments[0]
t.data = Data()
t.model.dump()

print('\nAfter optimizing, the composition of the model for the non-root nodes is:')
print(t.model.parts[0].comps[0].val)
print('...and:')
print(t.model.parts[0].comps[1].val)
print('and ...')
print(t.model.parts[0].comps[2].val)
print('and root comp...')
print(t.model.parts[0].comps[3].val)

t.write()
t.draw()
print(dir(t.model.parts[0]))
print(t.model.parts[0].ndch2_writeComps)

func.reseedCRandomizer(os.getpid())
print(t.model.parts[0])

# The char "symbols", AAs in this case, are available as a.symbols; that is why
# I gave a name to var.alignments[0]. Also available as
# d.parts[partNum].symbols, so d.parts[0].symbols are also 'arndcqeghilkmfpstwyv'
print(a.symbols)

counts = [0] * 2
for rep in range(1000):
    ancSt = t.ancestralStateDraw()
    for i in range(2):
        ch = a.symbols[i]  # '01'
        cnt = ancSt.count(ch)
        counts[i] += cnt
mySum = float(sum(counts))

print("\nsymbol optimized draws")
for i in range(2):
    print(" %s %.5f %.4f" % (a.symbols[i], t.model.parts[0].comps[2].val[i], counts[i] / mySum))

# calculate predicted OGT according to Zeldovich
for i in range(4):
    print("For composition " + str(i))
    print(t.model.parts[0].comps[i].nNodes)
    f_ivywrel = t.model.parts[0].comps[i].val[1]
    print("F(IVYWREL) = " + str(f_ivywrel))
    print("T_opt estimate according to Zeldovich: " + str(937.0 * float(f_ivywrel) - 335.0))
# `var`, `func`, and `Data` come from p4's namespace when this script is run
# through the p4 phylogenetics interpreter; os is imported for getpid() below.
import os

var.nexus_allowAllDigitNames = True
var.doCheckForDuplicateSequences = False
t = var.trees[0]
a = var.alignments[0]
t.data = Data()
t.model.dump()
print('\nAfter optimizing, the composition of the model for the non-root nodes is:')
print(t.model.parts[0].comps[0].val)
print('...and:')
print(t.model.parts[0].comps[1].val)
print('and ...')
print(t.model.parts[0].comps[2].val)
print('and root comp...')
print(t.model.parts[0].comps[3].val)
t.write()
t.draw()
print(dir(t.model.parts[0]))
print(t.model.parts[0].ndch2_writeComps)
func.reseedCRandomizer(os.getpid())
print(t.model.parts[0])
print(a.symbols)
counts = [0] * 2
for rep in range(1000):
    anc_st = t.ancestralStateDraw()
    for i in range(2):
        ch = a.symbols[i]
        cnt = anc_st.count(ch)
        counts[i] += cnt
my_sum = float(sum(counts))
print('\nsymbol optimized draws')
for i in range(2):
    print(' %s %.5f %.4f' % (a.symbols[i], t.model.parts[0].comps[2].val[i], counts[i] / my_sum))
for i in range(4):
    print('For composition ' + str(i))
    print(t.model.parts[0].comps[i].nNodes)
    f_ivywrel = t.model.parts[0].comps[i].val[1]
    print('F(IVYWREL) = ' + str(f_ivywrel))
    print('T_opt estimate according to Zeldovich: ' + str(937.0 * float(f_ivywrel) - 335.0))
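# Quick arithmetic check of the Zeldovich regression used above (the 0.40
# IVYWREL fraction is an illustrative value, not a result from this analysis):
f_example = 0.40
print('T_opt for F(IVYWREL)=0.40: ' + str(937.0 * f_example - 335.0))  # ~39.8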
""" A very basic study on variables, their types and some operators """ # defining variables and values integer_value, floatValue, boolean_value = 36, 5.3, True adition = integer_value + floatValue division = integer_value / floatValue exponention1 = 3 ** 3 exponention2 = 27 ** (1 / 3) floor = integer_value // floatValue modulo = 50 % 9 string1 = 'I never liked the song "Nothing Else Matters" by Metallica.' string2 = "I've always loved the song \"Until It Sleeps\", also by Metallica." quote = string1[23] substring1 = string2[:17] substring2 = string1[24:44] substring3 = string2[51:] # printing variables print(integer_value, floatValue, boolean_value) print(adition, division, exponention1, exponention2, floor, modulo) print(string1, string2) print(substring1, quote ,substring2, quote, substring3) print(len(string1), str(floatValue)[1], string1.lower(), string2.upper()) print(type(string1), type(integer_value), type(floatValue)) print(id(string1), id(integer_value), id(floatValue)) # input values from user name = input('What is your name? ') age = input('How old are you? ') occupation = input('What do yo do for living? ') print('User %s is %s years old and works like a %s.' %(name, age, occupation)) # older = age * 1.5 - this is WRONG # print('older =', older)
""" A very basic study on variables, their types and some operators """ (integer_value, float_value, boolean_value) = (36, 5.3, True) adition = integer_value + floatValue division = integer_value / floatValue exponention1 = 3 ** 3 exponention2 = 27 ** (1 / 3) floor = integer_value // floatValue modulo = 50 % 9 string1 = 'I never liked the song "Nothing Else Matters" by Metallica.' string2 = 'I\'ve always loved the song "Until It Sleeps", also by Metallica.' quote = string1[23] substring1 = string2[:17] substring2 = string1[24:44] substring3 = string2[51:] print(integer_value, floatValue, boolean_value) print(adition, division, exponention1, exponention2, floor, modulo) print(string1, string2) print(substring1, quote, substring2, quote, substring3) print(len(string1), str(floatValue)[1], string1.lower(), string2.upper()) print(type(string1), type(integer_value), type(floatValue)) print(id(string1), id(integer_value), id(floatValue)) name = input('What is your name? ') age = input('How old are you? ') occupation = input('What do yo do for living? ') print('User %s is %s years old and works like a %s.' % (name, age, occupation))
#
# PySNMP MIB module HUAWEI-DATASYNC-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HUAWEI-DATASYNC-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:43:58 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection")
hwDatacomm, = mibBuilder.importSymbols("HUAWEI-MIB", "hwDatacomm")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
Integer32, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, Unsigned32, Counter64, MibIdentifier, NotificationType, Gauge32, IpAddress, ModuleIdentity, iso, Bits, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "Unsigned32", "Counter64", "MibIdentifier", "NotificationType", "Gauge32", "IpAddress", "ModuleIdentity", "iso", "Bits", "ObjectIdentity")
RowStatus, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "TextualConvention", "DisplayString")
hwDataSync = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191))
hwDataSync.setRevisions(('2015-07-16 13:49', '2014-09-04 17:10', '2009-03-17 10:27',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: hwDataSync.setRevisionsDescriptions(('Add hwCfgLastSaveFailNotify.', 'The MIB module for data sync between host and netmanager.', 'The initial revision of this MIB module.',))
if mibBuilder.loadTexts: hwDataSync.setLastUpdated('201507161349Z')
if mibBuilder.loadTexts: hwDataSync.setOrganization('Huawei Technologies Co., Ltd.')
if mibBuilder.loadTexts: hwDataSync.setContactInfo("Huawei Industrial Base Bantian, Longgang Shenzhen 518129 People's Republic of China Website: http://www.huawei.com Email: support@huawei.com ")
if mibBuilder.loadTexts: hwDataSync.setDescription('Modified hwCfgChgTerminalID.')
class DateAndTime(TextualConvention, OctetString):
    description = "A date-time specification. field octets contents range ----- ------ -------- ----- 1 1-2 year* 0..65536 2 3 month 1..12 3 4 day 1..31 4 5 hour 0..23 5 6 minutes 0..59 6 7 seconds 0..60 (use 60 for leap-second) 7 8 deci-seconds 0..9 8 9 direction from UTC '+' / '-' 9 10 hours from UTC* 0..13 10 11 minutes from UTC 0..59 * Notes: - the value of year is in network-byte order - daylight saving time in New Zealand is +13 For example, Tuesday May 26, 1992 at 1:30:15 PM EDT would be displayed as: 1992-5-26,13:30:15.0,-4:0 Note that if only local time is known, then timezone information (fields 8-10) is not present."
    status = 'current'
    displayHint = '2d-1d-1d,1d:1d:1d.1d,1a1d:1d'
    subtypeSpec = OctetString.subtypeSpec + ConstraintsUnion(ValueSizeConstraint(8, 8), ValueSizeConstraint(11, 11))
hwDataSyncScalarObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1))
hwDataSyncTableObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2))
hwDataSyncNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 3))
hwDataSyncConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4))
hwCurrentCfgChgSeqID = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCurrentCfgChgSeqID.setStatus('current')
if mibBuilder.loadTexts: hwCurrentCfgChgSeqID.setDescription('The value of this object identifies the ID of the current configuration change. The value ranges from 0 to 65535. After the ID of the configuration change reaches the maximum value, the value of the ID starts from 1 again. After the device is restarted, the value of the ID becomes 0.')
hwCfgChgSeqIDReveralCount = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCfgChgSeqIDReveralCount.setStatus('current')
if mibBuilder.loadTexts: hwCfgChgSeqIDReveralCount.setDescription('The value of this object identifies the cycle count of the index of the configuration change table.')
hwCfgChgTableMaxItem = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCfgChgTableMaxItem.setStatus('current')
if mibBuilder.loadTexts: hwCfgChgTableMaxItem.setDescription('The value of this object identifies the maximum number of entries in hwCfgChgTable.')
hwCfgBaselineTime = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 20))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCfgBaselineTime.setStatus('current')
if mibBuilder.loadTexts: hwCfgBaselineTime.setDescription('Specifies the time at which the system configuration was baselined.')
hwDataSyncGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 1))
hwDataSyncScalarObjectsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 1, 1)).setObjects(("HUAWEI-DATASYNC-MIB", "hwCurrentCfgChgSeqID"), ("HUAWEI-DATASYNC-MIB", "hwCfgChgSeqIDReveralCount"), ("HUAWEI-DATASYNC-MIB", "hwCfgChgTableMaxItem"), ("HUAWEI-DATASYNC-MIB", "hwCfgBaselineTime"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwDataSyncScalarObjectsGroup = hwDataSyncScalarObjectsGroup.setStatus('current')
if mibBuilder.loadTexts: hwDataSyncScalarObjectsGroup.setDescription('A collection of objects on DataSync ScalarObjects Information.')
hwCfgChgNotifyGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 1, 2)).setObjects(("HUAWEI-DATASYNC-MIB", "hwCfgChgNotify"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwCfgChgNotifyGroup = hwCfgChgNotifyGroup.setStatus('current')
if mibBuilder.loadTexts: hwCfgChgNotifyGroup.setDescription('A collection of objects on Configuration Change Information.')
hwDataSyncNotifyGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 1, 3)).setObjects(("HUAWEI-DATASYNC-MIB", "hwCfgLastSaveFailNotify"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwDataSyncNotifyGroup = hwDataSyncNotifyGroup.setStatus('current')
if mibBuilder.loadTexts: hwDataSyncNotifyGroup.setDescription('A collection of objects on synchronization Configuration Notify Information.')
hwDataSyncCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 2))
hwDataSyncCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 2, 1)).setObjects()
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwDataSyncCompliance = hwDataSyncCompliance.setStatus('current')
if mibBuilder.loadTexts: hwDataSyncCompliance.setDescription('The compliance statement for entities that support the Huawei DataSync MIB.')
hwCfgChgTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1))
if mibBuilder.loadTexts: hwCfgChgTable.setStatus('current')
if mibBuilder.loadTexts: hwCfgChgTable.setDescription('This table is used to record configuration changes. In this table, you can find the configuration change based on the specific index.')
hwCfgChgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1)).setIndexNames((0, "HUAWEI-DATASYNC-MIB", "hwCfgChgSeqID"))
if mibBuilder.loadTexts: hwCfgChgEntry.setStatus('current')
if mibBuilder.loadTexts: hwCfgChgEntry.setDescription('Entry of hwCfgChgTable.')
hwCfgChgSeqID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCfgChgSeqID.setStatus('current')
if mibBuilder.loadTexts: hwCfgChgSeqID.setDescription('The value of this object identifies the configuration change ID. When the configuration is changed, the sequence ID increases by 1.')
hwCfgChgTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 2), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCfgChgTime.setStatus('current')
if mibBuilder.loadTexts: hwCfgChgTime.setDescription('This object indicates the configuration change time.')
hwCfgChgTerminalType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("snmp", 1), ("telnet", 2), ("netconf", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCfgChgTerminalType.setStatus('current')
if mibBuilder.loadTexts: hwCfgChgTerminalType.setDescription('This object indicates the type of the terminal.')
hwCfgChgTerminalID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCfgChgTerminalID.setStatus('current')
if mibBuilder.loadTexts: hwCfgChgTerminalID.setDescription('The value of this object identifies the terminal ID.')
hwCfgChgType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("create", 1), ("modify", 2), ("delete", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCfgChgType.setStatus('current')
if mibBuilder.loadTexts: hwCfgChgType.setDescription('This object indicates the configuration change type.')
hwCfgChgViewName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 6), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCfgChgViewName.setStatus('current')
if mibBuilder.loadTexts: hwCfgChgViewName.setDescription('This object indicates the name of the view in which the configuration change occurs. For the command operation, the object is the name of the view in which the command is run. For the SNMP operation, the object is the OID of the MIB table or the scalar object.')
hwCfgChgCmdID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCfgChgCmdID.setStatus('current')
if mibBuilder.loadTexts: hwCfgChgCmdID.setDescription('The value of this object identifies the ID of the configuration change command. For the SNMP operation, the value is 0.')
hwCfgChgDetailInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 8), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCfgChgDetailInfo.setStatus('current')
if mibBuilder.loadTexts: hwCfgChgDetailInfo.setDescription('This object indicates detailed configuration change information. For the command operation, the object is the command line. For the SNMP operation, the object is the index of the MIB table. When there are multiple indexes, the format index1.index2.index3 is adopted.')
hwCollectTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2))
if mibBuilder.loadTexts: hwCollectTable.setStatus('current')
if mibBuilder.loadTexts: hwCollectTable.setDescription('This table is used to enable the NMS to send the collecting script to the device to trigger the collection, and then monitor the collection status.')
hwCollectEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1)).setIndexNames((0, "HUAWEI-DATASYNC-MIB", "hwCollectIndex"))
if mibBuilder.loadTexts: hwCollectEntry.setStatus('current')
if mibBuilder.loadTexts: hwCollectEntry.setDescription('Entry of hwCollectTable.')
hwCollectIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: hwCollectIndex.setStatus('current')
if mibBuilder.loadTexts: hwCollectIndex.setDescription('The value of this object identifies the collection index.')
hwCollectNetManageId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwCollectNetManageId.setStatus('current')
if mibBuilder.loadTexts: hwCollectNetManageId.setDescription('The value of this object identifies the NMS ID.')
hwCollectOperation = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("begin", 1), ("stop", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwCollectOperation.setStatus('current')
if mibBuilder.loadTexts: hwCollectOperation.setDescription('This object indicates the instruction for the collection operation. The default value is stop.')
hwCollectInScriptFile = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwCollectInScriptFile.setStatus('current')
if mibBuilder.loadTexts: hwCollectInScriptFile.setDescription('This object indicates the name of the script file. The length of the file name ranges from 1 character to 255 characters.')
hwCollectInResultFile = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwCollectInResultFile.setStatus('current')
if mibBuilder.loadTexts: hwCollectInResultFile.setDescription('This object indicates the name of the result file. The length of the file name ranges from 1 character to 255 characters.')
hwCollectState = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("idle", 1), ("collecting", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCollectState.setStatus('current')
if mibBuilder.loadTexts: hwCollectState.setDescription('This object indicates the collection status.')
hwCollectRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwCollectRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwCollectRowStatus.setDescription('This object indicates the row status.')
hwCfgChgNotify = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 3, 1)).setObjects(("HUAWEI-DATASYNC-MIB", "hwCurrentCfgChgSeqID"), ("HUAWEI-DATASYNC-MIB", "hwCfgChgSeqIDReveralCount"), ("HUAWEI-DATASYNC-MIB", "hwCfgChgTableMaxItem"), ("HUAWEI-DATASYNC-MIB", "hwCfgBaselineTime"))
if mibBuilder.loadTexts: hwCfgChgNotify.setStatus('current')
if mibBuilder.loadTexts: hwCfgChgNotify.setDescription('This trap is generated when a configuration change occurs on the device within a specified period.')
hwCfgLastSaveFailNotify = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 3, 2))
if mibBuilder.loadTexts: hwCfgLastSaveFailNotify.setStatus('current')
if mibBuilder.loadTexts: hwCfgLastSaveFailNotify.setDescription('The last save operation failed; please check the configuration.')
mibBuilder.exportSymbols("HUAWEI-DATASYNC-MIB", hwDataSyncNotifications=hwDataSyncNotifications, hwCfgChgNotifyGroup=hwCfgChgNotifyGroup, DateAndTime=DateAndTime, hwDataSync=hwDataSync, hwCollectIndex=hwCollectIndex, hwDataSyncScalarObjectsGroup=hwDataSyncScalarObjectsGroup, hwCfgChgSeqID=hwCfgChgSeqID, hwCollectState=hwCollectState, hwCfgBaselineTime=hwCfgBaselineTime, hwCfgChgViewName=hwCfgChgViewName, hwDataSyncScalarObjects=hwDataSyncScalarObjects, hwCfgChgType=hwCfgChgType, hwCfgChgCmdID=hwCfgChgCmdID, hwCfgChgEntry=hwCfgChgEntry, hwCollectTable=hwCollectTable, hwDataSyncConformance=hwDataSyncConformance, hwCfgChgTerminalID=hwCfgChgTerminalID, hwCurrentCfgChgSeqID=hwCurrentCfgChgSeqID, hwCollectEntry=hwCollectEntry, hwCollectNetManageId=hwCollectNetManageId, hwCfgChgDetailInfo=hwCfgChgDetailInfo, hwCollectInScriptFile=hwCollectInScriptFile, hwCfgChgTable=hwCfgChgTable, hwCollectInResultFile=hwCollectInResultFile, PYSNMP_MODULE_ID=hwDataSync, hwDataSyncNotifyGroup=hwDataSyncNotifyGroup, hwDataSyncTableObjects=hwDataSyncTableObjects, hwCfgChgNotify=hwCfgChgNotify, hwCollectOperation=hwCollectOperation, hwCfgChgSeqIDReveralCount=hwCfgChgSeqIDReveralCount, hwCfgLastSaveFailNotify=hwCfgLastSaveFailNotify, hwCfgChgTableMaxItem=hwCfgChgTableMaxItem, hwDataSyncCompliance=hwDataSyncCompliance, hwDataSyncCompliances=hwDataSyncCompliances, hwCfgChgTime=hwCfgChgTime, hwCfgChgTerminalType=hwCfgChgTerminalType, hwDataSyncGroups=hwDataSyncGroups, hwCollectRowStatus=hwCollectRowStatus)
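# A minimal sketch (not part of the generated module) of how an NMS might read
# hwCurrentCfgChgSeqID from a device with the pysnmp 4.x high-level API. The
# agent address '192.0.2.1' and community string 'public' are placeholder
# assumptions; '.0' is the usual instance suffix for a scalar object.
from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                          ContextData, ObjectType, ObjectIdentity, getCmd)

errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
    SnmpEngine(), CommunityData('public'),
    UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
    # hwCurrentCfgChgSeqID.0 lives under 1.3.6.1.4.1.2011.5.25.191.1.1
    ObjectType(ObjectIdentity('1.3.6.1.4.1.2011.5.25.191.1.1.0'))))
if not errorIndication and not errorStatus:
    for varBind in varBinds:
        print(varBind)  # prints the current configuration change sequence ID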
(octet_string, object_identifier, integer) = mibBuilder.importSymbols('ASN1', 'OctetString', 'ObjectIdentifier', 'Integer')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(single_value_constraint, constraints_union, value_size_constraint, value_range_constraint, constraints_intersection) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ConstraintsUnion', 'ValueSizeConstraint', 'ValueRangeConstraint', 'ConstraintsIntersection')
(hw_datacomm,) = mibBuilder.importSymbols('HUAWEI-MIB', 'hwDatacomm')
(notification_group, module_compliance, object_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance', 'ObjectGroup')
(integer32, time_ticks, mib_scalar, mib_table, mib_table_row, mib_table_column, counter32, unsigned32, counter64, mib_identifier, notification_type, gauge32, ip_address, module_identity, iso, bits, object_identity) = mibBuilder.importSymbols('SNMPv2-SMI', 'Integer32', 'TimeTicks', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Counter32', 'Unsigned32', 'Counter64', 'MibIdentifier', 'NotificationType', 'Gauge32', 'IpAddress', 'ModuleIdentity', 'iso', 'Bits', 'ObjectIdentity')
(row_status, textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'RowStatus', 'TextualConvention', 'DisplayString')
hw_data_sync = module_identity((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191))
hw_data_sync.setRevisions(('2015-07-16 13:49', '2014-09-04 17:10', '2009-03-17 10:27'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: hw_data_sync.setRevisionsDescriptions(('Add hwCfgLastSaveFailNotify.', 'The MIB module for data sync between host and netmanager.', 'The initial revision of this MIB module.'))
if mibBuilder.loadTexts: hw_data_sync.setLastUpdated('201507161349Z')
if mibBuilder.loadTexts: hw_data_sync.setOrganization('Huawei Technologies Co., Ltd.')
if mibBuilder.loadTexts: hw_data_sync.setContactInfo("Huawei Industrial Base Bantian, Longgang Shenzhen 518129 People's Republic of China Website: http://www.huawei.com Email: support@huawei.com ")
if mibBuilder.loadTexts: hw_data_sync.setDescription('Modified hwCfgChgTerminalID.')
class DateAndTime(textual_convention, octet_string):
    description = "A date-time specification. field octets contents range ----- ------ -------- ----- 1 1-2 year* 0..65536 2 3 month 1..12 3 4 day 1..31 4 5 hour 0..23 5 6 minutes 0..59 6 7 seconds 0..60 (use 60 for leap-second) 7 8 deci-seconds 0..9 8 9 direction from UTC '+' / '-' 9 10 hours from UTC* 0..13 10 11 minutes from UTC 0..59 * Notes: - the value of year is in network-byte order - daylight saving time in New Zealand is +13 For example, Tuesday May 26, 1992 at 1:30:15 PM EDT would be displayed as: 1992-5-26,13:30:15.0,-4:0 Note that if only local time is known, then timezone information (fields 8-10) is not present."
    status = 'current'
    displayHint = '2d-1d-1d,1d:1d:1d.1d,1a1d:1d'
    subtypeSpec = octet_string.subtypeSpec + constraints_union(value_size_constraint(8, 8), value_size_constraint(11, 11))
hw_data_sync_scalar_objects = mib_identifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1))
hw_data_sync_table_objects = mib_identifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2))
hw_data_sync_notifications = mib_identifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 3))
hw_data_sync_conformance = mib_identifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4))
hw_current_cfg_chg_seq_id = mib_scalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1, 1), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: hw_current_cfg_chg_seq_id.setStatus('current')
if mibBuilder.loadTexts: hw_current_cfg_chg_seq_id.setDescription('The value of this object identifies the ID of the current configuration change. The value ranges from 0 to 65535. After the ID of the configuration change reaches the maximum value, the value of the ID starts from 1 again. After the device is restarted, the value of the ID becomes 0.')
hw_cfg_chg_seq_id_reveral_count = mib_scalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1, 2), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: hw_cfg_chg_seq_id_reveral_count.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_chg_seq_id_reveral_count.setDescription('The value of this object identifies the cycle count of the index of the configuration change table.')
hw_cfg_chg_table_max_item = mib_scalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1, 3), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: hw_cfg_chg_table_max_item.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_chg_table_max_item.setDescription('The value of this object identifies the maximum number of entries in hwCfgChgTable.')
hw_cfg_baseline_time = mib_scalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 1, 4), display_string().subtype(subtypeSpec=value_size_constraint(0, 20))).setMaxAccess('readonly')
if mibBuilder.loadTexts: hw_cfg_baseline_time.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_baseline_time.setDescription('Specifies the time at which the system configuration was baselined.')
hw_data_sync_groups = mib_identifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 1))
hw_data_sync_scalar_objects_group = object_group((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 1, 1)).setObjects(('HUAWEI-DATASYNC-MIB', 'hwCurrentCfgChgSeqID'), ('HUAWEI-DATASYNC-MIB', 'hwCfgChgSeqIDReveralCount'), ('HUAWEI-DATASYNC-MIB', 'hwCfgChgTableMaxItem'), ('HUAWEI-DATASYNC-MIB', 'hwCfgBaselineTime'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hw_data_sync_scalar_objects_group = hw_data_sync_scalar_objects_group.setStatus('current')
if mibBuilder.loadTexts: hw_data_sync_scalar_objects_group.setDescription('A collection of objects on DataSync ScalarObjects Information.')
hw_cfg_chg_notify_group = notification_group((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 1, 2)).setObjects(('HUAWEI-DATASYNC-MIB', 'hwCfgChgNotify'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hw_cfg_chg_notify_group = hw_cfg_chg_notify_group.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_chg_notify_group.setDescription('A collection of objects on Configuration Change Information.')
hw_data_sync_notify_group = notification_group((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 1, 3)).setObjects(('HUAWEI-DATASYNC-MIB', 'hwCfgLastSaveFailNotify'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hw_data_sync_notify_group = hw_data_sync_notify_group.setStatus('current')
if mibBuilder.loadTexts: hw_data_sync_notify_group.setDescription('A collection of objects on synchronization Configuration Notify Information.')
hw_data_sync_compliances = mib_identifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 2))
hw_data_sync_compliance = module_compliance((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 4, 2, 1)).setObjects()
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hw_data_sync_compliance = hw_data_sync_compliance.setStatus('current')
if mibBuilder.loadTexts: hw_data_sync_compliance.setDescription('The compliance statement for entities that support the Huawei DataSync MIB.')
hw_cfg_chg_table = mib_table((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1))
if mibBuilder.loadTexts: hw_cfg_chg_table.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_chg_table.setDescription('This table is used to record configuration changes. In this table, you can find the configuration change based on the specific index.')
hw_cfg_chg_entry = mib_table_row((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1)).setIndexNames((0, 'HUAWEI-DATASYNC-MIB', 'hwCfgChgSeqID'))
if mibBuilder.loadTexts: hw_cfg_chg_entry.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_chg_entry.setDescription('Entry of hwCfgChgTable.')
hw_cfg_chg_seq_id = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(0, 65535))).setMaxAccess('readonly')
if mibBuilder.loadTexts: hw_cfg_chg_seq_id.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_chg_seq_id.setDescription('The value of this object identifies the configuration change ID. When the configuration is changed, the sequence ID increases by 1.')
hw_cfg_chg_time = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 2), DateAndTime()).setMaxAccess('readonly')
if mibBuilder.loadTexts: hw_cfg_chg_time.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_chg_time.setDescription('This object indicates the configuration change time.')
hw_cfg_chg_terminal_type = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3))).clone(namedValues=named_values(('snmp', 1), ('telnet', 2), ('netconf', 3)))).setMaxAccess('readonly')
if mibBuilder.loadTexts: hw_cfg_chg_terminal_type.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_chg_terminal_type.setDescription('This object indicates the type of the terminal.')
hw_cfg_chg_terminal_id = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 4), integer32().subtype(subtypeSpec=value_range_constraint(0, 2147483647))).setMaxAccess('readonly')
if mibBuilder.loadTexts: hw_cfg_chg_terminal_id.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_chg_terminal_id.setDescription('The value of this object identifies the terminal ID.')
hw_cfg_chg_type = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 5), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3))).clone(namedValues=named_values(('create', 1), ('modify', 2), ('delete', 3)))).setMaxAccess('readonly')
if mibBuilder.loadTexts: hw_cfg_chg_type.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_chg_type.setDescription('This object indicates the configuration change type.')
hw_cfg_chg_view_name = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 6), octet_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts: hw_cfg_chg_view_name.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_chg_view_name.setDescription('This object indicates the name of the view in which the configuration change occurs. For the command operation, the object is the name of the view in which the command is run. For the SNMP operation, the object is the OID of the MIB table or the scalar object.')
hw_cfg_chg_cmd_id = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 7), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: hw_cfg_chg_cmd_id.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_chg_cmd_id.setDescription('The value of this object identifies the ID of the configuration change command. For the SNMP operation, the value is 0.')
hw_cfg_chg_detail_info = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 1, 1, 8), octet_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts: hw_cfg_chg_detail_info.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_chg_detail_info.setDescription('This object indicates detailed configuration change information. For the command operation, the object is the command line. For the SNMP operation, the object is the index of the MIB table. When there are multiple indexes, the format index1.index2.index3 is adopted.')
hw_collect_table = mib_table((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2))
if mibBuilder.loadTexts: hw_collect_table.setStatus('current')
if mibBuilder.loadTexts: hw_collect_table.setDescription('This table is used to enable the NMS to send the collecting script to the device to trigger the collection, and then monitor the collection status.')
hw_collect_entry = mib_table_row((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1)).setIndexNames((0, 'HUAWEI-DATASYNC-MIB', 'hwCollectIndex'))
if mibBuilder.loadTexts: hw_collect_entry.setStatus('current')
if mibBuilder.loadTexts: hw_collect_entry.setDescription('Entry of hwCollectTable.')
hw_collect_index = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 1), integer32())
if mibBuilder.loadTexts: hw_collect_index.setStatus('current')
if mibBuilder.loadTexts: hw_collect_index.setDescription('The value of this object identifies the collection index.')
hw_collect_net_manage_id = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 2), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: hw_collect_net_manage_id.setStatus('current')
if mibBuilder.loadTexts: hw_collect_net_manage_id.setDescription('The value of this object identifies the NMS ID.')
hw_collect_operation = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('begin', 1), ('stop', 2)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts: hw_collect_operation.setStatus('current')
if mibBuilder.loadTexts: hw_collect_operation.setDescription('This object indicates the instruction for the collection operation. The default value is stop.')
hw_collect_in_script_file = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 4), octet_string().subtype(subtypeSpec=value_size_constraint(1, 255))).setMaxAccess('readwrite')
if mibBuilder.loadTexts: hw_collect_in_script_file.setStatus('current')
if mibBuilder.loadTexts: hw_collect_in_script_file.setDescription('This object indicates the name of the script file. The length of the file name ranges from 1 character to 255 characters.')
hw_collect_in_result_file = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 5), octet_string().subtype(subtypeSpec=value_size_constraint(1, 255))).setMaxAccess('readwrite')
if mibBuilder.loadTexts: hw_collect_in_result_file.setStatus('current')
if mibBuilder.loadTexts: hw_collect_in_result_file.setDescription('This object indicates the name of the result file. The length of the file name ranges from 1 character to 255 characters.')
hw_collect_state = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 6), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('idle', 1), ('collecting', 2)))).setMaxAccess('readonly')
if mibBuilder.loadTexts: hw_collect_state.setStatus('current')
if mibBuilder.loadTexts: hw_collect_state.setDescription('This object indicates the collection status.')
hw_collect_row_status = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 2, 2, 1, 7), row_status()).setMaxAccess('readcreate')
if mibBuilder.loadTexts: hw_collect_row_status.setStatus('current')
if mibBuilder.loadTexts: hw_collect_row_status.setDescription('This object indicates the row status.')
hw_cfg_chg_notify = notification_type((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 3, 1)).setObjects(('HUAWEI-DATASYNC-MIB', 'hwCurrentCfgChgSeqID'), ('HUAWEI-DATASYNC-MIB', 'hwCfgChgSeqIDReveralCount'), ('HUAWEI-DATASYNC-MIB', 'hwCfgChgTableMaxItem'), ('HUAWEI-DATASYNC-MIB', 'hwCfgBaselineTime'))
if mibBuilder.loadTexts: hw_cfg_chg_notify.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_chg_notify.setDescription('This trap is generated when a configuration change occurs on the device within a specified period.')
hw_cfg_last_save_fail_notify = notification_type((1, 3, 6, 1, 4, 1, 2011, 5, 25, 191, 3, 2))
if mibBuilder.loadTexts: hw_cfg_last_save_fail_notify.setStatus('current')
if mibBuilder.loadTexts: hw_cfg_last_save_fail_notify.setDescription('The last save operation failed; please check the configuration.')
mibBuilder.exportSymbols('HUAWEI-DATASYNC-MIB', hwDataSyncNotifications=hw_data_sync_notifications, hwCfgChgNotifyGroup=hw_cfg_chg_notify_group, DateAndTime=DateAndTime, hwDataSync=hw_data_sync, hwCollectIndex=hw_collect_index, hwDataSyncScalarObjectsGroup=hw_data_sync_scalar_objects_group, hwCfgChgSeqID=hw_cfg_chg_seq_id, hwCollectState=hw_collect_state, hwCfgBaselineTime=hw_cfg_baseline_time, hwCfgChgViewName=hw_cfg_chg_view_name, hwDataSyncScalarObjects=hw_data_sync_scalar_objects, hwCfgChgType=hw_cfg_chg_type, hwCfgChgCmdID=hw_cfg_chg_cmd_id, hwCfgChgEntry=hw_cfg_chg_entry, hwCollectTable=hw_collect_table, hwDataSyncConformance=hw_data_sync_conformance, hwCfgChgTerminalID=hw_cfg_chg_terminal_id, hwCurrentCfgChgSeqID=hw_current_cfg_chg_seq_id, hwCollectEntry=hw_collect_entry, hwCollectNetManageId=hw_collect_net_manage_id, hwCfgChgDetailInfo=hw_cfg_chg_detail_info, hwCollectInScriptFile=hw_collect_in_script_file, hwCfgChgTable=hw_cfg_chg_table, hwCollectInResultFile=hw_collect_in_result_file, PYSNMP_MODULE_ID=hw_data_sync, hwDataSyncNotifyGroup=hw_data_sync_notify_group, hwDataSyncTableObjects=hw_data_sync_table_objects, hwCfgChgNotify=hw_cfg_chg_notify, hwCollectOperation=hw_collect_operation, hwCfgChgSeqIDReveralCount=hw_cfg_chg_seq_id_reveral_count, hwCfgLastSaveFailNotify=hw_cfg_last_save_fail_notify, hwCfgChgTableMaxItem=hw_cfg_chg_table_max_item, hwDataSyncCompliance=hw_data_sync_compliance, hwDataSyncCompliances=hw_data_sync_compliances, hwCfgChgTime=hw_cfg_chg_time, hwCfgChgTerminalType=hw_cfg_chg_terminal_type, hwDataSyncGroups=hw_data_sync_groups, hwCollectRowStatus=hw_collect_row_status)
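# A companion sketch (again with the pysnmp 4.x high-level API; the same
# placeholder address and community as above) of how an NMS might walk
# hwCfgChgTable to pull the recorded configuration changes. With
# lexicographicMode=False the walk stops at the end of the table subtree.
from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                          ContextData, ObjectType, ObjectIdentity, nextCmd)

for errorIndication, errorStatus, errorIndex, varBinds in nextCmd(
        SnmpEngine(), CommunityData('public'),
        UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
        # hwCfgChgTable sits at 1.3.6.1.4.1.2011.5.25.191.2.1
        ObjectType(ObjectIdentity('1.3.6.1.4.1.2011.5.25.191.2.1')),
        lexicographicMode=False):
    if errorIndication or errorStatus:
        break
    for varBind in varBinds:
        print(varBind)  # one column instance per configuration change row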
def main(name="User", name2="Your Pal"): print(f"Hello, {name}! I am {name2}!") if __name__=="__main__": main()
def main(name='User', name2='Your Pal'):
    print(f'Hello, {name}! I am {name2}!')

if __name__ == '__main__':
    main()
def _dup(file, mode, checked=True):
    """Replacement for perl built-in open function when the mode contains '&'."""
    global OS_ERROR, TRACEBACK, AUTODIE
    try:
        if isinstance(file, io.IOBase):  # file handle
            file.flush()
            return os.fdopen(os.dup(file.fileno()), mode, encoding=file.encoding, errors=file.errors)
        if (_m := re.match(r'=?(\d+)', file)):
            file = int(_m.group(1))
        elif file in _DUP_MAP:
            file = _DUP_MAP[file]
        return _create_fh_methods(os.fdopen(os.dup(file), mode))
    except Exception as _e:
        OS_ERROR = str(_e)
        if TRACEBACK:
            _cluck(f"dup failed: {OS_ERROR}", skip=2)
        if AUTODIE:
            raise
        if checked:
            return None
        fh = io.StringIO()
        fh.close()
        return _create_fh_methods(fh)
def _dup(file, mode, checked=True):
    """Replacement for perl built-in open function when the mode contains '&'."""
    global OS_ERROR, TRACEBACK, AUTODIE
    try:
        if isinstance(file, io.IOBase):  # file handle
            file.flush()
            return os.fdopen(os.dup(file.fileno()), mode, encoding=file.encoding, errors=file.errors)
        if (_m := re.match('=?(\\d+)', file)):
            file = int(_m.group(1))
        elif file in _DUP_MAP:
            file = _DUP_MAP[file]
        return _create_fh_methods(os.fdopen(os.dup(file), mode))
    except Exception as _e:
        OS_ERROR = str(_e)  # assign the declared global, not a new local
        if TRACEBACK:
            _cluck(f'dup failed: {OS_ERROR}', skip=2)
        if AUTODIE:
            raise
        if checked:
            return None
        fh = io.StringIO()
        fh.close()
        return _create_fh_methods(fh)
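# A minimal, self-contained sketch of the os.dup()/os.fdopen() pattern that
# _dup wraps (the module's _DUP_MAP, _create_fh_methods, and _cluck helpers
# are defined elsewhere and are not reproduced here):
import os
import sys

dup_out = os.fdopen(os.dup(sys.stdout.fileno()), 'w')
dup_out.write('written through the duplicated descriptor\n')
dup_out.close()  # closing the duplicate leaves sys.stdout itself open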
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 14 16:15:05 2020
Logical condition: If statement
@author: Ashish
"""
day_of_week = input("What day of the week is it today? ")
if day_of_week == "Monday":
    print("Have a great start to your week!")
elif day_of_week == "Friday":
    print("It's ok to finish a bit early!")
else:
    print("Full speed ahead!")

# -- Problem: the user may not enter exactly what we expect --
day_of_week = input("What day of the week is it today? ").lower()
if day_of_week == "monday":
    print("Have a great start to your week!")
elif day_of_week == "friday":
    print("It's ok to finish a bit early!")
else:
    print("Full speed ahead!")
""" Created on Tue Jul 14 16:15:05 2020 Logical condition: If statement @author: Ashish """ day_of_week = input('What day of the week is it today? ') if day_of_week == 'Monday': print('Have a great start to your week!') elif day_of_week == 'Friday': print("It's ok to finish a bit early!") else: print('Full speed ahead!') day_of_week = input('What day of the week is it today? ').lower() if day_of_week == 'monday': print('Have a great start to your week!') elif day_of_week == 'friday': print("It's ok to finish a bit early!") else: print('Full speed ahead!')
class PeekableIterator:
    def __init__(self, nums):
        self.nums = nums
        self.i = 0

    def peek(self):
        return self.nums[self.i]

    def next(self):
        self.i += 1
        return self.nums[self.i - 1]

    def hasnext(self):
        return self.i < len(self.nums)
class PeekableIterator:
    def __init__(self, nums):
        self.nums = nums
        self.i = 0

    def peek(self):
        return self.nums[self.i]

    def next(self):
        self.i += 1
        return self.nums[self.i - 1]

    def hasnext(self):
        return self.i < len(self.nums)
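# Usage sketch for the class above: peek() inspects the next element without
# consuming it, next() consumes it, and hasnext() reports whether any remain.
it = PeekableIterator([1, 2, 3])
assert it.peek() == 1       # does not advance the cursor
assert it.next() == 1       # advances past 1
assert it.hasnext()
assert it.next() == 2 and it.next() == 3
assert not it.hasnext()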
# DADSA - Assignment 1
# Reece Benson

class Player():
    _id = None
    _name = None
    _gender = None
    _score = None
    _points = None

    def __init__(self, _name, _gender, _id):
        self._id = _id
        self._name = _name
        self._gender = _gender
        self._score = {}
        self._points = 0

    def __cmp__(self, other):
        """Compare Override"""
        if self._points < other._points:
            return -1
        elif self._points > other._points:
            return 1
        else:
            return 0

    # Comparison Overrides
    def __eq__(self, other):
        return not self._points < other._points and not other._points < self._points

    def __ne__(self, other):
        return self._points < other._points or other._points < self._points

    def __gt__(self, other):
        return other._points < self._points

    def __ge__(self, other):
        return not self._points < other._points

    def __le__(self, other):
        return not other._points < self._points

    def get_name(self):
        return self._name

    def get_gender(self):
        return self._gender

    def get_score(self, _match):
        return self._score[_match]

    def set_score(self, _match, _score):
        self._score[_match] = _score
        return self._score[_match]

    def get_points(self):
        return self._points

    def set_points(self, _points, append=False):
        if append:
            self._points += _points
        else:
            self._points = _points
        return self._points
class Player:
    _id = None
    _name = None
    _gender = None
    _score = None
    _points = None

    def __init__(self, _name, _gender, _id):
        self._id = _id
        self._name = _name
        self._gender = _gender
        self._score = {}
        self._points = 0

    def __cmp__(self, other):
        """Compare Override"""
        if self._points < other._points:
            return -1
        elif self._points > other._points:
            return 1
        else:
            return 0

    def __eq__(self, other):
        return not self._points < other._points and (not other._points < self._points)

    def __ne__(self, other):
        return self._points < other._points or other._points < self._points

    def __gt__(self, other):
        return other._points < self._points

    def __ge__(self, other):
        return not self._points < other._points

    def __le__(self, other):
        return not other._points < self._points

    def get_name(self):
        return self._name

    def get_gender(self):
        return self._gender

    def get_score(self, _match):
        return self._score[_match]

    def set_score(self, _match, _score):
        self._score[_match] = _score
        return self._score[_match]

    def get_points(self):
        return self._points

    def set_points(self, _points, append=False):
        if append:
            self._points += _points
        else:
            self._points = _points
        return self._points
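# Usage sketch for the class above: players compare by their points total, so
# the rich-comparison overrides make ranking straightforward.
p1 = Player('Alice', 'F', 1)
p2 = Player('Bob', 'M', 2)
p1.set_points(3)
p2.set_points(1)
p2.set_points(2, append=True)  # append=True adds to the existing total
assert p1.get_points() == 3 and p2.get_points() == 3
assert p1 == p2 and p1 >= p2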
_champernownes_constant = "" def _calculate_champernownes_nth_decimal(length): res = [] curr_length = 0 i = 1 while curr_length < length: res += [str(i)] curr_length += len(res[-1]) i += 1 return "".join(res) def champernownes_nth_decimal(n): global _champernownes_constant if len(_champernownes_constant) >= n: return int(_champernownes_constant[n - 1]) _champernownes_constant = _calculate_champernownes_nth_decimal(2 * n) return champernownes_nth_decimal(n)
_champernownes_constant = ''


def _calculate_champernownes_nth_decimal(length):
    res = []
    curr_length = 0
    i = 1
    while curr_length < length:
        res += [str(i)]
        curr_length += len(res[-1])
        i += 1
    return ''.join(res)


def champernownes_nth_decimal(n):
    global _champernownes_constant
    if len(_champernownes_constant) >= n:
        return int(_champernownes_constant[n - 1])
    _champernownes_constant = _calculate_champernownes_nth_decimal(2 * n)
    return champernownes_nth_decimal(n)
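# Quick check of the function above against the digits of Champernowne's
# constant 0.123456789101112...: position 10 is the '1' of '10' and
# position 15 is the '2' of '12'.
assert champernownes_nth_decimal(1) == 1
assert champernownes_nth_decimal(10) == 1
assert champernownes_nth_decimal(15) == 2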
class Solution:
    solution = []

    def inorderTraversal(self, root: TreeNode) -> List[int]:
        if root is None:
            return []
        self.solution = []
        self.inorderHelper(root)
        return self.solution

    def inorderHelper(self, root: TreeNode):
        if root is None:
            return
        self.inorderHelper(root.left)
        self.solution.append(root.val)
        self.inorderHelper(root.right)
class Solution:
    solution = []

    def inorder_traversal(self, root: TreeNode) -> List[int]:
        if root is None:
            return []
        self.solution = []
        self.inorder_helper(root)
        return self.solution

    def inorder_helper(self, root: TreeNode):
        if root is None:
            return
        self.inorder_helper(root.left)
        self.solution.append(root.val)
        self.inorder_helper(root.right)
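# Usage sketch for the class above, with a minimal LeetCode-style TreeNode
# assumed here only to make the example self-contained:
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right

root = TreeNode(2, TreeNode(1), TreeNode(3))
assert Solution().inorder_traversal(root) == [1, 2, 3]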