Dataset schema (29 columns, one row per source file). Ranges are observed min-max values; "classes" is the number of distinct values in the column.

  column                 type           observed range / classes
  ---------------------  -------------  ------------------------
  blob_id                string         length 40
  directory_id           string         length 40
  path                   string         length 3-281
  content_id             string         length 40
  detected_licenses      list           0-57 items
  license_type           string         2 classes
  repo_name              string         length 6-116
  snapshot_id            string         length 40
  revision_id            string         length 40
  branch_name            string         313 classes
  visit_date             timestamp[us]
  revision_date          timestamp[us]
  committer_date         timestamp[us]
  github_id              int64          18.2k-668M
  star_events_count      int64          0-102k
  fork_events_count      int64          0-38.2k
  gha_license_id         string         17 classes
  gha_event_created_at   timestamp[us]
  gha_created_at         timestamp[us]
  gha_language           string         107 classes
  src_encoding           string         20 classes
  language               string         1 class
  is_vendor              bool           2 classes
  is_generated           bool           2 classes
  length_bytes           int64          4-6.02M
  extension              string         78 classes
  content                string         length 2-6.02M
  authors                list           1 item
  author                 string         length 0-175
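This per-file layout matches the metadata used by large source-code corpora on the Hugging Face Hub (The Stack family uses essentially these columns). A minimal sketch of loading and filtering rows with the `datasets` library; the dataset path "user/source-code-dataset" is a hypothetical placeholder, not taken from this extract:

```python
# Sketch: stream a dataset with the schema above and keep only permissively
# licensed, non-vendored, non-generated Python files.
from datasets import load_dataset

ds = load_dataset("user/source-code-dataset", split="train", streaming=True)

permissive_python = (
    row
    for row in ds
    if row["license_type"] == "permissive"
    and row["language"] == "Python"
    and not row["is_vendor"]
    and not row["is_generated"]
)

for row in permissive_python:
    # each row carries the full file text in row["content"]
    print(row["repo_name"], row["path"], row["length_bytes"])
    break  # inspect the first matching record
```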
Record 1: josephclaymiller/audiotron - /tunnelTest/main.py
  blob_id: 649284409ed743419ddc62681ca42392e631d7aa
  directory_id: 837827fe283704a9de1f9a244a295b4f2e72cbdd
  content_id: 6c9b0895722a852e79a66f180625bc7c867e8cfd
  snapshot_id: c228c98981b60422906e97f0e14dde0a314bb425
  revision_id: 668362fb7868ef3a11d0636ce9559cbf18fd3812
  branch_name: refs/heads/master
  detected_licenses: []    license_type: no_license
  visit_date: 2016-09-06T14:54:40.847965
  revision_date: 2008-12-03T11:21:01    committer_date: 2008-12-03T11:21:01
  github_id: 32127850    star_events_count: 1    fork_events_count: 0
  gha_license_id: null    gha_event_created_at: null    gha_created_at: null    gha_language: null
  src_encoding: UTF-8    language: Python    is_vendor: false    is_generated: false
  length_bytes: 3417    extension: py
  authors: [ "BrianStewartMcDonald@df45ab90-1348-0410-a7a9-55bda8f8a9ff" ]    author: BrianStewartMcDonald@df45ab90-1348-0410-a7a9-55bda8f8a9ff
  content:

```python
import direct.directbase.DirectStart
from pandac.PandaModules import Fog
from direct.showbase.DirectObject import DirectObject
from direct.interval.MetaInterval import Sequence  # needed to continuously move the tunnel
from pandac.PandaModules import VBase3, VBase4
from direct.interval.LerpInterval import LerpPosHprInterval  # needed to move and rotate tunnel
from direct.interval.FunctionInterval import Func  # needed to continuously move the tunnel
from pandac.PandaModules import NodePath, PandaNode  # used to make a tunnel NodePath which controls tunnel-specific lights
from pandac.PandaModules import DirectionalLight, AmbientLight  # needed to set up lighting
import sys  # used to exit
from music import MusicController  # needed for playing game music and pulsing lights
from tunnel import Tunnel  # needed for the tunnel! :P
from Enemy import Enemy
from pandac.PandaModules import CollisionHandlerEvent, CollisionTraverser
from direct.actor.Actor import Actor


class World(DirectObject):
    def __init__(self):
        ### Standard initialization stuff
        # Camera settings
        base.disableMouse()  # disable mouse control so that we can place the camera
        camera.setPos(0, 0, 0)
        camera.lookAt(0, 0, -100)
        base.setBackgroundColor(0, 0, 0)  # set the background color to black
        # load music controller
        self.GMC = MusicController()
        # Load the tunnel and start the tunnel
        self.tunnel = Tunnel(self.GMC)
        # set up collisions and load an enemy
        self.setupCollision()
        self.enemyTest()
        # Define the keyboard input
        # Escape closes the demo
        self.accept('escape', sys.exit)
        # Handle pausing the tunnel
        self.accept('p', self.tunnel.handlePause)
        # Handle turning the fog on and off
        self.accept('t', ToggleFog, [self.tunnel.NP, self.tunnel.fog])

    def enemyTest(self):
        # create node path
        self.enemyNP = NodePath(PandaNode("enemyNP"))
        self.enemyNP.reparentTo(render)
        self.enemyNP.setPos(0, 0, -20)
        # load actor
        self.enemy1 = Enemy(self.enemyNP)
        #self.model = Actor("../assets/models/ememydtb.egg")
        #self.model.reparentTo(self.enemyNP)
        self.enemy1.model.setPos(0, 0, -20)
        self.enemy1.model.setScale(.25)
        #self.model.reparentTo(self.enemyNP)
        # create lighting
        alight = AmbientLight('alight')
        alight.setColor(VBase4(.5, 0, 0, 1))
        alnp = self.enemyNP.attachNewNode(alight)
        self.enemyNP.setLight(alnp)
        dlight = DirectionalLight('dlight')
        dlight.setColor(VBase4(0, 0, .75, 1))
        dlnp = self.enemyNP.attachNewNode(dlight)
        dlnp.setHpr(0, -60, 0)
        self.enemyNP.setLight(dlnp)
        dlight = DirectionalLight('dlight')
        dlight.setColor(VBase4(1, 1, 1, 1))
        dlnp = self.enemyNP.attachNewNode(dlight)
        dlnp.setHpr(0, 60, 0)
        self.enemyNP.setLight(dlnp)
        # create pulse
        pulse = [x * 4 for x in range(self.GMC.numSixteenths / 4)]
        self.GMC.addPulsingElement(self.enemyNP, pulse)

    def setupCollision(self):
        self.cHandler = CollisionHandlerEvent()
        self.cHandler.setInPattern("%fn-into-%in")
        base.cHandler = self.cHandler
        self.cTrav = CollisionTraverser()
        base.cTrav = self.cTrav


# This function will toggle fog on a node
def ToggleFog(node, fog):
    # If the fog attached to the node is equal to the one we passed in, then
    # fog is on and we should clear it
    if node.getFog() == fog:
        node.clearFog()
    # Otherwise fog is not set so we should set it
    else:
        node.setFog(fog)


w = World()
run()
```
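The `pandac.PandaModules` import path above is the old Panda3D 1.x layout. On current Panda3D the same fog toggle can be written as below; a minimal sketch assuming Panda3D >= 1.10, with module paths and the fog density updated from memory rather than from this record:

```python
from direct.showbase.ShowBase import ShowBase
from panda3d.core import Fog

base = ShowBase()

fog = Fog("scene-fog")
fog.setColor(0, 0, 0)
fog.setExpDensity(0.03)  # density chosen arbitrarily for the sketch

def toggle_fog(node, fog):
    # same logic as ToggleFog above: clear if this fog is attached, else set it
    if node.getFog() == fog:
        node.clearFog()
    else:
        node.setFog(fog)

base.accept("t", toggle_fog, [base.render, fog])
base.run()
```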
Record 2: salimsuprayogi/program_library_siswa - /main.py
  blob_id: 35479810c6d35ab75199ee140a8a576f6ebf45b2
  directory_id: e7ccb3896e7750cc66921d5a79c1a211e2d55be9
  content_id: aeb26b2db91441d32bb272f44103d22f6e66539d
  snapshot_id: 2becff48e05e0f7f0d1f55b0d94bc843da7876b9
  revision_id: 3b75e149876e08fe7b7e08a72478200947abcaa6
  branch_name: refs/heads/master
  detected_licenses: [ "MIT" ]    license_type: permissive
  visit_date: 2021-01-14T23:22:15.666338
  revision_date: 2020-02-26T16:40:27    committer_date: 2020-02-26T16:40:27
  github_id: 242793179    star_events_count: 1    fork_events_count: 1
  gha_license_id: null    gha_event_created_at: null    gha_created_at: null    gha_language: null
  src_encoding: UTF-8    language: Python    is_vendor: false    is_generated: false
  length_bytes: 3685    extension: py
  authors: [ "khan.razes@gmail.com" ]    author: khan.razes@gmail.com
  content (Indonesian comments translated to English; string literals kept as-is):

```python
# Mentor = Muhammad Nasrullah
# Author = Salim Suprayogi

# - [ ] Code implementation
# - [ Setup Library ] Library requirements & installation
# - [ Implementation ] Connect the database to the code
# - [ Implementation ] Build the CRUD functions in the code

# pip3 install pymysql
# pip install MySQL-python
# ref : https://dev.mysql.com/doc/connector-python/en/connector-python-installation.html
# ref : https://www.tutorialspoint.com/python3/python_database_access.htm

# imports the functions in the siswa (student) file
# * pulls in every function from the siswa file
from siswa import *
# imports the functions in the jurusan (major) file
from jurusan import *
# imports the functions in the pertanyaan (question) file
from pertanyaan import *
# imports the functions in the jawaban (answer) file
from jawaban import *

if __name__ == "__main__":
    # used to test each function one at a time during early development
    # insert_siswa()
    # show_siswa()
    # delete_siswa()
    # update_siswa()
    # insert_jurusan()

    # display the program title
    print("Library Siswa")

    # menu list
    menus = [
        "1. Tambah Siswa",
        "2. Tampilkan Siswa",
        "3. Hapus Siswa",
        "4. Ubah Siswa",
        "===========================\n"
        "5. Tambah Jurusan",
        "6. Tampilkan Jurusan",
        "7. Hapus Jurusan",
        "8. Ubah Jurusan",
        "===========================\n"
        "9. Tambah Pertanyaan",
        "10. Tampilkan Pertanyaan",
        "11. Hapus Pertanyaan",
        "12. Ubah Pertanyaan",
        "===========================\n"
        "13. Tambah Jawaban",
        "14. Tampilkan Jawaban",
        "15. Hapus Jawaban",
        "16. Ubah Jawaban",
        "===========================\n"
        "17. Keluar\n"
        "===========================\n"
    ]

    # loop while the menu is displayed and the user has not chosen exit;
    # if exit is chosen, the program stops
    while True:
        # menu
        for menu in menus:
            print(menu)

        # choose a menu entry by number ("Pilih Menu" = "Choose a menu")
        selected = input("Pilih Menu : ")

        # siswa (students)
        # if 1 is chosen, run this branch
        if (selected == "1"):
            insert_siswa()
        # if 2 is chosen, run this branch
        elif (selected == "2"):
            show_siswa()
        # if 3 is chosen, run this branch
        elif (selected == "3"):
            delete_siswa()
        # if 4 is chosen, run this branch
        elif (selected == "4"):
            update_siswa()
        # if 5 (or anything outside the menu list) is chosen, this part runs
        # jurusan (majors)
        elif (selected == "5"):
            insert_jurusan()
        elif (selected == "6"):
            show_jurusan()
        elif (selected == "7"):
            delete_jurusan()
        elif (selected == "8"):
            update_jurusan()
        # pertanyaan (questions)
        elif (selected == "9"):
            insert_pertanyaan()
        elif (selected == "10"):
            show_pertanyaan()
        elif (selected == "11"):
            delete_pertanyaan()
        elif (selected == "12"):
            update_pertanyaan()
        # jawaban (answers)
        elif (selected == "13"):
            insert_jawaban()
        elif (selected == "14"):
            show_jawaban()
        elif (selected == "15"):
            delete_jawaban()
        elif (selected == "16"):
            update_jawaban()
        # exit
        else:
            print("Sampai jumpa kembali... :-D")  # "See you again... :-D"
            # exit code=0 means the program exited correctly / no error occurred
            # exit code=1 means the program exited abnormally / with an error
            exit(code=0)

# thank you.......
```
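The 16 menu branches map choices to handlers one-to-one, so the chain can also be phrased as a dispatch table. A sketch, not part of the original program, reusing the same imported handler functions:

```python
# Hypothetical refactor of the menu dispatch: map each choice to its handler.
handlers = {
    "1": insert_siswa, "2": show_siswa, "3": delete_siswa, "4": update_siswa,
    "5": insert_jurusan, "6": show_jurusan, "7": delete_jurusan, "8": update_jurusan,
    "9": insert_pertanyaan, "10": show_pertanyaan, "11": delete_pertanyaan, "12": update_pertanyaan,
    "13": insert_jawaban, "14": show_jawaban, "15": delete_jawaban, "16": update_jawaban,
}

action = handlers.get(selected)
if action is not None:
    action()
else:
    print("Sampai jumpa kembali... :-D")  # "See you again... :-D"
    exit(code=0)
```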
Record 3: GerhardusM/google-ads-python - /google/ads/googleads/v10/services/services/customer_asset_service/client.py
  blob_id: 449b4ad21c79a9864610111fbf3661076f001bc8
  directory_id: 9ae6ce54bf9a2a86201961fdbd5e7b0ec913ff56
  content_id: eb20ce17a4036bc57b106277613558af2955bb19
  snapshot_id: 73b275a06e5401e6b951a6cd99af98c247e34aa3
  revision_id: 676ac5fcb5bec0d9b5897f4c950049dac5647555
  branch_name: refs/heads/master
  detected_licenses: [ "Apache-2.0" ]    license_type: permissive
  visit_date: 2022-07-06T19:05:50.932553
  revision_date: 2022-06-17T20:41:17    committer_date: 2022-06-17T20:41:17
  github_id: 207535443    star_events_count: 0    fork_events_count: 0
  gha_license_id: Apache-2.0    gha_event_created_at: 2019-09-10T10:58:55    gha_created_at: 2019-09-10T10:58:55    gha_language: null
  src_encoding: UTF-8    language: Python    is_vendor: false    is_generated: false
  length_bytes: 21026    extension: py
  authors: [ "noreply@github.com" ]    author: noreply@github.com
  content:

```python
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources

from google.api_core import client_options as client_options_lib
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport import mtls  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth.exceptions import MutualTLSChannelError  # type: ignore
from google.oauth2 import service_account  # type: ignore

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore

from google.ads.googleads.v10.services.types import customer_asset_service
from google.rpc import status_pb2  # type: ignore
from .transports.base import CustomerAssetServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import CustomerAssetServiceGrpcTransport


class CustomerAssetServiceClientMeta(type):
    """Metaclass for the CustomerAssetService client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """

    _transport_registry = (
        OrderedDict()
    )  # type: Dict[str, Type[CustomerAssetServiceTransport]]
    _transport_registry["grpc"] = CustomerAssetServiceGrpcTransport

    def get_transport_class(
        cls,
        label: str = None,
    ) -> Type[CustomerAssetServiceTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))


class CustomerAssetServiceClient(metaclass=CustomerAssetServiceClientMeta):
    """Service to manage customer assets."""

    @staticmethod
    def _get_default_mtls_endpoint(api_endpoint):
        """Converts api endpoint to mTLS endpoint.

        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com"
        respectively.

        Args:
            api_endpoint (Optional[str]): the api endpoint to convert.
        Returns:
            str: converted mTLS api endpoint.
        """
        if not api_endpoint:
            return api_endpoint

        mtls_endpoint_re = re.compile(
            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
        )

        m = mtls_endpoint_re.match(api_endpoint)
        name, mtls, sandbox, googledomain = m.groups()
        if mtls or not googledomain:
            return api_endpoint

        if sandbox:
            return api_endpoint.replace(
                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
            )

        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

    DEFAULT_ENDPOINT = "googleads.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            CustomerAssetServiceClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_info(
            info
        )
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            file.

        Args:
            filename (str): The path to the service account private key
                json file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            CustomerAssetServiceClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(
            filename
        )
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    from_service_account_json = from_service_account_file

    @property
    def transport(self) -> CustomerAssetServiceTransport:
        """Returns the transport used by the client instance.

        Returns:
            CustomerAssetServiceTransport: The transport used by the client
                instance.
        """
        return self._transport

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the
            transport and may cause errors in other clients!
        """
        self.transport.close()

    @staticmethod
    def asset_path(
        customer_id: str,
        asset_id: str,
    ) -> str:
        """Returns a fully-qualified asset string."""
        return "customers/{customer_id}/assets/{asset_id}".format(
            customer_id=customer_id,
            asset_id=asset_id,
        )

    @staticmethod
    def parse_asset_path(path: str) -> Dict[str, str]:
        """Parses a asset path into its component segments."""
        m = re.match(
            r"^customers/(?P<customer_id>.+?)/assets/(?P<asset_id>.+?)$", path
        )
        return m.groupdict() if m else {}

    @staticmethod
    def customer_asset_path(
        customer_id: str,
        asset_id: str,
        field_type: str,
    ) -> str:
        """Returns a fully-qualified customer_asset string."""
        return "customers/{customer_id}/customerAssets/{asset_id}~{field_type}".format(
            customer_id=customer_id,
            asset_id=asset_id,
            field_type=field_type,
        )

    @staticmethod
    def parse_customer_asset_path(path: str) -> Dict[str, str]:
        """Parses a customer_asset path into its component segments."""
        m = re.match(
            r"^customers/(?P<customer_id>.+?)/customerAssets/(?P<asset_id>.+?)~(?P<field_type>.+?)$",
            path,
        )
        return m.groupdict() if m else {}

    @staticmethod
    def common_billing_account_path(
        billing_account: str,
    ) -> str:
        """Returns a fully-qualified billing_account string."""
        return "billingAccounts/{billing_account}".format(
            billing_account=billing_account,
        )

    @staticmethod
    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
        """Parse a billing_account path into its component segments."""
        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_folder_path(
        folder: str,
    ) -> str:
        """Returns a fully-qualified folder string."""
        return "folders/{folder}".format(
            folder=folder,
        )

    @staticmethod
    def parse_common_folder_path(path: str) -> Dict[str, str]:
        """Parse a folder path into its component segments."""
        m = re.match(r"^folders/(?P<folder>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_organization_path(
        organization: str,
    ) -> str:
        """Returns a fully-qualified organization string."""
        return "organizations/{organization}".format(
            organization=organization,
        )

    @staticmethod
    def parse_common_organization_path(path: str) -> Dict[str, str]:
        """Parse a organization path into its component segments."""
        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_project_path(
        project: str,
    ) -> str:
        """Returns a fully-qualified project string."""
        return "projects/{project}".format(
            project=project,
        )

    @staticmethod
    def parse_common_project_path(path: str) -> Dict[str, str]:
        """Parse a project path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_location_path(
        project: str,
        location: str,
    ) -> str:
        """Returns a fully-qualified location string."""
        return "projects/{project}/locations/{location}".format(
            project=project,
            location=location,
        )

    @staticmethod
    def parse_common_location_path(path: str) -> Dict[str, str]:
        """Parse a location path into its component segments."""
        m = re.match(
            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
        )
        return m.groupdict() if m else {}

    def __init__(
        self,
        *,
        credentials: Optional[ga_credentials.Credentials] = None,
        transport: Union[str, CustomerAssetServiceTransport, None] = None,
        client_options: Optional[client_options_lib.ClientOptions] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiates the customer asset service client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, CustomerAssetServiceTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (google.api_core.client_options.ClientOptions):
                Custom options for the client. It won't take effect if a
                ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client.
                GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be
                used to override the endpoint: "always" (always use the default
                mTLS endpoint), "never" (always use the default regular
                endpoint) and "auto" (auto switch to the default mTLS endpoint
                if client certificate is present, this is the default value).
                However, the ``api_endpoint`` property takes precedence if
                provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used
                if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or
                not set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()

        # Create SSL credentials for mutual TLS if needed.
        if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
            "true",
            "false",
        ):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
            )
        use_client_cert = (
            os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
        )

        client_cert_source_func = None
        is_mtls = False
        if use_client_cert:
            if client_options.client_cert_source:
                is_mtls = True
                client_cert_source_func = client_options.client_cert_source
            else:
                is_mtls = mtls.has_default_client_cert_source()
                if is_mtls:
                    client_cert_source_func = mtls.default_client_cert_source()
                else:
                    client_cert_source_func = None

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        else:
            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
            if use_mtls_env == "never":
                api_endpoint = self.DEFAULT_ENDPOINT
            elif use_mtls_env == "always":
                api_endpoint = self.DEFAULT_MTLS_ENDPOINT
            elif use_mtls_env == "auto":
                api_endpoint = (
                    self.DEFAULT_MTLS_ENDPOINT
                    if is_mtls
                    else self.DEFAULT_ENDPOINT
                )
            else:
                raise MutualTLSChannelError(
                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
                    "values: never, auto, always"
                )

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, CustomerAssetServiceTransport):
            # transport is a CustomerAssetServiceTransport instance.
            if credentials or client_options.credentials_file:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its credentials directly."
                )
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
            )

    def mutate_customer_assets(
        self,
        request: Union[
            customer_asset_service.MutateCustomerAssetsRequest, dict
        ] = None,
        *,
        customer_id: str = None,
        operations: Sequence[
            customer_asset_service.CustomerAssetOperation
        ] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> customer_asset_service.MutateCustomerAssetsResponse:
        r"""Creates, updates, or removes customer assets.
        Operation statuses are returned.

        List of thrown errors: `AssetLinkError <>`__ `AuthenticationError <>`__
        `AuthorizationError <>`__ `FieldError <>`__ `HeaderError <>`__
        `InternalError <>`__ `MutateError <>`__ `QuotaError <>`__
        `RequestError <>`__

        Args:
            request (Union[google.ads.googleads.v10.services.types.MutateCustomerAssetsRequest, dict]):
                The request object. Request message for
                [CustomerAssetService.MutateCustomerAssets][google.ads.googleads.v10.services.CustomerAssetService.MutateCustomerAssets].
            customer_id (str):
                Required. The ID of the customer whose customer assets are
                being modified.

                This corresponds to the ``customer_id`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            operations (Sequence[google.ads.googleads.v10.services.types.CustomerAssetOperation]):
                Required. The list of operations to perform on individual
                customer assets.

                This corresponds to the ``operations`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with
                the request as metadata.

        Returns:
            google.ads.googleads.v10.services.types.MutateCustomerAssetsResponse:
                Response message for a customer asset mutate.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([customer_id, operations])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a customer_asset_service.MutateCustomerAssetsRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(
            request, customer_asset_service.MutateCustomerAssetsRequest
        ):
            request = customer_asset_service.MutateCustomerAssetsRequest(
                request
            )
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if customer_id is not None:
                request.customer_id = customer_id
            if operations is not None:
                request.operations = operations

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[
            self._transport.mutate_customer_assets
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("customer_id", request.customer_id),)
            ),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response


try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-ads",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


__all__ = ("CustomerAssetServiceClient",)
```
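A hypothetical usage sketch for the client above; the credentials file name, customer ID, and empty operations list are placeholders, and note that real Google Ads calls additionally require a developer-token header, which is normally supplied by the higher-level GoogleAdsClient wrapper rather than by calling this generated client directly:

```python
from google.ads.googleads.v10.services.services.customer_asset_service import (
    CustomerAssetServiceClient,
)
from google.ads.googleads.v10.services.types import customer_asset_service

# Placeholder credentials path; the classmethod comes from the file above.
client = CustomerAssetServiceClient.from_service_account_file("key.json")

request = customer_asset_service.MutateCustomerAssetsRequest(
    customer_id="1234567890",   # placeholder customer ID
    operations=[],              # CustomerAssetOperation messages would go here
)
response = client.mutate_customer_assets(request=request)
```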
Record 4: nfredrik/pyjunk - /USB/hook_button.py
  blob_id: 53786cfd3a118db63e76437c20de607fc13212ac
  directory_id: d6ed572ee7efc0dc821f92df0f6b7c2fb0931f7c
  content_id: 90c88ad6ba2bea87542d00c408d41bf27ea79cfd
  snapshot_id: 92df3693230d0537d23f03ddee7796c4f9dbedd9
  revision_id: 94eb532611743d1fed0da972c34ff1e509b61acb
  branch_name: refs/heads/master
  detected_licenses: []    license_type: no_license
  visit_date: 2022-05-28T10:24:53.700587
  revision_date: 2017-10-10T20:56:38    committer_date: 2017-10-10T20:56:38
  github_id: 260439872    star_events_count: 0    fork_events_count: 0
  gha_license_id: null    gha_event_created_at: null    gha_created_at: null    gha_language: null
  src_encoding: UTF-8    language: Python    is_vendor: false    is_generated: false
  length_bytes: 3236    extension: py
  authors: [ "nfredrik@localhost" ]    author: nfredrik@localhost
  content:

```python
#
""" Simple example on how to handle usage control events """

from time import sleep
from msvcrt import kbhit

import pywinusb.hid as hid


def test_telephony_hook():
    """Browse for non system HID class devices, if a telephony page
    hook usage control is available monitor value change events"""
    # play with this value (or set it if you know your device capabilities)
    # this allows to poll the telephony device for the current usage value
    input_interrupt_transfers = False

    # get all currently connected HID devices we could filter by doing
    # something like hid.HidDeviceFilter(VendorId=0x1234), product Id
    # filters (with masks) and other capabilities also available
    all_devices = hid.HidDeviceFilter().get_devices()

    if not all_devices:
        print "No HID class devices attached."
    else:
        # search for our target usage (the hook button)
        #target pageId, usageId
        usage_telephony_hook = hid.get_full_usage_id(0xb, 0x20)

        def hook_pressed(new_value, event_type):
            "simple usage control handler"
            # this simple handler is called on 'pressed' events
            # this means the usage value has changed from '1' to '0'
            # no need to check the value
            event_type = event_type  #avoid pylint warnings
            if new_value:
                print "On Hook!"
            else:
                print "Off Hook!"

        for device in all_devices:
            try:
                device.open()

                # browse input reports
                all_input_reports = device.find_input_reports()

                for input_report in all_input_reports:
                    if usage_telephony_hook in input_report:
                        #found a telephony device w/ hook button
                        print "\nMonitoring %s %s device.\n" \
                            % (device.vendor_name, device.product_name)
                        print "Press any key to exit monitoring " \
                            "(or remove HID device)..."

                        # add event handler (example of other available
                        # events: EVT_PRESSED, EVT_RELEASED, EVT_ALL, ...)
                        device.add_event_handler(usage_telephony_hook,
                            hook_pressed, hid.HID_EVT_CHANGED)  #level usage

                        if input_interrupt_transfers:
                            # poll the current value (GET_REPORT directive),
                            # allow handler to process result
                            input_report.get()

                        while not kbhit() and device.is_plugged():
                            #just keep the device opened to receive events
                            sleep(0.5)
                        return
            finally:
                device.close()
        print "Sorry, no one of the attached HID class devices " \
            "provide any Telephony Hook button"

#
if __name__ == '__main__':
    test_telephony_hook()
```
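This file is Python 2 (bare print statements; the msvcrt import also makes it Windows-only). Under Python 3, the event handler would read, for example:

```python
def hook_pressed(new_value, event_type):
    """Simple usage control handler (Python 3 print syntax)."""
    del event_type  # unused; kept only to match the callback signature
    print("On Hook!" if new_value else "Off Hook!")
```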
Record 5: derekYankie/my_googlesheets - /googlesheets.py
  blob_id: 7b297c2b84f94916505935159fe4c8239cfa6d6c
  directory_id: 170a991bc2cd5e5c2810c5466341f849c1fbfe24
  content_id: 3edb11b85165c4c008a5dcf634ec859079fd44e2
  snapshot_id: d04ac08e3257177ff4fb57e02752aaf1f121a170
  revision_id: ac3c0a82250e756a93d67f3401796474b85ab1ed
  branch_name: refs/heads/master
  detected_licenses: []    license_type: no_license
  visit_date: 2020-03-16T09:06:48.557562
  revision_date: 2018-05-09T01:05:39    committer_date: 2018-05-09T01:05:39
  github_id: 132608896    star_events_count: 1    fork_events_count: 0
  gha_license_id: null    gha_event_created_at: null    gha_created_at: null    gha_language: null
  src_encoding: UTF-8    language: Python    is_vendor: false    is_generated: false
  length_bytes: 696    extension: py
  authors: [ "derek.afriyie@gmail.com" ]    author: derek.afriyie@gmail.com
  content:

```python
import gspread
from oauth2client.service_account import ServiceAccountCredentials

#my
#permit client credentials to interact with the Google Drive API and Google Sheets API
scope = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive']

#store credentials
my_creds = ServiceAccountCredentials.from_json_keyfile_name('client_secret.json', scope)

#bridge client
client = gspread.authorize(my_creds)

#Locate Google spreadsheet and open it
my_sheet = client.open("Legislators").sheet1

#Show all content in table
all_records = my_sheet.get_all_records()
single_row = my_sheet.row_values(1)

print "Here are the rows of this spread sheet: \n", single_row
#print(all_records)
```
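The oauth2client package used above is long deprecated. A sketch of the same flow on current gspread (3.6 or later), which ships its own auth helper; the credentials file name is kept from the record:

```python
import gspread

# gspread.service_account reads the service-account JSON and handles scopes.
client = gspread.service_account(filename="client_secret.json")
sheet = client.open("Legislators").sheet1
print("Here are the rows of this spread sheet:\n", sheet.row_values(1))
```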
94cd40578f30825025b17f2297e50eb9b0f8a635
c1960138a37d9b87bbc6ebd225ec54e09ede4a33
/adafruit-circuitpython-bundle-py-20210402/lib/adafruit_epd/ssd1680.py
66392a74f69fe9a3ecffe574e03a380758bc1e95
[]
no_license
apalileo/ACCD_PHCR_SP21
76d0e27c4203a2e90270cb2d84a75169f5db5240
37923f70f4c5536b18f0353470bedab200c67bad
refs/heads/main
2023-04-07T00:01:35.922061
2021-04-15T18:02:22
2021-04-15T18:02:22
332,101,844
0
0
null
null
null
null
UTF-8
Python
false
false
6,592
py
# SPDX-FileCopyrightText: 2018 Dean Miller for Adafruit Industries # # SPDX-License-Identifier: MIT """ `adafruit_epd.ssd1680` - Adafruit SSD1680 - ePaper display driver ==================================================================================== CircuitPython driver for Adafruit SSD1680 display breakouts * Author(s): Melissa LeBlanc-Williams """ import time from micropython import const import adafruit_framebuf from adafruit_epd.epd import Adafruit_EPD __version__ = "2.9.3" __repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_EPD.git" _SSD1680_DRIVER_CONTROL = const(0x01) _SSD1680_GATE_VOLTAGE = const(0x03) _SSD1680_SOURCE_VOLTAGE = const(0x04) _SSD1680_INIT_SETTING = const(0x08) _SSD1680_INIT_WRITE_REG = const(0x09) _SSD1680_INIT_READ_REG = const(0x0A) _SSD1680_BOOSTER_SOFT_START = const(0x0C) _SSD1680_DEEP_SLEEP = const(0x10) _SSD1680_DATA_MODE = const(0x11) _SSD1680_SW_RESET = const(0x12) _SSD1680_HV_DETECT = const(0x14) _SSD1680_VCI_DETECT = const(0x15) _SSD1680_TEMP_CONTROL = const(0x18) _SSD1680_TEMP_WRITE = const(0x1A) _SSD1680_TEMP_READ = const(0x1B) _SSD1680_EXTTEMP_WRITE = const(0x1C) _SSD1680_MASTER_ACTIVATE = const(0x20) _SSD1680_DISP_CTRL1 = const(0x21) _SSD1680_DISP_CTRL2 = const(0x22) _SSD1680_WRITE_BWRAM = const(0x24) _SSD1680_WRITE_REDRAM = const(0x26) _SSD1680_READ_RAM = const(0x27) _SSD1680_VCOM_SENSE = const(0x28) _SSD1680_VCOM_DURATION = const(0x29) _SSD1680_WRITE_VCOM_OTP = const(0x2A) _SSD1680_WRITE_VCOM_CTRL = const(0x2B) _SSD1680_WRITE_VCOM_REG = const(0x2C) _SSD1680_READ_OTP = const(0x2D) _SSD1680_READ_USERID = const(0x2E) _SSD1680_READ_STATUS = const(0x2F) _SSD1680_WRITE_WS_OTP = const(0x30) _SSD1680_LOAD_WS_OTP = const(0x31) _SSD1680_WRITE_LUT = const(0x32) _SSD1680_CRC_CALC = const(0x34) _SSD1680_CRC_READ = const(0x35) _SSD1680_PROG_OTP = const(0x36) _SSD1680_WRITE_DISPLAY_OPT = const(0x37) _SSD1680_WRITE_USERID = const(0x38) _SSD1680_OTP_PROGMODE = const(0x39) _SSD1680_WRITE_BORDER = const(0x3C) _SSD1680_END_OPTION = const(0x3F) _SSD1680_SET_RAMXPOS = const(0x44) _SSD1680_SET_RAMYPOS = const(0x45) _SSD1680_AUTOWRITE_RED = const(0x46) _SSD1680_AUTOWRITE_BW = const(0x47) _SSD1680_SET_RAMXCOUNT = const(0x4E) _SSD1680_SET_RAMYCOUNT = const(0x4F) _SSD1680_NOP = const(0xFF) class Adafruit_SSD1680(Adafruit_EPD): """driver class for Adafruit SSD1680 ePaper display breakouts""" # pylint: disable=too-many-arguments def __init__( self, width, height, spi, *, cs_pin, dc_pin, sramcs_pin, rst_pin, busy_pin ): super().__init__( width, height, spi, cs_pin, dc_pin, sramcs_pin, rst_pin, busy_pin ) if width % 8 != 0: width += 8 - width % 8 self._buffer1_size = int(width * height / 8) self._buffer2_size = self._buffer1_size if sramcs_pin: self._buffer1 = self.sram.get_view(0) self._buffer2 = self.sram.get_view(self._buffer1_size) else: self._buffer1 = bytearray(self._buffer1_size) self._buffer2 = bytearray(self._buffer2_size) self._framebuf1 = adafruit_framebuf.FrameBuffer( self._buffer1, width, height, buf_format=adafruit_framebuf.MHMSB ) self._framebuf2 = adafruit_framebuf.FrameBuffer( self._buffer2, width, height, buf_format=adafruit_framebuf.MHMSB ) self.set_black_buffer(0, True) self.set_color_buffer(1, False) # pylint: enable=too-many-arguments def begin(self, reset=True): """Begin communication with the display and set basic settings""" if reset: self.hardware_reset() self.power_down() def busy_wait(self): """Wait for display to be done with current task, either by polling the busy pin, or pausing""" if self._busy: while self._busy.value: 
time.sleep(0.01) else: time.sleep(0.5) def power_up(self): """Power up the display in preparation for writing RAM and updating""" self.hardware_reset() self.busy_wait() self.command(_SSD1680_SW_RESET) self.busy_wait() # driver output control self.command( _SSD1680_DRIVER_CONTROL, bytearray([self._height - 1, (self._height - 1) >> 8, 0x00]), ) # data entry mode self.command(_SSD1680_DATA_MODE, bytearray([0x03])) # Set voltages self.command(_SSD1680_WRITE_VCOM_REG, bytearray([0x36])) self.command(_SSD1680_GATE_VOLTAGE, bytearray([0x17])) self.command(_SSD1680_SOURCE_VOLTAGE, bytearray([0x41, 0x00, 0x32])) # Set ram X start/end postion self.command(_SSD1680_SET_RAMXPOS, bytearray([0x01, 0x10])) # Set ram Y start/end postion self.command( _SSD1680_SET_RAMYPOS, bytearray([0, 0, self._height - 1, (self._height - 1) >> 8]), ) # Set border waveform self.command(_SSD1680_WRITE_BORDER, bytearray([0x05])) # Set ram X count self.command(_SSD1680_SET_RAMXCOUNT, bytearray([0x01])) # Set ram Y count self.command(_SSD1680_SET_RAMYCOUNT, bytearray([self._height - 1, 0])) self.busy_wait() def power_down(self): """Power down the display - required when not actively displaying!""" self.command(_SSD1680_DEEP_SLEEP, bytearray([0x01])) time.sleep(0.1) def update(self): """Update the display from internal memory""" self.command(_SSD1680_DISP_CTRL2, bytearray([0xF4])) self.command(_SSD1680_MASTER_ACTIVATE) self.busy_wait() if not self._busy: time.sleep(3) # wait 3 seconds def write_ram(self, index): """Send the one byte command for starting the RAM write process. Returns the byte read at the same time over SPI. index is the RAM buffer, can be 0 or 1 for tri-color displays.""" if index == 0: return self.command(_SSD1680_WRITE_BWRAM, end=False) if index == 1: return self.command(_SSD1680_WRITE_REDRAM, end=False) raise RuntimeError("RAM index must be 0 or 1") def set_ram_address(self, x, y): # pylint: disable=unused-argument, no-self-use """Set the RAM address location, not used on this chipset but required by the superclass""" # Set RAM X address counter self.command(_SSD1680_SET_RAMXCOUNT, bytearray([x + 1])) # Set RAM Y address counter self.command(_SSD1680_SET_RAMYCOUNT, bytearray([y, y >> 8]))
[ "55570902+apalileo@users.noreply.github.com" ]
55570902+apalileo@users.noreply.github.com
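A hypothetical wiring sketch for this driver on a CircuitPython board. The pin assignments (D9-D12) and the 122x250 panel size (a common 2.13" SSD1680 breakout) are assumptions, not taken from the record:

```python
import board
import busio
import digitalio
from adafruit_epd.epd import Adafruit_EPD
from adafruit_epd.ssd1680 import Adafruit_SSD1680

spi = busio.SPI(board.SCK, MOSI=board.MOSI, MISO=board.MISO)
ecs = digitalio.DigitalInOut(board.D12)   # chip select (assumed pin)
dc = digitalio.DigitalInOut(board.D11)    # data/command (assumed pin)
rst = digitalio.DigitalInOut(board.D10)   # reset (assumed pin)
busy = digitalio.DigitalInOut(board.D9)   # busy sense (assumed pin)

# sramcs_pin=None selects the in-memory framebuffers branch of __init__ above.
display = Adafruit_SSD1680(
    122, 250, spi,
    cs_pin=ecs, dc_pin=dc, sramcs_pin=None, rst_pin=rst, busy_pin=busy,
)
display.fill(Adafruit_EPD.WHITE)
display.display()
```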
Record 7: Niyokee/coodori-batch - /experiment/financial_statement.py
  blob_id: 795e1fb93d180391f6871a8b83279551c75bfce7
  directory_id: 50cd47781972b0906bdeb86ec19d66afca146479
  content_id: 20eb0ed53f64ade87d12a72352c17057f3b05697
  snapshot_id: 500f8499cab70dc779c4a30af42c5072bc37d694
  revision_id: 5860af70f27999b838e99f553154add1d5a324f6
  branch_name: refs/heads/master
  detected_licenses: []    license_type: no_license
  visit_date: 2023-02-12T02:13:58.646853
  revision_date: 2021-01-06T00:20:07    committer_date: 2021-01-06T00:20:07
  github_id: 289264257    star_events_count: 0    fork_events_count: 0
  gha_license_id: null    gha_event_created_at: null    gha_created_at: null    gha_language: null
  src_encoding: UTF-8    language: Python    is_vendor: false    is_generated: false
  length_bytes: 20995    extension: py
  authors: [ "knk43nok@gmail.com" ]    author: knk43nok@gmail.com
  content (Japanese comments and docstrings translated to English):

```python
import re
import sys
import traceback
from datetime import datetime

import requests
import pandas as pd
import pandas.io.sql as psql
import numpy as np
from db_util import *
from bs4 import BeautifulSoup
from sqlalchemy import create_engine
import pandas.io.sql as psql
import psycopg2 as pg
from logging import getLogger, StreamHandler, Formatter, FileHandler, DEBUG


def setup_logger(log_folder, modname=__name__):
    logger = getLogger(modname)
    logger.setLevel(DEBUG)

    sh = StreamHandler()
    sh.setLevel(DEBUG)
    formatter = Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    sh.setFormatter(formatter)
    logger.addHandler(sh)

    fh = FileHandler(log_folder)  #fh = file handler
    fh.setLevel(DEBUG)
    fh_formatter = Formatter('%(asctime)s - %(filename)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s')
    fh.setFormatter(fh_formatter)
    logger.addHandler(fh)
    return logger


class FinancialStatement:
    BASE_URL = "https://www.sec.gov"

    def __init__(self, path):
        self.path = path

    def _xml_summary(self, path):
        path_to_xml_summary = FinancialStatement.BASE_URL + path
        content = requests.get(path_to_xml_summary).json()
        item_name = [item['name'] for item in content['directory']['item']
                     if item['name'] == 'FilingSummary.xml']
        # Grab the filing summary and create a new url leading to the file so we can download it
        xml_summary = FinancialStatement.BASE_URL + content['directory']['name'] + "/" + item_name[0]
        return xml_summary

    def report_list(self):
        xml_summary = self._xml_summary(self.path)
        base_url = xml_summary.replace('FilingSummary.xml', '')
        content = requests.get(xml_summary).content
        soup = BeautifulSoup(content, 'lxml')
        # find the 'myreports' tag because this contains all the individual reports submitted.
        reports = soup.find('myreports')
        # a list to store all the individual components of the report
        report_list = []
        # loop through each report in the 'myreports' tag but avoid the last one as this will cause an error.
        for report in reports.find_all('report')[:-1]:
            # let's create a dictionary to store all the different parts we need.
            report_dict = {'name_short': report.shortname.text,
                           'name_long': report.longname.text}
            try:
                report_dict['url'] = base_url + report.htmlfilename.text
            except AttributeError:
                report_dict['url'] = base_url + report.xmlfilename.text
            # append the dictionary to the master list.
            report_list.append(report_dict)
        return report_list

    def statements_dict(self, report_list):
        # create the list to hold the statement urls
        statements_dict = []
        for report_dict in report_list:
            if re.match('paren', report_dict['name_short'], re.IGNORECASE):
                continue
        # balance sheet
        balance_sheet_patterns = [
            '.*balance.*', '.*financial ?position.*', '.*financial ?condition.*']
        # income statement
        income_statement_patterns = [
            '.*of ?income.*', '.*of ?operation.*', '.*of ?earnings']
        # cash flow statement
        cash_flow_patterns = ['.*cash ?flow.*']
        # report_list = [balontinue
        statements_dict = {}
        for index, patterns in enumerate([balance_sheet_patterns,
                                          income_statement_patterns,
                                          cash_flow_patterns]):
            for pattern in patterns:
                tmp_title_dict = {}
                for report_dict in report_list:
                    if re.match(pattern, report_dict['name_short'], re.IGNORECASE):
                        key = f"({index}){report_dict['name_short']}"
                        tmp_title_dict[key] = report_dict['url']
                if len(tmp_title_dict) == 1:
                    statements_dict.update(tmp_title_dict)
                elif len(tmp_title_dict) > 1:
                    tmp_dict = {min(tmp_title_dict): tmp_title_dict[min(tmp_title_dict)]}
                    statements_dict.update(tmp_dict)
                elif len(tmp_title_dict) == 0:
                    continue
        return statements_dict

    def statements_data(self, statement_name, statement_url):
        # let's assume we want all the statements in a single data set.
        statements_data = []
        # define a dictionary that will store the different parts of the statement.
        statement_data = {'statement_name': statement_name,
                          'headers': [], 'sections': [], 'data': []}
        # request the statement file content
        logger.info(f'statement_name is {statement_name} statement_url is {statement_url}')
        content = requests.get(statement_url).content
        report_soup = BeautifulSoup(content, 'html')
        first_row = report_soup.table.find_all('tr')[0].get_text()
        # find all the rows, figure out what type of row it is, parse the elements, and store in the statement file list.
        for index, row in enumerate(report_soup.table.find_all('tr')):
            # first let's get all the elements.
            cols = row.find_all('td')
            # if it's a regular row and not a section or a table header
            reg_row = []
            if (len(row.find_all('th')) == 0 and len(row.find_all('strong')) == 0):
                try:
                    for ele in cols:
                        if ele.get_text():
                            reg_row += [ele.find('td').text]
                        else:
                            reg_row += ['Nan']
                except AttributeError:
                    reg_row = [ele.text.strip() for ele in cols]
                statement_data['data'].append(reg_row)
            # if it's a regular row and a section but not a table header
            elif (len(row.find_all('th')) == 0 and len(row.find_all('strong')) != 0):
                sec_row = cols[0].text.strip()
                statement_data['sections'].append(sec_row)
            # finally if it's not any of those it must be a header
            elif (len(row.find_all('th')) != 0):
                hed_row = [ele.text.strip() for ele in row.find_all('th')]
                statement_data['headers'].append(hed_row)
            else:
                logger.info('We encountered an error.')
        # append it to the master list.
        statements_data.append(statement_data)
        return statements_data

    def denomination(self):
        if re.search(f'.*thousand*', self.statements_data[0]['headers'][0][0], re.IGNORECASE):
            return 1000
        elif re.search(f'.*million*', self.statements_data[0]['headers'][0][0], re.IGNORECASE):
            return 1000000
        else:
            return 1

    def income_header(self):
        try:
            income_header = self.statements_data[0]['headers'][1]
        except IndexError:
            income_header = self.statements_data[0]['headers'][0]
        income_header = [element for element in income_header if not '$' in element]
        return income_header

    def trim_value(self):
        income_data = self.statements_data[0]['data']
        income_df = pd.DataFrame(income_data)
        # Define the Index column, rename it, and we need to make sure to drop the old column once we reindex.
        income_df.index = income_df[0]
        income_df.index.name = 'category'
        income_df = income_df.drop(0, axis=1)
        # Get rid of the '$', '(', ')', and convert the '' to NaNs.
        income_df = income_df.replace('[\$,)]', '', regex=True).replace(
            '[(]', '-', regex=True).replace('', 'NaN', regex=True)
        # everything is a string, so let's convert all the data to a float.
        try:
            income_df = income_df.astype(float)
        except:
            print(self.statement_url)
        # Change the column headers
        income_df.columns = self.header
        column_list = []
        for column in self.header:
            column_list += [datetime.strptime(
                column.replace('.', ''), "%b %d, %Y")]
        column_index = column_list.index(max(column_list))
        latest_column_name = self.header[column_index]
        values = income_df[latest_column_name]
        if type(income_df[latest_column_name]) != type(pd.Series(1)):
            values = income_df[latest_column_name].iloc[:, 0]
        return values

    def is_same_element(self, value_list):
        """Check whether the fetched list consists entirely of the same element."""
        return value_list == [value_list[0]] * len(value_list) if value_list else False

    def trim_index(self, index_list):
        """Normalize an index list obtained via regex + a list comprehension.
        When index_list consists of a single repeated value and
        len(index_list) > 1, reduce it to a single value."""
        if len(set(index_list)) == 1:
            return list(set(index_list))[0]
        else:
            # FIXME handling for when index_list is not made up of a single repeated value
            return index_list[0]

    def find_category_with_regex(self, pattern):
        """Return the list of categories matching the regular expression."""
        return [category for category in self.values.index.values
                if re.search(pattern, category, re.IGNORECASE)]

    def insert_df(self, table_name):
        DBUtil.insertDf(self._make_df(), table_name, if_exists="append", index=False)


class BalanceSheet(FinancialStatement):
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)
        self.statements_data = self.statements_data(self.statement_name, self.statement_url)
        self.header = self.income_header()
        self.values = self.trim_value()


class ProfitLoss(FinancialStatement):
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)
        self.statements_data = self.statements_data(self.statement_name, self.statement_url)
        self.header = self.income_header()
        self.values = self.trim_value()

    def get_shares_outstanding(self):
        logger.info(f'{sys._getframe().f_code.co_name}')
        values_diluted = []
        indices_shares_outstanding = self.find_category_with_regex('diluted')
        for i in range(len(indices_shares_outstanding)):
            values_diluted.append(profit_loss.values[indices_shares_outstanding[i]])
        if len(values_diluted) != 2:
            logger.info(f"values_diluted: {values_diluted}")
        return max(values_diluted)

    def get_diluted_eps(self):
        logger.info(f'{sys._getframe().f_code.co_name}')
        values_diluted = []
        indices_shares_outstanding = self.find_category_with_regex('diluted')
        for i in range(len(indices_shares_outstanding)):
            values_diluted.append(profit_loss.values[indices_shares_outstanding[i]])
        if len(values_diluted) != 2:
            logger.info(f"values_diluted: {values_diluted}")
        return min(values_diluted)

    def get_dividends(self):
        logger.info(f'{sys._getframe().f_code.co_name}')
        indices_dividend = self.find_category_with_regex('dividend')
        if len(indices_dividend) == 0:
            return 0.0
        else:
            try:
                dividends = self.values[indices_dividend].unique()[0]
            except AttributeError:
                dividends = self.values[indices_dividend]
        return dividends

    def get_sales(self):
        pass

    def _get_operating_activities(self):
        sql = f"""SELECT operating_activities FROM cash_flow
                  WHERE cik = '{profit_loss.cik}' and year = {profit_loss.year} and quater = {profit_loss.quater}
               """
        operating_activities_df = psql.read_sql(sql, DBUtil.getConnect())
        return operating_activities_df.operating_activities.values[0]

    def get_cash_flow_per_share(self):
        """Compute CFPS.
        CFPS = (cash flow + amortization) / shares outstanding
        """
        operating_activities = self._get_operating_activities()
        # TODO fetch depreciation/amortization from the balance sheet
        amortization = 0.0
        shares_outstanding = self.get_shares_outstanding()
        return (operating_activities + shares_outstanding) / shares_outstanding

    def _make_df(self):
        return pd.DataFrame({'id': None,
                             'dps': [self.get_dividends()],
                             'eps': [self.get_diluted_eps()],
                             'cfps': [self.get_cash_flow_per_share()],
                             'sps': [self.get_diluted_eps()],
                             'shares_outstanding': [self.get_shares_outstanding()],
                             'cik': [self.cik],
                             'year': [self.year],
                             'quater': [self.quater],
                             'form_type': [self.form_type],
                             'created_at': [datetime.now().strftime('%Y-%m-%d %H:%M:%S')],
                             'source': [self.statement_url]
                             })


class CashFlow(FinancialStatement):
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)
        self.statements_data = self.statements_data(
            self.statement_name, self.statement_url)
        self.header = self.income_header()
        self.values = self.trim_value()

    def get_operating_activities_value(self):
        """Find the index whose item name is "operating activities" and return its value."""
        indices_income_from_operation = self.find_category_with_regex('operating activities')
        index_income_from_operation = self.trim_index(indices_income_from_operation)
        try:
            cash_from_operating_activities = self.values[index_income_from_operation].unique()[0] * self.denomination()
        except AttributeError:
            cash_from_operating_activities = self.values[index_income_from_operation] * self.denomination()
        return cash_from_operating_activities

    def get_financing_activities_value(self):
        """Find the index whose item name is "financing activities" and return its value."""
        indices_income_from_financing = self.find_category_with_regex('financing activities')
        index_income_from_financing = self.trim_index(indices_income_from_financing)
        try:
            cash_from_financing_activities = self.values[index_income_from_financing].unique()[0] * self.denomination()
        except AttributeError:
            cash_from_financing_activities = self.values[index_income_from_financing] * self.denomination()
        return cash_from_financing_activities

    def get_cash_beginning(self):
        """Find the index whose item name is "cash beginning of period" and return its value."""
        indices_cash_beginning = self.find_category_with_regex('beginning of')
        index_income_cash_beginning = self.trim_index(indices_cash_beginning)
        try:
            cash_beginning = self.values[index_income_cash_beginning].unique()[0] * self.denomination()
        except AttributeError:
            cash_beginning = self.values[index_income_cash_beginning] * self.denomination()
        return cash_beginning

    def get_cash_end(self):
        """Find the index whose item name is "cash end of period" and return its value."""
        indices_cash_end = self.find_category_with_regex('end of')
        index_income_cash_end = self.trim_index(indices_cash_end)
        try:
            cash_end = self.values[index_income_cash_end].unique()[0] * self.denomination()
        except AttributeError:
            cash_end = self.values[index_income_cash_end] * self.denomination()
        return cash_end

    def get_investing_activities_value(self):
        """Find the index whose item name is "investing activities" and return its value."""
        indices_income_from_investing = self.find_category_with_regex('investing activities')
        index_income_from_investing = self.trim_index(indices_income_from_investing)
        try:
            cash_from_investing_activities = self.values[index_income_from_investing].unique()[0] * self.denomination()
        except AttributeError:
            cash_from_investing_activities = self.values[index_income_from_investing] * self.denomination()
        return cash_from_investing_activities

    def _make_df(self):
        return pd.DataFrame({'id': None,
                             'operating_activities': [self.get_operating_activities_value()],
                             'financing_activities': [self.get_financing_activities_value()],
                             'investing_activities': [self.get_investing_activities_value()],
                             'cash_beginning_of_period': [self.get_cash_beginning()],
                             'cash_end_of_period': [self.get_cash_end()],
                             'cik': [self.cik],
                             'year': [self.year],
                             'quater': [self.quater],
                             'form_type': [self.form_type],
                             'created_at': [datetime.now().strftime('%Y-%m-%d %H:%M:%S')],
                             'source': [self.statement_url]
                             })


if __name__ == '__main__':
    # specify the log file name to save
    # log_folder = '{0}.log'.format(datetime.date.today())
    # initialize logging
    logger = setup_logger('logging.log')
    start_year = 2018
    year = 2018
    end_year = 2019
    start_quarter = 1
    quater = 1
    end_quarter = 2
    form_type = '10-K'
    # start_year = int(os.environ['start_year'])
    # end_year = int(os.environ['end_year'])
    # start_quarter = int(os.environ['start_quarter'])
    # end_quarter = int(os.environ['end_quarter'])
    # form_type = os.environ['form_type']
    header_list = []
    name_list_1 = []
    name_list_2 = []
    url_list = []
    for year in range(start_year, end_year):
        for quater in range(start_quarter, end_quarter):
            source_df = pd.read_csv(f'./data/{year}_QTR{quater}.csv')
            source_df = source_df[source_df['Form_Type'] == form_type]
            for _, row in source_df.iterrows():
                try:
                    # logger.info(row)
                    FinancialStatement.cik = str(row['CIK'])
                    FinancialStatement.year = year
                    FinancialStatement.quater = quater
                    FinancialStatement.form_type = form_type
                    financial_statement = FinancialStatement(str(row['url']))
                    report_list = financial_statement.report_list()
                    statements_dict = financial_statement.statements_dict(report_list)
                    for statement_name, statement_url in statements_dict.items():
                        if '(0)' in statement_name:
                            balance_sheet = BalanceSheet(statement_name=statement_name,
                                                         statement_url=statement_url)
                        elif '(1)' in statement_name:
                            profit_loss = ProfitLoss(statement_name=statement_name,
                                                     statement_url=statement_url)
                            logger.info(profit_loss.cik)
                            logger.info('='*80)
                            logger.info(f"header: {profit_loss.statements_data[0]['headers'][0][0]}")
                            header_list.append(profit_loss.statements_data[0]['headers'][0][0])
                            logger.info(f"regex_diluted_name: {profit_loss.find_category_with_regex('diluted')}")
                            if len(profit_loss.find_category_with_regex('diluted')) > 1:
                                name_list_1.append(profit_loss.find_category_with_regex('diluted')[0])
                                name_list_2.append(profit_loss.find_category_with_regex('diluted')[1])
                            else:
                                name_list_1.append(profit_loss.find_category_with_regex('diluted'))
                                name_list_2.append(None)
                            url_list.append(profit_loss.statement_url)
                            # profit_loss.insert_df('profit_loss')
                        elif '(2)' in statement_name:
                            cash_flow = CashFlow(statement_name=statement_name,
                                                 statement_url=statement_url)
                            cash_flow.insert_df('cash_flow')
                except BaseException as e:
                    logger.error(e)
                    logger.error(row)
    pd.DataFrame({'header': header_list,
                  'diluted_match1': name_list_1,
                  'diluted_match2': name_list_2,
                  'url_list': url_list}).to_csv('./result.csv')
```
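The find_category_with_regex helper drives most of the parsing above: it scans the row index of the trimmed value Series for a case-insensitive pattern. A toy demonstration on made-up line items (the data below is hypothetical, not from any filing):

```python
import re
import pandas as pd

values = pd.Series(
    {
        "Net cash provided by operating activities": 120.0,
        "Net cash used in investing activities": -45.0,
        "Net cash used in financing activities": -30.0,
    }
)

pattern = "operating activities"
matches = [
    category
    for category in values.index.values
    if re.search(pattern, category, re.IGNORECASE)
]
print(matches)             # ['Net cash provided by operating activities']
print(values[matches[0]])  # 120.0
```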
5ba84151402c306929ff08ad121723a5b154fdab
a2d13dcf9bd687c6b288f0cd00520ad3e7e360f2
/rl/algos/mirror_ppo.py
f21d52d38ca46bbd9e1e0534039d04e6e3a0d5f0
[ "MIT" ]
permissive
RohanPankaj/apex
f3f7286b5c420cf0040dd7243351f7618d634b6c
74e96386bf9446d1179106d6d65ea0368c1b5b27
refs/heads/master
2020-07-11T13:10:46.329222
2019-08-26T22:30:09
2019-08-26T22:30:09
204,548,555
0
0
null
null
null
null
UTF-8
Python
false
false
7,541
py
import time from copy import deepcopy import torch import torch.optim as optim from torch.utils.data.sampler import BatchSampler, SubsetRandomSampler from torch.distributions import kl_divergence import numpy as np from rl.algos import PPO # TODO: # env.mirror() vs env.matrix? # TODO: use magic to make this reuse more code (callbacks etc?) class MirrorPPO(PPO): def update(self, policy, old_policy, optimizer, observations, actions, returns, advantages, env_fn ): env = env_fn() mirror_observation = env.mirror_observation if env.clock_based: mirror_observation = env.mirror_clock_observation mirror_action = env.mirror_action minibatch_size = self.minibatch_size or advantages.numel() for _ in range(self.epochs): losses = [] sampler = BatchSampler( SubsetRandomSampler(range(advantages.numel())), minibatch_size, drop_last=True ) for indices in sampler: indices = torch.LongTensor(indices) obs_batch = observations[indices] # obs_batch = torch.cat( # [obs_batch, # obs_batch @ torch.Tensor(env.obs_symmetry_matrix)] # ).detach() action_batch = actions[indices] # action_batch = torch.cat( # [action_batch, # action_batch @ torch.Tensor(env.action_symmetry_matrix)] # ).detach() return_batch = returns[indices] # return_batch = torch.cat( # [return_batch, # return_batch] # ).detach() advantage_batch = advantages[indices] # advantage_batch = torch.cat( # [advantage_batch, # advantage_batch] # ).detach() values, pdf = policy.evaluate(obs_batch) # TODO, move this outside loop? with torch.no_grad(): _, old_pdf = old_policy.evaluate(obs_batch) old_log_probs = old_pdf.log_prob(action_batch).sum(-1, keepdim=True) log_probs = pdf.log_prob(action_batch).sum(-1, keepdim=True) ratio = (log_probs - old_log_probs).exp() cpi_loss = ratio * advantage_batch clip_loss = ratio.clamp(1.0 - self.clip, 1.0 + self.clip) * advantage_batch actor_loss = -torch.min(cpi_loss, clip_loss).mean() critic_loss = 0.5 * (return_batch - values).pow(2).mean() # Mirror Symmetry Loss _, deterministic_actions = policy(obs_batch) if env.clock_based: mir_obs = mirror_observation(obs_batch, env.clock_inds) _, mirror_actions = policy(mir_obs) else: _, mirror_actions = policy(mirror_observation(obs_batch)) mirror_actions = mirror_action(mirror_actions) mirror_loss = 4 * (deterministic_actions - mirror_actions).pow(2).mean() entropy_penalty = -self.entropy_coeff * pdf.entropy().mean() # TODO: add ability to optimize critic and actor seperately, with different learning rates optimizer.zero_grad() (actor_loss + critic_loss + mirror_loss + entropy_penalty).backward() # Clip the gradient norm to prevent "unlucky" minibatches from # causing pathalogical updates torch.nn.utils.clip_grad_norm_(policy.parameters(), self.grad_clip) optimizer.step() losses.append([actor_loss.item(), pdf.entropy().mean().item(), critic_loss.item(), ratio.mean().item(), mirror_loss.item()]) # TODO: add verbosity arguments to suppress this print(' '.join(["%g"%x for x in np.mean(losses, axis=0)])) # Early stopping if kl_divergence(pdf, old_pdf).mean() > 0.02: print("Max kl reached, stopping optimization early.") break def train(self, env_fn, policy, n_itr, logger=None): old_policy = deepcopy(policy) optimizer = optim.Adam(policy.parameters(), lr=self.lr, eps=self.eps) start_time = time.time() for itr in range(n_itr): print("********** Iteration {} ************".format(itr)) sample_start = time.time() batch = self.sample_parallel(env_fn, policy, self.num_steps, self.max_traj_len) print("time elapsed: {:.2f} s".format(time.time() - start_time)) print("sample time elapsed: {:.2f} 
s".format(time.time() - sample_start)) observations, actions, returns, values = map(torch.Tensor, batch.get()) advantages = returns - values advantages = (advantages - advantages.mean()) / (advantages.std() + self.eps) minibatch_size = self.minibatch_size or advantages.numel() print("timesteps in batch: %i" % advantages.numel()) old_policy.load_state_dict(policy.state_dict()) # WAY faster than deepcopy optimizer_start = time.time() self.update(policy, old_policy, optimizer, observations, actions, returns, advantages, env_fn) print("optimizer time elapsed: {:.2f} s".format(time.time() - optimizer_start)) if logger is not None: evaluate_start = time.time() test = self.sample_parallel(env_fn, policy, 800 // self.n_proc, self.max_traj_len, deterministic=True) print("evaluate time elapsed: {:.2f} s".format(time.time() - evaluate_start)) _, pdf = policy.evaluate(observations) _, old_pdf = old_policy.evaluate(observations) entropy = pdf.entropy().mean().item() kl = kl_divergence(pdf, old_pdf).mean().item() logger.record('Return (test)', np.mean(test.ep_returns), itr, 'Return', x_var_name='Iterations', split_name='test') logger.record('Return (batch)', np.mean(batch.ep_returns), itr, 'Return', x_var_name='Iterations', split_name='batch') logger.record('Mean Eplen', np.mean(batch.ep_lens), itr, 'Mean Eplen', x_var_name='Iterations', split_name='batch') logger.record('Mean KL Div', kl, itr, 'Mean KL Div', x_var_name='Iterations', split_name='batch') logger.record('Mean Entropy', entropy, itr, 'Mean Entropy', x_var_name='Iterations', split_name='batch') logger.dump() # TODO: add option for how often to save model if np.mean(test.ep_returns) > self.max_return: self.max_return = np.mean(test.ep_returns) self.save(policy) print("Total time: {:.2f} s".format(time.time() - start_time))
[ "yesh.godse@gmail.com" ]
yesh.godse@gmail.com
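The heart of the update above is the standard PPO clipped surrogate. A minimal self-contained sketch of that one term on dummy tensors (all values are illustrative, not taken from the repo):

import torch

clip = 0.2
log_probs = torch.tensor([[-1.0], [-2.0]])       # log pi_new(a|s)
old_log_probs = torch.tensor([[-1.2], [-1.5]])   # log pi_old(a|s)
advantages = torch.tensor([[0.5], [-0.3]])

ratio = (log_probs - old_log_probs).exp()        # pi_new / pi_old
cpi = ratio * advantages                         # unclipped surrogate
clipped = ratio.clamp(1 - clip, 1 + clip) * advantages
actor_loss = -torch.min(cpi, clipped).mean()     # pessimistic bound, to be minimized
print(actor_loss)                                # tensor(-0.1800...)

The mirror loss in MirrorPPO is then simply added to this objective, penalizing disagreement between the policy's action on an observation and the mirrored action on the mirrored observation.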
7fdb8d3b069cc269c6095853e94a07255525f040
4424f20569fbc450ea4900af542577fc0e73763d
/Lesson10/Peaks.py
5f0be85c963cd389d4db33466891b5b5e9054f6a
[]
no_license
AustinKuo/Codility_Python
040f5ed9eba6886aca5ef307e6921ced3e97ae58
3754a643a96434a23936d81deb0add7fa0af1689
refs/heads/master
2021-04-05T23:48:35.135036
2019-03-19T08:23:00
2019-03-19T08:23:00
124,468,052
0
0
null
null
null
null
UTF-8
Python
false
false
739
py
# you can write to stdout for debugging purposes, e.g.
# print("this is a debug message")

def solution(A):
    # write your code in Python 3.6
    N = len(A)
    dis = []
    count = 1
    # Get next peak distance
    for i in range(1, N - 1):
        if A[i] > A[i - 1] and A[i] > A[i + 1]:
            dis.append(count)
            count = 0
        count += 1

    maxR = len(dis)
    for i in range(maxR, 0, -1):
        if N % i != 0:
            continue
        block = int(N / i)
        posA = 0
        posB = 0
        for num in dis:
            posA += num
            if posB <= posA:
                posB += block
                if posB <= posA:
                    break
        if posB == N:
            return i
    return 0
[ "noreply@github.com" ]
noreply@github.com
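A quick illustrative check of the function above (the first array is the classic Codility example, with peaks at indices 3, 5 and 10; the asserts are added here, not part of the original file):

assert solution([1, 2, 3, 4, 3, 4, 1, 2, 3, 4, 6, 2]) == 3  # three blocks of four, one peak each
assert solution([1, 2, 1]) == 1                             # a single peak -> one block
assert solution([1, 2, 3]) == 0                             # no peaks at all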
2c577726ddb93acc298d9aa48b796d856a11327a
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p02818/s867983819.py
672fccfd638abcdbc2e8bfd4c826f7fa452e2450
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
204
py
A, B, K = map(int, input().split())

count = 0
if K == 0:
    print('%d %d' % (A, B))
elif K <= A:
    print('%d %d' % (A - K, B))
elif A < K <= B + A:
    print('%d %d' % (0, B - (K - A)))
else:
    print('0 0')
[ "66529651+Aastha2104@users.noreply.github.com" ]
66529651+Aastha2104@users.noreply.github.com
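The branching above implements "eat K cookies from A first, then from B". An equivalent branch-free restatement, for illustration only (the helper name and test values are made up):

def eat(A, B, K):
    a = max(A - K, 0)
    b = max(B - max(K - A, 0), 0)
    return a, b

assert eat(12, 15, 0) == (12, 15)
assert eat(12, 15, 5) == (7, 15)
assert eat(12, 15, 20) == (0, 7)    # A emptied, 8 more taken from B
assert eat(12, 15, 100) == (0, 0)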
55847be3e5c35a5f6af9a51b4cfebaad77105fa8
8ae383214b802cca919f9b646455a03ded014fac
/Cara_gui.py
f801a7a0f6fd9c611be6f10fe848a9b09bebd9c6
[]
no_license
abhidya/PreTTy
3c1a1ebc0ca8d670ebe79cdf440f9d177ac1041f
70e032fb1caa8c57b28270994612b4ccac783b5f
refs/heads/master
2020-04-09T15:11:53.592270
2018-11-28T17:58:14
2018-11-28T17:58:14
160,419,014
2
0
null
null
null
null
UTF-8
Python
false
false
1,175
py
import tkinter as tk
import PIL
from PIL import Image, ImageTk

# --- functions ---

#<<<<<<< HEAD
def on_click(event=None):
    # `command=` calls function without argument
    # `bind` calls function with one argument
    print("image clicked")

# --- main ---

root = tk.Tk()

# load image and resize it
basewidth = 200
photo = Image.open("../gui/images/alpaca.gif")
wpercent = (basewidth / float(photo.size[0]))
hsize = int((float(photo.size[1]) * float(wpercent)))
photo = photo.resize((basewidth, hsize), PIL.Image.ANTIALIAS)
photo.save("resized_image.gif")
photo = ImageTk.PhotoImage(photo)

# label with image
l = tk.Label(root, image=photo)
l.pack()

# bind click event to image
l.bind('<Button-1>', on_click)

# button with image bound to the same function
b = tk.Button(root, image=photo, command=on_click)
b.pack()

# button with text closing window
b = tk.Button(root, text="Close", fg="red", activeforeground="blue",
              background="cyan", command=root.destroy)
b.pack()
#bg="#ff340a"

#b = tk.Button(root)
#photo = tk.PhotoImage(file="BadHombre.png")
#b.config(image=photo,width="100",height="100")
#b.pack(side=LEFT)

root.geometry("1000x725")
root.mainloop()
[ "cscott57@vols.utk.edu" ]
cscott57@vols.utk.edu
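One portability note on the resize call above: PIL.Image.ANTIALIAS was removed in Pillow 10 (LANCZOS is the same filter under its current name). A small compatibility shim, assuming nothing about the reader's installed Pillow version:

from PIL import Image

try:
    RESAMPLE = Image.Resampling.LANCZOS   # Pillow >= 9.1
except AttributeError:
    RESAMPLE = Image.ANTIALIAS            # older Pillow

# then: photo = photo.resize((basewidth, hsize), RESAMPLE)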
b6ccec191dbc184ce1f567885c0785911098364f
deb8655fd05c80e3b81d3bf6100ad0970c959aec
/contact/tests/test_signup.py
f339d88b76a054eff3d880d9d6e0232cc81421c6
[]
no_license
mayank-agarwal-96/phonebook
5c20e151194f1c54dabc8c92aae4c5e16efe78dd
015917a6a861e8a13e42c896f7e9d10045016e2d
refs/heads/master
2021-01-23T01:59:52.550498
2017-03-23T13:53:47
2017-03-23T13:53:47
85,954,923
0
0
null
null
null
null
UTF-8
Python
false
false
2,912
py
import datetime
import time

from django.test import TestCase, LiveServerTestCase
from django.contrib.auth.models import User
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait


class SignUpTest(LiveServerTestCase):

    def setUp(self):
        self.selenium = webdriver.Chrome()
        self.selenium.maximize_window()
        super(SignUpTest, self).setUp()

    def tearDown(self):
        self.selenium.quit()
        super(SignUpTest, self).tearDown()

    def fill_registration_form(self, values):
        username = self.selenium.find_element_by_name('username')
        email = self.selenium.find_element_by_name('email')
        password = self.selenium.find_element_by_name('password')
        username.send_keys(values['username'])
        email.send_keys(values['email'])
        password.send_keys(values['password'])
        self.selenium.find_element_by_id("userform").submit()

    def test_null_values(self):
        self.selenium.get('%s%s' % (self.live_server_url, "/register/"))
        values = {'username': '', 'email': '', 'password': ''}
        self.fill_registration_form(values)
        self.assertEqual(len(User.objects.all()), 0)

    def test_successful_registration(self):
        self.selenium.get('%s%s' % (self.live_server_url, "/register/"))
        values = {'username': 'testuser', 'email': 'testuser@test.com',
                  'password': 'testpass'}
        self.fill_registration_form(values)
        self.assertEqual(self.selenium.current_url, self.live_server_url + '/login/')
        self.assertEqual(len(User.objects.all()), 1)

    def test_duplicate_username(self):
        self.selenium.get('%s%s' % (self.live_server_url, "/register/"))
        values = {'username': 'testuser', 'email': 'testuser@test.com',
                  'password': 'testpass'}
        self.fill_registration_form(values)
        self.assertEqual(self.selenium.current_url, self.live_server_url + '/login/')
        self.assertEqual(len(User.objects.all()), 1)
        self.selenium.get('%s%s' % (self.live_server_url, "/register/"))
        values = {'username': 'testuser', 'email': 'testuser1@test.com',
                  'password': 'testpass'}
        self.fill_registration_form(values)
        self.assertEqual(len(User.objects.all()), 1)

    def test_duplicate_email(self):
        self.selenium.get('%s%s' % (self.live_server_url, "/register/"))
        values = {'username': 'testuser', 'email': 'testuser@test.com',
                  'password': 'testpass'}
        self.fill_registration_form(values)
        self.assertEqual(self.selenium.current_url, self.live_server_url + '/login/')
        self.assertEqual(len(User.objects.all()), 1)
        self.selenium.get('%s%s' % (self.live_server_url, "/register/"))
        values = {'username': 'testuser1', 'email': 'testuser@test.com',
                  'password': 'testpass'}
        self.fill_registration_form(values)
        self.assertEqual(len(User.objects.all()), 1)
[ "agarwalm214@gmail.com" ]
agarwalm214@gmail.com
5a9945e71b3b8a15401b395d1287eb71026ce281
d610a70b24d071c00b7b665324305bae84057cec
/frontend.py
d73f7faea1afc88567cf8f21d7e0b6c04da21659
[]
no_license
akshat1198/bookSearch
60d39d27c2ce23a3656f521e6a3987cbb607eaad
f3a4c59a974cb3cf83f450c75e2c43adae3a749c
refs/heads/master
2020-09-27T04:32:38.764044
2019-12-30T04:34:33
2019-12-30T04:34:33
226,430,807
0
0
null
null
null
null
UTF-8
Python
false
false
3,474
py
from tkinter import *
from backend import Database

database = Database("books.db")


class Window(object):

    def __init__(self, window):
        self.window = window
        self.window.wm_title("BookStore")

        l1 = Label(window, text="Title")
        l1.grid(row=0, column=0)

        l2 = Label(window, text="Author")
        l2.grid(row=0, column=2)

        l3 = Label(window, text="Year")
        l3.grid(row=1, column=0)

        l4 = Label(window, text="ISBN")
        l4.grid(row=1, column=2)

        self.title_text = StringVar()
        self.e1 = Entry(window, textvariable=self.title_text)
        self.e1.grid(row=0, column=1)

        self.author_text = StringVar()
        self.e2 = Entry(window, textvariable=self.author_text)
        self.e2.grid(row=0, column=3)

        self.year_text = StringVar()
        self.e3 = Entry(window, textvariable=self.year_text)
        self.e3.grid(row=1, column=1)

        self.isbn_text = StringVar()
        self.e4 = Entry(window, textvariable=self.isbn_text)
        self.e4.grid(row=1, column=3)

        self.list1 = Listbox(window, height=6, width=35)
        self.list1.grid(row=2, column=0, rowspan=6, columnspan=2)

        sb1 = Scrollbar(window)
        sb1.grid(row=2, column=2, rowspan=6)

        self.list1.configure(yscrollcommand=sb1.set)
        sb1.configure(command=self.list1.yview)

        self.list1.bind('<<ListboxSelect>>', self.get_selected_row)

        b1 = Button(window, text="View all", width=12, command=self.view_command)
        b1.grid(row=2, column=3)

        b2 = Button(window, text="Search entry", width=12, command=self.search_command)
        b2.grid(row=3, column=3)

        b3 = Button(window, text="Add entry", width=12, command=self.add_command)
        b3.grid(row=4, column=3)

        b4 = Button(window, text="Update selected", width=12, command=self.update_command)
        b4.grid(row=5, column=3)

        b5 = Button(window, text="Delete selected", width=12, command=self.delete_command)
        b5.grid(row=6, column=3)

        b6 = Button(window, text="Close", width=12, command=window.destroy)
        b6.grid(row=7, column=3)

    def get_selected_row(self, event):
        index = self.list1.curselection()[0]
        self.selected_tuple = self.list1.get(index)
        self.e1.delete(0, END)
        self.e1.insert(END, self.selected_tuple[1])
        self.e2.delete(0, END)
        self.e2.insert(END, self.selected_tuple[2])
        self.e3.delete(0, END)
        self.e3.insert(END, self.selected_tuple[3])
        self.e4.delete(0, END)
        self.e4.insert(END, self.selected_tuple[4])

    def view_command(self):
        self.list1.delete(0, END)
        for row in database.view():
            self.list1.insert(END, row)

    def search_command(self):
        self.list1.delete(0, END)
        for row in database.search(self.title_text.get(), self.author_text.get(),
                                   self.year_text.get(), self.isbn_text.get()):
            self.list1.insert(END, row)

    def add_command(self):
        database.insert(self.title_text.get(), self.author_text.get(),
                        self.year_text.get(), self.isbn_text.get())
        self.list1.delete(0, END)
        self.list1.insert(END, (self.title_text.get(), self.author_text.get(),
                                self.year_text.get(), self.isbn_text.get()))

    def delete_command(self):
        database.delete(self.selected_tuple[0])

    def update_command(self):
        database.update(self.selected_tuple[0], self.title_text.get(),
                        self.author_text.get(), self.year_text.get(),
                        self.isbn_text.get())


window = Tk()
Window(window)
window.mainloop()
[ "akshat1198@gmail.com" ]
akshat1198@gmail.com
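The backend module this GUI imports is not included in the record. A minimal sqlite3 stand-in with the interface the frontend assumes — only the method names and argument order are inferred from the calls above; the table schema and everything else is an assumption:

import sqlite3

class Database:
    def __init__(self, db):
        self.conn = sqlite3.connect(db)
        self.cur = self.conn.cursor()
        self.cur.execute("CREATE TABLE IF NOT EXISTS book "
                         "(id INTEGER PRIMARY KEY, title TEXT, author TEXT, "
                         "year INTEGER, isbn INTEGER)")
        self.conn.commit()

    def view(self):
        self.cur.execute("SELECT * FROM book")
        return self.cur.fetchall()

    def search(self, title="", author="", year="", isbn=""):
        self.cur.execute("SELECT * FROM book WHERE title=? OR author=? OR year=? OR isbn=?",
                         (title, author, year, isbn))
        return self.cur.fetchall()

    def insert(self, title, author, year, isbn):
        self.cur.execute("INSERT INTO book VALUES (NULL,?,?,?,?)",
                         (title, author, year, isbn))
        self.conn.commit()

    def update(self, id, title, author, year, isbn):
        self.cur.execute("UPDATE book SET title=?, author=?, year=?, isbn=? WHERE id=?",
                         (title, author, year, isbn, id))
        self.conn.commit()

    def delete(self, id):
        self.cur.execute("DELETE FROM book WHERE id=?", (id,))
        self.conn.commit()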
649162ec2eee906ff40ed4a11742014eddbea62c
490c4f252522ba00fcbb6544cf77afc883db63a8
/supremes/helpers.py
5fcb3d1364fba7c616dfc628be5628496b04be5e
[]
no_license
stillmatic/supremes
a6c7b9f25d7d8ef92511469e837c1ee78e6bf2ff
a34f3b6ac038c0a66030df1dba92aa249f53ae17
refs/heads/master
2020-06-25T13:02:00.093656
2019-07-28T20:24:36
2019-07-28T20:24:36
199,315,962
3
0
null
null
null
null
UTF-8
Python
false
false
1,221
py
""" Utils, e.g. for I/O. """ import hashlib import requests import rapidjson import glob from models import Case from typing import List, Any, Dict, Optional PATH = "D:/code/supremes/oyez/downloaded" def load_from_remote(url: str, overwrite: bool = False, verbose: bool = True) -> Any: """ Load from cache if possible, else, read from remote. """ key = hashlib.sha1(url.encode("utf-8")).hexdigest() desired_path = f"{PATH}/{key}.json" if glob.glob(desired_path) and not overwrite: if verbose: print(f"Loading {url} from cache instead!") with open(desired_path, "r") as f: return rapidjson.loads(f.read()) else: if verbose: print(f"Loading {url} from web") res = rapidjson.loads(requests.get(url).content) with open(desired_path, "w") as f: rapidjson.dump(res, f) return res def get_cases_for_term(term: int, verbose: bool = True) -> Optional[List["Case"]]: url = f"https://api.oyez.org/cases?per_page=0&filter=term:{term}" docket = load_from_remote(url, verbose = verbose) cases = [Case.from_id(case["term"], case["docket_number"], verbose) for case in docket] return cases
[ "hua.christopher@gmail.com" ]
hua.christopher@gmail.com
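Illustrative usage of the cache-or-fetch helper above (the term value is made up; this needs the repo's models.Case and network access on the first call only):

if __name__ == "__main__":
    cases = get_cases_for_term(2018)   # hits the Oyez API and writes the cache
    cases = get_cases_for_term(2018)   # second call is served from disk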
df5dc53a7f4dbc860b2fdc75152abf5d48d28b8c
8cda0cebf7fdc7352236c8c114491742fd3bf68a
/gtfsrt/reader.py
16fc8f9f1ef4a2e01fc324104b5b777badf35b36
[]
no_license
pravahio/py-gtfsr-analysis
0cd23b296a6f02adf0e7fb8643c5e9f3ad5dac9d
46233e1fd7f726652b4b5223d97aaea7d4125e77
refs/heads/master
2020-07-08T10:57:42.647372
2019-08-21T19:33:52
2019-08-21T19:33:52
203,652,035
0
0
null
null
null
null
UTF-8
Python
false
false
3,329
py
import os
import hashlib
import json
from datetime import datetime

from google.transit import gtfs_realtime_pb2
from google.protobuf.json_format import MessageToDict

GL_unique = {}


class GTFSRtReader:
    READ_TRIP_UPDATES = 0
    READ_VEHICLE_POSITION = 1
    READ_SERVICE_ALERTS = 3

    SAmode = False
    VPmode = False
    TUmode = False

    def __init__(self, path, read_mode):
        self.path = path
        a = []
        if isinstance(read_mode, int):
            a.append(read_mode)
        else:
            a.extend(read_mode)
        for i in a:
            if i == self.READ_SERVICE_ALERTS:
                self.SAmode = True
            if i == self.READ_VEHICLE_POSITION:
                self.VPmode = True
            if i == self.READ_TRIP_UPDATES:
                self.TUmode = True

    def read_file(self, f_name, tu, sa, vp):
        gtfs_msg = self.read_gtfs_rt_file(f_name)
        if gtfs_msg is None:
            return
        # print(f_name, ' Len: ', len(gtfs_msg.entity))
        for feed in gtfs_msg.entity:
            if self.TUmode and feed.HasField('trip_update'):
                tu.append(MessageToDict(feed.trip_update))
            if self.SAmode and feed.HasField('alert'):
                sa.append(MessageToDict(feed.alert))
            if self.VPmode and feed.HasField('vehicle'):
                mgg = MessageToDict(feed.vehicle)
                # check if timestamp, pos->lat, pos->long exist.
                cl_txt = (str(mgg['timestamp'])
                          + str(mgg['position']['latitude'])
                          + str(mgg['position']['longitude'])).encode('utf-8')
                dt = datetime.fromtimestamp(int(mgg['timestamp'][:10]))
                mgg['timestamp'] = dt
                # deduplicate vehicle positions by (timestamp, lat, lon) hash
                mgg['_id'] = hashlib.md5(cl_txt).hexdigest()
                if mgg['_id'] in GL_unique:
                    continue
                GL_unique[mgg['_id']] = 1
                vp.append(mgg)

    def compute_res(self, tu, sa, vp):
        res = {}
        if len(sa) != 0:
            res['alert'] = sa
        if len(vp) != 0:
            res['vehicle'] = vp
        if len(tu) != 0:
            res['trip_update'] = tu
        return res

    def read(self):
        tu = []
        sa = []
        vp = []
        for r, d, f in os.walk(self.path):
            for file in f:
                f_name = os.path.join(r, file)
                # the original omitted the accumulator arguments, which
                # would raise a TypeError; pass them through
                self.read_file(f_name, tu, sa, vp)
        return self.compute_res(tu, sa, vp)

    def read_batch(self, batch_size):
        tu = []
        sa = []
        vp = []
        cnt = 0
        for r, d, f in os.walk(self.path):
            for file in f:
                f_name = os.path.join(r, file)
                self.read_file(f_name, tu, sa, vp)
                cnt += 1  # the original never incremented cnt, yielding on every file
                if cnt % batch_size == 0:
                    yield self.compute_res(tu, sa, vp)
                    tu = []
                    sa = []
                    vp = []
        yield self.compute_res(tu, sa, vp)

    def read_gtfs_rt_file(self, fullpath):
        feed = gtfs_realtime_pb2.FeedMessage()
        with open(fullpath, 'rb') as f:
            data = f.read()
            try:
                feed.ParseFromString(data)
            except:
                print('Error parsing GTFS-RT')
                return None
        return feed
[ "a.upperwal@gmail.com" ]
a.upperwal@gmail.com
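Illustrative use of the batched generator above (the directory path and batch size are assumptions): stream vehicle positions out of a directory of GTFS-RT dumps, one dict of accumulated entities per batch.

reader = GTFSRtReader("/data/gtfsrt", GTFSRtReader.READ_VEHICLE_POSITION)
for batch in reader.read_batch(100):
    vehicles = batch.get("vehicle", [])
    print(len(vehicles), "new vehicle positions")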
7efbf58e4724def6d2a3904a3b96739ea7bf1929
422abf35d7b168bdc9050922e6a8e7c6ddb753b1
/settings/settings_manager.py
05ace33352f94617f9c41866ac9b53ea78a2c43a
[]
no_license
MycatQieZi/RemoteManager
a4b8c4b499c3065642fa74ad51aec37dea613681
ef998b6a4046f81004a618f06762dcbf1715787c
refs/heads/main
2023-06-24T19:04:10.658823
2021-07-06T09:45:48
2021-07-06T09:45:48
373,416,855
2
2
null
2021-07-23T02:29:09
2021-06-03T07:19:55
Python
UTF-8
Python
false
false
5,031
py
import configparser, os, logging, yaml, sys

from conf.reg import reg_get_QTHZ_path
from misc.enumerators import FilePath, SettingsCategories, SettingsItems
from settings.consts import DEFAULT_FILE_TEMPLATE
from misc.decorators import singleton


@singleton
class SettingsManager():
    def __init__(self):
        self.logger = logging.getLogger("box_helper_common_logger")
        self.logger.info("Starting the settings manager...")
        if len(sys.argv) < 2 or not sys.argv[1] == 'debug':
            self.dev_mode = False
        else:
            self.dev_mode = True
        self.__settings_path = "./settings/settings.ini"
        self.read_settings(self.__settings_path)
        self.read_QTHZ_inst_path()
        self.logger.info("Settings finished loading...")

    def get_paths(self):
        return {
            FilePath.CONFIG: self.qthz_inst_path + self.__config[SettingsCategories.PATHS.value][SettingsItems.CONFIG.value],
            FilePath.FS: self.__config[SettingsCategories.PATHS.value][SettingsItems.FS.value],
            FilePath.FS_CONF: self.__config[SettingsCategories.PATHS.value][SettingsItems.FS.value]
                + self.__config[SettingsCategories.PATHS.value][SettingsItems.FS_CONF.value],
            FilePath.JAVA: self.__config[SettingsCategories.PATHS.value][SettingsItems.JAVA.value],
            FilePath.JAR: self.qthz_inst_path + self.__config[SettingsCategories.PATHS.value][SettingsItems.JAR.value],
            FilePath.JAVA_PID: self.qthz_inst_path + self.__config[SettingsCategories.PATHS.value][SettingsItems.JAVA_PID.value],
            FilePath.APP_YML: self.qthz_inst_path + self.__config[SettingsCategories.PATHS.value][SettingsItems.APP_YML.value],
            FilePath.PATH_BAT: self.qthz_inst_path + self.__config[SettingsCategories.PATHS.value][SettingsItems.PATH_BAT.value]
        }

    def get_filenames(self):
        return {
            FilePath.JAR: self.__config[SettingsCategories.PATHS.value][SettingsItems.JAR.value],
            FilePath.JAVA_PID: self.__config[SettingsCategories.PATHS.value][SettingsItems.JAVA_PID.value],
            FilePath.APP_YML: self.__config[SettingsCategories.PATHS.value][SettingsItems.APP_YML.value],
        }

    def get_host_addr(self):
        return self.__config[SettingsCategories.GENERAL.value][SettingsItems.HOST_ADDR.value]

    def get_heartbeat_timer(self):
        return self.__config[SettingsCategories.TIMER.value][SettingsItems.HB.value]

    def get_versioncheck_timer(self):
        return self.__config[SettingsCategories.TIMER.value][SettingsItems.VC.value]

    def get_env(self):
        return self.__config[SettingsCategories.GENERAL.value][SettingsItems.ENV.value]

    def get_logging_level(self):
        return self.__config[SettingsCategories.GENERAL.value][SettingsItems.LOGGING.value]

    def get_log_expiration(self):
        return self.__config[SettingsCategories.GENERAL.value][SettingsItems.LOG_EXP.value]

    def get_QTHZ_inst_path(self):
        return self.qthz_inst_path

    def get_sqlite_db_path(self):
        return self.qthz_inst_path + self.__config[SettingsCategories.PATHS.value]['data']

    def get_backup_dir_path(self):
        return self.qthz_inst_path + self.__config[SettingsCategories.PATHS.value]['backup']

    def get_patch_dir_path(self):
        return self.qthz_inst_path + self.__config[SettingsCategories.PATHS.value][SettingsItems.PATCH.value]

    def get_patch_meta_path(self):
        return self.get_patch_dir_path() + self.__config[SettingsCategories.PATHS.value][SettingsItems.PATCHMETA.value]

    # NOTE: these two redefine get_heartbeat_timer above with string keys;
    # in Python the later definitions win.
    def get_heartbeat_timer(self):
        return self.__config[SettingsCategories.TIMER.value]['heartbeat']

    def get_version_check_timer(self):
        return self.__config[SettingsCategories.TIMER.value]['versionCheck']

    def read_QTHZ_inst_path(self):
        self.qthz_inst_path = reg_get_QTHZ_path()

    def read_settings(self, path):
        self.verify_config_file_existence(path)
        self.__config = self.read_ini_into_config(path)

    def verify_config_file_existence(self, path):
        if not os.path.isfile(path):
            self.logger.warning(f"Settings file at {path} doesn't exist, creating default settings...")
            with open(path, "x") as settings_file:
                settings_file.write(DEFAULT_FILE_TEMPLATE)

    def read_ini_into_config(self, path):
        config = configparser.ConfigParser()
        config.read(path, encoding="UTF-8")
        return config

    def write_config_to_ini_file(self, config, filepath):
        with open(filepath, 'w') as configfile:
            config.write(configfile)
        self.logger.debug("Finished saving configuration")  # original message: 保存配置完成

    def get_yaml_info(self, filepath):
        with open(filepath, mode='r', encoding='utf-8') as stream:
            data = yaml.safe_load(stream)
        return data

    def write_yaml_info(self, filepath, data):
        with open(filepath, mode='w', encoding='utf-8') as stream:
            yaml.dump(data, stream, allow_unicode=True)
[ "zhangxy@ect888.com" ]
zhangxy@ect888.com
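The misc.decorators.singleton import is not part of this record. A common minimal class-decorator implementation looks like the sketch below (an assumption about the repo's code, shown only so the @singleton usage above is self-explanatory):

def singleton(cls):
    instances = {}

    def get_instance(*args, **kwargs):
        # construct on first call, then always return the same object
        if cls not in instances:
            instances[cls] = cls(*args, **kwargs)
        return instances[cls]
    return get_instance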
da601365143788ddbeeff357a89115a32b9b44a6
de770a1a7238cdcec2c321f673a517ed21772731
/backend/cache_model.py
f1e1a3cc1cb810abd7370a93063c76bc7a84c52b
[ "MIT" ]
permissive
tlkh/serverless-transformers
b4a873e351016d6ac39c05ecafab505664a566be
98cc9968a3ac1c7d9e779c6193f1e083c19ee7b3
refs/heads/main
2023-01-20T09:33:57.566895
2020-11-18T09:37:52
2020-11-18T09:37:52
313,572,143
2
3
MIT
2020-11-28T18:40:38
2020-11-17T09:40:43
Python
UTF-8
Python
false
false
293
py
import os

os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = ""

import torch
from transformers import *

MODEL_NAME = "allenai/unifiedqa-t5-base"

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
[ "tlkh.xms@gmail.com" ]
tlkh.xms@gmail.com
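Illustrative use of the cached model above (the input string is made up; UnifiedQA expects the question and context joined by a newline):

question = "which is heavier, a kilogram of lead or a kilogram of feathers?\nthey weigh the same."
inputs = tokenizer(question, return_tensors="pt")
with torch.no_grad():
    output_ids = model.generate(**inputs, max_length=32)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))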
a0844a6b122f516ecc3798d4bad710627ee5512a
52c990629932dcc5f13b4753af23c7d395bb4b1b
/STOCK/wallet/migrations/0013_auto_20210220_2205.py
a60b9f13b0368c83b6b6b1db975d02e261c119c6
[]
no_license
Strzelba2/STOCK
4a0158534cf3a231df59ead0873d1ac50d6b1ee8
b1904057a40f74f54abd7629fd8726807229c44c
refs/heads/main
2023-03-14T17:19:04.662137
2021-03-21T19:45:08
2021-03-21T19:45:08
313,441,960
0
0
null
null
null
null
UTF-8
Python
false
false
477
py
# Generated by Django 3.1.3 on 2021-02-20 21:05

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('wallet', '0012_auto_20210220_2205'),
    ]

    operations = [
        migrations.AlterField(
            model_name='stocks',
            name='broker',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                                    to='wallet.broker_account'),
        ),
    ]
[ "artur_strzelczyk@wp.pl" ]
artur_strzelczyk@wp.pl
5a77ff53b47783a74d0756216f1c09c0dcf2c10e
8796273a71427c8d9869431926341fbcac54095f
/imdemo/utils/singleton.py
efdc68aae590b919e315b4fbb42972ee95d1400c
[]
no_license
fatelei/im-demo
e2c377a4fc9c7ce5ab31210ed76f1532d537a790
032bac4e0cfe7365e389c64a1ce3a5aec7dd9208
refs/heads/master
2021-01-09T21:46:21.401059
2016-01-17T08:14:55
2016-01-17T08:14:55
45,176,036
4
2
null
null
null
null
UTF-8
Python
false
false
383
py
# -*- coding: utf8 -*-
"""
imdemo.utils.singleton
~~~~~~~~~~~~~~~~~~~~~~

Singleton mode.
"""


def singleton_class(obj):
    instances = {}

    def wrapper(*args, **kwargs):
        name = obj.__name__
        if name not in instances:
            instance = obj(*args, **kwargs)
            instances[name] = instance
        return instances[name]
    return wrapper
[ "fatelei@gmail.com" ]
fatelei@gmail.com
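Illustrative usage of the decorator above (the class name is made up): every call site gets the same instance.

@singleton_class
class Connection:
    pass

a = Connection()
b = Connection()
assert a is b

One caveat worth noting: the registry is keyed by __name__, so two decorated classes that happen to share a name would collide and return each other's instance.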
fe55479c2bc11a0237fb2eecee964409893ab01c
de77db1947bc0c1e99c493ffb90aabce8de197e7
/products/utils.py
93ee3df7ae1ac8d9a6e524e1f6794447d9b19b56
[]
no_license
thesaadmirza/djangoecommerce
a6e17d0125ae5f85b8065341564ffd548a4a3520
a4dbb14cc04b956497a8524c46141c6aa9ff50c0
refs/heads/master
2020-03-30T20:08:36.934892
2018-10-09T13:53:50
2018-10-09T13:53:50
151,574,842
0
1
null
null
null
null
UTF-8
Python
false
false
864
py
import random
import string

from django.utils.text import slugify


def random_string_generator(size=10, chars=string.ascii_lowercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))


def unique_slug_generator(instance, new_slug=None):
    """
    This is for a Django project and it assumes your instance
    has a model with a slug field and a title character (char) field.
    """
    if new_slug is not None:
        slug = new_slug
    else:
        slug = slugify(instance.title)

    Klass = instance.__class__
    qs_exists = Klass.objects.filter(slug=slug).exists()
    if qs_exists:
        new_slug = "{slug}-{randstr}".format(
            slug=slug,
            randstr=random_string_generator(size=4)
        )
        return unique_slug_generator(instance, new_slug=new_slug)
    return slug
[ "saadmirza009@gmail.com" ]
saadmirza009@gmail.com
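A typical way to wire the slug generator above into a Django app is a pre_save signal; this sketch must live inside an installed app, and the model and field names here are assumptions (the util itself only requires .title and .slug):

from django.db import models
from django.db.models.signals import pre_save

class Product(models.Model):
    title = models.CharField(max_length=120)
    slug = models.SlugField(blank=True, unique=True)

def product_pre_save(sender, instance, *args, **kwargs):
    # only fill the slug when it is empty, so manual slugs survive
    if not instance.slug:
        instance.slug = unique_slug_generator(instance)

pre_save.connect(product_pre_save, sender=Product)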
8d633cacf961f2aaf372bd6e6e2760fa67fc75f3
e66635486a8abc7710432b52aa15c2d6e4488c94
/vmware_nsx_tempest/tests/nsxv3/api/test_l2_gateway_connection.py
a08d7212ce08967fe35b5b32d802971f61f21334
[ "Apache-2.0" ]
permissive
yfauser/vmware-nsx
ba2bff4c3cc982b7af03ac7d9891a067018a7233
1fb08a7555efd820c2d5625665ab77d7e69d3b0c
refs/heads/master
2021-01-18T17:41:40.411620
2016-06-02T21:13:43
2016-06-02T21:13:43
60,336,943
2
0
null
2016-06-03T09:42:43
2016-06-03T09:42:43
null
UTF-8
Python
false
false
19,234
py
# Copyright 2015 OpenStack Foundation
# Copyright 2016 VMware Inc
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
import netaddr

from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest import test

from vmware_nsx_tempest._i18n import _LI
from vmware_nsx_tempest.common import constants
from vmware_nsx_tempest.services import base_l2gw

CONF = config.CONF
LOG = constants.log.getLogger(__name__)


class L2GatewayConnectionTest(base_l2gw.BaseL2GatewayTest):
    """
    Test l2 gateway connection operations.
    """

    @classmethod
    def resource_setup(cls):
        """
        Setting up the resources for the test.
        """
        super(L2GatewayConnectionTest, cls).resource_setup()
        # Create a network.
        cls.network = cls.create_network()
        # Create subnet on the network just created.
        cls.SUBNET_1_NETWORK_CIDR = CONF.l2gw.subnet_1_cidr
        network_cidr = cls.SUBNET_1_NETWORK_CIDR.split("/")
        cls.SUBNET_1_MASK = network_cidr[1]
        subnet_info = {}
        # cidr must be presented & in IPNetwork structure.
        cls.CIDR = netaddr.IPNetwork(cls.SUBNET_1_NETWORK_CIDR)
        cls.subnet = cls.create_subnet(cls.network, cidr=cls.CIDR,
                                       mask_bits=int(cls.SUBNET_1_MASK),
                                       **subnet_info)

    @classmethod
    def resource_cleanup(cls):
        """
        Clean all the resources used during the test.
        """
        super(L2GatewayConnectionTest, cls).resource_cleanup()
        cls._try_delete_resource(cls.networks_client.delete_network,
                                 cls.network["id"])

    @classmethod
    def l2gw_cleanup(cls):
        """
        Delete created L2GWs and L2GWCs.
        """
        for l2gwc_id in cls.l2gwc_created.keys():
            cls.l2gwc_client.delete_l2_gateway_connection(l2gwc_id)
            cls.l2gwc_created.pop(l2gwc_id)
        for l2gw_id in cls.l2gw_created.keys():
            cls.l2gw_client.delete_l2_gateway(l2gw_id)
            cls.l2gw_created.pop(l2gw_id)

    @test.attr(type="nsxv3")
    @decorators.skip_because(bug="634513")
    @test.idempotent_id("81edfb9e-4722-4565-939c-6593b8405ff4")
    def test_l2_gateway_connection_create(self):
        """
        Create l2 gateway connection using one vlan. Vlan parameter is
        passed into L2GW create.
        """
        LOG.info(_LI("Testing test_l2_gateway_connection_create api"))
        cluster_info = self.nsx_bridge_cluster_info()
        device_name, interface_name = cluster_info[0][0], cluster_info[0][1]
        l2gw_name = data_utils.rand_name(constants.L2GW)
        device_1 = {"dname": device_name, "iname": interface_name,
                    "vlans": [self.VLAN_1]}
        l2gw_param = [device_1]
        l2gw_rsp, _ = self.create_l2gw(l2gw_name, l2gw_param)
        l2gwc_param = {"l2_gateway_id": l2gw_rsp[constants.L2GW]["id"],
                       "network_id": self.network["id"]}
        l2gwc_rsp = self.create_l2gw_connection(l2gwc_param)
        # Assert if create fails.
        self.assertEqual(constants.EXPECTED_HTTP_RESPONSE_201,
                         l2gwc_rsp.response["status"],
                         "Response code is not %(code)s" % {
                             "code": constants.EXPECTED_HTTP_RESPONSE_201})
        self.assertEqual(l2gwc_param["l2_gateway_id"],
                         l2gwc_rsp[constants.L2GWC]["l2_gateway_id"],
                         "l2gw id is not same as expected in "
                         "create l2gw connection response")
        self.assertEqual(l2gwc_param["network_id"],
                         l2gwc_rsp[constants.L2GWC]["network_id"],
                         "network id is not same as expected in "
                         "create l2gw connection response")
        self.addCleanup(self.l2gw_cleanup)

    @test.attr(type="nsxv3")
    @decorators.skip_because(bug="634513")
    @test.idempotent_id("7db4f6c9-18c5-4a99-93c1-68bc2ecb48a7")
    def test_l2_gateway_connection_create_with_multiple_vlans(self):
        """
        Create l2 gateway connection using multiple vlans. Vlan parameter is
        passed into L2GW create.
        """
        LOG.info(_LI("Testing test_l2_gateway_connection_create api"))
        cluster_info = self.nsx_bridge_cluster_info()
        device_name, interface_name = cluster_info[0][0], cluster_info[0][1]
        l2gw_name = data_utils.rand_name(constants.L2GW)
        device_1 = {"dname": device_name, "iname": interface_name,
                    "vlans": [self.VLAN_1, self.VLAN_2]}
        l2gw_param = [device_1]
        l2gw_rsp, _ = self.create_l2gw(l2gw_name, l2gw_param)
        l2gwc_param = {"l2_gateway_id": l2gw_rsp[constants.L2GW]["id"],
                       "network_id": self.network["id"]}
        l2gwc_rsp = self.create_l2gw_connection(l2gwc_param)
        # Assert if create fails.
        self.assertEqual(constants.EXPECTED_HTTP_RESPONSE_201,
                         l2gwc_rsp.response["status"],
                         "Response code is not %(code)s" % {
                             "code": constants.EXPECTED_HTTP_RESPONSE_201})
        self.assertEqual(l2gwc_param["l2_gateway_id"],
                         l2gwc_rsp[constants.L2GWC]["l2_gateway_id"],
                         "l2gw id is not same as expected in "
                         "create l2gw connection response")
        self.assertEqual(l2gwc_param["network_id"],
                         l2gwc_rsp[constants.L2GWC]["network_id"],
                         "network id is not same as expected in "
                         "create l2gw connection response")
        self.addCleanup(self.l2gw_cleanup)

    @test.attr(type="nsxv3")
    @test.idempotent_id("de70d6a2-d454-4a09-b06b-8f39be67b635")
    def test_l2_gateway_connection_with_seg_id_create(self):
        """
        Create l2 gateway connection using one vlan. Vlan parameter is
        passed into L2GW connection create.
        """
        LOG.info(_LI("Testing test_l2_gateway_connection_create api"))
        cluster_info = self.nsx_bridge_cluster_info()
        device_name, interface_name = cluster_info[0][0], cluster_info[0][1]
        l2gw_name = data_utils.rand_name(constants.L2GW)
        device_1 = {"dname": device_name, "iname": interface_name}
        l2gw_param = [device_1]
        l2gw_rsp, _ = self.create_l2gw(l2gw_name, l2gw_param)
        l2gwc_param = {"l2_gateway_id": l2gw_rsp[constants.L2GW]["id"],
                       "network_id": self.network["id"],
                       "segmentation_id": self.VLAN_1}
        l2gwc_rsp = self.create_l2gw_connection(l2gwc_param)
        # Assert if create fails.
        self.assertEqual(constants.EXPECTED_HTTP_RESPONSE_201,
                         l2gwc_rsp.response["status"],
                         "Response code is not %(code)s" % {
                             "code": constants.EXPECTED_HTTP_RESPONSE_201})
        self.assertEqual(l2gwc_param["l2_gateway_id"],
                         l2gwc_rsp[constants.L2GWC]["l2_gateway_id"],
                         "l2gw id is not same as expected in "
                         "create l2gw connection response")
        self.assertEqual(l2gwc_param["network_id"],
                         l2gwc_rsp[constants.L2GWC]["network_id"],
                         "network id is not same as expected in "
                         "create l2gw connection response")
        self.assertEqual(l2gwc_param["segmentation_id"],
                         l2gwc_rsp[constants.L2GWC]["segmentation_id"],
                         "segmentation id is not same as expected in "
                         "create l2gw connection response")
        self.addCleanup(self.l2gw_cleanup)

    @test.attr(type="nsxv3")
    @test.idempotent_id("819d9b50-9159-48d0-be2a-493ec686534c")
    def test_l2_gateway_connection_show(self):
        """
        Create l2 gateway connection using one vlan and test l2 gateway
        connection show api
        """
        LOG.info(_LI("Testing test_l2_gateway_connection_create api"))
        cluster_info = self.nsx_bridge_cluster_info()
        device_name, interface_name = cluster_info[0][0], cluster_info[0][1]
        l2gw_name = data_utils.rand_name(constants.L2GW)
        device_1 = {"dname": device_name, "iname": interface_name}
        l2gw_param = [device_1]
        l2gw_rsp, _ = self.create_l2gw(l2gw_name, l2gw_param)
        l2gwc_param = {"l2_gateway_id": l2gw_rsp[constants.L2GW]["id"],
                       "network_id": self.network["id"],
                       "segmentation_id": self.VLAN_1}
        l2gwc_rsp = self.create_l2gw_connection(l2gwc_param)
        # Assert if create fails.
        self.assertEqual(constants.EXPECTED_HTTP_RESPONSE_201,
                         l2gwc_rsp.response["status"],
                         "Response code is not %(code)s" % {
                             "code": constants.EXPECTED_HTTP_RESPONSE_201})
        l2gwc_id = l2gwc_rsp[constants.L2GWC]["id"]
        show_rsp = self.l2gwc_client.show_l2_gateway_connection(l2gwc_id)
        self.assertEqual(constants.EXPECTED_HTTP_RESPONSE_200,
                         show_rsp.response["status"],
                         "Response code is not %(code)s" % {
                             "code": constants.EXPECTED_HTTP_RESPONSE_200})
        self.assertEqual(l2gwc_param["l2_gateway_id"],
                         show_rsp[constants.L2GWC]["l2_gateway_id"],
                         "l2gw id is not same as expected in "
                         "show l2gw connection response")
        self.assertEqual(l2gwc_param["network_id"],
                         show_rsp[constants.L2GWC]["network_id"],
                         "network id is not same as expected in "
                         "show l2gw connection response")
        show_rsp_seg_id = str(show_rsp[constants.L2GWC]["segmentation_id"])
        self.assertEqual(l2gwc_param["segmentation_id"], show_rsp_seg_id,
                         "segmentation id is not same as expected in "
                         "show l2gw connection response")
        self.addCleanup(self.l2gw_cleanup)

    @test.attr(type="nsxv3")
    @test.idempotent_id("4188f8e7-cd65-427e-92b8-2a9e0492ab21")
    def test_l2_gateway_connection_list(self):
        """
        Create l2 gateway connection using one vlan and test l2 gateway
        connection list api.
        """
        LOG.info(_LI("Testing test_l2_gateway_connection_create api"))
        cluster_info = self.nsx_bridge_cluster_info()
        device_name, interface_name = cluster_info[0][0], cluster_info[0][1]
        l2gw_name = data_utils.rand_name(constants.L2GW)
        device_1 = {"dname": device_name, "iname": interface_name}
        l2gw_param = [device_1]
        # Create 2 l2 gateways.
        l2gw_rsp, _ = self.create_l2gw(l2gw_name, l2gw_param)
        # Create 2 l2 gateway connections.
        l2gwc_param = {"l2_gateway_id": l2gw_rsp[constants.L2GW]["id"],
                       "network_id": self.network["id"],
                       "segmentation_id": self.VLAN_1}
        l2gwc_rsp = self.create_l2gw_connection(l2gwc_param)
        # Assert if create fails.
        self.assertEqual(constants.EXPECTED_HTTP_RESPONSE_201,
                         l2gwc_rsp.response["status"],
                         "Response code is not %(code)s" % {
                             "code": constants.EXPECTED_HTTP_RESPONSE_201})
        list_rsp = self.l2gwc_client.list_l2_gateway_connections()
        LOG.info(_LI("l2gw connection list response: %s") % list_rsp)
        # Assert in case of failure.
        self.assertEqual(constants.EXPECTED_HTTP_RESPONSE_200,
                         list_rsp.response["status"],
                         "Response code is not %(code)s" % {
                             "code": constants.EXPECTED_HTTP_RESPONSE_200})
        self.assertEqual(l2gwc_rsp["l2_gateway_connection"]["id"],
                         list_rsp["l2_gateway_connections"][0]["id"],
                         "l2gw connection list does not show proper id")
        self.assertEqual(l2gwc_rsp["l2_gateway_connection"]["l2_gateway_id"],
                         list_rsp["l2_gateway_connections"][0]["l2_gateway_id"],
                         "l2gw connection list does not show proper "
                         "l2_gateway_id")
        self.assertEqual(l2gwc_rsp["l2_gateway_connection"]["network_id"],
                         list_rsp["l2_gateway_connections"][0]["network_id"],
                         "l2gw connection list does not show proper "
                         "network_id")
        self.assertEqual(l2gwc_rsp["l2_gateway_connection"]["tenant_id"],
                         list_rsp["l2_gateway_connections"][0]["tenant_id"],
                         "l2gw connection list does not show proper tenant_id")
        self.assertEqual(l2gwc_rsp["l2_gateway_connection"]["segmentation_id"],
                         str(list_rsp["l2_gateway_connections"][0]["segmentation_id"]),
                         "l2gw connection list does not show proper "
                         "segmentation_id")
        self.addCleanup(self.l2gw_cleanup)

    @test.attr(type="nsxv3")
    @test.idempotent_id("4d71111f-3d2b-4557-97c7-2e149a6f41fb")
    def test_l2_gateway_connection_recreate(self):
        """
        Recreate l2 gateway connection.
        - Create l2GW.
        - Create l2gw connection.
        - delete l2gw connection.
        - Recreate l2gw connection
        - verify with l2gw connection list API.
        """
        LOG.info(_LI("Testing test_l2_gateway_connection_create api"))
        # List all the L2GW connection.
        list_rsp = self.l2gwc_client.list_l2_gateway_connections()
        LOG.info(_LI("l2gw connection list response: %s") % list_rsp)
        # Assert in case of failure.
        self.assertEqual(constants.EXPECTED_HTTP_RESPONSE_200,
                         list_rsp.response["status"],
                         "Response code is not %(code)s" % {
                             "code": constants.EXPECTED_HTTP_RESPONSE_200})
        list_rsp = list_rsp["l2_gateway_connections"]
        l2gwc_ids = [item.get("id") for item in list_rsp if "id" in item]
        cluster_info = self.nsx_bridge_cluster_info()
        device_name, interface_name = cluster_info[0][0], cluster_info[0][1]
        l2gw_name = data_utils.rand_name(constants.L2GW)
        device_1 = {"dname": device_name, "iname": interface_name}
        l2gw_param = [device_1]
        l2gw_rsp, _ = self.create_l2gw(l2gw_name, l2gw_param)
        l2gwc_param = {"l2_gateway_id": l2gw_rsp[constants.L2GW]["id"],
                       "network_id": self.network["id"],
                       "segmentation_id": self.VLAN_1}
        l2gwc_rsp = self.create_l2gw_connection(l2gwc_param)
        # Assert if create fails.
        self.assertEqual(constants.EXPECTED_HTTP_RESPONSE_201,
                         l2gwc_rsp.response["status"],
                         "Response code is not %(code)s" % {
                             "code": constants.EXPECTED_HTTP_RESPONSE_201})
        l2gwc_id = l2gwc_rsp[constants.L2GWC]["id"]
        # Delete l2gw.
        rsp = self.delete_l2gw_connection(l2gwc_id)
        self.assertEqual(constants.EXPECTED_HTTP_RESPONSE_204,
                         rsp.response["status"],
                         "Response code is not %(code)s" % {
                             "code": constants.EXPECTED_HTTP_RESPONSE_204})
        # Since we delete l2gwc pop that id from list.
        self.l2gwc_created.pop(l2gwc_id)
        l2gwc_rsp = self.create_l2gw_connection(l2gwc_param)
        # Assert if create fails.
        self.assertEqual(constants.EXPECTED_HTTP_RESPONSE_201,
                         l2gwc_rsp.response["status"],
                         "Response code is not %(code)s" % {
                             "code": constants.EXPECTED_HTTP_RESPONSE_201})
        # List all the L2GW connection.
        list_rsp = self.l2gwc_client.list_l2_gateway_connections()
        LOG.info(_LI("l2gw connection list response: %s") % list_rsp)
        # Assert in case of failure.
        self.assertEqual(constants.EXPECTED_HTTP_RESPONSE_200,
                         list_rsp.response["status"],
                         "Response code is not %(code)s" % {
                             "code": constants.EXPECTED_HTTP_RESPONSE_200})
        list_rsp = list_rsp["l2_gateway_connections"]
        l2gwc_ids = l2gwc_ids + [item.get("id") for item in list_rsp
                                 if "id" in item]
        self.assertNotIn(l2gwc_id, l2gwc_ids, "l2gwc list api shows hanging "
                                              "l2gwc id")
        self.addCleanup(self.l2gw_cleanup)

    @test.attr(type="nsxv3")
    @test.idempotent_id("670cacb5-134e-467d-ba41-0d7cdbcf3903")
    def test_l2_gateway_connection_delete(self):
        """
        Delete l2gw will create l2gw and delete recently created l2gw. To
        delete l2gw we need l2gw id.
        """
        LOG.info(_LI("Testing l2_gateway_connection_delete api"))
        cluster_info = self.nsx_bridge_cluster_info()
        device_name, interface_name = cluster_info[0][0], cluster_info[0][1]
        l2gw_name = data_utils.rand_name(constants.L2GW)
        device_1 = {"dname": device_name, "iname": interface_name}
        l2gw_param = [device_1]
        l2gw_rsp, _ = self.create_l2gw(l2gw_name, l2gw_param)
        l2gwc_param = {"l2_gateway_id": l2gw_rsp[constants.L2GW]["id"],
                       "network_id": self.network["id"],
                       "segmentation_id": self.VLAN_1}
        l2gwc_rsp = self.create_l2gw_connection(l2gwc_param)
        # Assert if create fails.
        self.assertEqual(constants.EXPECTED_HTTP_RESPONSE_201,
                         l2gwc_rsp.response["status"],
                         "Response code is not %(code)s" % {
                             "code": constants.EXPECTED_HTTP_RESPONSE_201})
        l2gwc_id = l2gwc_rsp[constants.L2GWC]["id"]
        # Delete l2gw.
        rsp = self.delete_l2gw_connection(l2gwc_id)
        self.assertEqual(constants.EXPECTED_HTTP_RESPONSE_204,
                         rsp.response["status"],
                         "Response code is not %(code)s" % {
                             "code": constants.EXPECTED_HTTP_RESPONSE_204})
        # Since we delete l2gwc pop that id from list.
        self.l2gwc_created.pop(l2gwc_id)
        self.addCleanup(self.l2gw_cleanup)
[ "ksamoray@vmware.com" ]
ksamoray@vmware.com
34e948024f0bec94ff0ac644ed0ec34b906fbcf6
c058f51b99f91faebf27183b2b579e9f96e0d8f5
/botorch/sampling/index_sampler.py
ac64388a6725fbe6d9d097bcda515413de547a4f
[ "MIT" ]
permissive
pytorch/botorch
255d62f698cc615c750e9343c278a63c7e96a586
4cc5ed59b2e8a9c780f786830c548e05cc74d53c
refs/heads/main
2023-08-22T15:23:51.071048
2023-08-22T05:30:38
2023-08-22T05:30:38
142,940,093
2,891
373
MIT
2023-09-13T00:16:13
2018-07-30T23:59:57
Jupyter Notebook
UTF-8
Python
false
false
2,289
py
#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

r"""
Sampler to be used with `EnsemblePosteriors` to enable deterministic
optimization of acquisition functions with ensemble models.
"""

from __future__ import annotations

import torch
from botorch.posteriors.ensemble import EnsemblePosterior
from botorch.sampling.base import MCSampler
from torch import Tensor


class IndexSampler(MCSampler):
    r"""A sampler that calls `posterior.rsample_from_base_samples` to
    generate the samples via index base samples."""

    def forward(self, posterior: EnsemblePosterior) -> Tensor:
        r"""Draws MC samples from the posterior.

        Args:
            posterior: The ensemble posterior to sample from.

        Returns:
            The samples drawn from the posterior.
        """
        self._construct_base_samples(posterior=posterior)
        samples = posterior.rsample_from_base_samples(
            sample_shape=self.sample_shape, base_samples=self.base_samples
        )
        return samples

    def _construct_base_samples(self, posterior: EnsemblePosterior) -> None:
        r"""Constructs base samples as indices to sample with them from
        the Posterior.

        Args:
            posterior: The ensemble posterior to construct the base samples
                for.
        """
        if self.base_samples is None or self.base_samples.shape != self.sample_shape:
            with torch.random.fork_rng():
                torch.manual_seed(self.seed)
                base_samples = torch.multinomial(
                    posterior.weights,
                    num_samples=self.sample_shape.numel(),
                    replacement=True,
                ).reshape(self.sample_shape)
            self.register_buffer("base_samples", base_samples)
        if self.base_samples.device != posterior.device:
            self.to(device=posterior.device)  # pragma: nocover

    def _update_base_samples(
        self, posterior: EnsemblePosterior, base_sampler: IndexSampler
    ) -> None:
        r"""Null operation just needed for compatibility with
        `CachedCholeskyAcquisitionFunction`."""
        pass
[ "facebook-github-bot@users.noreply.github.com" ]
facebook-github-bot@users.noreply.github.com
ae6316a772722d458445a524f69b025e1ec1ac9e
246fc123d539e150ae9e82720f843b1ab5890532
/shau.py
3d7e55e187f5baafca30bcaa50ea87fcf4ba249e
[]
no_license
Raeshmithaa/guvi
3a5edd0b33d7971eae1a73aa4d543eb8f0a0aec2
cd20cda104b409aba23dcb15210ab9285fc946fc
refs/heads/master
2020-05-31T06:39:44.349687
2019-07-26T10:30:46
2019-07-26T10:30:46
190,146,725
0
0
null
null
null
null
UTF-8
Python
false
false
72
py
r = int(input("enter the value"))
for i in range(r):  # the original used xrange, which is Python 2 only
    print("Hello")
[ "noreply@github.com" ]
noreply@github.com
c1e93bd7232a7db99271954b0b80e062ac990c1e
7e8363cea094afdc23532265776d135167e4af84
/new_cb.py
558bee1c17e81f8c89e5e2650be1bc38a68ce958
[]
no_license
flaresky/stock
2c8f92c0144ae48b328265398716bd154c91d887
720ede20b04908e873920456fa1ead54a1114d7c
refs/heads/master
2020-04-11T20:53:31.365704
2019-08-16T01:44:26
2019-08-16T01:44:26
162,087,117
0
0
null
null
null
null
UTF-8
Python
false
false
1,038
py
# -*- coding: utf-8 -*-
import re
import datetime
import utils
import traceback

from EmailSender import send_mail

notify_rate = 55


def main():
    global notify_rate
    today = datetime.datetime.today()
    ts = today.strftime("%Y-%m-%d")
    # ts = "2019-04-19"
    stocks = []
    url = "https://www.jisilu.cn/data/cbnew/pre_list/?___jsl=LST___t"
    jo = utils.fetch_json(url)
    for row in jo['rows']:
        cell = row['cell']
        apply_dt = cell['apply_date']
        pma_rt = 100
        if 'pma_rt' in cell and cell['pma_rt'] is not None:
            pma_rt = float(cell['pma_rt'])
        # '可转债' = convertible bond (value compared against the API response)
        if apply_dt == ts and pma_rt >= notify_rate and cell['cb_type'] == u'可转债':
            stocks.append("%s-%s-%.2f%%" % (cell['bond_nm'], cell['apply_cd'], pma_rt))
    if len(stocks) > 0:
        # subject/body: "apply for convertible bonds: <list>"
        ns = "申购可转债:%s" % (' '.join(stocks))
        send_mail("申购可转债", ns)
        utils.print_with_time(ns)
    utils.print_with_time("Done")


if __name__ == '__main__':
    main()
[ "flaresky@gmail.com" ]
flaresky@gmail.com
f706442a26c3078a4ba76a8bf393f15c6c2a95f4
4e30c855c253cc1d972d29e83edb9d5ef662d30a
/product/models/stock.py
bfd6d68450fd500838468f3728f011573c8ed506
[ "MIT" ]
permissive
rajeshr188/django-onex
8b531fc2f519d004d1da64f87b10ffacbd0f2719
0a190ca9bcf96cf44f7773686205f2c1f83f3769
refs/heads/master
2023-08-21T22:36:43.898564
2023-08-15T12:08:24
2023-08-15T12:08:24
163,012,755
2
0
NOASSERTION
2023-07-22T09:47:28
2018-12-24T17:46:35
Python
UTF-8
Python
false
false
17,058
py
from decimal import Decimal

from django.db import models
from django.db.models import OuterRef, Subquery, Sum
from django.db.models.functions import Coalesce
from django.shortcuts import reverse
from django.utils import timezone  # needed by StockLot.get_age (missing in the original)

from dea.models import Journal
from utils.friendlyid import encode

from ..managers import StockLotManager, StockManager


class Stock(models.Model):
    """
    represents stock for each product variant.
    this stock is used in sale/purchase purposes
    """

    created = models.DateTimeField(auto_now_add=True)
    updated_on = models.DateTimeField(auto_now=True)
    reorderat = models.IntegerField(default=1)
    variant = models.ForeignKey(
        "product.ProductVariant", on_delete=models.CASCADE, related_name="stocks"
    )

    objects = StockManager()

    class Meta:
        ordering = ("-created",)

    def __str__(self):
        cb = self.current_balance()
        return f"{self.variant} {cb['wt']} {cb['qty']}"

    def get_absolute_url(self):
        return reverse("product_stock_detail", args=(self.pk,))

    def get_update_url(self):
        return reverse("product_stock_update", args=(self.pk,))

    def get_pure_by_melting(self):
        bal = self.current_balance()
        return bal["wt"] * self.melting

    def get_pure_by_cost(self):
        bal = self.current_balance()
        return bal["wt"] * self.cost

    def audit(self):
        """
        get last audit cb, total in, total out and then append following
        """
        try:
            last_statement = self.stockstatement_set.latest()
        except StockStatement.DoesNotExist:
            last_statement = None
        if last_statement is not None:
            ls_wt = last_statement.Closing_wt
            ls_qty = last_statement.Closing_qty
        else:
            ls_wt = 0
            ls_qty = 0

        stock_in = self.stock_in_txns(last_statement)
        stock_out = self.stock_out_txns(last_statement)
        cb_wt = ls_wt + (stock_in["wt"] - stock_out["wt"])
        cb_qty = ls_qty + (stock_in["qty"] - stock_out["qty"])

        return StockStatement.objects.create(
            stock=self,
            Closing_wt=cb_wt,
            Closing_qty=cb_qty,
            total_wt_in=stock_in["wt"],
            total_qty_in=stock_in["qty"],
            total_wt_out=stock_out["wt"],
            total_qty_out=stock_out["qty"],
        )

    def stock_in_txns(self, ls):
        """return all the In transactions since last audit"""
        st = self.stocktransaction_set.all()
        if ls:
            st = st.filter(created__gte=ls.created)
        st = st.filter(movement_type__in=["P", "SR", "AR", "AD", "IN"])
        return st.aggregate(
            qty=Coalesce(models.Sum("quantity", output_field=models.IntegerField()), 0),
            wt=Coalesce(
                models.Sum("weight", output_field=models.DecimalField()), Decimal(0.0)
            ),
        )

    def stock_out_txns(self, ls):
        """return all Out Transactions since last audit"""
        st = self.stocktransaction_set.all()
        if ls:
            st = st.filter(created__gte=ls.created)
        st = st.filter(movement_type__in=["PR", "S", "A", "RM", "OT"])
        return st.aggregate(
            qty=Coalesce(models.Sum("quantity", output_field=models.IntegerField()), 0),
            wt=Coalesce(
                models.Sum("weight", output_field=models.DecimalField()), Decimal(0.0)
            ),
        )

    def current_balance(self):
        """compute balance from last audit and append following"""
        bal = {}
        Closing_wt: Decimal = 0
        Closing_qty: int = 0
        try:
            ls = self.stockstatement_set.latest()
            Closing_wt = ls.Closing_wt
            Closing_qty = ls.Closing_qty
        except StockStatement.DoesNotExist:
            ls = None

        in_txns = self.stock_in_txns(ls)
        out_txns = self.stock_out_txns(ls)
        bal["wt"] = Closing_wt + (in_txns["wt"] - out_txns["wt"])
        bal["qty"] = Closing_qty + (in_txns["qty"] - out_txns["qty"])
        return bal

    # def get_age(self):
    #     """
    #     returns age of stock in days
    #     """
    #     return (self.created - self.updated_on).days

    def transact(self, weight, quantity, journal, movement_type):
        """
        Modifies weight and quantity associated with the stock
        based on movement type.
        Returns none
        """
        StockTransaction.objects.create(
            journal=journal,
            stock=self,
            weight=weight,
            quantity=quantity,
            movement_type_id=movement_type,
        )
        # NOTE: update_status is defined on StockLot, not on Stock,
        # in the original source
        self.update_status()

    def merge_lots(self):
        """
        merges all lots in to individual lots representing this stock
        of its product variant.
        single operation to merge lots blindly.
        merge only non huid/non-unique lots
        """
        all_lots = self.lots.exclude(is_unique=True)
        current = all_lots.current_balance()
        new_lot = StockLot.objects.create(
            wt=current.wt, qty=current.qty, stock=current.stock
        )
        # the original called transact(wt=..., qty=...), which does not match
        # the transact(weight, quantity, ...) signature
        new_lot.transact(
            weight=current.wt, quantity=current.qty, journal=None, movement_type="AD"
        )
        for i in all_lots:
            i.transact(
                weight=current.wt, quantity=current.qty, journal=None, movement_type="RM"
            )
        return new_lot


class StockLot(models.Model):
    """
    StockLot
    core idea:
        1 productV has many lots and all lots[productv] reference one stock
        on purchase add to stocklot from purchase_item
        on sale choose from stocklot from sale_item
        a lot belongs to a purchase and can be split/merged into new lot
        belonging to same purchase
        smaller lots can be stockout'ed and stockin'ed seperately
    """

    # should this be mptt?Maybe
    created = models.DateTimeField(auto_now_add=True)
    updated_on = models.DateTimeField(auto_now=True)

    quantity = models.IntegerField(default=0)
    weight = models.DecimalField(max_digits=10, decimal_places=3)

    barcode = models.CharField(
        max_length=155, null=True, blank=True, unique=True, editable=False
    )
    huid = models.CharField(max_length=6, null=True, blank=True, unique=True)
    stock_code = models.CharField(max_length=4, blank=True, null=True)

    purchase_touch = models.DecimalField(max_digits=10, decimal_places=3)
    purchase_rate = models.DecimalField(
        max_digits=10, decimal_places=3, null=True, blank=True
    )
    is_unique = models.BooleanField(default=False)
    status = models.CharField(
        max_length=10,
        choices=(
            ("Empty", "Empty"),
            ("Available", "Available"),
            ("Sold", "Sold"),
            ("Approval", "Approval"),
            ("Return", "Return"),
        ),
        default="Empty",
    )

    # related fields
    stock = models.ForeignKey(Stock, on_delete=models.CASCADE, related_name="lots")
    # redundant aint it?
    variant = models.ForeignKey(
        "product.ProductVariant", on_delete=models.CASCADE, related_name="stock_lots"
    )
    purchase_item = models.ForeignKey(
        "purchase.InvoiceItem",
        on_delete=models.CASCADE,
        null=True,
        blank=True,
        related_name="item_lots",
    )

    objects = StockLotManager()

    def __str__(self):
        return f"{self.barcode} | {self.huid or ''} | {self.variant} | {self.current_balance()}"

    @classmethod
    def with_balance(cls):
        balance_subquery = (
            StockLotBalance.objects.filter(stocklot_id=OuterRef("pk"))
            .values("stocklot_id")
            .annotate(total_balance=Coalesce(Sum("balance"), 0))
            .values("total_balance")
        )
        queryset = cls.objects.annotate(balance=Subquery(balance_subquery))
        return queryset

    def generate_barcode(self):
        print("generating barcode")
        if not self.barcode:
            self.barcode = encode(self.pk)
            self.save()

    def update_status(self):
        cb = self.current_balance()
        if cb["wt"] <= 0.0 or cb["qty"] <= 0:
            self.status = "Empty"
        else:
            self.status = "Available"
        self.save()

    def audit(self):
        try:
            last_statement = self.stockstatement_set.latest()
        except StockStatement.DoesNotExist:
            last_statement = None

        if last_statement is not None:
            ls_wt = last_statement.Closing_wt
            ls_qty = last_statement.Closing_qty
        else:
            ls_wt = 0
            ls_qty = 0

        stock_in = self.stock_in_txns(last_statement)
        stock_out = self.stock_out_txns(last_statement)
        cb_wt = ls_wt + (stock_in["wt"] - stock_out["wt"])
        cb_qty = ls_qty + (stock_in["qty"] - stock_out["qty"])

        return StockStatement.objects.create(
            stock=self.stock,
            lot=self,  # the original passed stock_batch=, but the model field is named lot
            Closing_wt=cb_wt,
            Closing_qty=cb_qty,
            total_wt_in=stock_in["wt"] if stock_in["wt"] else 0.0,
            total_qty_in=stock_in["qty"] if stock_in["qty"] else 0,
            total_wt_out=stock_out["wt"] if stock_out["wt"] else 0.0,
            total_qty_out=stock_out["qty"] if stock_out["qty"] else 0,
        )

    def stock_in_txns(self, ls):
        # filter since last audit
        st = self.stocktransaction_set.all()
        if ls:
            st = st.filter(created__gte=ls.created)
        st = st.filter(movement_type__id__in=["P", "SR", "AR", "AD", "IN"])
        return st.aggregate(
            qty=Coalesce(models.Sum("quantity", output_field=models.IntegerField()), 0),
            wt=Coalesce(
                models.Sum("weight", output_field=models.DecimalField()), Decimal(0.0)
            ),
        )

    def stock_out_txns(self, ls):
        # filter since last audit
        st = self.stocktransaction_set.all()
        if ls:
            st = st.filter(created__gte=ls.created)
        st = st.filter(movement_type__id__in=["PR", "S", "A", "RM", "OT"])
        return st.aggregate(
            qty=Coalesce(models.Sum("quantity", output_field=models.IntegerField()), 0),
            wt=Coalesce(
                models.Sum("weight", output_field=models.DecimalField()), Decimal(0.0)
            ),
        )

    def current_balance(self):
        # compute cb from last audit and append following
        bal = {}
        try:
            ls = self.stockstatement_set.latest()
            Closing_wt = ls.Closing_wt
            Closing_qty = ls.Closing_qty
        except StockStatement.DoesNotExist:
            ls = None
            Closing_wt = 0
            Closing_qty = 0

        in_txns = self.stock_in_txns(ls)
        out_txns = self.stock_out_txns(ls)
        bal["wt"] = Closing_wt + (in_txns["wt"] - out_txns["wt"])
        bal["qty"] = Closing_qty + (in_txns["qty"] - out_txns["qty"])
        return bal

    def get_total_sold(self):
        return self.sold_items.aggregate(
            qty=Coalesce(models.Sum("quantity", output_field=models.IntegerField()), 0),
            wt=Coalesce(
                models.Sum("weight", output_field=models.DecimalField()), Decimal(0.0)
            ),
        )

    def transact(self, weight, quantity, journal, movement_type):
        """
        Modifies weight and quantity associated with the stock
        based on movement type.
        Returns none
        """
        StockTransaction.objects.create(
            journal=journal,
            lot=self,
            weight=weight,
            quantity=quantity,
            movement_type_id=movement_type,
            stock=self.stock,
        )
        self.update_status()

    def merge(self, lot: int):
        """
        a lots qty and weight remains same troughout its life,
        any add/remove/merge/split on a lot is performed via transactions,
        and current balance of a lot is derived from transaction.

        Return : new_lot:StockLot
        """
        if self.variant != lot.variant or self.stock != lot.stock:
            raise Exception(
                "cannot merge lots from different variant or associated with different stock"
            )

        new_lot = StockLot(
            variant=self.variant,
            weight=lot.weight + self.weight,  # the original had self.eight, a typo
            quantity=lot.quantity + self.quantity,
        )
        self.transact(self.weight, self.quantity, journal=None, movement_type="RM")
        lot.transact(lot.weight, lot.quantity, journal=None, movement_type="RM")
        new_lot.transact(
            self.weight + lot.weight,
            self.quantity + lot.quantity,
            journal=None,
            movement_type="AD",
        )
        return new_lot

    def split(self, wt: Decimal, qty: int):
        """
        split a lot by creating a new lot and
        transfering the wt & qty to new lot
        """
        if not self.is_unique and self.quantity > qty and self.weight > wt:
            new_lot = StockLot(variant=self.variant, weight=wt, quantity=qty)
            new_lot.transact(wt, qty, journal=None, movement_type="AD")
            self.transact(wt, qty, journal=None, movement_type="RM")
            return new_lot
        raise Exception("Unique lots cant be split")

    def get_age(self):
        return (timezone.now() - self.created).days


class Movement(models.Model):
    """represents movement_type with direction of stock/lot transaction
    ex: [('purchase','+'),('purchase return','-'),('sales','-'),('sale return','+'),
    ('split','-'),('merge','+')]
    """

    id = models.CharField(max_length=3, primary_key=True)
    name = models.CharField(max_length=30)
    direction = models.CharField(max_length=1, default="+")


class StockTransaction(models.Model):
    created = models.DateTimeField(auto_now_add=True)
    quantity = models.IntegerField(default=0)
    weight = models.DecimalField(max_digits=10, decimal_places=3, default=0)
    description = models.TextField()

    # relational Fields
    # user = models.ForeignKey(CustomUser)
    movement_type = models.ForeignKey(Movement, on_delete=models.CASCADE, default="P")
    stock = models.ForeignKey(Stock, on_delete=models.CASCADE)
    lot = models.ForeignKey(StockLot, on_delete=models.CASCADE, default=1)
    journal = models.ForeignKey(Journal, on_delete=models.CASCADE, related_name="stxns")

    class Meta:
        ordering = ("-created",)
        get_latest_by = ["created"]

    def __str__(self):
        return str(self.pk)

    def get_absolute_url(self):
        return reverse("product_stocktransaction_detail", args=(self.pk,))

    def get_update_url(self):
        return reverse("product_stocktransaction_update", args=(self.pk,))


class StockStatement(models.Model):
    ss_method = (
        ("Auto", "Auto"),
        ("Physical", "Physical"),
    )
    method = models.CharField(max_length=20, choices=ss_method, default="Auto")
    stock = models.ForeignKey(Stock, on_delete=models.CASCADE)
    lot = models.ForeignKey(StockLot, on_delete=models.CASCADE, null=True)
    created = models.DateTimeField(auto_now=True)
    Closing_wt = models.DecimalField(max_digits=14, decimal_places=3)
    Closing_qty = models.IntegerField()
    total_wt_in = models.DecimalField(max_digits=14, decimal_places=3, default=0.0)
    total_wt_out = models.DecimalField(max_digits=14, decimal_places=3, default=0.0)
    total_qty_in = models.IntegerField(default=0.0)
    total_qty_out = models.IntegerField(default=0.0)

    class Meta:
        ordering = ("created",)
        get_latest_by = ["created"]

    def __str__(self):
        return f"{self.stock} - qty:{self.Closing_qty} wt:{self.Closing_wt}"


class StockBalance(models.Model):
    stock = models.OneToOneField(Stock, on_delete=models.DO_NOTHING, primary_key=True)
    Closing_wt = models.DecimalField(max_digits=14, decimal_places=3)
    Closing_qty = models.IntegerField()
    in_wt = models.DecimalField(max_digits=14, decimal_places=3)
    in_qty = models.IntegerField()
    out_wt = models.DecimalField(max_digits=14, decimal_places=3)
    out_qty = models.IntegerField()

    class Meta:
        managed = False
        db_table = "stock_balance"

    def get_qty_bal(self):
        return self.Closing_qty + self.in_qty - self.out_qty

    def get_wt_bal(self):
        return self.Closing_wt + self.in_wt - self.out_wt


class StockLotBalance(models.Model):
    lot = models.OneToOneField(StockLot, on_delete=models.DO_NOTHING, primary_key=True)
    Closing_wt = models.DecimalField(max_digits=14, decimal_places=3)
    Closing_qty = models.IntegerField()
    in_wt = models.DecimalField(max_digits=14, decimal_places=3, default=0.0)
    in_qty = models.IntegerField(default=0)
    out_wt = models.DecimalField(max_digits=14, decimal_places=3, default=0.0)
    out_qty = models.IntegerField(default=0)

    class Meta:
        managed = False
        db_table = "stockbatch_balance"

    def get_qty_bal(self):
        return self.Closing_qty + self.in_qty - self.out_qty

    def get_wt_bal(self):
        return self.Closing_wt + self.in_wt - self.out_wt
[ "rajeshrathodh@gmail.com" ]
rajeshrathodh@gmail.com
a1c0fe09c2da797f27037c5a052110c6c7af18cd
c256a2afd147589b86cd342c3b4d8d6bf091b544
/zonefile_parser/main.py
e5fa2d902d5fee1ffc3bda70e9b9e06ed62e51fc
[ "MIT" ]
permissive
mlytics-william/zonefile-parser
bef8c0560063d36892474877328431506eec1f8b
d0af3258f415993e2974aafe3479126064577365
refs/heads/master
2023-02-02T11:05:13.607069
2020-12-09T00:42:08
2020-12-09T00:42:08
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,742
py
import os

from zonefile_parser.helper import remove_comments
from zonefile_parser.helper import remove_trailing_spaces
from zonefile_parser.helper import default_ttl
from zonefile_parser.helper import default_origin
from zonefile_parser.helper import find_soa_lines
from zonefile_parser.helper import parted_soa
from zonefile_parser.parser import parse_record
import shlex  # the original imported shlex twice


def clean(text: str):
    lines = text.splitlines()
    clean_lines = []
    for line in lines:
        line = remove_comments(line)
        line = remove_trailing_spaces(line)
        if len(line) == 0:
            continue
        clean_lines.append(line)
    return "\n".join(clean_lines)


# TODO unit test
# TODO break apart
# TODO error handling
def parse(text: str):
    text = clean(text)
    lines = text.splitlines()
    ttl = default_ttl(text)
    origin = default_origin(text)

    # find the SOA, process it, and add it back as a single line
    soa_lines = find_soa_lines(text)
    raw_soa = "\n".join([lines[index] for index in soa_lines])
    soa_parts = parted_soa(raw_soa)
    for index in reversed(soa_lines):
        lines.pop(index)
    lines.insert(soa_lines[0], " ".join(soa_parts))

    # remove all the $TTL & $ORIGIN lines, we have the values,
    # they are no longer needed.
    record_lines = list(
        filter(
            lambda x: "$TTL" not in x and "$ORIGIN" not in x,
            lines
        )
    )

    # each line now represents a single record
    # we need to fill in the name of each record
    # go through the zone file and add a name to every record
    normalized_records = []
    last_name = None
    for record_line in record_lines:
        # replace all tabs with spaces
        record_line = record_line.replace("\t", " ")

        if record_line[0] == "@" and origin is not None:
            record_line = record_line.replace("@", origin)
            last_name = origin

        if record_line[0] == " ":
            record_line = last_name + record_line
        else:
            name = record_line[:record_line.index(" ")]
            last_name = name

        normalized_records.append(record_line)

    normalized_records = list(
        map(
            lambda x: shlex.split(x),
            normalized_records
        )
    )

    # add a TTL to records where one doesn't exist
    def add_ttl(record: list):
        if record[1] == "IN":
            record.insert(1, ttl)
        return record

    normalized_records = list(
        map(
            lambda x: add_ttl(x),
            normalized_records
        )
    )

    normalized_records = list(
        map(
            lambda x: parse_record(x),
            normalized_records
        )
    )

    return normalized_records
[ "aredwood@users.noreply.github.com" ]
aredwood@users.noreply.github.com
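A minimal usage sketch for the parse() routine above. The import path follows the file path recorded for this entry (zonefile_parser/main.py); the sample zone text, including the single-line SOA, is my own illustration and assumes the helper functions accept this syntax:

from zonefile_parser.main import parse

zone_text = """$ORIGIN example.com.
$TTL 3600
@ IN SOA ns.example.com. admin.example.com. 2020120901 7200 3600 1209600 3600
@ IN A 192.0.2.1
www IN A 192.0.2.2"""

for record in parse(zone_text):
    print(record)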
c8e6ec9859340312be86d4aa0d4624585f7ff9d9
4d0bf402a07e152cdd9b457e7eb6b48fb185d451
/source/lib/python3.6/_bootlocale.py
640ba8eb3d36da8fe9357c6d653d70a53eb3957c
[]
no_license
sajivfrancis/opteylearningtool
d61fab9367d264bea0e9ec051a43a4a02a2b413e
6b91b0f1653deeee674e1512b0ce4431521b1f69
refs/heads/master
2020-04-28T07:03:20.842795
2019-05-08T15:24:31
2019-05-08T15:24:31
152,605,200
0
1
null
null
null
null
UTF-8
Python
false
false
57
py
/Users/sajivfrancis/anaconda/lib/python3.6/_bootlocale.py
[ "sajivfrancis@Sajivs-MacBook-Pro.local" ]
sajivfrancis@Sajivs-MacBook-Pro.local
1776f070e04378fb62f8d7fa0efb71da985ee265
66d35c4c299cfebe70acb45cbfe69eee74e63c45
/quantum/vazirani/L03a-bellstate/main.py
dfc5f84107e9016c2ff12d43b59fed96cabc4a35
[]
no_license
numpde/misc
8b7b245e7cb4ab403a6ecce3ebd7aaf63fe1c1c5
d57a0715800adda4acc1920b52d811e193ac29dc
refs/heads/master
2022-12-07T19:38:15.829569
2021-03-19T11:41:11
2021-03-19T11:41:11
196,041,509
0
0
null
2022-09-30T18:39:24
2019-07-09T16:02:18
Python
UTF-8
Python
false
false
459
py
# RA, 2019-12-10

import qsharp
import numpy as np

from Quantum.Bell import Corr, Corr_SignBasis, Corr_SignBasis_Simul

r = 100 * np.mean([Corr.simulate() for __ in range(1000)])
print(F"Agreement (0/1 basis): {r}%")

r = 100 * np.mean([Corr_SignBasis.simulate() for __ in range(1000)])
print(F"Agreement (+/- basis): {r}%")

r = np.mean([((-1) ** Corr_SignBasis_Simul.simulate()) for __ in range(1000)])
print(F"Average eigenvalue (+/- basis, simul): {r}")
[ "21158052+numpde@users.noreply.github.com" ]
21158052+numpde@users.noreply.github.com
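The 0/1-basis agreement above can be sanity-checked without the Q# backend; a pure-numpy sketch assuming the ideal Bell state (|00> + |11>)/sqrt(2), whose computational-basis outcomes are perfectly correlated:

import numpy as np

rng = np.random.default_rng(0)
probs = np.array([0.5, 0.0, 0.0, 0.5])        # P(00), P(01), P(10), P(11)
outcomes = rng.choice(4, size=1000, p=probs)  # joint outcome encoded as 2 bits
agree = np.mean((outcomes // 2) == (outcomes % 2))
print(f"Agreement (0/1 basis): {100 * agree}%")  # ideally 100%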
2bb7d800683997697c30b40167e239a1b671acbd
9f5fcff2513f2d78f27e5313698dcc47fce1e754
/Experiment/RL_EA_search/graphnas/rs_trainer.py
60d453b4a9dc3b195ba7af0fdb0ad1d16b376820
[ "Apache-2.0" ]
permissive
ncucjm/notebook
c2495f790e9fc2ca55c1c29a8eaa2dc1bfe7463f
7271a0d1b10cdd6298e223c7ff150d4df031aa76
refs/heads/master
2023-07-20T05:55:48.946687
2021-01-27T09:12:19
2021-01-27T09:12:19
202,633,012
0
0
null
2023-07-06T21:28:29
2019-08-16T00:58:45
Jupyter Notebook
UTF-8
Python
false
false
1,510
py
import time
import torch
import numpy as np
from collections import deque

from graphnas.trainer import Trainer


class RandomSearch_Trainer(Trainer):
    """
    This class implements a Random Search method, on the Search Space provided to it.
    """
    def __init__(self, args):
        super(RandomSearch_Trainer, self).__init__(args)
        self.args = args
        self.random_seed = args.random_seed
        self.cycles = args.cycles

    def train(self):
        print("\n\n===== Random Search ====")
        start_time = time.time()
        self.best_ind_acc = 0.0
        self.best_ind = []
        while self.cycles > 0:
            individual = self._generate_random_individual()
            ind_actions = self._construct_action([individual])
            gnn = self.form_gnn_info(ind_actions[0])
            _, ind_acc = \
                self.submodel_manager.train(gnn, format=self.args.format)
            print("individual:", individual, " val_score:", ind_acc)
            if ind_acc > self.best_ind_acc:
                self.best_ind = individual.copy()
                self.best_ind_acc = ind_acc
            self.cycles -= 1  # consume one search cycle; without this the loop never terminates
        end_time = time.time()
        total_time = end_time - start_time
        print('Total elapsed time: ' + str(total_time))
        print('[BEST STRUCTURE]', self.best_ind)
        print('[BEST STRUCTURE] Actions: ',
              self._construct_action([self.best_ind]))
        print('[BEST STRUCTURE] Accuracy: ', self.best_ind_acc)
        print("===== Random Search DONE ====")
[ "1300887184@qq.com" ]
1300887184@qq.com
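The train loop above is plain random search: sample an individual, score it, keep the best. The same pattern on a toy objective, stripped of the GraphNAS plumbing (all names here are my own):

import random

def objective(x):
    return -(x - 3.0) ** 2  # toy stand-in for the validation score

best_x, best_score = None, float("-inf")
for _ in range(100):                  # the 'cycles' budget
    x = random.uniform(-10.0, 10.0)   # plays the role of _generate_random_individual()
    score = objective(x)
    if score > best_score:
        best_x, best_score = x, score
print(best_x, best_score)             # best_x should land near 3.0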
68cb78d9786b17dbf9ebfd1f7d9bc1c3a09d832a
49e5321f91ec6a7090fad075dff70380e8782436
/test_example/008_temperature.py
fedf554c65d22940c0e2d72e90a337181b6586fd
[]
no_license
hedgehoght/py3_cainiao
068bd3b50f6fdcd21265ef426178ac94e27a437c
e671bb8b0d402e47d0ddd1eb250c4f8f78a58357
refs/heads/main
2023-04-02T22:39:00.713433
2021-04-09T08:25:06
2021-04-09T08:25:06
348,980,592
0
0
null
null
null
null
UTF-8
Python
false
false
1,903
py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2021/3/18 16:22
# @Author : huangting
# @File : 008_temperature.py

# Convert Celsius to Fahrenheit
# The following example shows how to convert a Celsius temperature to Fahrenheit:

# author by : www.runoob.com

# Read a Celsius temperature from the user
# Receive the user's input
celsius = float(input('Enter temperature in Celsius: '))

# Compute the Fahrenheit temperature
fahrenheit = (celsius * 1.8) + 32
print('%0.1f Celsius converts to %0.1f Fahrenheit' % (celsius, fahrenheit))

# Running the code above produces:
# Enter temperature in Celsius: 38
# 38.0 Celsius converts to 100.4 Fahrenheit

# In the example above, the conversion formula is celsius * 1.8 = fahrenheit - 32,
# which rearranges to:
celsius = (fahrenheit - 32) / 1.8

# Notes
# Converting between Celsius and Fahrenheit in both directions
# !/usr/bin/python
# -*- coding:utf-8 -*-
a = int(input('Press 1 to convert Celsius to Fahrenheit\nPress 2 to convert Fahrenheit to Celsius\n'))
while a != 1 and a != 2:
    a = int(input('Invalid choice, please try again.\nPress 1 to convert Celsius to Fahrenheit\nPress 2 to convert Fahrenheit to Celsius\n'))
if a == 1:
    celsius = float(input('Enter temperature in Celsius: '))
    fahrenheit = (celsius * 1.8) + 32  # compute Fahrenheit
    print('%.1f Celsius converts to %.1f Fahrenheit' % (celsius, fahrenheit))
else:
    fahrenheit = float(input('Enter temperature in Fahrenheit: '))
    celsius = (fahrenheit - 32) / 1.8  # compute Celsius
    print('%.1f Fahrenheit converts to %.1f Celsius' % (fahrenheit, celsius))

# Alternative approach:
a = input("Enter a temperature with a unit suffix: ")
if a[-1] in ['F', 'f']:
    C = (eval(a[0:-1]) - 32) / 1.8
    print("The converted temperature is {:.1f}C".format(C))
elif a[-1] in ['C', 'c']:
    F = 1.8 * eval(a[0:-1]) + 32
    print("The converted temperature is {:.1f}F".format(F))
else:
    print("Invalid input format")

# Test run:
# Enter a temperature with a unit suffix: 38C
# The converted temperature is 100.4F
[ "1720886432@qq.com" ]
1720886432@qq.com
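The conversions above, wrapped in small functions as a refactoring sketch (the function names are mine, not the tutorial's):

def c_to_f(celsius):
    return celsius * 1.8 + 32

def f_to_c(fahrenheit):
    return (fahrenheit - 32) / 1.8

# 38 C is 100.4 F, and the two functions are inverses of each other
assert abs(c_to_f(38) - 100.4) < 1e-9
assert abs(f_to_c(100.4) - 38.0) < 1e-9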
751495d0c007ac9f2437d3f8ac658de6de62b17d
6511518ea4d81218d30b34ac6b783a50bac913fa
/base/webdriverfactory.py
64d30aa199154737731391ffa29280d938797be3
[]
no_license
sony093/amazon-repository
f979e1a7be6da9d3db415c8ad86acbc522b94629
205516503be6a5e34364e53f14dac53c76f2838c
refs/heads/master
2022-11-21T20:58:33.688593
2020-07-24T06:56:14
2020-07-24T06:56:14
282,141,546
0
0
null
null
null
null
UTF-8
Python
false
false
883
py
from selenium import webdriver


class WebDriverFactory():

    def __init__(self, browser):
        self.browser = browser

    def getWebDriverInstance(self):
        baseURL = "https://www.amazon.in/"
        if self.browser == "iexplore":
            driver = webdriver.Ie()
        elif self.browser == "firefox":
            driver = webdriver.Firefox()
        elif self.browser == "chrome":
            driver = webdriver.Chrome(executable_path="C:\\Users\\Abinash\\Desktop\\DRIVER\\chrome"
                                                      "\\chromedriver.exe")
        else:
            driver = webdriver.Chrome(executable_path="C:\\Users\\Abinash\\Desktop\\DRIVER\\chrome"
                                                      "\\chromedriver.exe")
        driver.implicitly_wait(3)
        driver.maximize_window()
        driver.get(baseURL)
        return driver
[ "sonybiswal093@gmail.com" ]
sonybiswal093@gmail.com
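A usage sketch for the factory above; the import path mirrors the recorded file path (base/webdriverfactory.py), and a locally installed Chrome plus the hard-coded driver binary are assumed:

from base.webdriverfactory import WebDriverFactory

wdf = WebDriverFactory("chrome")
driver = wdf.getWebDriverInstance()  # opens the browser already pointed at the Amazon base URL
driver.quit()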
85fe8257ba6b215d2f1be3c0e44cab33018ef58f
305cfe3a1f16555509d6b63b1955cbfd2c667344
/posts/api_urls.py
affc09831cae7fe6e0ca34d4d4bd15c9ae8b1d7c
[ "MIT" ]
permissive
krainet/Wordplease
08843fb83fed8823878a291e4dd9e5c9e95f847b
c546913cdfa21be71d9c852fabf0a853e1617ab4
refs/heads/master
2020-05-18T11:59:17.809358
2015-08-30T15:13:23
2015-08-30T15:13:23
39,742,300
0
0
null
null
null
null
UTF-8
Python
false
false
273
py
# -*- coding: utf-8 -*-
from posts.api import PostViewSet
from django.conf.urls import include, url
from rest_framework.routers import DefaultRouter

router = DefaultRouter()
router.register('post', PostViewSet)

urlpatterns = [
    url(r'1.0/', include(router.urls)),
]
[ "ralberti@mequedouno.com" ]
ralberti@mequedouno.com
604a402d920686f394a64772d385a750c89d7118
971b586e21abbb74d631ac49bef3836806261252
/LDA using Parameter Server/code/lda_run.py
8798301161ec03e132807470eb7fa8491ec78650
[]
no_license
samridhishree/Machine-Learning-for-Large-Datasets
92b84c665a115d069e45a0ce7c8d54e97a004e19
d02ded246d115b43bff680a85b896f88d30a83ea
refs/heads/master
2021-09-04T15:33:41.797957
2018-01-19T23:02:30
2018-01-19T23:02:30
114,903,782
4
0
null
null
null
null
UTF-8
Python
false
false
3,553
py
#!/usr/bin/env python

if __name__ == '__main__':
    import argparse
    import os
    import time

    script_dir = os.path.dirname(os.path.realpath(__file__))
    default_host_file = os.path.join(script_dir, 'hosts.txt')
    default_data_file = os.path.join(script_dir, '20news.csv')

    parser = argparse.ArgumentParser(description='Launches the JBosen LDA application using SSH.')
    parser.add_argument('--host_file', type=str, default=default_host_file,
                        help='Path to the host file to use.')
    parser.add_argument('--num_local_worker_threads', type=int, default=1,
                        help='Number of application worker threads per client.')
    parser.add_argument('--num_local_comm_channels', type=int, default=1,
                        help='Number of network channels per client.')
    parser.add_argument('--data_file', type=str, default=default_data_file,
                        help='Path to the input data file to use.')
    parser.add_argument('--output_dir', type=str, default=script_dir,
                        help='Path to the output directory to use.')
    parser.add_argument('--num_words', type=int, default=None,
                        help='Number of words in the vocabulary.')
    parser.add_argument('--num_topics', type=int, default=1,
                        help='Number of topics to run LDA with.')
    parser.add_argument('--alpha', type=float, default=1.0,
                        help='Value of alpha.')
    parser.add_argument('--beta', type=float, default=1.0,
                        help='Value of beta.')
    parser.add_argument('--num_iterations', type=int, default=1,
                        help='Number of passes over the dataset to run.')
    parser.add_argument('--num_clocks_per_iteration', type=int, default=1,
                        help='Number of clocks for each iteration.')
    parser.add_argument('--staleness', type=int, default=0,
                        help='Staleness bound for the parameter server.')
    parser.add_argument('--java_args', type=str, default='',
                        help='Extra arguments to pass to Java.')
    parser.add_argument('--pem_file', type=str, default='',
                        help='Location of AWS pem file')
    args = parser.parse_args()

    #class_path = os.path.join(script_dir, 'build', 'libs', 'Lda.jar')
    main_class = 'Lda'

    with open(args.host_file, 'r') as f:
        host_ips = [line.split(':')[0] for line in f]

    def launch(client_id, ip):
        if args.pem_file:
            aws_args = "-i " + args.pem_file
        else:
            aws_args = " "
        cmd = 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ' + aws_args + " " + ip + ' '
        cmd += '"cd ' + os.getcwd() + '; '
        cmd += 'jython Lda.py '
        cmd += '-clientId %d ' % client_id
        cmd += '-hostFile %s ' % args.host_file
        cmd += '-numLocalWorkerThreads %d ' % args.num_local_worker_threads
        cmd += '-numLocalCommChannels %d ' % args.num_local_comm_channels
        cmd += '-dataFile %s ' % args.data_file
        cmd += '-outputDir %s ' % args.output_dir
        cmd += '-numWords %d ' % args.num_words
        cmd += '-numTopics %d ' % args.num_topics
        cmd += '-alpha %f ' % args.alpha
        cmd += '-beta %f ' % args.beta
        cmd += '-numIterations %d ' % args.num_iterations
        cmd += '-numClocksPerIteration %d ' % args.num_clocks_per_iteration
        cmd += '-staleness %d ' % args.staleness
        cmd += '" &'
        print(cmd)
        os.system(cmd)

    if args.num_words is None:
        print('Counting number of words in \'%s\'...' % args.data_file)
        print('To avoid this, provide the \'--num_words\' argument.')
        max_word = -1
        with open(args.data_file, 'r') as f:
            for line in f:
                words = map(int, line.strip().split(','))
                max_word = max(max_word, max(words))
        args.num_words = max_word + 1

    print("Starting instances of LDA...")
    for client_id, ip in enumerate(host_ips):
        launch(client_id, ip)
[ "sami@samridhishree.com" ]
sami@samridhishree.com
4ec3a3ad39f84c17851919fc61bb7c8ea7077454
09e57dd1374713f06b70d7b37a580130d9bbab0d
/benchmark/startCirq1255.py
433c18dd8d62c95a5ce1435e5cbc4fa0ed500276
[ "BSD-3-Clause" ]
permissive
UCLA-SEAL/QDiff
ad53650034897abb5941e74539e3aee8edb600ab
d968cbc47fe926b7f88b4adf10490f1edd6f8819
refs/heads/main
2023-08-05T04:52:24.961998
2021-09-19T02:56:16
2021-09-19T02:56:16
405,159,939
2
0
null
null
null
null
UTF-8
Python
false
false
3,849
py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py

# qubit number=5
# total number=49

import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np

#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.


def make_circuit(n: int, input_qubit):
    c = cirq.Circuit()  # circuit begin

    c.append(cirq.H.on(input_qubit[0])) # number=3
    c.append(cirq.rx(-1.3603096190043806).on(input_qubit[2])) # number=28
    c.append(cirq.H.on(input_qubit[1])) # number=4
    c.append(cirq.H.on(input_qubit[2])) # number=5
    c.append(cirq.H.on(input_qubit[3])) # number=6
    c.append(cirq.H.on(input_qubit[4])) # number=21

    for i in range(2):
        c.append(cirq.H.on(input_qubit[0])) # number=1
        c.append(cirq.H.on(input_qubit[1])) # number=2
        c.append(cirq.H.on(input_qubit[2])) # number=7
        c.append(cirq.H.on(input_qubit[3])) # number=8
        c.append(cirq.H.on(input_qubit[3])) # number=34
        c.append(cirq.CZ.on(input_qubit[4],input_qubit[3])) # number=35
        c.append(cirq.H.on(input_qubit[3])) # number=36
        c.append(cirq.H.on(input_qubit[0])) # number=17
        c.append(cirq.H.on(input_qubit[1])) # number=18
        c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2])) # number=43
        c.append(cirq.X.on(input_qubit[2])) # number=44
        c.append(cirq.H.on(input_qubit[2])) # number=46
        c.append(cirq.CZ.on(input_qubit[0],input_qubit[2])) # number=47
        c.append(cirq.H.on(input_qubit[2])) # number=48
        c.append(cirq.rx(-1.9697785938008003).on(input_qubit[1])) # number=37
        c.append(cirq.H.on(input_qubit[2])) # number=19
        c.append(cirq.H.on(input_qubit[3])) # number=20
        c.append(cirq.H.on(input_qubit[0])) # number=38
        c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=39
        c.append(cirq.H.on(input_qubit[0])) # number=40
        c.append(cirq.X.on(input_qubit[0])) # number=32
        c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=33
        c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=24
        c.append(cirq.X.on(input_qubit[1])) # number=25
        c.append(cirq.X.on(input_qubit[1])) # number=41
        c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=26
        c.append(cirq.X.on(input_qubit[2])) # number=11
        c.append(cirq.CNOT.on(input_qubit[2],input_qubit[3])) # number=30
        c.append(cirq.X.on(input_qubit[3])) # number=12
        c.append(cirq.H.on(input_qubit[2])) # number=42
        c.append(cirq.X.on(input_qubit[0])) # number=13
        c.append(cirq.X.on(input_qubit[1])) # number=14
        c.append(cirq.X.on(input_qubit[2])) # number=15
        c.append(cirq.X.on(input_qubit[3])) # number=16
        c.append(cirq.X.on(input_qubit[1])) # number=22
        c.append(cirq.X.on(input_qubit[1])) # number=23
    # circuit end

    c.append(cirq.measure(*input_qubit, key='result'))

    return c

def bitstring(bits):
    return ''.join(str(int(b)) for b in bits)

if __name__ == '__main__':
    qubit_count = 5

    input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
    circuit = make_circuit(qubit_count,input_qubits)
    circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')

    circuit_sample_count =2000
    simulator = cirq.Simulator()
    result = simulator.run(circuit, repetitions=circuit_sample_count)

    frequencies = result.histogram(key='result', fold_func=bitstring)
    writefile = open("../data/startCirq1255.csv","w+")
    print(format(frequencies),file=writefile)
    print("results end", file=writefile)

    print(circuit.__len__(), file=writefile)
    print(circuit,file=writefile)

    writefile.close()
[ "wangjiyuan123@yeah.net" ]
wangjiyuan123@yeah.net
e481d147740b8ba9df9a7d2102804aca929b1f8a
62172ba64eba74eddf96ea6d92db9967301edc2d
/src/method/sidnet/train.py
69885788dee97bd7abed3361d7d944ad390c0137
[]
no_license
icdm21-sidnet/codes
e00af1fb0de9472496b0604715f79359ef63b5b0
79b3c2fb6f6088200e753a0cf77e9de5ccb171c7
refs/heads/master
2023-05-26T18:26:17.546881
2021-06-10T12:28:56
2021-06-10T12:28:56
375,684,799
0
0
null
null
null
null
UTF-8
Python
false
false
5,724
py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import torch
from dotmap import DotMap
from tqdm import tqdm
import numpy as np
from method.sidnet.model import SidNet
import scipy.sparse as sp
from loguru import logger


class SidNetTrainer(torch.nn.Module):
    def __init__(self, param):
        """
        Constructor of SidNetTrainer

        :param param: parameter dictionary
        """
        super(SidNetTrainer, self).__init__()
        self.param = param
        self.device = param.device
        self.in_dim = param.in_dim
        self.c = param.hyper_param.c

    def get_normalized_matrices(self, edges, num_nodes):
        """
        Normalize the signed adjacency matrix

        :param edges: signed edges
        :param num_nodes: number of nodes
        :return: normalized matrices
        """
        row, col, data = edges[:, 0], edges[:, 1], edges[:, 2]
        shaping = (num_nodes, num_nodes)
        A = sp.csr_matrix((data, (row, col)), shape=shaping)
        A = A + sp.eye(num_nodes, num_nodes)

        rowsum = np.array(np.abs(A).sum(1)).astype(np.float32)
        rowsum[rowsum == 0] = 1
        r_inv = np.power(rowsum, -1).flatten()
        r_mat_inv = sp.diags(r_inv)
        snA = r_mat_inv @ A
        snA = snA.tocoo().astype(np.float32)

        pos_idx, neg_idx = snA.data > 0, snA.data < 0
        pos_row, pos_col, pos_data = snA.row[pos_idx], snA.col[pos_idx], snA.data[pos_idx]
        neg_row, neg_col, neg_data = snA.row[neg_idx], snA.col[neg_idx], snA.data[neg_idx]

        nApT = sp.csr_matrix((np.abs(pos_data), (pos_row, pos_col)), shape=shaping).T
        nAmT = sp.csr_matrix((np.abs(neg_data), (neg_row, neg_col)), shape=shaping).T

        return nApT, nAmT

    def convert_torch_sparse(self, A, shaping):
        """
        Convert a scipy sparse matrix into a torch sparse matrix

        :param A: scipy sparse matrix
        :param shaping: shape
        :return: torch sparse matrix
        """
        A = A.tocoo().astype(np.float32)
        indices = torch.from_numpy(np.vstack((A.row, A.col)).astype(np.int64))
        values = torch.from_numpy(A.data)
        return torch.sparse.FloatTensor(indices, values, shaping)

    def convert_data(self, data):
        """
        Convert input data for torch

        :param data: input data
        :return: torch data
        """
        converted_data = DotMap()
        converted_data.num_nodes = data.num_nodes
        converted_data.neg_ratio = data.neg_ratio
        converted_data.H = torch.FloatTensor(data.H).to(self.device)

        # train data
        converted_data.train.edges = torch.from_numpy(data.train.X[:, 0:3]).to(self.device)
        nApT, nAmT = self.get_normalized_matrices(data.train.X, data.num_nodes)

        nApT = self.convert_torch_sparse(nApT, nApT.shape)
        nApT = (1 - self.c) * nApT
        converted_data.train.nApT = nApT.to(self.device)

        nAmT = self.convert_torch_sparse(nAmT, nAmT.shape)
        nAmT = (1 - self.c) * nAmT
        converted_data.train.nAmT = nAmT.to(self.device)

        y = np.asarray([1 if y_val > 0 else 0 for y_val in data.train.y])
        converted_data.train.y = torch.from_numpy(y).to(self.device)
        converted_data.class_weights = torch.from_numpy(data.class_weights).type(torch.float32).to(self.device)

        # test data
        converted_data.test.edges = torch.from_numpy(data.test.X[:, 0:2]).to(self.device)
        y = np.asarray([1 if y_val > 0 else 0 for y_val in data.test.y])
        converted_data.test.y = torch.from_numpy(y).to(self.device)

        return converted_data

    def train_with_hyper_param(self, data, hyper_param, epochs=100):
        """
        Train SidNet with given hyperparameters

        :param data: input data
        :param hyper_param: hyperparameters
        :param epochs: target number of epochs
        :return: trained model
        """
        self.c = hyper_param.c
        converted_data = self.convert_data(data)
        model = SidNet(hid_dims=hyper_param.hid_dims,
                       in_dim=hyper_param.in_dim,
                       device=self.device,
                       num_nodes=converted_data.num_nodes,
                       num_layers=hyper_param.num_layers,
                       num_diff_layers=hyper_param.num_diff_layers,
                       c=hyper_param.c).to(self.device)

        optimizer = torch.optim.Adam(model.parameters(),
                                     lr=hyper_param.learning_rate,
                                     weight_decay=hyper_param.weight_decay)
        scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.99)

        model.train()
        pbar = tqdm(range(epochs), desc='Epoch...')
        for epoch in pbar:
            optimizer.zero_grad()
            loss = model(nApT=converted_data.train.nApT,
                         nAmT=converted_data.train.nAmT,
                         X=converted_data.H,
                         edges=converted_data.train.edges,
                         y=converted_data.train.y)
            loss.backward()
            optimizer.step()
            scheduler.step()
            pbar.set_description('Epoch {}: {:.4} train loss'.format(epoch, loss.item()))
        pbar.close()

        # with torch.no_grad():
        #     model.eval()
        #     auc, f1_scores, loss = model.evaluate(test_edges=converted_data.test.edges,
        #                                           test_y=converted_data.test.y)
        #     logger.info('test auc: {:.4f}'.format(auc))
        #     logger.info('test f1_macro: {:.4f}'.format(f1_scores.macro))

        return model
[ "icdm21sidnet@gmail.com" ]
icdm21sidnet@gmail.com
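A tiny worked example of the row normalization performed in get_normalized_matrices above, on a hand-made 3-node signed graph (edge values are illustrative, not from the paper's data; the rowsum-zero guard is omitted here since self-loops from sp.eye make it unreachable):

import numpy as np
import scipy.sparse as sp

edges = np.array([[0, 1, 1], [1, 2, -1]])     # (src, dst, sign)
row, col, data = edges[:, 0], edges[:, 1], edges[:, 2]
A = sp.csr_matrix((data, (row, col)), shape=(3, 3)) + sp.eye(3, 3)
rowsum = np.array(np.abs(A).sum(1)).astype(np.float32)
r_inv = np.power(rowsum, -1).flatten()
snA = sp.diags(r_inv) @ A                     # signed row-normalised adjacency
print(snA.toarray())                          # each row's absolute values sum to 1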
7f9fb37e893d8d72f1e7cdaec460c08f350a2b7b
9bc71630f9a0f0d3f0b490210e8ca36d05f29bc5
/python/Views/QuestionAnswer.py
bc3e66895953e0836ca104937c4dbe4d559776b1
[]
no_license
SproutProject/SproutProject-regweb
b88b720a805bbd9027dd0fe1e631da65fadc18fa
46a7e17ca6e2ddf76dbb65c5768e68e6f2865ce2
refs/heads/master
2021-04-30T23:15:22.218639
2017-02-11T18:01:34
2017-02-11T18:01:34
71,987,513
1
0
null
null
null
null
UTF-8
Python
false
false
2,807
py
from sqlalchemy import and_

from Config import DEBUG
from Model import *
from Views.Base import RequestHandler
from Views.Utils import get_user, db_insert


class GetAllHandler(RequestHandler):

    def post(self):
        session = self.get_session()
        data = []
        for row in session.query(Qa).filter(Qa.status == 1).order_by(Qa.order):
            data.append(row.as_dict())
        self.return_status(self.STATUS_SUCCESS, data=data)
        session.close()


class DeleteHandler(RequestHandler):

    def post(self):
        session = self.get_session()
        uid = self.get_secure_cookie('uid')
        if uid == None:
            self.return_status(self.STATUS_NOT_LOGINED)
        else:
            uid = int(uid)
            user = get_user(session, uid)
            if user.power < 1:
                self.return_status(self.STATUS_PERMISSION_DENIED)
            else:
                try:
                    qa_id = self.get_argument('id')
                    for row in session.query(Qa).filter(Qa.id == qa_id):
                        row.status = 0
                    session.commit()
                    self.return_status(self.STATUS_SUCCESS)
                except Exception as e:
                    if DEBUG:
                        print(e)
                    self.return_status(self.STATUS_ERROR)
        session.close()


class AddHandler(RequestHandler):

    def post(self):
        session = self.get_session()
        uid = self.get_secure_cookie('uid')
        if uid == None:
            self.return_status(self.STATUS_NOT_LOGINED)
        else:
            uid = int(uid)
            user = get_user(session, uid)
            if user.power < 1:
                self.return_status(self.STATUS_PERMISSION_DENIED)
            else:
                try:
                    qa_id = int(self.get_argument('id'))
                    order = self.get_argument('order')
                    question = self.get_argument('question')
                    answer = self.get_argument('answer')
                    if qa_id != -1:
                        for row in session.query(Qa).filter(and_(Qa.id == qa_id, Qa.status == 1)):
                            row.order = order
                            row.question = question
                            row.answer = answer
                        session.commit()
                    else:
                        instance = Qa(order=order, question=question,
                                      answer=answer, status=1)
                        db_insert(session, instance)
                    self.return_status(self.STATUS_SUCCESS)
                except Exception as e:
                    if DEBUG:
                        print(e)
                    self.return_status(self.STATUS_ERROR)
        session.close()
[ "luniacslime@gmail.com" ]
luniacslime@gmail.com
d48f8bec41176e377a39ba8177cac60f159340b7
297497957c531d81ba286bc91253fbbb78b4d8be
/third_party/python/esprima/esprima/__init__.py
aa2398f4102b2e9d4553bb39f890861fda8ee0ea
[ "LicenseRef-scancode-unknown-license-reference", "BSD-3-Clause" ]
permissive
marco-c/gecko-dev-comments-removed
7a9dd34045b07e6b22f0c636c0a836b9e639f9d3
61942784fb157763e65608e5a29b3729b0aa66fa
refs/heads/master
2023-08-09T18:55:25.895853
2023-08-01T00:40:39
2023-08-01T00:40:39
211,297,481
0
0
NOASSERTION
2019-09-29T01:27:49
2019-09-27T10:44:24
C++
UTF-8
Python
false
false
154
py
# -*- coding: utf-8 -*-

from __future__ import absolute_import

version = '4.0.1'
__version__ = (4, 0, 1)

from .esprima import *
[ "mcastelluccio@mozilla.com" ]
mcastelluccio@mozilla.com
022ad5c859c0bd9242aba0bf2f6eb4c519cc7624
9c2eb3b057cfab3c1fe62259855772cdcce3e247
/python/sam_and_substrings.py
6e4659aec87f35d9c56521e53d06817ced548eac
[]
no_license
saienthan/HackerRank
a508ab0fe7783d4033f59f4688db12f202b05595
3f45cd5bca610090f36d07861caa752402aae07a
refs/heads/master
2021-01-19T08:34:31.278662
2015-08-29T01:52:46
2015-08-29T01:52:46
31,596,550
0
0
null
null
null
null
UTF-8
Python
false
false
206
py
# HackerRank "Sam and substrings": sum of all contiguous substrings of a
# digit string, modulo 1e9+7.
num = input()
size = len(num)
dp = [0]*size
dp[0] = int(num[0])
ans = dp[0]
for i in range(1,size):
    # dp[i] = sum of all substrings ending at index i: extend every
    # substring ending at i-1 by one digit (dp[i-1]*10), then add the
    # digit itself once for each of its (i+1) possible start positions
    dp[i] = (dp[i-1]*10 + (int(num[i])*(i+1)))%1000000007
    ans = (ans + dp[i])%1000000007
print(ans)
[ "saienthan@gmail.com" ]
saienthan@gmail.com
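A brute-force cross-check of the recurrence above, summing every substring directly (the sample input "123" is my own):

num = "123"
MOD = 1000000007
brute = sum(int(num[i:j]) for i in range(len(num))
            for j in range(i + 1, len(num) + 1)) % MOD
# substrings: 1, 12, 123, 2, 23, 3 -> 164
assert brute == 164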
9b3897a204d2e19414bfafbc3476fb262e256b8a
32214725e788d3e5b8f360b47959cdf9fd4b79b5
/learn_theano/pylearn2/test_pylearn_tutorials.py
02741e3acee9840acf9946f89be017bf75e5bf65
[ "Apache-2.0" ]
permissive
consciousnesss/learn_theano
4906ae656d2780e42b973186b078298055cfa5f9
43bc661032963274520ecfdd620ac547ee421f9d
refs/heads/master
2021-01-10T13:13:25.552127
2015-12-05T05:07:17
2015-12-05T05:07:17
45,771,031
0
0
null
2015-12-05T05:10:33
2015-11-08T07:20:47
Python
UTF-8
Python
false
false
509
py
from learn_theano.pylearn2.custom_autoencoder_2 import custom_autoencoder_run
from learn_theano.pylearn2.custom_logreg_1 import custom_log_reg_run
from learn_theano.pylearn2.standard_mlp_0 import standard_mlp_run
import theano


def test_standard_mlp():
    standard_mlp_run(1)


def test_custom_log_reg():
    old_floatx = theano.config.floatX
    theano.config.floatX = 'float64'
    custom_log_reg_run(1)
    theano.config.floatX = old_floatx


def test_custom_autoencoder():
    custom_autoencoder_run(1)
[ "olegsinyavskiy@gmail.com" ]
olegsinyavskiy@gmail.com
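The save/override/restore dance in test_custom_log_reg above is a natural fit for a context manager; a sketch of that refactoring (the floatx helper is my own, not part of the repo), with the added benefit that the old value is restored even if the test raises:

from contextlib import contextmanager
import theano

@contextmanager
def floatx(value):
    # temporarily override theano.config.floatX, restoring it even on error
    old = theano.config.floatX
    theano.config.floatX = value
    try:
        yield
    finally:
        theano.config.floatX = old

# equivalent to test_custom_log_reg's body:
# with floatx('float64'):
#     custom_log_reg_run(1)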
56891733bd69b657990cf4c7f42b4cadd4945ee3
cd33ce52f3cec01b6b4e6855b20548b66e7b48e3
/comision/migrations/0007_auto_20170228_1611.py
1dfadc6696ad5092ae8ba358c2e3054b81e3f565
[]
no_license
ingcacm/viceacad_si
3f24156405a4862fd2ef1458182608cf3e091afb
eb7680fadc510ac0b5768c9eb63885f6741f97cc
refs/heads/master
2020-04-02T15:24:41.745195
2018-11-02T15:13:09
2018-11-02T15:13:09
154,566,843
0
0
null
null
null
null
UTF-8
Python
false
false
1,640
py
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-28 16:11
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('comision', '0006_auto_20170228_1601'),
    ]

    operations = [
        migrations.AlterField(
            model_name='comisionv2',
            name='apoyo_bibliografico_otrosi',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='comisionv2',
            name='fecha_otrosi',
            field=models.DateField(null=True),
        ),
        migrations.AlterField(
            model_name='comisionv2',
            name='matricula_otrosi',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='comisionv2',
            name='salario_RRHH_otrosi',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='comisionv2',
            name='salario_reemplazo_otrosi',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='comisionv2',
            name='seguro_medico_otrosi',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='comisionv2',
            name='tiquetes_otrosi',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='comisionv2',
            name='valor_otrosi',
            field=models.IntegerField(null=True),
        ),
    ]
[ "ingenierocamilocryuz@gmail.com" ]
ingenierocamilocryuz@gmail.com
6e5c4d9328171eeb50e8290adcc1ce764248f029
8d472f9facb895dda9e1df81f3bb6c2f81b9c357
/master/bt5/slapos_jio/SkinTemplateItem/portal_skins/slapos_hal_json_style/Project_hasItem.py
d66daa1f26bb81642f295654c3ff99a0f40cc6d8
[]
no_license
SlapOS/slapos.core
852485eed9382685f3df6ba8532f8192bb1389c4
369e8d56636e1c59a745e68dc68154abfc5b7840
refs/heads/master
2023-08-31T04:42:34.722241
2023-08-30T15:13:08
2023-08-30T15:13:08
1,825,920
11
4
null
null
null
null
UTF-8
Python
false
false
81
py
# Zope Script (Python): "context" and a module-level return are provided
# by the Zope scripting environment.
import json
return json.dumps(len(context.Project_getComputeNodeTrackingList()))
[ "rafael@nexedi.com" ]
rafael@nexedi.com
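A standalone sketch of what the one-liner above computes, with the Zope-provided context object stubbed out (the stub class is my own):

import json

class _StubProject:
    # stands in for the Zope context; the real method returns the
    # project's compute-node tracking list
    def Project_getComputeNodeTrackingList(self):
        return []

context = _StubProject()
print(json.dumps(len(context.Project_getComputeNodeTrackingList())))  # -> 0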
84111d9587a8ae65f17009f4940debffcdcba987
297d426d5519c669b210e82d4aff479a51949e52
/routines/flat_dark_corr.py
849357cb9f9c2abf7af01abd15c2581df6e98cda
[]
no_license
PulkitMalhotra15/Lunar-Eclipse-Analysis
4722d6779bad00f107a553dccd4335051dde570b
d07f6b36eec79fea1f10e3129d8c6b72bc669521
refs/heads/master
2020-08-21T22:27:14.922373
2019-10-19T21:09:11
2019-10-19T21:09:11
216,261,023
0
0
null
null
null
null
UTF-8
Python
false
false
1,902
py
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 29 21:10:03 2018

@author: sid
"""

import numpy as np
import matplotlib.pyplot as plt
import astropy.io.fits as fit
import prepare_lists as pl

llists = pl.llists()

dpath='/home/atom/2018_07_27 TLE Jaisalmer/Analysis/images/dark_and_flat/'
savepath='/home/atom/2018_07_27 TLE Jaisalmer/Analysis/images/df corrected/'

dark={0.001:'001', 0.002:'002',0.005:'005',0.01:'01',0.02:'02',0.05:'05',
      0.1:'1',0.2:'2',0.5:'5',1.0:'1s',2.0:'2s',5.0:'5s',10.0:'10s',20.0:'20s'}
flat={0:'bl',1:'IR',2:'HA',3:'HB'}

# Explanation: This script applies dark frame and flat frame corrections
# to the data. The final .fit file is normalised for exposure time.

for i in range(len(llists)):
    for j in range(len(llists[i])):
        raw=fit.open(llists[i][j][0])
        exp=raw[0].header['EXPTIME']
        img=raw[0].data
        if exp==1.5:
            dfr=fit.open(dpath+'dark_'+dark[1.0]+'.fit')[0].data+ \
                fit.open(dpath+'dark_'+dark[0.5]+'.fit')[0].data
        elif exp==0.75:
            dfr=fit.open(dpath+'dark_'+dark[0.5]+'.fit')[0].data+ \
                fit.open(dpath+'dark_'+dark[0.2]+'.fit')[0].data+ \
                fit.open(dpath+'dark_'+dark[0.05]+'.fit')[0].data
        elif exp==3.0:
            dfr=fit.open(dpath+'dark_'+dark[2.0]+'.fit')[0].data+ \
                fit.open(dpath+'dark_'+dark[1.0]+'.fit')[0].data
        elif exp==15.0:
            dfr=fit.open(dpath+'dark_'+dark[10.0]+'.fit')[0].data+ \
                fit.open(dpath+'dark_'+dark[5.0]+'.fit')[0].data
        else:
            dfr=fit.open(dpath+'dark_'+dark[exp]+'.fit')[0].data
        ffr=fit.open(dpath+'flat_'+flat[i]+'.fit')[0].data
        ffr/=np.max(ffr)
        img=(img-dfr)/(ffr*exp)
        fit.writeto(savepath+'dfcorr_'+flat[i]+'_'+format(j,'03d')+'.fit',
                    img,header=raw[0].header)
[ "noreply@github.com" ]
noreply@github.com
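The correction applied in the loop above is (raw - dark) / (flat * exposure); a synthetic check with toy arrays (values are illustrative only, not from the observation data):

import numpy as np

img = np.full((2, 2), 110.0)                  # raw frame counts
dfr = np.full((2, 2), 10.0)                   # summed dark frame(s)
ffr = np.array([[1.0, 0.5], [1.0, 1.0]])      # flat, already max-normalised
exp = 2.0                                     # exposure time in seconds
corrected = (img - dfr) / (ffr * exp)         # counts per second, flat-corrected
print(corrected)                              # [[ 50. 100.] [ 50.  50.]]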
e077b381b5bd58b93198eb2f2a96e908241882cd
b4eeca0ca9dea1956251b1fdba0ab5cc5c6ad938
/img_saver.py
3f16cc31e795bbc6fffdca4a409c1ea3430c00fa
[]
no_license
Rowing0914/simple_CNN_mnist
70ecb5c4e5874b13a43c870aa9ac5bdce7aeba72
4ff3414bad9e8304ecc839314ff6f716a1bfa445
refs/heads/master
2020-03-18T14:14:28.582076
2018-05-26T11:25:12
2018-05-26T11:25:12
134,837,434
3
0
null
null
null
null
UTF-8
Python
false
false
1,567
py
import keras
from keras.datasets import mnist
from keras.models import model_from_json
from keras import backend as K
import numpy as np
import matplotlib.pyplot as plt

batch_size = 128
num_classes = 10
epochs = 5
index_range = 30

# input image dimensions
img_rows, img_cols = 28, 28

checker = True

while (checker):
    # the data, split between train and test sets
    (_, _), (x_test, y_test) = mnist.load_data()
    index = np.random.randint(len(x_test))
    x_test = x_test[index:index+index_range]
    y_test = y_test[index:index+index_range]

    if K.image_data_format() == 'channels_first':
        x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)
        input_shape = (1, img_rows, img_cols)
    else:
        x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
        input_shape = (img_rows, img_cols, 1)

    x_test = x_test.astype('float32')
    x_test /= 255
    print(x_test.shape[0], 'test samples')

    # load json and create model
    json_file = open('./models/model.json', 'r')
    loaded_model_json = json_file.read()
    json_file.close()
    loaded_model = model_from_json(loaded_model_json)
    # load weights into new model
    loaded_model.load_weights("./models/model.h5")
    print("Loaded model from disk")

    integers = set()
    fig = plt.figure()
    for i in range(index_range):
        a = loaded_model.predict(x_test[i].reshape(1, 28, 28, 1))
        integers.add(np.argmax(a))
        fig.savefig('./images/{}.png'.format(np.argmax(a)))

    if integers == set((0,1,2,3,4,5,6,7,8,9)):
        checker = False
        print("You've got all integers from 0 to 9!")
[ "norio.kosaka@rakuten.com" ]
norio.kosaka@rakuten.com
4709c002cbe78386e7a2d8b1f0af23cb71cc52a3
1286114add63959946e4253efa7ac4d3f7dbab04
/product.py
53800c3871c514b426093efd14fa0f750a14ec44
[]
no_license
kugimasa/VendingMachine
e87a11fcc9976f989572b134aa94ebe5b131ca48
d8d81e8c879ea5bb62274dc78e66068e3d735b97
refs/heads/master
2020-07-19T21:48:47.977278
2019-09-05T09:40:33
2019-09-05T09:40:33
206,519,926
2
0
null
null
null
null
UTF-8
Python
false
false
233
py
class Product:
    def __init__(self, name, price):
        self.__name = name
        self.__price = price

    @property
    def name(self):
        return self.__name

    @property
    def price(self):
        return self.__price
[ "kugimania@gmail.com" ]
kugimania@gmail.com
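A usage sketch for the read-only Product value object above; only the property access is shown, the vending-machine wiring is assumed:

cola = Product("cola", 120)
print(cola.name, cola.price)  # -> cola 120
# the properties define no setters, so assignment fails:
# cola.price = 100  # AttributeError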
5e504538adc68c06ea2082edf5674a0e82a28dc0
4f75cc33b4d65d5e4b054fc35b831a388a46c896
/.history/app_20210903181729.py
d0919a5fe032e1f8eaa9d4770a1d04d5bbe154c3
[]
no_license
Lr-2002/newpage
c3fe2acc451e24f6408996ea1271c61c321de702
c589ad974e7100aa9b1c2ccc095a959ff68069b6
refs/heads/main
2023-09-03T06:13:53.428236
2021-11-23T10:41:21
2021-11-23T10:41:21
402,606,000
0
0
null
null
null
null
UTF-8
Python
false
false
1,625
py
from flask import Flask, render_template, url_for
from flask_sqlalchemy import SQLAlchemy
import os
import sys
import click

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(app.root_path, 'data.db')
# //// is the absolute path of the file
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)


@app.cli.command()
@click.option('--drop', is_flag=True, help='Create after drop.')
def initdb(drop):
    if drop:
        db.drop_all()
    db.create_all()
    click.echo('Initialized database.')


class User(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(20))


class Movie(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(20))
    year = db.Column(db.String(4))


# name = 'Grey Li'
# movies = [
#     {'title': 'My Neighbor Totoro', 'year': '1988'},
#     {'title': 'Dead Poets Society', 'year': '1989'},
#     {'title': 'A Perfect World', 'year': '1993'},
#     {'title': 'Leon', 'year': '1994'},
#     {'title': 'Mahjong', 'year': '1996'},
#     {'title': 'Swallowtail Butterfly', 'year': '1996'},
#     {'title': 'King of Comedy', 'year': '1999'},
#     {'title': 'Devils on the Doorstep', 'year': '1999'},
#     {'title': 'WALL-E', 'year': '2008'},
#     {'title': 'The Pork of Music', 'year': '2012'},
# ]

# @app.route('/static/<name>')
# def static(name):
#     # url_for('static')
#     return name


@app.route('/')
def hello():
    # the snapshot left this view half-written ("user ="); querying the
    # models defined above is the minimal way to make it runnable
    user = User.query.first()
    movies = Movie.query.all()
    return render_template('index.html', name=user.name, movies=movies)

# if __name__ == '__main__':
#     app.run()
[ "2629651228@qq.com" ]
2629651228@qq.com
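The repaired view in the snapshot above reads from the database, so a seeding command in the style of its initdb command keeps User.query.first() from returning None. This forge command is hypothetical, not part of the snapshot, and extends the same file (app, db, User, Movie are the names defined above):

@app.cli.command()
def forge():
    """Seed the database with the sample data commented out above."""
    db.create_all()
    db.session.add(User(name='Grey Li'))
    db.session.add(Movie(title='WALL-E', year='2008'))
    db.session.commit()
    click.echo('Done.')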
59a610eb83b8706f74f0002b97f722652d711751
83c57f25a1c8b29bb84078340efabaf527a9452e
/pytest/xiaoniu88/pipelines.py
bcf58746076d6fa5a9859ffc60911edbe065bfe3
[]
no_license
ifzz/py
df06cf5da5920dae979c2c8454bfa02c36dfeeb1
2305e651613725ca51d6a87306f3ef83d6c51939
refs/heads/master
2021-01-18T08:32:21.256271
2016-03-11T10:30:28
2016-03-11T10:30:28
null
0
0
null
null
null
null
UTF-8
Python
false
false
289
py
# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html


class Xiaoniu88Pipeline(object):
    def process_item(self, item, spider):
        return item
[ "junxuanwoo@163.com" ]
junxuanwoo@163.com
2091019304441ed2b85284ecae92403d78b00911
d6309b1c29872a78473b9469b103c03faa289343
/teams/forms.py
013799f1c4f250545260809050ed2f2ee9186cea
[]
no_license
HackyRoot/TeamFinder
86044ddc58e7d8626f04f1c11218c9abd20c015d
3f76c2b9f1862c703a872e41cff90ac31462ede6
refs/heads/master
2020-04-29T05:28:16.608339
2019-03-21T13:02:46
2019-03-21T13:02:46
175,884,658
1
0
null
null
null
null
UTF-8
Python
false
false
1,098
py
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
# from .models import Member, Team

# https://stackoverflow.com/questions/49198400/django-add-user-to-team

# class InviteMemberForm(forms.ModelForm):
#
#     class Meta:
#         model = Team
#         fields = ['team_members', 'team_name']
#
#     def __init__(self,user,*args,**kwargs):
#         super(InviteMemberForm,self ).__init__(*args,**kwargs)
#         self.fields['team_name'].queryset = Team.objects.filter(id__in = Team.objects.filter('team' = user))

from django import forms
from teams.models import Team
from users.models import Profile


class TeamManageForm(forms.Form):
    team_name = forms.ChoiceField()
    member_email = forms.EmailField()
    # a 'members' field is declared here so the queryset override in
    # __init__ below has a target; the original referenced a field that
    # did not exist (and left a stray module-level Team.objects.filter())
    members = forms.ModelMultipleChoiceField(queryset=Profile.objects.none(), required=False)

    class Meta:
        model = Team
        fields = ('team_name', 'team_lead', 'team_image', )

    def __init__(self, user, *args, **kwargs):
        super(TeamManageForm, self).__init__(*args, **kwargs)
        self.fields['members'].queryset = Profile.objects.filter(user=user)
[ "steveparmar6nov2011@gmail.com" ]
steveparmar6nov2011@gmail.com
27851538355cbd4e982fa4268a95c563c111d7c4
07eddbfdf1fcc20e0b3c2027514e5cc9ace6360c
/src/agent.py
2b79da3118d1482dd97c49447982213aa887f5cf
[]
no_license
Benjamin-Etheredge/DeepQExploration
a087739e792dde8c1aa1c0c639963bbbfcdd3615
78f8998d6d1dbff740e5f32f239b9db2b9064afd
refs/heads/master
2023-05-11T02:14:50.018053
2021-04-28T04:13:28
2021-04-28T04:13:28
168,606,706
0
0
null
2023-05-01T13:44:30
2019-01-31T22:28:23
Python
UTF-8
Python
false
false
19,820
py
# TODO test with starting with large window and reducing size # TODO test with randomly removing items from deque instead of using a sliding window # TODO add new q value network for randomly sampling q values to test convergence of predicted q values. import time from datetime import datetime from timeit import default_timer as timer import sys import os import mlflow import custom_mlflow_logger #os.environ["CUDA_VISIBLE_DEVICES"] = "-1" import numpy as np np.random.seed(4) os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' import tensorflow as tf tf.random.set_seed(4) import random random.seed(4) from numpy import clip, stack, array, power #from learners import * from learners import DeepQ ##### #from tensorflow_core.python.keras.api._v1 import keras from tensorflow import keras # this breaks graph saving ##### from copy import deepcopy import gym #from scores import * from experience import Experience from buffer import ReplayBuffer, VoidBuffer from collections import deque # Limit GPU Memory Allocation # https://mc.ai/tensorflow-2-0-wanna-limit-gpu-memory/ import tensorflow as tf gpus = tf.config.experimental.list_physical_devices('GPU') if gpus: try: for gpu in gpus: tf.config.experimental.set_memory_growth(gpu, True) except RuntimeError as e: print(e) import tensorflow.compat.v1 as tf # this must go after due to FileWriter. TODO cleanup tf.disable_eager_execution() #tf.compat.v1.disable_eager_execution() # TODO process every 4th move class Agent: DECAY_TYPE_LINEAR = 'linear' def __init__(self, learner: DeepQ, replay_buffer: ReplayBuffer, environment: gym.Env, max_episode_steps: int, max_episodes=float("inf"), #scorer: Scores = Scores(100), reward_threshold: int = None, sample_size=128, random_choice_decay_min: float = 0.05, decay_type: str = 'linear', # decay_type: str = Agent.DECAY_TYPE_LINEAR, early_stopping: bool = True, verbose=0, seed=4, #seed=None, experience_creator=Experience, observation_processor=array, window=4, frame_skip=4, target_network_interval=None, random_decay_end=1000000, name_prefix="", random_starting_actions_max=10): # seeding agents individually to achieve reproducible results across parallel runs. 
if seed is None: seed = np.random.randint(0, 99999999) self.np_random_state = np.random.RandomState(seed) self.experience_creator = experience_creator self.observation_processor = observation_processor self.window = window self.learner = learner self.replay_buffer = replay_buffer self.env = environment self.recording = False #self.env = gym.Wrapper self.env.frameskip = frame_skip #self.env.seed(self.seed()) self.env.seed(seed) #self.env.action_space.seed(self.seed()) self.env.action_space.seed(seed) # This is needed to keep multiple game windows from opening up when scoring self.scoring_env = deepcopy(self.env) self.scoring_env.seed(seed) self.scoring_env.action_space.seed(seed) self.random_action_rate = 1.0 self.verbose = verbose self.early_stopping = early_stopping self.random_starting_actions_max = random_starting_actions_max if verbose >= 1: env_name = self.env.unwrapped.spec.id log_dir = f"logs/{name_prefix}{env_name}_{learner.name}_" + datetime.now().strftime("%Y%m%d-%H%M%S") self.tensorboard_writer = tf.summary.FileWriter(log_dir) tensorboard = keras.callbacks.TensorBoard( log_dir=log_dir, histogram_freq=0, batch_size=sample_size, write_graph=True, write_grads=True ) tensorboard.set_model(self.learner.model) # Easily Adjusted hyperparameters if reward_threshold is None: reward_threshold = sys.maxsize self.reward_stopping_threshold = reward_threshold self.max_episode_steps = max_episode_steps self.max_episodes = max_episodes self.on_policy_check_interval = min(max_episodes // 10, 150) if target_network_interval is None: self.target_network_updating_interval = int(self.max_episode_steps * 0.5) else: self.target_network_updating_interval = target_network_interval self.sample_size = sample_size self.log_triggering_threshold = max_episode_steps * 10 # log every 20 max game lengths self.decay_type = decay_type if random_choice_decay_min == 0: random_choice_decay_min = 0.0000000000000001 if self.decay_type == 'linear': self.randomChoiceDecayRate = float( (1.0 - random_choice_decay_min) / random_decay_end) else: self.randomChoiceDecayRate = float(power(random_choice_decay_min, 1. / self.max_episodes)) self.randomChoiceMinRate = random_choice_decay_min self.is_slowed = False self.random_choice_slow_down_point = (self.random_action_rate - self.randomChoiceMinRate) / 10. 
self.iterations = 0 self.update_interval = 4 self.frame_skip = frame_skip # TODO push to custom gym wrapper self.game_count = 0 # add on-policy recording self.env = gym.wrappers.Monitor(self.env, 'videos/on-policy', video_callable=lambda _: self.on_policy_check_time(), uid='on-policy', force=True) # add interval recording self.record_interval = 250 self.env = gym.wrappers.Monitor(self.env, 'videos/', video_callable=lambda _: self.game_count % self.record_interval == 0, force=True) self.prepare_buffer() def on_policy_check_time(self): return self.game_count % self.on_policy_check_interval == 0 def seed(self): seed = self.np_random_state.randint(0, 9999) assert (seed >= 0) return seed # TODO figure out how to make verbose checking wrapper def metric_log(self, *args, **kwargs): if self.verbose >= 1: tag, value, step = kwargs['name'], kwargs['data'], kwargs['step'] custom_mlflow_logger.Logger.log_metric(key=tag, value=value, step=step) def log_artifact(self, artifact_path): if self.verbose >= 1: mlflow.log_artifact(artifact_path) def should_select_random_action(self, random_choice_rate): return np.random.uniform(0, 1) < random_choice_rate def should_update_learner(self): return self.replay_buffer.is_ready() def should_update_autoencoder(self, iteration): return iteration % (self.target_network_updating_interval*20) == 0 def should_update_target_model(self, iteration): return iteration % self.target_network_updating_interval == 0 # TODO why should this be a property? def should_decay_epsilon(self): return self.replay_buffer.is_ready() def get_next_action(self, state, random_choice_rate=None): if random_choice_rate is None: random_choice_rate = self.random_action_rate if self.should_select_random_action(random_choice_rate): return self.get_random_action() else: return self.learner.get_next_action(state) def get_random_action(self): return self.env.action_space.sample() def decay_epsilon(self): # TODO set decay operator if self.decay_type == 'linear': self.random_action_rate = max(self.randomChoiceMinRate, (self.random_action_rate - self.randomChoiceDecayRate)) if not self.is_slowed and self.random_action_rate <= self.random_choice_slow_down_point: self.randomChoiceDecayRate /= 10 self.is_slowed = True else: self.random_action_rate = max(self.randomChoiceMinRate, (self.randomChoiceDecayRate * self.random_action_rate)) def update_learner(self): sample_idxs, weights, sample = self.replay_buffer.sample(self.sample_size) loss, learner_info = self.learner.update(Experience.training_items(sample), weights) self.replay_buffer.update(sample_idxs, loss) return loss, learner_info # TODO implement actual logger def should_log(self, iteration): return iteration % self.log_triggering_threshold == 0 def log(self): self.learner.log() self.replay_buffer.log() def render_game(self): self.play_game(verbose=10) def make_move(self, action): pass def prepare_buffer(self): while not self.replay_buffer.is_ready(): self.play_game(self.replay_buffer, random_rate=1.0) def play(self, step_limit=float("inf"), verbose: int = 1): best_on_policy_score = float("-inf") best_off_policy_score = float("-inf") total_steps = 0 rolling_average_scores = deque([], maxlen=200) moving_average = 0 while total_steps <= step_limit and self.max_episodes > self.game_count: if self.on_policy_check_time(): # Use max instead of min to be closer to the other publications # on_policy_score = np.mean([self.play_game(random_rate=0.0) for _ in range(4)]) on_policy_scores = [self.play_game(random_rate=0.0) for _ in range(4)] max_on_policy_score = 
max(on_policy_scores) median_on_policy_score = np.median(on_policy_scores) if best_on_policy_score < max_on_policy_score: best_on_policy_score = max_on_policy_score self.metric_log(name="best_on_policy_score", data=best_on_policy_score, step=total_steps) self.learner.model.save_weights("best_on_policy_model.h5") self.log_artifact("best_on_policy_model.h5") if verbose > 2: # https://github.com/openai/gym/wiki/FAQ #env = gym.wrappers.Monitor(env, '.videos/' + str(time()) + '/') pass self.metric_log(name="median_on_policy_score", data=median_on_policy_score, step=total_steps) self.metric_log(name="max_on_policy_score_per_frames", data=max_on_policy_score, step=total_steps) self.log_artifact("videos") self.game_count += 1 # TODO extract process to method step = self.observation_processor(self.env.reset()) list_buffer = [step for _ in range(self.window+1)] self.replay_buffer.prep(step) # TODO is prep needed? current_lives = self.env.env.ale.lives() self.metric_log(name="lives", data=current_lives, step=total_steps) is_done = False is_terminal = True total_reward = 0 old_reward = 0 old_steps = 0 game_steps = 0 game_start_time = time.time() # TODO for environments that reach the step limit, must specially handle case as not terminal # e.g. reaching the step limit should not have Q Prime set equal to 0. while not is_done: if verbose > 3: self.env.render() if is_terminal: starting_step = np.random.randint(1, self.random_starting_actions_max) #should I be dividing this? for _ in range(starting_step): # TODO should make random, but breakout has a STUPID mechanic # step, _, done, _ = self.scoring_env.step(self.get_random_action()) step, _, done, _ = self.env.step(1) #step = self.observation_processor(step) #list_buffer.append(step) #list_buffer.pop(0) step = self.observation_processor(step) is_terminal = False list_buffer = [step for _ in range(self.window + 1)] action_choice = self.get_next_action(list_buffer[1:]) # self.verbose_1_check(tf.summary.histogram, "action", action_choice, step=total_steps) next_step, reward, is_done, info = self.env.step(action_choice) if 'ale.lives' in info: lives = info['ale.lives'] is_terminal = lives < current_lives if is_terminal: self.metric_log(name="life_reward", data=total_reward-old_reward, step=total_steps) self.metric_log(name="life_steps", data=game_steps-old_steps, step=total_steps) old_reward = total_reward old_steps = game_steps self.metric_log(name="lives", data=lives, step=total_steps) current_lives = lives next_step = self.observation_processor(next_step) list_buffer.append(next_step) list_buffer.pop(0) total_reward += reward # TODO add prioirity experience = self.experience_creator(state=list_buffer[:-1], action=action_choice, next_state=list_buffer[1:], reward=reward, is_done=is_terminal or is_done) self.replay_buffer.append(experience) if self.replay_buffer.is_ready(): if total_steps % self.update_interval == 0: loss, learner_info = self.update_learner() self.metric_log(name="loss", data=loss, step=total_steps) self.decay_epsilon() if self.should_update_target_model(total_steps): self.metric_log(name="target_model_updates", data=total_steps // self.target_network_updating_interval, step=total_steps) self.update_target_model() if self.should_update_autoencoder(total_steps): pass #self.learner.update_autoencoder(self.replay_buffer.states) total_steps += 1 game_steps += 1 game_stop_time = time.time() elapsed_seconds = game_stop_time - game_start_time moves_per_second = game_steps / elapsed_seconds best_off_policy_score = max(best_off_policy_score, 
total_reward) if best_off_policy_score < total_reward: best_off_policy_score = total_reward self.metric_log(name="best_off_policy_score_per_frames", data=best_off_policy_score, step=total_steps) self.learner.model.save_weights("best_off_policy_model.h5") self.log_artifact("best_off_policy_model.h5") rolling_average_scores.append(total_reward) rolling_average = np.mean(rolling_average_scores) self.metric_log(name="rolling_average", data=rolling_average, step=total_steps) self.metric_log(name="move_per_second", data=moves_per_second, step=total_steps) self.metric_log(name="best_off_policy_score", data=best_off_policy_score, step=total_steps) self.metric_log(name="off_policy_score", data=total_reward, step=total_steps) self.metric_log(name="steps_per_game", data=game_steps, step=self.game_count) #moving_average -= moving_average / game_count #moving_average += total_reward / game_count #self.metric_log(name="moving_average", data=moving_average, step=total_steps) self.metric_log(name="epsilon_rate", data=self.random_action_rate, step=total_steps) self.metric_log(name="buffer_size_in_experiences", data=len(self.replay_buffer), step=self.game_count) self.metric_log(name="total steps", data=total_steps, step=self.game_count) self.log_artifact("videos") assert total_steps > 0 return best_off_policy_score, rolling_average, total_steps def update_target_model(self): self.learner.update_target_model() def load_model(self, file_name): pass def save_model(self, file_name): pass def play_game(self, buffer=VoidBuffer(), random_rate=0.0, verbose: int = 0): total_reward = 0 #self.scoring_env.seed(self.seed()) #self.env.action_space.seed(self.seed()) step = self.observation_processor(self.scoring_env.reset()) list_buffer = [step for _ in range(self.window + 1)] current_lives = self.scoring_env.env.ale.lives() step_count = 0 done = False is_terminal = True while not done: if verbose > 3: self.scoring_env.render() if is_terminal: starting_step = np.random.randint(1, self.random_starting_actions_max) # should I be dividing this by frameskip? for _ in range(starting_step): # TODO should make random, but breakout has a STUPID mechanic # step, _, done, _ = self.scoring_env.step(self.get_random_action()) step, _, done, _ = self.scoring_env.step(1) #step = self.observation_processor(step) #list_buffer.append(step) # TODO should i be letting the list_buffer see these? probably not #list_buffer.pop(0) step = self.observation_processor(step) list_buffer = [step for _ in range(self.window + 1)] is_terminal = False # maybe not needed # TODO convert step_buffer to longer form and make it my window.... # TODO but it probably won't make a huge difference since the np.arrays take way more space action_choice = self.get_next_action(list_buffer[1:], random_rate) # TODO build better policy evaluator step, reward, done, info = self.scoring_env.step(action_choice) total_reward += reward step_count += 1 step = self.observation_processor(step) list_buffer.append(step) list_buffer.pop(0) if 'ale.lives' in info: lives = info['ale.lives'] is_terminal = lives < current_lives current_lives = lives experience = self.experience_creator( state=list_buffer[:-1], action=action_choice, next_state=list_buffer[1:], reward=reward, is_done=done or is_terminal) buffer.append(experience) return total_reward def score_model(self, games=150, verbose: int = 0): scores = [self.play_game(verbose=verbose) for _ in range(games)] # Using max to be similar to other publications return max(scores)
[ "benjamin.etheredge@gmail.com" ]
benjamin.etheredge@gmail.com
8725d2a1385df69f69e2f38965280d22d8072110
958dfda84dd93ba500c18b22a53017704c84a9dd
/sql_queries.py
0797b6cd5eb51a731fd1610babd45f34efc62f56
[]
no_license
walkandride/Data-Modeling-With-Postgres
152e77486bfa749e48435072696db7e8fa0e7421
c0db3614d9984625f8fcf2d92331f70637dd0411
refs/heads/master
2021-03-24T14:42:25.226600
2020-05-28T17:17:08
2020-05-28T17:17:08
247,539,352
0
0
null
null
null
null
UTF-8
Python
false
false
3,569
py
# DROP TABLES

songplay_table_drop = "DROP TABLE IF EXISTS songplays;"
user_table_drop = "DROP TABLE IF EXISTS users;"
song_table_drop = "DROP TABLE IF EXISTS songs;"
artist_table_drop = "DROP TABLE IF EXISTS artists;"
time_table_drop = "DROP TABLE IF EXISTS time;"

# CREATE TABLES

songplay_table_create = ("""
CREATE TABLE IF NOT EXISTS songplays(
  songplay_id VARCHAR PRIMARY KEY
, start_time NUMERIC
, user_id VARCHAR NOT NULL
, level VARCHAR CHECK (level IN ('free', 'paid'))
, song_id VARCHAR
, artist_id VARCHAR
, session_id VARCHAR
, location VARCHAR
, user_agent VARCHAR
, CONSTRAINT fk_users FOREIGN KEY (user_id)
    REFERENCES users (user_id) ON DELETE CASCADE
, CONSTRAINT fk_songs FOREIGN KEY (song_id)
    REFERENCES songs (song_id) ON DELETE CASCADE
, CONSTRAINT fk_artists FOREIGN KEY (artist_id)
    REFERENCES artists (artist_id) ON DELETE CASCADE
);
""")

user_table_create = ("""
CREATE TABLE IF NOT EXISTS users(
  user_id VARCHAR PRIMARY KEY
, first_name VARCHAR
, last_name VARCHAR
, gender VARCHAR
, level VARCHAR CHECK (level IN ('free', 'paid'))
);
""")

song_table_create = ("""
CREATE TABLE IF NOT EXISTS songs(
  song_id VARCHAR PRIMARY KEY
, title VARCHAR
, artist_id VARCHAR NOT NULL
, year INT
, duration NUMERIC
);
""")

artist_table_create = ("""
CREATE TABLE IF NOT EXISTS artists(
  artist_id VARCHAR PRIMARY KEY
, name VARCHAR
, location VARCHAR
, latitude NUMERIC
, longitude NUMERIC
);
""")

time_table_create = ("""
CREATE TABLE IF NOT EXISTS time(
  id_pk SERIAL PRIMARY KEY
, start_time TIMESTAMP NOT NULL
, hour SMALLINT
, day SMALLINT
, week SMALLINT
, month SMALLINT
, year SMALLINT
, weekday SMALLINT
);
""")

# INSERT RECORDS

songplay_table_insert = ("""
INSERT INTO songplays(
  songplay_id
, start_time
, user_id
, level
, song_id
, artist_id
, session_id
, location
, user_agent
)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
ON CONFLICT(songplay_id)
DO NOTHING
""")

user_table_insert = ("""
INSERT INTO users(
  user_id
, first_name
, last_name
, gender
, level
)
VALUES (%s, %s, %s, %s, %s)
ON CONFLICT(user_id)
DO UPDATE
-- EXCLUDED holds the row proposed for insertion; the original
-- "SET level = users.level" was a no-op that kept the stale value
SET level = EXCLUDED.level;
""")

song_table_insert = ("""
INSERT INTO songs(
  song_id
, title
, artist_id
, year
, duration
)
VALUES (%s, %s, %s, %s, %s)
""")

artist_table_insert = ("""
INSERT INTO artists(
  artist_id
, name
, location
, latitude
, longitude
)
VALUES (%s, %s, %s, %s, %s)
ON CONFLICT(artist_id)
DO NOTHING
""")

time_table_insert = ("""
INSERT INTO time(
  start_time
, hour
, day
, week
, month
, year
, weekday
)
VALUES (TO_TIMESTAMP(%s / 1000.0), %s, %s, %s, %s, %s, %s)
""")

# FIND SONGS

song_select = ("""
SELECT s.song_id
,      a.artist_id
FROM   songs s
,      artists a
WHERE  s.artist_id = a.artist_id
AND    s.title = %s
AND    a.name = %s
AND    s.duration = %s
""")

# QUERY LISTS

#create_table_queries = [songplay_table_create, user_table_create, song_table_create, artist_table_create, time_table_create]
create_table_queries = [user_table_create, song_table_create, artist_table_create, time_table_create, songplay_table_create]
drop_table_queries = [songplay_table_drop, user_table_drop, song_table_drop, artist_table_drop, time_table_drop]
[ "walkandride@hotmail.com" ]
walkandride@hotmail.com
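A minimal execution sketch for the statements above, assuming a local PostgreSQL database named sparkifydb (the DSN values and the sample user row are placeholders):

import psycopg2

conn = psycopg2.connect("host=127.0.0.1 dbname=sparkifydb user=student password=student")
cur = conn.cursor()
# dimension tables first, songplays last, so its foreign keys resolve
for query in create_table_queries:
    cur.execute(query)
conn.commit()
cur.execute(user_table_insert, ('1', 'Ada', 'Lovelace', 'F', 'free'))
conn.commit()
conn.close()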
88fae0f7f05b8375208a3a5144f972d9792eac4c
4ed038a638725ac77731b0b97ddd61aa37dd8d89
/cairis/misc/KaosModel.py
39259e225455f6f69fa7f7bc60346a9b9a88fa53
[ "Apache-2.0" ]
permissive
RachelLar/cairis_update
0b784101c4aff81ff0390328eb615e335301daa2
0b1d6d17ce49bc74887d1684e28c53c1b06e2fa2
refs/heads/master
2021-01-19T06:25:47.644993
2016-07-11T20:48:11
2016-07-11T20:48:11
63,103,727
0
0
null
null
null
null
UTF-8
Python
false
false
14,998
py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

import pydot
from cairis.core.Borg import Borg
from cairis.core.ARM import *
from cairis.core.colourcodes import usabilityColourCode
from cairis.core.colourcodes import threatColourCode
from cairis.core.colourcodes import obstacleColourCode

class KaosModel:
  def __init__(self, associations, envName, kaosModelType='goal', goalName='', db_proxy=None, font_name=None, font_size=None):
    self.theAssociations = associations
    self.theEnvironmentName = envName
    self.theGoalName = goalName
    b = Borg()
    if db_proxy is None or font_size is None or font_name is None:
      self.dbProxy = b.dbProxy
      self.fontName = b.fontName
      self.fontSize = b.fontSize
    else:
      self.dbProxy = db_proxy
      self.fontName = font_name
      self.fontSize = font_size
    self.theGraph = pydot.Dot()
    self.theKaosModel = kaosModelType
    if (self.theKaosModel == 'task'):
      self.theGraph.set_graph_defaults(rankdir='LR')
    else:
      self.theGraph.set_graph_defaults(rankdir='BT')
    self.theGraphName = b.tmpDir + '/' + self.theKaosModel + '.dot'

  def size(self):
    return len(self.theAssociations)

  def buildNode(self, dimName, objtName):
    objtUrl = dimName + '#' + objtName
    if (dimName == 'goal'):
      self.theGraph.add_node(pydot.Node(objtName, shape='parallelogram', fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
      # soft-goal attributes
      self.theGraph.add_node(pydot.Node(objtName, shape='polygon', style='rounded', sides='6', distortion='-0.537997', orientation='52', skew='-0.960726', fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
    elif (dimName == 'obstacle'):
      obsId = self.dbProxy.getDimensionId(objtName, 'obstacle')
      envId = self.dbProxy.getDimensionId(self.theEnvironmentName, 'environment')
      self.theGraph.add_node(pydot.Node(objtName, shape='polygon', skew='-0.4', style='filled', pencolor='black', colorscheme='ylorrd9', fillcolor=obstacleColourCode(self.dbProxy.obstacleProbability(obsId, envId)), fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
    elif (dimName == 'domainproperty'):
      self.theGraph.add_node(pydot.Node(objtName, shape='house', fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
    elif (dimName == 'requirement'):
      self.theGraph.add_node(pydot.Node(objtName, shape='parallelogram', fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
    elif (dimName == 'countermeasure'):
      self.theGraph.add_node(pydot.Node(objtName, shape='hexagon', fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
    elif ((dimName == 'role') and (self.theKaosModel != 'task')):
      self.theGraph.add_node(pydot.Node(objtName, shape='hexagon', fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
    elif ((dimName == 'role') and (self.theKaosModel == 'task')):
      self.theGraph.add_node(pydot.Node(objtName, shape='ellipse', fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
    elif (dimName == 'usecase'):
      self.theGraph.add_node(pydot.Node(objtName, shape='ellipse', fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
    elif (dimName == 'task'):
      objt = self.dbProxy.dimensionObject(objtName, 'task')
      if (objt.assumption() == True):
        objtLabel = "&lt;&lt;Assumption&gt;&gt;" + objtName
      else:
        objtLabel = objtName
      taskScore = self.dbProxy.taskUsabilityScore(objtName, self.theEnvironmentName)
      self.theGraph.add_node(pydot.Node(objtName, label=objtLabel, shape='ellipse', style='filled', color=usabilityColourCode(taskScore), fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
    elif (dimName == 'misusecase'):
      ellipseColour = 'black'
      if (self.theKaosModel == 'task'):
        riskName = objtName[8:]
        riskObjt = self.dbProxy.dimensionObject(riskName, 'risk')
        riskScores = self.dbProxy.riskScore(riskObjt.threat(), riskObjt.vulnerability(), self.theEnvironmentName, riskName)
        highestScore = 0
        for riskScore in riskScores:
          currentScore = riskScore[2]
          if (currentScore > highestScore):
            highestScore = currentScore
        ellipseColour = threatColourCode(highestScore)
      self.theGraph.add_node(pydot.Node(objtName, shape='ellipse', style='filled', color=ellipseColour, fontcolor='white', fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
    elif (dimName == 'persona'):
      objt = self.dbProxy.dimensionObject(objtName, 'persona')
      if (objt.assumption() == True):
        objtLabel = "&lt;&lt;Assumption&gt;&gt;" + objtName
        self.theGraph.add_node(pydot.Node(objtName, label=objtLabel, shape='circle', fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
      else:
        self.theGraph.add_node(pydot.Node(objtName, shape='circle', fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
    elif (dimName == 'attacker'):
      self.theGraph.add_node(pydot.Node(objtName, shape='circle', style='filled', color='black', fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
    elif (dimName == 'response'):
      self.theGraph.add_node(pydot.Node(objtName, shape='note', fontname=self.fontName, fontsize=self.fontSize, URL=objtUrl))
    elif (dimName == 'asset'):
      fontColour = 'black'
      nodeColour = 'black'
      if (self.theKaosModel == 'task'):
        fontColour = 'blue'
        nodeColour = 'blue'
      self.theGraph.add_node(pydot.Node(objtName, shape='record', fontname=self.fontName, fontsize=self.fontSize, fontcolor=fontColour, color=nodeColour, URL=objtUrl))
    else:
      raise UnknownNodeType(dimName)

  def layout(self, renderer=''):
    if (renderer == ''):
      if ((self.theKaosModel == 'goal') or (self.theKaosModel == 'template_goal') or (self.theKaosModel == 'obstacle')):
        renderer = 'dot'
      if (self.theKaosModel == 'responsibility'):
        renderer = 'twopi'
      elif (self.theKaosModel == 'task'):
        renderer = 'dot'
    self.theGraph.write_xdot(self.theGraphName, prog=renderer)
    return open(self.theGraphName).read()

  def buildGoalModel(self, isComponent=False):
    self.nodeNameSet = set([])
    refNodes = set([])
    # the Graph get_edge function doesn't appear to work, so we'll keep a set of edges ourselves.
    edgeSet = set([])
    for association in self.theAssociations:
      goalName = association.goal()
      associationType = association.type()
      subGoalName = association.subGoal()
      alternativeFlag = association.alternative()
      goalDimName = association.goalDimension()
      subGoalDimName = association.subGoalDimension()
      goalEnv = association.environment()
      if ((self.theGoalName != '' or isComponent == True) and goalName not in self.nodeNameSet):
        self.buildNode(goalDimName, goalName)
      if ((self.theGoalName != '' or isComponent == True) and subGoalName not in self.nodeNameSet):
        self.buildNode(subGoalDimName, subGoalName)
      if ((associationType == 'obstruct') or (associationType == 'resolve')):
        if ((subGoalName, goalName) not in edgeSet):
          goalEdge = pydot.Edge(subGoalName, goalName, dir='forward', arrowhead='veetee', weight='1')
          self.theGraph.add_edge(goalEdge)
          edgeSet.add((subGoalName, goalName))
      elif (associationType == 'depend'):
        if ((subGoalName, goalName) not in edgeSet):
          objtUrl = 'depend#' + goalEnv + '/' + goalName + '/' + subGoalName
          self.theGraph.add_node(pydot.Node(objtUrl, shape='circle', label=' ', height='.2', width='.2', URL=objtUrl))
          edge1 = pydot.Edge(goalName, objtUrl, dir='forward', arrowhead='vee', weight='1')
          self.theGraph.add_edge(edge1)
          edge2 = pydot.Edge(objtUrl, subGoalName, dir='forward', arrowhead='vee', weight='1')
          self.theGraph.add_edge(edge2)
          edgeSet.add((subGoalName, goalName))
      else:
        refNodeName = goalName + '#' + associationType
        # This is probably a good time to see if there is already another goalassociation in the graph for another environment
        assocDirection = 'forward'
        arrowHead = 'vee'
        if ((subGoalName, refNodeName) not in edgeSet):
          objtUrl = 'link#' + goalEnv + '/' + goalName + '/' + subGoalName + '/' + goalDimName + '/' + subGoalDimName
          if (alternativeFlag == 1):
            refNodeName = goalName + '#' + subGoalName + '#' + associationType
          if (refNodeName not in refNodes):
            if (associationType == 'and'):
              objtUrl = 'linkand#' + goalEnv + '/' + goalName + '/' + subGoalName + '/' + goalDimName + '/' + subGoalDimName
              self.theGraph.add_node(pydot.Node(refNodeName, shape='circle', label=' ', height='.2', width='.2', URL=objtUrl))
            elif (associationType == 'or'):
              objtUrl = 'linkor#' + goalEnv + '/' + goalName + '/' + subGoalName + '/' + goalDimName + '/' + subGoalDimName
              self.theGraph.add_node(pydot.Node(refNodeName, shape='circle', style='filled', color='black', label=' ', height='.2', width='.2', URL=objtUrl))
            elif (associationType == 'responsible'):
              objtUrl = 'linkresponsible#' + goalEnv + '/' + goalName + '/' + subGoalName + '/' + goalDimName + '/' + subGoalDimName
              self.theGraph.add_node(pydot.Node(refNodeName, shape='circle', style='filled', color='red', label=' ', height='.2', width='.2', URL=objtUrl))
            elif (associationType == 'conflict'):
              objtUrl = 'linkconflict#' + goalEnv + '/' + goalName + '/' + subGoalName + '/' + goalDimName + '/' + subGoalDimName
              self.theGraph.add_node(pydot.Node(refNodeName, shape='circle', color='red', label=' ', height='.2', width='.2', URL=objtUrl))
              assocDirection = 'none'
              arrowHead = 'none'
            goalEdge = pydot.Edge(refNodeName, goalName, dir=assocDirection, arrowhead=arrowHead, weight='1')
            if ((refNodeName, goalName) not in edgeSet):
              self.theGraph.add_edge(goalEdge)
              edgeSet.add((refNodeName, goalName))
            refNodes.add(refNodeName)
          if ((subGoalName, refNodeName) not in edgeSet):
            self.theGraph.add_edge(pydot.Edge(subGoalName, refNodeName, dir='none', weight='1'))
            edgeSet.add((subGoalName, refNodeName))
        else:
          pass
          # Mark the node with a ? so we know the association properties might vary by environment
          # modifiedRefNodeName = '\"' + refNodeName + '\"'
          # refNode = self.theGraph.get_node(modifiedRefNodeName)
          # refNode.set('label','?')

  def buildTaskModel(self):
    self.nodeNameSet = set([])
    edgeSet = set([])
    fontSize = '7.5'
    for association in self.theAssociations:
      goalName = association.goal()
      subGoalName = association.subGoal()
      goalDimName = association.goalDimension()
      subGoalDimName = association.subGoalDimension()
      assocLabel = association.rationale()
      fontColour = 'black'
      edgeColour = 'black'
      edgeStyle = 'solid'
      assocDir = 'none'
      arrowHead = 'none'
      arrowTail = 'none'
      assocType = association.type()
      if (self.theGoalName != '' and goalName not in self.nodeNameSet):
        self.buildNode(goalDimName, goalName)
        self.nodeNameSet.add(goalName)
      if (self.theGoalName != '' and subGoalName not in self.nodeNameSet):
        self.buildNode(subGoalDimName, subGoalName)
        self.nodeNameSet.add(subGoalName)
      if (assocType in ('misusecasethreatasset_association', 'misusecasevulnerabilityasset_association', 'taskmisusecasethreat_association', 'taskmisusecasevulnerability_association')):
        fontColour = 'red'
        edgeColour = 'red'
        assocDir = 'forward'
        arrowHead = 'vee'
      elif (assocType in ('misusecasethreatmitigation_association', 'misusecasevulnerabilitymitigation_association', 'taskmisusecasemitigation_association')):
        fontColour = 'green'
        edgeColour = 'green'
        assocDir = 'forward'
        arrowHead = 'vee'
      elif (assocType == 'taskasset_association'):
        fontColour = 'blue'
        edgeColour = 'blue'
        arrowTail = 'vee'
      elif (assocType == 'rolepersona_association'):
        arrowHead = 'empty'
      if (assocType in ('misusecasethreatasset_association', 'misusecasevulnerabilityasset_association', 'taskasset_association')):
        arrowHead = 'none'
        arrowTail = 'vee'
      if (assocType == 'taskmisusecasemitigation_association'):
        arrowHead = 'none'
        arrowTail = 'vee'
      if (assocType == 'usecasetask_association'):
        arrowTail = 'vee'
        edgeStyle = 'dashed'
      objtUrl = goalDimName + '#' + subGoalDimName + '#' + assocType
      if ((subGoalName, goalName, assocLabel) not in edgeSet):
        if assocLabel == '':
          assocLabel = ' '
        self.theGraph.add_edge(pydot.Edge(subGoalName, goalName, style=edgeStyle, dir=assocDir, arrowhead=arrowHead, arrowtail=arrowTail, label=assocLabel, fontsize=fontSize, weight='1', fontcolor=fontColour, color=edgeColour, URL=objtUrl))
        edgeSet.add((subGoalName, goalName, assocLabel))

  def graph(self):
    try:
      elements = []
      if (self.theKaosModel == 'goal' and self.theGoalName == ''):
        elements = self.dbProxy.goalModelElements(self.theEnvironmentName)
      elif (self.theKaosModel == 'obstacle' and self.theGoalName == ''):
        elements = self.dbProxy.obstacleModelElements(self.theEnvironmentName)
      elif (self.theKaosModel == 'responsibility' and self.theGoalName == ''):
        elements = self.dbProxy.responsibilityModelElements(self.theEnvironmentName)
      elif (self.theKaosModel == 'task' and self.theGoalName == ''):
        elements = self.dbProxy.taskModelElements(self.theEnvironmentName)
      for element in elements:
        self.buildNode(element[0], element[1])
      if ((self.theKaosModel == 'goal') or (self.theKaosModel == 'responsibility') or (self.theKaosModel == 'obstacle')):
        self.buildGoalModel()
      elif (self.theKaosModel == 'template_goal'):
        self.buildGoalModel(True)
      else:
        self.buildTaskModel()
      return self.layout()
    except DatabaseProxyException, errTxt:
      raise ARMException(errTxt)
[ "shamal.faily@googlemail.com" ]
shamal.faily@googlemail.com
384496359f968c0c15c69e0e31cf20fe03eecb7d
5fd297a27951074f3434d45ab7367687a15ad3b1
/cep/__init__.py
0499b31d041f05719ad12c78b1179c582ae8bb89
[]
no_license
Cazuky/cepy
808e155cdcf70bc25ef87f658e11c53bdb715530
eca789cf632156a2e61b9d6fb4422056105a1c8d
refs/heads/master
2021-01-11T14:12:31.974783
2012-11-27T17:45:25
2012-11-27T17:45:25
null
0
0
null
null
null
null
UTF-8
Python
false
false
64
py
from flaskapp import create_application

__version__ = '0.0.0'
[ "luanfonceca@gmail.com" ]
luanfonceca@gmail.com
8c2a09329555943408f9a1d045564df59699fbf7
0b7272149f953c1a1fdbea4e17095e25f1e5c620
/hours/admin.py
0705be8278b9e1c111d3b5bf621a12928758f9fe
[]
no_license
Jayshah6699/Medico
c116bc6b09efc2c47d11758734122fc1c0e247e7
ecc2b2f28866758cbe8eeb48b554d40bae65c502
refs/heads/main
2023-01-30T18:54:37.331793
2020-12-19T12:13:56
2020-12-19T12:13:56
null
0
0
null
null
null
null
UTF-8
Python
false
false
132
py
from django.contrib import admin

from .models import Department, Shift

admin.site.register(Department)
admin.site.register(Shift)
[ "andrewroblesdev@gmail.com" ]
andrewroblesdev@gmail.com
7f955293e5e91bed3c822740e591c956bf441510
6f36b511e8e74b82e113ce137f7bec42797e003c
/catalog_editor_main.py
43d3d1497ee5b82c6232625679fefe1fb5c26b7a
[ "MIT" ]
permissive
FIREdog5/Catalog-Editor
b15d28aae6ded8dc72f331da525ea4928d0894ab
49dab360e7284ac318db5d25dc2b2c1c97fa9a58
refs/heads/main
2023-02-10T06:52:59.972641
2021-01-09T09:05:16
2021-01-09T09:05:16
328,013,575
0
0
null
null
null
null
UTF-8
Python
false
false
1,276
py
import tkinter as tk
import first_pass as fp
import html_compiler as hc
import data_manager as dm
import pre_edit_pass as pep

def main(old_window=None, return_function=lambda *_: None):
    if old_window:
        old_window.destroy()
    window = tk.Tk()
    screen_width = window.winfo_screenwidth()
    screen_height = window.winfo_screenheight()
    window.geometry("{0}x{1}+{2}+{3}".format(screen_width//2, screen_height//2,
                                             screen_width//2 - screen_width//4,
                                             screen_height//2 - screen_height//4))
    greeting = tk.Label(master=window, text="Select what you want the program to do:")
    greeting.pack()
    frame = tk.Frame(master=window, borderwidth=50)
    frame.pack()

    def first_pass_call():
        fp.main(old_window=window, return_function=main)

    button1 = tk.Button(master=frame, command=first_pass_call, text="First pass")
    button1.pack(side=tk.LEFT)

    def edit_pass_call():
        pep.main(old_window=window, return_function=main)

    button2 = tk.Button(master=frame, command=edit_pass_call, text="Edit pass")
    button2.pack(side=tk.LEFT)

    def html_call():
        hc.main(old_window=window, return_function=main)

    button2 = tk.Button(master=window, command=html_call, text="Create catalog.html")
    button2.pack()
    window.mainloop()
[ "betterthanbossfiredog5@gmail.com" ]
betterthanbossfiredog5@gmail.com
6aa00e15918047934e9a5d1a3b2ca63b884a34e0
31a2a512544734047f960f3ad386137c53004945
/pierre/gamelogic.py
def2add812f287db056a40e3807415fa01b8de13
[]
no_license
ValentinCPE/simonsGame
f077bd267eda3badb09756c5e92a53778b09e4e3
8bd468228f21d9c80370bbc158ec13fb6014f1e3
refs/heads/master
2021-06-07T01:53:38.258995
2016-11-16T21:44:41
2016-11-16T21:44:41
null
0
0
null
null
null
null
UTF-8
Python
false
false
740
py
#!/usr/bin/env python2
import random, os

def compareSequences(list1, list2):
    # compare every element; the original bound range(0, len(list1)-1)
    # skipped the last item, so mismatched final entries were accepted
    for i in range(0, len(list1)):
        if list1[i] != list2[i]:
            return False
    return True

limit = 3
points = 0
while True:
    usersequence = []
    sequence = []
    while len(sequence) < limit:
        sequence.append(random.randint(0, 3))
    print("\nReplay sequence :")
    print(sequence)
    #for i in sequence:
    #    os.system("beep -f "+str(i*200+400)+" -l 400 -D 100")
    while True:
        print("Your sequence :")
        print(usersequence)
        if len(usersequence) < limit:
            usersequence.append(int(input()))
        else:
            if compareSequences(sequence, usersequence):
                points = points + 1
                break
            else:
                print("End "+str(points)+" points.")
                quit()
    limit = limit + 1
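# --- Added note (illustrative; not part of the original file) ---
# The bound fix above matters: with range(0, len(list1)-1) the last element
# was never compared, e.g.
#   compareSequences([1, 2, 3], [1, 2, 9])  -> True with the old bound,
#                                              False with the corrected one.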
[ "hyacinthe@openmailbox.org" ]
hyacinthe@openmailbox.org
01945bf47b3a7cee41fd7758b27020b16dfe8c9f
c018a7de1de41c0b82c3be5430e5d18bb546b944
/adapter/gql/mutation/mutation.py
00a050f8d8bbd4e5d106fac370f050257a0136b0
[]
no_license
rafaelbm9126/py-crud
3b8f2741563f59cc6e3c490fc8c7addfd2b88923
88442915691340345d098941c60d23d06da4d74f
refs/heads/master
2022-01-09T13:25:21.472348
2019-06-15T22:54:06
2019-06-15T22:54:06
null
0
0
null
null
null
null
UTF-8
Python
false
false
271
py
from graphene import (
    ObjectType
)

from adapter.gql.mutation.post.create import PostCreate
from adapter.gql.mutation.comment.create import CommentCreate


class MutationC(ObjectType):
    post_create = PostCreate.Field()
    comment_create = CommentCreate.Field()
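# --- Illustrative usage (added; not part of the original module) ---
# A minimal sketch, assuming graphene is installed, of how a mutation
# container like MutationC is typically attached to a schema. _ExampleQuery
# is a hypothetical placeholder; the project's real query type is defined
# elsewhere in the repository.
if __name__ == '__main__':
    import graphene

    class _ExampleQuery(graphene.ObjectType):
        ok = graphene.Boolean()

        def resolve_ok(self, info):
            return True

    schema = graphene.Schema(query=_ExampleQuery, mutation=MutationC)
    print(schema)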
[ "rafaelbm9126@gmail.com" ]
rafaelbm9126@gmail.com
e3eb7c216955ffc40e650b1d631b5fe332b79e2f
ef875150adbd1b22864542f6038e8d30ca15380b
/venv/Lib/site-packages/wikidataintegrator/tests/tests.py
95ae61b59be6ab1295c95fa178a12eb40ceaadeb
[]
no_license
JimmyYourHonor/extract_syn_idf
52d452d0e04ab906d3a2b08d400658f083fdc16a
df05e83cca9312c8b08cfc6436011eeec864f078
refs/heads/master
2020-04-16T21:35:01.096776
2019-03-29T04:03:53
2019-03-29T04:03:53
165,930,787
0
1
null
null
null
null
UTF-8
Python
false
false
8,278
py
import unittest
import pprint
import copy
import requests

from wikidataintegrator import wdi_core, wdi_fastrun, wdi_login
from wikidataintegrator.wdi_core import WDBaseDataType, WDApiError

__author__ = 'Sebastian Burgstaller-Muehlbacher'
__license__ = 'AGPLv3'


class TestMediawikiApiCall(unittest.TestCase):
    def test_all(self):
        with self.assertRaises(WDApiError):
            wdi_core.WDItemEngine.mediawiki_api_call("GET", "http://www.wikidataaaaaaaaa.org",
                                                     max_retries=3, retry_after=1,
                                                     params={'format': 'json', 'action': 'wbgetentities', 'ids': 'Q42'})
        with self.assertRaises(requests.HTTPError):
            wdi_core.WDItemEngine.mediawiki_api_call("GET", "http://httpstat.us/400", max_retries=3, retry_after=1)

        wdi_core.WDItemEngine.mediawiki_api_call("GET", max_retries=3, retry_after=1,
                                                 params={'format': 'json', 'action': 'wbgetentities', 'ids': 'Q42'})


class TestDataType(unittest.TestCase):
    def test_wd_quantity(self):
        dt = wdi_core.WDQuantity(value='34', prop_nr='P43')
        dt_json = dt.get_json_representation()
        if not dt_json['mainsnak']['datatype'] == 'quantity':
            raise
        value = dt_json['mainsnak']['datavalue']
        if not value['value']['amount'] == '+34':
            raise
        if not value['value']['unit'] == '1':
            raise

        dt2 = wdi_core.WDQuantity(value='34', prop_nr='P43', upper_bound='35', lower_bound='33')
        value = dt2.get_json_representation()['mainsnak']['datavalue']
        if not value['value']['amount'] == '+34':
            raise
        if not value['value']['unit'] == '1':
            raise
        if not value['value']['upperBound'] == '+35':
            raise
        if not value['value']['lowerBound'] == '+33':
            raise

    def test_wd_geoshape(self):
        dt = wdi_core.WDGeoShape(value='Data:Inner_West_Light_Rail_stops.map', prop_nr='P43')
        dt_json = dt.get_json_representation()
        if not dt_json['mainsnak']['datatype'] == 'geo-shape':
            raise
        value = dt_json['mainsnak']['datavalue']
        if not value['value'] == 'Data:Inner_West_Light_Rail_stops.map':
            raise
        if not value['type'] == 'string':
            raise

    def test_wd_string(self):
        pass

    def test_live_item(self):
        wd_item = wdi_core.WDItemEngine(wd_item_id='Q423111')
        mass_statement = [x for x in wd_item.statements if x.get_prop_nr() == 'P2067'].pop()
        pprint.pprint(mass_statement.get_json_representation())
        if not mass_statement:
            raise
        # TODO: get json directly from the API and compare part to WDItemEngine

    def test_deletion_request(self):
        items_for_deletion = ['Q423', 'Q43345']
        wdi_core.WDItemEngine.delete_items(item_list=items_for_deletion, reason='test deletion', login=None)


class TestFastRun(unittest.TestCase):
    """
    some basic tests for fastrun mode
    """

    def test_fast_run(self):
        qid = 'Q27552312'
        statements = [
            wdi_core.WDExternalID(value='P40095', prop_nr='P352'),
            wdi_core.WDExternalID(value='YER158C', prop_nr='P705')
        ]

        frc = wdi_fastrun.FastRunContainer(base_filter={'P352': '', 'P703': 'Q27510868'},
                                           base_data_type=wdi_core.WDBaseDataType,
                                           engine=wdi_core.WDItemEngine)

        fast_run_result = frc.write_required(data=statements)

        if fast_run_result:
            message = 'fastrun failed'
        else:
            message = 'successful fastrun'
        print(fast_run_result, message)

        # here, fastrun should succeed, if not, test failed
        if fast_run_result:
            raise ValueError

    def test_fastrun_label(self):
        # tests fastrun label, description and aliases, and label in another language
        data = [wdi_core.WDExternalID('/m/02j71', 'P646')]
        fast_run_base_filter = {'P361': 'Q18589965'}
        item = wdi_core.WDItemEngine(wd_item_id="Q2", data=data, fast_run=True,
                                     fast_run_base_filter=fast_run_base_filter)
        frc = wdi_core.WDItemEngine.fast_run_store[0]
        frc.debug = True

        assert item.get_label('en') == "Earth"
        descr = item.get_description('en')
        assert len(descr) > 3
        aliases = item.get_aliases()
        assert "Terra" in aliases

        assert list(item.fast_run_container.get_language_data("Q2", 'en', 'label'))[0] == "Earth"
        assert item.fast_run_container.check_language_data("Q2", ['not the Earth'], 'en', 'label')
        assert "Terra" in item.get_aliases()
        assert "planet" in item.get_description()
        assert item.get_label("es") == "Tierra"

        item.set_description(descr)
        item.set_description("fghjkl")
        assert item.wd_json_representation['descriptions']['en'] == {'language': 'en', 'value': 'fghjkl'}
        item.set_label("Earth")
        item.set_label("xfgfdsg")
        assert item.wd_json_representation['labels']['en'] == {'language': 'en', 'value': 'xfgfdsg'}
        item.set_aliases(["fake alias"], append=True)
        assert {'language': 'en', 'value': 'fake alias'} in item.wd_json_representation['aliases']['en']

        # something thats empty (for now.., can change, so this just makes sure no exception is thrown)
        frc.check_language_data("Q2", ['Ewiase'], 'ak', 'label')
        frc.check_language_data("Q2", ['not Ewiase'], 'ak', 'label')
        frc.check_language_data("Q2", [''], 'ak', 'description')
        frc.check_language_data("Q2", [], 'ak', 'aliases')
        frc.check_language_data("Q2", ['sdf', 'sdd'], 'ak', 'aliases')

        item.get_label("ak")
        item.get_description("ak")
        item.get_aliases("ak")
        item.set_label("label", lang="ak")
        item.set_description("d", lang="ak")
        item.set_aliases(["a"], lang="ak", append=True)


def test_sitelinks():
    data = [wdi_core.WDItemID(value='Q12136', prop_nr='P31')]
    item = wdi_core.WDItemEngine(wd_item_id='Q622901', domain='diseases', data=data)
    item.get_sitelink("enwiki")
    assert "enwiki" not in item.wd_json_representation['sitelinks']
    item.set_sitelink("enwiki", "something")
    assert item.get_sitelink("enwiki")['title'] == "something"
    assert "enwiki" in item.wd_json_representation['sitelinks']


def test_nositelinks():
    # this item doesn't and probably wont ever have any sitelinks (but who knows?? maybe one day..)
    data = [wdi_core.WDItemID(value='Q5', prop_nr='P31')]
    item = wdi_core.WDItemEngine(wd_item_id='Q27869338', domain=' ', data=data)
    item.get_sitelink("enwiki")
    assert "enwiki" not in item.wd_json_representation['sitelinks']
    item.set_sitelink("enwiki", "something")
    assert item.get_sitelink("enwiki")['title'] == "something"
    assert "enwiki" in item.wd_json_representation['sitelinks']


####
## tests for statement equality, with and without refs
####
def test_ref_equals():
    # statements are identical
    oldref = [wdi_core.WDExternalID(value='P58742', prop_nr='P352'),
              wdi_core.WDItemID(value='Q24784025', prop_nr='P527'),
              wdi_core.WDTime('+2001-12-31T12:01:13Z', prop_nr='P813')]
    olditem = wdi_core.WDItemID("Q123", "P123", references=[oldref])
    newitem = copy.deepcopy(olditem)
    assert olditem.equals(newitem, include_ref=False)
    assert olditem.equals(newitem, include_ref=True)

    # dates are a month apart
    newitem = copy.deepcopy(olditem)
    newitem.references[0][2] = wdi_core.WDTime('+2002-1-31T12:01:13Z', prop_nr='P813')
    assert olditem.equals(newitem, include_ref=False)
    assert not olditem.equals(newitem, include_ref=True)

    # multiple refs
    newitem = copy.deepcopy(olditem)
    newitem.references.append([wdi_core.WDExternalID(value='99999', prop_nr='P352')])
    assert olditem.equals(newitem, include_ref=False)
    assert not olditem.equals(newitem, include_ref=True)
    olditem.references.append([wdi_core.WDExternalID(value='99999', prop_nr='P352')])
    assert olditem.equals(newitem, include_ref=True)
[ "jfu.jf60@gmail.com" ]
jfu.jf60@gmail.com
727b6531f40238a90a6b79a006be2a0a540553cb
432dde1eea596e915673f57aca3c02687c503089
/home/home_location.py
d4439ea9800fda01e4efbbc74b1169d20b62a67e
[]
no_license
DavidLouisLevine/adventure
2762b825652d2c17eececdbd3951137ae455dada
dd91f7a155777bea5199e33e4f7238300080beed
refs/heads/master
2020-07-11T18:51:04.405008
2019-09-29T04:41:22
2019-09-29T04:41:22
204,619,716
0
0
null
null
null
null
UTF-8
Python
false
false
1,230
py
from adventure.location import Location

locations = (
    Location('Living', (
        'This room has a couch, chairs and TV.',
        'You have entered the living room. You can watch TV here.',
        'This room has two sofas, chairs and a chandelier.',
        'A huge television that is great for watching games.'),
        (0, 'Bedroom', 'Garden', 0)),
    Location('Garden', (
        'This space has a swing, flowers and trees.',
        'You have arrived at the garden. You can exercise here',
        'This area has plants, grass and rabbits.',
        'A nice shiny bike that is fun to ride.'),
        (0, 'Kitchen', 0, 'Living')),
    Location('Kitchen', (
        'This room has a fridge, oven, and a sink.',
        'You have arrived in the kitchen. You can find food and drinks here.',
        'This living area has pizza, coke, and icecream.',
        'A red juicy fruit.'),
        ('Garden', 0, 0, 'Bedroom')),
    Location('Bedroom', (
        'This area has a bed, desk and a dresser.',
        'You have arrived in the bedroom. You can rest here.',
        'You see a wooden cot and a mattress on top of it.',
        'A nice, comfortable bed with pillows and sheets.'),
        ('Living', 0, 'Kitchen', 0))
)
[ "dll@davidlouislevine.com" ]
dll@davidlouislevine.com
d9acf31c600bcd66b25ae0ae1a318d9535f57025
73a96d5e28352fd9d25a89c1c3ac509b9367ee49
/ExerciciosListas/Exe19.py
d2850a6541184650878b5213a38733c598ca54a4
[]
no_license
Elton86/ExerciciosPython
5023d32022338c05b759714ce691a822a1a90f5d
c8166c4f53462a0da744193892cdacc7a3b6b8b5
refs/heads/main
2023-05-26T19:14:39.836238
2021-06-17T01:25:22
2021-06-17T01:25:22
340,248,632
0
0
null
null
null
null
UTF-8
Python
false
false
2,885
py
"""ma empresa de pesquisas precisa tabular os resultados da seguinte enquete feita a um grande quantidade de organizações: "Qual o melhor Sistema Operacional para uso em servidores?" As possíveis respostas são: 1- Windows Server 2- Unix 3- Linux 4- Netware 5- Mac OS 6- Outro Você foi contratado para desenvolver um programa que leia o resultado da enquete e informe ao final o resultado da mesma. O programa deverá ler os valores até ser informado o valor 0, que encerra a entrada dos dados. Não deverão ser aceitos valores além dos válidos para o programa (0 a 6). Os valores referentes a cada uma das opções devem ser armazenados num vetor. Após os dados terem sido completamente informados, o programa deverá calcular a percentual de cada um dos concorrentes e informar o vencedor da enquete. O formato da saída foi dado pela empresa, e é o seguinte: ------------------- ----- --- Windows Server 1500 17% Unix 3500 40% Linux 3000 34% Netware 500 5% Mac OS 150 2% Outro 150 2% ------------------- ----- Total 8800 O Sistema Operacional mais votado foi o Unix, com 3500 votos, correspondendo a 40% dos votos. """ windows_server = unix = linux = netware = mac_os = outro = soma_votos = 0 while True: voto = str(input("1 - Windows Server\n" "2 - Unix\n" "3 - Linux\n" "4 - Netware\n" "5 - Mac OS\n" "6 - Outro\n" "0 - Sair\n" "Digite seu voto: ")) if voto == "0": print("Programa encerrado! Obrigado!") break elif voto not in ("1", "2", "3", "4", "5", "6"): print("Voto inválido, tente novamente!\n\n") continue else: if voto == "1": windows_server += 1 elif voto == "2": unix += 1 elif voto == "3": linux += 1 elif voto == "4": netware += 1 elif voto == "5": mac_os += 1 elif voto == "6": outro += 1 soma_votos += 1 print("Obrigado pelo seu voto!") print("*** Resulatdos ***") print("Windows Server \t {} \t {:.2%}\n" "Unix \t\t\t{} \t {:.2%} \n" "Linux \t\t\t{} \t {:.2%}\n" "Netware \t\t{} \t {:.2%}\n" "Mac OS \t\t\t{} \t {:.2%}\n" "Outros \t\t\t{} \t {:.2%}\n".format(windows_server, (windows_server / soma_votos), unix, (unix / soma_votos), linux, (linux / soma_votos), netware, (netware / soma_votos), mac_os, (mac_os / soma_votos), outro, (outro / soma_votos))) print("Total de votos = {}".format(soma_votos))
[ "elton86@gmail.com" ]
elton86@gmail.com
b72811cf470f14d3204cf120201855ee3e250c6f
99ec105d854a24488d8efec8b425c36daae0db64
/src/Fostr_Django/Fostr_Django/settings.py
da4c07c513e0678d110f912db439084763c90098
[]
no_license
allenwang52/Fostr
6630f053cc5f9e398e1f74c7e9a844ab7af941ca
6f07570d34059637edeb29b375b48a642f880efa
refs/heads/master
2022-07-25T21:19:48.245225
2020-05-23T01:44:48
2020-05-23T01:44:48
258,685,650
0
1
null
null
null
null
UTF-8
Python
false
false
3,131
py
""" Django settings for Fostr_Django project. Generated by 'django-admin startproject' using Django 3.0.3. For more information on this file, see https://docs.djangoproject.com/en/3.0/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/3.0/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'v$tig!v-bf-hnd)l%@t1ls+*3xlpm8@-g0ly)+ml$=men*^k0u' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'phonenumber_field', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'Fostr_Django.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'Fostr_Django.wsgi.application' # Database # https://docs.djangoproject.com/en/3.0/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/3.0/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/3.0/howto/static-files/ STATIC_URL = '/static/'
[ "allenwang52@yahoo.com" ]
allenwang52@yahoo.com
cca8684959f85d2e036f5c9887666fd2e912318b
5865cc1b70db72b7a9a9a07547f05a1f47959bb1
/supervised_learning/0x02-tensorflow/0-create_placeholders.py
030bfd54225f665c91036d412c47e2ec3b3197fd
[]
no_license
nildiert/holbertonschool-machine_learning
c8cefc3a784348f09128c0f4d82d65b9d56000c5
273f81feaa14fe24ac4db5d82be0d13299e857b8
refs/heads/master
2020-12-21T12:27:48.280880
2020-09-25T17:58:33
2020-09-25T17:58:33
236,429,499
1
0
null
null
null
null
UTF-8
Python
false
false
326
py
#!/usr/bin/env python3
""" This function creates two placeholders """
import tensorflow as tf


def create_placeholders(nx, classes):
    """ Method to create placeholders """
    x = tf.placeholder(tf.float32, shape=(None, nx), name='x')
    y = tf.placeholder(tf.float32, shape=(None, classes), name='y')
    return (x, y)
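# --- Illustrative usage (added; not part of the original file) ---
# A minimal sketch, assuming TensorFlow 1.x, of the shapes produced for
# 784 input features and 10 classes (the batch dimension is left open).
if __name__ == '__main__':
    x, y = create_placeholders(784, 10)
    print(x)  # Tensor("x:0", shape=(?, 784), dtype=float32)
    print(y)  # Tensor("y:0", shape=(?, 10), dtype=float32)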
[ "niljordan23@gmail.com" ]
niljordan23@gmail.com
63616405b27720b76566b120e130bee0ac7bae8e
cfa464f5e4ec36b740d6e884f0ca1e170ebd2efb
/0x15-api/1-export_to_CSV.py
402fb19b0b58ff81079e112f6fdb96aead0b7b14
[]
no_license
Immaannn2222/holberton-system_engineering-devops
6ea0c4f3af2943c242e1928a2b4e66932f193a34
bcf001f3693fc55d54842ad92848ee783edee37a
refs/heads/master
2020-12-22T21:12:22.507064
2020-10-12T19:46:37
2020-10-12T19:46:37
236,933,884
0
0
null
null
null
null
UTF-8
Python
false
false
798
py
#!/usr/bin/python3
"""HTTP WITH PYTHON"""
import csv
import requests
from sys import argv


if __name__ == "__main__":
    """main"""
    to_do = requests.get('https://jsonplaceholder.typicode.com/todos/',
                         params={"userId": argv[1]})
    user = requests.get('https://jsonplaceholder.typicode.com/users',
                        params={"id": argv[1]})
    list_json = to_do.json()
    user_json = user.json()
    for i in user_json:
        name = i.get("username")
        i_d = i.get('id')
        with open(str(i_d) + '.csv', mode='w') as f:
            # use a distinct name for the writer so it does not shadow the
            # csv module (the original rebound the name `csv` here)
            writer = csv.writer(f, delimiter=',', quoting=csv.QUOTE_ALL)
            for t in list_json:
                status = t.get("completed")
                task_title = t.get("title")
                writer.writerow([i_d, name, status, task_title])
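# --- Illustrative usage (added; not part of the original script) ---
# Invoked with an employee id, the script writes <id>.csv with one quoted
# row per task; exact row text depends on the live jsonplaceholder data:
#   $ ./1-export_to_CSV.py 2
#   $ head -1 2.csv
#   "2","Antonette","False","..."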
[ "imennaayari@gmail.com" ]
imennaayari@gmail.com
d21c3d5ac2ec534e248cf7b149b64ca6d5a7c7aa
95062add19335123f4d3569644f37056310a54d5
/admin_manage/urls.py
6900cd054529d4cdb58b4cad6d856d61cfc08543
[]
no_license
jamisonz87/merch-app
653518474bdae6576ca25779fc7e4658e5eec114
cc6967d92e51a3ce73904b6ea58366b7fd44b3b0
refs/heads/master
2022-11-05T21:20:01.041349
2020-06-26T20:32:09
2020-06-26T20:32:09
275,229,555
0
0
null
null
null
null
UTF-8
Python
false
false
255
py
from django.urls import path
from . import views

urlpatterns = [
    path('add_product/', views.add_product, name='add_product'),
    path('', views.index, name='admin_index'),
    path('view_order/<int:order_id>', views.view_order, name='view_order'),
]
[ "jamisonz87@gmail.com" ]
jamisonz87@gmail.com
e42f783b5a1c733909130de9eb7ff7a225dfbaf1
f1131c4079f437186cf92a75b06a472993bd67dd
/Superscalar-Processor/simulator.py
d25fd0ea90c5efadaae5f0a0086e7c764b865bda
[]
no_license
pradeep90/Processor-Simulator
c4fc5c92f3e29d66dc70e4897d210d32221d8295
ecac52f69633136b72c85ddf41215b556e21cf9f
refs/heads/master
2021-01-19T14:59:17.100469
2012-12-07T07:15:12
2012-12-07T07:15:12
6,086,352
1
0
null
null
null
null
UTF-8
Python
false
false
524
py
from processor import Processor
import sys


def main():
    try:
        source_code_file_name = sys.argv[1]
    except:
        source_code_file_name = 'human-code.txt'
        # source_code_file_name = 'human-code-2.txt'

    code_file = file(source_code_file_name, 'r')
    initial_values_file = file('./initial_values_matrix.txt', 'r')
    # initial_values_file = file('./initial_values_fibo.txt', 'r')

    processor = Processor(code_file, initial_values_file)
    processor.run()

if __name__ == "__main__":
    main()
[ "gohanpra@gmail.com" ]
gohanpra@gmail.com
155c373da8447288c1aafebb642fba1578142292
221e648122feca6be8490ea1e149ea8d16b46e1b
/panorama_stitch.py
9d994e5bb260c572781b7a82404c846278ad53de
[]
no_license
Hanuu/panorama
2cc8cf92ad4037acbc14fe07d43144de3de4063a
a7af3343218ac824870509ab742e62a9da5923b7
refs/heads/master
2020-04-01T19:56:48.743362
2018-12-03T05:52:15
2018-12-03T05:52:15
153,579,700
0
0
null
null
null
null
UTF-8
Python
false
false
3,246
py
""" Panorama Stitching Class Given two images, return a stitched image. Two images could be rotated. Design Doc: https://docs.google.com/document/d/1785FwZ11CizzumJ3CRqsoBdRwDlqTid2EWIUzbM2B2I/edit?usp=sharing """ import numpy as np import imutils import cv2 class PanoramaStitcher: def __int__(self): pass def stitch_multiple_images(self, images): number_of_images = len(images) temp_image = images[0] for trial in range(1, number_of_images-1): temp_image = self.stitch_two_images(temp_image, images[trial]) return temp_image def stitch_two_images(self, image1, image2): """ :param image1: :param image2: :return: stitched image """ (key_points_from_image1, features_from_image1) = self.get_key_points_and_features(image1) (key_points_from_image2, features_from_image2) = self.get_key_points_and_features(image2) # print(features_from_image2) # print(features_from_image1) # print(key_points_from_image2) # print(key_points_from_image1) homography_matrix = self.get_homography_matrix(key_points_from_image1, key_points_from_image2, features_from_image1, features_from_image2) print(homography_matrix) result = cv2.warpPerspective(image1, homography_matrix, (image1.shape[1] + image2.shape[1], image1.shape[0])) result[0:image2.shape[0], 0:image2.shape[1]] = image2 return result def get_key_points_and_features(self, image): """ :param image: :return: (keypoints in np array, features) """ (key_points, features) = cv2.xfeatures2d.SIFT_create().detectAndCompute(image, None) key_points = np.float32([key_point.pt for key_point in key_points]) return (key_points, features) def get_homography_matrix(self, key_points_from_image1, key_points_from_image2, features_from_image1,features_from_image2): """ :param key_points_from_image1: :param features_from_image1: :param key_points_from_image2: :param features_from_image2: :return: homography matrix """ print("!",features_from_image2) print("!",features_from_image1) raw_matches = cv2.DescriptorMatcher_create("BruteForce").knnMatch(features_from_image1, features_from_image2, 2) # print(raw_matches) matches = [] # print(raw_matches) for raw_match in raw_matches: if len(raw_match) == 2 and raw_match[0].distance < raw_match[1].distance: matches.append((raw_match[0].trainIdx, raw_match[0].queryIdx)) points_from_image1 = np.float32([key_points_from_image1[i] for (_, i) in matches]) points_from_image2 = np.float32([key_points_from_image2[i] for (i, _) in matches]) (homography_graph, status) = cv2.findHomography(points_from_image1, points_from_image2, cv2.RANSAC, 4.0) return homography_graph
[ "minjunkwakwak@gmail.com" ]
minjunkwakwak@gmail.com
502b02d1c61d4e86509630f79c837a98d579e3d2
39d3a01f30dd24be11f284a6fd25aae3c3ac56e5
/pixeldefend/models/pixelcnn_cifar.py
2bd1d74b9beec39dd65d303e28781ab2cafd0007
[]
no_license
anishathalye/obfuscated-gradients
a94470ad4ca7a9f16f2856f7fbab558e2e4c891e
9ef8b37ae985183f1cd37a40d126af7ab6c8d057
refs/heads/master
2023-06-21T19:37:43.648545
2023-06-10T14:11:29
2023-06-10T14:11:29
119,627,984
917
187
null
2018-11-04T23:15:52
2018-01-31T03:13:46
Jupyter Notebook
UTF-8
Python
false
false
2,166
py
import models.pixel_cnn_pp.nn as nn
from models.pixel_cnn_pp.model import model_spec
from utils import optimistic_restore

import tensorflow as tf
import numpy as np

import os

_PIXELCNN_CHECKPOINT_NAME = 'params_cifar.ckpt'

DATA_DIR = os.path.join(
    os.path.dirname(__file__),
    os.pardir,
    'data'
)

_obs_shape = (32, 32, 3)
_model_opt = {
    'nr_resnet': 5,
    'nr_filters': 160,
    'nr_logistic_mix': 10,
    'resnet_nonlinearity': 'concat_elu'
}
# XXX this being called "model" could cause problems if other things want to use the same scope
_model_func = tf.make_template('model', model_spec)

def _init_model(sess, checkpoint_name=None):
    global _model_func
    global _obs_shape
    global _model_opt

    if checkpoint_name is None:
        checkpoint_name = _PIXELCNN_CHECKPOINT_NAME
    checkpoint_path = os.path.join(DATA_DIR, checkpoint_name)

    x_init = tf.placeholder(tf.float32, (1,) + _obs_shape)
    model = _model_func(x_init, init=True, dropout_p=0.5, **_model_opt)
    # XXX need to add a scope argument to optimistic_restore and filter for
    # things that start with "{scope}/", so we can filter for "model/", because
    # the pixelcnn checkpoint has some random unscoped stuff like 'Variable'
    optimistic_restore(sess, checkpoint_path)

# input is [batch, 32, 32, 3], pixels in [-1, 1]
_initialized = False
_initialized_name = None
def model(sess, image, checkpoint_name=None):
    global _initialized
    global _initialized_name
    global _model_func
    global _model_opt

    if checkpoint_name is not None:
        checkpoint_name = os.path.basename(checkpoint_name)

    # currently, we only support one version of this model loaded at a
    # time; making multiple versions probably involves variable renaming or
    # something else that's probably painful
    assert not _initialized or _initialized_name == checkpoint_name
    if not _initialized:
        _init_model(sess, checkpoint_name)
        _initialized = True
        _initialized_name = checkpoint_name

    out = _model_func(image, dropout_p=0, **_model_opt)
    loss = nn.discretized_mix_logistic_loss(image, out)
    return loss, out
[ "nicholas@carlini.com" ]
nicholas@carlini.com
06d0c2730cb97528517b6138758f05097f3d260f
3d5f5403d9ac344abdd3fa71632514d2c4343179
/unsplash_python_script.py
7d03ed23440094aa278b42a7aa6a75a03f3b8c30
[]
no_license
psonH/Python-Selenium-Web-Automation
2d29d5e840d7c8456b63060614af4d572eddb803
625e62696f76e6a43a6cedf5b71c6fc6a9a30eb7
refs/heads/master
2022-11-16T03:41:28.444079
2020-07-11T09:20:02
2020-07-11T09:20:02
278,822,580
0
0
null
null
null
null
UTF-8
Python
false
false
1,517
py
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
# from selenium.webdriver.common.by import By
# from selenium.webdriver.support.ui import WebDriverWait
# from selenium.webdriver.support import expected_conditions as EC
import time
import urllib.request

# path to the chrome web driver
PATH = "C:/Users/Dell/Desktop/Python Selenium/driver/chromedriver.exe"

# creating the driver object
driver = webdriver.Chrome(PATH)

# accessing the website with .get() method
driver.get("https://unsplash.com/")

# perform a search query from the search bar
search = driver.find_element_by_id('SEARCH_FORM_INPUT_nav-bar')
search.send_keys("cute cats")
search.send_keys(Keys.RETURN)

time.sleep(5)

# images_downld_tag = driver.find_elements_by_class_name("_3W4BS _1jjdS _1CBrG _1WPby xLon9 LqSCP _17avz _1B083 _3d86A")

try:
    # images_tags = WebDriverWait(driver, 10).until(
    #     EC.presence_of_element_located((By.TAG_NAME, 'a'))
    # )
    images_tags = driver.find_elements_by_tag_name('a')
    # print(images_tags)

    image_links = []
    for image_tag in images_tags:
        if image_tag.get_attribute('title') == 'Download photo':
            image_link = image_tag.get_attribute('href')
            image_links.append(image_link)
            # print(image_link)
except:
    # time.sleep(10)
    driver.quit()

driver.quit()

count = 0
for image in image_links:
    count = count + 1
    print(image)
    urllib.request.urlretrieve(image, "cat-image-" + str(count) + ".jpg")
    time.sleep(2)
[ "p47.hajela@gmail.com" ]
p47.hajela@gmail.com
82b163e0655d3a3d299eb6ece605cf2fcc3c9873
58873a31cf82764c326d9ad61a93212387c5cd52
/odoo_academy/wizard/.ipynb_checkpoints/sale_wizard-checkpoint.py
5341ed4e814de6a69b947c0399ca97294d65e167
[]
no_license
sebastian2161/ejercicio1
60cec40eb33421d9168458ccf93e24c0d74eec9f
8bd0f2c46bbc3bd939274416cb1a65de28cc29c3
refs/heads/main
2023-08-31T18:37:55.990171
2021-10-11T03:36:46
2021-10-11T03:36:46
406,099,568
0
0
null
2021-09-25T01:07:29
2021-09-13T19:17:00
Python
UTF-8
Python
false
false
1,638
py
# -*- coding: utf-8 -*-

from odoo import models, fields, api


class SaleWizard(models.TransientModel):
    _name = 'academy.sale.wizard'
    _description = 'Wizard: Rapida orden de venta para estudiantes de session'

    def _default_session(self):
        return self.env['academy.session'].browse(self._context.get('active_id'))

    session_id = fields.Many2one(comodel_name='academy.session',
                                 string='Session',
                                 required=True,
                                 default=_default_session)
    session_student_ids = fields.Many2many(comodel_name='res.partner',
                                           string='Students in the current session',
                                           related='session_id.students_ids',
                                           help='these are the students currently in the session')
    student_ids = fields.Many2many(comodel_name='res.partner',
                                   string='students for sale order')

    def create_sale_order(self):
        session_product_id = self.env['product.product'].search([('is_ssesion_product', '=', True)], limit=1)
        if session_product_id:
            for student in self.student_ids:
                order_id = self.env['sale.order'].create({
                    'partner_id': student.id,
                    'session_id': self.session_id.id,
                    'order_line': [(0, 0, {'product_id': session_product_id.id,
                                           'price_unit': self.session_id.total_price})]
                })
[ "sebastiancoll25@gmail.com" ]
sebastiancoll25@gmail.com
6b680b7c1510ab90715898f56a24b0345c4068df
0953e96ffe4e29e863fc876a8058c1940a44fdd4
/range.py
7027c250497402ec52e235da21eb7a2c4b3f1c06
[]
no_license
Harpal008/Python_Programs
651c0adb98302056acf6d605ac424a941bdd24c8
6e13f89a78be266c180b86c67b5fb2c543e42615
refs/heads/master
2020-04-17T00:02:00.864875
2019-01-16T12:15:43
2019-01-16T12:15:43
166,034,788
0
0
null
null
null
null
UTF-8
Python
false
false
241
py
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 21 18:04:54 2018

@author: harpal
"""

l = [1, 2, 3, 4, 5]
for num in l:
    print(num)

for num in range(1, 6, 2):
    print(num)  # the original printed a blank line here; printing the loop variable matches the intent

x = range(1, 6, 2)
x2 = list(x)
print(x2)
[ "noreply@github.com" ]
noreply@github.com
d2028f57e889580dbad8b3af308f120a3fc7b02a
f9d919611a8db24de3e9729cb7efe513016708c2
/Tiralabra/src_lagrange/energiat.py~
6a184d5f07ae0609ea2c2da14bb14d6aa76814b6
[]
no_license
paapu88/TiraLabra
6e4926b604fe860da4b0d11c1c59fea561c14c60
9be12c74d4261d43a2f9116d4d8e090e1bae18de
refs/heads/master
2021-05-30T13:21:04.436866
2014-06-21T19:25:42
2014-06-21T19:25:42
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,069
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module computes grid-related energy terms:
the kinetic energy of the electrons T(n(r)),
the exchange-correlation energy E_xc,
the Coulomb energy of the electrons, and
the potential from the atomic nuclei, V_ext*n(r).
(Docstrings translated from the original Finnish.)
"""

from sys import exit
from laskentaa import tee_gauss_seidel_yksi_askel
#from math import abs

def E_T(elektroni_tiheys):
    """ Compute the kinetic energy of the electron gas on the grid. """
    import numpy as np
    c = 2.871
    return abs(c*np.sum(elektroni_tiheys))

def E_vaihtokorrelaatio(elektroni_tiheys):
    """ Compute the exchange-correlation energy of the electron gas on the grid. """
    import numpy as np
    c = 0.681420222312
    # NOTE: the original used elektroni_tiheys**(4/3); under Python 2 integer
    # division 4/3 == 1, so the intended 4/3 exponent must be written as floats
    return c*np.sum(elektroni_tiheys**(4.0/3.0))

def E_elektroni_elektroni(elektroni_tiheys, V_hartree):
    """ Compute the Coulomb energy of the electron gas on the grid. """
    import numpy as np
    c = 0.5
    return c*np.sum(elektroni_tiheys * V_hartree)

def E_elektroni_ydin(ydin_tiheys, V_hartree):
    """ Compute the interaction energy between the nuclei and the electrons. """
    import numpy as np
    c = 1.0
    return c*np.sum(ydin_tiheys * V_hartree)

def get_V_hartree(V_hartree, elektroni_tiheys_3d, dx, dy, dz, n_iter=10):
    """ Compute the Hartree potential for the current electron density. """
    for step in range(n_iter):
        tee_gauss_seidel_yksi_askel(V_hartree, elektroni_tiheys_3d, dx, dy, dz)

def E_tot(elektroni_tiheys_flat, V_hartree, ydin_tiheys, dx, dy, dz):
    """ Compute the total energy of the electrons. """
    import numpy as np
    elektroni_tiheys_3d = np.array(elektroni_tiheys_flat)
    elektroni_tiheys_3d.resize(V_hartree.shape)
    # 100 iterations to obtain the Hartree potential
    get_V_hartree(V_hartree, elektroni_tiheys_3d, dx, dy, dz, 100)
    ETOT = E_T(elektroni_tiheys_3d)+E_vaihtokorrelaatio(elektroni_tiheys_3d) + \
           E_elektroni_elektroni(elektroni_tiheys_3d,V_hartree) + \
           E_elektroni_ydin(elektroni_tiheys_3d,ydin_tiheys)
    print "ETOT", ETOT
    return ETOT
[ "markus.kaukonen@iki.fi" ]
markus.kaukonen@iki.fi
1dbeb3124b5820205735e94d1f113b90adcfa8f4
680d7da38e4af5df02c92e1509462ba135a3aa38
/index/views.py
2ede79e33a57bc23f202ee27a47a3cc08fe9bf27
[]
no_license
yanjl/mydjango
047fff986fde53dfe46bc858f95a1e92043e7910
d5c199d8c7deec37081ecc7faa8bf17cfe446c56
refs/heads/master
2020-04-13T07:54:07.052125
2018-12-25T08:56:51
2018-12-25T08:56:51
163,065,632
0
0
null
null
null
null
UTF-8
Python
false
false
3,383
py
from django.contrib.auth.mixins import LoginRequiredMixin
from django.db.models import Avg, Count, Max, Min, Sum
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, redirect, render
from django.urls import reverse
from django.views.generic import DeleteView, ListView, TemplateView

from .forms import ProductForm, ProductModelForm
from .models import Author, Book, Product, Publisher

# Create your views here.
# (Comments translated from the original Chinese.)


def home_page(request):
    # return HttpResponse("<h1> you are a student<h1>")
    # absolute path
    return redirect(reverse('user:login'))


def mydate(request, year, month, day):
    return HttpResponse(f"{year}-{str(month)}-{str(day)}")


class ProductList(ListView):
    # template_name = "index/list.html"
    # context_object_name = "books"
    queryset = Book.objects.annotate(Count('authors'))
    model = Book


class Top10List(ListView):
    # template_name = 'index/top10.html'
    # context_object_name = 'publishers'
    model = Publisher
    # queryset = Publisher.top_ten_books

    # def get_queryset(self):
    #     return self.top_ten_books()

    # def get_context_data(self, *, object_list=None, **kwargs):
    #     context = super().get_context_data(object_list=object_list, **kwargs)
    #     context['books'] = Publisher.objects..top_ten_books
    #     return context


class PublisherBookList(ListView):
    template_name = 'index/book_list.html'

    def get_queryset(self):  # object_list
        self.publisher = get_object_or_404(Publisher, name=self.kwargs['publisher'])
        # Book.objects.annotate(Count('authors'))
        return Book.objects.filter(publisher=self.publisher).annotate(Count('authors'))


class IndexTemplate(LoginRequiredMixin, TemplateView):
    template_name = "index/index.html"
    login_url = "/index/login/"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["username"] = self.request.user.username
        return context
        # return super().get_context_data(**kwargs)


class ProductDetail(DeleteView):
    template_name = "index/detail.html"
    context_object_name = "book"
    model = Book

    # get_queryset reads data from the URL: self.kwargs['name'] or self.kwargs.get('name', default='x')
    # get_context_data sets the other variables used by the HTML template
    def get_context_data(self, **kwargs):
        id = self.kwargs['pk']
        context = super().get_context_data(**kwargs)
        context['authors'] = Book.objects.get(id=id).authors.all()
        return context

    # def get_object(self, queryset=None):
    #     obj = super().get_object(queryset=queryset)
    #     obj.last_accessed = timezone.now()
    #     obj.save()
    #     return obj


def login(request):
    if request.method == "GET":
        product = ProductModelForm()
        return render(request, "index/login.html", locals())
    else:
        product = ProductModelForm(request.POST)
        if product.is_valid():
            product.save()
            # name = product.cleaned_data['name']
            # type = product.cleaned_data['type']
            # Product(name=name, type=type).save()
            return redirect(reverse('user:login'))
        else:
            error_msg = product.errors.as_json()
            print(error_msg)
            return render(request, "index/login.html", locals())
[ "sinberyh@aliyun.com" ]
sinberyh@aliyun.com
3ea5bee3bd4871ba78ed230af082be4efae65c9f
d76224386c2b359d6d3228567cbb274fea8fcaab
/final_back/asgi.py
3b0602bc765f04f8b1a90f1b18b5d63842de6062
[]
no_license
SSAFY-5th-seungwoon/Moya_backend
2a270525dc3d0d53ee4b42274696d19f84edce9d
ac8d7004dafef9a4f9030dbe3a5762661f3f06ac
refs/heads/master
2023-05-22T20:45:07.230178
2021-06-16T07:44:05
2021-06-16T07:44:05
369,787,652
1
0
null
null
null
null
UTF-8
Python
false
false
397
py
""" ASGI config for final_back project. It exposes the ASGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/ """ import os from django.core.asgi import get_asgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'final_back.settings') application = get_asgi_application()
[ "swhan9404@naver.com" ]
swhan9404@naver.com
2366d7935f4314c749785ebdab31ec3c1f338625
2d80296b4f13e3a5fb9380b89c626d418eb53a60
/python/8_kyu/Simple_multiplication.py
6108b15497b97099450c429569df0c4e551eb5e9
[]
no_license
ivan3123708/codewars
680484b32ac6347ebbcf311b5b98fc697a063927
73e99695a6e27c30baf17b8d4841462d702810a3
refs/heads/master
2020-03-19T08:32:05.863217
2018-10-28T00:17:34
2018-10-28T00:17:34
136,212,296
0
0
null
null
null
null
UTF-8
Python
false
false
104
py
def simple_multiplication(number):
    if number % 2 == 0:
        return number * 8
    else:
        return number * 9
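# --- Illustrative checks (added; not part of the original kata solution) ---
# Even inputs are multiplied by 8, odd inputs by 9.
if __name__ == '__main__':
    assert simple_multiplication(2) == 16
    assert simple_multiplication(3) == 27
    assert simple_multiplication(0) == 0  # zero is even, so 0 * 8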
[ "ivan3123708@gmail.com" ]
ivan3123708@gmail.com
0daffa52db933f8ae220251c9fef1d3d8ab76afb
403ddbd5e1b59392991bf8ef3d8c20ead27e7e5f
/trader/language/chinese/constant.py
e79010c64ad91cfd0e3abd14cef2c66b5d034b15
[]
no_license
guanzidashu/vnpylib
f2b44e08d82d6a24ed154535fc98b936eeb4d8d9
6ee29f993070360b4ceef02e04b277ff3c9a8f14
refs/heads/master
2020-03-18T19:34:09.935449
2018-06-09T16:47:49
2018-06-09T16:47:49
135,162,129
0
0
null
null
null
null
UTF-8
Python
false
false
3,404
py
# encoding: UTF-8
# (Section and inline comments translated from the original Chinese;
#  the constant string values themselves are data and are kept as-is.)

# Default empty values
EMPTY_STRING = ''
EMPTY_UNICODE = u''
EMPTY_INT = 0
EMPTY_FLOAT = 0.0

# Direction constants
DIRECTION_NONE = u'无方向'
DIRECTION_LONG = u'duo'
DIRECTION_SHORT = u'kong'
DIRECTION_UNKNOWN = u'未知'
DIRECTION_NET = u'净'
DIRECTION_SELL = u'卖出'            # IB interface
DIRECTION_COVEREDSHORT = u'备兑空'  # equity options

# Offset (open/close) constants
OFFSET_NONE = u'wukaipin'
OFFSET_OPEN = u'kaicang'
OFFSET_CLOSE = u'pingcang'
OFFSET_CLOSETODAY = u'平今'
OFFSET_CLOSEYESTERDAY = u'平昨'
OFFSET_UNKNOWN = u'weizhi'

# Status constants
STATUS_NOTTRADED = u'未成交'
STATUS_PARTTRADED = u'部分成交'
STATUS_ALLTRADED = u'全部成交'
STATUS_CANCELLED = u'已撤销'
STATUS_REJECTED = u'拒单'
STATUS_UNKNOWN = u'未知'

# Product type constants
PRODUCT_EQUITY = u'股票'
PRODUCT_FUTURES = u'期货'
PRODUCT_OPTION = u'期权'
PRODUCT_INDEX = u'指数'
PRODUCT_COMBINATION = u'组合'
PRODUCT_FOREX = u'外汇'
PRODUCT_UNKNOWN = u'未知'
PRODUCT_SPOT = u'现货'
PRODUCT_DEFER = u'延期'
PRODUCT_ETF = u'ETF'
PRODUCT_WARRANT = u'权证'
PRODUCT_BOND = u'债券'
PRODUCT_NONE = ''

# Price type constants
PRICETYPE_LIMITPRICE = u'限价'
PRICETYPE_MARKETPRICE = u'市价'
PRICETYPE_FAK = u'FAK'
PRICETYPE_FOK = u'FOK'

# Option types
OPTION_CALL = u'看涨期权'
OPTION_PUT = u'看跌期权'

# Exchange types
EXCHANGE_SSE = 'SSE'        # Shanghai Stock Exchange
EXCHANGE_SZSE = 'SZSE'      # Shenzhen Stock Exchange
EXCHANGE_CFFEX = 'CFFEX'    # China Financial Futures Exchange
EXCHANGE_SHFE = 'SHFE'      # Shanghai Futures Exchange
EXCHANGE_CZCE = 'CZCE'      # Zhengzhou Commodity Exchange
EXCHANGE_DCE = 'DCE'        # Dalian Commodity Exchange
EXCHANGE_SGE = 'SGE'        # Shanghai Gold Exchange
EXCHANGE_INE = 'INE'        # Shanghai International Energy Exchange
EXCHANGE_UNKNOWN = 'UNKNOWN'  # unknown exchange
EXCHANGE_NONE = ''          # empty exchange
EXCHANGE_HKEX = 'HKEX'      # Hong Kong Stock Exchange
EXCHANGE_HKFE = 'HKFE'      # Hong Kong Futures Exchange

EXCHANGE_SMART = 'SMART'        # IB smart routing (stocks, options)
EXCHANGE_NYMEX = 'NYMEX'        # IB futures
EXCHANGE_GLOBEX = 'GLOBEX'      # CME electronic trading platform
EXCHANGE_IDEALPRO = 'IDEALPRO'  # IB forex ECN

EXCHANGE_CME = 'CME'  # CME exchange
EXCHANGE_ICE = 'ICE'  # ICE exchange
EXCHANGE_LME = 'LME'  # LME exchange

EXCHANGE_OANDA = 'OANDA'  # OANDA forex market maker
EXCHANGE_FXCM = 'FXCM'    # FXCM forex market maker

EXCHANGE_OKCOIN = 'OKCOIN'  # OKCOIN bitcoin exchange
EXCHANGE_HUOBI = 'HUOBI'    # Huobi bitcoin exchange
EXCHANGE_LBANK = 'LBANK'    # LBANK bitcoin exchange
EXCHANGE_KORBIT = 'KORBIT'  # KORBIT Korean exchange
EXCHANGE_ZB = 'ZB'          # ZB bitcoin exchange (China)
EXCHANGE_OKEX = 'OKEX'      # OKEX bitcoin exchange

EXCHANGE_ZAIF = "ZAIF"            # ZAIF Japanese bitcoin exchange
EXCHANGE_COINCHECK = "COINCHECK"  # COINCHECK Japanese bitcoin exchange

# Currency types
CURRENCY_USD = 'USD'          # US dollar
CURRENCY_CNY = 'CNY'          # renminbi
CURRENCY_HKD = 'HKD'          # Hong Kong dollar
CURRENCY_UNKNOWN = 'UNKNOWN'  # unknown currency
CURRENCY_NONE = ''            # empty currency

# Database
LOG_DB_NAME = 'VnTrader_Log_Db'

# Gateway types
GATEWAYTYPE_EQUITY = 'equity'                # stocks, ETFs, bonds
GATEWAYTYPE_FUTURES = 'futures'              # futures, options, precious metals
GATEWAYTYPE_INTERNATIONAL = 'international'  # overseas markets
GATEWAYTYPE_BTC = 'btc'                      # bitcoin
GATEWAYTYPE_DATA = 'data'                    # data (non-trading)
[ "635518957@qq.com" ]
635518957@qq.com
222738c4f873a2f9f0c0cadf200ebad4fb8da0ce
816ef0bc4abc1ebeba9fde7f857fa59f31c4b933
/lab04/jsonparser_test.py
98475d3a92bc43b662dc9523b6ff6e2623a343cb
[]
no_license
Gogomoe/SUSTech_CS323_works
cc7dbb97df8a85055dc23f1c0dc72c72bba47901
5a142c665be8c5cf5342f2e89d16aea6e6d756d1
refs/heads/master
2023-02-14T04:39:13.835332
2021-01-07T13:11:04
2021-01-07T13:11:04
293,798,996
0
0
null
null
null
null
UTF-8
Python
false
false
1,239
py
#!/usr/bin/env python3

import pathlib
import re
import subprocess

DATA = pathlib.Path('data')

def jsonparser_output(json_file):
    out = subprocess.check_output(['./jp.out', json_file])
    return out.decode().strip()

def check_jsonchecker_fail_withlexical():
    data = DATA/'jsonchecker'
    for failjson in data.glob('fail*.json'):
        out = jsonparser_output(failjson)
        if ('lexical error' not in out) or ('_EXCLUDE' in failjson.name):
            continue
        print(f'For file {failjson.name}:')
        print(out)
        print('-'*80)

def check_jsonchecker_fail_syntaxonly():
    data = DATA/'jsonchecker'
    recovered, total = 0, 0
    for failjson in data.glob('fail*.json'):
        out = jsonparser_output(failjson)
        if ('lexical error' in out) or ('_EXCLUDE' in failjson.name):
            continue
        print(f'For file {failjson.name}:')
        print('-'*24)
        print(open(failjson).read())
        print('-'*80)
        print(out)
        print('#'*80)
        m = re.match(r'^syntax(.*?)recovered$', out)
        recovered += bool(m)
        total += 1
    print(f'Recovered/Total: {recovered}/{total}')

# check_jsonchecker_fail_withlexical()
check_jsonchecker_fail_syntaxonly()
[ "gogo81745.moe@gmail.com" ]
gogo81745.moe@gmail.com
af4765dd6396d538544701205a413d2b3b2edca6
16adaa2980c6d72945721cf5b58cc44fd7e3cb9c
/conditions.py
7c12d3ae0e533c6e51b6d425626b6260e0a16bd2
[]
no_license
sathishtammalla/PythonLearning
d23a7c267ff605dc0762b2d45d9769e7a2f61645
4a1394b5e831bea378b13be34be3cc87505538f4
refs/heads/master
2020-05-02T23:36:15.230662
2019-04-05T08:24:45
2019-04-05T08:24:45
178,284,759
0
0
null
null
null
null
UTF-8
Python
false
false
448
py
def conditions():
    """IF Condition Check Program"""
    print('IF Conditions Check Program')
    print('Enter a Number for X')
    # convert to int: input() returns a string, and comparing strings is
    # lexicographic (e.g. "10" < "9"), which gives the wrong branch
    x = int(input())
    print("Entered X Value :{0}".format(x))
    print('Enter a Number for Y')
    y = int(input())
    print("Entered Y Value :{0}".format(y))
    if x < y:
        print('X is less than Y')
    elif x > y:
        print('X is greater than Y')
    else:
        print('X and Y are Equal')

conditions()
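# --- Example session (added; illustrative) ---
# With the int() conversion above, 3 and 10 compare numerically:
#   Enter a Number for X
#   3
#   Entered X Value :3
#   Enter a Number for Y
#   10
#   Entered Y Value :10
#   X is less than Y
# Without the conversion, "3" > "10" lexicographically, taking the wrong branch.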
[ "stammall@starbucks.com" ]
stammall@starbucks.com
ef12a61a3f9668366b02a4f68d57fc5cb87247f6
f9d4eee81dda90e41ee755f333e0d787dab749db
/eth2/beacon/scripts/quickstart_state/generate_beacon_genesis.py
2ce8f76b744662551c3a38e4b0081d708144e70a
[ "MIT" ]
permissive
Palem1988/trinity
f10f21119a7ea98a7fc9458e5ff05b1e4cf6a021
79c21f8ae90bc765a78cb8052af0e4271e4c25e1
refs/heads/master
2022-02-20T05:21:18.576796
2019-09-24T22:09:21
2019-09-24T22:40:24
210,869,348
0
1
MIT
2019-09-25T14:45:12
2019-09-25T14:45:04
null
UTF-8
Python
false
false
1,982
py
from pathlib import Path
import time

from eth2._utils.hash import hash_eth2
from eth2.beacon.genesis import initialize_beacon_state_from_eth1
from eth2.beacon.tools.builder.initializer import create_mock_deposits_and_root
from eth2.beacon.tools.fixtures.config_types import Minimal
from eth2.beacon.tools.fixtures.loading import load_config_at_path, load_yaml_at
from eth2.beacon.tools.misc.ssz_vector import override_lengths

ROOT_DIR = Path("eth2/beacon/scripts")
KEY_SET_FILE = Path("keygen_16_validators.yaml")


def _load_config(config):
    config_file_name = ROOT_DIR / Path(f"config_{config.name}.yaml")
    return load_config_at_path(config_file_name)


def _main():
    config_type = Minimal
    config = _load_config(config_type)
    override_lengths(config)

    key_set = load_yaml_at(ROOT_DIR / KEY_SET_FILE)

    pubkeys = ()
    privkeys = ()
    withdrawal_credentials = ()
    keymap = {}
    for key_pair in key_set:
        pubkey = key_pair["pubkey"].to_bytes(48, byteorder="big")
        privkey = key_pair["privkey"]
        withdrawal_credential = (
            config.BLS_WITHDRAWAL_PREFIX.to_bytes(1, byteorder="big")
            + hash_eth2(pubkey)[1:]
        )

        pubkeys += (pubkey,)
        privkeys += (privkey,)
        withdrawal_credentials += (withdrawal_credential,)

        keymap[pubkey] = privkey

    deposits, _ = create_mock_deposits_and_root(
        pubkeys, keymap, config, withdrawal_credentials
    )

    eth1_block_hash = b"\x42" * 32
    # NOTE: this timestamp is a placeholder
    eth1_timestamp = 10000
    state = initialize_beacon_state_from_eth1(
        eth1_block_hash=eth1_block_hash,
        eth1_timestamp=eth1_timestamp,
        deposits=deposits,
        config=config,
    )

    genesis_time = int(time.time())
    print(f"creating genesis at time {genesis_time}")
    genesis_state = state.copy(genesis_time=genesis_time)
    print(genesis_state.hash_tree_root.hex())


if __name__ == "__main__":
    _main()
[ "r.alex.stokes@gmail.com" ]
r.alex.stokes@gmail.com
cc8600545b3e22e4cd60c9f8edcd14f0dad2ed45
b8cb2d3dffdba0bec43352ae77fb81b7e4caf91d
/src/face_rec_flask.py
16868f2d812c1b2b19d01d0f162eb09d733753ae
[]
no_license
thangnch/MiAI_FaceRecog_2
f873b79a1353af3992ce3389f74d155918257f3a
cc0524adb708b4b2ed4b23f486f19431670efc59
refs/heads/master
2023-04-02T11:13:33.182950
2022-08-06T14:49:53
2022-08-06T14:49:53
207,716,157
10
20
null
2023-03-24T23:39:48
2019-09-11T03:35:56
Python
UTF-8
Python
false
false
3,796
py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from flask import Flask
from flask import render_template, request
from flask_cors import CORS, cross_origin
import tensorflow as tf
import argparse
import facenet
import os
import sys
import math
import pickle
import align.detect_face
import numpy as np
import cv2
import collections
from sklearn.svm import SVC
import base64

MINSIZE = 20
THRESHOLD = [0.6, 0.7, 0.7]
FACTOR = 0.709
IMAGE_SIZE = 182
INPUT_IMAGE_SIZE = 160
CLASSIFIER_PATH = '../Models/facemodel.pkl'
FACENET_MODEL_PATH = '../Models/20180402-114759.pb'

# Load The Custom Classifier
with open(CLASSIFIER_PATH, 'rb') as file:
    model, class_names = pickle.load(file)
print("Custom Classifier, Successfully loaded")

tf.Graph().as_default()
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.6)
sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options, log_device_placement=False))

# Load the model
print('Loading feature extraction model')
facenet.load_model(FACENET_MODEL_PATH)

# Get input and output tensors
images_placeholder = tf.get_default_graph().get_tensor_by_name("input:0")
embeddings = tf.get_default_graph().get_tensor_by_name("embeddings:0")
phase_train_placeholder = tf.get_default_graph().get_tensor_by_name("phase_train:0")
embedding_size = embeddings.get_shape()[1]

pnet, rnet, onet = align.detect_face.create_mtcnn(sess, "align")

app = Flask(__name__)
CORS(app)


@app.route('/')
@cross_origin()
def index():
    return "OK!"


@app.route('/recog', methods=['POST'])
@cross_origin()
def upload_img_file():
    if request.method == 'POST':
        # base 64
        name = "Unknown"
        f = request.form.get('image')
        w = int(request.form.get('w'))
        h = int(request.form.get('h'))

        decoded_string = base64.b64decode(f)
        frame = np.fromstring(decoded_string, dtype=np.uint8)
        # frame = frame.reshape(w, h, 3)
        frame = cv2.imdecode(frame, cv2.IMREAD_ANYCOLOR)  # cv2.IMREAD_COLOR in OpenCV 3.1

        bounding_boxes, _ = align.detect_face.detect_face(frame, MINSIZE, pnet, rnet, onet, THRESHOLD, FACTOR)
        faces_found = bounding_boxes.shape[0]

        if faces_found > 0:
            det = bounding_boxes[:, 0:4]
            bb = np.zeros((faces_found, 4), dtype=np.int32)
            for i in range(faces_found):
                bb[i][0] = det[i][0]
                bb[i][1] = det[i][1]
                bb[i][2] = det[i][2]
                bb[i][3] = det[i][3]

                cropped = frame
                # cropped = frame[bb[i][1]:bb[i][3], bb[i][0]:bb[i][2], :]
                scaled = cv2.resize(cropped, (INPUT_IMAGE_SIZE, INPUT_IMAGE_SIZE), interpolation=cv2.INTER_CUBIC)
                scaled = facenet.prewhiten(scaled)
                scaled_reshape = scaled.reshape(-1, INPUT_IMAGE_SIZE, INPUT_IMAGE_SIZE, 3)
                feed_dict = {images_placeholder: scaled_reshape, phase_train_placeholder: False}
                emb_array = sess.run(embeddings, feed_dict=feed_dict)

                predictions = model.predict_proba(emb_array)
                best_class_indices = np.argmax(predictions, axis=1)
                best_class_probabilities = predictions[
                    np.arange(len(best_class_indices)), best_class_indices]
                best_name = class_names[best_class_indices[0]]
                print("Name: {}, Probability: {}".format(best_name, best_class_probabilities))

                if best_class_probabilities > 0.8:
                    name = class_names[best_class_indices[0]]
                else:
                    name = "Unknown"

    return name


if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0', port='8000')
[ "noreply@github.com" ]
noreply@github.com
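A minimal client sketch for exercising the /recog endpoint above, assuming the server runs locally on port 8000 and that face.jpg is a hypothetical test image:

import base64
import cv2
import requests

img = cv2.imread("face.jpg")  # hypothetical test image
h, w = img.shape[:2]
with open("face.jpg", "rb") as f:
    encoded = base64.b64encode(f.read()).decode()

resp = requests.post("http://localhost:8000/recog",
                     data={"image": encoded, "w": w, "h": h})
print(resp.text)  # the predicted name, or "Unknown"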
179469489a69ca59e2930a07ad28fb243302e0f3
12c1b33e27d841bb25899d6601f1de75b522d88d
/python/udacity/draw_turtles.py
6d504799dbe85482a068035ecfff4a600108ee55
[]
no_license
conflabermits/Scripts
ec27456b5b26ad7b1edaf30686addff2cacc6619
c91ef0594dda1228a523fcaccb4af3313d370718
refs/heads/main
2023-07-06T21:41:12.033118
2023-06-25T19:24:54
2023-06-25T19:24:54
66,151,253
4
0
null
2023-09-10T19:56:17
2016-08-20T14:35:34
HTML
UTF-8
Python
false
false
893
py
import turtle


def draw_square(a_turtle):
    for i in range(0, 4):
        a_turtle.forward(100)
        a_turtle.right(90)


def draw_circle(a_turtle):
    a_turtle.circle(100)


def draw_triangle(a_turtle):
    for i in range(0, 3):
        a_turtle.forward(100)
        a_turtle.left(120)


def draw_turtles():
    window = turtle.Screen()
    window.bgcolor("red")

    brad = turtle.Turtle()
    brad.shape("turtle")
    brad.color("black", "green")
    brad.speed(10)

    # angie = turtle.Turtle()
    # angie.shape("circle")
    # angie.color("blue")

    # charlie = turtle.Turtle()
    # charlie.shape("arrow")
    # charlie.color("yellow")
    # charlie.speed(4)
    # charlie.left(180)

    for i in range(0, 72):
        draw_square(brad)
        brad.right(95)

    # draw_square(brad)
    # draw_circle(angie)
    # draw_triangle(charlie)

    window.exitonclick()


draw_turtles()
[ "christopher.dunaj@gmail.com" ]
christopher.dunaj@gmail.com
42835590fa2d772e8fd35ff631574e8c3dda8650
2f30cf20d58e2cde4037441e67213223c69a6998
/lesson19_接口2/d02request.py
34aa5e3f987860394f8ccb9da1afab99314bd07e
[]
no_license
zengcong1314/python1205
b11db7de7d0ad1f8401b8b0c9b20024b4405ae6c
da800ed3374d1d43eb75485588ddb8c3a159bb41
refs/heads/master
2023-05-25T07:17:25.065004
2021-06-08T08:27:54
2021-06-08T08:27:54
318,685,835
0
0
null
null
null
null
UTF-8
Python
false
false
618
py
""" 第三方库 pip install requests python 用来发送HTTP 请求 """ import requests #发送get 请求 url = "http://www.keyou.site:8000/projects/" p = {"a":"b"} resp = requests.get(url,params=p) print(resp) print(resp.status_code) # 字符串 print(resp.text) # 字典 print(resp.json()) # post请求 url = "http://www.keyou.site:8000/user/login/" # 请求参数:json 格式的body data = { "username":"lemon1", "password":"123456" } header = { "Authorization":"JWT fow" } resp2 = requests.post(url,json=data,headers=header) print(resp2.json()) # query string: params={} # json json={} # headers
[ "237886015@qq.com" ]
237886015@qq.com
0f3aaea69808c239b235c44f472f9e05b0f6e1ab
63cf686bf970d28c045719de2f0e7e9dae5bed15
/Contains Duplicate .py
f7d07154cc28b4a5d52c30ce29ed8bc9695a4146
[]
no_license
menard-noe/LeetCode
6461bda4a076849cf69f2cd87999275f141cc483
4e9c50d256c84d1b830a7642b265619a0b69d542
refs/heads/master
2022-12-13T09:41:41.682555
2020-09-14T12:46:53
2020-09-14T12:46:53
282,481,920
0
0
null
null
null
null
UTF-8
Python
false
false
443
py
import math
from typing import List


class Solution:
    def containsDuplicate(self, nums: List[int]) -> bool:
        dico = dict()
        for num in nums:
            if num in dico:
                return True
            else:
                dico[num] = 0
        return False


if __name__ == "__main__":
    # execute only if run as a script
    input = [1, 2, 3, 3]
    solution = Solution()
    print(solution.containsDuplicate(input))
[ "noe.menard4@gmail.com" ]
noe.menard4@gmail.com
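For reference, the same check is often written with a set; the dictionary version above has the advantage of returning early on the first duplicate, while the set version below scans the whole list once:

class SolutionSetBased:
    def containsDuplicate(self, nums) -> bool:
        # A set drops duplicates, so the lengths differ iff one exists.
        return len(set(nums)) != len(nums)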
b41947cab9b6e2b7a277cf5fc0c50563580e1d5b
ca2054e1cce228c0ff310797f4f4e884dcfd65f0
/thunder/ndarray/__init__.py
01a26e2e659e8548af33ed8857b96085e859f328
[]
no_license
gzhcv/deeplearning_framework
32a6e0c5d240e97fede129cb9795000b0169465d
3b535e4d7ad06f0855fc29b8d236eb9f77e23f74
refs/heads/master
2023-06-16T23:19:07.457070
2021-07-15T11:40:57
2021-07-15T11:40:57
384,694,836
2
0
null
null
null
null
UTF-8
Python
false
false
74
py
from . import ndarry
from . import gpu_op

__all__ = ['ndarry', 'gpu_op']
[ "guozhaohong@huawei.com" ]
guozhaohong@huawei.com
f4b310eeda479456a3e4f29b67d0d49ef4b40cdc
c951f19c5a1ffe1772772eab3e909d85164e671f
/{{cookiecutter.project_slug}}/code/custom_verification/verify_and_redirect/redirect.py
b2e0ef2ce9df63162f35426006044681310a97e3
[]
no_license
barabanpan/aws-cognito-serverless
dd2d463368aeacbecda32d7f490d0962fe21f060
4b069684e016517528e5ccb1bb8048337291e28f
refs/heads/main
2023-07-19T10:17:20.403070
2021-09-12T07:05:38
2021-09-12T07:05:38
312,616,616
1
0
null
2020-12-29T15:48:30
2020-11-13T15:43:03
Python
UTF-8
Python
false
false
1,633
py
import boto3
import logging
import os

from utils import redirect_to

cognito = boto3.client("cognito-idp")

CLIENT_ID = os.environ.get("COGNITO_CLIENT_ID")
USER_POOL_ID = os.environ.get("COGNITO_USER_POOL_ID")
REDIRECT_AFTER_VERIFICATION = os.environ.get("URL_FOR_REDIRECT_AFTER_COGNITO_VERIFICATION")
REDIRECT_WHEN_CODE_EXPIRED = os.environ.get("URL_FOR_REDIRECT_IF_CODE_IS_EXIPIRED")
REDIRECT_WHEN_ALREADY_CONFIRMED = os.environ.get("URL_FOR_REDIRECT_IF_ALREADY_VERIFIED")


def handler(event, context):
    params = event.get("queryStringParameters") or dict()  # can be None in event
    email, code = params.get("email"), params.get("code")
    try:
        cognito.confirm_sign_up(
            ClientId=CLIENT_ID,
            Username=email,
            ConfirmationCode=code,
        )
    except cognito.exceptions.ExpiredCodeException:
        user = cognito.admin_get_user(
            UserPoolId=USER_POOL_ID,
            Username=email
        )
        # check if email is already verified
        email_verified = [attr["Value"] for attr in user["UserAttributes"]
                          if attr["Name"] == "email_verified"][0]
        if email_verified == "true":
            return redirect_to(REDIRECT_WHEN_ALREADY_CONFIRMED)
        return redirect_to(REDIRECT_WHEN_CODE_EXPIRED)
    except (cognito.exceptions.CodeMismatchException,
            cognito.exceptions.LimitExceededException) as e:
        # here should be some page with "Oops, something went wrong.
        # Please request another verification link later"
        logging.warning(repr(e))
        return redirect_to(REDIRECT_WHEN_CODE_EXPIRED)
    return redirect_to(REDIRECT_AFTER_VERIFICATION)
[ "nataliia.dyshko@jetsoftpro.com" ]
nataliia.dyshko@jetsoftpro.com
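The redirect_to helper is imported from utils and not shown in this file; a plausible sketch, under the assumption that the handler sits behind API Gateway and redirects via an HTTP 302 response:

# Hypothetical sketch only; the real utils.redirect_to is not shown here.
def redirect_to(url):
    return {
        "statusCode": 302,
        "headers": {"Location": url},
        "body": "",
    }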
1463b421ec5ef0507e7c270799171f92034fc335
e090d298340aa0cb12fc0d2ca3874cf4c9d9c547
/text-analysis-scripts/preprocess_split_data.py
aa465036a8cdf54938b7b47a8a4832cddbdd43ee
[]
no_license
mslovett21/crisis-computing
1d00b7f665ff63b48359b4ced6e06e133b5996b3
33b296426df78cd9051934f0aa36c5537a969ba3
refs/heads/master
2023-07-29T13:23:33.536024
2021-09-16T04:22:58
2021-09-16T04:22:58
309,000,861
2
1
null
null
null
null
UTF-8
Python
false
false
2,509
py
import os
import pandas as pd
import numpy as np
from shutil import copy
import glob
import nltk
from nltk.corpus import stopwords
from itertools import chain
nltk.download('stopwords')
import re

# access data folders for csv
out_path = r'../data/tweets_csv'
INFORMATIVE_TWEETS = out_path + "/INFORMATIVE_TWEETS/"
NONINFORMATIVE_TWEETS = out_path + "/NONINFORMATIVE_TWEETS/"
INFORMATIVE_FILES = glob.glob(INFORMATIVE_TWEETS + "/*.csv")
NONINFORMATIVE_FILES = glob.glob(NONINFORMATIVE_TWEETS + "/*.csv")


def read_data(files):
    dfs = []
    for file in files:
        df = pd.read_csv(file, error_bad_lines=False)
        dfs.append(df)
    final_df = pd.concat(dfs)
    return final_df


def create_tweets_df():
    info_df = read_data(INFORMATIVE_FILES)
    noninfo_df = read_data(NONINFORMATIVE_FILES)
    info_df["text_info"] = 1
    noninfo_df["text_info"] = 0
    final_df = pd.concat([info_df, noninfo_df])
    return final_df


def preprocess_tweets(tweet):
    tweet = str(tweet)
    # Removing URL mentions
    tweet = ' '.join(re.sub(r"(?:\@|http?\://|https?\://|www)\S+", "", tweet).split())
    tweet = ' '.join(re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)", " ", tweet).split())
    # Removing stopwords
    stop = stopwords.words('english')
    tweet = ' '.join([word for word in tweet.split() if word not in (stop)])
    # Removing punctuations (str.replace is literal, so a regex substitution is needed)
    tweet = re.sub(r'[^\w\s]', '', tweet)
    tweet = tweet.lower()
    return tweet


# over all 12,762 tweets, out of this 11,407 unique tweets
# informative 8463, noninformative 4299
if __name__ == '__main__':
    try:
        tweets_df = create_tweets_df()
        tweets_df['tweet_text'] = tweets_df.apply(lambda x: preprocess_tweets(x['tweet_text']), axis=1)
        tweets_df = tweets_df.sample(frac=1)

        num_tweets = len(tweets_df)
        trainset_size, valset_size, testset_size = int(num_tweets * 0.7), int(num_tweets * 0.15), int(num_tweets * 0.15)
        print(trainset_size)
        print(valset_size)
        print(testset_size)

        train, validate, test = np.split(tweets_df, [trainset_size, (valset_size + trainset_size)])

        PATH = "../data/final_tweets/"
        os.mkdir(PATH)
        tweets_df.to_csv(PATH + "full_tweets_df.csv", index=False)
        train.to_csv(PATH + "train_df.csv", index=False)
        validate.to_csv(PATH + "validate_df.csv", index=False)
        test.to_csv(PATH + "test_df.csv", index=False)
    except Exception as e:
        print("Exception Occured::\n", e)
[ "patrycja.krawczuk@gmail.com" ]
patrycja.krawczuk@gmail.com
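A quick sanity check of the np.split call used above: the two indices mark the train/validate and validate/test boundaries, giving a 70/15/15 split. A standalone sketch:

import numpy as np
import pandas as pd

df = pd.DataFrame({"x": range(100)})  # stand-in for the shuffled tweets
train, validate, test = np.split(df, [70, 85])  # boundaries at 70% and 85%
print(len(train), len(validate), len(test))     # 70 15 15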
fb523bd643df5c81f1bc335ef9c4213751f44767
2d0fb78dde6029a971ab0310513e1d89811ce476
/bilibili/bili/bili/spiders/getFansFromSpaceFans.py
1b032d33ff6dfb86c63006426f967c3ee9b0d672
[]
no_license
biubiudragon/WebCmdMining
a7fd03a54363c1e74aea0e0bf81ffbee85eacdee
bd7efc80070ff7d63c89df0e6579c7df1502c3a1
refs/heads/master
2021-05-30T23:53:20.900803
2016-03-10T00:39:59
2016-03-10T00:39:59
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,378
py
# -*- coding: utf-8 -*-
__author__ = 'AllenCHM'
from scrapy.spider import BaseSpider
import json
import pymongo
from scrapy import Request
from datetime import datetime
import re
from bili.settings import MONGOHOST


class GetFansFromSpaceFansScrapy(BaseSpider):
    name = u'getFansFromSpaceFans'
    allowed_domains = [u'bilibili.com', ]

    def __init__(self):
        self.connectionMongoDB = pymongo.MongoClient(host=MONGOHOST, port=27017)
        self.db = self.connectionMongoDB['bilibili']
        self.userInfo = self.db["userInfo"]

    def start_requests(self):
        userInfoUrl = u'http://space.bilibili.com/ajax/friend/GetFansList?mid={}&page=1'
        tmp = self.userInfo.find({}, {u'mid': 1})  # mid is the user id
        for i in tmp:
            yield Request(userInfoUrl.format(str(i[u'mid'])),
                          meta={u'mid': i[u'mid']},
                          callback=self.parseUserFansJson)

    def parseUserFansJson(self, response):
        try:
            tmp = json.loads(response.body)
            if tmp[u'status']:
                for i in tmp[u'data'][u'list']:
                    data = {
                        u'mid': i[u'fid'],
                        u'addtime': i[u'addtime'],
                        u'uname': i[u'uname'],
                    }
                    self.userInfo.update({u'mid': str(response.meta[u'mid'])},
                                         {u'$addToSet': {u'fansList': data}})
                    t = self.userInfo.find({u'mid': str(i[u'fid'])})
                    if not t.count():
                        userInfoUrl = u'http://space.bilibili.com/ajax/member/GetInfo?mid='
                        yield Request(userInfoUrl + str(i[u'fid']),
                                      callback=self.parseUserInfoJson)
                # for k in xrange(tmp[u'data'][u'pages']):
                # the site only allows viewing the first 5 pages
                for k in xrange(6):
                    url = response.url.replace(re.findall(u'page=.*', response.url)[0],
                                               u'page=' + str(k))
                    yield Request(url, meta={u'mid': response.meta[u'mid']},
                                  callback=self.parseUserFansJson)
        except:
            pass

    def parseUserInfoJson(self, response):
        try:
            tmp = json.loads(response.body)
            if tmp[u'status']:
                self.userInfo.update({u'mid': tmp[u'data'][u'mid']}, tmp[u'data'], True)
        except:
            pass

    def spider_close(self):
        self.connectionMongoDB.close()
[ "chengchuanming@hotmail.com" ]
chengchuanming@hotmail.com
74b87ca9cb07bcf0b829fb9f8d1acca0b0fd7381
182dd5305aedeaa197f302c0d830ab85413cdd53
/plugins/Filters/Convert2Gray/Convert2Gray.py
68772b2605be2aa6796c95576bfe72f1a8208b5f
[ "MIT" ]
permissive
UmSenhorQualquer/workflow-editor
016dbf47759b2572a811b80fc8bc79c88404c4ab
6f836f99e155c2f503cf59adf4e8b8b574184e6d
refs/heads/master
2021-01-24T18:58:13.224476
2017-07-20T10:00:10
2017-07-20T10:00:10
86,163,117
2
0
null
null
null
null
UTF-8
Python
false
false
1,274
py
import core.utils.tools as tools, cv2
from core.modules.OTModulePlugin import OTModulePlugin
from core.modules.ModuleConnection import ModuleConnection
from datatypes.TypeComponentsVideoPipe import TypeComponentsVideoPipe
from datatypes.TypeColorVideoPipe import TypeColorVideoPipe
from datatypes.TypeColorVideo import TypeColorVideo
from pyforms.Controls import ControlPlayer
from pyforms.Controls import ControlCombo
from pyforms.Controls import ControlButton


class Convert2Gray(OTModulePlugin, TypeColorVideoPipe):

    def __init__(self, name):
        icon_path = tools.getFileInSameDirectory(__file__, 'iconsubbg.jpg')
        OTModulePlugin.__init__(self, name, iconFile=icon_path)
        TypeColorVideoPipe.__init__(self)

        self._video = ModuleConnection("Video", connecting=TypeColorVideo)
        self._player = ControlPlayer("Video player")

        self._video.changed = self.newVideoInputChoosen
        self._player.processFrame = self.processFrame

        self._formset = [
            '_video',
            '_player',
        ]

    def newVideoInputChoosen(self):
        ModuleConnection.changed_event(self._video)
        value = self._video.value
        if value:
            self.open(value)
            self._player.value = value

    def processFrame(self, frame):
        return cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
[ "ricardo.ribeiro@neuro.fchampalimaud.org" ]
ricardo.ribeiro@neuro.fchampalimaud.org
865969b8d3d2edcbf84ae4aacf778ed65aedfbbe
54495b3d41ff1a08ce6d786c8c0304b9850dffeb
/Iterator.py
bfde76530fc5a732c3a03668c1ed9326f76e3f3f
[]
no_license
Sakshi1504/Python-OOPS
d2324b923adcfed1179a709b9f9c21695e82753e
9c8ffe900131319737f5f5ee3c4e74adeb3f351e
refs/heads/main
2023-03-11T19:53:05.142711
2021-02-28T03:35:00
2021-02-28T03:35:00
343,012,299
0
0
null
null
null
null
UTF-8
Python
false
false
612
py
'''
num = [5, 1, 2, 3]
print(num[2])

it = iter(num)
print(it.__next__())
print(it.__next__())
# an iterator only moves forward; it never repeats a value it has already yielded
for i in it:
    print(i)
'''


class TopTen:
    def __init__(self):
        self.num = 1

    def __iter__(self):
        return self

    def __next__(self):
        if self.num <= 10:
            value = self.num
            self.num += 1
            return value
        else:
            raise StopIteration


t = TopTen()
for i in t:
    print(i)

# print(t.__iter__())
# print(t.__next__())
# print(next(t))
[ "noreply@github.com" ]
noreply@github.com
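A short usage sketch of the custom iterator above; because __iter__ returns self, the object is exhausted after one pass:

t = TopTen()
print(list(t))  # [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
print(list(t))  # [] -- the iterator has no way to reset itself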
123dd34179bbc5e5a5f6710a9269b70888902b80
485549f6ceedd40fb8d22b23403df011eb13d988
/utils/import_csv.py
8fe3facec6b4ad1564e34ee2a588621250f0049b
[ "MIT" ]
permissive
aasmune/RevNet
f1f6dbc4c20bd2baa02601f2609125061d30bcdd
f96edce499b2ba7d80c578577b06ede6572b9367
refs/heads/master
2020-04-21T06:57:57.883310
2019-03-20T11:29:42
2019-03-20T11:29:42
169,380,151
0
0
null
null
null
null
UTF-8
Python
false
false
564
py
from numpy import genfromtxt, isnan, interp, zeros
import pandas


def read_csv_files(filenames):
    data = [None for i in range(len(filenames))]
    for i in range(len(filenames)):
        t = pandas.read_csv(filenames[i], delimiter=',').values
        data[i] = t[~isnan(t).any(axis=1)]
    return data


def create_single_table(data):
    n_samples = data[0].shape[0]
    n_channels = len(data)
    result = zeros((n_samples, n_channels))
    for i in range(n_channels):
        result[:, i] = interp(data[0][:, 0], data[i][:, 0], data[i][:, 1])
    return result
[ "aasm.eek@gmail.com" ]
aasm.eek@gmail.com
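A usage sketch, under the assumption that each CSV holds (time, value) rows; create_single_table linearly interpolates every channel onto the first channel's time base:

# Hypothetical file names, for illustration only.
data = read_csv_files(["channel_a.csv", "channel_b.csv"])
table = create_single_table(data)
print(table.shape)  # (number of samples in channel_a, 2)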
7ff1948228505514fa2fc18802fadd69dee1abbb
81cac5d646fc14e52b3941279d59fdd957b10f7e
/tests/components/homekit_controller/specific_devices/test_ecobee_occupancy.py
293ecd07dd2394ceabecc9061354e93c5bf4a172
[ "Apache-2.0" ]
permissive
arsaboo/home-assistant
6b6617f296408a42874a67a71ad9bc6074acd000
554e51017e7b1b6949783d9684c4a0e8ca21e466
refs/heads/dev
2023-07-27T20:56:52.656891
2022-01-19T19:30:57
2022-01-19T19:30:57
207,046,472
2
0
Apache-2.0
2019-09-08T01:35:16
2019-09-08T01:35:16
null
UTF-8
Python
false
false
1,226
py
""" Regression tests for Ecobee occupancy. https://github.com/home-assistant/core/issues/31827 """ from tests.components.homekit_controller.common import ( DeviceTestInfo, EntityTestInfo, assert_devices_and_entities_created, setup_accessories_from_file, setup_test_accessories, ) async def test_ecobee_occupancy_setup(hass): """Test that an Ecbobee occupancy sensor be correctly setup in HA.""" accessories = await setup_accessories_from_file(hass, "ecobee_occupancy.json") await setup_test_accessories(hass, accessories) await assert_devices_and_entities_created( hass, DeviceTestInfo( unique_id="00:00:00:00:00:00", name="Master Fan", model="ecobee Switch+", manufacturer="ecobee Inc.", sw_version="4.5.130201", hw_version="", serial_number="111111111111", devices=[], entities=[ EntityTestInfo( entity_id="binary_sensor.master_fan", friendly_name="Master Fan", unique_id="homekit-111111111111-56", state="off", ), ], ), )
[ "noreply@github.com" ]
noreply@github.com
4011086557663f0dd2f3045fdc0b4dd03086183d
56490255e417e65848b7774be49e9602363f238b
/pywinfuse/tools.py
fa88567eb928f7fb4de206fc6587f0932a8a771e
[ "MIT" ]
permissive
weijia/pywinfuse
a5668e567f46548d0e871c2cacd725b818bfce74
249196a70cbcc09b74288d4dc1fa04b25b7c6105
refs/heads/master
2022-02-04T23:19:27.320602
2022-01-30T15:21:17
2022-01-30T15:21:17
36,103,070
1
1
null
null
null
null
UTF-8
Python
false
false
828
py
from ctypes import *


def setDwordByPoint(valueAddress, value):
    '''
    valueAddress[0] = value & 0xff
    valueAddress[1] = (value >> 8) & 0xff
    '''
    i = 0
    while i < 4:
        memset(valueAddress + i, value & 0xff, 1)
        i += 1
        value >>= 8


def setLongLongByPoint(valueAddress, value):
    setDwordByPoint(valueAddress, value & 0xffffffff)
    setDwordByPoint(valueAddress + 4, (value >> 32) & 0xffffffff)


def setStringByPoint(valueAddress, value, length):
    cnt = 0
    for i in value:
        # print i
        cnt += 2
        if cnt + 2 > length:
            break
        # 0061: u'a' -> 0x00000000: 61, 0x00000001: 00
        memset(valueAddress, ord(i) & 0xff, 1)
        valueAddress += 1
        memset(valueAddress, (ord(i) >> 8) & 0xff, 1)
        valueAddress += 1
    # print valueAddress
    memset(valueAddress, 0, 1)
    valueAddress += 1
    memset(valueAddress, 0, 1)
[ "wang.weijia@a5b76d48-45a9-11de-a65c-33beeba39556" ]
wang.weijia@a5b76d48-45a9-11de-a65c-33beeba39556
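A small sketch showing the little-endian byte order setDwordByPoint produces, using a ctypes buffer as the target address:

from ctypes import create_string_buffer, addressof

buf = create_string_buffer(4)
setDwordByPoint(addressof(buf), 0x12345678)
print(buf.raw)  # b'\x78\x56\x34\x12' -- least significant byte first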
f084f2434510565b6756fe2e22ff2eee7bd9ae65
52272ef3de9036a1b23b35047ceb90a2027df1f3
/selenium_doc/TEST/test1.py
23254c71b70f3e7247cd31f2a4df400aa90ba122
[]
no_license
luokeke/selenium_python
9f2883cc158e473902e0c4bbf9fca20ecb61bfda
3cc05034afd0bc0930921812393bd572db868fb3
refs/heads/master
2020-08-25T02:51:16.064938
2020-04-20T10:03:52
2020-04-20T10:03:52
216,950,778
3
1
null
2019-10-23T03:39:52
2019-10-23T02:29:06
null
UTF-8
Python
false
false
849
py
#!/usr/bin/env python
# -*- coding:utf8 -*-
# @author: 刘慧玲 2018/5/22 19:15
from selenium import webdriver
from time import sleep
from login01 import *

'''
Script purpose: server SSP functionality
'''

# Batch-delete storage spaces
driver = webdriver.Firefox()
driver.delete_all_cookies()
driver.maximize_window()

# Visit the Jingan-site storage space link directly and log in;
# the username and password can be changed
Login().Ky_login(driver, "luokeke", "1")
sleep(3)
driver.get("https://mc.kuaiyun.cn/host/hostList")
sleep(3)
driver.find_element_by_link_text(u"管理").click()
sleep(5)

# Opening "管理" (Manage) jumps to a new page, which involves multi-window handling
all_window_handle = driver.window_handles  # get all open window handles
driver.switch_to.window(all_window_handle[-1])  # activate the topmost window handle

# "Reinstall system" tab
driver.find_element_by_id("tab3_7").click()
sleep(5)
[ "1326677806@qq.com" ]
1326677806@qq.com
ae6057dbb0fae441881aef45ecbbe8e1312170af
9673686edc2ffefdb076fa2b688fdf3364a342dc
/BOJ_4673.py
91cbf1013a075e210ae201718daac3dc43a4c3c9
[]
no_license
D7DonKIM7E/Python
f23ded6b3353bb5f675c1dfbaee554a58894fa53
b40d14f342f40819a4fbabb336248bfa26ad527f
refs/heads/main
2023-04-22T20:29:36.208879
2021-05-09T09:49:56
2021-05-09T09:49:56
349,476,797
0
0
null
null
null
null
UTF-8
Python
false
false
237
py
originalNumber = set(range(1, 10001))
generatorEx = set()

for n in range(1, 10001):
    for i in str(n):
        n += int(i)
    generatorEx.add(n)

selfNumber = sorted(originalNumber - generatorEx)

for n in selfNumber:
    print(n)
[ "puppy031894@gmail.com" ]
puppy031894@gmail.com
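For reference, the inner loop above computes d(n) = n + (sum of n's digits), the generator function from the problem statement; any number that appears as d(n) for some n is not a self number. A small sketch:

def d(n):
    # n plus the sum of its digits, as in the loop above
    return n + sum(int(digit) for digit in str(n))

print(d(75))  # 87, so 87 is generated and therefore not a self number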
47794b95a5259461f6546bb731ff6ad706f6ddc8
1ee4b9f9cbe6c55e88b055e0200d4c9704b125cf
/server.py
282782f87eadfc5a8cf25ef0d7f1ff93f8c67655
[]
no_license
VagishM6/web_server_system
2d339dc937166903a736fef202fdf97dac347f4b
f6be5146576082d7c1146b9a80ecdea0534d414b
refs/heads/main
2023-08-01T20:13:29.453543
2021-09-10T16:32:24
2021-09-10T16:32:24
402,270,172
1
0
null
null
null
null
UTF-8
Python
false
false
1,097
py
from flask import Flask, render_template, url_for, request, redirect
import csv

app = Flask(__name__)


@app.route('/')
def my_home():
    return render_template('index.html')


@app.route('/<string:page_name>')
def html_page(page_name):
    return render_template(page_name)


# write the data to csv file
def write_to_csv(data):
    with open('database.csv', mode='a', newline='') as database2:
        email = data["email"]
        subject = data["subject"]
        message = data["message"]
        csv_writer = csv.writer(database2, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
        csv_writer.writerow([email, subject, message])


@app.route('/submit_form', methods=['POST', 'GET'])
def submit_form():
    if request.method == 'POST':
        try:
            data = request.form.to_dict()  # store data as dictionary
            write_to_csv(data)  # write the form data to the database
            return redirect('/thankyou.html')  # redirects to thankyou.html
        except:
            return 'did not save to database'
    else:
        return 'something went wrong, try again!'
[ "iamvagish6@gmail.com" ]
iamvagish6@gmail.com
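The file defines app but no entry point, so it is presumably launched via the Flask CLI (e.g. FLASK_APP=server.py flask run, assuming the file is named server.py). A direct entry point, if preferred, would be a two-line addition; a sketch, not present in the original file:

if __name__ == '__main__':
    app.run(debug=True)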
4dab74f654c68c3116d92d88b95a8cbe01d07417
6bc2f280299458357a590f18f40d61a784abb312
/ColumnReinforcement/SingleTieMultipleRebars.py
44ed628473a9568231f60431940589e8f5c9898f
[]
no_license
cadsimula-xin/FreeCAD-Reinforcement
a3a35f2817c0a2c5ec25608d44e29364bc4f7c1c
f9e5658b5a5a6943c3c2c10ba2b2bb2c745937a3
refs/heads/master
2021-03-14T12:16:14.481501
2020-03-12T07:55:36
2020-03-12T07:55:36
246,758,943
0
0
null
2020-03-12T06:21:15
2020-03-12T06:21:15
null
UTF-8
Python
false
false
50,771
py
# -*- coding: utf-8 -*- # *************************************************************************** # * * # * Copyright (c) 2019 - Suraj <dadralj18@gmail.com> * # * * # * This program is free software; you can redistribute it and/or modify * # * it under the terms of the GNU Lesser General Public License (LGPL) * # * as published by the Free Software Foundation; either version 2 of * # * the License, or (at your option) any later version. * # * for detail see the LICENCE text file. * # * * # * This program is distributed in the hope that it will be useful, * # * but WITHOUT ANY WARRANTY; without even the implied warranty of * # * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * # * GNU Library General Public License for more details. * # * * # * You should have received a copy of the GNU Library General Public * # * License along with this program; if not, write to the Free Software * # * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * # * USA * # * * # *************************************************************************** __title__ = "Single Tie Multiple Rebars Reinforcement" __author__ = "Suraj" __url__ = "https://www.freecadweb.org" import FreeCAD from ColumnReinforcement.SingleTie import ( makeSingleTieFourRebars, editSingleTieFourRebars, ) from StraightRebar import makeStraightRebar, editStraightRebar from LShapeRebar import makeLShapeRebar, editLShapeRebar from Rebarfunc import ( showWarning, getParametersOfFace, gettupleOfNumberDiameter, getFacenameforRebar, getLRebarOrientationLeftRightCover, setGroupProperties, ) if FreeCAD.GuiUp: import FreeCADGui def makeSingleTieMultipleRebars( l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, offset_of_tie, bent_angle, extension_factor, dia_of_tie, number_spacing_check, number_spacing_value, dia_of_main_rebars, main_rebars_t_offset, main_rebars_b_offset, main_rebars_type="StraightRebar", main_hook_orientation="Top Inside", main_hook_extend_along="x-axis", l_main_rebar_rounding=None, main_hook_extension=None, sec_rebars_t_offset=None, sec_rebars_b_offset=None, sec_rebars_number_diameter=None, sec_rebars_type=("StraightRebar", "StraightRebar"), sec_hook_orientation=("Top Inside", "Top Inside"), l_sec_rebar_rounding=None, sec_hook_extension=None, structure=None, facename=None, ): """makeSingleTieMultipleRebars(LeftCoverOfTie, RightCoverOfTie, TopCoverOfTie, BottomCoverOfTie, OffsetofTie, BentAngle, ExtensionFactor, DiameterOfTie, NumberSpacingCheck, NumberSpacingValue, DiameterOfMainRebars, TopOffsetOfMainRebars, BottomOffsetofMainRebars, MainRebarType, MainLShapeHookOrientation, MainLShapeHookExtendAlong, LShapeMainRebarRounding, LShapeMainHookLength, TopOffsetOfSecondaryRebars, BottomOffsetOfSecondaryRebars, SecondaryRebarNumberDiameterString, SecondaryRebarType, SecondaryLShapeHookOrientation, LShapeSecondaryRebarRounding, LShapeSecondaryHookLength, Structure, Facename): Adds the Single Tie Multiple Rebars reinforcement to the selected structural column object. It takes two different inputs for main_rebars_type i.e. 'StraightRebar', 'LShapeRebar'. It takes eight different orientations input for Main L-shaped hooks i.e. 'Top Inside', 'Top Outside', 'Bottom Inside', 'Bottom Outside', 'Top Left', 'Top Right', 'Bottom Left', 'Bottom Right'. It takes two different inputs for main_hook_extend_along i.e. 'x-axis', 'y-axis'. 
Note: Type of sec_rebars_t_offset, sec_rebars_b_offset, sec_rebars_number_diameter, sec_rebars_type, sec_hook_orientation, l_sec_rebar_rounding and sec_hook_extension argumants is a tuple. Syntax: (<value_for_xdir_rebars>, <value_for_ydir_rebars>). In sec_hook_orientation(<xdir_rebars_orientation>, <ydir_rebars_orientation>), Value of xdir_rebars_orientation can be: 'Top Inside', 'Top Outside', 'Bottom Inside', 'Bottom Outside', 'Top Upward', 'Top Downward', 'Bottom Upward', 'Bottom Downward'. Value of ydir_rebars_orientation can be: 'Top Inside', 'Top Outside', 'Bottom Inside', 'Bottom Outside', 'Top Left', 'Top Right', 'Bottom Left', 'Bottom Right'. """ if not structure and not facename: if FreeCAD.GuiUp: selected_obj = FreeCADGui.Selection.getSelectionEx()[0] structure = selected_obj.Object facename = selected_obj.SubElementNames[0] else: showWarning("Error: Pass structure and facename arguments") return None SingleTieFourRebarsObject = makeSingleTieFourRebars( l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, offset_of_tie, bent_angle, extension_factor, dia_of_tie, number_spacing_check, number_spacing_value, dia_of_main_rebars, main_rebars_t_offset, main_rebars_b_offset, main_rebars_type, main_hook_orientation, main_hook_extend_along, l_main_rebar_rounding, main_hook_extension, structure, facename, ) if not sec_rebars_number_diameter: return SingleTieFourRebarsObject else: xdir_rebars_number_diameter = sec_rebars_number_diameter[0] ydir_rebars_number_diameter = sec_rebars_number_diameter[1] if ( not xdir_rebars_number_diameter or xdir_rebars_number_diameter == "0" ) and ( not ydir_rebars_number_diameter or ydir_rebars_number_diameter == "0" ): return SingleTieFourRebarsObject # Set parameters for xdir and ydir rebars if not sec_rebars_t_offset: xdir_rebars_t_offset = ydir_rebars_t_offset = main_rebars_t_offset else: xdir_rebars_t_offset = sec_rebars_t_offset[0] ydir_rebars_t_offset = sec_rebars_t_offset[1] if not sec_rebars_b_offset: xdir_rebars_b_offset = ydir_rebars_b_offset = main_rebars_b_offset else: xdir_rebars_b_offset = sec_rebars_b_offset[0] ydir_rebars_b_offset = sec_rebars_b_offset[1] xdir_rebars_type = sec_rebars_type[0] ydir_rebars_type = sec_rebars_type[1] if not sec_hook_orientation: if xdir_rebars_type == "StraightRebar": xdir_hook_orientation = None elif xdir_rebars_type == "LShapeRebar": xdir_hook_orientation = "Top Inside" if ydir_rebars_type == "StraightRebar": ydir_hook_orientation = None elif ydir_rebars_type == "LShapeRebar": ydir_hook_orientation = "Top Inside" else: xdir_hook_orientation = sec_hook_orientation[0] ydir_hook_orientation = sec_hook_orientation[1] if l_sec_rebar_rounding: l_xdir_rebar_rounding = l_sec_rebar_rounding[0] l_ydir_rebar_rounding = l_sec_rebar_rounding[1] if sec_hook_extension: xdir_hook_extension = sec_hook_extension[0] ydir_hook_extension = sec_hook_extension[1] if xdir_rebars_number_diameter and xdir_rebars_number_diameter != "0": xdir_rebars = makeXDirRebars( l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, dia_of_tie, dia_of_main_rebars, xdir_rebars_t_offset, xdir_rebars_b_offset, xdir_rebars_type, xdir_hook_orientation, xdir_hook_extension, l_xdir_rebar_rounding, xdir_rebars_number_diameter, facename, structure, ) if ydir_rebars_number_diameter and ydir_rebars_number_diameter != "0": ydir_rebars = makeYDirRebars( l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, dia_of_tie, dia_of_main_rebars, ydir_rebars_t_offset, ydir_rebars_b_offset, ydir_rebars_type, ydir_hook_orientation, 
ydir_hook_extension, l_ydir_rebar_rounding, ydir_rebars_number_diameter, facename, structure, ) # Create object of _SingleTieMultipleRebars to add new properties to it SingleTieMultipleRebars = _SingleTieMultipleRebars( SingleTieFourRebarsObject ) # Add created xdir/ydir rebars to xdir_rebars_group/ydir_rebars_group SingleTieMultipleRebars.addXDirRebars(xdir_rebars) SingleTieMultipleRebars.addYDirRebars(ydir_rebars) # Set properties values for xdir_rebars in xdir_rebars_group object if len(xdir_rebars) > 0: xdir_rebars_group = SingleTieMultipleRebars.xdir_rebars_group xdir_rebars_group.RebarType = xdir_rebars_type if xdir_rebars_type == "LShapeRebar": xdir_rebars_group.HookOrientation = xdir_hook_orientation xdir_rebars_group.HookExtension = xdir_hook_extension else: xdir_rebars_group.HookOrientation = "Top Inside" xdir_rebars_group.TopOffset = xdir_rebars_t_offset xdir_rebars_group.BottomOffset = xdir_rebars_b_offset xdir_rebars_group.NumberDiameter = xdir_rebars_number_diameter # Set properties values for ydir_rebars in ydir_rebars_group object if len(ydir_rebars) > 0: ydir_rebars_group = SingleTieMultipleRebars.ydir_rebars_group ydir_rebars_group.RebarType = ydir_rebars_type if ydir_rebars_type == "LShapeRebar": ydir_rebars_group.HookOrientation = ydir_hook_orientation ydir_rebars_group.HookExtension = ydir_hook_extension else: ydir_rebars_group.HookOrientation = "Top Inside" ydir_rebars_group.TopOffset = ydir_rebars_t_offset ydir_rebars_group.BottomOffset = ydir_rebars_b_offset ydir_rebars_group.NumberDiameter = ydir_rebars_number_diameter FreeCAD.ActiveDocument.recompute() return SingleTieMultipleRebars.Object def makeXDirRebars( l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, dia_of_tie, dia_of_main_rebars, xdir_rebars_t_offset, xdir_rebars_b_offset, xdir_rebars_type, xdir_hook_orientation, xdir_hook_extension, l_xdir_rebar_rounding, xdir_rebars_number_diameter, facename, structure, ): """Adds the rebars along x-direction to the structural column object.""" xdir_rebars = [] facename_for_xdir_rebars = getFacenameforRebar( "y-axis", facename, structure ) # Find parameters of selected face FacePRM = getParametersOfFace(structure, facename) # find list of tuples of number and diameter of xdir rebars xdir_rebars_number_diameter_list = gettupleOfNumberDiameter( xdir_rebars_number_diameter ) xdir_rebars_number_diameter_list.reverse() # Calculate spacing between xdir-rebars face_length = FacePRM[0][0] xdir_span_length = ( FacePRM[0][0] - l_cover_of_tie - r_cover_of_tie - 2 * dia_of_tie - 2 * dia_of_main_rebars ) req_space_for_xdir_rebars = sum( x[0] * x[1] for x in xdir_rebars_number_diameter_list ) xdir_rebars_number = sum( number for number, _ in xdir_rebars_number_diameter_list ) spacing_in_xdir_rebars = (xdir_span_length - req_space_for_xdir_rebars) / ( xdir_rebars_number + 1 ) # Set parameter values for Straight/LShape xdir_rebars list_coverAlong = ["Right Side", "Left Side"] if xdir_rebars_type == "StraightRebar": # Set parameter values for Straight xdir_rebars r_cover = t_cover_of_tie + dia_of_tie l_cover = b_cover_of_tie + dia_of_tie rl_cover = [r_cover, l_cover] # Create Straight rebars along x-direction for i, coverAlong in enumerate(list_coverAlong): for j, (number, dia) in enumerate(xdir_rebars_number_diameter_list): if j == 0: f_cover_of_xdir_rebars = ( r_cover_of_tie + dia_of_tie + dia_of_main_rebars + spacing_in_xdir_rebars ) rear_cover_of_xdir_rebars = ( FacePRM[0][0] - f_cover_of_xdir_rebars - number * dia - (number - 1) * spacing_in_xdir_rebars ) 
xdir_rebars.append( makeStraightRebar( f_cover_of_xdir_rebars, (coverAlong, rl_cover[i]), xdir_rebars_t_offset, xdir_rebars_b_offset, dia, True, number, "Vertical", structure, facename_for_xdir_rebars, ) ) xdir_rebars[-1].OffsetEnd = rear_cover_of_xdir_rebars + dia / 2 f_cover_of_xdir_rebars += ( number * dia + number * spacing_in_xdir_rebars ) elif xdir_rebars_type == "LShapeRebar": face_length = getParametersOfFace(structure, facename_for_xdir_rebars)[ 0 ][0] l_rebar_orientation_cover_list = [] for i, (number, dia_of_rebars) in enumerate( xdir_rebars_number_diameter_list ): l_rebar_orientation_cover_list.append( getLRebarOrientationLeftRightCover( xdir_hook_orientation, xdir_hook_extension, "y-axis", l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, dia_of_tie, dia_of_rebars, l_xdir_rebar_rounding, face_length, ) ) list_orientation = l_rebar_orientation_cover_list[0]["list_orientation"] l_cover_list = [] for l_rebar_orientation_cover in l_rebar_orientation_cover_list: l_cover_list.append(l_rebar_orientation_cover["l_cover"]) r_cover_list = [] for l_rebar_orientation_cover in l_rebar_orientation_cover_list: r_cover_list.append(l_rebar_orientation_cover["r_cover"]) # Create LShape rebars along x-direction for i, orientation in enumerate(list_orientation): for j, (number, dia) in enumerate(xdir_rebars_number_diameter_list): if j == 0: f_cover_of_xdir_rebars = ( r_cover_of_tie + dia_of_tie + dia_of_main_rebars + spacing_in_xdir_rebars ) rear_cover_of_xdir_rebars = ( FacePRM[0][0] - f_cover_of_xdir_rebars - number * dia - (number - 1) * spacing_in_xdir_rebars ) xdir_rebars.append( makeLShapeRebar( f_cover_of_xdir_rebars, xdir_rebars_b_offset, l_cover_list[j][i], r_cover_list[j][i], dia, xdir_rebars_t_offset, l_xdir_rebar_rounding, True, number, orientation, structure, facename_for_xdir_rebars, ) ) xdir_rebars[-1].OffsetEnd = rear_cover_of_xdir_rebars + dia / 2 f_cover_of_xdir_rebars += ( number * dia + number * spacing_in_xdir_rebars ) FreeCAD.ActiveDocument.recompute() return xdir_rebars def makeYDirRebars( l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, dia_of_tie, dia_of_main_rebars, ydir_rebars_t_offset, ydir_rebars_b_offset, ydir_rebars_type, ydir_hook_orientation, ydir_hook_extension, l_ydir_rebar_rounding, ydir_rebars_number_diameter, facename, structure, ): """Adds the rebars along y-direction to the structural column object.""" ydir_rebars = [] facename_for_ydir_rebars = getFacenameforRebar( "x-axis", facename, structure ) # Find parameters of selected face FacePRM = getParametersOfFace(structure, facename) # find list of tuples of number and diameter of ydir rebars ydir_rebars_number_diameter_list = gettupleOfNumberDiameter( ydir_rebars_number_diameter ) # Calculate spacing between ydir-rebars ydir_span_length = ( FacePRM[0][1] - t_cover_of_tie - b_cover_of_tie - 2 * dia_of_tie - 2 * dia_of_main_rebars ) req_space_for_ydir_rebars = sum( x[0] * x[1] for x in ydir_rebars_number_diameter_list ) ydir_rebars_number = sum( number for number, _ in ydir_rebars_number_diameter_list ) spacing_in_ydir_rebars = (ydir_span_length - req_space_for_ydir_rebars) / ( ydir_rebars_number + 1 ) # Set parameter values for Straight/LShape ydir_rebars list_coverAlong = ["Right Side", "Left Side"] if ydir_rebars_type == "StraightRebar": # Set parameter values for Straight ydir_rebars r_cover = r_cover_of_tie + dia_of_tie l_cover = l_cover_of_tie + dia_of_tie rl_cover = [r_cover, l_cover] # Create Straight rebars along y-direction for i, coverAlong in enumerate(list_coverAlong): 
for j, (number, dia) in enumerate(ydir_rebars_number_diameter_list): if j == 0: f_cover_of_ydir_rebars = ( b_cover_of_tie + dia_of_tie + dia_of_main_rebars + spacing_in_ydir_rebars ) rear_cover_of_ydir_rebars = ( FacePRM[0][1] - f_cover_of_ydir_rebars - number * dia - (number - 1) * spacing_in_ydir_rebars ) ydir_rebars.append( makeStraightRebar( f_cover_of_ydir_rebars, (coverAlong, rl_cover[i]), ydir_rebars_t_offset, ydir_rebars_b_offset, dia, True, number, "Vertical", structure, facename_for_ydir_rebars, ) ) ydir_rebars[-1].OffsetEnd = rear_cover_of_ydir_rebars + dia / 2 f_cover_of_ydir_rebars += ( number * dia + number * spacing_in_ydir_rebars ) elif ydir_rebars_type == "LShapeRebar": face_length = getParametersOfFace(structure, facename_for_ydir_rebars)[ 0 ][0] l_rebar_orientation_cover_list = [] for i, (number, dia_of_rebars) in enumerate( ydir_rebars_number_diameter_list ): l_rebar_orientation_cover_list.append( getLRebarOrientationLeftRightCover( ydir_hook_orientation, ydir_hook_extension, "x-axis", l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, dia_of_tie, dia_of_rebars, l_ydir_rebar_rounding, face_length, ) ) list_orientation = l_rebar_orientation_cover_list[0]["list_orientation"] l_cover_list = [] for l_rebar_orientation_cover in l_rebar_orientation_cover_list: l_cover_list.append(l_rebar_orientation_cover["l_cover"]) r_cover_list = [] for l_rebar_orientation_cover in l_rebar_orientation_cover_list: r_cover_list.append(l_rebar_orientation_cover["r_cover"]) # Create LShape rebars along y-direction for i, orientation in enumerate(list_orientation): for j, (number, dia) in enumerate(ydir_rebars_number_diameter_list): if j == 0: f_cover_of_ydir_rebars = ( r_cover_of_tie + dia_of_tie + dia_of_main_rebars + spacing_in_ydir_rebars ) rear_cover_of_ydir_rebars = ( FacePRM[0][1] - f_cover_of_ydir_rebars - number * dia - (number - 1) * spacing_in_ydir_rebars ) ydir_rebars.append( makeLShapeRebar( f_cover_of_ydir_rebars, ydir_rebars_b_offset, l_cover_list[j][i], r_cover_list[j][i], dia, ydir_rebars_t_offset, l_ydir_rebar_rounding, True, number, orientation, structure, facename_for_ydir_rebars, ) ) ydir_rebars[-1].OffsetEnd = rear_cover_of_ydir_rebars + dia / 2 f_cover_of_ydir_rebars += ( number * dia + number * spacing_in_ydir_rebars ) FreeCAD.ActiveDocument.recompute() return ydir_rebars def editSingleTieMultipleRebars( rebar_group, l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, offset_of_tie, bent_angle, extension_factor, dia_of_tie, number_spacing_check, number_spacing_value, dia_of_main_rebars, main_rebars_t_offset, main_rebars_b_offset, main_rebars_type="StraightRebar", main_hook_orientation="Top Inside", main_hook_extend_along="x-axis", l_main_rebar_rounding=None, main_hook_extension=None, sec_rebars_t_offset=None, sec_rebars_b_offset=None, sec_rebars_number_diameter=None, sec_rebars_type=("StraightRebar", "StraightRebar"), sec_hook_orientation=("Top Inside", "Top Inside"), l_sec_rebar_rounding=None, sec_hook_extension=None, structure=None, facename=None, ): """editSingleTieMultipleRebars(RebarGroup, LeftCoverOfTie, RightCoverOfTie, TopCoverOfTie, BottomCoverOfTie, OffsetofTie, BentAngle, ExtensionFactor, DiameterOfTie, NumberSpacingCheck, NumberSpacingValue, DiameterOfMainRebars, TopOffsetofMainRebars, BottomOffsetofMainRebars, MainRebarType, MainLShapeHookOrientation, MainLShapeHookExtendAlong, LShapeMainRebarRounding, LShapeMainHookLength, TopOffsetofSecondaryRebars, BottomOffsetofSecondaryRebars, SecondaryRebarNumberDiameterString, 
SecondaryRebarType, SecondaryLShapeHookOrientation, LShapeSecondaryRebarRounding, LShapeSecondaryHookLength, Structure, Facename): Edit the Single Tie Multiple Rebars reinforcement for the selected structural column object. It takes two different inputs for main_rebars_type i.e. 'StraightRebar', 'LShapeRebar'. It takes eight different orientations input for Main L-shaped hooks i.e. 'Top Inside', 'Top Outside', 'Bottom Inside', 'Bottom Outside', 'Top Left', 'Top Right', 'Bottom Left', 'Bottom Right'. It takes two different inputs for main_hook_extend_along i.e. 'x-axis', 'y-axis'. Note: Type of sec_rebars_t_offset, sec_rebars_b_offset, sec_rebars_number_diameter, sec_rebars_type, sec_hook_orientation, l_sec_rebar_rounding and sec_hook_extension argumants is a tuple. Syntax: (<value_for_xdir_rebars>, <value_for_ydir_rebars>). In sec_hook_orientation(<xdir_rebars_orientation>, <ydir_rebars_orientation>), Value of xdir_rebars_orientation can be: 'Top Inside', 'Top Outside', 'Bottom Inside', 'Bottom Outside', 'Top Upward', 'Top Downward', 'Bottom Upward', 'Bottom Downward'. Value of ydir_rebars_orientation can be: 'Top Inside', 'Top Outside', 'Bottom Inside', 'Bottom Outside', 'Top Left', 'Top Right', 'Bottom Left', 'Bottom Right'. """ if len(rebar_group.RebarGroups) == 0: return rebar_group for i, tmp_rebar_group in enumerate(rebar_group.RebarGroups): if hasattr(tmp_rebar_group, "Ties"): if len(tmp_rebar_group.Ties) > 0: Tie = tmp_rebar_group.Ties[0] break else: showWarning( "You have deleted ties. Please recreate the " "ColumnReinforcement." ) return rebar_group elif i == len(rebar_group.RebarGroups) - 1: showWarning( "You have deleted ties group. Please recreate the " "ColumnReinforcement." ) return rebar_group if not structure and not facename: structure = Tie.Base.Support[0][0] facename = Tie.Base.Support[0][1][0] # Edit ties and main rebars editSingleTieFourRebars( rebar_group, l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, offset_of_tie, bent_angle, extension_factor, dia_of_tie, number_spacing_check, number_spacing_value, dia_of_main_rebars, main_rebars_t_offset, main_rebars_b_offset, main_rebars_type, main_hook_orientation, main_hook_extend_along, l_main_rebar_rounding, main_hook_extension, structure, facename, ) if len(rebar_group.RebarGroups) == 3: # Set parameters for xdir and ydir rebars xdir_rebars_group = rebar_group.RebarGroups[2].SecondaryRebars[0] ydir_rebars_group = rebar_group.RebarGroups[2].SecondaryRebars[1] if not sec_rebars_t_offset: xdir_rebars_t_offset = xdir_rebars_group.TopOffset ydir_rebars_t_offset = ydir_rebars_group.TopOffset else: xdir_rebars_t_offset = sec_rebars_t_offset[0] ydir_rebars_t_offset = sec_rebars_t_offset[1] if not sec_rebars_b_offset: xdir_rebars_b_offset = xdir_rebars_group.BottomOffset ydir_rebars_b_offset = ydir_rebars_group.BottomOffset else: xdir_rebars_b_offset = sec_rebars_b_offset[0] ydir_rebars_b_offset = sec_rebars_b_offset[1] if not sec_rebars_number_diameter: xdir_rebars_number_diameter = xdir_rebars_group.NumberDiameter ydir_rebars_number_diameter = ydir_rebars_group.NumberDiameter else: xdir_rebars_number_diameter = sec_rebars_number_diameter[0] ydir_rebars_number_diameter = sec_rebars_number_diameter[1] else: # Set parameters for xdir and ydir rebars if not sec_rebars_t_offset: xdir_rebars_t_offset = "0.00 mm" ydir_rebars_t_offset = "0.00 mm" else: xdir_rebars_t_offset = sec_rebars_t_offset[0] ydir_rebars_t_offset = sec_rebars_t_offset[1] if not sec_rebars_b_offset: xdir_rebars_b_offset = "0.00 mm" 
ydir_rebars_b_offset = "0.00 mm" else: xdir_rebars_b_offset = sec_rebars_b_offset[0] ydir_rebars_b_offset = sec_rebars_b_offset[1] if not sec_rebars_number_diameter: xdir_rebars_number_diameter = "2#20mm+1#16mm+2#20mm" ydir_rebars_number_diameter = "1#20mm+1#16mm+1#20mm" else: xdir_rebars_number_diameter = sec_rebars_number_diameter[0] ydir_rebars_number_diameter = sec_rebars_number_diameter[1] xdir_rebars_type = sec_rebars_type[0] ydir_rebars_type = sec_rebars_type[1] xdir_hook_orientation = sec_hook_orientation[0] ydir_hook_orientation = sec_hook_orientation[1] if l_sec_rebar_rounding: l_xdir_rebar_rounding = l_sec_rebar_rounding[0] l_ydir_rebar_rounding = l_sec_rebar_rounding[1] if sec_hook_extension: xdir_hook_extension = sec_hook_extension[0] ydir_hook_extension = sec_hook_extension[1] # Set common parameters of xdir and ydir rebars xdir_rebars = xdir_rebars_group.XDirRebars ydir_rebars = ydir_rebars_group.YDirRebars # Check if there is need to recreate rebars recreate_Xdir_rebars = False recreate_ydir_rebars = False prev_xdir_rebars_type = xdir_rebars_group.RebarType prev_xdir_rebars_number_diameter = xdir_rebars_group.NumberDiameter prev_ydir_rebars_type = ydir_rebars_group.RebarType prev_ydir_rebars_number_diameter = ydir_rebars_group.NumberDiameter if ( prev_xdir_rebars_type == xdir_rebars_type and prev_xdir_rebars_number_diameter == xdir_rebars_number_diameter ): recreate_xdir_rebars = False else: recreate_xdir_rebars = True if ( prev_ydir_rebars_type == ydir_rebars_type and prev_ydir_rebars_number_diameter == ydir_rebars_number_diameter ): recreate_ydir_rebars = False else: recreate_ydir_rebars = True if recreate_xdir_rebars: for Rebar in xdir_rebars: base_name = Rebar.Base.Name FreeCAD.ActiveDocument.removeObject(Rebar.Name) FreeCAD.ActiveDocument.removeObject(base_name) FreeCAD.ActiveDocument.recompute() if xdir_rebars_number_diameter and xdir_rebars_number_diameter != "0": xdir_rebars = makeXDirRebars( l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, dia_of_tie, dia_of_main_rebars, xdir_rebars_t_offset, xdir_rebars_b_offset, xdir_rebars_type, xdir_hook_orientation, xdir_hook_extension, l_xdir_rebar_rounding, xdir_rebars_number_diameter, facename, structure, ) else: editXDirRebars( xdir_rebars, l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, dia_of_tie, dia_of_main_rebars, xdir_rebars_t_offset, xdir_rebars_b_offset, xdir_rebars_type, xdir_hook_orientation, xdir_hook_extension, l_xdir_rebar_rounding, xdir_rebars_number_diameter, facename, structure, ) if recreate_ydir_rebars: for Rebar in ydir_rebars: base_name = Rebar.Base.Name FreeCAD.ActiveDocument.removeObject(Rebar.Name) FreeCAD.ActiveDocument.removeObject(base_name) FreeCAD.ActiveDocument.recompute() if ydir_rebars_number_diameter and ydir_rebars_number_diameter != "0": ydir_rebars = makeYDirRebars( l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, dia_of_tie, dia_of_main_rebars, ydir_rebars_t_offset, ydir_rebars_b_offset, ydir_rebars_type, ydir_hook_orientation, ydir_hook_extension, l_ydir_rebar_rounding, ydir_rebars_number_diameter, facename, structure, ) else: editYDirRebars( ydir_rebars, l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, dia_of_tie, dia_of_main_rebars, ydir_rebars_t_offset, ydir_rebars_b_offset, ydir_rebars_type, ydir_hook_orientation, ydir_hook_extension, l_ydir_rebar_rounding, ydir_rebars_number_diameter, facename, structure, ) # Add created xdir/ydir rebars to xdir_rebars_group/ydir_rebars_group xdir_rebars_group.addObjects(xdir_rebars) 
ydir_rebars_group.addObjects(ydir_rebars) # Set properties values for xdir_rebars in xdir_rebars_group object xdir_rebars_group.XDirRebars = xdir_rebars xdir_rebars_group.RebarType = xdir_rebars_type if xdir_rebars_type == "LShapeRebar": xdir_rebars_group.HookOrientation = xdir_hook_orientation xdir_rebars_group.HookExtension = xdir_hook_extension xdir_rebars_group.TopOffset = xdir_rebars_t_offset xdir_rebars_group.BottomOffset = xdir_rebars_b_offset xdir_rebars_group.NumberDiameter = xdir_rebars_number_diameter # Set properties values for ydir_rebars in ydir_rebars_group object ydir_rebars_group.YDirRebars = ydir_rebars ydir_rebars_group.RebarType = ydir_rebars_type if ydir_rebars_type == "LShapeRebar": ydir_rebars_group.HookOrientation = ydir_hook_orientation ydir_rebars_group.HookExtension = ydir_hook_extension ydir_rebars_group.TopOffset = ydir_rebars_t_offset ydir_rebars_group.BottomOffset = ydir_rebars_b_offset ydir_rebars_group.NumberDiameter = ydir_rebars_number_diameter FreeCAD.ActiveDocument.recompute() return rebar_group def editXDirRebars( xdir_rebars_list, l_cover_of_tie, r_cover_of_tie, t_cover_of_tie, b_cover_of_tie, dia_of_tie, dia_of_main_rebars, xdir_rebars_t_offset, xdir_rebars_b_offset, xdir_rebars_type, xdir_hook_orientation, xdir_hook_extension, l_xdir_rebar_rounding, xdir_rebars_number_diameter, facename, structure, ): # Find parameters of selected face FacePRM = getParametersOfFace(structure, facename) facename_for_xdir_rebars = getFacenameforRebar( "y-axis", facename, structure ) # find list of tuples of number and diameter of xdir rebars xdir_rebars_number_diameter_list = gettupleOfNumberDiameter( xdir_rebars_number_diameter ) xdir_rebars_number_diameter_list.reverse() # Calculate spacing between xdir-rebars face_length = FacePRM[0][0] xdir_span_length = ( FacePRM[0][0] - l_cover_of_tie - r_cover_of_tie - 2 * dia_of_tie - 2 * dia_of_main_rebars ) req_space_for_xdir_rebars = sum( x[0] * x[1] for x in xdir_rebars_number_diameter_list ) xdir_rebars_number = sum( number for number, _ in xdir_rebars_number_diameter_list ) spacing_in_xdir_rebars = (xdir_span_length - req_space_for_xdir_rebars) / ( xdir_rebars_number + 1 ) # Set parameter values for Straight/LShape xdir_rebars list_coverAlong = ["Right Side", "Left Side"] if xdir_rebars_type == "StraightRebar": r_cover = t_cover_of_tie + dia_of_tie l_cover = b_cover_of_tie + dia_of_tie rl_cover = [r_cover, l_cover] index = 0 for i, coverAlong in enumerate(list_coverAlong): for j, (number, dia) in enumerate(xdir_rebars_number_diameter_list): if j == 0: f_cover_of_xdir_rebars = ( r_cover_of_tie + dia_of_tie + dia_of_main_rebars + spacing_in_xdir_rebars ) rear_cover_of_xdir_rebars = ( FacePRM[0][0] - f_cover_of_xdir_rebars - number * dia - (number - 1) * spacing_in_xdir_rebars ) editStraightRebar( xdir_rebars_list[index], f_cover_of_xdir_rebars, (coverAlong, rl_cover[i]), xdir_rebars_t_offset, xdir_rebars_b_offset, dia, True, number, "Vertical", structure, facename_for_xdir_rebars, ) xdir_rebars_list[index].OffsetEnd = ( rear_cover_of_xdir_rebars + dia / 2 ) f_cover_of_xdir_rebars += ( number * dia + number * spacing_in_xdir_rebars ) index += 1 elif xdir_rebars_type == "LShapeRebar": face_length = getParametersOfFace(structure, facename_for_xdir_rebars)[ 0 ][0] l_rebar_orientation_cover_list = [] for i, (number, dia_of_rebars) in enumerate( xdir_rebars_number_diameter_list ): l_rebar_orientation_cover_list.append( getLRebarOrientationLeftRightCover( xdir_hook_orientation, xdir_hook_extension, "y-axis", l_cover_of_tie, 
                    r_cover_of_tie,
                    t_cover_of_tie,
                    b_cover_of_tie,
                    dia_of_tie,
                    dia_of_rebars,
                    l_xdir_rebar_rounding,
                    face_length,
                )
            )
        list_orientation = l_rebar_orientation_cover_list[0]["list_orientation"]
        l_cover_list = []
        for l_rebar_orientation_cover in l_rebar_orientation_cover_list:
            l_cover_list.append(l_rebar_orientation_cover["l_cover"])
        r_cover_list = []
        for l_rebar_orientation_cover in l_rebar_orientation_cover_list:
            r_cover_list.append(l_rebar_orientation_cover["r_cover"])
        index = 0
        for i, orientation in enumerate(list_orientation):
            for j, (number, dia) in enumerate(xdir_rebars_number_diameter_list):
                if j == 0:
                    f_cover_of_xdir_rebars = (
                        r_cover_of_tie
                        + dia_of_tie
                        + dia_of_main_rebars
                        + spacing_in_xdir_rebars
                    )
                rear_cover_of_xdir_rebars = (
                    FacePRM[0][0]
                    - f_cover_of_xdir_rebars
                    - number * dia
                    - (number - 1) * spacing_in_xdir_rebars
                )
                editLShapeRebar(
                    xdir_rebars_list[index],
                    f_cover_of_xdir_rebars,
                    xdir_rebars_b_offset,
                    l_cover_list[j][i],
                    r_cover_list[j][i],
                    dia,
                    xdir_rebars_t_offset,
                    l_xdir_rebar_rounding,
                    True,
                    number,
                    orientation,
                    structure,
                    facename_for_xdir_rebars,
                )
                xdir_rebars_list[index].OffsetEnd = (
                    rear_cover_of_xdir_rebars + dia / 2
                )
                f_cover_of_xdir_rebars += (
                    number * dia + number * spacing_in_xdir_rebars
                )
                index += 1
    FreeCAD.ActiveDocument.recompute()


def editYDirRebars(
    ydir_rebars_list,
    l_cover_of_tie,
    r_cover_of_tie,
    t_cover_of_tie,
    b_cover_of_tie,
    dia_of_tie,
    dia_of_main_rebars,
    ydir_rebars_t_offset,
    ydir_rebars_b_offset,
    ydir_rebars_type,
    ydir_hook_orientation,
    ydir_hook_extension,
    l_ydir_rebar_rounding,
    ydir_rebars_number_diameter,
    facename,
    structure,
):
    """Edits the rebars created along y-direction in the structural column
    object."""
    # Find parameters of selected face
    FacePRM = getParametersOfFace(structure, facename)
    facename_for_ydir_rebars = getFacenameforRebar(
        "x-axis", facename, structure
    )
    # find list of tuples of number and diameter of ydir rebars
    ydir_rebars_number_diameter_list = gettupleOfNumberDiameter(
        ydir_rebars_number_diameter
    )
    # Calculate spacing between ydir-rebars
    ydir_span_length = (
        FacePRM[0][1]
        - t_cover_of_tie
        - b_cover_of_tie
        - 2 * dia_of_tie
        - 2 * dia_of_main_rebars
    )
    req_space_for_ydir_rebars = sum(
        x[0] * x[1] for x in ydir_rebars_number_diameter_list
    )
    ydir_rebars_number = sum(
        number for number, _ in ydir_rebars_number_diameter_list
    )
    spacing_in_ydir_rebars = (ydir_span_length - req_space_for_ydir_rebars) / (
        ydir_rebars_number + 1
    )
    # Set parameter values for Straight/LShape ydir_rebars
    list_coverAlong = ["Right Side", "Left Side"]
    if ydir_rebars_type == "StraightRebar":
        # Set parameter values for Straight ydir_rebars
        r_cover = r_cover_of_tie + dia_of_tie
        l_cover = l_cover_of_tie + dia_of_tie
        rl_cover = [r_cover, l_cover]
        index = 0
        for i, coverAlong in enumerate(list_coverAlong):
            for j, (number, dia) in enumerate(ydir_rebars_number_diameter_list):
                if j == 0:
                    f_cover_of_ydir_rebars = (
                        b_cover_of_tie
                        + dia_of_tie
                        + dia_of_main_rebars
                        + spacing_in_ydir_rebars
                    )
                rear_cover_of_ydir_rebars = (
                    FacePRM[0][1]
                    - f_cover_of_ydir_rebars
                    - number * dia
                    - (number - 1) * spacing_in_ydir_rebars
                )
                editStraightRebar(
                    ydir_rebars_list[index],
                    f_cover_of_ydir_rebars,
                    (coverAlong, rl_cover[i]),
                    ydir_rebars_t_offset,
                    ydir_rebars_b_offset,
                    dia,
                    True,
                    number,
                    "Vertical",
                    structure,
                    facename_for_ydir_rebars,
                )
                ydir_rebars_list[index].OffsetEnd = (
                    rear_cover_of_ydir_rebars + dia / 2
                )
                f_cover_of_ydir_rebars += (
                    number * dia + number * spacing_in_ydir_rebars
                )
                index += 1
    elif ydir_rebars_type == "LShapeRebar":
        face_length = getParametersOfFace(
            structure, facename_for_ydir_rebars
        )[0][0]
        l_rebar_orientation_cover_list = []
        for i, (number, dia_of_rebars) in enumerate(
            ydir_rebars_number_diameter_list
        ):
            l_rebar_orientation_cover_list.append(
                getLRebarOrientationLeftRightCover(
                    ydir_hook_orientation,
                    ydir_hook_extension,
                    "x-axis",
                    l_cover_of_tie,
                    r_cover_of_tie,
                    t_cover_of_tie,
                    b_cover_of_tie,
                    dia_of_tie,
                    dia_of_rebars,
                    l_ydir_rebar_rounding,
                    face_length,
                )
            )
        list_orientation = l_rebar_orientation_cover_list[0]["list_orientation"]
        l_cover_list = []
        for l_rebar_orientation_cover in l_rebar_orientation_cover_list:
            l_cover_list.append(l_rebar_orientation_cover["l_cover"])
        r_cover_list = []
        for l_rebar_orientation_cover in l_rebar_orientation_cover_list:
            r_cover_list.append(l_rebar_orientation_cover["r_cover"])
        index = 0
        for i, orientation in enumerate(list_orientation):
            for j, (number, dia) in enumerate(ydir_rebars_number_diameter_list):
                if j == 0:
                    f_cover_of_ydir_rebars = (
                        r_cover_of_tie
                        + dia_of_tie
                        + dia_of_main_rebars
                        + spacing_in_ydir_rebars
                    )
                rear_cover_of_ydir_rebars = (
                    FacePRM[0][1]
                    - f_cover_of_ydir_rebars
                    - number * dia
                    - (number - 1) * spacing_in_ydir_rebars
                )
                editLShapeRebar(
                    ydir_rebars_list[index],
                    f_cover_of_ydir_rebars,
                    ydir_rebars_b_offset,
                    l_cover_list[j][i],
                    r_cover_list[j][i],
                    dia,
                    ydir_rebars_t_offset,
                    l_ydir_rebar_rounding,
                    True,
                    number,
                    orientation,
                    structure,
                    facename_for_ydir_rebars,
                )
                ydir_rebars_list[index].OffsetEnd = (
                    rear_cover_of_ydir_rebars + dia / 2
                )
                f_cover_of_ydir_rebars += (
                    number * dia + number * spacing_in_ydir_rebars
                )
                index += 1
    FreeCAD.ActiveDocument.recompute()


class _SingleTieMultipleRebars:
    def __init__(self, obj):
        """Add properties to object obj."""
        self.Object = obj.rebar_group
        self.sec_rebars_group = self.Object.newObject(
            "App::DocumentObjectGroupPython", "SecondaryRebars"
        )
        self.xdir_rebars_group = self.sec_rebars_group.newObject(
            "App::DocumentObjectGroupPython", "XDirRebars"
        )
        self.ydir_rebars_group = self.sec_rebars_group.newObject(
            "App::DocumentObjectGroupPython", "YDirRebars"
        )
        # Add secondary rebars group object to list of rebars groups of rebar
        # group object
        prev_rebar_groups = obj.rebar_group.RebarGroups
        prev_rebar_groups.append(self.sec_rebars_group)
        obj.rebar_group.RebarGroups = prev_rebar_groups
        # Add and set properties for secondary rebars group object
        properties = []
        properties.append(
            (
                "App::PropertyLinkList",
                "SecondaryRebars",
                "List of secondary rebars",
                1,
            )
        )
        setGroupProperties(properties, self.sec_rebars_group)
        self.sec_rebars_group.SecondaryRebars = [
            self.xdir_rebars_group,
            self.ydir_rebars_group,
        ]
        # Add properties to xdir rebars group object
        properties = []
        properties.append(
            ("App::PropertyLinkList", "XDirRebars", "List of xdir rebars", 1)
        )
        properties.append(
            ("App::PropertyString", "RebarType", "Type of xdir rebars", 1)
        )
        properties.append(
            (
                "App::PropertyString",
                "HookOrientation",
                "Orientation of LShaped Rebar Hook",
                1,
            )
        )
        properties.append(
            ("App::PropertyDistance", "HookExtension", "Length of hook", 1)
        )
        properties.append(
            (
                "App::PropertyDistance",
                "TopOffset",
                "Top offset of xdir rebars",
                1,
            )
        )
        properties.append(
            (
                "App::PropertyDistance",
                "BottomOffset",
                "Bottom offset of xdir rebars",
                1,
            )
        )
        properties.append(
            (
                "App::PropertyString",
                "NumberDiameter",
                "Number Diameter list of rebars",
                1,
            )
        )
        setGroupProperties(properties, self.xdir_rebars_group)
        # Add properties to ydir rebars group object
        properties = []
        properties.append(
            ("App::PropertyLinkList", "YDirRebars", "List of ydir rebars", 1)
        )
        properties.append(
            ("App::PropertyString", "RebarType", "Type of ydir rebars", 1)
        )
        properties.append(
            (
                "App::PropertyString",
                "HookOrientation",
                "Orientation of LShaped Rebar Hook",
                1,
            )
        )
        properties.append(
            ("App::PropertyDistance", "HookExtension", "Length of hook", 1)
        )
        properties.append(
            (
                "App::PropertyDistance",
                "TopOffset",
                "Top offset of ydir rebars",
                1,
            )
        )
        properties.append(
            (
                "App::PropertyDistance",
                "BottomOffset",
                "Bottom offset of ydir rebars",
                1,
            )
        )
        properties.append(
            (
                "App::PropertyString",
                "NumberDiameter",
                "Number Diameter list of rebars",
                1,
            )
        )
        setGroupProperties(properties, self.ydir_rebars_group)

    def addXDirRebars(self, xdir_rebars_list):
        """Add XDir Rebars to xdir_rebars group object."""
        self.xdir_rebars_group.addObjects(xdir_rebars_list)
        prev_xdir_rebars_list = self.xdir_rebars_group.XDirRebars
        xdir_rebars_list.extend(prev_xdir_rebars_list)
        self.xdir_rebars_group.XDirRebars = xdir_rebars_list

    def addYDirRebars(self, ydir_rebars_list):
        """Add YDir Rebars to ydir_rebars group object."""
        self.ydir_rebars_group.addObjects(ydir_rebars_list)
        prev_ydir_rebars_list = self.ydir_rebars_group.YDirRebars
        ydir_rebars_list.extend(prev_ydir_rebars_list)
        self.ydir_rebars_group.YDirRebars = ydir_rebars_list
[ "dadralj18@gmail.com" ]
dadralj18@gmail.com
6fba70adc9002e1c1d465e9e22bc0f20dd4e571d
0100687bae959dff1b14199d3f901f82e4dae019
/website/urls.py
d287c37bd62256e38e6c71c5cab4fbc2c7495fbb
[]
no_license
Kabzel55/Django_Blog
eb39f1ed8b28273b32ecdca084638f4590a98639
d4d6599d0dbacf4002ec5655a6704d9f35da8513
refs/heads/master
2022-08-26T22:27:51.506612
2020-05-27T13:23:37
2020-05-27T13:23:37
267,325,861
0
0
null
null
null
null
UTF-8
Python
false
false
1,381
py
from django.contrib import admin
from django.urls import path, include
from users import views as user_views
from django.contrib.auth import views as auth_views
from django.conf import settings
from django.conf.urls.static import static

urlpatterns = [
    path('admin/', admin.site.urls),
    path('register/', user_views.register, name='register'),
    path('login/',
         auth_views.LoginView.as_view(template_name="users/login.html"),
         name='login'),
    path('logout/',
         auth_views.LogoutView.as_view(template_name="users/logout.html"),
         name='logout'),
    path('password-reset/',
         auth_views.PasswordResetView.as_view(template_name="users/password_reset.html"),
         name="password_reset"),
    path('password-reset/done/',
         auth_views.PasswordResetDoneView.as_view(template_name="users/password_reset_done.html"),
         name="password_reset_done"),
    path('password-reset-confirm/<uidb64>/<token>',
         auth_views.PasswordResetConfirmView.as_view(template_name="users/password_reset_confirm.html"),
         name="password_reset_confirm"),
    path('password-reset-complete/',
         auth_views.PasswordResetCompleteView.as_view(template_name="users/password_reset_complete.html"),
         name="password_reset_complete"),
    path('profile/', user_views.profile, name='profile'),
    path('', include('blog.urls')),
]

if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
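For reference, a sketch of how the named routes above are typically resolved elsewhere in a project; the route names come from the urlpatterns, while the call sites are illustrative:

# Illustrative call sites; names match the urlpatterns above.
from django.urls import reverse

profile_url = reverse('profile')         # -> '/profile/'
reset_url = reverse('password_reset')    # -> '/password-reset/'
# In a template: {% url 'login' %}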
[ "patryk.koryga@pollub.edu.pl" ]
patryk.koryga@pollub.edu.pl
51fa600b4dc27c99a0e12ee0492e363f5ac329ce
e3f427476df64d80e284ba234788fda12abc7876
/Remove Duplicates from Sorted Array [ 96.27 %].py
f7d669679625e3d08a19fb28c8765d4c58e80ccf
[]
no_license
ankitrana1256/LeetcodeSolutions
217c525894974caa1794aba64bd322374a2b1943
3c7256a6bead3fb345ca8ab45bf0603e36ee523d
refs/heads/master
2023-08-27T05:10:28.433072
2021-11-04T06:56:09
2021-11-04T06:56:09
397,987,067
1
0
null
null
null
null
UTF-8
Python
false
false
252
py
class Solution:
    def removeDuplicates(self, nums) -> int:
        # Deduplicate in place: rebuild nums from its unique values, sorted.
        new_list = list(set(nums))
        nums.clear()
        nums.extend(new_list)
        nums.sort()
        return len(nums)


nums = [1, 1, 2]
k = Solution().removeDuplicates(nums)
print(k, nums)
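The set-based approach above allocates a new list; because the input is already sorted, the classic constant-extra-space alternative is a two-pointer scan. A minimal sketch of that variant (not the author's submission):

# Two-pointer variant for a sorted list: keep a write index for uniques.
def remove_duplicates_in_place(nums):
    write = 0
    for read in range(len(nums)):
        if read == 0 or nums[read] != nums[read - 1]:
            nums[write] = nums[read]
            write += 1
    return write

nums = [1, 1, 2]
k = remove_duplicates_in_place(nums)
print(k, nums[:k])  # 2 [1, 2]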
[ "76605774+ankitrana1256@users.noreply.github.com" ]
76605774+ankitrana1256@users.noreply.github.com
3f7e20835a7fd86fe8ba02db4fc07183a8847b8a
ac1a67e95027d0ac4642cb809a9cf73782b12341
/preprocessing.py
e8463280b190d0f394b86562256180f5fa34304e
[]
no_license
cpgaffney1/cs231n
79cbc5a6c030f1e3428aac1d972b318b80ed1cf7
e230753dcdf266f4c06b1d63e665ca36dea3add5
refs/heads/master
2021-06-04T02:27:02.069525
2019-11-27T19:43:11
2019-11-27T19:43:11
130,610,673
0
0
null
null
null
null
UTF-8
Python
false
false
5,582
py
import os
from PIL import Image
import numpy as np
import util
import sys


def process_data_batch_and_write(batchnum, filenames, text_data, numeric_data):
    i = 0
    img_arrays = []
    x_shapes = []
    y_shapes = []
    zpid_list = {}
    for file in filenames:
        if i % 100 == 0:
            sys.stdout.write('=')
            sys.stdout.flush()
        try:
            img = Image.open('imgs/' + file)
        except OSError:
            print('file unreadable')
            continue
        data = np.array(img)
        if data.shape != (300, 400, 3):  # skip if improper shape. most are 300 x 400
            continue
        zpid = file[:-4]
        if zpid in text_data.keys() and zpid in numeric_data.keys():
            zpid_list[zpid] = i
        else:
            continue
        x_shapes.append(data.shape[0])
        y_shapes.append(data.shape[1])
        img_arrays.append(data)
        i += 1
    sys.stdout.write('>\n')
    sys.stdout.flush()
    N = len(set(text_data.keys()) & set(numeric_data.keys()) & set(zpid_list.keys()))
    print('N is {}'.format(N))
    assert (N == len(img_arrays))
    ordered_descriptions = [''] * N
    ordered_addresses = [''] * N
    ordered_numeric_data = np.zeros((N, 4))
    for zpid in zpid_list.keys():
        index = zpid_list[zpid]
        ordered_descriptions[index] = text_data[zpid][0]
        ordered_addresses[index] = text_data[zpid][1]
        ordered_numeric_data[index] = numeric_data[zpid]
    imgs = np.zeros((N, 300, 400, 3))
    for i in range(N):
        imgs[i] = img_arrays[i]
    mean_img = np.mean(imgs, axis=0)
    np.save('data/img_data{}.npy'.format(batchnum), imgs)
    np.save('data/numeric_data{}.npy'.format(batchnum), ordered_numeric_data)
    with open('data/descriptions{}.txt'.format(batchnum), 'w') as of:
        for y in ordered_descriptions:
            of.write('{}\n'.format(repr(y)))
    with open('data/addresses{}.txt'.format(batchnum), 'w') as of:
        for y in ordered_addresses:
            of.write('{}\n'.format(repr(y[1:-1])))


def process_data_batch(filenames, text_data, numeric_data,
                       desired_shape=(299, 299, 3), verbose=True, mode='train'):
    i = 0
    img_arrays = []
    x_shapes = []
    y_shapes = []
    zpid_list = {}
    if verbose:
        print('Processing data batch')
    count = 0
    if verbose:
        sys.stdout.write('<')
        sys.stdout.flush()
    for file in filenames:
        if count % 100 == 0:
            if verbose:
                sys.stdout.write('=')
                sys.stdout.flush()
        count += 1
        try:
            folder = 'imgs/'
            if mode == 'val':
                folder = mode + '_' + folder
            if mode == 'test':
                folder = mode + '_' + folder
            img = Image.open(folder + file)
        except OSError:
            continue
        data = np.array(img)
        if data.shape != (300, 400, 3):  # skip if improper shape. most are 300 x 400
            continue
        zpid = file[:-4]
        if zpid not in text_data.keys() or zpid not in numeric_data.keys():
            continue
        x_shapes.append(data.shape[0])
        y_shapes.append(data.shape[1])
        assert (zpid not in zpid_list.keys())
        zpid_list[zpid] = i
        assert (i == len(img_arrays))
        img_arrays.append(data)
        i += 1
    assert (len(img_arrays) == len(zpid_list.keys()))
    if verbose:
        sys.stdout.write('>\n')
        sys.stdout.flush()
    N = len(zpid_list)
    if verbose:
        print('N is {}'.format(N))
    ordered_descriptions = [''] * N
    ordered_addresses = [''] * N
    n_numeric_features = len(next(iter(numeric_data.values())))
    ordered_numeric_data = np.zeros((N, n_numeric_features))
    for zpid in zpid_list.keys():
        index = zpid_list[zpid]
        ordered_descriptions[index] = text_data[zpid][0]
        ordered_addresses[index] = text_data[zpid][1]
        ordered_numeric_data[index] = numeric_data[zpid]
    imgs = np.zeros((N, desired_shape[0], desired_shape[1], desired_shape[2]),
                    dtype=np.uint8)
    for i in range(N):
        returned_image = util.crop(img_arrays[i],
                                   shape=(desired_shape[0], desired_shape[1]),
                                   random=True)
        imgs[i] = returned_image
    return imgs, ordered_numeric_data, ordered_descriptions, ordered_addresses


###########################
####### zip, beds, baths, price
###########################
def load_tabular_data():
    numeric_data = {}
    text_data = {}
    prices = []
    with open('tabular_data/scraped_data.csv', encoding='utf8', errors='replace') as f:
        lines = f.readlines()
        lines = lines[1:]
        for line in lines:
            sp = line.split(';,.')
            zpid, zip, price, beds, baths, descr, address = sp
            numeric_data[zpid] = (zip, beds, baths, price)
            text_data[zpid] = (descr, address)
            prices.append(float(price))
    return numeric_data, text_data, prices


def main():
    numeric_data = {}
    text_data = {}
    with open('tabular_data/scraped_data.csv') as f:
        lines = f.readlines()
        lines = lines[1:]
        for line in lines:
            sp = line.split(';,.')
            zpid, zip, price, beds, baths, descr, address = sp
            numeric_data[zpid] = (zip, beds, baths, price)
            text_data[zpid] = (descr, address)
    index = 0
    batch_size = 1000
    files = os.listdir('imgs/')
    while len(files) != 0:
        process_data_batch_and_write(index, files[:batch_size], text_data,
                                     numeric_data)
        files = files[batch_size:]
        index += 1
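For orientation, a hypothetical driver wiring the helpers above together; the batch slice is illustrative, while the imgs/ layout and return values come from the functions themselves:

# Hypothetical driver; assumes imgs/ and tabular_data/scraped_data.csv exist.
numeric_data, text_data, prices = load_tabular_data()
batch_files = os.listdir('imgs/')[:1000]
imgs, nums, descrs, addrs = process_data_batch(
    batch_files, text_data, numeric_data, mode='train')
print(imgs.shape, nums.shape)  # e.g. (N, 299, 299, 3) and (N, 4)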
[ "cgaffney@stanford.edu" ]
cgaffney@stanford.edu
4a08d92165fcd2b39439dfe0582d81ab8427ed10
9464f910f81cc5363c76f1b9e4a9290e89115812
/Problems/Lucky ticket/task.py
4e6912390a2dc3e0225543c6f2687614a0d94972
[]
no_license
sanqit/simply_chatty_bot
8bfb4052e2a4dbcac6cd782f0b2fedaea69ca52f
3ed5de7503fe8994d9e99313cb627fb0d4bf8621
refs/heads/master
2022-11-11T06:19:50.713198
2020-07-04T20:56:40
2020-07-04T20:56:40
277,183,691
0
0
null
null
null
null
UTF-8
Python
false
false
452
py
# Save the input in this variable
ticket = int(input())

# Add up the digits for each half
first = ticket % 10
ticket //= 10
second = ticket % 10
ticket //= 10
third = ticket % 10
ticket //= 10
half1 = first + second + third

first = ticket % 10
ticket //= 10
second = ticket % 10
ticket //= 10
third = ticket % 10
half2 = first + second + third

# Thanks to you, this code will work
if half1 == half2:
    print("Lucky")
else:
    print("Ordinary")
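For comparison, a string-based version of the same check; a sketch assuming the usual six-digit ticket, not part of the graded solution:

# Compare digit sums of the two halves via string slicing.
s = input().strip().zfill(6)
half1 = sum(int(d) for d in s[:3])
half2 = sum(int(d) for d in s[3:])
print("Lucky" if half1 == half2 else "Ordinary")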
[ "admin@sanqit.ru" ]
admin@sanqit.ru
6991166ac2811bf5b5871d798c5766c22ed204be
32cb0be487895629ad1184ea25e0076a43abba0a
/LifePictorial/top/api/rest/HotelRoomImgUploadRequest.py
ff545c17c529a9cad6b04e9031dcf6168e228996
[]
no_license
poorevil/LifePictorial
6814e447ec93ee6c4d5b0f1737335601899a6a56
b3cac4aa7bb5166608f4c56e5564b33249f5abef
refs/heads/master
2021-01-25T08:48:21.918663
2014-03-19T08:55:47
2014-03-19T08:55:47
null
0
0
null
null
null
null
UTF-8
Python
false
false
407
py
'''
Created by auto_sdk on 2014-02-10 16:59:30
'''
from top.api.base import RestApi


class HotelRoomImgUploadRequest(RestApi):
    def __init__(self, domain='gw.api.taobao.com', port=80):
        RestApi.__init__(self, domain, port)
        self.gid = None
        self.pic = None
        self.position = None

    def getapiname(self):
        return 'taobao.hotel.room.img.upload'

    def getMultipartParas(self):
        return ['pic']
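A hypothetical usage sketch: gid, pic, and position are the fields declared above, while the credential setup and the getResponse() call are assumptions about the stock Taobao Python SDK base class and should be checked against top/api/base.py:

# All values are placeholders; verify the call pattern against the SDK base.
req = HotelRoomImgUploadRequest()
req.gid = 123456                  # room type id (placeholder)
req.pic = open('room.jpg', 'rb')  # multipart field, per getMultipartParas()
req.position = 1
# resp = req.getResponse()        # assumed base-class method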
[ "poorevil@gmail.com" ]
poorevil@gmail.com
774d7f3f9294171b6783ee1dfd78785c51cde38f
e531605ed73a42af958143e2f80f4f721d2ac810
/tf2cv.py
332a9a325e5a55bd736ac0e36f6f290439889616
[ "MIT" ]
permissive
markgordon/keras-yolo3
2fadf44c872cc911afeea710d89830f7b05b39fc
53fb083523d1c728b3a5bbf4b5801fb2bfe842fa
refs/heads/master
2020-12-15T02:46:53.808418
2020-02-13T22:44:59
2020-02-13T22:44:59
234,970,107
0
0
MIT
2020-01-19T21:23:09
2020-01-19T21:23:08
null
UTF-8
Python
false
false
947
py
#! /usr/bin/env python
import argparse
import os

import tensorflow as tf
from tensorflow.keras.models import load_model
import tensorflow_model_optimization as tmot

parser = argparse.ArgumentParser(description='Keras to TFLite converter.')
parser.add_argument('input_path', help='Path to the trained Keras model.')
parser.add_argument('output_path', help='Path to write the TFLite model.')


def _main(args):
    input_path = os.path.expanduser(args.input_path)
    output_path = os.path.expanduser(args.output_path)
    # Load the pruned Keras model and remove the pruning wrappers.
    model = load_model(input_path)
    model = tmot.sparsity.keras.strip_pruning(model)
    # Convert the model to TFLite and write it out.
    converter = tf.lite.TFLiteConverter.from_keras_model(model)
    tflite_model = converter.convert()
    open(output_path, "wb").write(tflite_model)


if __name__ == '__main__':
    _main(parser.parse_args())
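Expected invocation, given the two positional arguments defined above (filenames are illustrative):

# python tf2cv.py trained_and_pruned.h5 model.tflite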
[ "x@y.z" ]
x@y.z
55092738abbf74b5764225191343917f20bb273f
b9c2d9bc099ebae0b9e7b1e9c538701ba41a235e
/experiments/tiered-ImageNet_DeepEMD/FRN/ResNet-12_pretrain/train.py
e4dbf15991344986fd6192a0f8d8ae0c08891c2b
[ "MIT" ]
permissive
Jf-Chen/FRN-main
9e3eeba4ec23e450f05f37e69db766ec5ce7f805
5b57b9e0d7368058a8e3ba41a53c460b54ab9b91
refs/heads/main
2023-08-30T12:29:03.178309
2021-10-26T08:25:38
2021-10-26T08:25:38
419,918,858
0
0
null
null
null
null
UTF-8
Python
false
false
1,047
py
import os
import sys
import torch
import yaml
from functools import partial

sys.path.append('../../../../')
from trainers import trainer, frn_train
from datasets import dataloaders
from models.FRN import FRN

args = trainer.train_parser()

with open('../../../../config.yml', 'r') as f:
    temp = yaml.safe_load(f)
data_path = os.path.abspath(temp['data_path'])
fewshot_path = os.path.join(data_path, 'tiered-ImageNet_DeepEMD')

pm = trainer.Path_Manager(fewshot_path=fewshot_path, args=args)

train_loader = dataloaders.normal_train_dataloader(
    data_path=pm.train,
    batch_size=args.batch_size,
    transform_type=args.train_transform_type)

num_cat = len(train_loader.dataset.classes)

model = FRN(is_pretraining=True, num_cat=num_cat, resnet=args.resnet)

train_func = partial(frn_train.pre_train, train_loader=train_loader)

tm = trainer.Train_Manager(args, path_manager=pm, train_func=train_func)

tm.train(model)
tm.evaluate(model)
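A hypothetical launch command; the real flag names live in trainer.train_parser(), which is not shown here, so the attribute names read above (batch_size, train_transform_type, resnet) are only a guess at the CLI:

# Illustrative only; check trainer.train_parser() for the actual flags.
# python train.py --batch_size 128 --train_transform_type standard --resnet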
[ "485133380@qq.com" ]
485133380@qq.com