blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
172380e1b290156ef5d92275a7dc119164029ab0 | b7319b205a7c4d3b7987659b725f9d3e8619fece | /minifloorhockey/wsgi.py | 07608fd343ff71b542e744f040fd81c079b7c0b3 | [] | no_license | bhobbs20/mhl | 0845fbf4c14c6bdfa9adcdd86fdaf753758670e9 | cfc38c14fc3626b127ad3a924bf13bc951c27f0c | refs/heads/master | 2023-02-02T15:31:39.782993 | 2020-12-16T17:25:01 | 2020-12-16T17:25:01 | 322,051,668 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 407 | py | """
WSGI config for minifloorhockey project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's settings module before the application is
# built; setdefault keeps any value already provided by the environment.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'minifloorhockey.settings')
# Module-level WSGI callable that servers (gunicorn, mod_wsgi, ...) import.
application = get_wsgi_application()
| [
"brianhobbs216@gmail.com"
] | brianhobbs216@gmail.com |
d7d4357cc15cd84e35e910f27de3f508d5757536 | b1bff5f49d017463da4a71bd723382ca617c3bf4 | /src/spaceone/inventory/connector/__init__.py | 5c28ad447c1ccf6da0e661e2a61244e4e5893a62 | [
"Apache-2.0"
] | permissive | khl6235/plugin-azure-vm | 5af286a8391c2bab9822d9e062b1b9a1f7eb3f9d | fb7eef1c440b0d487c9277ed01fbf1c6a752da3d | refs/heads/master | 2023-01-05T13:48:29.085468 | 2020-11-05T07:30:36 | 2020-11-05T07:30:36 | 294,640,540 | 0 | 0 | Apache-2.0 | 2020-09-11T08:38:35 | 2020-09-11T08:38:35 | null | UTF-8 | Python | false | false | 76 | py | from spaceone.inventory.connector.azure_vm_connector import AzureVMConnector | [
"bluese@megazone.com"
] | bluese@megazone.com |
7e2567e4a20aa02342cb0de0b8f2bcc6123bc9b2 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/380/usersdata/318/93006/submittedfiles/principal.py | 0320151c54c0035526eea0587c53dbe5b2639e6f | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 277 | py | # -*- coding: utf-8 -*-
#COMECE AQUI ABAIXO
# Read how many grades will be entered, then the grades themselves.
quantidade = int(input('Digite a quantidade de notas :'))
notas = [float(input('Digite a nota%d: ' % (i + 1))) for i in range(quantidade)]

# Accumulate each grade's contribution separately (same floating-point
# rounding order as a running sum of nota/n terms).
media = 0
for nota in notas:
    media += nota / float(quantidade)

print(notas)
print(media)
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
9f5244ca9c999f852f87ed9ccd1c944c0a36fbe3 | 5963c12367490ffc01c9905c028d1d5480078dec | /homeassistant/components/verisure/config_flow.py | 6c2822896e6da67af964a623bce98bdcb9867099 | [
"Apache-2.0"
] | permissive | BenWoodford/home-assistant | eb03f73165d11935e8d6a9756272014267d7d66a | 2fee32fce03bc49e86cf2e7b741a15621a97cce5 | refs/heads/dev | 2023-03-05T06:13:30.354545 | 2021-07-18T09:51:53 | 2021-07-18T09:51:53 | 117,122,037 | 11 | 6 | Apache-2.0 | 2023-02-22T06:16:51 | 2018-01-11T16:10:19 | Python | UTF-8 | Python | false | false | 6,876 | py | """Config flow for Verisure integration."""
from __future__ import annotations
from typing import Any
from verisure import (
Error as VerisureError,
LoginError as VerisureLoginError,
ResponseError as VerisureResponseError,
Session as Verisure,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from .const import (
CONF_GIID,
CONF_LOCK_CODE_DIGITS,
CONF_LOCK_DEFAULT_CODE,
DEFAULT_LOCK_CODE_DIGITS,
DOMAIN,
LOGGER,
)
class VerisureConfigFlowHandler(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Verisure."""
    VERSION = 1
    # State shared between flow steps: credentials collected in the user step,
    # the giid->label map of installations, and (reauth only) the entry being
    # re-authenticated.
    email: str
    entry: ConfigEntry
    installations: dict[str, str]
    password: str
    @staticmethod
    @callback
    def async_get_options_flow(config_entry: ConfigEntry) -> VerisureOptionsFlowHandler:
        """Get the options flow for this handler."""
        return VerisureOptionsFlowHandler(config_entry)
    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}
        if user_input is not None:
            verisure = Verisure(
                username=user_input[CONF_EMAIL], password=user_input[CONF_PASSWORD]
            )
            try:
                # The verisure client is blocking; run login in the executor.
                await self.hass.async_add_executor_job(verisure.login)
            except VerisureLoginError as ex:
                LOGGER.debug("Could not log in to Verisure, %s", ex)
                errors["base"] = "invalid_auth"
            except (VerisureError, VerisureResponseError) as ex:
                LOGGER.debug("Unexpected response from Verisure, %s", ex)
                errors["base"] = "unknown"
            else:
                # Stash credentials and discovered installations for the next step.
                self.email = user_input[CONF_EMAIL]
                self.password = user_input[CONF_PASSWORD]
                self.installations = {
                    inst["giid"]: f"{inst['alias']} ({inst['street']})"
                    for inst in verisure.installations
                }
                return await self.async_step_installation()
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_EMAIL): str,
                    vol.Required(CONF_PASSWORD): str,
                }
            ),
            errors=errors,
        )
    async def async_step_installation(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Select Verisure installation to add."""
        # With a single installation there is nothing to choose: auto-select it.
        if len(self.installations) == 1:
            user_input = {CONF_GIID: list(self.installations)[0]}
        if user_input is None:
            return self.async_show_form(
                step_id="installation",
                data_schema=vol.Schema(
                    {vol.Required(CONF_GIID): vol.In(self.installations)}
                ),
            )
        # The giid uniquely identifies an installation; abort if already set up.
        await self.async_set_unique_id(user_input[CONF_GIID])
        self._abort_if_unique_id_configured()
        return self.async_create_entry(
            title=self.installations[user_input[CONF_GIID]],
            data={
                CONF_EMAIL: self.email,
                CONF_PASSWORD: self.password,
                CONF_GIID: user_input[CONF_GIID],
            },
        )
    async def async_step_reauth(self, data: dict[str, Any]) -> FlowResult:
        """Handle initiation of re-authentication with Verisure."""
        self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
        return await self.async_step_reauth_confirm()
    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Handle re-authentication with Verisure."""
        errors: dict[str, str] = {}
        if user_input is not None:
            verisure = Verisure(
                username=user_input[CONF_EMAIL], password=user_input[CONF_PASSWORD]
            )
            try:
                await self.hass.async_add_executor_job(verisure.login)
            except VerisureLoginError as ex:
                LOGGER.debug("Could not log in to Verisure, %s", ex)
                errors["base"] = "invalid_auth"
            except (VerisureError, VerisureResponseError) as ex:
                LOGGER.debug("Unexpected response from Verisure, %s", ex)
                errors["base"] = "unknown"
            else:
                # Update only the credential keys on the existing entry, then
                # reload it so the integration picks up the new login.
                data = self.entry.data.copy()
                self.hass.config_entries.async_update_entry(
                    self.entry,
                    data={
                        **data,
                        CONF_EMAIL: user_input[CONF_EMAIL],
                        CONF_PASSWORD: user_input[CONF_PASSWORD],
                    },
                )
                self.hass.async_create_task(
                    self.hass.config_entries.async_reload(self.entry.entry_id)
                )
                return self.async_abort(reason="reauth_successful")
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_EMAIL, default=self.entry.data[CONF_EMAIL]): str,
                    vol.Required(CONF_PASSWORD): str,
                }
            ),
            errors=errors,
        )
class VerisureOptionsFlowHandler(OptionsFlow):
    """Handle Verisure options."""

    def __init__(self, entry: ConfigEntry) -> None:
        """Initialize Verisure options flow."""
        self.entry = entry

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Manage Verisure options."""
        errors: dict[str, str] = {}

        if user_input is not None:
            default_code = user_input[CONF_LOCK_DEFAULT_CODE]
            # An empty default code is allowed; otherwise its length must
            # match the configured number of lock-code digits.
            if len(default_code) in (0, user_input[CONF_LOCK_CODE_DIGITS]):
                return self.async_create_entry(title="", data=user_input)
            errors["base"] = "code_format_mismatch"

        options_schema = vol.Schema(
            {
                vol.Optional(
                    CONF_LOCK_CODE_DIGITS,
                    default=self.entry.options.get(
                        CONF_LOCK_CODE_DIGITS, DEFAULT_LOCK_CODE_DIGITS
                    ),
                ): int,
                vol.Optional(
                    CONF_LOCK_DEFAULT_CODE,
                    default=self.entry.options.get(CONF_LOCK_DEFAULT_CODE),
                ): str,
            }
        )
        return self.async_show_form(
            step_id="init", data_schema=options_schema, errors=errors
        )
| [
"noreply@github.com"
] | BenWoodford.noreply@github.com |
13d692809403a8b9ca30618e1f0d1aa33761a670 | 548c26cc8e68c3116cecaf7e5cd9aadca7608318 | /payments/paybackpg.py | 9cd0a718e255d26e31ca460a5e6df8742267d7db | [] | no_license | Morphnus-IT-Solutions/riba | b69ecebf110b91b699947b904873e9870385e481 | 90ff42dfe9c693265998d3182b0d672667de5123 | refs/heads/master | 2021-01-13T02:18:42.248642 | 2012-09-06T18:20:26 | 2012-09-06T18:20:26 | 4,067,896 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,487 | py | import urllib, urllib2
import hashlib
from django.utils import simplejson
import logging
from django.conf import settings
log = logging.getLogger('request')
CATALOG_NAME = {
'Future Bazaar': 'futurebazaar',
'Ezone': 'ezoneonline',
}
def create_request(payment_attempt, request):
    """Build a 'create_new' payback request and POST it to the Khazana server.

    Points are awarded at 4x the payment amount; the catalog name is looked
    up from the requesting client's display name.
    """
    log.info("Entered create_request")
    payload = {
        'action': 'create_new',
        'gateway': 'payback',
        'transactionId': payment_attempt.id,
        'sessionId': payment_attempt.id,
        'catalogName': CATALOG_NAME.get(request.client.client.name),
        'points': "%i" % (4 * payment_attempt.amount),
    }
    return get_response(settings.KHAZANA_SERVER_URL, payload)
def process_response(payment_attempt, rawdata):
    """Log the gateway's raw response for auditing and return it unchanged."""
    log.info('Payback process response data: %s' % rawdata)
    return rawdata
def get_response(url, data):
    """
    POST `data` as a JSON body to `url` and return the decoded JSON response.

    NOTE(review): on IOError the error is logged but the function implicitly
    returns None -- callers must be prepared for a None result.
    """
    try:
        headers = {'Content-Type':'application/json; charset=UTF-8'}
        data = simplejson.dumps(data)
        req = urllib2.Request(url, data, headers)
        # Empty ProxyHandler on purpose: bypass any environment proxy settings.
        proxy_support = urllib2.ProxyHandler({})
        opener = urllib2.build_opener(proxy_support)
        res = opener.open(req)
        res_data = res.read()
        log.info('Payment request data: %s' % data)
        log.info('Got response for create payback payment request: %s' % res_data)
        return simplejson.loads(res_data)
    except IOError, e:
        log.exception('Error creating payback payment request %s' % repr(e))
"dalal.saumil@gmail.com"
] | dalal.saumil@gmail.com |
6add2f9d7701f8ca136f8bba16cbb839f0137be4 | 0cce9a9d9b9da4a820e9ed5fc674d06f0be9810a | /motorcycles2.py | 4defe2c1445f96a7e9c0c395f7187ccc52b4b112 | [] | no_license | wbroach/python_work | 3f4a85e998805f50b2400e64c5b7cbc31780b245 | 7e1842b317539d61bab0f04d72e71db893c865ff | refs/heads/master | 2020-04-14T22:06:43.164595 | 2019-10-02T01:25:00 | 2019-10-02T01:25:00 | 164,151,112 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 283 | py | motorcycles = []
# Populate the list with the three starting makes, one append per make.
for make in ('honda', 'yamaha', 'suzuki'):
    motorcycles.append(make)

print('The original list is as follows:')
print(motorcycles)
print(motorcycles[2])

# Delete from the front three times, emptying the list.
for _ in range(3):
    del motorcycles[0]

print(len(motorcycles))
| [
"someone@someplace.com"
] | someone@someplace.com |
ce33aec59d60accb1c87fea41a2e45aa5d5dde37 | 91824d746654fe12881b4fc3b55c553aae0d22ac | /py/brick-wall.py | df97ef507ec7e7482058e07765b8af5889315529 | [
"Apache-2.0"
] | permissive | ckclark/leetcode | a1a173c67a36a3256b198f853fcd3d15aa5abbb7 | 844c6f18d06dcb397db76436e5f4b8ddcb1beddc | refs/heads/master | 2021-01-15T08:14:43.368516 | 2020-02-14T07:25:05 | 2020-02-14T07:30:10 | 42,386,911 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 523 | py | from collections import Counter
class Solution(object):
    def leastBricks(self, wall):
        """
        :type wall: List[List[int]]
        :rtype: int

        Count, for every horizontal offset, how many rows have a brick edge
        there; a vertical line through the most popular interior edge crosses
        the fewest bricks (LeetCode 554).
        """
        edge_counts = Counter()
        total_width = sum(wall[0])
        best_aligned = 0
        for row in wall:
            offset = 0
            for brick_width in row:
                offset += brick_width
                edge_counts[offset] += 1
                # The wall's right border is not a usable edge.
                if offset < total_width and edge_counts[offset] > best_aligned:
                    best_aligned = edge_counts[offset]
        return len(wall) - best_aligned
| [
"clark.ck@gmail.com"
] | clark.ck@gmail.com |
b6275e78d3a511285b135c3981a04566ab50cb91 | 25fb2f88b9d6e1d01806c0e90f3fb02bc82e1ce6 | /wisdem/postprocessing/wisdem_get.py | 304169be70583a413ee5b496fe6889c90ec169be | [
"Apache-2.0"
] | permissive | johnjasa/WISDEM | 6b607ea8c3ef1aab8065b82e75e53e5c12fa2cd3 | a4571e71cb5b9869c81790f8abb1bb7fba8fdb02 | refs/heads/master | 2022-07-01T16:32:09.003230 | 2021-11-04T16:24:05 | 2021-11-04T16:24:05 | 250,635,279 | 0 | 0 | NOASSERTION | 2020-03-27T20:04:35 | 2020-03-27T20:04:35 | null | UTF-8 | Python | false | false | 2,140 | py | import numpy as np
def is_floating(prob):
    """Return True when the WISDEM problem is configured as a floating turbine."""
    modeling_opts = prob.model.options["modeling_options"]
    return modeling_opts["flags"]["floating"]
def get_tower_diameter(prob):
    """Tower outer-diameter schedule from the active (floating or fixed) model."""
    if is_floating(prob):
        return prob["floatingse.tower.outer_diameter"]
    else:
        return prob["towerse.tower_outer_diameter"]
def get_tower_thickness(prob):
    """Tower wall-thickness schedule from the active model."""
    if is_floating(prob):
        return prob["floatingse.tower.wall_thickness"]
    else:
        return prob["towerse.tower_wall_thickness"]
def get_zpts(prob):
    """Vertical (z) coordinates of the tower section boundaries."""
    if is_floating(prob):
        return prob["floatingse.tower.z_param"]
    else:
        return prob["towerse.z_param"]
def get_section_height(prob):
    """Height of each tower section (successive differences of the z points)."""
    return np.diff(get_zpts(prob))
def get_transition_height(prob):
    """Transition-piece height.

    Floating: last entry of the transition node coordinates (note the
    'floating.' prefix, not 'floatingse.'). Fixed-bottom: the towerse value.
    """
    if is_floating(prob):
        return prob["floating.transition_node"][-1]
    else:
        return prob["towerse.transition_piece_height"]
def get_tower_E(prob):
    """Tower material elastic modulus from the active model."""
    if is_floating(prob):
        return prob["floatingse.tower.E"]
    else:
        return prob["towerse.E"]
def get_tower_G(prob):
    """Tower material shear modulus from the active model."""
    if is_floating(prob):
        return prob["floatingse.tower.G"]
    else:
        return prob["towerse.G"]
def get_tower_rho(prob):
    """Tower material density from the active model."""
    if is_floating(prob):
        return prob["floatingse.tower.rho"]
    else:
        return prob["towerse.rho"]
def get_tower_mass(prob):
    """Tower mass from the active model."""
    if is_floating(prob):
        return prob["floatingse.tower_mass"]
    else:
        return prob["towerse.tower_mass"]
def get_tower_cost(prob):
    """Tower cost from the active model."""
    if is_floating(prob):
        return prob["floatingse.tower_cost"]
    else:
        return prob["towerse.tower_cost"]
def get_structural_mass(prob):
    """Total structural mass (tower plus support structure) from the active model."""
    if is_floating(prob):
        return prob["floatingse.tower.structural_mass"]
    else:
        return prob["towerse.structural_mass"]
def get_tower_freqs(prob):
    """Tower structural natural frequencies from the active model."""
    if is_floating(prob):
        return prob["floatingse.tower_freqs"]
    else:
        return prob["towerse.tower.structural_frequencies"]
def get_tower_cm(prob):
    """Tower center of mass from the active model."""
    if is_floating(prob):
        return prob["floatingse.tower_center_of_mass"]
    else:
        return prob["towerse.tower_center_of_mass"]
def get_tower_cg(prob):
    """Alias for get_tower_cm (center of gravity == center of mass here)."""
    return get_tower_cm(prob)
"garrett.barter@nrel.gov"
] | garrett.barter@nrel.gov |
fab6ab35bfc80d947cd776625d28511378d75d93 | a7596165a29e5186bc6c4718e3b6e835939b105d | /apps/beeswax/src/beeswax/api.py | 9382deb6bcf152c4a5dbd8418453aae81c8a8de7 | [
"Apache-2.0"
] | permissive | lockhart39/HueQualityAndIngestionApp | f0c778665f0fbe699ec30e0df5e9f3ed8a9c3384 | c75e55a43a8bdeb7aa0f5bf2101ec72b01dcac1c | refs/heads/master | 2021-08-20T00:31:29.481333 | 2017-11-27T19:22:16 | 2017-11-27T19:22:16 | 112,237,923 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 30,932 | py | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import json
import re
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.http import Http404
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_POST
from thrift.transport.TTransport import TTransportException
from desktop.context_processors import get_app_name
from desktop.lib.django_util import JsonResponse
from desktop.lib.exceptions import StructuredThriftTransportException
from desktop.lib.exceptions_renderable import PopupException
from desktop.lib.i18n import force_unicode
from desktop.lib.parameterization import substitute_variables
from metastore import parser
from notebook.models import escape_rows
import beeswax.models
from beeswax.data_export import upload
from beeswax.design import HQLdesign
from beeswax.conf import USE_GET_LOG_API
from beeswax.forms import QueryForm
from beeswax.models import Session, QueryHistory
from beeswax.server import dbms
from beeswax.server.dbms import expand_exception, get_query_server_config, QueryServerException, QueryServerTimeoutException
from beeswax.views import authorized_get_design, authorized_get_query_history, make_parameterization_form,\
safe_get_design, save_design, massage_columns_for_json, _get_query_handle_and_state, \
_parse_out_hadoop_jobs
LOG = logging.getLogger(__name__)
def error_handler(view_fn):
  """
  Decorator for beeswax API views: convert any exception into a JSON payload.

  Http404 is re-raised untouched. Other exceptions become
  {'status': -1, 'message': ...}; a few known noisy errors are tagged with
  status 2 so the frontend silences them. When the failure indicates an
  expired server-side handle, the matching QueryHistory row is marked
  expired so later polls stop retrying it.
  """
  def decorator(request, *args, **kwargs):
    try:
      return view_fn(request, *args, **kwargs)
    except Http404, e:
      raise e
    except Exception, e:
      LOG.exception('error in %s' % view_fn)
      if not hasattr(e, 'message') or not e.message:
        message = str(e)
      else:
        message = force_unicode(e.message, strings_only=True, errors='replace')
      # 'Invalid OperationHandle' means the server no longer knows this query:
      # flag the history entry as expired so the UI stops polling it.
      if 'Invalid OperationHandle' in message and 'id' in kwargs:
        # Expired state.
        query_history = authorized_get_query_history(request, kwargs['id'], must_exist=False)
        if query_history:
          query_history.set_to_expired()
          query_history.save()
      response = {
        'status': -1,
        'message': message,
      }
      # Known transient/noise errors (e.g. SQLite lock contention) are tagged
      # so the frontend does not surface them to the user.
      if re.search('database is locked|Invalid query handle|not JSON serializable', message, re.IGNORECASE):
        response['status'] = 2 # Frontend will not display this type of error
        LOG.warn('error_handler silencing the exception: %s' % e)
      return JsonResponse(response)
  return decorator
@error_handler
def autocomplete(request, database=None, table=None, column=None, nested=None):
  """Return metadata (databases/tables/columns) for SQL autocompletion as JSON."""
  app_name = get_app_name(request)
  query_server = get_query_server_config(app_name)

  # Superusers (or users holding the security app's impersonate permission)
  # may browse metadata on behalf of another user via ?doas=<username>.
  do_as = request.user
  can_impersonate = request.user.is_superuser or request.user.has_hue_permission(action="impersonate", app="security")
  if can_impersonate and 'doas' in request.GET:
    do_as = User.objects.get(username=request.GET.get('doas'))

  db = dbms.get(do_as, query_server)
  return JsonResponse(_autocomplete(db, database, table, column, nested))
def _autocomplete(db, database=None, table=None, column=None, nested=None):
  """
  Fetch metadata at a depth determined by the arguments given: no database ->
  list databases; no table -> list tables; no column -> describe the table;
  otherwise describe the column, optionally drilling into a `nested` complex
  type, and attach sample values for scalar types.

  Returns a dict; on failure it carries 'code' (503 for timeouts/transport
  errors, 500 otherwise) and 'error' instead of the data keys.
  """
  response = {}
  try:
    if database is None:
      response['databases'] = db.get_databases()
    elif table is None:
      tables_meta = db.get_tables_meta(database=database)
      response['tables_meta'] = tables_meta
    elif column is None:
      t = db.get_table(database, table)
      response['hdfs_link'] = t.hdfs_link
      response['columns'] = [column.name for column in t.cols]
      response['extended_columns'] = massage_columns_for_json(t.cols)
      response['partition_keys'] = [{'name': part.name, 'type': part.type} for part in t.partition_keys]
    else:
      col = db.get_column(database, table, column)
      if col:
        parse_tree = parser.parse_column(col.name, col.type, col.comment)
        if nested:
          parse_tree = _extract_nested_type(parse_tree, nested)
        response = parse_tree
        # If column or nested type is scalar/primitive, add sample of values
        if parser.is_scalar_type(parse_tree['type']):
          table_obj = db.get_table(database, table)
          sample = db.get_sample(database, table_obj, column, nested)
          if sample:
            # Deduplicate and sort the sampled values for display.
            sample = set([row[0] for row in sample.rows()])
            response['sample'] = sorted(list(sample))
      else:
        raise Exception('Could not find column `%s`.`%s`.`%s`' % (database, table, column))
  except (QueryServerTimeoutException, TTransportException), e:
    # Query server overloaded or unreachable: report as service unavailable.
    response['code'] = 503
    response['error'] = e.message
  except Exception, e:
    LOG.warn('Autocomplete data fetching error: %s' % e)
    response['code'] = 500
    response['error'] = e.message
  return response
@error_handler
def parameters(request, design_id=None):
  """List the parameterization variables found in the posted query text."""
  response = {'status': -1, 'message': ''}

  # Use POST request to not confine query length. (A non-POST request only
  # sets the message; the parameters are still computed, as before.)
  if request.method != 'POST':
    response['message'] = _('A POST request is required.')

  form_cls = make_parameterization_form(request.POST.get('query-query', ''))
  if not form_cls:
    response['parameters'] = []
  else:
    form = form_cls(prefix="parameterization")
    response['parameters'] = [{'parameter': field.html_name, 'name': field.name} for field in form]
  response['status'] = 0

  return JsonResponse(response)
@error_handler
def execute_directly(request, query, design, query_server, tablename=None, **kwargs):
  """
  Submit a query to the query server and return its new history entry as JSON.

  `query` is an HQLdesign; `design`, when given, links the run to a saved
  design; 'parameters' in kwargs records substituted variable values on the
  history entry. The response carries the history id, its watch URL and the
  first statement being executed.
  """
  if design is not None:
    design = authorized_get_design(request, design.id)
  parameters = kwargs.pop('parameters', None)

  db = dbms.get(request.user, query_server)
  # Switch to the design's database (default: 'default') before executing.
  database = query.query.get('database', 'default')
  db.use(database)

  history_obj = db.execute_query(query, design)
  watch_url = reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': history_obj.id})

  if parameters is not None:
    history_obj.update_extra('parameters', parameters)
    history_obj.save()

  response = {
    'status': 0,
    'id': history_obj.id,
    'watch_url': watch_url,
    'statement': history_obj.get_current_statement(),
    'is_redacted': history_obj.is_redacted
  }

  return JsonResponse(response)
@error_handler
def watch_query_refresh_json(request, id):
  """
  Poll a running query: refresh its state, advance multi-statement queries
  when needed, and return the latest log, Hadoop job ids and status as JSON.

  POST parameters: 'next' forces moving to the next statement, 'query-query'
  carries the full query text for that case, and 'log-start-over' ('true')
  restarts log fetching from the beginning.
  """
  query_history = authorized_get_query_history(request, id, must_exist=True)
  db = dbms.get(request.user, query_history.get_query_server_config())

  if not request.POST.get('next'): # We need this as multi query would fail as current query is closed
    handle, state = _get_query_handle_and_state(query_history)
    query_history.save_state(state)

  # Go to next statement if asked to continue or when a statement with no dataset finished.
  try:
    if request.POST.get('next') or (not query_history.is_finished() and query_history.is_success() and not query_history.has_results):
      close_operation(request, id)
      query_history = db.execute_next_statement(query_history, request.POST.get('query-query'))
      handle, state = _get_query_handle_and_state(query_history)
  except QueryServerException, ex:
    raise ex
  except Exception, ex:
    # Advancing failed for a non-server reason: log and fall back to the
    # current statement's handle so polling can continue.
    LOG.exception(ex)
    handle, state = _get_query_handle_and_state(query_history)

  try:
    start_over = request.POST.get('log-start-over') == 'true'
    log = db.get_log(handle, start_over=start_over)
  except Exception, ex:
    # Logs are best-effort: surface the fetch error as the log content.
    log = str(ex)

  jobs = _parse_out_hadoop_jobs(log)
  job_urls = massage_job_urls_for_json(jobs)

  result = {
    'status': -1,
    'log': log,
    'jobs': jobs,
    'jobUrls': job_urls,
    'isSuccess': query_history.is_success(),
    'isFailure': query_history.is_failure(),
    'id': id,
    'statement': query_history.get_current_statement(),
    'watch_url': reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': query_history.id}),
    'oldLogsApi': USE_GET_LOG_API.get()
  }

  # Run time error
  if query_history.is_failure():
    res = db.get_operation_status(handle)
    if query_history.is_canceled(res):
      # A user-cancelled query is not reported as an error.
      result['status'] = 0
    elif hasattr(res, 'errorMessage') and res.errorMessage:
      result['message'] = res.errorMessage
    else:
      result['message'] = _('Bad status for request %s:\n%s') % (id, res)
  else:
    result['status'] = 0

  return JsonResponse(result)
def massage_job_urls_for_json(jobs):
  """Map each Hadoop job id to a dict with its name and Job Browser URL."""
  return [
    {
      'name': job,
      'url': reverse('jobbrowser.views.single_job', kwargs={'job': job})
    }
    for job in jobs
  ]
@error_handler
def close_operation(request, query_history_id):
  """Close the server-side handle of a query and mark its history entry expired."""
  response = {'status': -1, 'message': ''}

  if request.method != 'POST':
    response['message'] = _('A POST request is required.')
    return JsonResponse(response)

  query_history = authorized_get_query_history(request, query_history_id, must_exist=True)
  # Close the operation as the query's owner, not the requesting user.
  db = dbms.get(query_history.owner, query_history.get_query_server_config())
  db.close_operation(query_history.get_handle())

  query_history.set_to_expired()
  query_history.save()

  response['status'] = 0
  return JsonResponse(response)
@error_handler
def explain_directly(request, query_server, query):
  """Run EXPLAIN for the query's first statement and return the textual plan."""
  explanation = dbms.get(request.user, query_server).explain(query)
  return JsonResponse({
    'status': 0,
    'explanation': explanation.textual,
    'statement': query.get_query_statement(0),
  })
@error_handler
def execute(request, design_id=None):
  """
  Validate, save and run (or EXPLAIN, with ?explain=true) the posted query.

  Parameterized queries are expanded through a generated parameterization
  form before submission. Delegates to execute_directly/explain_directly on
  success; validation failures are returned as {'errors': ...} payloads.
  """
  response = {'status': -1, 'message': ''}

  if request.method != 'POST':
    response['message'] = _('A POST request is required.')

  app_name = get_app_name(request)
  query_server = get_query_server_config(app_name)
  query_type = beeswax.models.SavedQuery.TYPES_MAPPING[app_name]
  design = safe_get_design(request, query_type, design_id)

  try:
    query_form = get_query_form(request)

    if query_form.is_valid():
      query_str = query_form.query.cleaned_data["query"]
      explain = request.GET.get('explain', 'false').lower() == 'true'
      # Auto-save the design (not explicitly, hence the False) before running.
      design = save_design(request, query_form, query_type, design, False)

      if query_form.query.cleaned_data['is_parameterized']:
        # Parameterized query
        parameterization_form_cls = make_parameterization_form(query_str)
        if parameterization_form_cls:
          parameterization_form = parameterization_form_cls(request.REQUEST, prefix="parameterization")
          if parameterization_form.is_valid():
            parameters = parameterization_form.cleaned_data
            real_query = substitute_variables(query_str, parameters)
            query = HQLdesign(query_form, query_type=query_type)
            # Replace the templated text with the substituted statement.
            query._data_dict['query']['query'] = real_query

            try:
              if explain:
                return explain_directly(request, query_server, query)
              else:
                return execute_directly(request, query, design, query_server, parameters=parameters)
            except Exception, ex:
              db = dbms.get(request.user, query_server)
              error_message, log = expand_exception(ex, db)
              response['message'] = error_message
              return JsonResponse(response)
          else:
            response['errors'] = parameterization_form.errors
            return JsonResponse(response)

      # Non-parameterized query
      query = HQLdesign(query_form, query_type=query_type)
      if request.GET.get('explain', 'false').lower() == 'true':
        return explain_directly(request, query_server, query)
      else:
        return execute_directly(request, query, design, query_server)
    else:
      response['message'] = _('There was an error with your query.')
      response['errors'] = {
        'query': [query_form.query.errors],
        'settings': query_form.settings.errors,
        'file_resources': query_form.file_resources.errors,
        'functions': query_form.functions.errors,
      }
  except RuntimeError, e:
    response['message']= str(e)

  return JsonResponse(response)
@error_handler
def save_query_design(request, design_id=None):
  """
  Explicitly save the posted query as a design (creating or updating
  `design_id`) and return its id; form validation failures are returned
  under 'errors'.
  """
  response = {'status': -1, 'message': ''}

  if request.method != 'POST':
    response['message'] = _('A POST request is required.')

  app_name = get_app_name(request)
  query_type = beeswax.models.SavedQuery.TYPES_MAPPING[app_name]
  design = safe_get_design(request, query_type, design_id)

  try:
    query_form = get_query_form(request)

    if query_form.is_valid():
      # True marks this as an explicit (user-initiated) save.
      design = save_design(request, query_form, query_type, design, True)

      response['design_id'] = design.id
      response['status'] = 0
    else:
      response['errors'] = {
        'query': [query_form.query.errors],
        'settings': query_form.settings.errors,
        'file_resources': query_form.file_resources.errors,
        'functions': query_form.functions.errors,
        'saveform': query_form.saveform.errors,
      }
  except RuntimeError, e:
    response['message'] = str(e)

  return JsonResponse(response)
@error_handler
def fetch_saved_design(request, design_id):
  """Return a saved query design serialized as JSON."""
  # A non-GET request only gets a warning message; the design is still
  # fetched, matching the endpoint's historical behavior.
  message = '' if request.method == 'GET' else _('A GET request is required.')

  query_type = beeswax.models.SavedQuery.TYPES_MAPPING[get_app_name(request)]
  design = safe_get_design(request, query_type, design_id)

  return JsonResponse({
    'status': 0,
    'message': message,
    'design': design_to_dict(design),
  })
@error_handler
def fetch_query_history(request, query_history_id):
  """Return a single query history entry serialized as JSON."""
  # A non-GET request only gets a warning message; the lookup still happens,
  # matching the endpoint's historical behavior.
  message = '' if request.method == 'GET' else _('A GET request is required.')

  history = authorized_get_query_history(request, query_history_id, must_exist=True)

  return JsonResponse({
    'status': 0,
    'message': message,
    'query_history': query_history_to_dict(request, history),
  })
@error_handler
def cancel_query(request, query_history_id):
  """
  Cancel the server-side operation of a running query and mark its history
  entry expired. Any failure is reported in 'message' with status -1.
  """
  response = {'status': -1, 'message': ''}

  if request.method != 'POST':
    response['message'] = _('A POST request is required.')
  else:
    try:
      query_history = authorized_get_query_history(request, query_history_id, must_exist=True)
      db = dbms.get(request.user, query_history.get_query_server_config())
      db.cancel_operation(query_history.get_handle())
      # NOTE(review): unlike close_operation, the expired state is not
      # save()d here -- confirm whether persisting it is intended.
      query_history.set_to_expired()
      response['status'] = 0
    except Exception, e:
      response['message'] = unicode(e)

  return JsonResponse(response)
@error_handler
def save_results_hdfs_directory(request, query_history_id):
  """
  Save the results of a query to an HDFS directory.

  Rerun the query. POST parameter 'path' is the target directory; the query
  is re-executed server-side as an INSERT into that directory, and the
  response carries a watch URL for the new run.
  """
  response = {'status': 0, 'message': ''}

  query_history = authorized_get_query_history(request, query_history_id, must_exist=True)
  server_id, state = _get_query_handle_and_state(query_history)
  query_history.save_state(state)
  error_msg, log = None, None

  if request.method != 'POST':
    response['message'] = _('A POST request is required.')
  else:
    # Only successfully finished queries have results worth saving.
    if not query_history.is_success():
      response['message'] = _('This query is %(state)s. Results unavailable.') % {'state': state}
      response['status'] = -1
      return JsonResponse(response)

    db = dbms.get(request.user, query_history.get_query_server_config())

    form = beeswax.forms.SaveResultsDirectoryForm({
      'target_dir': request.POST.get('path')
    }, fs=request.fs)

    if form.is_valid():
      target_dir = request.POST.get('path')
      try:
        response['type'] = 'hdfs-dir'
        response['id'] = query_history.id
        response['query'] = query_history.query
        response['path'] = target_dir
        response['success_url'] = '/filebrowser/view=%s' % target_dir
        # Re-submits the query as an insert-into-directory; returns the new run.
        query_history = db.insert_query_into_directory(query_history, target_dir)
        response['watch_url'] = reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': query_history.id})
      except Exception, ex:
        error_msg, log = expand_exception(ex, db)
        response['message'] = _('The result could not be saved: %s.') % error_msg
        response['status'] = -3
    else:
      response['status'] = 1
      response['errors'] = form.errors

  return JsonResponse(response)
@error_handler
def save_results_hdfs_file(request, query_history_id):
  """
  Save the results of a query to an HDFS file.

  Do not rerun the query: the already-computed result set is streamed to the
  target path. POST parameters: 'path' (target file) and optional
  'overwrite' to replace an existing file.
  """
  response = {'status': 0, 'message': ''}

  query_history = authorized_get_query_history(request, query_history_id, must_exist=True)
  server_id, state = _get_query_handle_and_state(query_history)
  query_history.save_state(state)
  error_msg, log = None, None

  if request.method != 'POST':
    response['message'] = _('A POST request is required.')
  else:
    if not query_history.is_success():
      response['message'] = _('This query is %(state)s. Results unavailable.') % {'state': state}
      response['status'] = -1
      return JsonResponse(response)

    db = dbms.get(request.user, query_history.get_query_server_config())

    form = beeswax.forms.SaveResultsFileForm({
      'target_file': request.POST.get('path'),
      'overwrite': request.POST.get('overwrite', False),
    })

    if form.is_valid():
      target_file = form.cleaned_data['target_file']
      overwrite = form.cleaned_data['overwrite']

      try:
        handle, state = _get_query_handle_and_state(query_history)
      except Exception, ex:
        response['message'] = _('Cannot find query handle and state: %s') % str(query_history)
        response['status'] = -2
        return JsonResponse(response)

      try:
        # Overwrite only replaces an existing *file*; refusing to remove a
        # directory guards against accidental recursive deletes.
        if overwrite and request.fs.exists(target_file):
          if request.fs.isfile(target_file):
            request.fs.do_as_user(request.user.username, request.fs.rmtree, target_file)
          else:
            raise PopupException(_("The target path is a directory"))

        upload(target_file, handle, request.user, db, request.fs)

        response['type'] = 'hdfs-file'
        response['id'] = query_history.id
        response['query'] = query_history.query
        response['path'] = target_file
        response['success_url'] = '/filebrowser/view=%s' % target_file
        response['watch_url'] = reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': query_history.id})
      except Exception, ex:
        error_msg, log = expand_exception(ex, db)
        response['message'] = _('The result could not be saved: %s.') % error_msg
        response['status'] = -3
    else:
      response['status'] = 1
      response['errors'] = form.errors

  return JsonResponse(response)
@error_handler
def save_results_hive_table(request, query_history_id):
"""
Save the results of a query to a hive table.
Rerun the query.
"""
response = {'status': 0, 'message': ''}
query_history = authorized_get_query_history(request, query_history_id, must_exist=True)
server_id, state = _get_query_handle_and_state(query_history)
query_history.save_state(state)
error_msg, log = None, None
if request.method != 'POST':
response['message'] = _('A POST request is required.')
else:
if not query_history.is_success():
response['message'] = _('This query is %(state)s. Results unavailable.') % {'state': state}
response['status'] = -1
return JsonResponse(response)
db = dbms.get(request.user, query_history.get_query_server_config())
database = query_history.design.get_design().query.get('database', 'default')
form = beeswax.forms.SaveResultsTableForm({
'target_table': request.POST.get('table')
}, db=db, database=database)
if form.is_valid():
try:
handle, state = _get_query_handle_and_state(query_history)
result_meta = db.get_results_metadata(handle)
except Exception, ex:
response['message'] = _('Cannot find query handle and state: %s') % str(query_history)
response['status'] = -2
return JsonResponse(response)
try:
query_history = db.create_table_as_a_select(request, query_history, form.target_database, form.cleaned_data['target_table'], result_meta)
response['id'] = query_history.id
response['query'] = query_history.query
response['type'] = 'hive-table'
response['path'] = form.cleaned_data['target_table']
response['success_url'] = reverse('metastore:describe_table', kwargs={'database': form.target_database, 'table': form.cleaned_data['target_table']})
response['watch_url'] = reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': query_history.id})
except Exception, ex:
error_msg, log = expand_exception(ex, db)
response['message'] = _('The result could not be saved: %s.') % error_msg
response['status'] = -3
else:
response['status'] = 1
response['message'] = '\n'.join(form.errors.values()[0])
return JsonResponse(response)
@error_handler
def clear_history(request):
response = {'status': -1, 'message': ''}
if request.method != 'POST':
response['message'] = _('A POST request is required.')
else:
response['count'] = QueryHistory.objects.filter(owner=request.user, is_cleared=False).update(is_cleared=True)
response['status'] = 0
return JsonResponse(response)
@error_handler
def get_sample_data(request, database, table, column=None):
app_name = get_app_name(request)
query_server = get_query_server_config(app_name)
db = dbms.get(request.user, query_server)
response = _get_sample_data(db, database, table, column)
return JsonResponse(response)
def _get_sample_data(db, database, table, column):
table_obj = db.get_table(database, table)
sample_data = db.get_sample(database, table_obj, column)
response = {'status': -1}
if sample_data:
sample = escape_rows(sample_data.rows(), nulls_only=True)
if column:
sample = set([row[0] for row in sample])
sample = [[item] for item in sorted(list(sample))]
response['status'] = 0
response['headers'] = sample_data.cols()
response['full_headers'] = sample_data.full_cols()
response['rows'] = sample
else:
response['message'] = _('Failed to get sample data.')
return response
@error_handler
def get_indexes(request, database, table):
query_server = dbms.get_query_server_config(get_app_name(request))
db = dbms.get(request.user, query_server)
response = {'status': -1}
indexes = db.get_indexes(database, table)
if indexes:
response['status'] = 0
response['headers'] = indexes.cols()
response['rows'] = escape_rows(indexes.rows(), nulls_only=True)
else:
response['message'] = _('Failed to get indexes.')
return JsonResponse(response)
@error_handler
def get_settings(request):
query_server = dbms.get_query_server_config(get_app_name(request))
db = dbms.get(request.user, query_server)
response = {'status': -1}
settings = db.get_configuration()
if settings:
response['status'] = 0
response['settings'] = settings
else:
response['message'] = _('Failed to get settings.')
return JsonResponse(response)
@error_handler
def get_functions(request):
query_server = dbms.get_query_server_config(get_app_name(request))
db = dbms.get(request.user, query_server)
response = {'status': -1}
prefix = request.GET.get('prefix', None)
functions = db.get_functions(prefix)
if functions:
response['status'] = 0
rows = escape_rows(functions.rows(), nulls_only=True)
response['functions'] = [row[0] for row in rows]
else:
response['message'] = _('Failed to get functions.')
return JsonResponse(response)
@error_handler
def analyze_table(request, database, table, columns=None):
app_name = get_app_name(request)
query_server = get_query_server_config(app_name)
db = dbms.get(request.user, query_server)
response = {'status': -1, 'message': '', 'redirect': ''}
if request.method == "POST":
if columns is None:
query_history = db.analyze_table(database, table)
else:
query_history = db.analyze_table_columns(database, table)
response['watch_url'] = reverse('beeswax:api_watch_query_refresh_json', kwargs={'id': query_history.id})
response['status'] = 0
else:
response['message'] = _('A POST request is required.')
return JsonResponse(response)
@error_handler
def get_table_stats(request, database, table, column=None):
app_name = get_app_name(request)
query_server = get_query_server_config(app_name)
db = dbms.get(request.user, query_server)
response = {'status': -1, 'message': '', 'redirect': ''}
if column is not None:
stats = db.get_table_columns_stats(database, table, column)
else:
table = db.get_table(database, table)
stats = table.stats
response['stats'] = stats
response['status'] = 0
return JsonResponse(response)
@error_handler
def get_top_terms(request, database, table, column, prefix=None):
app_name = get_app_name(request)
query_server = get_query_server_config(app_name)
db = dbms.get(request.user, query_server)
response = {'status': -1, 'message': '', 'redirect': ''}
terms = db.get_top_terms(database, table, column, prefix=prefix, limit=int(request.GET.get('limit', 30)))
response['terms'] = terms
response['status'] = 0
return JsonResponse(response)
@error_handler
def get_session(request, session_id=None):
app_name = get_app_name(request)
query_server = get_query_server_config(app_name)
response = {'status': -1, 'message': ''}
if session_id:
session = Session.objects.get(id=session_id, owner=request.user, application=query_server['server_name'])
else: # get the latest session for given user and server type
session = Session.objects.get_session(request.user, query_server['server_name'])
if session is not None:
properties = json.loads(session.properties)
# Redact passwords
for key, value in properties.items():
if 'password' in key.lower():
properties[key] = '*' * len(value)
response['status'] = 0
response['session'] = {'id': session.id, 'application': session.application, 'status': session.status_code}
response['properties'] = properties
else:
response['message'] = _('Could not find session or no open sessions found.')
return JsonResponse(response)
@require_POST
@error_handler
def close_session(request, session_id):
app_name = get_app_name(request)
query_server = get_query_server_config(app_name)
response = {'status': -1, 'message': ''}
try:
filters = {'id': session_id, 'application': query_server['server_name']}
if not request.user.is_superuser:
filters['owner'] = request.user
session = Session.objects.get(**filters)
except Session.DoesNotExist:
response['message'] = _('Session does not exist or you do not have permissions to close the session.')
if session:
session = dbms.get(request.user, query_server).close_session(session)
response['status'] = 0
response['message'] = _('Session successfully closed.')
response['session'] = {'id': session_id, 'application': session.application, 'status': session.status_code}
return JsonResponse(response)
# Proxy API for Metastore App
def describe_table(request, database, table):
try:
from metastore.views import describe_table
return describe_table(request, database, table)
except Exception, e:
LOG.exception('Describe table failed')
raise PopupException(_('Problem accessing table metadata'), detail=e)
def design_to_dict(design):
hql_design = HQLdesign.loads(design.data)
return {
'id': design.id,
'query': hql_design.hql_query,
'name': design.name,
'desc': design.desc,
'database': hql_design.query.get('database', None),
'settings': hql_design.settings,
'file_resources': hql_design.file_resources,
'functions': hql_design.functions,
'is_parameterized': hql_design.query.get('is_parameterized', True),
'email_notify': hql_design.query.get('email_notify', True),
'is_redacted': design.is_redacted
}
def query_history_to_dict(request, query_history):
query_history_dict = {
'id': query_history.id,
'state': query_history.last_state,
'query': query_history.query,
'has_results': query_history.has_results,
'statement_number': query_history.statement_number,
'watch_url': reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': query_history.id}),
'results_url': reverse(get_app_name(request) + ':view_results', kwargs={'id': query_history.id, 'first_row': 0})
}
if query_history.design:
query_history_dict['design'] = design_to_dict(query_history.design)
return query_history_dict
def get_query_form(request):
try:
try:
# Get database choices
query_server = dbms.get_query_server_config(get_app_name(request))
db = dbms.get(request.user, query_server)
databases = [(database, database) for database in db.get_databases()]
except StructuredThriftTransportException, e:
# If Thrift exception was due to failed authentication, raise corresponding message
if 'TSocket read 0 bytes' in str(e) or 'Error validating the login' in str(e):
raise PopupException(_('Failed to authenticate to query server, check authentication configurations.'), detail=e)
else:
raise e
except Exception, e:
raise PopupException(_('Unable to access databases, Query Server or Metastore may be down.'), detail=e)
if not databases:
raise RuntimeError(_("No databases are available. Permissions could be missing."))
query_form = QueryForm()
query_form.bind(request.POST)
query_form.query.fields['database'].choices = databases # Could not do it in the form
return query_form
"""
Utils
"""
def _extract_nested_type(parse_tree, nested_path):
nested_tokens = nested_path.strip('/').split('/')
subtree = parse_tree
for token in nested_tokens:
if token in subtree:
subtree = subtree[token]
elif 'fields' in subtree:
for field in subtree['fields']:
if field['name'] == token:
subtree = field
break
else:
raise Exception('Invalid nested type path: %s' % nested_path)
return subtree
| [
"cloudera@quickstart.cloudera"
] | cloudera@quickstart.cloudera |
ad39824936072d2fefcba287cf9c9844019ccebc | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2953/60761/236880.py | 85dc0205c02d4042309bbbf445e67c9c0b168125 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 398 | py | def mintimes(a,b):
if(a==1):
return b-1
elif(a==0):
return 10000000
elif(a==b):
return 10000000
else:
return mintimes(b%a,a)+int(b/a)
n=int(input(""))
if(n==1):
print(0,end="")
else:
result=n-1
i=2
while(i<=n/2):
c=mintimes(n%i,i)+int(n/i)
result=min(c,result)
i=i+1
print(result,end="")
| [
"1069583789@qq.com"
] | 1069583789@qq.com |
866bcefcdd0a8d5e6b712d735419139fbee00878 | 56be90af1f455a259852a92044413f6d63a65402 | /microsip_web/apps/ventas/herramientas/generar_polizas/views.py | 29eee0f855f3d7f270ea9bc0f61bf56245c919ef | [] | no_license | ruff0/microsip_web | 6bf7f7ba77c1239e71097ad3421a7d5051126e3a | 4f1096613acf72439dbba6757e479034d8d6605e | refs/heads/master | 2021-01-12T17:32:38.218626 | 2015-01-07T17:03:57 | 2015-01-07T17:03:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,721 | py | #encoding:utf-8
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse, HttpResponseRedirect
from django.template import RequestContext
from .models import *
from forms import *
from django.db.models import Sum, Max, Q
from microsip_web.libs import contabilidad
# user autentication
from django.contrib.auth.decorators import login_required, permission_required
from microsip_api.comun.sic_db import get_conecctionname
##########################################
## ##
## Preferencias de empresa ##
## ##
##########################################
@login_required(login_url='/login/')
def preferenciasEmpresa_View(request, template_name='ventas/herramientas/generar_polizas/preferencias_empresa.html'):
try:
informacion_contable = InformacionContable_V.objects.all()[:1]
informacion_contable = informacion_contable[0]
except:
informacion_contable = InformacionContable_V()
cuenta_cliente_formset = modelformset_factory(clientes_config_cuenta, form= clientes_config_cuentaManageForm, can_delete=True,)
msg = ''
if request.method == 'POST':
formset = cuenta_cliente_formset(request.POST, request.FILES)
form = InformacionContableManageForm(request.POST, instance=informacion_contable)
if form.is_valid() and formset.is_valid():
form.save()
formset.save()
formset = cuenta_cliente_formset()
msg = 'Datos Guardados Exitosamente'
else:
form = InformacionContableManageForm(instance=informacion_contable)
formset = cuenta_cliente_formset()
plantillas = PlantillaPolizas_V.objects.all()
c= {'form':form,'msg':msg,'plantillas':plantillas,'formset':formset,}
return render_to_response(template_name, c, context_instance=RequestContext(request))
##########################################
## ##
## Generacion de polizas ##
## ##
##########################################
def generar_polizas(fecha_ini = None, fecha_fin = None, ignorar_documentos_cont = True, crear_polizas_por = 'Documento', crear_polizas_de = '', plantilla_facturas = '', plantilla_devoluciones ='', descripcion = '', connection_name = None, usuario_micorsip=''):
error = 0
msg = ''
documentosData = []
documentosGenerados = []
documentosDataDevoluciones = []
depto_co = ContabilidadDepartamento.objects.get(clave='GRAL')
try:
informacion_contable = InformacionContable_V.objects.all()[:1]
informacion_contable = informacion_contable[0]
except ObjectDoesNotExist:
error = 1
#Si estadefinida la informacion contable no hay error!!!
if error == 0:
facturas = []
devoluciones= []
if ignorar_documentos_cont:
if crear_polizas_de == 'F' or crear_polizas_de == 'FD':
facturas = VentasDocumento.objects.filter(Q(estado='N')|Q(estado='D'), tipo ='F', contabilizado ='N', fecha__gte=fecha_ini, fecha__lte=fecha_fin).order_by('fecha')[:99]
elif crear_polizas_de == 'D' or crear_polizas_de == 'FD':
devoluciones = VentasDocumento.objects.filter(estado = 'N').filter(tipo ='D', contabilizado ='N', fecha__gte=fecha_ini, fecha__lte=fecha_fin).order_by('fecha')[:99]
else:
if crear_polizas_de == 'F' or crear_polizas_de == 'FD':
facturas = VentasDocumento.objects.filter(Q(estado='N')|Q(estado='D'), tipo ='F', fecha__gte=fecha_ini, fecha__lte=fecha_fin).order_by('fecha')[:99]
elif crear_polizas_de == 'D' or crear_polizas_de == 'FD':
devoluciones = VentasDocumento.objects.filter(estado = 'N').filter(tipo = 'D', fecha__gte=fecha_ini, fecha__lte=fecha_fin).order_by('fecha')[:99]
#PREFIJO
prefijo = informacion_contable.tipo_poliza_ve.prefijo
if not informacion_contable.tipo_poliza_ve.prefijo:
prefijo = ''
if crear_polizas_de == 'F' or crear_polizas_de == 'FD':
msg, documentosData = contabilidad.crear_polizas(
origen_documentos = 'ventas',
documentos = facturas,
depto_co = depto_co,
informacion_contable= informacion_contable,
plantilla = plantilla_facturas,
crear_polizas_por = crear_polizas_por,
crear_polizas_de = crear_polizas_de,
msg = msg,
descripcion = descripcion,
tipo_documento = 'F',
connection_name = connection_name,
usuario_micorsip = usuario_micorsip,
)
documentosGenerados = documentosData
if crear_polizas_de == 'D' or crear_polizas_de == 'FD':
msg, documentosDataDevoluciones = contabilidad.crear_polizas(
origen_documentos = 'ventas',
documentos = devoluciones,
depto_co = depto_co,
informacion_contable= informacion_contable,
plantilla = plantilla_devoluciones,
crear_polizas_por = crear_polizas_por,
crear_polizas_de = crear_polizas_de,
msg = msg,
descripcion = descripcion,
tipo_documento = 'D',
connection_name = connection_name,
usuario_micorsip = usuario_micorsip,
)
elif error == 1 and msg=='':
msg = 'No se han derfinido las preferencias de la empresa para generar polizas [Por favor definelas primero en Configuracion > Preferencias de la empresa]'
return documentosGenerados, documentosDataDevoluciones, msg
@login_required(login_url='/login/')
def facturas_View(request, template_name='ventas/herramientas/generar_polizas/generar_polizas.html'):
connection_name = get_conecctionname(request.session)
if connection_name == '':
return HttpResponseRedirect('/select_db/')
documentosData = []
polizas_de_devoluciones = []
msg = msg_informacion =''
error = 0
if request.method == 'POST':
form = GenerarPolizasManageForm(request.POST)
if form.is_valid():
fecha_ini = form.cleaned_data['fecha_ini']
fecha_fin = form.cleaned_data['fecha_fin']
ignorar_documentos_cont = form.cleaned_data['ignorar_documentos_cont']
crear_polizas_por = form.cleaned_data['crear_polizas_por']
crear_polizas_de = form.cleaned_data['crear_polizas_de']
plantilla_facturas = form.cleaned_data['plantilla']
plantilla_devoluciones = form.cleaned_data['plantilla_2']
descripcion = form.cleaned_data['descripcion']
if (crear_polizas_de == 'F' and not plantilla_facturas== None) or (crear_polizas_de == 'D' and not plantilla_devoluciones== None) or (crear_polizas_de == 'FD' and not plantilla_facturas== None and not plantilla_devoluciones== None):
documentosData, polizas_de_devoluciones, msg = generar_polizas(fecha_ini, fecha_fin, ignorar_documentos_cont, crear_polizas_por, crear_polizas_de, plantilla_facturas, plantilla_devoluciones, descripcion, connection_name, request.user.username)
else:
error =1
msg = 'Seleciona una plantilla'
if (crear_polizas_de == 'F' or crear_polizas_de=='FD') and documentosData == [] and msg=='':
msg = 'Lo siento, no se encontraron facturas para este filtro'
elif (crear_polizas_de == 'D' or crear_polizas_de=='FD') and polizas_de_devoluciones == [] and msg=='':
msg = 'Lo siento, no se encontraron devoluciones para este filtro'
if crear_polizas_de == 'FD' and documentosData == [] and polizas_de_devoluciones == []:
msg = 'Lo siento, no se encontraron facturas ni devoluciones para este filtro'
if (not documentosData == [] or not polizas_de_devoluciones == []) and error == 0:
form = GenerarPolizasManageForm()
msg_informacion = 'Polizas generadas satisfactoriamente, *Ahora revisa las polizas pendientes generadas en el modulo de contabilidad'
else:
form = GenerarPolizasManageForm()
c = {'documentos':documentosData, 'polizas_de_devoluciones':polizas_de_devoluciones,'msg':msg,'form':form,'msg_informacion':msg_informacion,}
return render_to_response(template_name, c, context_instance=RequestContext(request))
##########################################
## ##
## Plantillas ##
## ##
##########################################
@login_required(login_url='/login/')
def plantilla_poliza_manageView(request, id = None, template_name='ventas/herramientas/generar_polizas/plantilla_poliza.html'):
message = ''
if id:
plantilla = get_object_or_404(PlantillaPolizas_V, pk=id)
else:
plantilla =PlantillaPolizas_V()
plantilla_form = PlantillaPolizaManageForm(request.POST or None, instance=plantilla)
extra = 1
plantilla_items = PlantillaPoliza_items_formset(ConceptoPlantillaPolizaManageForm, extra=extra, can_delete=True, can_order = True)
plantilla_items_formset = plantilla_items(request.POST or None, instance=plantilla)
if plantilla_form.is_valid() and plantilla_items_formset.is_valid():
plantilla = plantilla_form.save(commit = False)
plantilla.save()
#GUARDA CONCEPTOS DE PLANTILLA
for concepto_form in plantilla_items_formset:
Detalleplantilla = concepto_form.save(commit = False)
#PARA CREAR UNO NUEVO
if not Detalleplantilla.id:
Detalleplantilla.plantilla_poliza_v = plantilla
plantilla_items_formset.save()
return HttpResponseRedirect('/ventas/PreferenciasEmpresa/')
c = {'plantilla_form': plantilla_form, 'formset': plantilla_items_formset , 'message':message,}
return render_to_response(template_name, c, context_instance=RequestContext(request))
@login_required(login_url='/login/')
def plantilla_poliza_delete(request, id = None):
plantilla = get_object_or_404(PlantillaPolizas_V, pk=id)
plantilla.delete()
return HttpResponseRedirect('/ventas/PreferenciasEmpresa/') | [
"jesusmaherrera@gmail.com"
] | jesusmaherrera@gmail.com |
2369afd1881e2bf778f5cef9c86da552f9890dec | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_167/ch45_2019_06_03_00_38_26_791511.py | 8a1d9f502e38e2832869ccccc55a25ef63d0e533 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 187 | py | def zera_negativos(lista):
i=0
while i > len (lista):
i+=1
if lista[i]<0:
lista[i]=0
return lista
| [
"you@example.com"
] | you@example.com |
fa524d0167704fd519a3051b4b1daa397349d730 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_arthritics.py | 9b5ba637596aad5009e67b96be5c854ee2065ff9 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 234 | py |
#calss header
class _ARTHRITICS():
def __init__(self,):
self.name = "ARTHRITICS"
self.definitions = arthritic
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['arthritic']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
1a8616a27762f3165dd94949cf15cee25aa6c367 | 349daa7f480c7e61e16e5c2fef53d423b77d86fe | /receiver_line.py | c40156a26a46cc43378492660c35027475be919a | [] | no_license | samhaug/SP_setup | 5e68d81eae5abf4a0b47fe4dadd2730eed855921 | 63be3033abc61fb3e8f260b504857bc8c6ef566c | refs/heads/master | 2021-01-22T18:46:14.244231 | 2017-09-04T23:52:25 | 2017-09-04T23:52:25 | 102,411,954 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,369 | py | #!/home/samhaug/anaconda2/bin/python
'''
==============================================================================
File Name : receiver_line.py
Purpose : make line of stations from some source at some azimuth
Creation Date : 20-07-2017
Last Modified : Thu 20 Jul 2017 01:05:04 PM EDT
Created By : Samuel M. Haugland
==============================================================================
'''
import numpy as np
from matplotlib import pyplot as plt
from subprocess import call
from os import listdir
import h5py
import obspy
import seispy
import geopy
from sys import argv
def main():
st = obspy.read('/home/samhaug/work1/SP_sims/PREM_5s/st_Z.pk')
stout = obspy.core.stream.Stream()
start = geopy.Point(st[0].stats.sac['evla'],st[0].stats.sac['evlo'])
for idx,ii in enumerate(range(180)):
tr = obspy.core.trace.Trace()
tr.stats.sac = {}
d = geopy.distance.VincentyDistance(kilometers=111.195*ii)
e = d.destination(point=start,bearing=45)
tr.stats.sac['evla'] = st[0].stats.sac['evla']
tr.stats.sac['evlo'] = st[0].stats.sac['evlo']
tr.stats.sac['stla'] = e.latitude
tr.stats.sac['stlo'] = e.longitude
tr.stats.station = 'FUCK'
tr.stats.network = 'II'
stout.append(tr)
seispy.mapplot.plot(stout)
seispy.convert.gemini_stations(stout)
main()
| [
"samhaug@umich.edu"
] | samhaug@umich.edu |
c5aa8282f73ae6ac4f87891556844b41cd6dbe30 | 8fdcd12cfb91b2245da8b3c65fb937b1d72dd4c5 | /tissuelab/omero/gateway_ome500_ice351/omero_API_ice.py | 68838af3e48ac116db11f2b268d6ac9b1d942ec9 | [] | no_license | VirtualPlants/tissuelab | 569a334deab0b73acc8b43f313efc3f4c4e552fd | 8c064a34b91127806848f4992d1e4767574863cf | refs/heads/master | 2021-01-11T01:32:19.830778 | 2017-05-04T09:42:53 | 2017-05-04T09:42:53 | 70,694,783 | 2 | 1 | null | 2017-01-05T14:21:50 | 2016-10-12T11:49:10 | Python | UTF-8 | Python | false | false | 59,246 | py | # **********************************************************************
#
# Copyright (c) 2003-2013 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
#
# Ice version 3.5.1
#
# <auto-generated>
#
# Generated from file `API.ice'
#
# Warning: do not edit this file.
#
# </auto-generated>
#
import Ice, IcePy
import omero_cmd_API_ice
import omero_ServerErrors_ice
import omero_ModelF_ice
import omero_ServicesF_ice
import omero_System_ice
import omero_Collections_ice
# Included module omero
_M_omero = Ice.openModule('omero')
# Included module omero.model
_M_omero.model = Ice.openModule('omero.model')
# Included module Ice
_M_Ice = Ice.openModule('Ice')
# Included module Glacier2
_M_Glacier2 = Ice.openModule('Glacier2')
# Included module omero.sys
_M_omero.sys = Ice.openModule('omero.sys')
# Included module omero.api
_M_omero.api = Ice.openModule('omero.api')
# Included module omero.cmd
_M_omero.cmd = Ice.openModule('omero.cmd')
# Included module omero.grid
_M_omero.grid = Ice.openModule('omero.grid')
# Start of module omero
__name__ = 'omero'
# Start of module omero.api
__name__ = 'omero.api'
_M_omero.api.__doc__ = '''The omero::api module defines all the central verbs for working with OMERO.blitz.
Arguments and return values consist of those
types defined in the other ice files available here. With no
further custom code, it is possible to interoperate with
OMERO.blitz simply via the definitions here. Start with the
ServiceFactory definition at the end of this file.
Note: Using these types is significantly easier in combination with
the JavaDocs of the OMERO.server, specifically the ome.api
package. Where not further noted below, the follow mappings between
ome.api argument types and omero::api argument types hold:
+-----------------------+------------------------+
| ome.api | omero::api |
+-----------------------+------------------------+
|java.lang.Class |string |
+-----------------------+------------------------+
|java.util.Set |java.util.List/vector |
+-----------------------+------------------------+
|IPojo options (Map) |omero::sys::ParamMap |
+-----------------------+------------------------+
|If null needed |omero::RType subclass |
+-----------------------+------------------------+
|... |... |
+-----------------------+------------------------+'''
if 'ClientCallback' not in _M_omero.api.__dict__:
_M_omero.api.ClientCallback = Ice.createTempClass()
class ClientCallback(Ice.Object):
'''Primary callback interface for interaction between client and
server session ("ServiceFactory"). Where possible these methods
will be called one-way to prevent clients from hanging the server.'''
def __init__(self):
if Ice.getType(self) == _M_omero.api.ClientCallback:
raise RuntimeError('omero.api.ClientCallback is an abstract class')
def ice_ids(self, current=None):
return ('::Ice::Object', '::omero::api::ClientCallback')
def ice_id(self, current=None):
return '::omero::api::ClientCallback'
def ice_staticId():
return '::omero::api::ClientCallback'
ice_staticId = staticmethod(ice_staticId)
def requestHeartbeat(self, current=None):
'''Heartbeat-request made by the server to guarantee that the client
is alive. If the client is still active, then some method should
be made on the server to update the last idle time.'''
pass
def sessionClosed(self, current=None):
'''The session to which this ServiceFactory is connected has been
closed. Almost no further method calls (if any) are possible.
Create a new session via omero.client.createSession()'''
pass
def shutdownIn(self, milliseconds, current=None):
'''Message that the server will be shutting down in the
given number of milliseconds, after which all new and
running method invocations will recieve a CancelledException.'''
pass
def __str__(self):
return IcePy.stringify(self, _M_omero.api._t_ClientCallback)
__repr__ = __str__
_M_omero.api.ClientCallbackPrx = Ice.createTempClass()
class ClientCallbackPrx(Ice.ObjectPrx):
'''Heartbeat-request made by the server to guarantee that the client
is alive. If the client is still active, then some method should
be made on the server to update the last idle time.'''
def requestHeartbeat(self, _ctx=None):
return _M_omero.api.ClientCallback._op_requestHeartbeat.invoke(self, ((), _ctx))
'''Heartbeat-request made by the server to guarantee that the client
is alive. If the client is still active, then some method should
be made on the server to update the last idle time.'''
def begin_requestHeartbeat(self, _response=None, _ex=None, _sent=None, _ctx=None):
return _M_omero.api.ClientCallback._op_requestHeartbeat.begin(self, ((), _response, _ex, _sent, _ctx))
'''Heartbeat-request made by the server to guarantee that the client
is alive. If the client is still active, then some method should
be made on the server to update the last idle time.'''
def end_requestHeartbeat(self, _r):
return _M_omero.api.ClientCallback._op_requestHeartbeat.end(self, _r)
'''Heartbeat-request made by the server to guarantee that the client
is alive. If the client is still active, then some method should
be made on the server to update the last idle time.'''
def requestHeartbeat_async(self, _cb, _ctx=None):
return _M_omero.api.ClientCallback._op_requestHeartbeat.invokeAsync(self, (_cb, (), _ctx))
'''The session to which this ServiceFactory is connected has been
closed. Almost no further method calls (if any) are possible.
Create a new session via omero.client.createSession()'''
def sessionClosed(self, _ctx=None):
return _M_omero.api.ClientCallback._op_sessionClosed.invoke(self, ((), _ctx))
'''The session to which this ServiceFactory is connected has been
closed. Almost no further method calls (if any) are possible.
Create a new session via omero.client.createSession()'''
def begin_sessionClosed(self, _response=None, _ex=None, _sent=None, _ctx=None):
return _M_omero.api.ClientCallback._op_sessionClosed.begin(self, ((), _response, _ex, _sent, _ctx))
'''The session to which this ServiceFactory is connected has been
closed. Almost no further method calls (if any) are possible.
Create a new session via omero.client.createSession()'''
def end_sessionClosed(self, _r):
return _M_omero.api.ClientCallback._op_sessionClosed.end(self, _r)
'''The session to which this ServiceFactory is connected has been
closed. Almost no further method calls (if any) are possible.
Create a new session via omero.client.createSession()'''
def sessionClosed_async(self, _cb, _ctx=None):
return _M_omero.api.ClientCallback._op_sessionClosed.invokeAsync(self, (_cb, (), _ctx))
# --- Proxy operations for ClientCallback.shutdownIn (slice2py output) ---
# Same four-flavour pattern as sessionClosed, but with a single in-parameter
# (milliseconds, declared as a long in the operation descriptor below).
# NOTE(review): the "recieve" spelling below comes from the generated Slice
# documentation strings and is reproduced verbatim.
'''Message that the server will be shutting down in the
given number of milliseconds, after which all new and
running method invocations will recieve a CancelledException.'''
def shutdownIn(self, milliseconds, _ctx=None):
    # Blocking dispatch of the shutdown warning.
    return _M_omero.api.ClientCallback._op_shutdownIn.invoke(self, ((milliseconds, ), _ctx))
'''Message that the server will be shutting down in the
given number of milliseconds, after which all new and
running method invocations will recieve a CancelledException.'''
def begin_shutdownIn(self, milliseconds, _response=None, _ex=None, _sent=None, _ctx=None):
    # Asynchronous dispatch; completion is retrieved via end_shutdownIn.
    return _M_omero.api.ClientCallback._op_shutdownIn.begin(self, ((milliseconds, ), _response, _ex, _sent, _ctx))
'''Message that the server will be shutting down in the
given number of milliseconds, after which all new and
running method invocations will recieve a CancelledException.'''
def end_shutdownIn(self, _r):
    # Completes an invocation started with begin_shutdownIn.
    return _M_omero.api.ClientCallback._op_shutdownIn.end(self, _r)
'''Message that the server will be shutting down in the
given number of milliseconds, after which all new and
running method invocations will recieve a CancelledException.'''
def shutdownIn_async(self, _cb, milliseconds, _ctx=None):
    # Legacy callback-style asynchronous dispatch.
    return _M_omero.api.ClientCallback._op_shutdownIn.invokeAsync(self, (_cb, (milliseconds, ), _ctx))
def checkedCast(proxy, facetOrCtx=None, _ctx=None):
    # Down-cast with a remote type check against the ClientCallback Slice id.
    type_id = '::omero::api::ClientCallback'
    prx_class = _M_omero.api.ClientCallbackPrx
    return prx_class.ice_checkedCast(proxy, type_id, facetOrCtx, _ctx)
checkedCast = staticmethod(checkedCast)

def uncheckedCast(proxy, facet=None):
    # Down-cast without any remote verification of the proxy's type.
    return _M_omero.api.ClientCallbackPrx.ice_uncheckedCast(proxy, facet)
uncheckedCast = staticmethod(uncheckedCast)
# --- IcePy runtime registration for omero::api::ClientCallback ---
# Bind the generated proxy and skeleton classes to the Slice type id and
# attach the operation descriptors that the proxy methods above dispatch to.
_M_omero.api._t_ClientCallbackPrx = IcePy.defineProxy('::omero::api::ClientCallback', ClientCallbackPrx)
_M_omero.api._t_ClientCallback = IcePy.defineClass('::omero::api::ClientCallback', ClientCallback, -1, (), True, False, None, (), ())
ClientCallback._ice_type = _M_omero.api._t_ClientCallback
# requestHeartbeat and sessionClosed take no parameters; shutdownIn declares
# one long in-parameter (IcePy._t_long) — the milliseconds argument.
ClientCallback._op_requestHeartbeat = IcePy.Operation('requestHeartbeat', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), None, ())
ClientCallback._op_sessionClosed = IcePy.Operation('sessionClosed', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), None, ())
ClientCallback._op_shutdownIn = IcePy.Operation('shutdownIn', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (((), IcePy._t_long, False, 0),), (), None, ())
# Publish the finished classes into the omero.api module namespace and drop
# the temporary module-level names created by Ice.createTempClass().
_M_omero.api.ClientCallback = ClientCallback
del ClientCallback
_M_omero.api.ClientCallbackPrx = ClientCallbackPrx
del ClientCallbackPrx
# Idempotence guard emitted by slice2py: (re)define ServiceFactory only if it
# is not already present in the omero.api module dictionary.
if 'ServiceFactory' not in _M_omero.api.__dict__:
    _M_omero.api.ServiceFactory = Ice.createTempClass()
class ServiceFactory(_M_omero.cmd.Session):
    '''Starting point for all OMERO.blitz interaction.
    A ServiceFactory once acquired can be used to create any number
    of service proxies to the server. Most services implement [ServiceInterface]
    or its subinterface [StatefulServiceInterface].'''
    # Abstract servant skeleton generated by slice2py: every operation below is
    # a stub (pass); concrete behavior is supplied server-side.
    def __init__(self):
        # Refuse direct instantiation of the abstract skeleton class.
        if Ice.getType(self) == _M_omero.api.ServiceFactory:
            raise RuntimeError('omero.api.ServiceFactory is an abstract class')
    def ice_ids(self, current=None):
        # All Slice type ids implemented by this servant.
        return ('::Glacier2::Session', '::Ice::Object', '::omero::api::ServiceFactory', '::omero::cmd::Session')
    def ice_id(self, current=None):
        # Most-derived Slice type id.
        return '::omero::api::ServiceFactory'
    def ice_staticId():
        return '::omero::api::ServiceFactory'
    ice_staticId = staticmethod(ice_staticId)
    def getSecurityContexts(self, current=None):
        '''Provides a list of all valid security contexts for this session.
        Each of the returned [omero::model::IObject] instances can be
        passed to setSecurityContext.'''
        pass
    def setSecurityContext(self, obj, current=None):
        '''Changes the security context for the current session.
        A security context limits the set of objects which will
        be returned by all queries and restricts what updates
        can be made.
        Current valid values for security context:
        [omero::model::ExperimenterGroup] - logs into a specific group
        [omero::model::Share] - uses IShare to activate a share
        Passing an unloaded version of either object type will change
        the way the current session operates. Note: only objects which
        are returned by the [getSecurityContexts] method are considered
        valid. Any other instance will cause an exception to be thrown.
        Example usage in Python:
        sf = client.createSession()
        objs = sf.getSecurityContexts()
        old = sf.setSecurityContext(objs[-1])'''
        pass
    def setSecurityPassword(self, password, current=None):
        # NOTE(review): the generated docstring below is truncated mid-sentence
        # in the Slice source; reproduced verbatim.
        '''Re-validates the password for the current session. This prevents
        See methods that mention "HasPassword".'''
        pass
    # Accessors for the stateless OMERO services.  Concrete return types are
    # declared in the corresponding Slice definitions (not visible here).
    def getAdminService(self, current=None):
        pass
    def getConfigService(self, current=None):
        pass
    def getContainerService(self, current=None):
        pass
    def getDeleteService(self, current=None):
        pass
    def getLdapService(self, current=None):
        pass
    def getPixelsService(self, current=None):
        pass
    def getProjectionService(self, current=None):
        pass
    def getQueryService(self, current=None):
        pass
    def getRenderingSettingsService(self, current=None):
        pass
    def getRepositoryInfoService(self, current=None):
        pass
    def getRoiService(self, current=None):
        pass
    def getScriptService(self, current=None):
        pass
    def getSessionService(self, current=None):
        pass
    def getShareService(self, current=None):
        pass
    def getTimelineService(self, current=None):
        pass
    def getTypesService(self, current=None):
        pass
    def getUpdateService(self, current=None):
        pass
    def getMetadataService(self, current=None):
        pass
    # Factories for the stateful OMERO services (one fresh instance per call,
    # judging by the create* naming — confirm against server docs).
    def createGateway(self, current=None):
        '''The gateway service provided here is deprecated in OMERO 4.3
        see Deprecated Services
        for more information and alternative usage.'''
        pass
    def createExporter(self, current=None):
        pass
    def createJobHandle(self, current=None):
        pass
    def createRawFileStore(self, current=None):
        pass
    def createRawPixelsStore(self, current=None):
        pass
    def createRenderingEngine(self, current=None):
        pass
    def createSearchService(self, current=None):
        pass
    def createThumbnailStore(self, current=None):
        pass
    def sharedResources(self, current=None):
        '''Returns a reference to a back-end manager. The [omero::grid::SharedResources]
        service provides look ups for various facilities offered by OMERO:
        OMERO.scripts
        OMERO.tables
        These facilities may or may not be available on first request.'''
        pass
    def getByName(self, name, current=None):
        '''Allows looking up any stateless service by name.
        See Constants.ice for examples of services.
        If a service has been added by third-parties,
        getByName can be used even though no concrete
        method is available.'''
        pass
    def createByName(self, name, current=None):
        '''Allows looking up any stateful service by name.
        See Constants.ice for examples of services.
        If a service has been added by third-parties,
        createByName can be used even though no concrete
        method is available.'''
        pass
    def subscribe(self, topicName, prx, current=None):
        '''Subscribe to a given topic. The topic must exist and the user must
        have sufficient permissions for that topic. Further the proxy object
        must match the required type for the topic as encoded in the topic
        name.'''
        pass
    def setCallback(self, callback, current=None):
        '''Sets the single callback used by the ServiceFactory
        to communicate with the client application. A default
        callback is set by the omero::client object on
        session creation which should suffice for most usage.
        See the client object's documentation in each language
        mapping for ways to use the callback.'''
        pass
    def close(self, current=None):
        '''Deprecated misnomer.'''
        pass
    def closeOnDestroy(self, current=None):
        '''Marks the session for closure rather than detachment, which will
        be triggered by the destruction of the Glacier2 connection via
        router.destroySession()
        Closing the session rather the detaching is more secure, since all
        resources are removed from the server and can safely be set once
        it is clear that a client is finished with those resources.'''
        pass
    def detachOnDestroy(self, current=None):
        '''Marks the session for detachment rather than closure, which will
        be triggered by the destruction of the Glacier2 connection via
        router.destroySession()
        This is the default and allows a lost session to be reconnected,
        at a slight security cost since the session will persist longer
        and can be used by others if the UUID is intercepted.'''
        pass
    def activeServices(self, current=None):
        '''Returns a list of string ids for currently active services. This will
        _not_ keep services alive, and in fact checks for all expired services
        and removes them.'''
        pass
    def keepAllAlive(self, proxies, current=None):
        '''Requests that the given services be marked as alive. It is
        possible that one of the services has already timed out, in which
        case the returned long value will be non-zero.
        Specifically, the bit representing the 0-based index will be 1:
        if (retval & 1<<idx == 1<<idx) { // not alive }
        Except for fatal server or session errors, this method should never
        throw an exception.'''
        pass
    def keepAlive(self, proxy, current=None):
        '''Returns true if the given service is alive.
        Except for fatal server or session errors, this method should never
        throw an exception.'''
        pass
    def __str__(self):
        # Ice-aware string representation of the servant.
        return IcePy.stringify(self, _M_omero.api._t_ServiceFactory)
    __repr__ = __str__
_M_omero.api.ServiceFactoryPrx = Ice.createTempClass()
class ServiceFactoryPrx(_M_omero.cmd.SessionPrx):
# --- ServiceFactoryPrx: security-context operations (slice2py output) ---
# Each operation appears as a sync/begin_/end_ trio delegating to the
# IcePy.Operation descriptor registered on the ServiceFactory skeleton.
'''Provides a list of all valid security contexts for this session.
Each of the returned [omero::model::IObject] instances can be
passed to setSecurityContext.'''
def getSecurityContexts(self, _ctx=None):
    # Blocking remote call.
    return _M_omero.api.ServiceFactory._op_getSecurityContexts.invoke(self, ((), _ctx))
'''Provides a list of all valid security contexts for this session.
Each of the returned [omero::model::IObject] instances can be
passed to setSecurityContext.'''
def begin_getSecurityContexts(self, _response=None, _ex=None, _sent=None, _ctx=None):
    # Asynchronous call; completed via end_getSecurityContexts.
    return _M_omero.api.ServiceFactory._op_getSecurityContexts.begin(self, ((), _response, _ex, _sent, _ctx))
'''Provides a list of all valid security contexts for this session.
Each of the returned [omero::model::IObject] instances can be
passed to setSecurityContext.'''
def end_getSecurityContexts(self, _r):
    return _M_omero.api.ServiceFactory._op_getSecurityContexts.end(self, _r)
'''Changes the security context for the current session.
A security context limits the set of objects which will
be returned by all queries and restricts what updates
can be made.
Current valid values for security context:
[omero::model::ExperimenterGroup] - logs into a specific group
[omero::model::Share] - uses IShare to activate a share
Passing an unloaded version of either object type will change
the way the current session operates. Note: only objects which
are returned by the [getSecurityContexts] method are considered
valid. Any other instance will cause an exception to be thrown.
Example usage in Python:
sf = client.createSession()
objs = sf.getSecurityContexts()
old = sf.setSecurityContext(objs[-1])'''
def setSecurityContext(self, obj, _ctx=None):
    # Blocking remote call; obj selects the new context (group or share).
    return _M_omero.api.ServiceFactory._op_setSecurityContext.invoke(self, ((obj, ), _ctx))
'''Changes the security context for the current session.
A security context limits the set of objects which will
be returned by all queries and restricts what updates
can be made.
Current valid values for security context:
[omero::model::ExperimenterGroup] - logs into a specific group
[omero::model::Share] - uses IShare to activate a share
Passing an unloaded version of either object type will change
the way the current session operates. Note: only objects which
are returned by the [getSecurityContexts] method are considered
valid. Any other instance will cause an exception to be thrown.
Example usage in Python:
sf = client.createSession()
objs = sf.getSecurityContexts()
old = sf.setSecurityContext(objs[-1])'''
def begin_setSecurityContext(self, obj, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_setSecurityContext.begin(self, ((obj, ), _response, _ex, _sent, _ctx))
'''Changes the security context for the current session.
A security context limits the set of objects which will
be returned by all queries and restricts what updates
can be made.
Current valid values for security context:
[omero::model::ExperimenterGroup] - logs into a specific group
[omero::model::Share] - uses IShare to activate a share
Passing an unloaded version of either object type will change
the way the current session operates. Note: only objects which
are returned by the [getSecurityContexts] method are considered
valid. Any other instance will cause an exception to be thrown.
Example usage in Python:
sf = client.createSession()
objs = sf.getSecurityContexts()
old = sf.setSecurityContext(objs[-1])'''
def end_setSecurityContext(self, _r):
    return _M_omero.api.ServiceFactory._op_setSecurityContext.end(self, _r)
'''Re-validates the password for the current session. This prevents
See methods that mention "HasPassword".'''
def setSecurityPassword(self, password, _ctx=None):
    # Blocking remote call; password is sent as the single in-parameter.
    return _M_omero.api.ServiceFactory._op_setSecurityPassword.invoke(self, ((password, ), _ctx))
'''Re-validates the password for the current session. This prevents
See methods that mention "HasPassword".'''
def begin_setSecurityPassword(self, password, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_setSecurityPassword.begin(self, ((password, ), _response, _ex, _sent, _ctx))
'''Re-validates the password for the current session. This prevents
See methods that mention "HasPassword".'''
def end_setSecurityPassword(self, _r):
    return _M_omero.api.ServiceFactory._op_setSecurityPassword.end(self, _r)
# --- ServiceFactoryPrx: stateless service accessors ---
# Every getter is a sync/begin_/end_ trio over the same IcePy.Operation
# descriptor registered on the ServiceFactory skeleton; only the bound
# operation differs between services.
def getAdminService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getAdminService
    return op.invoke(self, ((), _ctx))
def begin_getAdminService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getAdminService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getAdminService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getAdminService
    return op.end(self, _r)
def getConfigService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getConfigService
    return op.invoke(self, ((), _ctx))
def begin_getConfigService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getConfigService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getConfigService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getConfigService
    return op.end(self, _r)
def getContainerService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getContainerService
    return op.invoke(self, ((), _ctx))
def begin_getContainerService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getContainerService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getContainerService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getContainerService
    return op.end(self, _r)
def getDeleteService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getDeleteService
    return op.invoke(self, ((), _ctx))
def begin_getDeleteService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getDeleteService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getDeleteService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getDeleteService
    return op.end(self, _r)
def getLdapService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getLdapService
    return op.invoke(self, ((), _ctx))
def begin_getLdapService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getLdapService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getLdapService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getLdapService
    return op.end(self, _r)
def getPixelsService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getPixelsService
    return op.invoke(self, ((), _ctx))
def begin_getPixelsService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getPixelsService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getPixelsService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getPixelsService
    return op.end(self, _r)
def getProjectionService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getProjectionService
    return op.invoke(self, ((), _ctx))
def begin_getProjectionService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getProjectionService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getProjectionService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getProjectionService
    return op.end(self, _r)
def getQueryService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getQueryService
    return op.invoke(self, ((), _ctx))
def begin_getQueryService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getQueryService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getQueryService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getQueryService
    return op.end(self, _r)
def getRenderingSettingsService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getRenderingSettingsService
    return op.invoke(self, ((), _ctx))
def begin_getRenderingSettingsService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getRenderingSettingsService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getRenderingSettingsService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getRenderingSettingsService
    return op.end(self, _r)
def getRepositoryInfoService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getRepositoryInfoService
    return op.invoke(self, ((), _ctx))
def begin_getRepositoryInfoService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getRepositoryInfoService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getRepositoryInfoService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getRepositoryInfoService
    return op.end(self, _r)
def getRoiService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getRoiService
    return op.invoke(self, ((), _ctx))
def begin_getRoiService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getRoiService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getRoiService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getRoiService
    return op.end(self, _r)
def getScriptService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getScriptService
    return op.invoke(self, ((), _ctx))
def begin_getScriptService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getScriptService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getScriptService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getScriptService
    return op.end(self, _r)
def getSessionService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getSessionService
    return op.invoke(self, ((), _ctx))
def begin_getSessionService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getSessionService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getSessionService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getSessionService
    return op.end(self, _r)
def getShareService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getShareService
    return op.invoke(self, ((), _ctx))
def begin_getShareService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getShareService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getShareService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getShareService
    return op.end(self, _r)
def getTimelineService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getTimelineService
    return op.invoke(self, ((), _ctx))
def begin_getTimelineService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getTimelineService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getTimelineService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getTimelineService
    return op.end(self, _r)
def getTypesService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getTypesService
    return op.invoke(self, ((), _ctx))
def begin_getTypesService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getTypesService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getTypesService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getTypesService
    return op.end(self, _r)
def getUpdateService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getUpdateService
    return op.invoke(self, ((), _ctx))
def begin_getUpdateService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getUpdateService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getUpdateService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getUpdateService
    return op.end(self, _r)
def getMetadataService(self, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getMetadataService
    return op.invoke(self, ((), _ctx))
def begin_getMetadataService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    op = _M_omero.api.ServiceFactory._op_getMetadataService
    return op.begin(self, ((), _response, _ex, _sent, _ctx))
def end_getMetadataService(self, _r):
    op = _M_omero.api.ServiceFactory._op_getMetadataService
    return op.end(self, _r)
# --- ServiceFactoryPrx: stateful service factories ---
# Same sync/begin_/end_ delegation pattern as the getters above.
'''The gateway service provided here is deprecated in OMERO 4.3
see Deprecated Services
for more information and alternative usage.'''
def createGateway(self, _ctx=None):
    # Deprecated per the generated documentation above.
    return _M_omero.api.ServiceFactory._op_createGateway.invoke(self, ((), _ctx))
'''The gateway service provided here is deprecated in OMERO 4.3
see Deprecated Services
for more information and alternative usage.'''
def begin_createGateway(self, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createGateway.begin(self, ((), _response, _ex, _sent, _ctx))
'''The gateway service provided here is deprecated in OMERO 4.3
see Deprecated Services
for more information and alternative usage.'''
def end_createGateway(self, _r):
    return _M_omero.api.ServiceFactory._op_createGateway.end(self, _r)
def createExporter(self, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createExporter.invoke(self, ((), _ctx))
def begin_createExporter(self, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createExporter.begin(self, ((), _response, _ex, _sent, _ctx))
def end_createExporter(self, _r):
    return _M_omero.api.ServiceFactory._op_createExporter.end(self, _r)
def createJobHandle(self, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createJobHandle.invoke(self, ((), _ctx))
def begin_createJobHandle(self, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createJobHandle.begin(self, ((), _response, _ex, _sent, _ctx))
def end_createJobHandle(self, _r):
    return _M_omero.api.ServiceFactory._op_createJobHandle.end(self, _r)
def createRawFileStore(self, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createRawFileStore.invoke(self, ((), _ctx))
def begin_createRawFileStore(self, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createRawFileStore.begin(self, ((), _response, _ex, _sent, _ctx))
def end_createRawFileStore(self, _r):
    return _M_omero.api.ServiceFactory._op_createRawFileStore.end(self, _r)
def createRawPixelsStore(self, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createRawPixelsStore.invoke(self, ((), _ctx))
def begin_createRawPixelsStore(self, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createRawPixelsStore.begin(self, ((), _response, _ex, _sent, _ctx))
def end_createRawPixelsStore(self, _r):
    return _M_omero.api.ServiceFactory._op_createRawPixelsStore.end(self, _r)
def createRenderingEngine(self, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createRenderingEngine.invoke(self, ((), _ctx))
def begin_createRenderingEngine(self, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createRenderingEngine.begin(self, ((), _response, _ex, _sent, _ctx))
def end_createRenderingEngine(self, _r):
    return _M_omero.api.ServiceFactory._op_createRenderingEngine.end(self, _r)
def createSearchService(self, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createSearchService.invoke(self, ((), _ctx))
def begin_createSearchService(self, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createSearchService.begin(self, ((), _response, _ex, _sent, _ctx))
def end_createSearchService(self, _r):
    return _M_omero.api.ServiceFactory._op_createSearchService.end(self, _r)
def createThumbnailStore(self, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createThumbnailStore.invoke(self, ((), _ctx))
def begin_createThumbnailStore(self, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createThumbnailStore.begin(self, ((), _response, _ex, _sent, _ctx))
def end_createThumbnailStore(self, _r):
    return _M_omero.api.ServiceFactory._op_createThumbnailStore.end(self, _r)
# --- ServiceFactoryPrx: shared resources, by-name lookup and topic subscribe ---
'''Returns a reference to a back-end manager. The [omero::grid::SharedResources]
service provides look ups for various facilities offered by OMERO:
OMERO.scripts
OMERO.tables
These facilities may or may not be available on first request.'''
def sharedResources(self, _ctx=None):
    return _M_omero.api.ServiceFactory._op_sharedResources.invoke(self, ((), _ctx))
'''Returns a reference to a back-end manager. The [omero::grid::SharedResources]
service provides look ups for various facilities offered by OMERO:
OMERO.scripts
OMERO.tables
These facilities may or may not be available on first request.'''
def begin_sharedResources(self, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_sharedResources.begin(self, ((), _response, _ex, _sent, _ctx))
'''Returns a reference to a back-end manager. The [omero::grid::SharedResources]
service provides look ups for various facilities offered by OMERO:
OMERO.scripts
OMERO.tables
These facilities may or may not be available on first request.'''
def end_sharedResources(self, _r):
    return _M_omero.api.ServiceFactory._op_sharedResources.end(self, _r)
'''Allows looking up any stateless service by name.
See Constants.ice for examples of services.
If a service has been added by third-parties,
getByName can be used even though no concrete
method is available.'''
def getByName(self, name, _ctx=None):
    # name is the service identifier string (see Constants.ice).
    return _M_omero.api.ServiceFactory._op_getByName.invoke(self, ((name, ), _ctx))
'''Allows looking up any stateless service by name.
See Constants.ice for examples of services.
If a service has been added by third-parties,
getByName can be used even though no concrete
method is available.'''
def begin_getByName(self, name, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_getByName.begin(self, ((name, ), _response, _ex, _sent, _ctx))
'''Allows looking up any stateless service by name.
See Constants.ice for examples of services.
If a service has been added by third-parties,
getByName can be used even though no concrete
method is available.'''
def end_getByName(self, _r):
    return _M_omero.api.ServiceFactory._op_getByName.end(self, _r)
'''Allows looking up any stateful service by name.
See Constants.ice for examples of services.
If a service has been added by third-parties,
createByName can be used even though no concrete
method is available.'''
def createByName(self, name, _ctx=None):
    # Stateful counterpart of getByName.
    return _M_omero.api.ServiceFactory._op_createByName.invoke(self, ((name, ), _ctx))
'''Allows looking up any stateful service by name.
See Constants.ice for examples of services.
If a service has been added by third-parties,
createByName can be used even though no concrete
method is available.'''
def begin_createByName(self, name, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_createByName.begin(self, ((name, ), _response, _ex, _sent, _ctx))
'''Allows looking up any stateful service by name.
See Constants.ice for examples of services.
If a service has been added by third-parties,
createByName can be used even though no concrete
method is available.'''
def end_createByName(self, _r):
    return _M_omero.api.ServiceFactory._op_createByName.end(self, _r)
'''Subscribe to a given topic. The topic must exist and the user must
have sufficient permissions for that topic. Further the proxy object
must match the required type for the topic as encoded in the topic
name.'''
def subscribe(self, topicName, prx, _ctx=None):
    # Two in-parameters: the topic name and the subscriber proxy.
    return _M_omero.api.ServiceFactory._op_subscribe.invoke(self, ((topicName, prx), _ctx))
'''Subscribe to a given topic. The topic must exist and the user must
have sufficient permissions for that topic. Further the proxy object
must match the required type for the topic as encoded in the topic
name.'''
def begin_subscribe(self, topicName, prx, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_subscribe.begin(self, ((topicName, prx), _response, _ex, _sent, _ctx))
'''Subscribe to a given topic. The topic must exist and the user must
have sufficient permissions for that topic. Further the proxy object
must match the required type for the topic as encoded in the topic
name.'''
def end_subscribe(self, _r):
    return _M_omero.api.ServiceFactory._op_subscribe.end(self, _r)
# --- ServiceFactoryPrx: callback registration and session lifecycle ---
'''Sets the single callback used by the ServiceFactory
to communicate with the client application. A default
callback is set by the omero::client object on
session creation which should suffice for most usage.
See the client object's documentation in each language
mapping for ways to use the callback.'''
def setCallback(self, callback, _ctx=None):
    return _M_omero.api.ServiceFactory._op_setCallback.invoke(self, ((callback, ), _ctx))
'''Sets the single callback used by the ServiceFactory
to communicate with the client application. A default
callback is set by the omero::client object on
session creation which should suffice for most usage.
See the client object's documentation in each language
mapping for ways to use the callback.'''
def begin_setCallback(self, callback, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_setCallback.begin(self, ((callback, ), _response, _ex, _sent, _ctx))
'''Sets the single callback used by the ServiceFactory
to communicate with the client application. A default
callback is set by the omero::client object on
session creation which should suffice for most usage.
See the client object's documentation in each language
mapping for ways to use the callback.'''
def end_setCallback(self, _r):
    return _M_omero.api.ServiceFactory._op_setCallback.end(self, _r)
'''Deprecated misnomer.'''
def close(self, _ctx=None):
    return _M_omero.api.ServiceFactory._op_close.invoke(self, ((), _ctx))
'''Deprecated misnomer.'''
def begin_close(self, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_close.begin(self, ((), _response, _ex, _sent, _ctx))
'''Deprecated misnomer.'''
def end_close(self, _r):
    return _M_omero.api.ServiceFactory._op_close.end(self, _r)
'''Marks the session for closure rather than detachment, which will
be triggered by the destruction of the Glacier2 connection via
router.destroySession()
Closing the session rather the detaching is more secure, since all
resources are removed from the server and can safely be set once
it is clear that a client is finished with those resources.'''
def closeOnDestroy(self, _ctx=None):
    return _M_omero.api.ServiceFactory._op_closeOnDestroy.invoke(self, ((), _ctx))
'''Marks the session for closure rather than detachment, which will
be triggered by the destruction of the Glacier2 connection via
router.destroySession()
Closing the session rather the detaching is more secure, since all
resources are removed from the server and can safely be set once
it is clear that a client is finished with those resources.'''
def begin_closeOnDestroy(self, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_closeOnDestroy.begin(self, ((), _response, _ex, _sent, _ctx))
'''Marks the session for closure rather than detachment, which will
be triggered by the destruction of the Glacier2 connection via
router.destroySession()
Closing the session rather the detaching is more secure, since all
resources are removed from the server and can safely be set once
it is clear that a client is finished with those resources.'''
def end_closeOnDestroy(self, _r):
    return _M_omero.api.ServiceFactory._op_closeOnDestroy.end(self, _r)
'''Marks the session for detachment rather than closure, which will
be triggered by the destruction of the Glacier2 connection via
router.destroySession()
This is the default and allows a lost session to be reconnected,
at a slight security cost since the session will persist longer
and can be used by others if the UUID is intercepted.'''
def detachOnDestroy(self, _ctx=None):
    return _M_omero.api.ServiceFactory._op_detachOnDestroy.invoke(self, ((), _ctx))
'''Marks the session for detachment rather than closure, which will
be triggered by the destruction of the Glacier2 connection via
router.destroySession()
This is the default and allows a lost session to be reconnected,
at a slight security cost since the session will persist longer
and can be used by others if the UUID is intercepted.'''
def begin_detachOnDestroy(self, _response=None, _ex=None, _sent=None, _ctx=None):
    return _M_omero.api.ServiceFactory._op_detachOnDestroy.begin(self, ((), _response, _ex, _sent, _ctx))
'''Marks the session for detachment rather than closure, which will
be triggered by the destruction of the Glacier2 connection via
router.destroySession()
This is the default and allows a lost session to be reconnected,
at a slight security cost since the session will persist longer
and can be used by others if the UUID is intercepted.'''
def end_detachOnDestroy(self, _r):
    return _M_omero.api.ServiceFactory._op_detachOnDestroy.end(self, _r)
'''Returns a list of string ids for currently active services. This will
_not_ keep services alive, and in fact checks for all expired services
and removes them.'''
def activeServices(self, _ctx=None):
return _M_omero.api.ServiceFactory._op_activeServices.invoke(self, ((), _ctx))
'''Returns a list of string ids for currently active services. This will
_not_ keep services alive, and in fact checks for all expired services
and removes them.'''
def begin_activeServices(self, _response=None, _ex=None, _sent=None, _ctx=None):
return _M_omero.api.ServiceFactory._op_activeServices.begin(self, ((), _response, _ex, _sent, _ctx))
'''Returns a list of string ids for currently active services. This will
_not_ keep services alive, and in fact checks for all expired services
and removes them.'''
def end_activeServices(self, _r):
return _M_omero.api.ServiceFactory._op_activeServices.end(self, _r)
'''Requests that the given services be marked as alive. It is
possible that one of the services has already timed out, in which
case the returned long value will be non-zero.
Specifically, the bit representing the 0-based index will be 1:
if (retval & 1<<idx == 1<<idx) { // not alive }
Except for fatal server or session errors, this method should never
throw an exception.'''
def keepAllAlive(self, proxies, _ctx=None):
return _M_omero.api.ServiceFactory._op_keepAllAlive.invoke(self, ((proxies, ), _ctx))
'''Requests that the given services be marked as alive. It is
possible that one of the services has already timed out, in which
case the returned long value will be non-zero.
Specifically, the bit representing the 0-based index will be 1:
if (retval & 1<<idx == 1<<idx) { // not alive }
Except for fatal server or session errors, this method should never
throw an exception.'''
def begin_keepAllAlive(self, proxies, _response=None, _ex=None, _sent=None, _ctx=None):
return _M_omero.api.ServiceFactory._op_keepAllAlive.begin(self, ((proxies, ), _response, _ex, _sent, _ctx))
'''Requests that the given services be marked as alive. It is
possible that one of the services has already timed out, in which
case the returned long value will be non-zero.
Specifically, the bit representing the 0-based index will be 1:
if (retval & 1<<idx == 1<<idx) { // not alive }
Except for fatal server or session errors, this method should never
throw an exception.'''
def end_keepAllAlive(self, _r):
return _M_omero.api.ServiceFactory._op_keepAllAlive.end(self, _r)
'''Returns true if the given service is alive.
Except for fatal server or session errors, this method should never
throw an exception.'''
def keepAlive(self, proxy, _ctx=None):
return _M_omero.api.ServiceFactory._op_keepAlive.invoke(self, ((proxy, ), _ctx))
'''Returns true if the given service is alive.
Except for fatal server or session errors, this method should never
throw an exception.'''
def begin_keepAlive(self, proxy, _response=None, _ex=None, _sent=None, _ctx=None):
return _M_omero.api.ServiceFactory._op_keepAlive.begin(self, ((proxy, ), _response, _ex, _sent, _ctx))
'''Returns true if the given service is alive.
Except for fatal server or session errors, this method should never
throw an exception.'''
def end_keepAlive(self, _r):
return _M_omero.api.ServiceFactory._op_keepAlive.end(self, _r)
def checkedCast(proxy, facetOrCtx=None, _ctx=None):
return _M_omero.api.ServiceFactoryPrx.ice_checkedCast(proxy, '::omero::api::ServiceFactory', facetOrCtx, _ctx)
checkedCast = staticmethod(checkedCast)
def uncheckedCast(proxy, facet=None):
return _M_omero.api.ServiceFactoryPrx.ice_uncheckedCast(proxy, facet)
uncheckedCast = staticmethod(uncheckedCast)
_M_omero.api._t_ServiceFactoryPrx = IcePy.defineProxy('::omero::api::ServiceFactory', ServiceFactoryPrx)
_M_omero.api._t_ServiceFactory = IcePy.defineClass('::omero::api::ServiceFactory', ServiceFactory, -1, (), True, False, None, (_M_omero.cmd._t_Session,), ())
ServiceFactory._ice_type = _M_omero.api._t_ServiceFactory
ServiceFactory._op_getSecurityContexts = IcePy.Operation('getSecurityContexts', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_IObjectList, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_setSecurityContext = IcePy.Operation('setSecurityContext', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (((), _M_omero.model._t_IObject, False, 0),), (), ((), _M_omero.model._t_IObject, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_setSecurityPassword = IcePy.Operation('setSecurityPassword', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (((), IcePy._t_string, False, 0),), (), None, (_M_omero._t_ServerError,))
ServiceFactory._op_getAdminService = IcePy.Operation('getAdminService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_IAdminPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getConfigService = IcePy.Operation('getConfigService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_IConfigPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getContainerService = IcePy.Operation('getContainerService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_IContainerPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getDeleteService = IcePy.Operation('getDeleteService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_IDeletePrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getLdapService = IcePy.Operation('getLdapService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_ILdapPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getPixelsService = IcePy.Operation('getPixelsService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_IPixelsPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getProjectionService = IcePy.Operation('getProjectionService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_IProjectionPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getQueryService = IcePy.Operation('getQueryService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_IQueryPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getRenderingSettingsService = IcePy.Operation('getRenderingSettingsService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_IRenderingSettingsPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getRepositoryInfoService = IcePy.Operation('getRepositoryInfoService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_IRepositoryInfoPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getRoiService = IcePy.Operation('getRoiService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_IRoiPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getScriptService = IcePy.Operation('getScriptService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_IScriptPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getSessionService = IcePy.Operation('getSessionService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_ISessionPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getShareService = IcePy.Operation('getShareService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_ISharePrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getTimelineService = IcePy.Operation('getTimelineService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_ITimelinePrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getTypesService = IcePy.Operation('getTypesService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_ITypesPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getUpdateService = IcePy.Operation('getUpdateService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_IUpdatePrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getMetadataService = IcePy.Operation('getMetadataService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_IMetadataPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_createGateway = IcePy.Operation('createGateway', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_GatewayPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_createGateway.deprecate("createGateway() is deprecated")
ServiceFactory._op_createExporter = IcePy.Operation('createExporter', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_ExporterPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_createJobHandle = IcePy.Operation('createJobHandle', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_JobHandlePrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_createRawFileStore = IcePy.Operation('createRawFileStore', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_RawFileStorePrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_createRawPixelsStore = IcePy.Operation('createRawPixelsStore', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_RawPixelsStorePrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_createRenderingEngine = IcePy.Operation('createRenderingEngine', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_RenderingEnginePrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_createSearchService = IcePy.Operation('createSearchService', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_SearchPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_createThumbnailStore = IcePy.Operation('createThumbnailStore', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_ThumbnailStorePrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_sharedResources = IcePy.Operation('sharedResources', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.grid._t_SharedResourcesPrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_getByName = IcePy.Operation('getByName', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (((), IcePy._t_string, False, 0),), (), ((), _M_omero.api._t_ServiceInterfacePrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_createByName = IcePy.Operation('createByName', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (((), IcePy._t_string, False, 0),), (), ((), _M_omero.api._t_StatefulServiceInterfacePrx, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_subscribe = IcePy.Operation('subscribe', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (((), IcePy._t_string, False, 0), ((), IcePy._t_ObjectPrx, False, 0)), (), None, (_M_omero._t_ServerError,))
ServiceFactory._op_setCallback = IcePy.Operation('setCallback', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (((), _M_omero.api._t_ClientCallbackPrx, False, 0),), (), None, (_M_omero._t_ServerError,))
ServiceFactory._op_close = IcePy.Operation('close', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), None, (_M_omero._t_ServerError,))
ServiceFactory._op_close.deprecate("close() is deprecated. use closeOnDestroy() instead.")
ServiceFactory._op_closeOnDestroy = IcePy.Operation('closeOnDestroy', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), None, (_M_omero._t_ServerError,))
ServiceFactory._op_detachOnDestroy = IcePy.Operation('detachOnDestroy', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), None, (_M_omero._t_ServerError,))
ServiceFactory._op_activeServices = IcePy.Operation('activeServices', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (), (), ((), _M_omero.api._t_StringSet, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_keepAllAlive = IcePy.Operation('keepAllAlive', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (((), _M_omero.api._t_ServiceList, False, 0),), (), ((), IcePy._t_long, False, 0), (_M_omero._t_ServerError,))
ServiceFactory._op_keepAlive = IcePy.Operation('keepAlive', Ice.OperationMode.Normal, Ice.OperationMode.Normal, False, None, (), (((), _M_omero.api._t_ServiceInterfacePrx, False, 0),), (), ((), IcePy._t_bool, False, 0), (_M_omero._t_ServerError,))
_M_omero.api.ServiceFactory = ServiceFactory
del ServiceFactory
_M_omero.api.ServiceFactoryPrx = ServiceFactoryPrx
del ServiceFactoryPrx
# End of module omero.api
__name__ = 'omero'
# End of module omero
| [
"guillaume.baty@inria.fr"
] | guillaume.baty@inria.fr |
d97ce608036c61b0d7f5ff7468932f6c8707e8c7 | 4382d1d3775cf8cfd33cf66c534203f2859d45d4 | /oolearning/evaluators/RegressionEvaluator.py | 4af5fd81469487c57019e20189f0049ca0a20b75 | [
"MIT"
] | permissive | shane-kercheval/oo-learning | 2d5b43a7c515df65b0d903f7b30c74286e609030 | 4cc5f574be258925b2b30fcd90f994b356b97c88 | refs/heads/master | 2022-07-21T22:46:11.640491 | 2022-06-15T02:52:24 | 2022-06-15T02:52:24 | 119,191,623 | 1 | 0 | MIT | 2022-04-06T19:26:33 | 2018-01-27T18:37:48 | Python | UTF-8 | Python | false | false | 5,008 | py | import numpy as np
from matplotlib import pyplot as plt
from oolearning.evaluators.EvaluatorBase import EvaluatorBase
from oolearning.evaluators.MaeScore import MaeScore
from oolearning.evaluators.MseScore import MseScore
from oolearning.evaluators.RSquaredScore import RSquaredScore
import warnings
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=Warning)
from statsmodels import api as sm # https://github.com/statsmodels/statsmodels/issues/3814
# noinspection SpellCheckingInspection
class RegressionEvaluator(EvaluatorBase):
    """
    Evaluates models for regression (i.e. numeric outcome) problems.

    Call `evaluate()` once with the actual and predicted values; afterwards
    the quality metrics (`all_quality_metrics`) and the diagnostic plot
    methods are available.
    """
    def __init__(self):
        # All fields are populated by `evaluate()`; they are None until then.
        self._actual_values = None
        self._predicted_values = None
        self._residuals = None
        self._standard_deviation = None
        self._mean_squared_error = None
        self._mean_absolute_error = None
        self._r_squared = None

    def evaluate(self, actual_values: np.ndarray, predicted_values: np.ndarray):
        """
        Computes and caches the regression quality metrics.

        :param actual_values: observed target values
        :param predicted_values: model predictions aligned with `actual_values`
        :return: self, so the call can be chained
        """
        assert len(actual_values) == len(predicted_values)
        self._actual_values = actual_values
        self._predicted_values = predicted_values
        # Positive residual => the model under-predicted that observation.
        self._residuals = actual_values - predicted_values
        self._standard_deviation = np.std(actual_values)
        self._mean_squared_error = MseScore().calculate(actual_values=actual_values,
                                                        predicted_values=predicted_values)
        self._mean_absolute_error = MaeScore().calculate(actual_values=actual_values,
                                                         predicted_values=predicted_values)
        self._r_squared = RSquaredScore().calculate(actual_values=actual_values,
                                                    predicted_values=predicted_values)
        return self

    @property
    def mean_absolute_error(self) -> float:
        return self._mean_absolute_error

    @property
    def mean_squared_error(self) -> float:
        return self._mean_squared_error

    @property
    def root_mean_squared_error(self) -> float:
        return np.sqrt(self.mean_squared_error)

    @property
    def rmse_to_st_dev(self) -> float:
        # RMSE normalized by the target's standard deviation; lower is better.
        return self.root_mean_squared_error / self._standard_deviation

    @property
    def r_squared(self) -> float:
        return self._r_squared

    @property
    def total_observations(self):
        return len(self._actual_values)

    @property
    def all_quality_metrics(self) -> dict:
        """All quality metrics in one dict, keyed by human-readable name."""
        return {'Mean Absolute Error (MAE)': self.mean_absolute_error,
                'Mean Squared Error (MSE)': self.mean_squared_error,
                'Root Mean Squared Error (RMSE)': self.root_mean_squared_error,
                'RMSE to Standard Deviation of Target': self.rmse_to_st_dev,
                'R Squared': self.r_squared,
                'Total Observations': self.total_observations}

    def plot_residuals_vs_fits(self):
        """Scatter of residuals vs. fitted values with a loess trend line."""
        lowess = sm.nonparametric.lowess
        loess_points = lowess(self._residuals, self._predicted_values)
        loess_x, loess_y = zip(*loess_points)
        plt.plot(loess_x, loess_y, color='r')
        plt.scatter(x=self._predicted_values, y=self._residuals, s=8, alpha=0.5)
        plt.title('Residuals vs. Fitted Values')
        plt.xlabel('Fitted Values')
        plt.ylabel('Residuals (Actual - Predicted)')
        return plt.gca()

    def plot_predictions_vs_actuals(self):
        """Scatter of predictions vs. actuals with a loess trend and the y=x reference line."""
        lowess = sm.nonparametric.lowess
        loess_points = lowess(self._predicted_values, self._actual_values)
        loess_x, loess_y = zip(*loess_points)
        plt.plot(loess_x, loess_y, color='r', alpha=0.5, label='Loess (Predictions vs Actuals)')
        plt.plot(self._actual_values, self._actual_values, color='b', alpha=0.5, label='Perfect Prediction')
        plt.scatter(x=self._actual_values, y=self._predicted_values, s=8, alpha=0.5)
        plt.title('Predicted Values vs. Actual Values')
        plt.xlabel('Actuals')
        plt.ylabel('Predicted')
        ax = plt.gca()
        handles, labels = ax.get_legend_handles_labels()
        ax.legend(handles, labels)
        plt.figtext(0.99, 0.01,
                    'Note: observations above blue line mean model is over-predicting; below means under-predicting.',  # noqa
                    horizontalalignment='right')
        return ax

    def plot_residuals_vs_actuals(self):
        """Scatter of residuals vs. actual values with a loess trend line."""
        lowess = sm.nonparametric.lowess
        loess_points = lowess(self._residuals, self._actual_values)
        loess_x, loess_y = zip(*loess_points)
        plt.plot(loess_x, loess_y, color='r')
        plt.scatter(x=self._actual_values, y=self._residuals, s=8, alpha=0.5)
        plt.title('Residuals vs. Actual Values')
        plt.xlabel('Actual')
        plt.ylabel('Residuals (Actual - Predicted)')
        plt.figtext(0.99, 0.01,
                    'Note: Actual > Predicted => Under-predicting (positive residual); negative residuals mean over-predicting',  # noqa
                    horizontalalignment='right')
        # BUG FIX: previously returned None, unlike the other plot_* methods
        # which all return the current axes.
        return plt.gca()
| [
"shane.kercheval@gmail.com"
] | shane.kercheval@gmail.com |
9988b94d526a323b486685de8ce175e1100badc7 | 61d499546f75e851d8b55293fe52754d01b66502 | /tcpproxy.py | 64e649d646ad3cd11408ae3fa20b753ce57e0040 | [] | no_license | sqrt-9/python-black | 0f6fd41db14d15ba0cb3438fd49066001ea85c8b | 82f0635e113b11bf2dffb1d85679018bf990e63b | refs/heads/master | 2021-01-18T00:15:50.185617 | 2016-09-20T12:12:11 | 2016-09-20T12:12:11 | 68,708,276 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,352 | py | import sys
import socket
import threading
def server_loop(local_host,local_port,remote_host,remote_port,receive_first):
server = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
try:
server.bind((local_host,local_port))
except:
print 'failed on %S:%d'%(local_host,local_port)
print 'check for other'
sys.exit(0)
print 'listen on %s:%d'%(local_host,local_port)
server.listen(5)
while True:
client_socket,addr = server.accept()
print 'received from%s:%d'%(addr[0],addr[1])
proxy_thread = threading.Thread(target = proxy_handler,args = (client_socket,remote_port,receive_first))
proxy_thread.start()
def proxy_handler(client_socket,remote_host,remote_port,receive_first):
remote_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
remote_socket.connect((remote_host,remote_port))
if receive_first:
remote_buffer = receive_from(remote_socket)
hexdump(remote_buffer)
remote_buffer = response_handler(remote_buffer)
if len(remote_buffer):
print 'send %d bytes to localhost'%len(remote_buffer)
client_socket.send(remote_buffer)
while True:
local_buffer = receive_from(client_socket)
if len(local_buffer):
print 'received %d bytes from localhost'%len(local_buffer)
hexdump(local_buffer)
local_buffer = request_handler(local_buffer)
remote_socket.send(local_buffer)
print 'sent to remote'
remote_buffer = receive_from(remote_sockeet)
if len(remote_buffer):
print 'received %d bytes from remote'%len(remote_buffer)
hexdump(remote_buffer):
remote_buffer = response_handler(remote_buffer)
client_soket.send(remote_buffer)
print 'sent to localhost'
if not len(local_buffer) or not len(remote_buffer):
client_socket.close()
remote.socket.close()
print 'no more date. Closing connections.'
break
def hexdump(src,length = 16):
result = []
digits = 4 if isinstance(src,unicode) else 2
for i in xrange(0,len(src,),length):
s = src[i:i+length]
hexa = b' '.join
def main():
if len(sys.argv[1:]) != s:
print 'example:127.0.0.1 9000 10.12.131.1 9000 True'
sys.exit(0)
local_host = sys.argv[1]
local_port = int(sys.argv[2])
remote_host = sys.argv[3]
remote_port = int(sys.argv[4])
receive_first = sys.argv[5]
if 'True' in receive_first:
receive_first = True
else:
receive_first = Flase
server_loop(local_host,local_port,remote_host,remote_port,receive_first)
main()
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
f071adc4282e6c3cf2b85418e354abb6b8fdc0d4 | 4c8c0f857500b5f4b572f139602e46a6c813f6e3 | /Polymorhphism_and_Magic_methods_exercises/Animal/animals/birds.py | 4597d2754bc68c1d5ff9190aec3e1a14945a7ebd | [] | no_license | svetoslavastoyanova/Python_OOP | 3d21fb0480c088ecad11211c2d9a01139cde031f | 518f73ecc8a39e7085d4b8bf5657a1556da3dcfa | refs/heads/main | 2023-08-04T19:46:58.906739 | 2021-09-18T07:46:02 | 2021-09-18T07:46:02 | 352,304,158 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 584 | py | from Animal.animals.animal import Bird
from Animal.food import Meat, Vegetable, Fruit, Food
class Owl(Bird):
    """A bird that eats only Meat; any other food is refused with a message."""

    def feed(self, food):
        # Accept meat only; everything else is rejected without side effects.
        if isinstance(food, Meat):
            self.weight += food.quantity * 0.25
            self.food_eaten += food.quantity
        else:
            return f"{self.__class__.__name__} does not eat {food.__class__.__name__}!"

    def make_sound(self):
        return "Hoot Hoot"
class Hen(Bird):
    """A bird that eats any kind of food."""

    def feed(self, food):
        # Hens accept everything; weight grows by 0.35 per unit of food.
        quantity = food.quantity
        self.weight += quantity * 0.35
        self.food_eaten += quantity

    def make_sound(self):
        return "Cluck"
| [
"svetoslava_stoyanova92@abv.bg"
] | svetoslava_stoyanova92@abv.bg |
c6e7240c8e1647d157d1af0e10c600e086b34e15 | e87524319ee0e610c451f1777a5a90883f508fe3 | /top/clearlight/base/runoob/print/percent_oper.py | 1823597198c3561b7a515bdedceca880ca3144ad | [
"Apache-2.0"
] | permissive | ClearlightY/Python_learn | 3a805bbe8883d58d0e60bd73f985b6093b983248 | 93b9b7efae5a1cf05faf8ee7c5e36dcc99c7a232 | refs/heads/master | 2020-12-11T06:58:17.984184 | 2020-07-30T18:10:20 | 2020-07-30T18:10:20 | 233,794,474 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 961 | py | '''
The first part of "%6.3f" is the format specification: 6 is the total
display width, 3 the number of decimal places, and f the float conversion
type.  The value after the "%" operator is what gets formatted; output is
right-aligned, and "2.300" is only 5 characters long, hence one leading
space.
'''
print("%6.3f" % 2.3)
#  2.300  (one leading space to fill the width of 6)
'''
x means hexadecimal; the display width is 10 and "+" forces a sign, so the
two-character result "+a" is padded with 8 leading spaces.
'''
print("%+10x" % 10)
#         +a  (8 leading spaces, then "+a")
'''
Conversion types for printf-style "%" formatting:
%s  string (rendered with str())
%r  string (rendered with repr())
%c  single character
%d  decimal integer
%i  decimal integer
%o  octal integer
%x  hexadecimal integer
%e  exponential notation (lower-case e)
%E  exponential notation (upper-case E)
%f  floating point
%F  floating point, same as %f
%g  exponent (e) or float, whichever is shorter
%G  exponent (E) or float, whichever is shorter
%%  a literal "%" character
'''
print("%-5x" % -10)
# -a     (the "-" flag left-justifies within the width of 5)
pi = 3.1415
print("pi的值是%s" % pi)
print("pi的值是%.8f" % pi)
# pi的值是3.1415
# pi的值是3.14150000  (%.8f pads the fraction to 8 digits)
| [
"lxy12531@163.com"
] | lxy12531@163.com |
a33e7ff9a796b765615d6964bf5bf0d3690bd040 | d4d2ed267f63d9b48c74135a124bd2534211145f | /simpleblog/urls.py | fcfb8855430b032a5dba05877c6d41c7836f5f52 | [] | no_license | MerleLiuKun/EasyCase | 0a7a8411f7095f68e4ecd3be6deb7e99808000dd | fde2a88b3e2f31949d3f7cbe9a44704be44974f6 | refs/heads/master | 2021-09-05T00:20:34.622096 | 2017-11-02T03:52:28 | 2017-11-02T03:52:28 | 108,724,714 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 388 | py | from django.conf.urls import url
from rest_framework.routers import DefaultRouter
from . import views
# DRF router that auto-generates the standard list/detail URL patterns
# for every registered viewset.
router = DefaultRouter()
router.register(
    r'tags',
    views.TagViewSet,
)
# NOTE(review): prefix is spelled 'categorys' (sic); it is a live URL
# prefix, so changing it would break existing clients -- left as-is.
router.register(
    r'categorys',
    views.CategoryViewSet,
)
router.register(
    r'posts',
    views.PostViewSet,
)
router.register(
    r'users',
    views.UserViewSet,
)
# Expose the router-generated patterns as this module's urlconf.
urlpatterns = router.urls
| [
"merle.liukun@gmail.com"
] | merle.liukun@gmail.com |
f4fa8fe37323df0691cdd4b41e1b498002191f43 | 87cacb90676e5e7d1d8f0e643f1ad6ed9e35acbf | /need to clean/codes/split_train.py | b8cdd4d50e9135672a21ad3fba518dbe29845926 | [] | no_license | vuhoangminh/Kaggle-TalkingData-AdTracking-Fraud-Detection-Challenge | 3b75d4a7c60574a4875c62e8843a01d945d792d3 | 56045f446f1a0c538d91ac65e536edc4b7b5a417 | refs/heads/master | 2020-03-13T12:56:42.309722 | 2018-05-08T10:50:35 | 2018-05-08T10:50:35 | 131,129,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,561 | py | """
Adding improvements inspired from:
Ravi Teja's fe script: https://www.kaggle.com/rteja1113/lightgbm-with-count-features?scriptVersionId=2815638
"""
import pandas as pd
import time
import numpy as np
from sklearn.cross_validation import train_test_split
import lightgbm as lgb
import gc
import pickle
# Local directory holding the Kaggle TalkingData CSV inputs.
path = 'E:/kaggle/input/'
# Narrow integer dtypes so the very large train CSV fits in memory.
dtypes = {
    'ip' : 'uint32',
    'app' : 'uint16',
    'device' : 'uint16',
    'os' : 'uint16',
    'channel' : 'uint16',
    'is_attributed' : 'uint8',
    'click_id' : 'uint32'
}
# Rows to process in total and rows per chunk; assumes train.csv has at
# least TRAINSAMPLE rows -- TODO confirm against the actual file.
TRAINSAMPLE = 180000000
NROWS = 90000000
# NROWS = 300
num_split = int(TRAINSAMPLE/NROWS)
print (num_split)
def load_write(iSplit):
    """Read chunk `iSplit` of train.csv, add count features, and pickle it.

    Each chunk covers rows [iSplit*NROWS, (iSplit+1)*NROWS) of the data and
    is written to a pickle file named 'train_<iSplit>'.
    """
    skip_rows = iSplit*NROWS
    print('loading train data...')
    if iSplit>0:
        # BUG FIX: skiprows=range(1, skip_rows) skipped only skip_rows-1 data
        # rows (row 0 is the header), so every chunk after the first re-read
        # the previous chunk's last row.  range(1, skip_rows + 1) skips
        # exactly skip_rows data rows.
        train_df = pd.read_csv(path+"train.csv", skiprows=range(1,skip_rows + 1), nrows=NROWS, dtype=dtypes, usecols=['ip','app','device','os', 'channel', 'click_time', 'is_attributed'])
    else:
        train_df = pd.read_csv(path+"train.csv", nrows=NROWS, dtype=dtypes, usecols=['ip','app','device','os', 'channel', 'click_time', 'is_attributed'])
    gc.collect()

    print('Extracting new features...')
    # Datetime components of the click timestamp, kept as uint8 to save memory.
    train_df['sec'] = pd.to_datetime(train_df.click_time).dt.second.astype('uint8')
    train_df['min'] = pd.to_datetime(train_df.click_time).dt.minute.astype('uint8')
    train_df['hour'] = pd.to_datetime(train_df.click_time).dt.hour.astype('uint8')
    train_df['day'] = pd.to_datetime(train_df.click_time).dt.day.astype('uint8')
    train_df['wday'] = pd.to_datetime(train_df.click_time).dt.dayofweek.astype('uint8')
    print(train_df.head())
    gc.collect()

    # Click count per (ip, day, hour) -> 'qty'.
    print('grouping by ip-day-hour combination...')
    gp = train_df[['ip','day','hour','channel']].groupby(by=['ip','day','hour'])[['channel']].count().reset_index().rename(index=str, columns={'channel': 'qty'})
    train_df = train_df.merge(gp, on=['ip','day','hour'], how='left')
    print(train_df.head())
    del gp
    gc.collect()

    # Click count per (ip, app) -> 'ip_app_count'.
    print('group by ip-app combination...')
    gp = train_df[['ip', 'app', 'channel']].groupby(by=['ip', 'app'])[['channel']].count().reset_index().rename(index=str, columns={'channel': 'ip_app_count'})
    train_df = train_df.merge(gp, on=['ip','app'], how='left')
    print(train_df.head())
    del gp
    gc.collect()

    # Click count per (ip, app, os) -> 'ip_app_os_count'.
    print('group by ip-app-os combination...')
    gp = train_df[['ip','app', 'os', 'channel']].groupby(by=['ip', 'app', 'os'])[['channel']].count().reset_index().rename(index=str, columns={'channel': 'ip_app_os_count'})
    print("merging...")
    train_df = train_df.merge(gp, on=['ip','app', 'os'], how='left')
    print(train_df.head())
    del gp
    gc.collect()

    print("vars and data type: ")
    train_df.info()
    # Downcast the count columns; assumes counts stay below 65536 within one
    # chunk -- TODO confirm (heavy IPs could overflow uint16).
    train_df['qty'] = train_df['qty'].astype('uint16')
    train_df['ip_app_count'] = train_df['ip_app_count'].astype('uint16')
    train_df['ip_app_os_count'] = train_df['ip_app_os_count'].astype('uint16')
    print(train_df.head())

    print("after splitted: ")
    print(train_df.head())
    # 'ip' and raw 'click_time' were only needed to build features; 'sec' is
    # dropped too (presumably considered noise -- TODO confirm).
    train_df = train_df.drop(['ip', 'click_time', 'sec'], axis=1)
    print(train_df.head())

    print("train size: ", len(train_df))
    save_name = 'train_' + str(iSplit)
    print("save to: ", save_name)
    train_df.to_pickle(save_name)

    del train_df
    gc.collect()
# Drive the chunked processing: each split is feature-engineered and
# pickled to its own 'train_<i>' file by load_write().
for iSplit in range(num_split):
# for iSplit in range(5):
    print('Processing split', iSplit+1)
    skip_rows = iSplit*NROWS  # informational only; load_write recomputes it
    print (skip_rows)
    load_write(iSplit)
| [
"minhmanutd@gmail.com"
] | minhmanutd@gmail.com |
e8326a9f5e81a823edf3d917f79b00e74e3ae5d3 | 450c45e780332f56ea339a83891f0c12d6120794 | /google/ads/google_ads/v2/services/display_keyword_view_service_client.py | 0dc3881cfd1f64b2409c021cf12e8f862f73d224 | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] | permissive | akaashhazarika/google-ads-python | 7766370cc526190c962dc9ff806520d459b05c25 | 25b43aa616020ad7dfa55b90fa236a29cf97d45a | refs/heads/master | 2020-06-07T08:56:21.533323 | 2019-06-27T15:26:49 | 2019-06-27T15:26:49 | 191,448,135 | 0 | 0 | Apache-2.0 | 2019-06-11T20:57:04 | 2019-06-11T20:57:04 | null | UTF-8 | Python | false | false | 10,016 | py | # -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.ads.googleads.v2.services DisplayKeywordViewService API."""
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.gapic_v1.routing_header
import google.api_core.grpc_helpers
import google.api_core.path_template
from google.ads.google_ads.v2.services import display_keyword_view_service_client_config
from google.ads.google_ads.v2.services.transports import display_keyword_view_service_grpc_transport
from google.ads.google_ads.v2.proto.services import display_keyword_view_service_pb2
# Resolved once at import time from the installed 'google-ads' distribution;
# presumably reported to the API via client-info metadata -- the usage is
# not visible in this chunk.
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
    'google-ads',
).version
class DisplayKeywordViewServiceClient(object):
    """Service to manage display keyword views."""

    # Default host:port that the gRPC transport connects to.
    SERVICE_ADDRESS = 'googleads.googleapis.com:443'
    """The default address of the service."""

    # The name of the interface for this client. This is the key used to
    # find the method configuration in the client_config dictionary.
    _INTERFACE_NAME = 'google.ads.googleads.v2.services.DisplayKeywordViewService'
    @classmethod
    def from_service_account_file(cls, filename, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            DisplayKeywordViewServiceClient: The constructed client.
        """
        # Load service-account credentials from disk and forward them to the
        # regular constructor via the `credentials` keyword argument.
        credentials = service_account.Credentials.from_service_account_file(
            filename)
        kwargs['credentials'] = credentials
        return cls(*args, **kwargs)

    # Alias: a service-account "json" key file and a "credentials file" are
    # the same thing, so both names construct the client identically.
    from_service_account_json = from_service_account_file
    @classmethod
    def display_keyword_view_path(cls, customer, display_keyword_view):
        """Return a fully-qualified display_keyword_view string.

        Args:
            customer: The customer ID segment of the resource name.
            display_keyword_view: The display keyword view ID segment.

        Returns:
            str: ``customers/{customer}/displayKeywordViews/{display_keyword_view}``
            with both placeholders expanded.
        """
        return google.api_core.path_template.expand(
            'customers/{customer}/displayKeywordViews/{display_keyword_view}',
            customer=customer,
            display_keyword_view=display_keyword_view,
        )
def __init__(self, transport=None, channel=None, credentials=None,
client_config=None, client_info=None):
"""Constructor.
Args:
transport (Union[~.DisplayKeywordViewServiceGrpcTransport,
Callable[[~.Credentials, type], ~.DisplayKeywordViewServiceGrpcTransport]): A transport
instance, responsible for actually making the API calls.
The default transport uses the gRPC protocol.
This argument may also be a callable which returns a
transport instance. Callables will be sent the credentials
as the first argument and the default transport class as
the second argument.
channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
through which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is mutually exclusive with providing a
transport instance to ``transport``; doing so will raise
an exception.
client_config (dict): DEPRECATED. A dictionary of call options for
each method. If not specified, the default configuration is used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
"""
# Raise deprecation warnings for things we want to go away.
if client_config is not None:
warnings.warn('The `client_config` argument is deprecated.',
PendingDeprecationWarning, stacklevel=2)
else:
client_config = display_keyword_view_service_client_config.config
if channel:
warnings.warn('The `channel` argument is deprecated; use '
'`transport` instead.',
PendingDeprecationWarning, stacklevel=2)
# Instantiate the transport.
# The transport is responsible for handling serialization and
# deserialization and actually sending data to the service.
if transport:
if callable(transport):
self.transport = transport(
credentials=credentials,
default_class=display_keyword_view_service_grpc_transport.DisplayKeywordViewServiceGrpcTransport,
)
else:
if credentials:
raise ValueError(
'Received both a transport instance and '
'credentials; these are mutually exclusive.'
)
self.transport = transport
else:
self.transport = display_keyword_view_service_grpc_transport.DisplayKeywordViewServiceGrpcTransport(
address=self.SERVICE_ADDRESS,
channel=channel,
credentials=credentials,
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
gapic_version=_GAPIC_LIBRARY_VERSION,
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
self._client_info = client_info
# Parse out the default settings for retry and timeout for each RPC
# from the client configuration.
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
client_config['interfaces'][self._INTERFACE_NAME],
)
# Save a dictionary of cached API call functions.
# These are the actual callables which invoke the proper
# transport methods, wrapped with `wrap_method` to add retry,
# timeout, and the like.
self._inner_api_calls = {}
# Service calls
def get_display_keyword_view(
self,
resource_name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Returns the requested display keyword view in full detail.
Args:
resource_name (str): The resource name of the display keyword view to fetch.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.ads.googleads_v2.types.DisplayKeywordView` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'get_display_keyword_view' not in self._inner_api_calls:
self._inner_api_calls['get_display_keyword_view'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_display_keyword_view,
default_retry=self._method_configs['GetDisplayKeywordView'].retry,
default_timeout=self._method_configs['GetDisplayKeywordView'].timeout,
client_info=self._client_info,
)
request = display_keyword_view_service_pb2.GetDisplayKeywordViewRequest(
resource_name=resource_name,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [('resource_name', resource_name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(routing_header)
metadata.append(routing_metadata)
return self._inner_api_calls['get_display_keyword_view'](request, retry=retry, timeout=timeout, metadata=metadata)
| [
"noreply@github.com"
] | akaashhazarika.noreply@github.com |
af6563a14947d2c2eee15062e80ebef5c653c552 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_skimmed.py | 85b96e7c40e8352d9c6ec45728f4ef38b80df091 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 228 | py |
from xai.brain.wordbase.verbs._skim import _SKIM
#calss header
class _SKIMMED(_SKIM, ):
    """Word-base entry for the inflected verb form "SKIMMED"."""

    def __init__(self,):
        # Initialise the base verb entry first, then specialise the
        # metadata for this surface form.
        super(_SKIMMED, self).__init__()
        self.specie = 'verbs'
        self.basic = "skim"
        self.name = "SKIMMED"
        self.jsondata = {}
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
304b8cd13235847181617b46fe7f83d8ae6cc83e | 8019624601ea9d3aa5c98ee8aa47c74a20805ca1 | /twnews/tests/test_chinatimes.py | 3abd2e6b9a57b9d4c0407bb29326521861b11483 | [
"MIT"
] | permissive | sevaroy/twnews | 7825b7717386d61bfe2f96439ad57a7f2b99ff9c | 92d468f36a1fd2031bc5fa315ad02d317eebcf1a | refs/heads/master | 2020-04-04T18:03:46.064045 | 2018-11-02T03:31:04 | 2018-11-02T03:31:04 | 156,148,250 | 1 | 0 | MIT | 2018-11-05T02:17:29 | 2018-11-05T02:17:29 | null | UTF-8 | Python | false | false | 2,292 | py | """
中時電子報單元測試
"""
import unittest
from twnews.soup import NewsSoup, pkgdir
#@unittest.skip
class TestChinatimes(unittest.TestCase):
    """China Times (中時電子報) deconstruction tests.

    All three tests deconstruct the same article from a different source
    (bundled sample, live desktop page, live mobile page) and share one
    set of expected values via ``_assert_article``.
    """

    def setUp(self):
        # Article under test and the datetime format used in assertions.
        self.url = 'https://www.chinatimes.com/realtimenews/20180916001767-260402'
        self.dtf = '%Y-%m-%d %H:%M:%S'

    def _assert_article(self, nsoup):
        """Shared expectations for the deconstructed article.

        If these fail for the local sample, regenerate it with
        bin/getnews.sh; for the live pages the article content may have
        changed and the expected values must be updated in step.
        """
        self.assertEqual('chinatimes', nsoup.channel)
        self.assertIn('悲慟!北市士林年邁母子 住處上吊自殺身亡', nsoup.title())
        self.assertEqual('2018-09-16 15:31:00', nsoup.date().strftime(self.dtf))
        self.assertEqual('謝明俊', nsoup.author())
        self.assertIn('北市士林區葫蘆街一處民宅', nsoup.contents())

    def test_01_sample(self):
        """Deconstruct the bundled local sample (desktop layout)."""
        nsoup = NewsSoup(pkgdir + '/samples/chinatimes.html.gz', mobile=False)
        self._assert_article(nsoup)

    def test_02_desktop(self):
        """Deconstruct the live desktop page with a forced refresh."""
        nsoup = NewsSoup(self.url, refresh=True, mobile=False)
        self._assert_article(nsoup)

    def test_03_mobile(self):
        """Deconstruct the live mobile page with a forced refresh."""
        nsoup = NewsSoup(self.url, refresh=True, mobile=True)
        self._assert_article(nsoup)
| [
"virus.warnning@gmail.com"
] | virus.warnning@gmail.com |
ab041452c1c435c0d19fabc01ece24d0ee8c3b5f | d8574d8ca5c571fd7ed3b67dac00c55df2ac6580 | /data/kitchen/python/kitchen/evaluatorGui.py | 312321bdd631c125ebc01f300b4833c3d8811df8 | [] | no_license | h2r/slu_core | 19de87e55e84dd4dada65b4d7b65857bdae740e8 | e30c0245177e8481397fd4987422e3d35d79ec08 | refs/heads/master | 2021-07-09T18:19:33.424551 | 2020-07-06T18:49:18 | 2020-07-06T18:49:18 | 143,929,984 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,331 | py | from kitchen import kitchenState, recipeManager, annotatedRecipe, planningLanguage
import pickle_util
class Evaluator():
    """Scores inferred kitchen plans against annotated recipe plans.

    The self.* counters accumulate across calls, so one Evaluator
    instance can summarise an entire corpus run. (Python 2 module.)
    """

    def __init__(self):
        self.score = 0              # not updated by any method in this class
        self.successCount = 0       # evaluations whose end state matched
        self.totalCount = 0         # total instructions/recipes evaluated
        self.noopSuccessCount = 0   # empty plans correctly matching noop annotations

    def evaluateInstructions(self, targetRecipe):
        """Evaluate one recipe instruction-by-instruction.

        Returns a list of (planning-language string, is_correct) tuples,
        used mainly by the GUI.
        """
        #Can a single instruction be interpreted as multiple instructions? Does it even matter?
        model_fname = "kitchenModel_1.5.pck"
        rm = recipeManager.RecipeManager(model_fname)
        pl = planningLanguage.PlanningLanguage()
        # Per-recipe counters (the self.* counters aggregate across recipes).
        tc = 0
        sc = 0
        nsc = 0
        #A list of (planningLanguage, is_correct) tuples. Used mainly for GUI
        completePath = []
        print "Evaluating (instruction-level): " + targetRecipe.name
        for i in range(len(targetRecipe.instructions)):
            self.totalCount += 1
            tc += 1
            instruction = targetRecipe.instructions[i]
            #print "instruction", instruction
            initialState = targetRecipe.idx_to_start_state(i)
            # Plan inferred from the natural-language text vs. the plan
            # compiled from the human annotation.
            instructionInferredPlan = rm.find_plan(instruction[0], initialState)
            desiredPlan = pl.compileAnnotation(instruction[1], initialState)
            desiredEndState = desiredPlan[-1][1]
            if len(instructionInferredPlan) == 0:
                #print "Zero length instruction for:", instruction
                # An empty inferred plan is only correct when the annotation
                # is a single noop.
                if len(desiredPlan) == 1:
                    if desiredPlan[-1][0].name == "noop":
                        self.noopSuccessCount += 1
                        nsc += 1
                        completePath.append(("| noop()", True))
                    else:
                        completePath.append(("None", False))
                else:
                    completePath.append(("None", False))
            else:
                #print "inferred plan", instructionInferredPlan
                actualEndState = instructionInferredPlan[-1][1][-1][1]
                #print "actualEndState", actualEndState
                #plInferredPath = planningLanguage.decompile(instructionInferredPlan[-1][1])
                plInferredPath = ""
                # NOTE(review): this loop reuses `i`, shadowing the outer
                # instruction index; harmless only because `for` re-binds it
                # on the next outer iteration.
                for i in instructionInferredPlan:
                    plInferredPath = plInferredPath + " | " + planningLanguage.decompile(i[1])
                # Success is judged purely on the final world state.
                if desiredEndState == actualEndState:
                    self.successCount += 1
                    sc += 1
                    print instructionInferredPlan
                    completePath.append((plInferredPath, True))
                else:
                    completePath.append((plInferredPath, False))
                    print "State is not the same for instruction", instruction
                    print "Inferred path was: ", planningLanguage.decompile(instructionInferredPlan[0][1])
                    ## print "Desired mixing bowl:", desiredEndState.mixing_bowl
                    ## print "Actual mixing bowl:", actualEndState.mixing_bowl
                    print "\n"
        print "\n\nResults for the instruction-level evaluation of :", targetRecipe.name
        print "Total Instructions:", tc, "\nSuccess:", sc
        print "Noop Success:", nsc
        print "Failures:", tc - (sc+nsc), "\n\n"
        return completePath

    def evaluateEndToEnd(self, targetRecipe, useBeam=True):
        """Evaluate one recipe end-to-end (whole text -> whole plan).

        Always returns 0; callers currently treat any non-zero value as a
        failure signal.
        """
        #A list of (planningLanguage, is_correct) tuples. Used mainly for GUI
        completePath = []
        self.totalCount += 1
        model_fname = "kitchenModel_1.5.pck"
        # NOTE(review): training_set is loaded but never used here.
        training_set = pickle_util.load("training.pck")
        rm = recipeManager.RecipeManager(model_fname)
        pl = planningLanguage.PlanningLanguage()
        print "\nEvaluating (end-to-end):", targetRecipe.name
        recipeText = targetRecipe.instruction_text
        initialState = targetRecipe.start_state
        if useBeam:
            inferredPlan = rm.find_beam_plan(recipeText, initialState)
        else:
            inferredPlan = rm.find_plan(recipeText, initialState)
        print "\ninferred", inferredPlan
        actualEndState = inferredPlan[-1][1][-1][1]
        print "\ndesired states", targetRecipe.states
        desiredEndState = targetRecipe.states[-1][-1][1]
        # Human-readable renderings of both plans for the log.
        plInferredPath = ""
        for i in inferredPlan:
            plInferredPath = plInferredPath + " | " + planningLanguage.decompile(i[1])
        print "\nPL inferred:", plInferredPath
        plActual = ""
        for i in targetRecipe.instructions:
            plActual = plActual + " | " + i[1]
        print "\nPL Desired:", plActual, "\n"
        #print desiredEndState
        #print "end state", actualEndState
        if desiredEndState == actualEndState:
            self.successCount += 1
            print "\n\nResults for the End-to-End evaluation for :", targetRecipe.name
            print "Success"
        else:
            print "\nResults for the End-to-End evaluation for :", targetRecipe.name
            print "Failure"
        return 0
def runInstructionEvaluation(runTestSet=True):
training_set = pickle_util.load("training.pck")
totalRecipes = 0
arc = annotatedRecipe.Corpus(training_set=training_set)
recipeCorpus = arc.recipes
ev = Evaluator()
for i in recipeCorpus:
if i.is_training_set == runTestSet:
continue
totalRecipes += 1
ev.evaluateInstructions(i)
print "\n\nOverall results for the entire instruction-level evaluation."
print "Total Recipes:", totalRecipes
print "Total Instructions:", ev.totalCount, "\nSuccess:", ev.successCount
print "Noop Success:", ev.noopSuccessCount
print "Failures:", ev.totalCount - (ev.successCount+ev.noopSuccessCount)
right = ev.successCount + ev.noopSuccessCount
print "%.3f%% (%d/%d)" % (float(right)/ev.totalCount * 100,
right, ev.totalCount)
print "\n\n"
return (ev.totalCount, ev.successCount, ev.noopSuccessCount)
def runEndToEndEvaluation(runTestSet=True):
training_set = pickle_util.load("training.pck")
totalRecipes = 0
arc = annotatedRecipe.Corpus(training_set=training_set)
recipeCorpus = arc.recipes
ev = Evaluator()
for i in recipeCorpus:
if i.is_training_set == runTestSet:
continue
totalRecipes += 1
result = ev.evaluateEndToEnd(i)
if result != 0:
print "Failure 1"
return 1
print "\n\nOverall results for the entire end-to-end evaluation."
print "Total Recipes:", totalRecipes
print "Success:", ev.successCount
print "Failures:", ev.totalCount - ev.successCount
right = ev.successCount + ev.noopSuccessCount
print "%.3f%% (%d/%d)" % (float(right)/ev.totalCount * 100,
right, ev.totalCount)
print "\n\n"
return (totalRecipes, ev.successCount)
def main(argv):
eArg = 0
print "arg", argv
for i in argv:
if "--evaluate=" in i:
j = i.replace("--evaluate=", "")
eArg = int(j)
print eArg
if eArg == 1:
runInstructionEvaluation()
elif eArg == 2:
runEndToEndEvaluation()
else:
print "Error with the args"
if __name__=="__main__":
import sys
main(sys.argv)
| [
"stefie10@alum.mit.edu"
] | stefie10@alum.mit.edu |
944b91037e97b649e60f3396991edf79261d738d | ebfcae1c5ba2997b2ac4471d5bedc3f5daffcb31 | /incepiton-mysql-master/app/auth/form.py | e8c9b27bf30f223f224929065f1813ff4c42f599 | [
"MIT"
] | permissive | babiato/flaskapp1 | 84de2d0b26a54f5820d3bbe97926782ad41e005c | 530beb9e3b8516e0e93960b99521c23a523ef546 | refs/heads/master | 2023-02-26T16:36:49.760632 | 2021-02-04T09:08:40 | 2021-02-04T09:08:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 824 | py | from ..models import User
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField
from wtforms.validators import DataRequired, Email, ValidationError
class LoginForm(FlaskForm):
    """Sign-in form: required username and password, optional remember-me."""

    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    # Defaults to unchecked.
    remember_me = BooleanField('Remember me', default=False)
class RegisterForm(FlaskForm):
    """Account-creation form with a uniqueness check on the username."""

    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    email = StringField('Email', validators=[DataRequired(), Email()])

    def validate_username(self, field):
        """WTForms inline validator: reject usernames that already exist."""
        existing = User.query.filter(User.name == field.data).first()
        if existing:
            raise ValidationError('Username already in use')
| [
"jinxufang@tencent.com"
] | jinxufang@tencent.com |
503a571f039f0c34b2f4e839996f9bb23f5c1d9c | ba66da3901361854b9bb621586f1e49ad0121ee0 | /正式开班/pymysql/mysql_1.py | acf2710c566de36c2631fe7b39728a471f8d3378 | [] | no_license | luobodage/PythonBasis | c4739920055afbda03774d90151ab183a83583f8 | ea65536e759fec221a70d7647ae86120277d5459 | refs/heads/master | 2023-05-14T15:51:56.213282 | 2021-05-31T00:57:56 | 2021-05-31T00:57:56 | 322,145,745 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,697 | py | # -*- coding: UTF-8 -*-
import pymysql
import pandas as pd
# author: luoboovo
# contact: fuyu16032001@gmail.com
# datetime: 2021/1/29 10:46
# software: PyCharm
# = = =
# = = =
# = = =
# ===========
# = 萝 =
# = 卜 =
# = 神 =
# = 保 =
# = 佑 =
# = 永 =
# = 无 =
# = bug =
# = =
# = =
# =
data_before = 'housing.xls'
def dataCleaning():
    """Data cleaning: impute missing values and export a CSV.

    Reads the Excel file named by the module-level `data_before`, fills
    every column that contains nulls with that column's mean, and writes
    the cleaned frame to 'housing.csv'. Null counts are printed before
    and after imputation.
    """
    df = pd.read_excel(data_before)
    print(df.isnull().sum())
    for column in list(df.columns[df.isnull().sum() > 0]):
        mean_val = df[column].mean()
        # Assign back instead of `df[column].fillna(..., inplace=True)`:
        # in-place mutation of a selected column is deprecated under pandas
        # copy-on-write and may silently stop updating the parent frame.
        df[column] = df[column].fillna(mean_val)
    print(df.isnull().sum())
    df.to_csv('housing.csv')
def login_mysql(user, password):
    """First-run MySQL login: connect, create the `house` schema, select it.

    :param user: MySQL user name
    :param password: MySQL password
    :return: (cursor, connection) tuple
    """
    connection = pymysql.connect(
        host='localhost',
        port=3306,
        charset='utf8',
        cursorclass=pymysql.cursors.DictCursor,  # rows come back as dicts
        user=user,
        password=password,
    )
    cur = connection.cursor()
    cur.execute('CREATE database house')
    cur.execute('use house')
    return cur, connection
def reboot_mysql(user, password):
    """Reconnect to MySQL and select the already-created `house` schema.

    :param user: MySQL user name
    :param password: MySQL password
    :return: (cursor, connection) tuple
    """
    connection = pymysql.connect(
        host='localhost',
        port=3306,
        charset='utf8',
        cursorclass=pymysql.cursors.DictCursor,  # rows come back as dicts
        user=user,
        password=password,
    )
    cur = connection.cursor()
    cur.execute('use house')
    return cur, connection
def create_database():
    """Create the `housing` table inside the `house` schema.

    Prints the original failure message when creation fails (e.g. the
    table already exists) and always releases the cursor and connection.
    """
    cursor, conn = reboot_mysql('root', '1334')
    try:
        cursor.execute("""
        CREATE table housing(ID INT PRIMARY KEY,
        longitude float not null ,
        latitude float not null ,
        housing_median_age int not null ,
        total_rooms int not null ,
        total_bedrooms float not null ,
        population int not null ,
        households int not null ,
        median_income float not null,
        median_house_value varchar(10) not null )
        """)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # no longer swallowed.
        print('创建失败')
    finally:
        # Moved into `finally` so the connection is released even if an
        # unexpected exception propagates.
        cursor.close()
        conn.close()
def readAndWriteData():
    """Load housing.csv and bulk-insert its rows into house.housing.

    NOTE(review): like the original, this assumes each line's first ten
    comma-separated fields map onto the table's ten columns; the CSV
    written by dataCleaning() also contains a header row and an index
    column — confirm the file layout before running.
    """
    cursor, conn = reboot_mysql('root', '1334')
    with open('housing.csv', 'r') as csv_file:
        lines = csv_file.read().split('\n')
    # Parameterized statement instead of f-string interpolation: avoids SQL
    # injection and quoting bugs from raw CSV fields.
    sql = ('INSERT into house.housing '
           'VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)')
    # Skip the trailing empty string produced by the final newline, as the
    # original `range(len(a) - 1)` loop did.
    for line in lines[:-1]:
        fields = line.split(',')
        cursor.execute(sql, fields[:10])
    conn.commit()
    cursor.close()
    conn.close()
if __name__ == '__main__':
    # One-time setup steps (data cleaning, schema/table creation) are kept
    # here commented out; only the CSV import runs by default.
    # dataCleaning()
    # login_mysql('root', '1334')
    readAndWriteData()
    # create_database()
| [
"fuyu16032001@gmail.com"
] | fuyu16032001@gmail.com |
83be4867c7f7e8d53df50de5d7ca29ce8d8ccf62 | fdbabbd10c03d20d7b35c00eab682af2d4be6601 | /main.py | 8614a6a798657badf6eb003e3771b0d45bfd5711 | [] | no_license | allenabraham777/cvsm_logic | 44777e8066143d790767029e954b9b52251c9cce | 148e53dcd190d649c8f261e32ad2d5fd18a261c0 | refs/heads/master | 2022-11-14T19:10:27.309242 | 2020-07-06T13:21:46 | 2020-07-06T13:21:46 | 277,547,274 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,180 | py | informations = []
suppliers = []
activities = []

# Information Flow: free-text component descriptions, one per prompt,
# terminated by the sentinel "end".
print("Enter the information flow (Type 'end' to Stop) : ")
i = 0
while(True):
    print("Enter the component "+str(i + 1)+" detail")
    information = input()
    if information == "end":
        break
    informations.append(information)
    i = i + 1
print(informations)

# Supplier details: name + delivery interval pairs; entering "end" as the
# name stops the loop.
print("Enter the Supplier Details (Type 'end' to Stop) : ")
i = 0
while(True):
    print("Enter the supplier "+str(i + 1)+" details")
    supplier_name = input("Name : ")
    if supplier_name == "end":
        break
    supplier_interval = input("Intervel : ")
    supplier = {"Name":supplier_name, "Intervel":supplier_interval}
    suppliers.append(supplier)
    i = i + 1
print(suppliers)

# Process Flow: one activity per outer iteration ("end_act" stops); each
# activity collects sub-activities until the user enters status 10.
# NOTE(review): `i` is never incremented in this loop, so the prompt always
# reads "activity 1".
print("Enter the activity flow (Type 'end' to Stop) : ")
i = 0
while(True):
    act_code = input("Enter activity "+ str(i + 1) +" code (end_act to stop)")
    if(act_code == "end_act"):
        break
    # `flag` gates the detailed (equipment/operator/material) questions.
    # NOTE(review): it is cleared after the first non-waiting sub-activity,
    # so every later sub-activity falls into the "waiting" branch below —
    # confirm this is the intended behaviour.
    flag = True
    activity_status = True
    sub_activities = []
    while(True):
        status = int(input("Enter 1 if the process is a waiting process, 10 to exit else 0"))
        if status == 10:
            break
        if status == 1:
            activity_status = False
        else:
            activity_status = True
        sub_act_name = input("Enter the name of activity : ")
        lot_size = input("Enter the lot size : ")
        cycle_time = input("Cycle Time : ")
        if activity_status and flag:
            flag = False
            # Equipment details: repeat until the sentinel name "end_eqp".
            equipments = []
            while(True):
                equipment_name = input("Equipment Name (end_eqp to break): ")
                if(equipment_name == "end_eqp"):
                    break
                equipment_cost = input("Capital Cost : ")
                equipment_maintenance_cost = input("Maintenence Cost : ")
                equipment_total_usage = input("Total Usage per Year : ")
                equipment = {
                    "name": equipment_name,
                    "cost": equipment_cost,
                    "maintenence": equipment_maintenance_cost,
                    "usage": equipment_total_usage
                }
                equipments.append(equipment)
            supervisor_count = input("Number of supervisors : ")
            operator_count = input("Number of operators : ")
            operator_cost = input("Operator cost : ")
            operator_time = input("Operation time : ")
            material_cost = input("Material cost : ")
            sub_activity = {
                "type": "activity",
                "name": sub_act_name,
                "lot_size": lot_size,
                "equipments": equipments,
                "supervisor_count": supervisor_count,
                "operator_count": operator_count,
                "operator_cost": operator_cost,
                "operator_time": operator_time,
                "material_cost": material_cost,
                "cycle_time": cycle_time
            }
            sub_activities.append(sub_activity)
        else:
            # Waiting step (or any step after the detailed branch was used).
            waiting_time = input("Waiting time before process")
            sub_activity = {
                "type": "waiting",
                "name": sub_act_name,
                "lot_size": lot_size,
                "cycle_time": cycle_time,
                "waiting_time": waiting_time
            }
            sub_activities.append(sub_activity)
    rejection = input("Percentage rejection")
    activity = {
        "code": act_code,
        "sub_activities": sub_activities,
        "rejection": rejection
    }
    activities.append(activity)
print(activities)
"replituser@example.com"
] | replituser@example.com |
a069cd82f85f5cfc00365c1f723ab5711469ef0f | 463c053bcf3f4a7337b634890720ea9467f14c87 | /python/ray/tests/test_advanced_7.py | c4743ad518d978319220af787533e2198499758f | [
"BSD-3-Clause",
"MIT",
"Apache-2.0"
] | permissive | pdames/ray | e8faddc4440976211a6bcead8f8b6e62c1dcda01 | 918d3601c6519d333f10910dc75eb549cbb82afa | refs/heads/master | 2023-01-23T06:11:11.723212 | 2022-05-06T22:55:59 | 2022-05-06T22:55:59 | 245,515,407 | 1 | 1 | Apache-2.0 | 2023-01-14T08:02:21 | 2020-03-06T20:59:04 | Python | UTF-8 | Python | false | false | 8,472 | py | # coding: utf-8
from concurrent.futures import ThreadPoolExecutor
import logging
import random
import sys
import threading
import time
import numpy as np
import pytest
import ray.cluster_utils
from ray._private.test_utils import (
client_test_enabled,
)
if client_test_enabled():
from ray.util.client import ray
else:
import ray
logger = logging.getLogger(__name__)
@pytest.mark.skipif(
    client_test_enabled(), reason="grpc interaction with releasing resources"
)
def test_multithreading(ray_start_2_cpus):
    """Exercise the core Ray API (remote calls, actors, put/get, wait)
    concurrently from many threads, in the driver, in a worker task, and
    from background threads inside an actor."""
    # This test requires at least 2 CPUs to finish since the worker does not
    # release resources when joining the threads.

    def run_test_in_multi_threads(test_case, num_threads=10, num_repeats=25):
        """A helper function that runs test cases in multiple threads."""

        def wrapper():
            for _ in range(num_repeats):
                test_case()
                # Jitter up to 10ms between repeats to vary interleavings.
                time.sleep(random.randint(0, 10) / 1000.0)
            return "ok"

        executor = ThreadPoolExecutor(max_workers=num_threads)
        futures = [executor.submit(wrapper) for _ in range(num_threads)]
        for future in futures:
            assert future.result() == "ok"

    @ray.remote
    def echo(value, delay_ms=0):
        if delay_ms > 0:
            time.sleep(delay_ms / 1000.0)
        return value

    def test_api_in_multi_threads():
        """Test using Ray api in multiple threads."""

        @ray.remote
        class Echo:
            def echo(self, value):
                return value

        # Test calling remote functions in multiple threads.
        def test_remote_call():
            value = random.randint(0, 1000000)
            result = ray.get(echo.remote(value))
            assert value == result

        run_test_in_multi_threads(test_remote_call)

        # Test multiple threads calling one actor.
        actor = Echo.remote()

        def test_call_actor():
            value = random.randint(0, 1000000)
            result = ray.get(actor.echo.remote(value))
            assert value == result

        run_test_in_multi_threads(test_call_actor)

        # Test put and get.
        def test_put_and_get():
            value = random.randint(0, 1000000)
            result = ray.get(ray.put(value))
            assert value == result

        run_test_in_multi_threads(test_put_and_get)

        # Test multiple threads waiting for objects.
        num_wait_objects = 10
        objects = [echo.remote(i, delay_ms=10) for i in range(num_wait_objects)]

        def test_wait():
            ready, _ = ray.wait(
                objects,
                num_returns=len(objects),
                timeout=1000.0,
            )
            assert len(ready) == num_wait_objects
            assert ray.get(ready) == list(range(num_wait_objects))

        run_test_in_multi_threads(test_wait, num_repeats=1)

    # Run tests in a driver.
    test_api_in_multi_threads()

    # Run tests in a worker.
    @ray.remote
    def run_tests_in_worker():
        test_api_in_multi_threads()
        return "ok"

    assert ray.get(run_tests_in_worker.remote()) == "ok"

    # Test actor that runs background threads.
    @ray.remote
    class MultithreadedActor:
        def __init__(self):
            # Lock guards thread_results, which the background threads share.
            self.lock = threading.Lock()
            self.thread_results = []

        def background_thread(self, wait_objects):
            try:
                # Test wait
                ready, _ = ray.wait(
                    wait_objects,
                    num_returns=len(wait_objects),
                    timeout=1000.0,
                )
                assert len(ready) == len(wait_objects)
                for _ in range(20):
                    num = 10
                    # Test remote call
                    results = [echo.remote(i) for i in range(num)]
                    assert ray.get(results) == list(range(num))
                    # Test put and get
                    objects = [ray.put(i) for i in range(num)]
                    assert ray.get(objects) == list(range(num))
                    time.sleep(random.randint(0, 10) / 1000.0)
            except Exception as e:
                # Record the failure; join() re-checks the results list.
                with self.lock:
                    self.thread_results.append(e)
            else:
                with self.lock:
                    self.thread_results.append("ok")

        def spawn(self):
            wait_objects = [echo.remote(i, delay_ms=10) for i in range(10)]
            self.threads = [
                threading.Thread(target=self.background_thread, args=(wait_objects,))
                for _ in range(20)
            ]
            [thread.start() for thread in self.threads]

        def join(self):
            [thread.join() for thread in self.threads]
            assert self.thread_results == ["ok"] * len(self.threads)
            return "ok"

    actor = MultithreadedActor.remote()
    actor.spawn.remote()
    # NOTE(review): missing `assert` — the comparison result below is
    # discarded, though join() itself still asserts internally.
    ray.get(actor.join.remote()) == "ok"
@pytest.mark.skipif(client_test_enabled(), reason="internal api")
def test_wait_makes_object_local(ray_start_cluster_enabled):
    """Both ray.get and ray.wait should pull a remote object to this node."""
    cluster = ray_start_cluster_enabled
    cluster.add_node(num_cpus=0)
    cluster.add_node(num_cpus=2)
    ray.init(address=cluster.address)

    @ray.remote
    class Producer:
        def method(self):
            return np.zeros(1024 * 1024)

    producer = Producer.remote()
    core_worker = ray.worker.global_worker.core_worker

    # ray.get should fetch the object into the local store.
    obj_ref = producer.method.remote()
    assert not core_worker.object_exists(obj_ref)
    ray.get(obj_ref)
    assert core_worker.object_exists(obj_ref)

    # ray.wait should likewise localize the object.
    obj_ref = producer.method.remote()
    assert not core_worker.object_exists(obj_ref)
    ready, _ = ray.wait([obj_ref])
    assert len(ready) == 1
    assert core_worker.object_exists(obj_ref)
@pytest.mark.skipif(client_test_enabled(), reason="internal api")
def test_future_resolution_skip_plasma(ray_start_cluster_enabled):
    """A small borrowed ref should be inlined on future resolution rather
    than resolved through plasma."""
    cluster = ray_start_cluster_enabled
    # Disable worker caching so worker leases are not reused, and raise the
    # object-inlining threshold so the borrowed reference gets inlined.
    cluster.add_node(
        num_cpus=1,
        resources={"pin_head": 1},
        _system_config={
            "worker_lease_timeout_milliseconds": 0,
            "max_direct_call_object_size": 100 * 1024,
        },
    )
    cluster.add_node(num_cpus=1, resources={"pin_worker": 1})
    ray.init(address=cluster.address)

    @ray.remote(resources={"pin_head": 1})
    def add_one(x):
        return x + 1

    @ray.remote(resources={"pin_worker": 1})
    def consume(x):
        inner_ref = x[0]
        inner_result = ray.get(add_one.remote(inner_ref))
        # The borrowed ref must have been inlined into the in-memory store
        # (i.e. it never needed to live in plasma).
        assert ray.worker.global_worker.core_worker.object_exists(
            inner_ref, memory_store_only=True
        )
        return inner_result * 2

    base = add_one.remote(0)
    out = consume.remote([base])
    assert ray.get(out) == 4
def test_task_output_inline_bytes_limit(ray_start_cluster_enabled):
    """With task_rpc_inlined_bytes_limit=20, only the first returns of a
    multi-return task fit under the inline budget; the rest must go to
    plasma."""
    cluster = ray_start_cluster_enabled
    # Disable worker caching so worker leases are not reused; set object
    # inlining size threshold and enable storing of small objects in in-memory
    # object store so the borrowed ref is inlined.
    # set task_rpc_inlined_bytes_limit which only allows inline 20 bytes.
    cluster.add_node(
        num_cpus=1,
        resources={"pin_head": 1},
        _system_config={
            "worker_lease_timeout_milliseconds": 0,
            "max_direct_call_object_size": 100 * 1024,
            "task_rpc_inlined_bytes_limit": 20,
        },
    )
    cluster.add_node(num_cpus=1, resources={"pin_worker": 1})
    ray.init(address=cluster.address)

    @ray.remote(num_returns=5, resources={"pin_head": 1})
    def f():
        return list(range(5))

    # NOTE: `sum` shadows the builtin, but only inside this remote function's
    # scope.
    @ray.remote(resources={"pin_worker": 1})
    def sum():
        numbers = f.remote()
        result = 0
        for i, ref in enumerate(numbers):
            result += ray.get(ref)
            inlined = ray.worker.global_worker.core_worker.object_exists(
                ref, memory_store_only=True
            )
            # Only the first two returns fit within the 20-byte inline limit.
            if i < 2:
                assert inlined
            else:
                assert not inlined
        return result

    assert ray.get(sum.remote()) == 10
if __name__ == "__main__":
    # Allow running this test module directly by delegating to pytest.
    import pytest

    sys.exit(pytest.main(["-v", __file__]))
| [
"noreply@github.com"
] | pdames.noreply@github.com |
1dcc7298149c256a1f3ef9c06b425f0fdeef4e84 | 7669454a633042da5696f814768f523c8af18b7a | /chiasim/hashable/SpendBundle.py | ab165d05ba98ce2199aafe48f7affff9ef0d71a4 | [
"Apache-2.0"
] | permissive | Chia-Network/ledger_sim | 46422efccf5993b5d2de8f3b0e6e3fb635115980 | de53d4129e10d883b92988e21bd5e248a8f89813 | refs/heads/main | 2023-01-24T13:03:58.343814 | 2023-01-19T22:05:59 | 2023-01-19T22:05:59 | 203,893,213 | 4 | 9 | null | 2023-01-19T22:06:01 | 2019-08-23T00:20:07 | Python | UTF-8 | Python | false | false | 1,474 | py | from ..atoms import streamable
from .BLSSignature import BLSSignature
from .CoinSolution import CoinSolutionList
@streamable
class SpendBundle:
    """
    A list of coins being spent together with their solution programs, plus
    one aggregated signature. This is the closest analogue of a bitcoin
    transaction, although non-interactive signature aggregation makes the
    boundaries between transactions more flexible than in bitcoin.
    """
    coin_solutions: CoinSolutionList
    aggregated_signature: BLSSignature

    @classmethod
    def aggregate(cls, spend_bundles):
        """Merge several bundles into one with a single aggregated signature."""
        merged_solutions = []
        signatures = []
        for bundle in spend_bundles:
            merged_solutions += bundle.coin_solutions
            signatures.append(bundle.aggregated_signature)
        return cls(merged_solutions, BLSSignature.aggregate(signatures))

    def additions(self):
        """Return the coins created by running every solution in the bundle."""
        from chiasim.wallet.deltas import additions_for_solution

        created = []
        for cs in self.coin_solutions._items:
            created += additions_for_solution(cs.coin.name(), cs.solution)
        return tuple(created)

    def removals(self):
        """Return the coins consumed (spent) by this bundle."""
        return tuple(cs.coin for cs in self.coin_solutions)

    def fees(self) -> int:
        """Return the total input amount minus the total output amount."""
        total_in = sum(coin.amount for coin in self.removals())
        total_out = sum(coin.amount for coin in self.additions())
        return total_in - total_out
| [
"him@richardkiss.com"
] | him@richardkiss.com |
b9de0d9977fdf52300400d43ae398ac0ca3bdd53 | a07fd8aca2d69ade2e388054dd2c1c9991232185 | /tests/test_tutorial/test_handling_errors/test_tutorial001.py | 8809c135bd706cbb18e1438335b1a2807035a261 | [
"MIT"
] | permissive | vitalik/fastapi | 76b71bbbade19f12484c73dcbdca426197cc2db6 | 0276f5fd3aafb38dcbb430177a4685aeb58e5c69 | refs/heads/master | 2023-08-01T06:56:06.053824 | 2023-07-25T20:46:02 | 2023-07-25T20:46:02 | 315,668,229 | 1 | 0 | MIT | 2020-11-24T15:07:16 | 2020-11-24T15:07:15 | null | UTF-8 | Python | false | false | 3,298 | py | from fastapi.testclient import TestClient
from docs_src.handling_errors.tutorial001 import app
client = TestClient(app)
def test_get_item():
    """A known item id returns 200 with the expected payload."""
    resp = client.get("/items/foo")
    assert resp.status_code == 200, resp.text
    assert resp.json() == {"item": "The Foo Wrestlers"}
def test_get_item_not_found():
    """An unknown item id yields a plain 404 without the custom header."""
    resp = client.get("/items/bar")
    assert resp.status_code == 404, resp.text
    assert resp.headers.get("x-error") is None
    assert resp.json() == {"detail": "Item not found"}
def test_openapi_schema():
    """The generated OpenAPI document matches the expected schema exactly."""
    response = client.get("/openapi.json")
    assert response.status_code == 200, response.text
    # Full structural comparison: paths, parameters, and validation-error models.
    assert response.json() == {
        "openapi": "3.1.0",
        "info": {"title": "FastAPI", "version": "0.1.0"},
        "paths": {
            "/items/{item_id}": {
                "get": {
                    "responses": {
                        "200": {
                            "description": "Successful Response",
                            "content": {"application/json": {"schema": {}}},
                        },
                        "422": {
                            "description": "Validation Error",
                            "content": {
                                "application/json": {
                                    "schema": {
                                        "$ref": "#/components/schemas/HTTPValidationError"
                                    }
                                }
                            },
                        },
                    },
                    "summary": "Read Item",
                    "operationId": "read_item_items__item_id__get",
                    "parameters": [
                        {
                            "required": True,
                            "schema": {"title": "Item Id", "type": "string"},
                            "name": "item_id",
                            "in": "path",
                        }
                    ],
                }
            }
        },
        "components": {
            "schemas": {
                "ValidationError": {
                    "title": "ValidationError",
                    "required": ["loc", "msg", "type"],
                    "type": "object",
                    "properties": {
                        "loc": {
                            "title": "Location",
                            "type": "array",
                            "items": {
                                "anyOf": [{"type": "string"}, {"type": "integer"}]
                            },
                        },
                        "msg": {"title": "Message", "type": "string"},
                        "type": {"title": "Error Type", "type": "string"},
                    },
                },
                "HTTPValidationError": {
                    "title": "HTTPValidationError",
                    "type": "object",
                    "properties": {
                        "detail": {
                            "title": "Detail",
                            "type": "array",
                            "items": {"$ref": "#/components/schemas/ValidationError"},
                        }
                    },
                },
            }
        },
    }
| [
"noreply@github.com"
] | vitalik.noreply@github.com |
07d9bd027d220d26f623bd4bfb006fa601fd7172 | 5e626d4138ef02efae149c41e00ea4d04b8239e8 | /chippedyScrapson.py | 891ab89172f238074c359b540e1258a1adfe6e5c | [] | no_license | ganti/chippedyScrapson | f1ecc57622ea2bbc8a5f7f467081523089fa36e2 | 4de30ff9ae8236ff1083668931cc9f000a34e0eb | refs/heads/master | 2020-07-19T02:38:51.137565 | 2019-09-04T16:30:19 | 2019-09-04T16:30:19 | 206,360,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,045 | py | #!/usr/bin/env python3
import os
import sys
from pprint import pprint
import csv
import json, codecs
import time
import random
import requests
from lxml import html
from scrapy import Selector
from bs4 import BeautifulSoup
allURLs = []
def main():
    # Crawl the target site up to `depth` link-levels deep, write every URL
    # found to list.txt, then hand that list to webscreenshot for capture.
    url = "https://www.tagesanzeiger.ch"
    depth = 3
    makeListTxtFromURL(url, depth=depth)
    call = "python3 ./webscreenshot/webscreenshot.py -i list.txt -v --workers 7"
    #
    # webscreenshot has many params: screensize, browser etc.
    # all arguments of webscreenshot.py are in ./webscreenshot/00_help_webscreenshot.txt
    #
    os.system(call)
def makeListTxtFromURL(url, depth=5):
    """Crawl `url` up to `depth` link-levels deep and write every discovered
    same-domain URL (one per line) to list.txt."""
    global allURLs
    # scheme://host of the start URL, used to restrict the crawl to one domain
    domain = url.split("://")[0] + "://" + url.split("://")[1].split("/")[0]
    # truncate both output files before the crawl starts
    with open('list.txt', 'w') as file:
        file.write("")
    with open('url_error.txt', 'w') as file:
        file.write("")
    allURLs = getExtractAllLinksFromPage(url, domain, depth)
    # include the start URL itself and deduplicate
    allURLs = list(set([url] + allURLs))
    with open('list.txt', 'w') as file:
        for entry in allURLs:
            file.write(entry + "\n")
def getExtractAllLinksFromPage(url, domain, depth):
    # Recursively collect same-domain links reachable from `url`, up to `depth`
    # levels deep.  Results are also accumulated in the global allURLs so that
    # links already seen are not re-added by deeper recursion levels.
    global allURLs
    result = []
    if depth >= 1 and domain in url:
        content = getPageContentOfURL(url)
        if content != None:
            contentLinks = getLinksFromPageContent(content, domain)
            result = result + contentLinks
            depth = depth -1
            for link in contentLinks:
                #print(str(depth)+" "+link)
                sublinks = getExtractAllLinksFromPage(link, domain, depth)
                for sublink in sublinks:
                    # only keep sublinks not already recorded globally
                    if not sublink in allURLs:
                        result.append(sublink)
            result = list(set(result))
            allURLs = allURLs + result
    return result
def getLinksFromPageContent(content, domain):
    """Extract all new same-domain hyperlinks from an HTML page, skipping
    javascript:/mailto: links and direct file downloads."""
    global allURLs
    soup = BeautifulSoup(content, features="lxml")
    hrefs = [anchor.get('href') for anchor in soup.findAll('a')]
    skip_prefixes = ('javascript:', 'mailto:')
    skip_suffixes = ('.pdf', '.zip', '.png', '.jpg', '.gif')
    found = []
    for href in hrefs:
        if href is None:
            continue
        if href.startswith(skip_prefixes) or href.endswith(skip_suffixes):
            continue
        # normalise protocol-relative and root-relative links
        if href.startswith('//'):
            href = domain.split("://")[0] + href
        if href.startswith('/'):
            href = domain + href
        # keep only same-domain links that have not been recorded globally yet
        if domain in href and href not in allURLs:
            found.append(href)
    found = list(set(found))
    allURLs = allURLs + found
    return found
def getPageContentOfURL(url, run=3):
    """Fetch `url` and return its body bytes, or None on failure.

    Network errors are retried up to `run` additional times; once the retry
    budget is exhausted the URL is appended to url_error.txt.  Non-200
    responses are reported but not retried.
    """
    content = None
    try:
        page = requests.get(url)
        if page.status_code != 200:
            print(str(page.status_code) +" 💩 " + url)
        else:
            print(str(page.status_code) +" ✅ " + url)
            content = page.content
    except requests.exceptions.RequestException:
        # BUGFIX: only retry while retry budget remains.  The original code
        # recursed unconditionally, so a host that kept raising drove `run`
        # negative forever (unbounded recursion, eventually RecursionError).
        if run > 0:
            content = getPageContentOfURL(url, run=(run-1))
    if content is None and run == 0:
        with open('url_error.txt', 'a') as file:
            file.write(url+"\n")
    return content
# Run the crawl + screenshot pipeline when executed as a script.
if __name__ == "__main__" :
    main()
"{ID}+{username}@users.noreply.github.com"
] | {ID}+{username}@users.noreply.github.com |
a3ffa5427b8db497a9bf8bf144544ac7fcba7f78 | d535830db49fcc2b77be02650be09682ddeb0e1d | /main.py | d5d7f612c292900627d360bfdafc9cd26969163a | [] | no_license | stemussi/Bot_ricerca_libri | feec4667434c465894f75e1e2e21bfd3019ff2f0 | 65161a2f2575391ab39881cd3b4cb3515e0f388e | refs/heads/master | 2022-12-15T08:43:30.793028 | 2022-02-12T10:41:44 | 2022-02-12T10:41:44 | 185,013,796 | 0 | 0 | null | 2022-12-08T02:31:42 | 2019-05-05T10:03:45 | Python | UTF-8 | Python | false | false | 13,573 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Jul 26 10:52:31 2018
@author: bradamante
"""
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters, CallbackQueryHandler
from telegram import InlineKeyboardButton, InlineKeyboardMarkup, ReplyKeyboardMarkup
from SPARQLWrapper import SPARQLWrapper, JSON
from dbhelper import DBHelper
# da fare: la tastiera inline non vuole callback_data più lunghe di 64 -> creare una tabella con l'isbn per esempio (3 orette)
class libro:
    # Lightweight container used to pass book data between handler functions.
    def __init__(self):
        self.telegram = "Non trovato"  # Telegram-markdown link, "Non trovato" until built
        self.titolo = ""
        self.link = ""
        self.autore = ""
    def crea_telegram(self):
        """Build the Telegram-markdown link "[title](url)" for this book,
        escaping ')' in the URL so it does not close the markdown link."""
        safe_link = self.link.replace(")", "%29")
        self.telegram = "[" + self.titolo + "](" + safe_link + ")"
    def add_link(self, link):
        self.link = link
    def add_titolo(self, titolo):
        self.titolo = titolo
    def add_autore(self, autore):
        self.autore = autore
# Persistence layer (project DBHelper) and the Telegram bot API token (redacted).
db = DBHelper()
TOKEN ="XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
def Cerca_titolo(titolo):
    # Exact-title lookup: receives the full title and returns a `libro`
    # (its .telegram stays "Non trovato" when DBpedia has no match).
    titolo=titolo.strip()
    risp=libro()
    sparql = SPARQLWrapper("http://it.dbpedia.org/sparql")
    # The UNION covers books that have no author in the catalogue;
    # the ORDER BY puts the record with a non-empty author on top, if any.
    q = """
    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
    SELECT DISTINCT ?book, ?autore WHERE{ 
    {{?book a <http://dbpedia.org/ontology/Book> } UNION{ 
        ?book <http://it.dbpedia.org/property/autore> ?uso. 
        ?uso rdfs:label ?autore}}.
    ?book <http://it.dbpedia.org/property/titolo> '''"""+titolo+"""'''@it }
    ORDER BY desc(?autore)"""
    sparql.setQuery(q)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    if (len(results["results"]["bindings"]))>0:
        leng = len("http://it.dbpedia.org/resource/")
        dbpedia = results["results"]["bindings"][0]["book"]["value"]
        tit = titolo
        aut=""
        # Check that the author field exists; it is not always present
        # (see the UNION above).
        if(len(results["results"]["bindings"][0])>1):
            aut = results["results"]["bindings"][0]["autore"]["value"]
        risp.add_titolo(tit)
        # Map the DBpedia resource URI to the Italian Wikipedia page URL;
        # not perfect, but it works ~95% of the time.
        risp.add_link("https://it.wikipedia.org/wiki/" + dbpedia[leng:])
        risp.add_autore(aut)
        risp.crea_telegram()
    return risp
def Cerca_libro(titolo):
    """Partial-title search: return up to 10 random full titles from DBpedia
    whose title matches `titolo` (case-insensitive)."""
    titolo =titolo.strip()
    sparql = SPARQLWrapper("http://it.dbpedia.org/sparql")
    q = """
        SELECT DISTINCT ?book, ?titolo WHERE {
        ?book a<http://dbpedia.org/ontology/Book> .
        ?book <http://it.dbpedia.org/property/titolo> ?titolo
        FILTER regex(?titolo, '''"""+titolo+"""''', "i")
        }
    ORDER BY RAND() 
    LIMIT 10"""
    sparql.setQuery(q)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    # Flatten the SPARQL bindings into a plain list of title strings.
    bindings = results["results"]["bindings"]
    return [row["titolo"]["value"] for row in bindings]
def Cerca_autore(autore):
    """Partial-name search: return up to 10 random full author names from
    DBpedia matching `autore` (case-insensitive)."""
    autore= autore.strip()
    sparql = SPARQLWrapper("http://it.dbpedia.org/sparql")
    q = """
    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
    SELECT DISTINCT ?autore WHERE {
    ?book a <http://dbpedia.org/ontology/Book> .
    ?book <http://it.dbpedia.org/property/autore> ?uso.
    ?uso rdfs:label ?autore
    FILTER regex(?autore, '''"""+autore+"""''', "i")
    }
    ORDER BY RAND() 
    LIMIT 10"""
    sparql.setQuery(q)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    # Flatten the SPARQL bindings into a plain list of author-name strings.
    bindings = results["results"]["bindings"]
    return [row["autore"]["value"] for row in bindings]
def Cerca_libro_autore(autore):
    """Given an exact author name, return up to 10 random titles of their
    books from DBpedia."""
    autore=autore.strip()
    sparql = SPARQLWrapper("http://it.dbpedia.org/sparql")
    q = """
    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
    SELECT DISTINCT ?book, ?titolo WHERE {
    ?book a<http://dbpedia.org/ontology/Book> .
    ?book <http://it.dbpedia.org/property/titolo> ?titolo.
    ?book <http://it.dbpedia.org/property/autore> ?autore.
    ?autore rdfs:label '''"""+autore+"""'''@it
    }
    ORDER BY RAND() 
    LIMIT 10"""
    sparql.setQuery(q)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    # Flatten the SPARQL bindings into a plain list of title strings.
    bindings = results["results"]["bindings"]
    return [row["titolo"]["value"] for row in bindings]
def Cerca_genere(bot, update, genere_in):
    """Send an inline keyboard with up to 10 random books of the given genre,
    or a "not found" message when DBpedia has none."""
    chat = update.message.chat_id
    genere = genere_in.strip()
    if "orror" in genere:
        # unify the spellings "Horror"/"orrore" into one search term
        genere = "orror"
    sparql = SPARQLWrapper("http://it.dbpedia.org/sparql")
    q = """
    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
    SELECT DISTINCT ?book, ?titolo WHERE {
    ?book a<http://dbpedia.org/ontology/Book> .
    ?book <http://it.dbpedia.org/property/titolo> ?titolo.
    ?book <http://it.dbpedia.org/property/tipo> ?tipo.
    FILTER regex(?tipo, '''"""+genere+"""''', "i")
    }
    ORDER BY RAND() 
    LIMIT 10"""
    sparql.setQuery(q)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    bindings = results["results"]["bindings"]
    if len(bindings) > 0:
        # inline-keyboard callback_data is limited to 64 bytes: skip long titles
        keyboard = [
            [InlineKeyboardButton(row["titolo"]["value"], callback_data=row["titolo"]["value"])]
            for row in bindings
            if len(row["titolo"]["value"]) < 60
        ]
        update.message.reply_text("scegli il titolo", reply_markup=InlineKeyboardMarkup(keyboard))
    else:
        update.message.reply_text(genere_in + " non trovato")
def Testo(bot, update):
    """Route a plain text message (typed or from the reply keyboard) to the
    shared text handler."""
    Testo_glob(bot, update, update.message.text)
def Testo_glob(bot, update, text):
    # Command-less text arrives here: either a title already in the list
    # (delete it) or a new/partial title (look it up on DBpedia).
    chat = update.message.chat_id
    # load all saved titles for this chat
    items = db.get_titoli(chat)
    if text in items:
        # the title is already saved: delete it, then offer the remaining
        # titles for further deletion
        db.delete_item(text, chat)
        items = db.get_titoli(chat)
        if (len(items)>0):
            keyboard=[]
            for item in items:
                if (len(item)<60):
                    # callback_data is limited to 64 bytes
                    keyboard.append([InlineKeyboardButton(item, callback_data=item)])
            reply_markup = InlineKeyboardMarkup(keyboard)
            message = text + " eliminato \n Seleziona il prossimo libro da eliminare"
            update.message.reply_text(message , reply_markup=reply_markup)
        else:
            message = text + " eliminato"
            update.message.reply_text(message)
    else:
        # not saved yet: try an exact-title lookup first
        libri=Cerca_titolo(text)
        if (libri.telegram !='Non trovato'):
            # found: save it and send the wikipedia markdown link
            db.add_item(chat, libri.titolo, libri.autore, libri.link, libri.telegram)
            update.message.reply_markdown(libri.telegram)
        else:
            # no exact match: treat the text as a partial title
            titoli = Cerca_libro(text)
            if (len(titoli)>0):
                # some candidates found: offer them on an inline keyboard
                message = "scegli il titolo"
                keyboard=[]
                for titolo in titoli:
                    if (len(titolo)<60):
                        # same 64-byte callback_data limit as above
                        keyboard.append([InlineKeyboardButton(titolo, callback_data=titolo)])
                reply_markup = InlineKeyboardMarkup(keyboard)
                update.message.reply_text(message, reply_markup=reply_markup)
            else:
                message = "Non ho trovato nulla"
                update.message.reply_text(message)
def Start(bot, update):
    """/start handler: show the persistent reply keyboard with the bot commands
    (always visible, by popular demand)."""
    layout = [
        ["/lista", "/wiki", "/genere"],
        ["/cancella", "/aiuto"],
    ]
    update.message.reply_text("lista dei libri!", reply_markup=ReplyKeyboardMarkup(layout))
def Aiuto(bot, update):
    # /aiuto handler: reply with the usage instructions (user-facing, Italian).
    update.message.reply_text("""scrivi un titolo o parte di esso per aggiungerlo 
riscrivi il titolo o /cancella per eliminare i vari elementi 
scrivi /lista per avere la lista semplice dei libri inseriti 
scrivi /wiki per avere la lista con i link dei libri inseriti 
scrivi /autore e un autore per libri di quell'autore
e /genere per scegliere un genere tra quelli proposti. 
Per ora non sono gestiti i titoli più lunghi di 60 caratteri""")
def Lista(bot, update):
    """/lista handler: send the saved books as plain text (no links), one
    "title - author" message per book."""
    rows = db.get_autore_titolo(update.message.chat_id)
    if not rows:
        update.message.reply_text("La lista è vuota")
        return
    for row in rows:
        # row[1] is the title, row[0] the author (may be empty)
        update.message.reply_text(row[1] + " - " + row[0])
def Wiki(bot, update):
    """/wiki handler: send each saved book as its stored markdown wikipedia link."""
    entries = db.get_message(update.message.chat_id)
    if not entries:
        update.message.reply_text("Non ho libri in lista")
        return
    for entry in entries:
        update.message.reply_markdown(entry)
def Cancella(bot, update):
    """/cancella handler: offer an inline keyboard of saved titles; pressing
    one is handled exactly like retyping the title (which deletes it)."""
    titles = db.get_titoli(update.message.chat_id)
    if not titles:
        update.message.reply_text("Non ho libri da eliminare")
        return
    # callback_data is limited to 64 bytes, so long titles are skipped
    keyboard = [
        [InlineKeyboardButton(t, callback_data=t)]
        for t in titles
        if len(t) < 60
    ]
    update.message.reply_text(
        "Seleziona il libro da eliminare",
        reply_markup=InlineKeyboardMarkup(keyboard))
def Autore_secco(bot, update, autore):
    """Given an exact author name (from the author keyboard, or when the
    partial search matched exactly one author), show an inline keyboard with
    up to 10 of their books."""
    print(autore)
    titoli = Cerca_libro_autore(autore)
    print(titoli)
    if len(titoli) > 0:
        update.message.reply_text(autore)
        # 64-byte callback_data limit: skip long titles
        keyboard = [
            [InlineKeyboardButton(t, callback_data=t)]
            for t in titoli
            if len(t) < 60
        ]
        update.message.reply_text(
            "Seleziona un libro",
            reply_markup=InlineKeyboardMarkup(keyboard))
    else:
        update.message.reply_text("Non libri per questo autore")
def Autore(bot, update, args):
    # /autore handler: returns either a list of matching authors or, if
    # exactly one author matches, the list of that author's books.
    # Initialise `autore` because with no args the code below would break.
    autore=""
    # rebuild the string that Telegram split into words
    for parola in args:
        autore = autore + " " + parola
    if (len(autore)>0):
        # search for authors whose name contains the given text
        autori=Cerca_autore(autore)
        if (len(autori)==1):
            # exactly one match: go straight to that author's books
            Autore_secco(bot,update,autori[0])
        elif (len(autori)>0):
            # several matches: offer the names; the button callback routes
            # back through Autore_secco
            message = "scegli l'autore"
            autori_send=[]
            for autore_uso in autori:
                autori_send.append([InlineKeyboardButton(autore_uso, callback_data="/autore "+autore_uso)])
            reply_markup = InlineKeyboardMarkup(autori_send)
            update.message.reply_text(message, reply_markup=reply_markup)
        else:
            message = "Non ho trovato nulla"
            update.message.reply_text(message)
    else:
        message = "Inserisci il nome di un autore dopo /autore"
        update.message.reply_text(message)
def Genere(bot, update, args):
    # /genere handler: search with the given genre, or offer a fixed genre
    # keyboard when none was typed.
    # Initialise `genere` because with no args the code below would break.
    genere =""
    for parola in args:
        genere = genere + " " + parola
    if (len(genere)>0):
        Cerca_genere(bot, update, genere)
    else:
        message = "Seleziona il genere"
        # Two-column keyboard of predefined genres; the button callback routes
        # back through Cerca_genere.
        keyboard=[[InlineKeyboardButton("Giallo", callback_data="/genere Giallo"),InlineKeyboardButton("Per ragazzi", callback_data="/genere Per ragazzi")],
                  [InlineKeyboardButton("Fantascienza", callback_data="/genere Fantascienza"),InlineKeyboardButton("Fantasy", callback_data="/genere Fantasy")],
                  [InlineKeyboardButton("Thriller", callback_data="/genere Thriller"),InlineKeyboardButton("Horror", callback_data="/genere Horror")],
                  [InlineKeyboardButton("Romanzo", callback_data="/genere Romanzo"),InlineKeyboardButton("Umoristico", callback_data="/genere Umoristico")],
                  [InlineKeyboardButton("Rosa", callback_data="/genere Rosa"),InlineKeyboardButton("Saggio", callback_data="/genere Saggio")]
                 ]
        reply_markup = InlineKeyboardMarkup(keyboard)
        update.message.reply_text(message , reply_markup=reply_markup)
def Button(bot, update):
    """Dispatch inline-keyboard callbacks: author picks, genre picks, or a
    plain title (handled like typed text)."""
    data = update.callback_query.data
    if data.startswith("/autore"):
        Autore_secco(bot, update.callback_query, data[7:])
    elif data.startswith("/genere"):
        Cerca_genere(bot, update.callback_query, data[7:])
    else:
        Testo_glob(bot, update.callback_query, data)
def main():
    """Set up the database, register all bot handlers, and start polling."""
    db.setup()
    updater = Updater(TOKEN)
    dp = updater.dispatcher
    # Handle /commands: CommandHandler("command", handler, optional args flag)
    dp.add_handler(CommandHandler("start", Start))
    dp.add_handler(CommandHandler("lista", Lista))
    dp.add_handler(CommandHandler("wiki", Wiki))
    dp.add_handler(CommandHandler("cancella", Cancella))
    dp.add_handler(CommandHandler("aiuto", Aiuto))
    dp.add_handler(CommandHandler("genere", Genere, pass_args=True))
    dp.add_handler(CommandHandler("autore", Autore, pass_args=True))
    # Handle inline keyboard callbacks
    dp.add_handler(CallbackQueryHandler(Button))
    # Handle plain text messages
    dp.add_handler(MessageHandler(Filters.text, Testo))
    updater.start_polling()
    updater.idle()
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # allow a clean Ctrl-C shutdown
        exit()
| [
"you@example.com"
] | you@example.com |
e41596ef4f52bb1f4192b5be0b6eb1505f775730 | fadc5b7a2e3f83ea69e3812800f2e4aa441e36d9 | /Scripts/Analysis/Tandem_Gene_Similarity.py | eaa2254bfa3f418d9f730549ac59f23feeebb267 | [] | no_license | TomJKono/Maize_Tandem_Evolution | 4e63c58614ec56476f4b4116554206ddf3d73b28 | 413b6c8d0d9a257a370060674a0e3936f3d93fc2 | refs/heads/master | 2022-06-30T04:20:19.228107 | 2022-06-15T22:55:06 | 2022-06-15T22:55:06 | 114,657,294 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,964 | py | #!/usr/bin/env python
"""Script to generate alignments for tandem duplicates. Translates the sequence
to amino acids, aligns them with clustal-omega, then back-translates them. Takes
three arguments:
1) Tandem duplicates CSV
2) Representative transcripts FASTA
3) Output directory
"""
import sys
import tempfile
import os
import itertools
from Bio import SeqIO
from Bio import SeqRecord
from Bio import Seq
from Bio.Align.Applications import ClustalOmegaCommandline
def parse_transcripts(trans):
    """Parse a FASTA file and return {gene_id: SeqRecord}, keyed by the
    sequence ID with its trailing _suffix stripped."""
    records = SeqIO.to_dict(SeqIO.parse(trans, 'fasta'))
    return {name.split('_')[0]: rec for name, rec in records.items()}
def get_cds(geneid, seqdict):
    """Translate the CDS of `geneid` and return it as an amino-acid SeqRecord."""
    protein = seqdict[geneid].seq.translate()
    # Wrap the bare Seq in a SeqRecord so it can be written/aligned downstream.
    return SeqRecord.SeqRecord(protein, id=geneid, description='')
def align_genes(gene1, gene2):
    """Align the two protein SeqRecords with clustal-omega; return the open
    NamedTemporaryFile handle holding the FASTA alignment (caller must close)."""
    # Temp files for clustal input and output; kept open so they persist
    # until the caller closes them.
    clust_in = tempfile.NamedTemporaryFile(
        prefix='CO_in_',
        suffix='.fasta',
        mode='w+t')
    clust_out = tempfile.NamedTemporaryFile(
        prefix='CO_out_',
        suffix='.fasta',
        mode='w+t')
    # Write the sequences into the temp file
    SeqIO.write([gene1, gene2], clust_in, 'fasta')
    # Seek to the beginning else the file will appear empty
    clust_in.seek(0)
    # Run the command
    cline = ClustalOmegaCommandline(
        infile=clust_in.name,
        outfile=clust_out.name,
        seqtype='protein',
        force=True,
        iterations=10,
        distmat_full=True,
        distmat_full_iter=True)
    cline()
    clust_in.close()
    # Return the handle to the output file
    return clust_out
def back_translate(aln_file, seqdict):
    """Back-translate the aligned amino acids, using the original CDS
    nucleotide sequences: each residue is replaced by its source codon and
    each gap by '---'."""
    aln = SeqIO.parse(aln_file.name, 'fasta')
    bt_seq = []
    for prot_seq in aln:
        codon = 0
        bt = ''
        # Original (nucleotide) record for this aligned protein.
        nuc = seqdict[prot_seq.id]
        for aa in prot_seq:
            if aa == '-':
                bt += '---'
            else:
                # NOTE(review): `nuc` is a SeqRecord, so this slice is a
                # SeqRecord and `str += SeqRecord` relies on Biopython's
                # concatenation behaviour — confirm the resulting elements'
                # type matches what write_alignment expects.
                bt += nuc[codon*3:(codon*3)+3]
                codon += 1
        bt_seq.append(bt)
    return bt_seq
def write_alignment(nuc_aln, outdir):
    """Write the back-translated alignment in FASTA format into `outdir`.

    The output filename is the '-'-joined gene IDs (suffixes stripped) of the
    aligned sequences, with a .fasta extension.  Progress is reported on
    stderr.
    """
    # Get the gene IDs from the alignment data
    gids = [s.id.split('_')[0] for s in nuc_aln]
    # Join them to make a filename
    fname = '-'.join(gids)
    # Generate an output filename
    abs_outdir = os.path.abspath(os.path.expanduser(outdir))
    outname = os.path.join(abs_outdir, fname + '.fasta')
    # Context manager guarantees the handle is closed even if SeqIO.write
    # raises (the original open()/close() pair leaked it on error).
    with open(outname, 'w') as handle:
        SeqIO.write(nuc_aln, handle, 'fasta')
    # Write a lil message to stderr that says we finished
    sys.stderr.write('Wrote ' + fname + '\n')
    return
def main(tandem, transcripts, outdir):
    """Main function: for each tandem-duplicate cluster, align every gene
    pair (protein-level), back-translate, and write the nucleotide alignment."""
    tx_seqs = parse_transcripts(transcripts)
    with open(tandem, 'r') as f:
        for line in f:
            # We want to take two genes at a time.
            genes = line.strip().split(',')
            gene_pairs = itertools.combinations(genes, 2)
            for gp in gene_pairs:
                g1 = get_cds(gp[0], tx_seqs)
                g2 = get_cds(gp[1], tx_seqs)
                aln = align_genes(g1, g2)
                nuc_aln = back_translate(aln, tx_seqs)
                write_alignment(nuc_aln, outdir)
                # Be good, and clean up our open handles
                aln.close()
    return


# Command-line invocation: tandem CSV, transcripts FASTA, output directory.
main(sys.argv[1], sys.argv[2], sys.argv[3])
| [
"konox006@umn.edu"
] | konox006@umn.edu |
50ac68c917222046abbdec7b8a40f6f7aa28d8e3 | 5182897b2f107f4fd919af59c6762d66c9be5f1d | /.history/src/Simulador_20200711141345.py | 0c144e119f344e7814e5e7eaab36a9bbce38ee79 | [
"MIT"
] | permissive | eduardodut/Trabalho_final_estatistica_cd | 422b7e702f96291f522bcc68d2e961d80d328c14 | fbedbbea6bdd7a79e1d62030cde0fab4e93fc338 | refs/heads/master | 2022-11-23T03:14:05.493054 | 2020-07-16T23:49:26 | 2020-07-16T23:49:26 | 277,867,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,880 | py | import pandas as pd
import numpy as np
from Matriz_esferica import Matriz_esferica
from Individuo import Individuo, Fabrica_individuo
import random
from itertools import permutations
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from scipy.sparse import csr_matrix, lil_matrix
class Simulador():
SADIO = 0
INFECTADO_TIPO_1 = 1 #assintomáticos e o infectado inicial
INFECTADO_TIPO_2 = 2 #sintomático
CURADO = 3
MORTO = 4
def __init__(
self,
tamanho_matriz, #numero de linhas e colunas da matriz esférica
percentual_inicial_tipo1, #percentual inicial da população que será infectada tipo 1
percentual_inicial_tipo2, #percentual inicial da população que será infectada tipo 2
chance_infeccao, #chance que um infectado tipo 2 tem de infectar um indivíduo saudável
chance_infeccao_tipo2, #chance de um indivíduo infectado se tornar contagioso
chance_morte, #chance de um indivíduo tipo 2 morrer ao fim de uma atualização
atualizacoes_cura): #número de atualizações necessárias para a cura de um indivíduo tipo 1 ou 2
self.num_atualizacoes = 0
self.lista_infectados_tipo_2 = []
self.lista_infectados_tipo_1 = []
self.num_curados = 0
self.num_mortos = 0
self.chance_infeccao = chance_infeccao
self.chance_infeccao_tipo2 = chance_infeccao_tipo2
self.chance_morte = chance_morte
self.atualizacoes_cura = atualizacoes_cura
self.populacao_inicial = int(tamanho_matriz**2)
self.num_inicial_tipo2 = int(self.populacao_inicial * percentual_inicial_tipo2)
self.num_inicial_tipo1 = 1 + int(self.populacao_inicial * percentual_inicial_tipo1)
self.num_inicial_sadios = self.populacao_inicial - (self.num_inicial_tipo2 + self.num_inicial_tipo1)
self.matriz_status = lil_matrix((tamanho_matriz, tamanho_matriz),dtype= np.uint8)
self.matriz_atualizacoes_cura = lil_matrix((tamanho_matriz, tamanho_matriz),dtype= np.uint8)
#self.matriz_status = self.df_individuos.to_numpy()
self.popular(tamanho_matriz)
self.lista_matrizes_status = []
#objeto que é responsável por validar a movimentação no grid n x n
self.matriz_esferica = Matriz_esferica(tamanho_matriz)
dict = {
'num_sadios':self.num_inicial_sadios,
'num_infect_t1':self.num_inicial_tipo1,
'num_infect_t2':self.num_inicial_tipo2,
'num_curados':0,
'num_mortos':0}
#dataframe que guardará os resultados de cada atualização
self.dataframe = pd.DataFrame(dict,index = [0])
self.salvar_posicionamento()
def criar_individuo(self, status, posicao):
self.matriz_status[posicao[0], posicao[1]] = status
if status == self.INFECTADO_TIPO_1 or status == self.INFECTADO_TIPO_2:
self.matriz_status[posicao[0], posicao[1]] = self.atualizacoes_cura
def salvar_posicionamento(self):
self.lista_matrizes_status.append(self.matriz_status)
def verificar_infeccao(self, lista_infectantes):
lista_novos_infectados_tipo1 = []
lista_novos_infectados_tipo2 = []
#itera sobre sobre a lista de individuos que infectam e cada um realiza a tividade de infectar
print(lista_infectantes)
for indice_infectante in lista_infectantes:
print("Infectante na posição: ", indice_infectante)
#busca os vizinhos do infectante atual
lista_vizinhos = self.matriz_esferica.get_vizinhos(indice_infectante)
print("Lista vizinhos: ", lista_vizinhos)
#Para cada vizinho, se ele for sadio, é gerado um número aleatório para verificar se foi infectado
for indice_vizinho in lista_vizinhos:
#verificação de SADIO
if self.verifica_status(indice_vizinho) == self.SADIO:
#verificação do novo status
novo_status = self.infectar(chance_infeccao, chance_infeccao_tipo2)
#se for um infectado tipo 1
if novo_status == Individuo.INFECTADO_TIPO_1:
#adiciona na lista de novos tipo 1
lista_novos_infectados_tipo1.append(indice_vizinho)
self.criar_individuo(Individuo.INFECTADO_TIPO_1,indice_vizinho)
if novo_status == Individuo.INFECTADO_TIPO_2:
#adiciona na lista de novos tipo 1
lista_novos_infectados_tipo2.append(indice_vizinho)
self.criar_individuo(Individuo.INFECTADO_TIPO_2,indice_vizinho)
return lista_novos_infectados_tipo1, lista_novos_infectados_tipo2
def checagem_morte_individual(self, chance_morte, indice):
rng_morte = random.random()
if rng_morte <= chance_morte:
self.matriz_status[indice[0], indice[1]] = self.MORTO
return self.MORTO
else:
return self.checar_cura_individual(indice)
def checar_cura_individual(self, indice):
self.matriz_atualizacoes_cura[indice[0], indice[1]] -= 1
if self.matriz_atualizacoes_cura[indice[0], indice[1]] == 0:
self.matriz_status[indice[0], indice[1]] = self.CURADO
return self.CURADO
else:
return self.matriz_status[indice[0], indice[1]]
def checagem_morte_cura_lista(self, lista_infectantes_tipo2):
lista_curados = []
lista_mortos = []
for indice_infectante in lista_infectantes_tipo2:
novo_status = self.checagem_morte_individual(self.chance_morte, indice_infectante)
if novo_status == Individuo.MORTO:
lista_mortos.append(indice_infectante)
if novo_status == Individuo.CURADO:
lista_curados.append(indice_infectante)
return lista_mortos, lista_curados
def checagem_cura_lista(self, lista_infectantes):
lista_curados = []
for indice_infectante in lista_infectantes:
novo_status = checar_cura_individual(indice_infectante)
if novo_status == Individuo.CURADO:
lista_curados.append(indice_infectante)
return lista_curados
def iterar(self):
#Verifica os novos infectados por infectantes do tipo 1 e 2
lista_novos_infectados_tipo1, lista_novos_infectados_tipo2 = self.verificar_infeccao(self.lista_infectados_tipo_1+self.lista_infectados_tipo_2)
# #Verifica morte dos tipo 2
lista_mortos_atualizacao, lista_curados_t2_atualizacao = self.checagem_morte_cura_lista(self.lista_infectados_tipo_2)
self.lista_infectados_tipo_2 = [indice for indice in self.lista_infectados_tipo_2 if indice not in lista_mortos_atualizacao and indice not in lista_curados_t2_atualizacao]
# #atualiza o novo número de mortos
# self.num_mortos += len(lista_mortos_atualizacao)
# #Verificar cura
# lista_curados_t1_atualizacao = self.verificar_cura(self.lista_infectados_tipo_1)
# self.lista_infectados_tipo_1 = [indice for indice in self.lista_infectados_tipo_1 if indice not in lista_curados_t1_atualizacao ]
# #adiciona os novos curados na lista geral de curados
# self.num_curados = self.num_curados + len(lista_curados_t1_atualizacao) + len(lista_curados_t2_atualizacao)
# # self. #movimentar infectantes:
# # for x,y in self.lista_infectados_tipo_1:
# # self.mover_infectante((x,y))
# # for x,y in self.lista_infectados_tipo_2:
# # self.mover_infectante((x,y))
# #adicionar os novos infectados tipo 1 e 2 para as respectivas listas
# self.lista_infectados_tipo_2 = self.lista_infectados_tipo_2 + lista_novos_infectados_tipo2
# self.lista_infectados_tipo_1 = self.lista_infectados_tipo_1 + lista_novos_infectados_tipo1
# dict = {
# 'num_sadios':self.populacao_inicial - self.num_mortos - self.num_curados - len(self.lista_infectados_tipo_1) - len(self.lista_infectados_tipo_2) ,
# 'num_infect_t1':len(self.lista_infectados_tipo_1),
# 'num_infect_t2':len(self.lista_infectados_tipo_2),
# 'num_curados':self.num_curados,
# 'num_mortos':self.num_mortos}
# self.dataframe = self.dataframe.append(dict, ignore_index=True)
# print("num t1: ", len(self.lista_infectados_tipo_1))
# print("num t2: ", len(self.lista_infectados_tipo_2))
# print("num curados: ", self.num_curados)
# print("num mortos: ", self.num_mortos)
# print("---------")
# #salva a nova matriz de status
# self.salvar_posicionamento()
# #adiciona 1 ao número de atualizações realizadas na matriz
# self.num_atualizacoes +=1
def infectar(self, chance_infeccao, chance_infeccao_tipo2):
saida = Individuo.SADIO
#número aleatório para chance de infectar o vizinho
rng_infeccao = random.random()
if rng_infeccao <= chance_infeccao:
#número aleatório para chance de infecção tipo 1 ou 2
rng_infeccao_tipo2 = random.random()
if rng_infeccao_tipo2 <= chance_infeccao_tipo2:
saida = Individuo.INFECTADO_TIPO_2
else:
saida = Individuo.INFECTADO_TIPO_1
return saida
def popular(self, tamanho_matriz):
#self.df_individuos.iloc[:,:] = self.criar_individuo(Individuo.SADIO,(0,0))
#lista de possíveis combinações de índices da matriz de dados
permutacoes = permutations(list(range(tamanho_matriz)),2)
#conversão para lista de tuplas(x,y)
lista_indices = list(permutacoes)
#embaralhamento dos índices
random.shuffle(lista_indices)
#cria o primeiro tipo1:
indice = lista_indices.pop()
self.criar_individuo(Individuo.INFECTADO_TIPO_1,indice)
self.lista_infectados_tipo_1.append(indice)
#cria o restante dos tipos 1
for i in range(1,self.num_inicial_tipo1):
indice = lista_indices.pop()
self.criar_individuo(Individuo.INFECTADO_TIPO_1,indice)
self.lista_infectados_tipo_1.append(indice)
#cria o restante dos tipo 2:
for indice in range(self.num_inicial_tipo2):
indice = lista_indices.pop()
self.criar_individuo(Individuo.INFECTADO_TIPO_2,indice)
self.lista_infectados_tipo_2.append(indice)
def trocar_status_localizacao(self,ponto_ini,ponto_final):
x_ini = ponto_ini[0]
y_ini = ponto_ini[1]
x_fin = ponto_final[0]
y_fin = ponto_final[1]
aux = self.df_individuos.loc[x_fin,y_fin]
print("Aux2====>: ",self.df_individuos.loc[x_fin,y_fin])
self.df_individuos.loc[x_fin,y_fin], self.df_individuos.loc[x_ini,y_ini] = self.df_individuos.loc[x_ini,y_ini], self.df_individuos.loc[x_fin,y_fin]
# self.df_individuos.loc[x_fin,y_fin] = self.df_individuos.loc[x_ini,y_ini]
# self.df_individuos.loc[x_ini,y_ini] = aux2
self.matriz_status[x_fin,y_fin] = self.df_individuos.loc[x_fin,y_fin].status
self.matriz_status[x_ini,y_ini] = self.df_individuos.loc[x_ini,y_ini].status
def verifica_status(self, indice):
    """Return the status stored in ``self.matriz_status`` at ``indice`` (x, y)."""
    linha, coluna = indice[0], indice[1]
    return self.matriz_status[linha, coluna]
def mover_infectante(self, posicao_inicial):
    """Move an infectious individual one step in a random direction.

    A single uniform draw picks up/down/left/right with equal probability
    (0.25 each); the destination is validated/wrapped by the spherical
    matrix helper before the two cells are swapped.
    """
    pos_x, pos_y = posicao_inicial[0], posicao_inicial[1]
    sorteio = random.random()
    if sorteio <= 0.25:
        pos_x -= 1  # up
    elif sorteio <= 0.5:
        pos_x += 1  # down
    elif sorteio <= 0.75:
        pos_y -= 1  # left
    else:
        pos_y += 1  # right
    posicao_final = self.matriz_esferica.valida_ponto_matriz(pos_x, pos_y)
    self.trocar_status_localizacao(posicao_inicial, posicao_final)
# --- Simulation hyper-parameters ---
chance_infeccao = 1  # probability of infecting a neighbour (1 = always)
chance_infeccao_tipo2 = 0.2  # probability an infection is type 2
chance_morte = 0.1  # death probability per update
atualizacoes_cura = 10  # updates until cure
percentual_inicial_tipo1 = 0.0  # initial fraction of type-1 infected
percentual_inicial_tipo2 = 0.1  # initial fraction of type-2 infected
# Build the simulator; first positional argument appears to be the grid
# size — TODO confirm against Simulador's signature.
sim = Simulador(
    10,
    percentual_inicial_tipo1,
    percentual_inicial_tipo2,
    chance_infeccao,
    chance_infeccao_tipo2,
    chance_morte, atualizacoes_cura)
#print(sim.lista_matrizes_posicionamento[0])
#print(sim.lista_infectados_tipo_2)
#print(sim.lista_infectados_tipo_1)
# colour map for plotting the 5 possible cell states
cmap = ListedColormap(['w', 'y', 'r', 'blue', 'black'])
# while sim.dataframe.iloc[-1]['num_infect_t1']+sim.dataframe.iloc[-1]['num_infect_t2'] > 0:
#     print(sim.df_individuos)
#     #print("xxxxxxxxxxxxxxxxxTipo: ",type(sim.lista_matrizes_posicionamento[len(sim.lista_matrizes_posicionamento)-1].toarray()))
#     #plt.matshow(sim.lista_matrizes_posicionamento[0], cmap = cmap, vmin= 0, vmax = 4)
#     #plt.show()
#     sim.iterar()
print(sim.dataframe)
print(sim.lista_infectados_tipo_1)
print(sim.lista_infectados_tipo_2)
# run a single simulation step
sim.iterar()
# print("status inicial: ", sim.df_individuos[1][0].status)
# print("Novos infectados: ", sim.verificar_infeccao(sim.lista_infectados_tipo_1))
# plt.show()
| [
"eduardo_dut@edu.unifor.br"
] | eduardo_dut@edu.unifor.br |
2141b204a271ad34cce0f84c94daf409ea90d43d | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/16g.py | 61c7820f687186750a1ef8b50a1e5cb43d361671 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 62 | py | ii = [('CookGHP.py', 1), ('GodwWLN.py', 1), ('BentJRP.py', 1)] | [
"prabhjyotsingh95@gmail.com"
] | prabhjyotsingh95@gmail.com |
5532c264f012df678571ce895acd1cc83a14f820 | e909e9bb4b2e54bb64d6bee9cf9fbaf14c584e04 | /malib/algorithm/maddpg/loss.py | 6595de6c38d2c580a92fad36c131c5dd868c5dd8 | [
"MIT"
] | permissive | zhihaolyu/malib | 9cd8fdcdc1c613c11fc1e6f385adac5312474509 | 1c7ca1819325796a6ec604aa1ae8c771708fc50c | refs/heads/main | 2023-05-13T03:41:05.211832 | 2021-06-08T04:35:10 | 2021-06-08T04:35:10 | 374,880,657 | 0 | 0 | MIT | 2021-06-08T04:29:26 | 2021-06-08T04:29:25 | null | UTF-8 | Python | false | false | 4,862 | py | import torch
import gym
from malib.algorithm.common import misc
from malib.algorithm.ddpg.loss import DDPGLoss
from malib.backend.datapool.offline_dataset_server import Episode
from malib.algorithm.common.model import get_model
class MADDPGLoss(DDPGLoss):
def __init__(self):
super(MADDPGLoss, self).__init__()
self.cnt = 0
self._params = {
"tau": 0.01,
"grad_norm_clipping": 0.5,
"actor_lr": 1e-2,
"critic_lr": 1e-2,
"optimizer": "Adam",
}
def _set_centralized_critic(self):
global_state_space = self.policy.custom_config["global_state_space"]
self.policy.deregister_state("critic")
self.policy.deregister_state("target_critic")
model_cls = get_model(self.policy.model_config["critic"])
self.policy.set_critic(model_cls(global_state_space, gym.spaces.Discrete(1)))
self.policy.target_critic = model_cls(
global_state_space, gym.spaces.Discrete(1)
)
self.policy.update_target()
def reset(self, policy, config):
"""Replace critic with a centralized critic"""
self._params.update(config)
if policy is not self.policy:
self._policy = policy
self._set_centralized_critic()
self.setup_optimizers()
def step(self):
self.policy.soft_update(tau=self._params["tau"])
return None
def __call__(self, agent_batch):
FloatTensor = (
torch.cuda.FloatTensor
if self.policy.custom_config["use_cuda"]
else torch.FloatTensor
)
cast_to_tensor = lambda x: FloatTensor(x.copy())
cliprange = self._params["grad_norm_clipping"]
# print(all_agent_batch[agent_id])
rewards = cast_to_tensor(agent_batch[self.main_id][Episode.REWARDS]).view(-1, 1)
dones = cast_to_tensor(agent_batch[self.main_id][Episode.DONES]).view(-1, 1)
cur_obs = cast_to_tensor(agent_batch[self.main_id][Episode.CUR_OBS])
gamma = self.policy.custom_config["gamma"]
target_vf_in_list_obs = []
target_vf_in_list_act = []
vf_in_list_obs = []
vf_in_list_act = []
# set target state
for aid in self.agents:
batch = agent_batch[aid]
target_vf_in_list_obs.append(cast_to_tensor(batch[Episode.NEXT_OBS]))
target_vf_in_list_act.append(batch["next_act_by_target"])
vf_in_list_obs.append(cast_to_tensor(batch[Episode.CUR_OBS]))
vf_in_list_act.append(cast_to_tensor(batch[Episode.ACTION_DIST]))
target_vf_state = torch.cat(
[*target_vf_in_list_obs, *target_vf_in_list_act], dim=1
)
vf_state = torch.cat([*vf_in_list_obs, *vf_in_list_act], dim=1)
# ============================== Critic optimization ================================
target_value = rewards + gamma * (1.0 - dones) * self.policy.target_critic(
target_vf_state
)
eval_value = self.policy.critic(vf_state)
assert eval_value.shape == target_value.shape, (
eval_value.shape,
target_value.shape,
)
value_loss = torch.nn.MSELoss()(eval_value, target_value.detach())
self.optimizers["critic"].zero_grad()
value_loss.backward()
torch.nn.utils.clip_grad_norm_(self.policy.critic.parameters(), cliprange)
self.optimizers["critic"].step()
# ==================================================================================
# ================================ Actor optimization ==============================
main_idx = None
for i, aid in enumerate(self.agents):
# replace with tensor
if aid == self.main_id:
vf_in_list_act[i] = self.policy.compute_actions(cur_obs)
main_idx = i
break
vf_state = torch.cat([*vf_in_list_obs, *vf_in_list_act], dim=1)
policy_loss = -self.policy.critic(vf_state).mean() # need add regularization?
policy_loss += (vf_in_list_act[main_idx] ** 2).mean() * 1e-3
self.optimizers["actor"].zero_grad()
policy_loss.backward()
torch.nn.utils.clip_grad_norm_(self.policy.actor.parameters(), cliprange)
self.optimizers["actor"].step()
# ==================================================================================
loss_names = [
"policy_loss",
"value_loss",
"target_value_est",
"value_est",
]
stats_list = [
policy_loss.detach().numpy(),
value_loss.detach().numpy(),
target_value.mean().detach().numpy(),
eval_value.mean().detach().numpy(),
]
return dict(zip(loss_names, stats_list))
| [
"kornbergfresnel@outlook.com"
] | kornbergfresnel@outlook.com |
4d4a4b30e934813ff025d0254db353158afb96f2 | 25872e1ba4f86cbbf77d0130f341b21e5dd9e692 | /SingleNumberIi.py | 3a629bea501e870c6d47a412d50885842d0bef96 | [] | no_license | zongxinwu92/leetcode | dc3d209e14532b9b01cfce6d4cf6a4c2d7ced7de | e1aa45a1ee4edaf72447b771ada835ad73e7f508 | refs/heads/master | 2021-06-10T21:46:23.937268 | 2017-01-09T09:58:49 | 2017-01-09T09:58:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 307 | py | '''
Created on 1.12.2017
@author: Jesse
''''''
Given an array of integers, every element appears three times except for one, which appears exactly once. Find that single one.
Note:
Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
"
'''
| [
"darrencheng0817@gmail.com"
] | darrencheng0817@gmail.com |
7e5d65332d4070875a7f9db581e37769d3d2be1c | ee121fc62ea630fe62870d535d2e63b68dc715ca | /docker/dkn/train/recsys_tools/recsys_kg/embed_dkn.py | fc2ea6d9149cdbf9bce3800fd67cbcc50864a101 | [] | no_license | amliuyong/GW-Rec-Release | 744e5b284854ebfc6b7bd056df9decfd24ffd0c4 | 45c7c7b59a4108cf5475c05d8c2fc1959955dd51 | refs/heads/main | 2023-02-26T02:19:43.697804 | 2020-12-30T08:32:32 | 2020-12-30T08:32:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 35,820 | py | import sys
import numpy as np
import tensorflow as tf
from sklearn.metrics import roc_auc_score
import pandas as pd
from collections import namedtuple
import subprocess
import argparse
import os
import json
import shutil
import glob
from datetime import date, timedelta
from tempfile import TemporaryDirectory
#################### CMD Arguments ####################
# Command-line flags for the DKN training script (TF 1.x tf.app.flags).
FLAGS = tf.app.flags.FLAGS
# model param
tf.app.flags.DEFINE_boolean(
    "transform", True, "whether to transform entity embeddings")
tf.app.flags.DEFINE_boolean("use_context", False,
                            "whether to transform context embeddings")
tf.app.flags.DEFINE_boolean("use_entity", True,
                            "whether to transform entity embeddings")
tf.app.flags.DEFINE_integer("max_click_history", 3,
                            "number of sampled click history for each user")
tf.app.flags.DEFINE_integer(
    "n_filters", 128, "number of filters for each size in KCNN")
tf.app.flags.DEFINE_list(
    'filter_sizes', [1, 2], 'list of filter sizes, e.g., --filter_sizes 2 3')
tf.app.flags.DEFINE_float('l2_weight', 0.001, 'weight of l2 regularization')
tf.app.flags.DEFINE_string('attention_activation', 'relu',
                           'activation method for attention module')
tf.app.flags.DEFINE_string('KGE', 'TransE',
                           'knowledge graph embedding method, please ensure that the specified input file exists')
tf.app.flags.DEFINE_integer('entity_dim', 400,
                            'dimension of entity embeddings, please ensure that the specified input file exists')
tf.app.flags.DEFINE_integer('word_dim', 300,
                            'dimension of word embeddings, please ensure that the specified input file exists')
tf.app.flags.DEFINE_integer('max_title_length', 32,
                            'maximum length of news titles, should be in accordance with the input datasets')
tf.app.flags.DEFINE_integer('attention_layer_sizes', 100,
                            'layer sizes of attention module')
tf.app.flags.DEFINE_list('layer_sizes', [100],
                         'layer size for final prediction score layer')
tf.app.flags.DEFINE_list('activation', ['sigmoid'],
                         'activation method for attention module')
# training param
tf.app.flags.DEFINE_integer("perform_shuffle", 0, "perform shuffle data")
tf.app.flags.DEFINE_integer("num_epochs", 10, "Number of epochs")
tf.app.flags.DEFINE_integer("batch_size", 128, "Number of batch size")
tf.app.flags.DEFINE_integer("log_steps", 1000, "save summary every steps")
tf.app.flags.DEFINE_float("learning_rate", 0.001, "learning rate")
tf.app.flags.DEFINE_float("embed_l1", 0.00000, "L1 regularization for embeddings")
tf.app.flags.DEFINE_float("layer_l1", 0.00000, "L1 regularization for nn layers")
tf.app.flags.DEFINE_float("embed_l2", 0.00001, "L2 regularization for embeddings")
# tf.app.flags.DEFINE_float("layer_l2", 0.00003, "L2 regularization for nn layers")
tf.app.flags.DEFINE_float("layer_l2", 0.001, "L2 regularization for nn layers")
tf.app.flags.DEFINE_float("cross_l1", 0.00000, "cross L1 regularization")
tf.app.flags.DEFINE_float("cross_l2", 0.00000, "corss L2 regularization")
tf.app.flags.DEFINE_string("loss_type", 'log_loss',
                           "loss type {square_loss, log_loss}")
tf.app.flags.DEFINE_string(
    "optimizer", 'Adam', "optimizer type {Adam, Adagrad, GD, Momentum}")
tf.app.flags.DEFINE_string("data_dir", '', "data dir")
tf.app.flags.DEFINE_string("dt_dir", '', "data dt partition")
tf.app.flags.DEFINE_string("model_dir", '', "model check point dir")
tf.app.flags.DEFINE_string("servable_model_dir", '',
                           "export servable model for TensorFlow Serving")
tf.app.flags.DEFINE_string(
    "task_type", 'train', "task type {train, infer, eval, export}")
tf.app.flags.DEFINE_boolean("clear_existing_model",
                            False, "clear existing model or not")
tf.app.flags.DEFINE_string(
    "checkpointPath", '', "checkpoint path during training ")
tf.app.flags.DEFINE_float(
    "loss_weight", '1.0', "weight for pos sample")
tf.app.flags.DEFINE_list('dropout', [0.0], 'dropout parameters of training stage')

# Record type for an in-memory dataset; NOTE(review): appears unused by the
# tf.data pipeline below — confirm before removing.
Data = namedtuple('Data', ['size', 'clicked_words',
                           'clicked_entities', 'news_words', 'news_entities', 'labels'])
class DKN(object):
    """Deep Knowledge-aware Network (DKN) for news CTR prediction.

    Builds a TF1 graph: word/entity embeddings -> KCNN per news title ->
    attention over the user's click history -> DNN scoring layer.
    Instantiating the class builds the forward graph; call
    ``_build_train`` afterwards to add the loss/optimizer.
    """

    # def __init__(self, params, feature=None, labels=None, hparams=hparams):
    def __init__(self, params, feature=None, labels=None):
        """Build the forward graph from `feature` tensors and `params` dict."""
        # prepare train/test data
        # print(params)
        self.train_data = None
        self.test_data = None
        seed = 30
        init_value = 0.1
        self.initializer = tf.random_uniform_initializer(
            -init_value, init_value, seed=seed)
        # NOTE(review): self.n_filters_total is set here but the model code
        # below uses self.num_filters_total (set in _kims_cnn) — confirm
        # this attribute is still needed.
        self.n_filters_total = params["n_filters"] * len(params["filter_sizes"])
        self.reg_params = []  # for computing regularization loss
        self.layer_params = []
        self.embed_params = []
        self.cross_params = []
        self._build_inputs(params, feature, labels)
        # # build raw model
        # self._build_model(params)
        # build ms implement model
        # self.hparams=hparams
        self.params = params
        self._build_ms_model(params)
        # self._build_train(params)

    def _build_inputs(self, params, feature=None, labels=None):
        """Bind input tensors (candidate news + click history) and labels."""
        self.clicked_words = feature["click_words"]
        self.clicked_entities = feature["click_entities"]
        self.news_words = feature["news_words"]
        self.news_entities = feature["news_entities"]
        self.labels = labels
        # print("!!!!!!!!!!verify input shape")
        # print("!!!!!!!!!!clicked words {}".format(self.clicked_words))
        # print("!!!!!!!!!!clicked entities {}".format(self.clicked_entities))
        # print("!!!!!!!!!!news words {}".format(self.news_words))
        # print("!!!!!!!!!!news entities {}".format(self.news_entities))

    def _build_ms_model(self, params):
        """Load pretrained word/entity(/context) embeddings, optionally add a
        trainable transform layer, then build the scoring graph."""
        with tf.name_scope('embedding'):
            # Embedding files live next to the training data (SageMaker
            # channel or --data_dir/train).
            if FLAGS.data_dir == '':
                raw_dir = os.environ.get('SM_CHANNEL_TRAIN')
            else:
                raw_dir = os.path.join(FLAGS.data_dir, 'train')
            word_embs = np.load(
                os.path.join(raw_dir, 'word_embeddings_' + str(params["word_dim"]) + '.npy'))
            entity_embs = np.load(os.path.join(raw_dir, 'entity_embeddings_' +
                                               params["KGE"] + '_' + str(params["entity_dim"]) + '.npy'))
            # word_embs = np.load(os.path.join(raw_dir, 'word_embeddings.npy'))
            # entity_embs = np.load(os.path.join(
            #     raw_dir, 'entity_embeddings.npy'))
            # Pretrained embeddings are frozen (trainable=False).
            self.word_embeddings = tf.Variable(
                word_embs, trainable=False, dtype=np.float32, name='word')
            # self.word_embeddings = word_embs
            self.entity_embeddings = tf.Variable(
                entity_embs, trainable=False, dtype=np.float32, name='entity')
            # self.entity_embeddings = entity_embs
            # self.reg_params.append(self.word_embeddings)
            # self.reg_params.append(self.entity_embeddings)
            # print("run here 1!")
            # print(params["use_context"])
            if params["use_context"]:
                # print("run here 2.1!")
                context_embs = np.load(os.path.join(raw_dir, 'context_embeddings_' +
                                                    params["KGE"] + '_' + str(params["entity_dim"]) + '.npy'))
                self.context_embeddings = tf.Variable(
                    context_embs, dtype=np.float32, name='context')
                # self.reg_params.append(self.context_embeddings)
                # print("run here 2.2!")
            if params["transform"]:
                # print("run here 3.1!")
                # Trainable non-linear projection of the (frozen) entity
                # embeddings into the same space.
                self.entity_embeddings = tf.layers.dense(
                    self.entity_embeddings, units=params["entity_dim"], activation=tf.nn.tanh, name='transformed_entity',
                    kernel_regularizer=tf.contrib.layers.l2_regularizer(params["l2_weight"]))
                # print("run here 3.2!")
                if params["use_context"]:
                    self.context_embeddings = tf.layers.dense(
                        self.context_embeddings, units=params["entity_dim"], activation=tf.nn.tanh,
                        name='transformed_context', kernel_regularizer=tf.contrib.layers.l2_regularizer(params["l2_weight"]))
        # print("build graph")
        self.logit = tf.reshape(self._build_graph(), [-1])
        # print("build output")
        self.output = tf.sigmoid(self.logit)

    def _build_graph(self):
        """Create the DKN scoring sub-graph and return the raw logit tensor."""
        params = self.params
        # print("params {}".format(params))
        self.keep_prob_train = 1 - np.array(params["dropout"])
        self.keep_prob_test = np.ones_like(params["dropout"])
        with tf.compat.v1.variable_scope("DKN") as scope:
            print("build dkn")
            logit = self._build_dkn()
            return logit

    def _build_dkn(self):
        """The main function to create DKN's logic.

        Returns:
            obj: Prediction score made by the DKN model.
        """
        params = self.params
        click_news_embed_batch, candidate_news_embed_batch = self._build_pair_attention(
            self.news_words,
            self.news_entities,
            self.clicked_words,
            self.clicked_entities,
            self.params,
        )
        # Concatenate user embedding and candidate news embedding as the
        # DNN input.
        nn_input = tf.concat(
            [click_news_embed_batch, candidate_news_embed_batch], axis=1
        )
        dnn_channel_part = 2
        last_layer_size = dnn_channel_part * self.num_filters_total
        layer_idx = 0
        hidden_nn_layers = []
        hidden_nn_layers.append(nn_input)
        with tf.compat.v1.variable_scope(
            "nn_part", initializer=self.initializer
        ) as scope:
            for idx, layer_size in enumerate(params["layer_sizes"]):
                curr_w_nn_layer = tf.compat.v1.get_variable(
                    name="w_nn_layer" + str(layer_idx),
                    shape=[last_layer_size, layer_size],
                    dtype=tf.float32,
                )
                curr_b_nn_layer = tf.compat.v1.get_variable(
                    name="b_nn_layer" + str(layer_idx),
                    shape=[layer_size],
                    dtype=tf.float32,
                )
                curr_hidden_nn_layer = tf.compat.v1.nn.xw_plus_b(
                    hidden_nn_layers[layer_idx], curr_w_nn_layer, curr_b_nn_layer
                )
                # if hparams.enable_BN is True:
                #     curr_hidden_nn_layer = tf.layers.batch_normalization(
                #         curr_hidden_nn_layer,
                #         momentum=0.95,
                #         epsilon=0.0001,
                #         training=self.is_train_stage,
                #     )
                # NOTE(review): `activation` from params is read but sigmoid
                # is hard-coded below — confirm this is intentional.
                activation = params["activation"][idx]
                # curr_hidden_nn_layer = self._active_layer(
                #     logit=curr_hidden_nn_layer, activation=activation
                # )
                curr_hidden_nn_layer = tf.nn.sigmoid(curr_hidden_nn_layer)
                hidden_nn_layers.append(curr_hidden_nn_layer)
                layer_idx += 1
                last_layer_size = layer_size
                # self.layer_params.append(curr_w_nn_layer)
                # self.layer_params.append(curr_b_nn_layer)
            w_nn_output = tf.compat.v1.get_variable(
                name="w_nn_output", shape=[last_layer_size, 1], dtype=tf.float32
            )
            b_nn_output = tf.compat.v1.get_variable(
                name="b_nn_output", shape=[1], dtype=tf.float32
            )
            # self.layer_params.append(w_nn_output)
            # self.layer_params.append(b_nn_output)
            nn_output = tf.compat.v1.nn.xw_plus_b(
                hidden_nn_layers[-1], w_nn_output, b_nn_output
            )
            return nn_output

    def _build_pair_attention(
        self,
        candidate_word_batch,
        candidate_entity_batch,
        click_word_batch,
        click_entity_batch,
        params,
    ):
        """This function learns the candidate news article's embedding and user embedding.

        User embedding is generated from click history and also depends on the
        candidate news article via attention mechanism. Article embedding is
        generated via KCNN module.

        Args:
            candidate_word_batch (obj): tensor word indices for constructing news article
            candidate_entity_batch (obj): tensor entity values for constructing news article
            click_word_batch (obj): tensor word indices for constructing user clicked history
            click_entity_batch (obj): tensor entity indices for constructing user clicked history
            params (obj): global hyper-parameters

        Returns:
            click_field_embed_final_batch: user embedding
            news_field_embed_final_batch: candidate news article embedding
        """
        doc_size = params["max_title_length"]
        attention_hidden_sizes = params["attention_layer_sizes"]
        # Flatten click history: (batch * max_click_history, doc_size).
        clicked_words = tf.reshape(click_word_batch, shape=[-1, doc_size])
        clicked_entities = tf.reshape(click_entity_batch, shape=[-1, doc_size])
        with tf.compat.v1.variable_scope(
            "attention_net", initializer=self.initializer
        ) as scope:
            # use kims cnn to get conv embedding; AUTO_REUSE shares KCNN
            # weights between candidate and clicked titles.
            with tf.compat.v1.variable_scope(
                "kcnn", initializer=self.initializer, reuse=tf.compat.v1.AUTO_REUSE
            ) as cnn_scope:
                news_field_embed = self._kims_cnn(
                    candidate_word_batch, candidate_entity_batch, params
                )
                click_field_embed = self._kims_cnn(
                    clicked_words, clicked_entities, params
                )
                click_field_embed = tf.reshape(
                    click_field_embed,
                    shape=[
                        -1,
                        params["max_click_history"],
                        params["n_filters"] * len(params["filter_sizes"]),
                    ],
                )
            avg_strategy = False
            if avg_strategy:
                # Simple mean over click history instead of attention.
                click_field_embed_final = tf.reduce_mean(
                    click_field_embed, axis=1, keepdims=True
                )
            else:
                # Attention: score each clicked title against the candidate.
                news_field_embed = tf.expand_dims(news_field_embed, 1)
                news_field_embed_repeat = tf.add(
                    tf.zeros_like(click_field_embed), news_field_embed
                )
                attention_x = tf.concat(
                    axis=-1, values=[click_field_embed, news_field_embed_repeat]
                )
                attention_x = tf.reshape(
                    attention_x, shape=[-1, self.num_filters_total * 2]
                )
                attention_w = tf.compat.v1.get_variable(
                    name="attention_hidden_w",
                    shape=[self.num_filters_total * 2, attention_hidden_sizes],
                    dtype=tf.float32,
                )
                attention_b = tf.compat.v1.get_variable(
                    name="attention_hidden_b",
                    shape=[attention_hidden_sizes],
                    dtype=tf.float32,
                )
                curr_attention_layer = tf.compat.v1.nn.xw_plus_b(
                    attention_x, attention_w, attention_b
                )
                # NOTE(review): `attention_activation` from params is read but
                # relu is hard-coded below — confirm this is intentional.
                activation = params["attention_activation"]
                curr_attention_layer = tf.nn.relu(curr_attention_layer)
                attention_output_w = tf.compat.v1.get_variable(
                    name="attention_output_w",
                    shape=[attention_hidden_sizes, 1],
                    dtype=tf.float32,
                )
                attention_output_b = tf.compat.v1.get_variable(
                    name="attention_output_b", shape=[1], dtype=tf.float32
                )
                attention_weight = tf.compat.v1.nn.xw_plus_b(
                    curr_attention_layer, attention_output_w, attention_output_b
                )
                attention_weight = tf.reshape(
                    attention_weight, shape=[-1, params["max_click_history"], 1]
                )
                # Softmax over the click-history axis, then weighted sum.
                norm_attention_weight = tf.nn.softmax(attention_weight, axis=1)
                click_field_embed_final = tf.reduce_sum(
                    tf.multiply(click_field_embed, norm_attention_weight),
                    axis=1,
                    keepdims=True,
                )
                # if attention_w not in self.layer_params:
                #     self.layer_params.append(attention_w)
                # if attention_b not in self.layer_params:
                #     self.layer_params.append(attention_b)
                # if attention_output_w not in self.layer_params:
                #     self.layer_params.append(attention_output_w)
                # if attention_output_b not in self.layer_params:
                #     self.layer_params.append(attention_output_b)
            self.news_field_embed_final_batch = tf.squeeze(news_field_embed)
            click_field_embed_final_batch = tf.squeeze(click_field_embed_final)
        return click_field_embed_final_batch, self.news_field_embed_final_batch

    def _kims_cnn(self, word, entity, params):
        """The KCNN module. KCNN is an extension of traditional CNN that
        incorporates symbolic knowledge from a knowledge graph into sentence
        representation learning.

        Args:
            word (obj): word indices for the sentence.
            entity (obj): entity indices for the sentence. Entities are aligned with words in the sentence.
            params (obj): global hyper-parameters.

        Returns:
            obj: Sentence representation.
        """
        # kims cnn parameter
        filter_sizes = params["filter_sizes"]
        num_filters = params["n_filters"]
        dim = params["word_dim"]
        embedded_chars = tf.nn.embedding_lookup(self.word_embeddings, word)
        print(embedded_chars)
        # Stack word / entity / context channels along the embedding axis.
        if params["use_entity"] and params["use_context"]:
            entity_embedded_chars = tf.nn.embedding_lookup(
                self.entity_embeddings, entity
            )
            context_embedded_chars = tf.nn.embedding_lookup(
                self.context_embeddings, entity
            )
            concat = tf.concat(
                [embedded_chars, entity_embedded_chars, context_embedded_chars], axis=-1
            )
        elif params["use_entity"]:
            entity_embedded_chars = tf.nn.embedding_lookup(
                self.entity_embeddings, entity
            )
            print(entity_embedded_chars)
            concat = tf.concat([embedded_chars, entity_embedded_chars], axis=-1)
        else:
            concat = embedded_chars
        concat_expanded = tf.expand_dims(concat, -1)
        # Create a convolution + maxpool layer for each filter size
        pooled_outputs = []
        for i, filter_size in enumerate(filter_sizes):
            with tf.compat.v1.variable_scope(
                "conv-maxpool-%s" % filter_size, initializer=self.initializer
            ):
                # Convolution Layer
                if params["use_entity"] and params["use_context"]:
                    filter_shape = [filter_size, dim + params["entity_dim"] * 2, 1, num_filters]
                elif params["use_entity"]:
                    filter_shape = [filter_size, dim + params["entity_dim"], 1, num_filters]
                else:
                    filter_shape = [filter_size, dim, 1, num_filters]
                W = tf.compat.v1.get_variable(
                    name="W" + "_filter_size_" + str(filter_size),
                    shape=filter_shape,
                    dtype=tf.float32,
                    initializer=tf.contrib.layers.xavier_initializer(uniform=False),
                )
                b = tf.compat.v1.get_variable(
                    name="b" + "_filter_size_" + str(filter_size),
                    shape=[num_filters],
                    dtype=tf.float32,
                )
                # if W not in self.layer_params:
                #     self.layer_params.append(W)
                # if b not in self.layer_params:
                #     self.layer_params.append(b)
                conv = tf.nn.conv2d(
                    concat_expanded,
                    W,
                    strides=[1, 1, 1, 1],
                    padding="VALID",
                    name="conv",
                )
                # Apply nonlinearity
                h = tf.nn.relu(tf.nn.bias_add(conv, b), name="relu")
                # Maxpooling over the outputs
                pooled = tf.nn.max_pool2d(
                    h,
                    ksize=[1, params["max_title_length"] - filter_size + 1, 1, 1],
                    strides=[1, 1, 1, 1],
                    padding="VALID",
                    name="pool",
                )
                pooled_outputs.append(pooled)
        # Combine all the pooled features
        # self.num_filters_total is the kims cnn output dimension
        self.num_filters_total = num_filters * len(filter_sizes)
        h_pool = tf.concat(pooled_outputs, axis=-1)
        h_pool_flat = tf.reshape(h_pool, [-1, self.num_filters_total])
        return h_pool_flat

    def _l2_loss(self):
        """L2 penalty over embedding and layer parameter lists.

        NOTE(review): both lists appear to stay empty (appends are commented
        out elsewhere), so this currently returns 0 — confirm.
        """
        l2_loss = tf.zeros([1], dtype=tf.float32)
        # embedding_layer l2 loss
        for param in self.embed_params:
            l2_loss = tf.add(
                l2_loss, tf.multiply(self.params["embed_l2"], tf.nn.l2_loss(param))
            )
        params = self.layer_params
        for param in params:
            l2_loss = tf.add(
                l2_loss, tf.multiply(self.params["layer_l2"], tf.nn.l2_loss(param))
            )
        return l2_loss

    def _l1_loss(self):
        """L1 penalty over embedding and layer parameter lists."""
        l1_loss = tf.zeros([1], dtype=tf.float32)
        # embedding_layer l1 loss
        for param in self.embed_params:
            l1_loss = tf.add(
                l1_loss, tf.multiply(self.params["embed_l1"], tf.norm(param, ord=1))
            )
        params = self.layer_params
        for param in params:
            l1_loss = tf.add(
                l1_loss, tf.multiply(self.params["layer_l1"], tf.norm(param, ord=1))
            )
        return l1_loss

    def _cross_l_loss(self):
        """Construct L1-norm and L2-norm on cross network parameters for loss function.

        Returns:
            obj: Regular loss value on cross network parameters.
        """
        cross_l_loss = tf.zeros([1], dtype=tf.float32)
        for param in self.cross_params:
            cross_l_loss = tf.add(
                cross_l_loss, tf.multiply(self.params["cross_l1"], tf.norm(param, ord=1))
            )
            cross_l_loss = tf.add(
                cross_l_loss, tf.multiply(self.params["cross_l2"], tf.norm(param, ord=2))
            )
        return cross_l_loss

    def _build_train(self, params):
        """Add the loss (sigmoid cross-entropy + regularizers) and optimizer."""
        with tf.name_scope('train'):
            self.base_loss = tf.reduce_mean(
                tf.nn.sigmoid_cross_entropy_with_logits(
                    logits=self.logit,
                    labels=self.labels
                )
            )
            self.l2_loss = tf.Variable(tf.constant(
                0., dtype=tf.float32), trainable=False)
            # for param in self.reg_params:
            #     self.l2_loss = tf.add(
            #         self.l2_loss, params["l2_weight"] * tf.nn.l2_loss(param))
            if params["transform"]:
                # Pull in the kernel regularizers registered by the dense
                # transform layers.
                self.l2_loss = tf.add(
                    self.l2_loss, tf.compat.v1.losses.get_regularization_loss())
            # self.loss = self.base_loss + self.l2_loss
            # self.loss = self.base_loss
            # self.embed_regular_loss = tf.add_n(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))
            self.regular_loss = self._l2_loss() + self._l1_loss() + self._cross_l_loss()
            self.loss = tf.add(self.base_loss, self.regular_loss)
            # self.loss = tf.add(self.loss, self.embed_regular_loss)
            self.optimizer = tf.compat.v1.train.AdamOptimizer(
                FLAGS.learning_rate).minimize(self.loss)
def input_fn(filenames='', channel='training', batch_size=32, num_epochs=1, perform_shuffle=False):
    """Build a tf.data input pipeline from tab-separated text files.

    Each line is expected to hold: user_id \\t news_words \\t news_entities
    \\t label \\t click_words \\t click_entities, where the word/entity
    fields are bracketed, comma-separated index lists and the click-history
    fields are '-'-joined lists of such lists.

    Returns:
        (batch_features, batch_labels) tensors from a one-shot iterator.
    """
    # print('Parsing', filenames)
    def decode_txt(line):
        """Parse one raw text line into a feature dict + label (graph mode)."""
        # print("test line {}".format(line))
        max_click_history = FLAGS.max_click_history
        max_title_length = FLAGS.max_title_length
        line = tf.expand_dims(line, axis=0)
        # print("test more axis line {}".format(line))
        columns = tf.string_split(line, '\t')
        # print("test collumns {}".format(columns))
        user_id = tf.strings.to_number(columns.values[0], out_type=tf.int32)
        label = tf.strings.to_number(columns.values[3], out_type=tf.float32)
        ids = []
        # Columns 1-2: candidate news words/entities, e.g. "[1,2,...]" —
        # strip brackets, split on commas, reshape to the title length.
        for i in range(1, 3):
            raw1 = tf.string_split([columns.values[i]], '[').values
            raw2 = tf.string_split(raw1, ']').values
            sparse_modify_tensor = tf.string_split(raw2, ',')
            # sparse_modify_tensor = tf.string_split([columns.values[i]], ',')
            modify_tensor = tf.reshape(
                sparse_modify_tensor.values, [max_title_length])
            # ids.append(tf.squeeze(modify_tensor))
            ids.append(modify_tensor)
            ids[i-1] = tf.strings.to_number(ids[i-1], out_type=tf.int32)
        click_ids = []
        # Columns 4-5: click-history words/entities as '-'-joined lists.
        for i in range(4, 6):
            # raw1 = tf.string_split([columns.values[i]], '[').values
            # raw2 = tf.string_split(raw1, ']').values
            # sparse_modify_tensor = tf.string_split(raw2, '-')
            sparse_modify_tensor = tf.string_split([columns.values[i]], '-')

            def judge(sparse):
                """True if the click history field is the empty marker '""'."""
                empty = tf.constant('""')
                return tf.math.equal(sparse.values[0], empty)

            def org(max_click_history, max_title_length):
                """All-zero history for users without clicks."""
                return tf.zeros([max_click_history, max_title_length], tf.int32)

            def update(sparse, max_click_history, max_title_length):
                """Parse the bracketed lists, pad with zero rows, and crop to
                exactly (max_click_history, max_title_length)."""
                two_d_t = []
                update_indices = []
                t_list = []
                for i in range(max_title_length):
                    t_list.append('0')
                base_t = tf.constant([t_list])  # one padding row of "0"s
                raw1_t = tf.string_split(sparse.values, '[')
                raw2_t = tf.string_split(raw1_t.values, ']')
                string_t = tf.string_split(raw2_t.values, ',').values
                string_t = tf.reshape(string_t, [-1, max_title_length])
                # Append padding rows so slicing below never runs short.
                for j in range(max_click_history):
                    string_t = tf.concat([string_t, base_t], 0)
                return tf.strings.to_number(tf.slice(string_t, [0, 0], [max_click_history, max_title_length], 'debug_slice_zay'), tf.int32)

            click_ids.append(tf.cond(judge(sparse_modify_tensor), lambda: org(max_click_history, max_title_length),
                                     lambda: update(sparse_modify_tensor, max_click_history, max_title_length)))
        feat = {"user_id": user_id, "news_words": ids[0], "news_entities": ids[1],
                "click_words": click_ids[0], "click_entities": click_ids[1]}
        return feat, label

    dataset = tf.data.TextLineDataset(filenames)
    # dataset = dataset.skip(1)
    if perform_shuffle:
        dataset = dataset.shuffle(buffer_size=1024*1024)
    dataset = dataset.map(
        decode_txt, num_parallel_calls=tf.data.experimental.AUTOTUNE)
    if num_epochs > 1:
        dataset = dataset.repeat(num_epochs)
    # drop_remainder keeps every batch exactly batch_size.
    dataset = dataset.batch(
        batch_size, drop_remainder=True)  # Batch size to use
    # NOTE(review): cache() after repeat() caches the repeated stream —
    # usually cache is placed before repeat; confirm this is intentional.
    dataset = dataset.cache()
    dataset = dataset.prefetch(buffer_size=tf.data.experimental.AUTOTUNE)
    iterator = tf.compat.v1.data.make_one_shot_iterator(dataset)
    batch_features, batch_labels = iterator.get_next()
    return batch_features, batch_labels
def model_fn(features, labels, mode, params):
    """Build Model function f(x) for Estimator.

    Builds the DKN graph for PREDICT/EVAL/TRAIN modes and returns the
    corresponding tf.estimator.EstimatorSpec.
    """
    dkn_model = DKN(params, features, labels)
    pred = dkn_model.output  # sigmoid click probability
    predictions = {"prob": pred}
    export_outputs = {
        tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY: tf.estimator.export.PredictOutput(predictions)}
    # Provide an estimator spec for `ModeKeys.PREDICT`
    if mode == tf.estimator.ModeKeys.PREDICT:
        return tf.estimator.EstimatorSpec(
            mode=mode,
            predictions=predictions,
            export_outputs=export_outputs)
    # ------bulid loss------
    print("build train")
    dkn_model._build_train(params)
    loss = dkn_model.loss
    print("build loss")
    # Provide an estimator spec for `ModeKeys.EVAL`
    # eval_logging_hook = tf.estimator.LoggingTensorHook(
    #     {'eval_labels': labels, 'eval_pred': pred, 'eval_loss':loss}, every_n_iter=1)
    # eval_metric_ops = {
    #     "auc": tf.metrics.auc(labels, pred)
    # }
    auc_metric = tf.compat.v1.metrics.auc(labels, pred)
    eval_metric_ops = {
        # "auc": roc_auc_score(y_true=labels, y_score=pred)
        "auc": auc_metric
    }
    if mode == tf.estimator.ModeKeys.EVAL:
        return tf.estimator.EstimatorSpec(
            mode=mode,
            predictions=predictions,
            loss=loss,
            # evaluation_hooks=[eval_logging_hook],
            eval_metric_ops=eval_metric_ops)
    # NOTE(review): a second optimizer is created here even though
    # _build_train already built one — confirm this duplication is wanted.
    # optimizer = dkn_model.optimizer
    train_op = tf.train.AdamOptimizer(FLAGS.learning_rate).minimize(
        loss, global_step=tf.train.get_or_create_global_step())
    # Provide an estimator spec for `ModeKeys.TRAIN` modes
    # train_logging_hook = tf.estimator.LoggingTensorHook(
    #     {'train_labels': labels, 'train_pred': pred, 'train_loss':loss}, every_n_iter=1)
    print("build train")
    if mode == tf.estimator.ModeKeys.TRAIN:
        return tf.estimator.EstimatorSpec(
            mode=mode,
            predictions=predictions,
            loss=loss,
            # training_hooks=[train_logging_hook],
            train_op=train_op)
def main(_):
print("check input params: ")
print(sys.argv)
if FLAGS.dt_dir == "":
FLAGS.dt_dir = (date.today() + timedelta(-1)).strftime('%Y%m%d')
print('task_type ', FLAGS.task_type)
print('model_dir ', FLAGS.model_dir)
print('checkpoint_dir ', FLAGS.checkpointPath)
print('data_dir ', FLAGS.data_dir)
print('dt_dir ', FLAGS.dt_dir)
print('num_epochs ', FLAGS.num_epochs)
print('batch_size ', FLAGS.batch_size)
print('loss_type ', FLAGS.loss_type)
print('optimizer ', FLAGS.optimizer)
print('learning_rate ', FLAGS.learning_rate)
print('embed_l2 ', FLAGS.embed_l2)
print('layer_l2 ', FLAGS.layer_l2)
print('shuffle ', FLAGS.perform_shuffle)
print('use_context ', FLAGS.use_context)
# check train/test path
if FLAGS.data_dir == '':
train_data_dir = os.environ.get('SM_CHANNEL_TRAIN')
eval_data_dir = os.environ.get('SM_CHANNEL_EVAL')
else:
train_data_dir = os.path.join(FLAGS.data_dir, 'train')
eval_data_dir = os.path.join(FLAGS.data_dir, 'test')
print("train dir is {}".format(train_data_dir))
print("eval dir is {}".format(eval_data_dir))
# tr_files = os.path.join(train_data_dir, 'train.csv')
tr_files = glob.glob("%s/*.csv" % train_data_dir)
print("tr_files:", tr_files)
# va_files = os.path.join(eval_data_dir, 'validation.csv')
va_files = glob.glob("%s/*.csv" % eval_data_dir)
print("va_files:", va_files)
te_files = None
print("te_files:", te_files)
if FLAGS.clear_existing_model:
try:
shutil.rmtree(FLAGS.model_dir)
except Exception as e:
print(e, "at clear_existing_model")
else:
print("existing model cleaned at %s" % FLAGS.model_dir)
model_params = {
"transform": False,
"use_entity": FLAGS.use_entity,
"use_context": False,
"max_click_history": FLAGS.max_click_history,
"n_filters": FLAGS.n_filters,
"filter_sizes": FLAGS.filter_sizes,
# "SEED": FLAGS.SEED,
"KGE": FLAGS.KGE,
"entity_dim": FLAGS.entity_dim,
"word_dim": FLAGS.word_dim,
"max_title_length": FLAGS.max_title_length,
"l2_weight": FLAGS.l2_weight,
"layer_sizes": FLAGS.layer_sizes,
"loss_weight": FLAGS.loss_weight,
"dropout": FLAGS.dropout,
"activation": FLAGS.activation,
"attention_layer_sizes": FLAGS.attention_layer_sizes,
"attention_activation": FLAGS.attention_activation,
"embed_l1": FLAGS.embed_l1,
"layer_l1": FLAGS.layer_l1,
"embed_l2": FLAGS.embed_l2,
"layer_l2": FLAGS.layer_l2
}
config = tf.compat.v1.ConfigProto()
config.gpu_options.allow_growth = True
print("sagemaker mode building ...")
dkn_estimator = tf.estimator.Estimator(model_fn=model_fn, model_dir=FLAGS.checkpointPath,
params=model_params, config=tf.estimator.RunConfig().replace(session_config=config))
if FLAGS.task_type == 'train':
"""
train_spec = tf.estimator.TrainSpec(input_fn=lambda: input_fn(tr_files, channel='training', num_epochs=FLAGS.num_epochs, batch_size=FLAGS.batch_size), hooks=[bcast_hook])
eval_spec = tf.estimator.EvalSpec(input_fn=lambda: input_fn(va_files, channel='evaluation', num_epochs=1, batch_size=FLAGS.batch_size), steps=None, start_delay_secs=1000, throttle_secs=1200)
tf.estimator.train_and_evaluate(NCF, train_spec, eval_spec)
"""
i = 1
for _ in range(FLAGS.num_epochs):
print("start train")
train_result = dkn_estimator.train(input_fn=lambda: input_fn(
tr_files, num_epochs=1, batch_size=FLAGS.batch_size, perform_shuffle=FLAGS.perform_shuffle))
print("finish train, start eval")
eval_result = dkn_estimator.evaluate(input_fn=lambda: input_fn(
va_files, num_epochs=1, batch_size=FLAGS.batch_size))
print("sagemaker mode epoch %d test_auc is %.4f" %
(i, eval_result['auc']))
i = i + 1
elif FLAGS.task_type == 'eval':
dkn_estimator.evaluate(input_fn=lambda: input_fn(
va_files, num_epochs=1, batch_size=FLAGS.batch_size))
elif FLAGS.task_type == 'infer':
preds = dkn_estimator.predict(input_fn=lambda: input_fn(
te_files, num_epochs=1, batch_size=FLAGS.batch_size), predict_keys="prob")
if FLAGS.task_type == 'export' or FLAGS.task_type == 'train':
feature_spec = {
'click_words': tf.placeholder(
dtype=tf.int32, shape=[None, model_params["max_click_history"], model_params["max_title_length"]], name='click_words'),
'click_entities': tf.placeholder(
dtype=tf.int32, shape=[None, model_params["max_click_history"], model_params["max_title_length"]], name='click_entities'),
'news_words': tf.placeholder(
dtype=tf.int32, shape=[None, model_params["max_title_length"]], name='news_words'),
'news_entities': tf.placeholder(
dtype=tf.int32, shape=[None, model_params["max_title_length"]], name='news_entities')
}
serving_input_receiver_fn = tf.estimator.export.build_raw_serving_input_receiver_fn(
feature_spec)
dkn_estimator.export_savedmodel(FLAGS.servable_model_dir,
serving_input_receiver_fn)
print("finish saving model!")
if __name__ == "__main__":
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO)
tf.compat.v1.app.run() | [
"wlinamzn@amazon.com"
] | wlinamzn@amazon.com |
1fd67a737285b069a76fd9bef0c7396315d5e1bb | 0104add04cd6da515e2ccb2c27e44bc6693f9bcf | /Yurii_Khomych/l_6_software_engineering/behavioral/memento.py | 1a1a01f4eb315fe84397e2f083b58aafe802ff5c | [] | no_license | YuriiKhomych/ITEA-advanced | c96c3cf9b279caf62fefcd41faf543cee7534626 | 90bc47733c07b5b866aa3a14aa12a169f5df289c | refs/heads/master | 2022-12-09T20:38:23.607426 | 2019-12-22T17:30:59 | 2019-12-22T17:30:59 | 209,354,034 | 0 | 9 | null | 2022-12-08T03:04:04 | 2019-09-18T16:23:12 | Python | UTF-8 | Python | false | false | 1,118 | py |
class Memento:
def __init__(self, state):
self._state = state
def get_state(self):
return self._state
class Caretaker:
def __init__(self):
self._memento = None
def get_memento(self):
return self._memento
def set_memento(self, memento):
self._memento = memento
class Originator:
def __init__(self):
self._state = None
def set_state(self, state):
self._state = state
def get_state(self):
return self._state
def save_state(self):
return Memento(self._state)
def restore_state(self, memento):
self._state = memento.get_state()
originator = Originator()
caretaker = Caretaker()
originator.set_state('on')
print('Originator state:', originator.get_state()) # Originator state: on
caretaker.set_memento(originator.save_state())
originator.set_state('off')
print('Originator change state:', originator.get_state()) # Originator change state: off
originator.restore_state(caretaker.get_memento())
print('Originator restore state:', originator.get_state()) # Originator restore state: on
| [
"yuriykhomich@gmail.com"
] | yuriykhomich@gmail.com |
f6354a22cec4f6c58ab5d595e288572bd06699e6 | 673e829dda9583c8dd2ac8d958ba1dc304bffeaf | /data/multilingual/Latn.TZO/Serif_8/pdf_to_json_test_Latn.TZO_Serif_8.py | d7a4be6524bcc374d418903d1171a9ab302394e2 | [
"BSD-3-Clause"
] | permissive | antoinecarme/pdf_to_json_tests | 58bab9f6ba263531e69f793233ddc4d33b783b7e | d57a024fde862e698d916a1178f285883d7a3b2f | refs/heads/master | 2021-01-26T08:41:47.327804 | 2020-02-27T15:54:48 | 2020-02-27T15:54:48 | 243,359,934 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | import pdf_to_json as p2j
import json
url = "file:data/multilingual/Latn.TZO/Serif_8/udhr_Latn.TZO_Serif_8.pdf"
lConverter = p2j.pdf_to_json.pdf_to_json_converter()
lConverter.mImageHashOnly = True
lDict = lConverter.convert(url)
print(json.dumps(lDict, indent=4, ensure_ascii=False, sort_keys=True))
| [
"antoine.carme@laposte.net"
] | antoine.carme@laposte.net |
779652da6a5a24ad321543242382a5833a3019d0 | eb9f655206c43c12b497c667ba56a0d358b6bc3a | /python/testData/completion/typedParameterStringPath/a.after.py | 0ad95ae34e998bde802ce420572b05ffe171fd2c | [
"Apache-2.0"
] | permissive | JetBrains/intellij-community | 2ed226e200ecc17c037dcddd4a006de56cd43941 | 05dbd4575d01a213f3f4d69aa4968473f2536142 | refs/heads/master | 2023-09-03T17:06:37.560889 | 2023-09-03T11:51:00 | 2023-09-03T12:12:27 | 2,489,216 | 16,288 | 6,635 | Apache-2.0 | 2023-09-12T07:41:58 | 2011-09-30T13:33:05 | null | UTF-8 | Python | false | false | 648 | py | from typing import Any, overload, Union
from os import PathLike
def baz(akjlkgjdfsakglkd: PathLike) -> None:
pass
baz("foo")
def bar(akjlkgjdfsakglkd: Union[str, PathLike]) -> None:
pass
bar("foobar.txt")
@overload
def foo(akjlkgjdfsakglkd: str) -> None:
pass
@overload
def foo(akjlkgjdfsakglkd: PathLike) -> None:
pass
def foo(akjlkgjdfsakglkd):
pass
foo("foobar.txt")
def qux(akjlkgjdfsakglkd: Union[str, Any]) -> None:
pass
qux("foo")
@overload
def quux(akjlkgjdfsakglkd: Any) -> None:
pass
@overload
def quux(akjlkgjdfsakglkd: str) -> None:
pass
def quux(akjlkgjdfsakglkd):
pass
quux("foo") | [
"intellij-monorepo-bot-no-reply@jetbrains.com"
] | intellij-monorepo-bot-no-reply@jetbrains.com |
74534eb5988526a54d67aa7eef218c14628636d4 | ef7eabdd5f9573050ef11d8c68055ab6cdb5da44 | /topCoder/srms/500s/srm582/div2/semi_perfect_square.py | 66ebf650bdca212ea2858d5755eeba0b7624349b | [
"WTFPL"
] | permissive | gauravsingh58/algo | cdbf68e28019ba7c3e4832e373d32c71902c9c0d | 397859a53429e7a585e5f6964ad24146c6261326 | refs/heads/master | 2022-12-28T01:08:32.333111 | 2020-09-30T19:37:53 | 2020-09-30T19:37:53 | 300,037,652 | 1 | 1 | WTFPL | 2020-10-15T09:26:32 | 2020-09-30T19:29:29 | Java | UTF-8 | Python | false | false | 244 | py | class SemiPerfectSquare:
def check(self, N):
l, u = int(N**.33), int(N**.5)
for b in xrange(l, u+1):
for a in xrange(1, b):
if a * b * b == N:
return 'Yes'
return 'No'
| [
"elmas.ferhat@gmail.com"
] | elmas.ferhat@gmail.com |
02fe0020965f0e68f5076b7516d6a72a049849a9 | 10fbe5526e5f0b8588b65f70f088cd86b6e9afbe | /tyfslutb/migrations/0015_auto_20150218_1627.py | 97838971133cd35a5966606ccc5466e072829c6d | [] | no_license | MarkusH/django-migrations-benchmark | eb4b2312bb30a5a5d2abf25e95eca8f714162056 | e2bd24755389668b34b87d254ec8ac63725dc56e | refs/heads/master | 2016-09-05T15:36:45.250134 | 2015-03-31T23:44:28 | 2015-03-31T23:44:28 | 31,168,231 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 950 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ygnakzgjxu', '0008_auto_20150218_1626'),
('tyfslutb', '0014_auto_20150218_1626'),
]
operations = [
migrations.CreateModel(
name='Bmzhg',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nlcvjjxtmx', models.IntegerField(default=0)),
],
),
migrations.AddField(
model_name='qcwbo',
name='helryvwow',
field=models.OneToOneField(null=True, related_name='+', to='ygnakzgjxu.Xdwhlpqgw'),
),
migrations.AddField(
model_name='ynbpgqn',
name='dxemnqzz',
field=models.CharField(default='', max_length=208),
),
]
| [
"info@markusholtermann.eu"
] | info@markusholtermann.eu |
886d06ed7a374a174b7fe278b07f3f68764a4b7f | d046fd4ac8e52ed8054199765893f7e1a71302f2 | /master/bt5/slapos_cloud/SkinTemplateItem/portal_skins/slapos_cloud/Instance_tryToGarbageCollectNonAllocatedRootTree.py | 905e534dc8829ee1bea07b4d4019168e67e6e6ac | [] | no_license | jakop345/slapos.core | 7538418056be6541e9ee7a70d2d6b694e03daafc | 410dfb506b7ec17745365d573e7401f217b74ed4 | refs/heads/master | 2020-06-18T21:43:39.012812 | 2016-11-24T11:06:49 | 2016-11-24T11:06:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,814 | py | from zExceptions import Unauthorized
if REQUEST is not None:
raise Unauthorized
instance = context
portal = context.getPortalObject()
if instance.getValidationState() != 'validated' \
or instance.getSlapState() not in ('start_requested', 'stop_requested') \
or instance.getAggregateValue(portal_type='Computer Partition') is not None:
return
latest_comment = portal.portal_workflow.getInfoFor(instance, 'comment', wf_id='edit_workflow')
if latest_comment != 'Allocation failed: no free Computer Partition':
# No nothing if allocation alarm didn't run on it
return
latest_edit_time = portal.portal_workflow.getInfoFor(instance, 'time', wf_id='edit_workflow')
if (int(DateTime()) - int(latest_edit_time)) < 259200:
# Allow 3 days gap betweeb latest allocation try and deletion
return
# Only destroy if the instance is the only one in the tree
hosting_subscription = instance.getSpecialiseValue("Hosting Subscription")
if (hosting_subscription.getPredecessor() != instance.getRelativeUrl()):
return
if (len(hosting_subscription.getPredecessorList()) != 1):
return
instance_list = portal.portal_catalog(
portal_type=["Software Instance", "Slave Instance"],
default_specialise_uid=hosting_subscription.getUid(),
limit=2)
if len(instance_list) != 1:
return
# OK, destroy hosting subscription
hosting_subscription.requestDestroy(
software_release=hosting_subscription.getUrlString(),
software_title=hosting_subscription.getTitle(),
software_type=hosting_subscription.getSourceReference(),
instance_xml=hosting_subscription.getTextContent(),
sla_xml=hosting_subscription.getSlaXml(),
shared=hosting_subscription.isRootSlave(),
state='destroyed',
comment="Garbage collect %s not allocated for more than 3 days" % instance.getRelativeUrl(),
)
hosting_subscription.archive()
| [
"alain.takoudjou@nexedi.com"
] | alain.takoudjou@nexedi.com |
479972621f87be75415d882e77a8e70f347f8405 | 60696fa455101fbd2bef5efc19910d2fc856c324 | /libqi-python/qi/__init__.py | 2140178b74a899ab8af5849eac331e9023a047a3 | [] | no_license | yumilceh/libqi-python | 263baca432f118f15057c8a2b43e0321bb5609e1 | 900643316c272fcdb9f5de76111125fb05927108 | refs/heads/master | 2020-04-10T22:52:11.210697 | 2018-12-11T13:50:30 | 2018-12-11T13:50:30 | 161,334,566 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,444 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" QiMessaging Python bindings """
from __future__ import absolute_import
import os
import sys
import ctypes
import platform
import traceback
PATH_LIBQI = os.path.dirname(os.path.realpath(__file__))
# Add LibQi Python Folder to the Path
sys.path.append(PATH_LIBQI)
# Set Path and Load Dependancies for the Platform
if "aldebaran" in platform.platform():
path_robot_lib = os.path.join(PATH_LIBQI, "robot")
sys.path.append(path_robot_lib)
current_lib_path = os.environ.get("LD_LIBRARY_PATH", "")
if current_lib_path:
current_lib_path += ":"
current_lib_path += ":" + path_robot_lib
os.environ["LD_LIBRARY_PATH"] = current_lib_path
robot_dependencies = [
"libc.so.6",
"libstdc++.so.6",
"ld-linux.so.2",
"librt.so.1",
"libm.so.6",
"libdl.so.2",
"libgcc_s.so.1",
"libssl.so.1.0.0",
"libpthread.so.0",
"libsystemd.so.0",
"libcrypto.so.1.0.0",
"libpython2.7.so.1.0",
"libboost_chrono.so.1.59.0",
"libboost_date_time.so.1.59.0",
"libboost_filesystem.so.1.59.0",
"libboost_locale.so.1.59.0",
"libboost_program_options.so.1.59.0",
"libboost_python.so.1.59.0",
"libboost_regex.so.1.59.0",
"libboost_system.so.1.59.0",
"libboost_thread.so.1.59.0",
"libqi.so",
"libqipython.so",
]
for dependency in robot_dependencies:
library_path = os.path.join(PATH_LIBQI, "robot", dependency)
try:
ctypes.cdll.LoadLibrary(library_path)
except:
print("Unable to load %s\n%s" % (library_path, traceback.format_exc()))
elif sys.platform.startswith("linux"):
path_linux_lib = os.path.join(PATH_LIBQI, "linux")
sys.path.append(path_linux_lib)
current_lib_path = os.environ.get("LD_LIBRARY_PATH", "")
if current_lib_path:
current_lib_path += ":"
current_lib_path += ":" + path_linux_lib
os.environ["LD_LIBRARY_PATH"] = current_lib_path
linux_dependencies = [
"libicudata.so",
"libicuuc.so",
"libicui18n.so",
"libcrypto.so",
"libssl.so",
"libpython2.7.so",
"libboost_system.so",
"libboost_thread.so",
"libboost_python.so",
"libboost_chrono.so",
"libboost_program_options.so",
"libboost_filesystem.so",
"libboost_regex.so",
"libboost_locale.so",
"libqi.so",
"libqipython.so",
]
for dependency in linux_dependencies:
library_path = os.path.join(PATH_LIBQI, "linux", dependency)
try:
ctypes.cdll.LoadLibrary(library_path)
except:
print("Unable to load %s\n%s" % (library_path, traceback.format_exc()))
elif sys.platform.startswith("darwin"):
path_mac_lib = os.path.join(PATH_LIBQI, "mac")
path_mac_qi = os.path.join(PATH_LIBQI, "mac", "python2.7", "site-packages")
sys.path.append(path_mac_lib)
sys.path.append(path_mac_qi)
current_lib_path = os.environ.get("DYLD_LIBRARY_PATH", "")
if current_lib_path:
current_lib_path += ":"
current_lib_path += ":" + path_mac_lib
os.environ["DYLD_LIBRARY_PATH"] = current_lib_path
mac_dependencies = [
"libcrypto.1.0.0.dylib",
"libssl.1.0.0.dylib",
"libboost_system.dylib",
"libboost_python.dylib",
"libboost_date_time.dylib",
"libboost_chrono.dylib",
"libboost_filesystem.dylib",
"libboost_regex.dylib",
"libboost_program_options.dylib",
"libboost_locale.dylib",
"libboost_thread.dylib",
"libqi.dylib",
"libqipython.dylib",
"python2.7/site-packages/_qi.so",
]
for dependency in mac_dependencies:
library_path = os.path.join(PATH_LIBQI, "mac", dependency)
try:
ctypes.cdll.LoadLibrary(library_path)
except:
print("Unable to load %s\n%s" % (library_path, traceback.format_exc()))
elif sys.platform.startswith("win"):
sys.path.append(os.path.join(PATH_LIBQI, "win"))
ctypes.windll.kernel32.SetDllDirectoryA(os.path.join(PATH_LIBQI, "win"))
# Import LibQi Functionnalities
from _qi import Application as _Application
from _qi import ApplicationSession as _ApplicationSession
from _qi import (FutureState, FutureTimeout,
Future, futureBarrier, Promise,
Property, Session, Signal,
async, PeriodicTask)
from _qi import (clockNow, steadyClockNow, systemClockNow)
from _qi import (module, listModules)
from . import path
from ._type import (Void, Bool, Int8, UInt8, Int16, UInt16,
Int32, UInt32, Int64, UInt64,
Float, Double, String, List,
Map, Struct, Object, Dynamic,
Buffer, AnyArguments, typeof, _isinstance)
from ._binder import bind, nobind, singleThreaded, multiThreaded
from .logging import fatal, error, warning, info, verbose, Logger
from .logging import getLogger, logFatal, logError, logWarning, logInfo, logVerbose, logDebug # Deprecated
from .translator import defaultTranslator, tr, Translator
from .version import version
# Set the Version Number
__version__ = version
def PromiseNoop(*args, **kwargs):
""" No operation function deprecated:: 2.5 """
pass
# Rename isinstance here. (isinstance should not be used in this file)
isinstance = _isinstance
_app = None
# We want to stop all thread before python start destroying module and the like.
# (this avoid callback calling python while it's destroying)
def _stopApplication():
""" Stop the Application """
global _app
if _app is not None:
_app.stop()
del _app
_app = None
# Application is a singleton, it should live till the end
# of the program because it owns eventloops
def Application(args=None, raw=False, autoExit=True, url=None):
""" Instanciate and return the App """
global _app
if _app is None:
if args is None:
args = sys.argv
if url is None:
url = "tcp://127.0.0.1:9559"
if not args:
args = ['python']
elif args[0] == '':
args[0] = 'python'
if raw:
_app = _Application(args)
else:
_app = _ApplicationSession(args, autoExit, url)
else:
raise Exception("Application was already initialized")
return _app
ApplicationSession = Application
__all__ = [
"FutureState",
"FutureTimeout",
"Future",
"futureBarrier",
"Promise",
"PromiseNoop",
"Property",
"Session",
"Signal",
"createObject",
"registerObjectFactory",
"async",
"Void", "Bool", "Int8", "UInt8", "Int16", "UInt16", "Int32", "UInt32", "Int64", "UInt64",
"Float", "Double", "String", "List", "Map", "Struct", "Object", "Dynamic", "Buffer", "AnyArguments",
"typeof", "isinstance",
"bind", "nobind", "singleThreaded", "multiThreaded",
"fatal", "error", "warning", "info", "verbose",
"getLogger", "logFatal", "logError", "logWarning", "logInfo", "logVerbose", "logDebug", # Deprecated
"Logger", "defaultTranslator", "tr", "Translator",
"module", "listModules",
"clockNow", "steadyClockNow", "systemClockNow"
]
# Register _stopApplication as a function to be executed at termination
import atexit
atexit.register(_stopApplication)
del atexit
| [
"you@example.com"
] | you@example.com |
5ed922dea4cd1d1bbea38444f453f27061c5c7c7 | b4537dfc431cba7ff40e0692ab6c223394ae4d69 | /151-replace-adjacent-colors.py | 4c56543292a015d62c66c55ae5fe43c2df89044e | [] | no_license | ericgarig/daily-coding-problem | fdc04f5bf823933100686c4129575f5ef3746676 | d3e1a6ab102c7af1eea4ab6b1282e4d44e5b80ba | refs/heads/master | 2020-03-31T09:59:11.767162 | 2019-12-20T21:44:43 | 2019-12-20T21:44:43 | 152,117,524 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,728 | py | """
Daily Coding Problem - 2019-03-08.
Given a 2-D matrix representing an image, a location of a pixel in the
screen and a color C, replace the color of the given pixel and all
adjacent same colored pixels with C.
For example, given the following matrix, and location pixel of (2, 2),
and 'G' for green:
B B W
W W W
W W W
B B B
Becomes
B B G
G G G
G G G
B B B
"""
def solve(arr, pos, color):
"""Given a 2D array, position, and color, change all adjacent colors."""
i = 0
same_color = [pos]
while i < len(same_color):
for j in get_neighbors(arr, same_color[i], arr[pos[0]][pos[1]]):
if j not in same_color:
same_color.append(j)
i += 1
for i in same_color:
arr[i[0]][i[1]] = color
return arr
def get_neighbors(arr, pos, color):
"""Return neighbors with the same color."""
neighbors = []
try:
if arr[pos[0] + 1][pos[1]] == color:
neighbors.append((pos[0] + 1, pos[1]))
except IndexError:
pass
try:
if arr[pos[0] - 1][pos[1]] == color:
neighbors.append((pos[0] - 1, pos[1]))
except IndexError:
pass
try:
if arr[pos[0] + 1][pos[1]] == color:
neighbors.append((pos[0] + 1, pos[1]))
except IndexError:
pass
try:
if arr[pos[0]][pos[1] - 1] == color:
neighbors.append((pos[0], pos[1] - 1))
except IndexError:
pass
return neighbors
input_matrix = [
["B", "B", "W"],
["W", "W", "W"],
["W", "W", "W"],
["B", "B", "B"],
]
output_matrx = [
["B", "B", "G"],
["G", "G", "G"],
["G", "G", "G"],
["B", "B", "B"],
]
assert (solve(input_matrix, (2, 2), "G")) == output_matrx
| [
"erik.shagdar@gmail.com"
] | erik.shagdar@gmail.com |
bbdafee77be3a4794d79a6a57bfebfa1c75df080 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_119/ch29_2019_03_07_18_47_06_270339.py | 58f6e3df7e0d001c552cf93d132e7396cb7d4717 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 116 | py | def calcula_aumento(salario):
if salario<=1250:
return salario*0,15
elif:
return salario*0,1 | [
"you@example.com"
] | you@example.com |
ae7baf3d9ff93e6bbe82475c98939da0532e619b | b83ac23819fd7ba998563f2ad870405bdd07cc2b | /gym_idsgame/agents/training_agents/policy_gradient/reinforce/reinforce.py | a0eeaa526f000f0883b7b0c8df4415fb0b66e5d6 | [
"MIT"
] | permissive | Limmen/gym-idsgame | 699abd2894bce15108f1606f5fb71f612dd7ba03 | d10830fef55308d383c98b41b34688a7fceae357 | refs/heads/master | 2023-09-01T17:32:16.768138 | 2023-08-22T12:00:53 | 2023-08-22T12:00:53 | 247,794,752 | 49 | 12 | MIT | 2021-04-21T07:50:06 | 2020-03-16T19:00:27 | Python | UTF-8 | Python | false | false | 34,673 | py | """
An agent for the IDSGameEnv that implements the REINFORCE Policy Gradient algorithm.
"""
from typing import Union, List
import numpy as np
import time
import tqdm
import torch
from torch.distributions import Categorical
from torch.utils.tensorboard import SummaryWriter
from gym_idsgame.envs.rendering.video.idsgame_monitor import IdsGameMonitor
from gym_idsgame.envs.idsgame_env import IdsGameEnv
from gym_idsgame.agents.dao.experiment_result import ExperimentResult
from gym_idsgame.envs.constants import constants
from gym_idsgame.agents.training_agents.models.fnn_w_softmax import FNNwithSoftmax
from gym_idsgame.agents.training_agents.models.lstm_w_softmax import LSTMwithSoftmax
from gym_idsgame.agents.training_agents.policy_gradient.pg_agent import PolicyGradientAgent
from gym_idsgame.agents.training_agents.policy_gradient.pg_agent_config import PolicyGradientAgentConfig
class ReinforceAgent(PolicyGradientAgent):
"""
An implementation of the REINFORCE Policy Gradient algorithm
"""
def __init__(self, env:IdsGameEnv, config: PolicyGradientAgentConfig):
"""
Initialize environment and hyperparameters
:param config: the configuration
"""
super(ReinforceAgent, self).__init__(env, config)
self.attacker_policy_network = None
self.defender_policy_network = None
self.loss_fn = None
self.attacker_optimizer = None
self.defender_optimizer = None
self.attacker_lr_decay = None
self.defender_lr_decay = None
self.tensorboard_writer = SummaryWriter(self.config.tensorboard_dir)
self.initialize_models()
self.tensorboard_writer.add_hparams(self.config.hparams_dict(), {})
self.machine_eps = np.finfo(np.float32).eps.item()
self.env.idsgame_config.save_trajectories = False
self.env.idsgame_config.save_attack_stats = False
def initialize_models(self) -> None:
"""
Initialize models
:return: None
"""
# Initialize models
if self.config.lstm_network:
self.attacker_policy_network = LSTMwithSoftmax(input_dim=self.config.input_dim_attacker,
output_dim=self.config.output_dim_attacker,
hidden_dim=self.config.hidden_dim,
num_lstm_layers=self.config.num_lstm_layers,
num_hidden_linear_layers=self.config.num_hidden_layers,
hidden_activation="ReLU",
seq_length=self.config.lstm_seq_length)
self.defender_policy_network = LSTMwithSoftmax(input_dim=self.config.input_dim_defender,
output_dim=self.config.output_dim_defender,
hidden_dim=self.config.hidden_dim,
num_lstm_layers=self.config.num_lstm_layers,
num_hidden_linear_layers=self.config.num_hidden_layers,
hidden_activation="ReLU",
seq_length=self.config.lstm_seq_length)
else:
self.attacker_policy_network = FNNwithSoftmax(input_dim=self.config.input_dim_attacker,
output_dim=self.config.output_dim_attacker,
hidden_dim=self.config.hidden_dim,
num_hidden_layers=self.config.num_hidden_layers,
hidden_activation=self.config.hidden_activation)
self.defender_policy_network = FNNwithSoftmax(input_dim=self.config.input_dim_defender,
output_dim=self.config.output_dim_defender,
hidden_dim=self.config.hidden_dim,
num_hidden_layers=self.config.num_hidden_layers,
hidden_activation=self.config.hidden_activation)
# Specify device
if torch.cuda.is_available() and self.config.gpu:
device = torch.device("cuda:" + str(self.config.gpu_id))
self.config.logger.info("Running on the GPU")
else:
device = torch.device("cpu")
self.config.logger.info("Running on the CPU")
self.attacker_policy_network.to(device)
self.defender_policy_network.to(device)
# Define Optimizer. The call to model.parameters() in the optimizer constructor will contain the learnable
# parameters of the layers in the model
if self.config.optimizer == "Adam":
self.attacker_optimizer = torch.optim.Adam(self.attacker_policy_network.parameters(), lr=self.config.alpha_attacker)
self.defender_optimizer = torch.optim.Adam(self.defender_policy_network.parameters(), lr=self.config.alpha_defender)
elif self.config.optimizer == "SGD":
self.attacker_optimizer = torch.optim.SGD(self.attacker_policy_network.parameters(), lr=self.config.alpha_attacker)
self.defender_optimizer = torch.optim.SGD(self.defender_policy_network.parameters(), lr=self.config.alpha_defender)
else:
raise ValueError("Optimizer not recognized")
# LR decay
if self.config.lr_exp_decay:
self.attacker_lr_decay = torch.optim.lr_scheduler.ExponentialLR(optimizer=self.attacker_optimizer,
gamma=self.config.lr_decay_rate)
self.defender_lr_decay = torch.optim.lr_scheduler.ExponentialLR(optimizer=self.attacker_optimizer,
gamma=self.config.lr_decay_rate)
def training_step(self, saved_rewards : List[List[float]], saved_log_probs : List[List[torch.Tensor]],
attacker=True) -> torch.Tensor:
"""
Performs a training step of the Deep-Q-learning algorithm (implemented in PyTorch)
:param saved_rewards list of rewards encountered in the latest episode trajectory
:param saved_log_probs list of log-action probabilities (log p(a|s)) encountered in the latest episode trajectory
:return: loss
"""
#print("saved rewards: {}".format(sum(sum(x) for x in saved_rewards)))
policy_loss = []
num_batches = len(saved_rewards)
for batch in range(num_batches):
R = 0
returns = []
# Create discounted returns. When episode is finished we can go back and compute the observed cumulative
# discounted reward by using the observed rewards
for r in saved_rewards[batch][::-1]:
R = r + self.config.gamma * R
returns.insert(0, R)
num_rewards = len(returns)
# convert list to torch tensor
returns = torch.tensor(returns)
# normalize
std = returns.std()
if num_rewards < 2:
std = 0
returns = (returns - returns.mean()) / (std + self.machine_eps)
# Compute PG "loss" which in reality is the expected reward, which we want to maximize with gradient ascent
for log_prob, R in zip(saved_log_probs[batch], returns):
# negative log prob since we are doing gradient descent (not ascent)
policy_loss.append(-log_prob * R)
# Compute gradient and update models
if attacker:
# reset gradients
self.attacker_optimizer.zero_grad()
# expected loss over the batch
policy_loss_total = torch.stack(policy_loss).sum()
policy_loss = policy_loss_total/num_batches
# perform backprop
policy_loss.backward()
# maybe clip gradient
if self.config.clip_gradient:
torch.nn.utils.clip_grad_norm_(self.attacker_policy_network.parameters(), 1)
# gradient descent step
self.attacker_optimizer.step()
else:
# reset gradients
self.defender_optimizer.zero_grad()
# expected loss over the batch
policy_loss_total = torch.stack(policy_loss).sum()
policy_loss = policy_loss_total/num_batches
# perform backprop
policy_loss.backward()
# maybe clip gradient
if self.config.clip_gradient:
torch.nn.utils.clip_grad_norm_(self.defender_policy_network.parameters(), 1)
# gradient descent step
self.defender_optimizer.step()
return policy_loss
def get_action(self, state: np.ndarray, attacker : bool = True, legal_actions : List = None,
non_legal_actions : List = None) -> Union[int, torch.Tensor]:
"""
Samples an action from the policy network
:param state: the state to sample an action for
:param attacker: boolean flag whether running in attacker mode (if false assume defender)
:param legal_actions: list of allowed actions
:param non_legal_actions: list of disallowed actions
:return: The sampled action id
"""
if self.config.lstm_network:
state = torch.from_numpy(state.reshape(1, state.shape[0], state.shape[1]*state.shape[2])).float()
else:
state = torch.from_numpy(state.flatten()).float()
# Move to GPU if using GPU
if torch.cuda.is_available() and self.config.gpu:
device = torch.device("cuda:" + str(self.config.gpu_id))
state = state.to(device)
# Calculate legal actions
if attacker:
actions = list(range(self.env.num_attack_actions))
if not self.env.local_view_features() or (legal_actions is None or non_legal_actions is None):
legal_actions = list(filter(lambda action: self.env.is_attack_legal(action), actions))
non_legal_actions = list(filter(lambda action: not self.env.is_attack_legal(action), actions))
else:
actions = list(range(self.env.num_defense_actions))
legal_actions = list(filter(lambda action: self.env.is_defense_legal(action), actions))
non_legal_actions = list(filter(lambda action: not self.env.is_defense_legal(action), actions))
# Forward pass using the current policy network to predict P(a|s)
if attacker:
action_probs = self.attacker_policy_network(state).squeeze()
# Set probability of non-legal actions to 0
action_probs_1 = action_probs.clone()
if len(legal_actions) > 0 and len(non_legal_actions) < self.env.num_attack_actions:
action_probs_1[non_legal_actions] = 0
else:
action_probs = self.defender_policy_network(state).squeeze()
# Set probability of non-legal actions to 0
action_probs_1 = action_probs.clone()
# print("state shape:{}".format(state.shape))
# print("action shape:{}".format(action_probs_1.shape))
if len(legal_actions) > 0 and len(non_legal_actions) < self.env.num_defense_actions:
action_probs_1[non_legal_actions] = 0
# Use torch.distributions package to create a parameterizable probability distribution of the learned policy
# PG uses a trick to turn the gradient into a stochastic gradient which we can sample from in order to
# approximate the true gradient (which we can’t compute directly). It can be seen as an alternative to the
# reparameterization trick
policy_dist = Categorical(action_probs_1)
# Sample an action from the probability distribution
try:
action = policy_dist.sample()
except Exception as e:
print("Nan values in distribution, consider using a lower learnign rate or gradient clipping")
print("legal actions: {}".format(legal_actions))
print("non_legal actions: {}".format(non_legal_actions))
print("action_probs: {}".format(action_probs))
print("action_probs_1: {}".format(action_probs_1))
print("state: {}".format(state))
print("policy_dist: {}".format(policy_dist))
action = torch.tensor(0).type(torch.LongTensor)
# log_prob returns the log of the probability density/mass function evaluated at value.
# save the log_prob as it will use later on for computing the policy gradient
# policy gradient theorem says that the stochastic gradient of the expected return of the current policy is
# the log gradient of the policy times the expected return, therefore we save the log of the policy distribution
# now and use it later to compute the gradient once the episode has finished.
log_prob = policy_dist.log_prob(action)
return action.item(), log_prob
def train(self) -> ExperimentResult:
    """
    Runs the REINFORCE algorithm.

    Collects ``batch_size`` complete episodes per iteration (Monte-Carlo
    rollouts of both agents as configured), then performs one batch policy
    gradient update per configured agent, decays the learning rate, logs
    metrics/tensorboard histograms, periodically evaluates and checkpoints.

    :return: Experiment result (the accumulated training metrics)
    """
    self.config.logger.info("Starting Training")
    self.config.logger.info(self.config.to_str())
    if len(self.train_result.avg_episode_steps) > 0:
        self.config.logger.warning("starting training with non-empty result object")
    done = False
    obs = self.env.reset(update_stats=False)
    attacker_obs, defender_obs = obs
    # Initial state estimates for both agents (empty history as starting state)
    attacker_state = self.update_state(attacker_obs=attacker_obs, defender_obs=defender_obs, state=[],
                                       attacker=True)
    defender_state = self.update_state(defender_obs=defender_obs, attacker_obs=attacker_obs, state=[],
                                       attacker=False)

    # Tracking metrics (reset every <train_log_frequency> iterations)
    episode_attacker_rewards = []
    episode_defender_rewards = []
    episode_steps = []
    episode_avg_attacker_loss = []
    episode_avg_defender_loss = []

    # Logging
    self.outer_train.set_description_str("[Train] epsilon:{:.2f},avg_a_R:{:.2f},avg_d_R:{:.2f},"
                                         "avg_t:{:.2f},avg_h:{:.2f},acc_A_R:{:.2f}," \
                                         "acc_D_R:{:.2f}".format(self.config.epsilon, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0))

    # Per-batch buffers: one inner list of log-probs/rewards per episode
    saved_attacker_log_probs_batch = []
    saved_attacker_rewards_batch = []
    saved_defender_log_probs_batch = []
    saved_defender_rewards_batch = []

    # Training
    # NOTE(review): `iter` shadows the builtin; kept as-is to preserve the code.
    for iter in range(self.config.num_episodes):

        # Batch: roll out batch_size full episodes before each gradient step
        for episode in range(self.config.batch_size):
            episode_attacker_reward = 0
            episode_defender_reward = 0
            episode_step = 0
            # NOTE(review): these loss accumulators are re-zeroed every episode
            # but only incremented after the batch loop below, so effectively
            # only the last reset matters -- looks intentional but confirm.
            episode_attacker_loss = 0.0
            episode_defender_loss = 0.0
            saved_attacker_log_probs = []
            saved_attacker_rewards = []
            saved_defender_log_probs = []
            saved_defender_rewards = []
            while not done:
                if self.config.render:
                    self.env.render(mode="human")

                if not self.config.attacker and not self.config.defender:
                    raise AssertionError("Must specify whether training an attacker agent or defender agent")

                # Default initialization
                attacker_action = 0
                defender_action = 0

                # Get attacker and defender actions
                if self.config.attacker:
                    legal_actions = None
                    illegal_actions = None
                    # With local-view features, actions are indexed in the local
                    # frame and must be masked/converted accordingly.
                    if self.env.local_view_features():
                        legal_actions, illegal_actions = self.get_legal_attacker_actions(attacker_obs)
                    attacker_action, attacker_log_prob = self.get_action(attacker_state, attacker=True,
                                                                         legal_actions=legal_actions,
                                                                         non_legal_actions=illegal_actions)
                    if self.env.local_view_features():
                        attacker_action = PolicyGradientAgent.convert_local_attacker_action_to_global(attacker_action, attacker_obs)
                    # Keep the log-prob; it is the quantity the policy gradient needs
                    saved_attacker_log_probs.append(attacker_log_prob)

                if self.config.defender:
                    defender_action, defender_log_prob= self.get_action(defender_state, attacker=False)
                    saved_defender_log_probs.append(defender_log_prob)

                action = (attacker_action, defender_action)

                # Take a step in the environment
                obs_prime, reward, done, _ = self.env.step(action)

                # Update metrics
                attacker_reward, defender_reward = reward
                obs_prime_attacker, obs_prime_defender = obs_prime
                episode_attacker_reward += attacker_reward
                saved_attacker_rewards.append(attacker_reward)
                episode_defender_reward += defender_reward
                saved_defender_rewards.append(defender_reward)
                episode_step += 1

                # Move to the next state
                obs = obs_prime
                attacker_obs = obs_prime_attacker
                defender_obs = obs_prime_defender
                attacker_state = self.update_state(attacker_obs=attacker_obs, defender_obs=defender_obs,
                                                   state=attacker_state, attacker=True)
                defender_state = self.update_state(defender_obs=defender_obs, attacker_obs=attacker_obs,
                                                   state=defender_state, attacker=False)

            # Render final frame
            if self.config.render:
                self.env.render(mode="human")

            # Accumulate batch (whole-episode trajectories)
            saved_attacker_log_probs_batch.append(saved_attacker_log_probs)
            saved_attacker_rewards_batch.append(saved_attacker_rewards)
            saved_defender_log_probs_batch.append(saved_defender_log_probs)
            saved_defender_rewards_batch.append(saved_defender_rewards)

            # Record episode metrics
            self.num_train_games += 1
            self.num_train_games_total += 1
            if self.env.state.hacked:
                self.num_train_hacks += 1
                self.num_train_hacks_total += 1
            episode_attacker_rewards.append(episode_attacker_reward)
            episode_defender_rewards.append(episode_defender_reward)
            episode_steps.append(episode_step)

            # Reset environment for the next episode and update game stats
            done = False
            attacker_obs, defender_obs = self.env.reset(update_stats=True)
            attacker_state = self.update_state(attacker_obs=attacker_obs, defender_obs=defender_obs, state=[],
                                               attacker=True)
            defender_state = self.update_state(defender_obs=defender_obs, attacker_obs=attacker_obs, state=[],
                                               attacker=False)

        # Perform Batch Policy Gradient updates (one step per configured agent)
        if self.config.attacker:
            loss = self.training_step(saved_attacker_rewards_batch, saved_attacker_log_probs_batch, attacker=True)
            episode_attacker_loss += loss.item()
        if self.config.defender:
            loss = self.training_step(saved_defender_rewards_batch, saved_defender_log_probs_batch, attacker=False)
            episode_defender_loss += loss.item()

        # Average the loss over the batch (guard against batch_size == 0)
        if self.config.batch_size > 0:
            if self.config.attacker:
                episode_avg_attacker_loss.append(episode_attacker_loss / self.config.batch_size)
            if self.config.defender:
                episode_avg_defender_loss.append(episode_defender_loss / self.config.batch_size)
        else:
            if self.config.attacker:
                episode_avg_attacker_loss.append(episode_attacker_loss)
            if self.config.defender:
                episode_avg_defender_loss.append(episode_defender_loss)

        # Reset batch
        saved_attacker_log_probs_batch = []
        saved_attacker_rewards_batch = []
        saved_defender_log_probs_batch = []
        saved_defender_rewards_batch = []

        # Decay LR after every iteration
        lr_attacker = self.config.alpha_attacker
        if self.config.lr_exp_decay:
            self.attacker_lr_decay.step()
            lr_attacker = self.attacker_lr_decay.get_lr()[0]

        # Decay LR after every iteration
        lr_defender = self.config.alpha_defender
        if self.config.lr_exp_decay:
            self.defender_lr_decay.step()
            lr_defender = self.defender_lr_decay.get_lr()[0]

        # Log average metrics every <self.config.train_log_frequency> iterations
        if iter % self.config.train_log_frequency == 0:
            if self.num_train_games > 0 and self.num_train_games_total > 0:
                self.train_hack_probability = self.num_train_hacks / self.num_train_games
                self.train_cumulative_hack_probability = self.num_train_hacks_total / self.num_train_games_total
            else:
                self.train_hack_probability = 0.0
                self.train_cumulative_hack_probability = 0.0
            self.log_metrics(iteration=iter, result=self.train_result, attacker_episode_rewards=episode_attacker_rewards,
                             defender_episode_rewards=episode_defender_rewards, episode_steps=episode_steps,
                             episode_avg_attacker_loss=episode_avg_attacker_loss, episode_avg_defender_loss=episode_avg_defender_loss,
                             eval=False, update_stats=True, lr_attacker=lr_attacker, lr_defender=lr_defender)

            # Log values and gradients of the parameters (histogram summary) to tensorboard
            if self.config.attacker:
                for tag, value in self.attacker_policy_network.named_parameters():
                    tag = tag.replace('.', '/')
                    self.tensorboard_writer.add_histogram(tag, value.data.cpu().numpy(), iter)
                    self.tensorboard_writer.add_histogram(tag + '_attacker/grad', value.grad.data.cpu().numpy(),
                                                          iter)
            if self.config.defender:
                for tag, value in self.defender_policy_network.named_parameters():
                    tag = tag.replace('.', '/')
                    self.tensorboard_writer.add_histogram(tag, value.data.cpu().numpy(), iter)
                    self.tensorboard_writer.add_histogram(tag + '_defender/grad', value.grad.data.cpu().numpy(),
                                                          iter)

            # Reset the windowed metrics after logging
            episode_attacker_rewards = []
            episode_defender_rewards = []
            episode_steps = []
            self.num_train_games = 0
            self.num_train_hacks = 0

        # Run evaluation every <self.config.eval_frequency> iterations
        if iter % self.config.eval_frequency == 0:
            self.eval(iter)

        # Save models every <self.config.checkpoint_frequency> iterations
        if iter % self.config.checkpoint_freq == 0:
            self.save_model()
            self.env.save_trajectories(checkpoint=True)
            self.env.save_attack_data(checkpoint=True)
            if self.config.save_dir is not None:
                time_str = str(time.time())
                self.train_result.to_csv(self.config.save_dir + "/" + time_str + "_train_results_checkpoint.csv")
                self.eval_result.to_csv(self.config.save_dir + "/" + time_str + "_eval_results_checkpoint.csv")

        self.outer_train.update(1)

        # Anneal epsilon linearly
        self.anneal_epsilon()

    self.config.logger.info("Training Complete")

    # Final evaluation (for saving Gifs etc)
    self.eval(self.config.num_episodes-1, log=False)

    # Save networks
    self.save_model()

    # Save other game data
    self.env.save_trajectories(checkpoint = False)
    self.env.save_attack_data(checkpoint=False)
    if self.config.save_dir is not None:
        time_str = str(time.time())
        self.train_result.to_csv(self.config.save_dir + "/" + time_str + "_train_results_checkpoint.csv")
        self.eval_result.to_csv(self.config.save_dir + "/" + time_str + "_eval_results_checkpoint.csv")
    return self.train_result
def eval(self, train_episode, log=True) -> ExperimentResult:
    """
    Performs evaluation by sampling actions from the learned policy networks
    (via ``get_action``); optionally records video/GIFs of the episodes.

    NOTE(review): the original docstring said "greedy policy with respect to
    the learned Q-values" -- that looks copy-pasted from a Q-learning agent;
    this agent draws from a Categorical policy distribution.

    :param train_episode: the train episode to keep track of logging
    :param log: whether to log the result
    :return: the evaluation result object (returns None early if
        eval_episodes < 1, despite the annotation)
    """
    self.config.logger.info("Starting Evaluation")
    time_str = str(time.time())
    self.num_eval_games = 0
    self.num_eval_hacks = 0
    if len(self.eval_result.avg_episode_steps) > 0:
        self.config.logger.warning("starting eval with non-empty result object")
    if self.config.eval_episodes < 1:
        return
    done = False

    # Video config: wrap the env in a monitor that captures frames
    if self.config.video:
        if self.config.video_dir is None:
            raise AssertionError("Video is set to True but no video_dir is provided, please specify "
                                 "the video_dir argument")
        self.env = IdsGameMonitor(self.env, self.config.video_dir + "/" + time_str, force=True,
                                  video_frequency=self.config.video_frequency)
        self.env.metadata["video.frames_per_second"] = self.config.video_fps

    # Tracking metrics
    episode_attacker_rewards = []
    episode_defender_rewards = []
    episode_steps = []

    # Logging
    self.outer_eval = tqdm.tqdm(total=self.config.eval_episodes, desc='Eval Episode', position=1)
    # NOTE(review): `0,0` below supplies two args where one 0.0 was likely
    # intended; str.format ignores the surplus argument so output is unaffected.
    self.outer_eval.set_description_str(
        "[Eval] avg_a_R:{:.2f},avg_d_R:{:.2f},avg_t:{:.2f},avg_h:{:.2f},acc_A_R:{:.2f}," \
        "acc_D_R:{:.2f}".format(0.0, 0,0, 0.0, 0.0, 0.0, 0.0))

    # Eval
    attacker_obs, defender_obs = self.env.reset(update_stats=False)
    attacker_state = self.update_state(attacker_obs=attacker_obs, defender_obs=defender_obs, state=[],
                                       attacker=True)
    defender_state = self.update_state(defender_obs=defender_obs, attacker_obs=attacker_obs, state=[],
                                       attacker=False)
    for episode in range(self.config.eval_episodes):
        episode_attacker_reward = 0
        episode_defender_reward = 0
        episode_step = 0
        while not done:
            if self.config.eval_render:
                self.env.render()
                time.sleep(self.config.eval_sleep)

            # Default initialization
            attacker_action = 0
            defender_action = 0

            # Get attacker and defender actions
            if self.config.attacker:
                legal_actions = None
                illegal_actions = None
                if self.env.local_view_features():
                    legal_actions, illegal_actions = self.get_legal_attacker_actions(attacker_obs)
                attacker_action, _ = self.get_action(attacker_state, attacker=True,
                                                     legal_actions=legal_actions, non_legal_actions=illegal_actions)
                if self.env.local_view_features():
                    attacker_action = PolicyGradientAgent.convert_local_attacker_action_to_global(attacker_action, attacker_obs)
            if self.config.defender:
                defender_action, _ = self.get_action(defender_state, attacker=False)
            action = (attacker_action, defender_action)

            # Take a step in the environment
            obs_prime, reward, done, _ = self.env.step(action)

            # Update state information and metrics
            attacker_reward, defender_reward = reward
            obs_prime_attacker, obs_prime_defender = obs_prime
            episode_attacker_reward += attacker_reward
            episode_defender_reward += defender_reward
            episode_step += 1
            attacker_obs = obs_prime_attacker
            defender_obs = obs_prime_defender
            attacker_state = self.update_state(attacker_obs=attacker_obs, defender_obs=defender_obs,
                                               state=attacker_state, attacker=True)
            defender_state = self.update_state(defender_obs=defender_obs, attacker_obs=attacker_obs,
                                               state=defender_state, attacker=False)

        # Render final frame when game completed
        if self.config.eval_render:
            self.env.render()
            time.sleep(self.config.eval_sleep)
        self.config.logger.info("Eval episode: {}, Game ended after {} steps".format(episode, episode_step))

        # Record episode metrics
        episode_attacker_rewards.append(episode_attacker_reward)
        episode_defender_rewards.append(episode_defender_reward)
        episode_steps.append(episode_step)

        # Update eval stats (detected => attacker penalized, hacked => attacker rewarded)
        self.num_eval_games +=1
        self.num_eval_games_total += 1
        if self.env.state.detected:
            self.eval_attacker_cumulative_reward -= constants.GAME_CONFIG.POSITIVE_REWARD
            self.eval_defender_cumulative_reward += constants.GAME_CONFIG.POSITIVE_REWARD
        if self.env.state.hacked:
            self.eval_attacker_cumulative_reward += constants.GAME_CONFIG.POSITIVE_REWARD
            self.eval_defender_cumulative_reward -= constants.GAME_CONFIG.POSITIVE_REWARD
            self.num_eval_hacks += 1
            self.num_eval_hacks_total +=1

        # Log average metrics every <self.config.eval_log_frequency> episodes
        if episode % self.config.eval_log_frequency == 0 and log:
            if self.num_eval_hacks > 0:
                self.eval_hack_probability = float(self.num_eval_hacks) / float(self.num_eval_games)
            if self.num_eval_games_total > 0:
                self.eval_cumulative_hack_probability = float(self.num_eval_hacks_total) / float(
                    self.num_eval_games_total)
            self.log_metrics(episode, self.eval_result, episode_attacker_rewards, episode_defender_rewards, episode_steps,
                             eval = True, update_stats=False)

        # Save gifs
        if self.config.gifs and self.config.video:
            self.env.generate_gif(self.config.gif_dir + "/episode_" + str(train_episode) + "_"
                                  + time_str + ".gif", self.config.video_fps)

            # Add frames to tensorboard
            for idx, frame in enumerate(self.env.episode_frames):
                self.tensorboard_writer.add_image(str(train_episode) + "_eval_frames/" + str(idx),
                                                  frame, global_step=train_episode,
                                                  dataformats = "HWC")

        # Reset for new eval episode
        done = False
        attacker_obs, defender_obs = self.env.reset(update_stats=False)
        attacker_state = self.update_state(attacker_obs=attacker_obs, defender_obs=defender_obs,
                                           state=attacker_state, attacker=True)
        defender_state = self.update_state(defender_obs=defender_obs, attacker_obs=attacker_obs,
                                           state=defender_state, attacker=False)
        self.outer_eval.update(1)

    # Log average eval statistics
    if log:
        if self.num_eval_hacks > 0:
            self.eval_hack_probability = float(self.num_eval_hacks) / float(self.num_eval_games)
        if self.num_eval_games_total > 0:
            self.eval_cumulative_hack_probability = float(self.num_eval_hacks_total) / float(
                self.num_eval_games_total)
        self.log_metrics(train_episode, self.eval_result, episode_attacker_rewards, episode_defender_rewards,
                         episode_steps, eval=True, update_stats=True)

    self.env.close()
    self.config.logger.info("Evaluation Complete")
    return self.eval_result
def save_model(self) -> None:
    """
    Saves the PyTorch policy-network weights (state dicts) to
    ``config.save_dir`` with a timestamped file name, one file per
    configured agent (attacker and/or defender).

    :return: None
    """
    timestamp = str(time.time())
    save_dir = self.config.save_dir
    if save_dir is None:
        # Nowhere to write -- warn and bail out instead of raising.
        self.config.logger.warning("Save path not defined, not saving policy-networks to disk")
        return
    if self.config.attacker:
        attacker_path = "{}/{}_attacker_policy_network.pt".format(save_dir, timestamp)
        self.config.logger.info("Saving policy-network to: {}".format(attacker_path))
        torch.save(self.attacker_policy_network.state_dict(), attacker_path)
    if self.config.defender:
        defender_path = "{}/{}_defender_policy_network.pt".format(save_dir, timestamp)
        self.config.logger.info("Saving policy-network to: {}".format(defender_path))
        torch.save(self.defender_policy_network.state_dict(), defender_path)
| [
"kimham@kth.se"
] | kimham@kth.se |
c8345d46b7e6acca3cd3f2de57a0bf85e4e79016 | 60eb98538025c61cf94a91f6c96f9ee81dcd3fdf | /monai/engines/__init__.py | 36719ae61c18553d8e31520cae7b70625028094c | [
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] | permissive | gagandaroach/MONAI | 167e7746995d4b6136731881e22ad4df333b16a9 | 79b83d9fac41efae9b90ed2f9ad078d6d664bf64 | refs/heads/master | 2023-06-02T19:54:47.737846 | 2021-06-24T18:34:02 | 2021-06-24T18:34:02 | 270,741,899 | 0 | 0 | Apache-2.0 | 2020-06-08T16:29:32 | 2020-06-08T16:29:31 | null | UTF-8 | Python | false | false | 989 | py | # Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .evaluator import EnsembleEvaluator, Evaluator, SupervisedEvaluator
from .multi_gpu_supervised_trainer import create_multigpu_supervised_evaluator, create_multigpu_supervised_trainer
from .trainer import GanTrainer, SupervisedTrainer, Trainer
from .utils import (
GanKeys,
IterationEvents,
default_make_latent,
default_prepare_batch,
engine_apply_transform,
get_devices_spec,
)
| [
"noreply@github.com"
] | gagandaroach.noreply@github.com |
983aa64b512f189c949d9b514387add42af7ee14 | 1ee3dc4fa096d12e409af3a298ba01f5558c62b5 | /ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/rsvppcepexpectedinitiatedlsps.py | 17e0b5a0e9d231bd7b19e49d62c30ca30e11b757 | [
"MIT"
] | permissive | parthpower/ixnetwork_restpy | 321e64a87be0a4d990276d26f43aca9cf4d43cc9 | 73fa29796a5178c707ee4e21d90ff4dad31cc1ed | refs/heads/master | 2020-07-04T13:34:42.162458 | 2019-08-13T20:33:17 | 2019-08-13T20:33:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,010 | py | # MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class RsvpPcepExpectedInitiatedLsps(Base):
    """The RsvpPcepExpectedInitiatedLsps class encapsulates a required rsvpPcepExpectedInitiatedLsps node in the ixnetwork hierarchy.

    An instance of the class can be obtained by accessing the RsvpPcepExpectedInitiatedLsps property from a parent instance.
    The internal properties list will contain one and only one set of properties which is populated when the property is accessed.
    """

    # NOTE(review): this module appears auto-generated from the IxNetwork REST
    # API model (ixnetwork_restpy) -- prefer regenerating over hand-editing.
    # REST resource name of this node in the IxNetwork data model.
    _SDM_NAME = 'rsvpPcepExpectedInitiatedLsps'

    def __init__(self, parent):
        super(RsvpPcepExpectedInitiatedLsps, self).__init__(parent)

    @property
    def RsvpIngressRROSubObjectsList(self):
        """An instance of the RsvpIngressRROSubObjectsList class.

        Returns:
            obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.rsvpingressrrosubobjectslist.RsvpIngressRROSubObjectsList)

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # Imported lazily to avoid circular imports between generated modules.
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.rsvpingressrrosubobjectslist import RsvpIngressRROSubObjectsList
        return RsvpIngressRROSubObjectsList(self)

    @property
    def Tag(self):
        """An instance of the Tag class.

        Returns:
            obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tag.Tag)

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tag import Tag
        return Tag(self)

    @property
    def Active(self):
        """Activate/Deactivate Configuration

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('active')

    @property
    def BackupLspId(self):
        """Backup LSP identifier.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('backupLspId')

    @property
    def Count(self):
        """Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.

        Returns:
            number
        """
        return self._get_attribute('count')

    @property
    def DescriptiveName(self):
        """Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offers more context

        Returns:
            str
        """
        return self._get_attribute('descriptiveName')

    @property
    def EnableRRO(self):
        """Enable RRO

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('enableRRO')

    @property
    def LocalIp(self):
        """Local IP

        Returns:
            list(str)
        """
        return self._get_attribute('localIp')

    @property
    def Name(self):
        """Name of NGPF element, guaranteed to be unique in Scenario

        Returns:
            str
        """
        return self._get_attribute('name')
    @Name.setter
    def Name(self, value):
        self._set_attribute('name', value)

    @property
    def NumberOfRroSubObjects(self):
        """Number Of RRO Sub-Objects

        Returns:
            number
        """
        return self._get_attribute('numberOfRroSubObjects')
    @NumberOfRroSubObjects.setter
    def NumberOfRroSubObjects(self, value):
        self._set_attribute('numberOfRroSubObjects', value)

    @property
    def SessionInformation(self):
        """Logs additional information about the RSVP session state

        Returns:
            list(str[lastErrLSPAdmissionControlFailure|lastErrLSPBadAdSpecValue|lastErrLSPBadExplicitRoute|lastErrLSPBadFlowspecValue|lastErrLSPBadInitialSubobject|lastErrLSPBadLooseNode|lastErrLSPBadStrictNode|lastErrLSPBadTSpecValue|lastErrLSPDelayBoundNotMet|lastErrLSPMPLSAllocationFailure|lastErrLSPMTUTooBig|lastErrLSPNonRSVPRouter|lastErrLSPNoRouteAvailable|lastErrLSPPathErr|lastErrLSPPathTearSent|lastErrLSPRequestedBandwidthUnavailable|lastErrLSPReservationTearReceived|lastErrLSPReservationTearSent|lastErrLSPReservationTimeout|lastErrLSPRoutingLoops|lastErrLSPRoutingProblem|lastErrLSPRSVPSystemError|lastErrLSPServiceConflict|lastErrLSPServiceUnsupported|lastErrLSPTrafficControlError|lastErrLSPTrafficControlSystemError|lastErrLSPTrafficOrganizationError|lastErrLSPTrafficServiceError|lastErrLSPUnknownObjectClass|lastErrLSPUnknownObjectCType|lastErrLSPUnsupportedL3PID|lSPAdmissionControlFailure|lSPBadAdSpecValue|lSPBadExplicitRoute|lSPBadFlowspecValue|lSPBadInitialSubobject|lSPBadLooseNode|lSPBadStrictNode|lSPBadTSpecValue|lSPDelayBoundNotMet|lSPMPLSAllocationFailure|lSPMTUTooBig|lSPNonRSVPRouter|lSPNoRouteAvailable|lSPPathErr|lSPPathTearSent|lSPPceInitiatedMsgNotReceived|lSPRequestedBandwidthUnavailable|lSPReservationNotReceived|lSPReservationTearReceived|lSPReservationTearSent|lSPReservationTimeout|lSPRoutingLoops|lSPRoutingProblem|lSPRSVPSystemError|lSPServiceConflict|lSPServiceUnsupported|lSPTrafficControlError|lSPTrafficControlSystemError|lSPTrafficOrganizationError|lSPTrafficServiceError|lSPUnknownObjectClass|lSPUnknownObjectCType|lSPUnsupportedL3PID|mbbCompleted|mbbTriggered|none])
        """
        return self._get_attribute('sessionInformation')

    @property
    def State(self):
        """State

        Returns:
            list(str[down|none|notStarted|pceRequestNotReceived|up])
        """
        return self._get_attribute('state')

    @property
    def SymbolicPathName(self):
        """This is used for generating the traffic for those LSPs from PCE for which the Symbolic Path Name is configured and matches the value.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('symbolicPathName')

    def update(self, Name=None, NumberOfRroSubObjects=None):
        """Updates a child instance of rsvpPcepExpectedInitiatedLsps on the server.

        This method has some named parameters with a type: obj (Multivalue).
        The Multivalue class has the associated documentation that details the possible values for those named parameters.

        Args:
            Name (str): Name of NGPF element, guaranteed to be unique in Scenario
            NumberOfRroSubObjects (number): Number Of RRO Sub-Objects

        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        # locals() hands the caller-supplied keyword values to the base updater.
        self._update(locals())

    def get_device_ids(self, PortNames=None, Active=None, BackupLspId=None, EnableRRO=None, SymbolicPathName=None):
        """Base class infrastructure that gets a list of rsvpPcepExpectedInitiatedLsps device ids encapsulated by this object.

        Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.

        Args:
            PortNames (str): optional regex of port names
            Active (str): optional regex of active
            BackupLspId (str): optional regex of backupLspId
            EnableRRO (str): optional regex of enableRRO
            SymbolicPathName (str): optional regex of symbolicPathName

        Returns:
            list(int): A list of device ids that meets the regex criteria provided in the method parameters

        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        return self._get_ngpf_device_ids(locals())
| [
"srvc_cm_packages@keysight.com"
] | srvc_cm_packages@keysight.com |
e54cd6e2a2e416870412ed33580505381d15801f | 9db281fbed35bb8384eeacaa81d1a32a9dcc5cca | /class-28/demo/full-stack-snacks/snacks/migrations/0001_initial.py | ad9b33da216b354507f1c25488783863e143cca1 | [] | no_license | corey-marchand/seattle-python-401d14 | aab3f48c82229f1958989ce8318de60b9abbe4e2 | ae9ffebc9e5250cb5ec1760fd7764da0d3ad4e4c | refs/heads/master | 2022-11-15T16:09:37.248530 | 2020-07-09T19:10:49 | 2020-07-09T19:10:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 794 | py | # Generated by Django 3.0.7 on 2020-06-17 06:22
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial migration for the ``snacks`` app: creates the Snack table.
    # NOTE: auto-generated by Django; migrations are append-only -- do not edit
    # by hand once applied.

    initial = True

    dependencies = [
        # The purchaser FK targets the swappable user model, so whatever
        # AUTH_USER_MODEL resolves to must be migrated first.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Snack',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=64)),
                ('description', models.TextField()),
                # Deleting a user cascades to their snacks.
                ('purchaser', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"jb.tellez@gmail.com"
] | jb.tellez@gmail.com |
a0a4fee1ad51b2720d16c8a31097e86016f07fa8 | d0eb2004a98fa79e280be2535f337604e96ccece | /dict_oper.py | 484f5f458b76e280da0ad82a2809bd9ab5fb4d66 | [] | no_license | Parya1112009/python | f33a9ccfbc858886f8d065eee3f29bca430e5688 | 2550f5c54ff8310977806bab3b39adb8ce252496 | refs/heads/master | 2022-11-22T04:15:16.822655 | 2022-11-16T03:56:26 | 2022-11-16T03:56:26 | 88,561,568 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 694 | py | prices = {"mango": 24,"apple": 25,"banana": 20}
# Demo of dict lookup/mutation operations on the ``prices`` dict defined above.
product = input("enter the product you want to buy : ")
# dict.get() returns None (instead of raising KeyError) for a missing key.
price = prices.get(product)
# Fix: compare against None explicitly -- the original `if price:` would report
# a product with price 0 as unavailable, since 0 is falsy.
if price is not None:
    print(f"the price of {product} is {price}")
else:
    print("this product is not available today SORRY!!!")

# `del` removes a key/value pair by key (raises KeyError if absent).
del prices["apple"]
print(prices)
prices["apple"] = 40
print(prices)

# pop() removes the given key and returns its value.
prices.pop("mango")
print(f"pop will remove the value of key mango,the final dictionary is\n {prices}")

prices["mango"] = 49
prices["apple"] = 49
# popitem() removes and returns the most recently inserted key/value pair.
prices.popitem()
print(f"popitem will remove the value of most recent key value pair, the final dictionary is\n {prices}")

# clear() empties the dictionary in place.
prices.clear()
print(f"clear will empty the dictionary , the final dictionary is \n{prices}")
| [
"noreply@github.com"
] | Parya1112009.noreply@github.com |
de956c8607638b1cb558fc81b01ce67bbdff7bb9 | 3ee1bb0d0acfa5c412b37365a4564f0df1c093fb | /python_import/p72_main.py | 65026687f67b56914ee89cf2150c49f736c6eb29 | [] | no_license | moileehyeji/Study | 3a20bf0d74e1faec7a2a5981c1c7e7861c08c073 | 188843c6415a4c546fdf6648400d072359d1a22b | refs/heads/main | 2023-04-18T02:30:15.810749 | 2021-05-04T08:43:53 | 2021-05-04T08:43:53 | 324,901,835 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 480 | py | import p71_byunsu as p71
# Demonstrates the two import styles and how rebinding a name in this module
# does not affect the imported module's own binding.
# p71.aaa = 3  # after this, print(p71.aaa) would print 3
print(p71.aaa)
print(p71.square(10))
# 2
# 1024
print('===================================')
from p71_byunsu import aaa, square
print(aaa) # refers to the aaa object defined in module p71
# 2
aaa = 3
print(aaa) # now bound to p72's own aaa object
print(square(10)) # still the square function object from p71
# 3
# 1024
# p71's aaa (=2) variable and square function are allocated in memory;
# p72's aaa (=3) variable is bound to a different memory location.
"noreply@github.com"
] | moileehyeji.noreply@github.com |
9f4edd4e9531bdc53246a1e37a9c11585b60457f | 001386fcdfc18c54cc1dbd23a4a9e6bf30deb890 | /amqp/codec/tests/test_codec.py | 6e81731aab5d18121e52ac6af918d7e80d680534 | [] | no_license | wizardsofindustry/python-amqp | 4f4453db8303216a7e29f526ccdcd07878e255b7 | 50a771c27836f9a60d646fbf969e07e0107c6934 | refs/heads/master | 2021-01-17T19:59:56.958796 | 2016-06-12T11:30:07 | 2016-06-12T11:54:00 | 65,640,375 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 95 | py | import unittest
from amqp import codec
class AMQPCodecTestCase(unittest.TestCase):
    # Placeholder test suite for the AMQP codec package; no test methods yet.
    # TODO: add encode/decode round-trip tests exercising ``codec``.
    pass
| [
"cochise.ruhulessin@wizardsofindustry.net"
] | cochise.ruhulessin@wizardsofindustry.net |
967874032efced734e3899b3c039bb0c3d0d7694 | 8a3726abfc9cb72d8ccf7d32b18edabf8d16b630 | /04/b.py | 93c3d27e7728e2b3be4dd064d4e02b4afc08d3fa | [] | no_license | alex-stephens/aoc2015 | 48a46efc1a888ea2d451a5938fc404d26e96e1a0 | ccc1c85f8da7a0585003b2e4f99f3f1def35ec0b | refs/heads/master | 2023-02-05T23:02:19.148138 | 2020-12-27T19:16:47 | 2020-12-27T19:16:47 | 324,579,165 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | import hashlib
# Read the secret key from the puzzle input. Fixes two issues in the original
# one-liner: strip the trailing newline so it is not hashed as part of the key,
# and close the file deterministically via a context manager.
with open('input.txt') as puzzle_input:
    key = puzzle_input.readline().strip()
def check(key, num, nzeros=6):
    """Return True if md5(key + str(num)) starts with ``nzeros`` zero hex digits.

    Generalized from the original hard-coded constant: ``nzeros`` defaults to 6
    (Advent of Code 2015 day 4, part two), so existing callers are unchanged,
    but part one (5 zeros) can reuse the same function.

    :param key: secret-key prefix from the puzzle input
    :param num: candidate integer suffix to append to the key
    :param nzeros: required number of leading zero hex digits (default 6)
    :return: True if the hex digest has the required leading zeros
    """
    candidate = key + str(num)
    # (renamed local: the original `hash` shadowed the builtin of the same name)
    digest = hashlib.md5(candidate.encode()).hexdigest()
    return digest.startswith('0' * nzeros)
n = 0
# Brute-force scan from 0: the answer is the smallest non-negative suffix
# whose MD5 digest satisfies check().
while True:
    if check(key, n):
        break
    n += 1
print(n)
"alexstephens9@gmail.com"
] | alexstephens9@gmail.com |
2c6ec495b46bf3f29802da6521109de0872fd8fb | e770533cc7d8517134d6f9159f5f9e52747c7153 | /python/11pywinauto/test2.py | 52aaf498cb5e6b5a8be30e39d1a472c9edaf21d3 | [] | no_license | code1990/bootPython | 5d878f7fac8aaa09a2b9e4a6d50a3c0f86c6dea5 | e5debd59b07a2c713f3e692aa4f44a9d2e5baeae | refs/heads/master | 2022-07-27T04:31:00.292692 | 2020-08-07T07:07:15 | 2020-08-07T07:07:23 | 206,805,170 | 0 | 0 | null | 2020-10-13T15:51:34 | 2019-09-06T13:56:39 | Python | UTF-8 | Python | false | false | 5,200 | py | import pywinauto
import pyautogui
from pywinauto.keyboard import SendKeys
from pywinauto.mouse import *
# from pywinauto.keyboard import *
import time
# 1. Launch the 360 Secure Browser executable.
browser_path = 'D:\\360\\360se6\\Application\\360se.exe'
user_page = 373
app = pywinauto.Application().start(browser_path)
# 2. Attach to the browser's main window by its window class name.
mainWindow = app.window(class_name=r'360se6_Frame')
time.sleep(10)
print("配置https://www.linkedin.com/feed/为360主页")
# print("鼠标点击领跑插件>>>>>")
# pyautogui.moveTo(935, 44)
# pyautogui.click()
# time.sleep(12)
# print("鼠标点击关闭360主页>>>>>")
# pyautogui.moveTo(279, 12)
# pyautogui.click()
time.sleep(3)
# print("鼠标移动到屏幕中间>>>>>")
# pyautogui.moveTo(300, 200)
# time.sleep(10)
# currentMouseX, currentMouseY = pyautogui.position()
# print(currentMouseX)
# print(currentMouseY)
# NOTE(review): every coordinate below is a hard-coded screen position and
# assumes a fixed resolution / window layout -- confirm before reuse.
# Close the LinkedIn help overlay.
print("鼠标点击关闭领英帮助界面>>>>>")
pyautogui.moveTo(1211, 286)
pyautogui.click()
time.sleep(3)
# Open the "My Network" (contacts) menu.
print("点击人脉菜单>>>>>")
pyautogui.moveTo(1167, 158)
pyautogui.click()
# time.sleep(3)
# Focus the search input box so it accepts keystrokes.
print("移动到input输入框,点击输入框获取输入状态>>>>>")
pyautogui.moveTo(930, 187)
pyautogui.click()
# time.sleep(3)
print("请务必保持英文输入法状态>>>>>")
print("模拟键盘输入文字>>>>>")
# Type the search text and submit with Enter (requires an English IME).
pyautogui.typewrite(message='bolts inc',interval=0.5)
pyautogui.press('enter')
time.sleep(10)
# 鼠标左击一次
# pyautogui.click()
# time.sleep(3)
# currentMouseX, currentMouseY = pyautogui.position()
# print(currentMouseX)
# print(currentMouseY)
# print("鼠标点击搜索按钮>>>>>")
# pyautogui.moveTo(1230, 185)
# pyautogui.click()
# time.sleep(3)
print("鼠标点击多选选择框>>>>>")
pyautogui.moveTo(935, 227)
pyautogui.click()
# time.sleep(3)
# print("鼠标点击添加按钮>>>>>")
# pyautogui.moveTo(1060, 229)
# pyautogui.click()
# # time.sleep(3)
# print("鼠标移动到文本输入框>>>>>")
# pyautogui.moveTo(932, 274)
# pyautogui.click()
# print("全选删除文本输入框>>>>>")
# pyautogui.hotkey('ctrl', 'a')
# pyautogui.hotkey('ctrl', 'x')
# pyautogui.click()
# time.sleep(3)
# print("鼠标点击发送按钮>>>>>")
# pyautogui.moveTo(1220, 422)
# pyautogui.click()
# time.sleep(10*28)
print("鼠标点击下一页>>>>>")
pyautogui.moveTo(1231, 227)
pyautogui.click()
time.sleep(20)
currentMouseX, currentMouseY = pyautogui.position()
print(currentMouseX)
print(currentMouseY)
pyautogui.alert(text='This is an alert box.', title='Test')
# app.kill()
# #4.点击新弹出窗体的确定按钮
# out_note=u'关于记事本'
# button_name_ok='确定'
# app[out_note][button_name_ok].click()
# #5.查看一个窗体含有的控件,子窗体,菜单
# print(app[title_notepad].print_control_identifiers())
# #-------------------无标题记事本的含有的控件,子窗体,菜单-----------------
# # Control Identifiers:
# #
# # Notepad - '无标题 - 记事本' (L8, T439, R892, B815)
# # ['无标题 - 记事本Notepad', 'Notepad', '无标题 - 记事本']
# # child_window(title="无标题 - 记事本", class_name="Notepad")
# # |
# # | Edit - '' (L16, T490, R884, B807)
# # | ['无标题 - 记事本Edit', 'Edit']
# # | child_window(class_name="Edit")
# # |
# # | StatusBar - '' (L16, T785, R884, B807)
# # | ['StatusBar', '无标题 - 记事本StatusBar', 'StatusBar 第 1 行,第 1 列']
# # | child_window(class_name="msctls_statusbar32")
# # None
#
# #6.在记事本中输入一些文本
# #[tips-> ctrl+点击鼠标左键快速查看被调用函数]
# app.title_notepad.Edit.type_keys('pywinauto works!\n',with_spaces=True,with_newlines=True)
# app.title_notepad.Edit.type_keys('hello word !\n',with_spaces=True,with_newlines=True)
# #7.选择编辑菜单->编辑时间/日期
# # app[title_notepad].menu_select('编辑->时间/日期(&d)')
# #8.连接已运行程序
# #如连接微信 借助spy++找到运行程序的handle
# app1=pywinauto.Application(backend='uia').connect(handle=0x00320830)
# #9.查看运行窗口窗体名称
# print(app1.window())
# print(app1['Dialog'].print_control_identifiers())
# # Dialog - '微信' (L968, T269, R1678, B903)
# # ['微信Dialog', 'Dialog', '微信']
# # child_window(title="微信", control_type="Window")
# # |
# # | Pane - 'ChatContactMenu' (L-10000, T-10000, R-9999, B-9999)
# # | ['ChatContactMenu', 'ChatContactMenuPane', 'Pane', 'Pane0', 'Pane1']
# # | child_window(title="ChatContactMenu", control_type="Pane")
# # | |
# # | | Pane - '' (L-10019, T-10019, R-9980, B-9980)
# # | | ['', 'Pane2', '0', '1']
# # |
# # | Pane - '' (L948, T249, R1698, B923)
# # | ['2', 'Pane3']
# # None
# #10.通过路径去打开一个已有程序
# #11.鼠标控制
# x=0
# y=0
# for i in range(20):
# step_x = i*8
# step_y = i*5
# move(coords=(step_x,step_y ))
# time.sleep(1)
#
# #12.键盘控制
# #键盘对应的ascii http://www.baike.com/wiki/ASCII
# #发送键盘指令,打开命令行,输入一条命令for /l %i in (1,1,100) do tree
# SendKeys('{VK_LWIN}')
# SendKeys('cmd')
# SendKeys('{VK_RETURN}')
# time.sleep(3)
# SendKeys('for /L +5i in +9 1,1,100+0 do tree {VK_RETURN}',with_spaces=True) | [
"s1332177151@sina.com"
] | s1332177151@sina.com |
024edc7ca11aa1d922a9f0786b76767268c596b9 | 76d43c6ee84b8c6cc90dd61a097fa57ecb85a17e | /term2/Computational_Neuroscience/Coursework2/Question4.py | 8ee3bdfe41c1c991856ddac64bea8fac8526d6b0 | [] | no_license | junghyun4425/UoB_Projects | 071708d4150117de1650b22d836fad4ac8fbf559 | 49f5eac9697c709da21b519619a28303bd83e728 | refs/heads/master | 2021-06-27T05:44:21.988846 | 2021-06-19T03:17:06 | 2021-06-19T03:17:06 | 231,607,267 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,399 | py | from numpy import *
import numpy as np
import matplotlib.pyplot as plt
def load_data(filename, T):
    """Read *filename* line by line, strip whitespace, and convert each
    line with the callable *T*; return the converted values as a list."""
    with open(filename, 'r') as source:
        return [T(entry.strip()) for entry in source]
def cal_STA(stim, data_array, width, sample_rate, interval, nec_adj):
    """Spike-triggered average conditioned on spike pairs.

    Averages the stimulus preceding spikes that are followed by another
    spike exactly ``interval`` ms later.

    Args:
        stim: Stimulus samples (indexable, same sampling as ``data_array``).
        data_array: Binary spike train (non-zero entries mark spikes).
        width: Window width in ms over which the STA is computed.
        sample_rate: ms per sample (bins per ms factor).
        interval: Required spike-pair separation in ms.
        nec_adj: 1 to require the pair to be adjacent (no spike in
            between); any other value allows intervening spikes.

    Returns:
        numpy array of length ``width / sample_rate`` with the STA per lag.
    """
    # Convert the pair separation from ms to sample bins.
    interval /= sample_rate
    time_bin = int(width / sample_rate)
    sta = np.zeros(time_bin)
    # Indices of all spikes in the train.
    spike_tmp = np.nonzero(data_array)[0]
    spike_times=[]
    # necessarily adjacent case
    if nec_adj == 1:
        for i in range(0, len(spike_tmp)):
            # Keep the spike only if another spike sits exactly
            # ``interval`` bins later and no spike occurs in between.
            index_s = spike_tmp[i] + interval
            if data_array[int(index_s)] != 0:
                check_s = data_array[int(spike_tmp[i]) + 1:int(spike_tmp[i] + interval)]
                if sum(check_s) == 0:
                    spike_times.append(spike_tmp[i])
    # not necessarily adjacent case
    else:
        for i in range(0, len(spike_tmp)):
            index_s = spike_tmp[i] + interval
            if data_array[int(index_s)] != 0:
                spike_times.append(spike_tmp[i])
    num = len(spike_times)
    # Average the stimulus at each lag tau before the qualifying spikes.
    # NOTE(review): ``dist`` counts spikes that occur earlier than ``tau``
    # (cannot look back that far), yet the stimulus sample is gathered under
    # the same branch -- the condition looks inverted (an ``else:`` before
    # the append, or ``>= tau``, seems intended). Also ``stim[negative]``
    # would wrap around to the end of the array. Confirm against the
    # intended STA definition before relying on these values.
    for tau in range(0, time_bin):
        dist = 0
        windows = []
        for i in range(0, num):
            if spike_times[i] < tau:
                dist += 1
                windows.append(stim[spike_times[i] - tau])
        sta[tau] = sum(windows) / (num - dist)
    return sta
# Load the stimulus trace and the binary spike train from disk.
stimulus=load_data("stim.dat",float)
spikes=load_data("rho.dat",int)
# 2 ms per sample, 100 ms STA window, and the four pair intervals to test.
sample_rate = 2
width = 100
interval = [2, 10, 20, 50]
sta_adj = []
sta_not_adj = []
# Compute the STA for each interval, once requiring adjacent spike pairs
# (nec_adj=1) and once allowing intervening spikes (nec_adj=0).
for i in range(0, 4):
    sta_adj.append(cal_STA(stimulus, spikes, width, sample_rate, interval[i], 1))
    sta_not_adj.append(cal_STA(stimulus, spikes, width, sample_rate, interval[i], 0))
# X axis in sample bins (NOTE(review): labelled "Time (ms)" below although
# the values are bin indices, i.e. ms / sample_rate -- confirm units).
time = np.arange(0, width / sample_rate)
# Plot and save the adjacent-pair STAs.
plt.figure()
plt.plot(time, sta_adj[0], label='2ms')
plt.plot(time, sta_adj[1], label='10ms')
plt.plot(time, sta_adj[2], label='20ms')
plt.plot(time, sta_adj[3], label='50ms')
plt.legend()
plt.xlabel('Time (ms)')
plt.ylabel('Stimulus')
plt.title('STA (spikes are necessarily adjacent)')
plt.savefig('adjacent.png')
# Plot and save the not-necessarily-adjacent STAs.
plt.figure()
plt.plot(time, sta_not_adj[0], label='2ms')
plt.plot(time, sta_not_adj[1], label='10ms')
plt.plot(time, sta_not_adj[2], label='20ms')
plt.plot(time, sta_not_adj[3], label='50ms')
plt.legend()
plt.xlabel('Time (ms)')
plt.ylabel('Stimulus')
plt.title('STA (spikes are not necessarily adjacent)')
plt.savefig('notnecessarilyadjacent.png')
plt.show()
| [
"junghyun153@naver.com"
] | junghyun153@naver.com |
8711969cce6874a941f33df8118fe018a65c20b2 | 723ea3f47a45fe756c4a77809eb2a4d6b98bc733 | /crackfun/cc150/Dijkstra’s shortest path algorithm.py | 7f3ed39306673ee737dc04aa9a712ef9cbf9f7fc | [] | no_license | JoyiS/Leetcode | a625e7191bcb80d246328121669a37ac81e30343 | 5510ef424135783f6dc40d3f5e85c4c42677c211 | refs/heads/master | 2021-10-21T05:41:00.706086 | 2019-03-03T06:29:14 | 2019-03-03T06:29:14 | 110,296,869 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,504 | py | '''
Given a graph and a source vertex in graph, find shortest paths from source to all vertices in the given graph.
Time Complexity O((|V|+|E|)log|V|) geeks say O(E+VLogV) (with the use of Fibonacci heap
Space Complexity O(|V|)
下面写的代码中值得注意的是:
1) Graph 用二维matrix来实现,graph[u][v] 表示 u 到 v 的距离。如果 uv 不通, 那么 graph[u][v] = 0
2) dijkstra 的输入是 src 和 des 两个vertex。需要keep一个visited的set来表示这个vertex有没有被访问过。需要一个dist[]来记录每一个vertex的distance.
3) 在dijkstra中每一次循环的第一步是找一个当前没有visited的vertex这个vertex的distance是最小的。然后去更新这个vertex的每一个neighbor的dist [].
'''
# Python program for Dijkstra's single
# source shortest path algorithm. The program is
# for adjacency matrix representation of the graph
class Graph():
    """Dijkstra's single-source shortest path on a dense graph.

    The graph is stored as a V x V adjacency matrix where
    ``graph[u][v]`` is the edge weight from u to v and 0 means
    "no edge".
    """

    def __init__(self, vertices):
        """Create a graph with ``vertices`` nodes and no edges."""
        self.V = vertices
        self.graph = [[0 for column in range(vertices)]
                      for row in range(vertices)]

    def minDistance(self, dist, visited):
        """Return ``(index, distance)`` of the unvisited vertex with the
        smallest tentative distance.

        Returns ``(-1, inf)`` when every unvisited vertex is unreachable.
        (The original left ``min_index`` unbound in that case, raising
        UnboundLocalError on disconnected graphs.)
        """
        mindis = float('inf')
        min_index = -1
        for v in range(self.V):
            if not visited[v] and dist[v] < mindis:
                mindis = dist[v]
                min_index = v
        return min_index, mindis

    def dijkstra(self, src, des):
        """Run Dijkstra from ``src`` until ``des`` is settled.

        Prints the shortest distance and returns the vertices in the order
        they were settled (ending with ``des``). Returns None when ``des``
        is unreachable from ``src``.
        """
        dist = [float('inf')] * self.V
        dist[src] = 0
        visited = [False] * self.V
        output = []
        for _ in range(self.V):
            u, mindis = self.minDistance(dist, visited)
            if u == -1:
                # No reachable unvisited vertex remains: des is unreachable.
                return None
            output += [u]
            if u == des:
                print('Min distance is : ' + str(mindis))
                return output
            visited[u] = True
            # Relax every outgoing edge of u (0 marks "no edge").
            for v in range(self.V):
                if self.graph[u][v] > 0 and not visited[v] and dist[v] > dist[u] + self.graph[u][v]:
                    dist[v] = dist[u] + self.graph[u][v]
# Driver program
# Build the standard 9-vertex example graph as an adjacency matrix
# (entry [u][v] is the edge weight from u to v; 0 means "no edge").
g = Graph(9)
g.graph = [[0, 4, 0, 0, 0, 0, 0, 8, 0],
           [4, 0, 8, 0, 0, 0, 0, 11, 0],
           [0, 8, 0, 7, 0, 4, 0, 0, 2],
           [0, 0, 7, 0, 9, 14, 0, 0, 0],
           [0, 0, 0, 9, 0, 10, 0, 0, 0],
           [0, 0, 4, 14, 10, 0, 2, 0, 0],
           [0, 0, 0, 0, 0, 2, 0, 1, 6],
           [8, 11, 0, 0, 0, 0, 1, 0, 7],
           [0, 0, 2, 0, 0, 0, 6, 7, 0]
           ]
g.dijkstra(0,8) | [
"california.sjy@gmail.com"
] | california.sjy@gmail.com |
5d9833dcdd35e411ee6f3e223d2a9feb7cdcdf14 | 553af49f5937ac8fdf47e826315ea5a761b56d0d | /lecture_07/wall_04.py | 694163031ac2190cef911550de1877e4e0eb2a85 | [] | no_license | compas-ITA20/ITA20 | 6ef232b6376de8a8b6391394f6b2f6e37f5d2068 | 244ddbfdc9480f4ea8dda7f983f600f4793594d8 | refs/heads/main | 2023-01-24T20:30:12.360193 | 2020-12-09T10:47:44 | 2020-12-09T10:47:44 | 300,848,724 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,867 | py | from compas.geometry import Bezier
from compas.geometry import Point, Polyline, Vector
from compas.geometry import offset_polyline
from compas.geometry import intersection_line_segment_xy
from compas.utilities import linspace, pairwise
from compas_plotters import GeometryPlotter
def intersection_line_polyline(line, polyline):
    """Return the first XY intersection of *line* with a segment of
    *polyline* as a Point, or None when there is no intersection."""
    for seg in pairwise(polyline.points):
        hit = intersection_line_segment_xy(line, seg)
        if not hit:
            continue
        return Point(*hit)
    return None
# Cubic Bezier centre line of the wall, defined by four control points.
controlpoints = [Point(0, 0, 0), Point(4, 2.5, 0), Point(6, -2.5, 0), Point(10, 0, 0)]
controlpoly = Polyline(controlpoints)
curve = Bezier(controlpoints)
poly = Polyline(curve.locus())
# Offset the centre polyline to both sides to form the wall thickness.
poly1 = Polyline(offset_polyline(poly, +0.15))
poly2 = Polyline(offset_polyline(poly, -0.15))
# Sample 20 points along the curve with their in-plane normals
# (Z x tangent), then build short normal lines at each sample.
points = [curve.point(t) for t in linspace(0, 1, 20)]
tangents = [curve.tangent(t) for t in linspace(0, 1, 20)]
normals = [Vector(0, 0, 1).cross(t) for t in tangents]
lines = [[point, point + normal] for point, normal in zip(points, normals)]
# Intersect each interior normal line with the two offset polylines
# (the first and last samples are skipped via lines[1:-1]).
points1 = [intersection_line_polyline(line, poly1) for line in lines[1:-1]]
points2 = [intersection_line_polyline(line, poly2) for line in lines[1:-1]]
# ==============================================================================
# Visualization
# ==============================================================================
plotter = GeometryPlotter(figsize=(16, 9))
plotter.add(controlpoly, linestyle='dotted', linewidth=1.0, color=(0.5, 0.5, 0.5))
for point in controlpoints:
    plotter.add(point, edgecolor=(1.0, 0.0, 0.0))
for point in points:
    plotter.add(point, size=2)
for point in points1:
    plotter.add(point, size=2)
for point in points2:
    plotter.add(point, size=2)
plotter.add(poly, color=(0.4, 0.4, 0.4))
plotter.add(poly1, color=(0.0, 0.0, 0.0))
plotter.add(poly2, color=(0.0, 0.0, 0.0))
plotter.zoom_extents()
plotter.show()
plotter.show()
| [
"vanmelet@ethz.ch"
] | vanmelet@ethz.ch |
34312bb0ee7868abb57ef60d6422949c6780b782 | edc0974d07e0f4574636130c7ac3b1dceeb1c28b | /robot/backends/__init__.py | 7ef357f3cc9ae5bded2a7a91a1982c9a2f7fcd6a | [
"MIT"
] | permissive | prophile/robot-api2 | 32f3fabd6a2921fb244149099a341be65ba2f140 | 53d3a27bbbf1b1a044e9d883fa817bb03361186a | refs/heads/master | 2020-03-23T06:09:07.510785 | 2018-07-16T23:05:09 | 2018-07-16T23:05:09 | 141,191,968 | 0 | 3 | MIT | 2019-01-22T18:23:42 | 2018-07-16T20:39:30 | Python | UTF-8 | Python | false | false | 31 | py | """Robot hardware backends."""
| [
"arplynn@gmail.com"
] | arplynn@gmail.com |
e498ed42df11a9002be442314e0dfb847530b219 | 2aba3c043ce4ef934adce0f65bd589268ec443c5 | /atcoder/Indeed_now_qual_A/B.py | 0b953deb7658d51f31f98245cbd081e2b99bb72c | [] | no_license | kambehmw/algorithm_python | 4f66593b77039d90515d1fcbecacdab8c811b92f | 17222399dcc92fd8f908e5774a9883e2e89c486e | refs/heads/master | 2020-06-02T12:44:11.322356 | 2020-05-18T13:22:05 | 2020-05-18T13:22:05 | 191,157,113 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 476 | py | from collections import Counter
N = int(input())
S = [input() for _ in range(N)]
T = "indeednow"
counter = Counter(T)
for s in S:
if len(s) != len(T):
print("NO")
continue
counter2 = Counter(s)
flag = True
for k, v in counter2.items():
if k not in counter:
flag = False
break
if counter[k] != v:
flag = False
break
if flag:
print("YES")
else:
print("NO") | [
"kanbe.hmw@gmail.com"
] | kanbe.hmw@gmail.com |
5198c8f0ff598ce68a93865125ae5b40802f99a2 | 068070cc53bb033f6bf5cf2fe7660e6231ff1ae9 | /command.py | 798097d60fa12ca800a16cc8e4d788a7d2ebb179 | [
"MIT"
] | permissive | Answeror/yacron | afbed6ac6e0ba02a8d2fc4d67b5f7d0e512dfd82 | 45060bbaeffc2f06583b5c38607608e8770f4d51 | refs/heads/master | 2016-09-06T11:43:11.559755 | 2013-09-09T16:04:12 | 2013-09-09T16:04:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,287 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. module:: command
:synopsis: Some command used to build pyqt.
.. moduleauthor:: Answeror <answeror@gmail.com>
These code was borrowed from `https://bitbucket.org/jbmohler/pyhacc/src/b0ad3a0b1e58/setup.py`_.
"""
import os
import distutils
from distutils.core import Command
from cx_Freeze import build
def needsupdate(src, targ):
    """Make-style staleness check: True when *targ* does not exist or is
    strictly older than *src*."""
    if not os.path.exists(targ):
        return True
    return os.path.getmtime(src) > os.path.getmtime(targ)
class PySideUiBuild:
    """Compile Qt resource/UI files by spawning the PySide command-line
    tools (``pyside-rcc`` / ``pyside-uic``) found next to the PySide
    package."""

    def qrc(self, qrc_file, py_file):
        """Compile a Qt resource file (.qrc) into a Python module."""
        import subprocess
        import PySide
        pyside_path = os.path.dirname(PySide.__file__)
        rcc_path = os.path.join(pyside_path, 'pyside-rcc')
        rccprocess = subprocess.Popen([rcc_path, qrc_file, '-py3', '-o', py_file])
        rccprocess.wait()

    def uic(self, ui_file, py_file):
        """Compile a Qt Designer file (.ui) into a Python module."""
        import subprocess
        import PySide
        pyside_path = os.path.dirname(PySide.__file__)
        # Fix: the original joined against the undefined name ``pyqt_path``
        # here, which raised NameError whenever uic() was called.
        uic_path = os.path.join(pyside_path, 'pyside-uic')
        uicprocess = subprocess.Popen([uic_path, ui_file, '-o', py_file])
        uicprocess.wait()
class PyQt4UiBuild:
    """Compile Qt resource/UI files using the PyQt4 toolchain."""

    def qrc(self, qrc_file, py_file):
        """Compile a Qt resource file (.qrc) into a Python module with
        the ``pyrcc4`` tool shipped next to the PyQt4 package."""
        import subprocess
        import PyQt4
        pyqt_path = os.path.dirname(PyQt4.__file__)
        # Fix: the original joined against the undefined name
        # ``pyside_path``, which raised NameError whenever qrc() was called.
        pyrcc4_path = os.path.join(pyqt_path, 'pyrcc4')
        rccprocess = subprocess.Popen([pyrcc4_path, qrc_file, '-py3', '-o', py_file])
        rccprocess.wait()

    def uic(self, ui_file, py_file):
        """Compile a Qt Designer file (.ui) into a Python module via
        PyQt4's bundled ``uic`` package."""
        from PyQt4 import uic
        # Context manager guarantees the output file is closed even when
        # compileUi raises (the original leaked the handle on error).
        with open(py_file, 'w') as fp:
            uic.compileUi(ui_file, fp)
class QtUiBuild(Command, PySideUiBuild):
    """distutils command that rebuilds Python modules from Qt Designer
    .ui files and .qrc resource files, using the toolchain mixin
    (PySideUiBuild) for the actual compiler invocations.

    Outputs follow Qt conventions: ``foo.ui`` -> ``ui_foo.py`` and
    ``bar.qrc`` -> ``bar_rc.py``, rebuilt only when stale (needsupdate).
    """
    description = "build Python modules from Qt Designer .ui files"
    user_options = []
    # NOTE(review): class-level mutable lists are shared by all instances;
    # they are populated at module import time below the class.
    ui_files = []
    qrc_files = []
    def initialize_options(self):
        # Required by the distutils Command interface; nothing to set up.
        pass
    def finalize_options(self):
        # Required by the distutils Command interface; nothing to finalize.
        pass
    def compile_ui(self, ui_file, py_file):
        # Skip up-to-date outputs; wrap compiler failures in the
        # distutils error type so setup.py reports them cleanly.
        if not needsupdate(ui_file, py_file):
            return
        print("compiling %s -> %s" % (ui_file, py_file))
        try:
            self.uic(ui_file, py_file)
        except Exception as e:
            raise distutils.errors.DistutilsExecError('Unable to compile user interface %s' % str(e))
        return
    def compile_qrc(self, qrc_file, py_file):
        # Same staleness check and error wrapping as compile_ui, for
        # resource files.
        if not needsupdate(qrc_file, py_file):
            return
        print("compiling %s -> %s" % (qrc_file, py_file))
        try:
            self.qrc(qrc_file, py_file)
        except Exception as e:
            raise distutils.errors.DistutilsExecError('Unable to compile resource file %s' % str(e))
        return
    def run(self):
        # NOTE(review): ``dir`` shadows the builtin of the same name.
        for f in self.ui_files:
            dir, basename = os.path.split(f)
            self.compile_ui(f, os.path.join(dir, "ui_" + basename.replace(".ui", ".py")))
        for f in self.qrc_files:
            dir, basename = os.path.split(f)
            self.compile_qrc(f, os.path.join(dir, basename.replace(".qrc", "_rc.py")))
# Discover the .qrc resource files of the 'yacron' package at import time;
# no .ui files are compiled in this project.
QtUiBuild.ui_files = []
QtUiBuild.qrc_files = [os.path.join(dir, f) \
    for dir in ['yacron'] \
    for f in os.listdir(dir) if f.endswith('.qrc')]
class Build(build):
    # Run the UI build step before the regular cx_Freeze build steps.
    sub_commands = [('build_ui', None)] + build.sub_commands
# Command map for setup(cmdclass=cmds).
cmds = {
    'build': Build,
    'build_ui': QtUiBuild,
}
| [
"answeror@gmail.com"
] | answeror@gmail.com |
a94afbba47b18f02c8f95fe6db0082ed58da616c | 721f75630501927efb9f4bb9527d9ad5173714ea | /nbviewer/tests/base.py | 7f3458ff48d24a5e5bc7783e1d376db1e6cfc0e7 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | charsmith/nbviewer | 3bf8faa3b0806888cb14125a6e4a1ce414763c46 | f2356c5c82f3c3d95f2201bf1ac6f106cdfd5426 | refs/heads/master | 2021-01-16T20:49:40.496538 | 2018-01-10T20:57:46 | 2018-01-10T20:57:46 | 20,975,398 | 0 | 0 | BSD-3-Clause | 2018-01-10T20:57:46 | 2014-06-18T19:36:05 | Python | UTF-8 | Python | false | false | 2,399 | py | """Base class for nbviewer tests.
Derived from IPython.html notebook test case in 2.0
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
import os
import sys
import time
import requests
from contextlib import contextmanager
from subprocess import Popen, PIPE
from unittest import TestCase
from nbviewer.utils import url_path_join
class NBViewerTestCase(TestCase):
    """A base class for tests that need a running nbviewer server."""
    # Port the subprocess server listens on; url() builds URLs against it.
    port = 12341
    @classmethod
    def wait_until_alive(cls):
        """Wait for the server to be alive"""
        # Poll until an HTTP request succeeds.
        # NOTE(review): no timeout -- hangs forever if the server never
        # starts; consider bounding the number of attempts.
        while True:
            try:
                requests.get(cls.url())
            except Exception:
                time.sleep(.1)
            else:
                break
    @classmethod
    def wait_until_dead(cls):
        """Wait for the server to stop getting requests after shutdown"""
        # Inverse of wait_until_alive: poll until requests start failing.
        while True:
            try:
                requests.get(cls.url())
            except Exception:
                break
            else:
                time.sleep(.1)
    @classmethod
    def get_server_cmd(cls):
        """Command line used to launch the nbviewer server subprocess."""
        return [
            sys.executable, '-m', 'nbviewer',
            '--port=%d' % cls.port,
            # '--logging=debug',
        ]
    @classmethod
    def setup_class(cls):
        """Start the server subprocess (output discarded) and block until
        it answers HTTP requests."""
        server_cmd = cls.get_server_cmd()
        # NOTE(review): the devnull handle is never closed explicitly; it
        # lives for the duration of the test class.
        devnull = open(os.devnull, 'w')
        cls.server = Popen(server_cmd,
            stdout=devnull,
            stderr=devnull,
        )
        cls.wait_until_alive()
    @classmethod
    def teardown_class(cls):
        """Terminate the server subprocess and wait until it stops
        answering requests."""
        cls.server.terminate()
        cls.wait_until_dead()
    @classmethod
    def url(cls, *parts):
        """Build an absolute URL on the test server from path parts."""
        return url_path_join('http://localhost:%i' % cls.port, *parts)
@contextmanager
def assert_http_error(status, msg=None):
    """Context manager asserting that the wrapped block raises
    ``requests.HTTPError`` with the given status code.

    Args:
        status: Expected HTTP status code of the error response.
        msg: Optional substring that must appear in the exception text.

    Raises:
        AssertionError: if no HTTPError is raised, the status differs,
            or ``msg`` is not found in the exception message.
    """
    try:
        yield
    except requests.HTTPError as e:
        real_status = e.response.status_code
        # Fix: the original formatted (real_status, status), printing the
        # actual code as "Expected" and vice versa.
        assert real_status == status, \
            "Expected status %d, got %d" % (status, real_status)
        if msg:
            assert msg in str(e), e
    else:
        assert False, "Expected HTTP error status"
| [
"benjaminrk@gmail.com"
] | benjaminrk@gmail.com |
243492441759892aacad82cd08e364136522c70a | 391a95ef338ac01956346438b597e2d23e13d708 | /libpysat/transform/multiply_vector.py | 895be654990c40dbf5538d356ae45946e758a498 | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-public-domain-disclaimer",
"LicenseRef-scancode-public-domain"
] | permissive | wateryi/PyHAT | 34fecf822d673f35009c1d7172757b5bc5223341 | e7977d945ba1e928bf3163ed7992796fb833527d | refs/heads/master | 2020-06-18T20:18:30.090262 | 2020-05-01T01:12:50 | 2020-05-01T01:12:50 | 196,433,305 | 0 | 0 | NOASSERTION | 2019-07-11T16:52:45 | 2019-07-11T16:52:45 | null | UTF-8 | Python | false | false | 505 | py | #this function multiplies all the spectra in a data frame by a vector.
import numpy as np
import pandas as pd
def multiply_vector(df, vectorfile):
df_spectra = df['wvl']
# TODO: check to make sure wavelengths match before multiplying
vector = np.array(pd.read_csv(vectorfile, sep=',', header=None))[:, 1]
if df_spectra.shape[1] == vector.shape[0]:
df['wvl'] = df_spectra.multiply(vector, axis=1)
else:
print('Vector is not the same size as the spectra!')
return df | [
"rbanderson@usgs.gov"
] | rbanderson@usgs.gov |
af808a5caa79930f8276ae03bb82eb0cf792acb0 | 5b1ff6054c4f60e4ae7315db9f20a334bc0b7634 | /Push2/observable_property_alias.py | 2e3d81d968f12d6a54dbfadafc7979283138bf09 | [] | no_license | maratbakirov/AbletonLive9_RemoteScripts | 2869122174634c75405a965401aa97a2dae924a1 | 4a1517c206353409542e8276ebab7f36f9bbd4ef | refs/heads/master | 2021-06-05T14:38:27.959025 | 2021-05-09T11:42:10 | 2021-05-09T11:42:10 | 13,348,327 | 3 | 4 | null | 2016-10-16T13:51:11 | 2013-10-05T16:27:04 | Python | UTF-8 | Python | false | false | 1,448 | py | #Embedded file name: /Users/versonator/Jenkins/live/output/mac_64_static/Release/python-bundle/MIDI Remote Scripts/Push2/observable_property_alias.py
from __future__ import absolute_import, print_function
from ableton.v2.base import SlotManager, Slot
class ObservablePropertyAlias(SlotManager):
    def __init__(self, alias_host, property_host = None, property_name = '', alias_name = None, getter = None, *a, **k):
        """Expose ``property_host.<property_name>`` on ``alias_host`` under
        ``alias_name`` (defaults to ``property_name``), forwarding change
        notifications. ``getter`` optionally overrides the read accessor.
        """
        super(ObservablePropertyAlias, self).__init__(*a, **k)
        self._alias_host = alias_host
        self._alias_name = alias_name or property_name
        self._property_host = property_host
        self._property_name = property_name
        self._property_slot = None  # created by _setup_alias below
        self._setup_alias(getter)
def _get_property_host(self):
return self._property_host
def _set_property_host(self, host):
self._property_host = host
self._property_slot.subject = host
property_host = property(_get_property_host, _set_property_host)
    def _setup_alias(self, getter):
        # Install a read-only property named ``self._alias_name`` on the
        # alias host's CLASS that reads through to the aliased value.
        # NOTE(review): mutating ``alias_host.__class__`` affects every
        # instance of that class, not just this host -- confirm intended.
        aliased_prop = property(getter or self._get_property)
        setattr(self._alias_host.__class__, self._alias_name, aliased_prop)
        # Re-fire the host's ``notify_<alias_name>`` whenever the observed
        # property on ``property_host`` changes.
        notifier = getattr(self._alias_host, 'notify_' + self._alias_name)
        self._property_slot = self.register_slot(Slot(self.property_host, notifier, self._property_name))
def _get_property(self, _):
return getattr(self.property_host, self._property_name, None) | [
"julien@julienbayle.net"
] | julien@julienbayle.net |
b13cfc7dd7c09bffc0ad7fb5541f8106aca24199 | c2f4afee3ec4faef7231da2e48c8fef3d309b3e3 | /fibonacci.py | fdb44e57e885e0a330732763227f1562799bd197 | [] | no_license | tanu312000/pyChapter | a723f99754ff2b21e694a9da3cb2c6ca0cd10fce | 2fd28aefcbfaf0f6c34db90fdf0d77f9aea142ce | refs/heads/master | 2020-05-03T15:51:34.334806 | 2019-03-31T16:17:45 | 2019-03-31T16:17:45 | 178,712,059 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 34 | py | import fibonacci
fibonacci.fib(10) | [
"tanurocks90@gmail.com"
] | tanurocks90@gmail.com |
dc024a4ba381f04fe26481171fa4aed643ffad8b | ee86ad4b38f6ba13f195246f14224ba781f933cc | /09_start/비트연산예제1.py | 52bacb459e2633987a88ed983f9c64c703ebd1e8 | [] | no_license | yejikk/Algorithm | aed7adf00c1e32d21b735b3b34dc6cb75049f164 | 531f43305b3a23c824c9e153151b7280c1dc2535 | refs/heads/master | 2020-04-17T06:17:28.961656 | 2019-11-16T08:02:49 | 2019-11-16T08:02:49 | 166,318,881 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 302 | py | def Bbit_print(i):
output = ''
for j in range(7, -1, -1):
if i & (1 << j):
output += '1'
else:
output += '0'
# output += '1' if i & (1 << j) else '0'
print(output)
for i in range(-5, 6):
print('{} = '.format(i), end='')
Bbit_print(i) | [
"dpwl7484@gmail.com"
] | dpwl7484@gmail.com |
9672d446fc6d70d32c8ae69520ee2a435e1d1943 | 4e5b20fdcca20f458322f0a8cd11bbdacb6fb3e5 | /suning/api/selfmarket/OrderReturnAddRequest.py | d55710e0b5f5c80e89f665c34fb0b6e8f10f5a29 | [] | no_license | shijingyu/sunningAPI | 241f33b0660dc84635ce39688fed499f5c57a5da | 4a3b2ef7f9bdc4707d1eaff185bc7eb636fe90d5 | refs/heads/master | 2020-04-24T22:15:11.584028 | 2019-02-24T06:41:20 | 2019-02-24T06:41:20 | 172,305,179 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,388 | py | # -*- coding: utf-8 -*-
'''
Created on 2015-12-28
@author: suning
'''
from suning.api.abstract import AbstractApi
class OrderReturnAddRequest(AbstractApi):
    '''
    Request wrapper for the ``suning.fourps.orderreturn.add`` API call
    (create an order return). All request fields start out unset (None).
    '''
    def __init__(self):
        AbstractApi.__init__(self)
        # Order identity and routing.
        self.outOrderId = None
        self.oldOrderId = None
        self.orderSource = None
        # Expected pickup window.
        self.expectStartTime = None
        self.expectEndTime = None
        self.remark = None
        # Sender address and contact details.
        self.senderZipCode = None
        self.senderProvince = None
        self.senderCity = None
        self.senderArea = None
        self.senderTown = None
        self.senderAddress = None
        self.senderName = None
        self.senderMobile = None
        self.senderPhone = None
        # Flags and line items.
        self.takeFlag = None
        self.orderFlag = None
        self.orderProductList = None
        # Fields that must be non-empty before the request may be sent.
        self.setParamRule({
            name: {'allow_empty': False}
            for name in ('outOrderId', 'oldOrderId', 'orderSource',
                         'expectStartTime', 'expectEndTime',
                         'senderProvince', 'senderCity', 'senderName')
        })
    def getApiBizName(self):
        '''Business name of this API operation.'''
        return 'addOrderReturn'
    def getApiMethod(self):
        '''Remote method identifier of this API operation.'''
        return 'suning.fourps.orderreturn.add'
| [
"945090896@qq.com"
] | 945090896@qq.com |
3f208c733be675073a38549935c3b63d6bfcd83b | 706518f154812af56f8fc91a71cd65d9667d9ed0 | /python/paddle/tensor/stat.py | 8c74360a17d05bda855f338daf9ad6885fa1e2b6 | [
"Apache-2.0"
] | permissive | andreazanetti/Paddle | 3ea464703d67963134ffc6828f364412adb03fce | a259076dd01801e2e619237da02235a4856a96bb | refs/heads/develop | 2023-04-25T08:30:43.751734 | 2021-05-05T01:31:44 | 2021-05-05T01:31:44 | 263,870,069 | 0 | 2 | Apache-2.0 | 2020-07-07T10:45:08 | 2020-05-14T09:22:07 | null | UTF-8 | Python | false | false | 13,619 | py | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TODO: define statistical functions of a tensor
import numpy as np
from ..fluid.framework import Variable
from ..fluid.layer_helper import LayerHelper
from ..fluid.framework import core, in_dygraph_mode
from ..fluid import layers
from .search import where
from ..fluid.data_feeder import convert_dtype, check_variable_and_dtype, check_type, check_dtype
import paddle
__all__ = []
def mean(x, axis=None, keepdim=False, name=None):
    """
    Computes the mean of the input tensor's elements along ``axis``.
    Args:
        x (Tensor): The input Tensor with data type float32, float64.
        axis (int|list|tuple, optional): The axis along which to perform mean
            calculations. ``axis`` should be int, list(int) or tuple(int). If
            ``axis`` is a list/tuple of dimension(s), mean is calculated along
            all element(s) of ``axis`` . ``axis`` or element(s) of ``axis``
            should be in range [-D, D), where D is the dimensions of ``x`` . If
            ``axis`` or element(s) of ``axis`` is less than 0, it works the
            same way as :math:`axis + D` . If ``axis`` is None, mean is
            calculated over all elements of ``x``. Default is None.
        keepdim (bool, optional): Whether to reserve the reduced dimension(s)
            in the output Tensor. If ``keepdim`` is True, the dimensions of
            the output Tensor is the same as ``x`` except in the reduced
            dimensions(it is of size 1 in this case). Otherwise, the shape of
            the output Tensor is squeezed in ``axis`` . Default is False.
        name (str, optional): Name for the operation (optional, default is None).
            For more information, please refer to :ref:`api_guide_Name`.
    Returns:
        Tensor, results of average along ``axis`` of ``x``, with the same data
        type as ``x``.
    Examples:
        .. code-block:: python
            import paddle
            x = paddle.to_tensor([[[1., 2., 3., 4.],
                                   [5., 6., 7., 8.],
                                   [9., 10., 11., 12.]],
                                  [[13., 14., 15., 16.],
                                   [17., 18., 19., 20.],
                                   [21., 22., 23., 24.]]])
            out1 = paddle.mean(x)
            # [12.5]
            out2 = paddle.mean(x, axis=-1)
            # [[ 2.5  6.5 10.5]
            #  [14.5 18.5 22.5]]
            out3 = paddle.mean(x, axis=-1, keepdim=True)
            # [[[ 2.5]
            #   [ 6.5]
            #   [10.5]]
            #  [[14.5]
            #   [18.5]
            #   [22.5]]]
            out4 = paddle.mean(x, axis=[0, 2])
            # [ 8.5 12.5 16.5]
    """
    # Normalize a scalar axis to a one-element list.
    if isinstance(axis, int):
        axis = [axis]
    # Reduce over all dimensions when no axis is given, the axis list is
    # empty, or it names every dimension of ``x``.
    reduce_all = True if axis is None \
        or len(axis)==0 \
        or len(axis) == len(x.shape) else False
    if axis is None or len(axis) == 0:
        axis = [0]
    # Dynamic-graph fast path: call the C++ op directly.
    if in_dygraph_mode():
        return core.ops.reduce_mean(x, 'dim', axis, 'keep_dim', keepdim,
                                    'reduce_all', reduce_all)
    check_variable_and_dtype(x, 'x/input', ['float32', 'float64'],
                             'mean/reduce_mean')
    check_type(axis, 'axis/dim', (int, list, tuple), 'mean/reduce_mean')
    if isinstance(axis, (list, tuple)):
        for item in axis:
            check_type(item, 'elements of axis/dim', (int), 'mean/reduce_mean')
    # Static-graph path: append a reduce_mean op to the current program.
    helper = LayerHelper('mean', **locals())
    attrs = {'dim': axis, 'keep_dim': keepdim, 'reduce_all': reduce_all}
    out = helper.create_variable_for_type_inference(x.dtype)
    helper.append_op(
        type='reduce_mean', inputs={'X': x}, outputs={'Out': out}, attrs=attrs)
    return out
def var(x, axis=None, unbiased=True, keepdim=False, name=None):
    """
    Computes the variance of ``x`` along ``axis`` .
    Args:
        x (Tensor): The input Tensor with data type float32, float64.
        axis (int|list|tuple, optional): The axis along which to perform
            variance calculations. ``axis`` should be int, list(int) or
            tuple(int). If ``axis`` is a list/tuple of dimension(s), variance
            is calculated along all element(s) of ``axis`` . ``axis`` or
            element(s) of ``axis`` should be in range [-D, D), where D is the
            dimensions of ``x`` . If ``axis`` or element(s) of ``axis`` is less
            than 0, it works the same way as :math:`axis + D` . If ``axis`` is
            None, variance is calculated over all elements of ``x``. Default
            is None.
        unbiased (bool, optional): Whether to use the unbiased estimation. If
            ``unbiased`` is True, the divisor used in the computation is
            :math:`N - 1`, where :math:`N` represents the number of elements
            along ``axis`` , otherwise the divisor is :math:`N`. Default is True.
        keepdim (bool, optional): Whether to reserve the reduced dimension(s)
            in the output Tensor. If ``keepdim`` is True, the dimensions of
            the output Tensor is the same as ``x`` except in the reduced
            dimensions(it is of size 1 in this case). Otherwise, the shape of
            the output Tensor is squeezed in ``axis`` . Default is False.
        name (str, optional): Name for the operation (optional, default is None).
            For more information, please refer to :ref:`api_guide_Name`.
    Returns:
        Tensor, results of variance along ``axis`` of ``x``, with the same data
        type as ``x``.
    Examples:
        .. code-block:: python
            import paddle
            x = paddle.to_tensor([[1.0, 2.0, 3.0], [1.0, 4.0, 5.0]])
            out1 = paddle.var(x)
            # [2.66666667]
            out2 = paddle.var(x, axis=1)
            # [1. 4.33333333]
    """
    if not in_dygraph_mode():
        check_variable_and_dtype(x, 'x', ['float32', 'float64'], 'var')
    # Mean with kept dimensions so it broadcasts against ``x`` below.
    u = mean(x, axis, True, name)
    out = paddle.sum((x - u)**2, axis, keepdim=keepdim, name=name)
    # Number of elements reduced per output position (numel ratio).
    n = paddle.cast(paddle.numel(x), x.dtype) \
        / paddle.cast(paddle.numel(out), x.dtype)
    if unbiased:
        # Bessel's correction (divide by N - 1), clamped at 1 so a
        # single-element reduction does not divide by zero.
        one_const = paddle.ones([1], x.dtype)
        n = where(n > one_const, n - 1., one_const)
    out /= n
    return out
def std(x, axis=None, unbiased=True, keepdim=False, name=None):
    """
    Computes the standard-deviation of ``x`` along ``axis`` .
    Args:
        x (Tensor): The input Tensor with data type float32, float64.
        axis (int|list|tuple, optional): The axis along which to perform
            standard-deviation calculations. ``axis`` should be int, list(int)
            or tuple(int). If ``axis`` is a list/tuple of dimension(s),
            standard-deviation is calculated along all element(s) of ``axis`` .
            ``axis`` or element(s) of ``axis`` should be in range [-D, D),
            where D is the dimensions of ``x`` . If ``axis`` or element(s) of
            ``axis`` is less than 0, it works the same way as :math:`axis + D` .
            If ``axis`` is None, standard-deviation is calculated over all
            elements of ``x``. Default is None.
        unbiased (bool, optional): Whether to use the unbiased estimation. If
            ``unbiased`` is True, the standard-deviation is calculated via the
            unbiased estimator. If ``unbiased`` is True, the divisor used in
            the computation is :math:`N - 1`, where :math:`N` represents the
            number of elements along ``axis`` , otherwise the divisor is
            :math:`N`. Default is True.
        keepdim (bool, optional): Whether to reserve the reduced dimension(s)
            in the output Tensor. If ``keepdim`` is True, the dimensions of
            the output Tensor is the same as ``x`` except in the reduced
            dimensions(it is of size 1 in this case). Otherwise, the shape of
            the output Tensor is squeezed in ``axis`` . Default is False.
        name (str, optional): Name for the operation (optional, default is None).
            For more information, please refer to :ref:`api_guide_Name`.
    Returns:
        Tensor, results of standard-deviation along ``axis`` of ``x``, with the
        same data type as ``x``.
    Examples:
        .. code-block:: python
            import paddle
            x = paddle.to_tensor([[1.0, 2.0, 3.0], [1.0, 4.0, 5.0]])
            out1 = paddle.std(x)
            # [1.63299316]
            out2 = paddle.std(x, axis=1)
            # [1. 2.081666]
    """
    if not in_dygraph_mode():
        check_variable_and_dtype(x, 'x', ['float32', 'float64'], 'std')
    # Standard deviation is the square root of the variance. Forward the
    # arguments explicitly instead of the original ``var(**locals())``,
    # which silently breaks if any local is introduced above this call.
    out = var(x, axis=axis, unbiased=unbiased, keepdim=keepdim, name=name)
    return paddle.sqrt(out)
def numel(x, name=None):
    """
    Returns the number of elements for a tensor, which is a int64 Tensor with shape [1] in static mode
    or a scalar value in imperative mode

    Args:
        x (Tensor): The input Tensor, it's data type can be bool, float16, float32, float64, int32, int64.
        name (str, optional): Name for the operation (optional, default is None).

    Returns:
        Tensor: The number of elements for the input Tensor.

    Raises:
        TypeError: If ``x`` is not a Variable (static-graph mode only).

    Examples:
        .. code-block:: python

            import paddle
            x = paddle.full(shape=[4, 5, 7], fill_value=0, dtype='int32')
            numel = paddle.numel(x) # 140
    """
    # Imperative (dygraph) mode: delegate directly to the C++ op and return
    # the scalar result.
    if in_dygraph_mode():
        return core.ops.size(x)
    # Static-graph mode: validate the input, then append a 'size' op that
    # produces an int64 tensor of shape [1].
    if not isinstance(x, Variable):
        raise TypeError("x must be a Tensor in numel")
    # NOTE(review): ``locals()`` forwards {x, name} to LayerHelper; avoid
    # adding new locals above this line.
    helper = LayerHelper('numel', **locals())
    out = helper.create_variable_for_type_inference(
        dtype=core.VarDesc.VarType.INT64)
    helper.append_op(type='size', inputs={'Input': x}, outputs={'Out': out})
    return out
def median(x, axis=None, keepdim=False, name=None):
    """
    Compute the median along the specified axis.

    Args:
        x (Tensor): The input Tensor, it's data type can be bool, float16, float32, float64, int32, int64.
        axis (int, optional): The axis along which to perform median calculations ``axis`` should be int.
            ``axis`` should be in range [-D, D), where D is the dimensions of ``x`` .
            If ``axis`` is less than 0, it works the same way as :math:`axis + D`.
            If ``axis`` is None, median is calculated over all elements of ``x``. Default is None.
        keepdim (bool, optional): Whether to reserve the reduced dimension(s)
            in the output Tensor. If ``keepdim`` is True, the dimensions of
            the output Tensor is the same as ``x`` except in the reduced
            dimensions(it is of size 1 in this case). Otherwise, the shape of
            the output Tensor is squeezed in ``axis`` . Default is False.
        name (str, optional): Name for the operation (optional, default is None).
            For more information, please refer to :ref:`api_guide_Name`.

    Returns:
        Tensor, results of median along ``axis`` of ``x``. If data type of ``x`` is float64, data type of results will be float64, otherwise data type will be float32.

    Examples:
        .. code-block:: python

            import paddle
            x = paddle.arange(12).reshape([3, 4])
            # x is [[0 , 1 , 2 , 3 ],
            #       [4 , 5 , 6 , 7 ],
            #       [8 , 9 , 10, 11]]
            y1 = paddle.median(x)
            # y1 is [5.5]
            y2 = paddle.median(x, axis=0)
            # y2 is [4., 5., 6., 7.]
            y3 = paddle.median(x, axis=1)
            # y3 is [1.5, 5.5, 9.5]
            y4 = paddle.median(x, axis=0, keepdim=True)
            # y4 is [[4., 5., 6., 7.]]
    """
    if not isinstance(x, Variable):
        raise TypeError("In median, the input x should be a Tensor.")
    # axis=None means "median over all elements": flatten to 1-D and reduce
    # along the single remaining axis.
    is_flatten = axis is None
    dims = len(x.shape)  # original rank, captured before any flatten
    if is_flatten:
        x = paddle.flatten(x)
        axis = 0
    else:
        if not isinstance(axis, int) or not (axis < dims and axis >= -dims):
            raise ValueError(
                "In median, axis should be none or an integer in range [-rank(x), rank(x))."
            )
        # Normalize a negative axis to its positive equivalent.
        if axis < 0:
            axis += dims
    sz = x.shape[axis]
    kth = sz >> 1  # 0-based index of the upper-middle order statistic
    # The median only needs the (kth+1) smallest values along ``axis``;
    # topk with largest=False returns them sorted in ascending order.
    tensor_topk, idx = paddle.topk(x, kth + 1, axis=axis, largest=False)
    # Result dtype is float32 unless the input is float64 (see docstring).
    dtype = 'float64' if x.dtype == core.VarDesc.VarType.FP64 else 'float32'
    if sz & 1 == 0:
        # Even count: the median is the mean of the two middle order
        # statistics (positions kth-1 and kth of the ascending topk result).
        out_tensor = paddle.slice(
            tensor_topk, axes=[axis], starts=[kth - 1],
            ends=[kth]) + paddle.slice(
                tensor_topk, axes=[axis], starts=[kth], ends=[kth + 1])
        out_tensor = paddle.cast(out_tensor, dtype=dtype) / 2
    else:
        # Odd count: the median is the single middle element at position kth.
        out_tensor = paddle.cast(
            paddle.slice(
                tensor_topk, axes=[axis], starts=[kth], ends=[kth + 1]),
            dtype=dtype)
    # Output shape:
    #   keepdim=False, axis given -> reduced axis dropped
    #   axis=None,  keepdim=False -> shape [1]
    #   axis=None,  keepdim=True  -> shape [1] * rank(original x)
    #   keepdim=True, axis given  -> keep the size-1 reduced axis as-is
    if not keepdim or is_flatten:
        if not is_flatten:
            newshape = x.shape[:axis] + x.shape[axis + 1:]
        elif not keepdim:
            newshape = [1]
        else:
            newshape = [1] * dims
    else:
        newshape = out_tensor.shape
    # NOTE(review): no NaN handling -- NaNs would corrupt the topk ordering;
    # presumably callers pass finite values. Confirm upstream.
    out_tensor = out_tensor.reshape(newshape, name=name)
    return out_tensor
| [
"noreply@github.com"
] | andreazanetti.noreply@github.com |
782c942b2fc75fcf6b9a2197fdadadf5d66b9070 | 378d848acbcb495c164af9766c80ea8817d4b510 | /字符串/重排回文.py | 2b1e9b0545ff3a5c1808a080385f2da35947fe3a | [] | no_license | youkaede77/Data-Structure-and-Algorithm | fa0fb70e6016f8d0290310d356a8b1830867eed6 | 068fa82f29dc8ad41f00055cc76aeb6b8c24410d | refs/heads/master | 2022-04-09T06:52:18.448641 | 2020-02-24T16:15:47 | 2020-02-24T16:15:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 630 | py | # 对一个字符串进行重排,是否存在回文
# e.g. "aabccdd" -> "acdbdca": yes, a palindrome permutation exists
# e.g. "abc": no
# Idea:
# If len(s) is even, every letter must occur an even number of times.
# If len(s) is odd, exactly one letter may occur an odd number of times.
from collections import Counter
def is_permucate_palindrome(s):
    """Return True if some permutation of ``s`` is a palindrome.

    A string can be rearranged into a palindrome iff at most one character
    occurs an odd number of times (zero odd counts are required for an
    even-length string, and string-length parity forces that automatically,
    so the two cases collapse into a single rule).

    The original implementation interleaved per-length-parity checks with
    the counting loop; the checks lagged one increment behind and were only
    correct thanks to a non-obvious parity invariant. This states the rule
    directly.
    """
    # Count how many distinct characters have an odd number of occurrences.
    odd_counts = sum(c % 2 for c in Counter(s).values())
    return odd_counts <= 1
print(is_permucate_palindrome('abcac')) | [
"weidafeng.edu@gmail.com"
] | weidafeng.edu@gmail.com |
f58290a7a2ebacbca7a192937e431cc851c73eb5 | 55b2068319ac68d89a086e43cae83ba6f440f9d5 | /test/test_tag_matcher.py | fa33fe9bb2ddbf5c7ccb7b8a48f35767dfd07c54 | [
"BSD-2-Clause"
] | permissive | sunliwen/behave | ecf88caf378296667ebc9b5527538725963b40a9 | 6debee106efc99c7d7a9ff4b9577abe8bf5fd31c | refs/heads/master | 2021-01-21T06:18:15.144832 | 2018-03-15T12:45:48 | 2018-03-15T12:45:48 | 27,376,204 | 1 | 1 | null | 2018-03-15T12:45:49 | 2014-12-01T11:24:30 | Python | UTF-8 | Python | false | false | 16,427 | py | # -*- coding: utf-8 -*-
from behave.tag_matcher import *
from mock import Mock
from unittest import TestCase
class TestOnlyWithCategoryTagMatcher(TestCase):
    """Tests for OnlyWithCategoryTagMatcher with a single category ("xxx")
    whose active value is "alice".

    Tag vocabulary used below:
      * active_tag  -- category tag matching the current value ("alice")
      * similar_tag -- same category, value that merely starts like the
                       active one ("alice2"); must NOT match
      * other_tag   -- same category, different value ("other")
    """
    TagMatcher = OnlyWithCategoryTagMatcher

    def setUp(self):
        category = "xxx"
        self.tag_matcher = OnlyWithCategoryTagMatcher(category, "alice")
        self.active_tag = self.TagMatcher.make_category_tag(category, "alice")
        self.similar_tag = self.TagMatcher.make_category_tag(category, "alice2")
        self.other_tag = self.TagMatcher.make_category_tag(category, "other")
        self.category = category

    def test_should_exclude_with__returns_false_with_active_tag(self):
        tags = [ self.active_tag ]
        self.assertEqual(False, self.tag_matcher.should_exclude_with(tags))

    def test_should_exclude_with__returns_false_with_active_tag_and_more(self):
        # The active tag wins regardless of its position among other tags.
        test_patterns = [
            ([ self.active_tag, self.other_tag ], "CASE: first"),
            ([ self.other_tag, self.active_tag ], "CASE: last"),
            ([ "foo", self.active_tag, self.other_tag, "bar" ], "CASE: middle"),
        ]
        for tags, case in test_patterns:
            self.assertEqual(False, self.tag_matcher.should_exclude_with(tags),
                             "%s: tags=%s" % (case, tags))

    def test_should_exclude_with__returns_true_with_other_tag(self):
        tags = [ self.other_tag ]
        self.assertEqual(True, self.tag_matcher.should_exclude_with(tags))

    def test_should_exclude_with__returns_true_with_other_tag_and_more(self):
        test_patterns = [
            ([ self.other_tag, "foo" ], "CASE: first"),
            ([ "foo", self.other_tag ], "CASE: last"),
            ([ "foo", self.other_tag, "bar" ], "CASE: middle"),
        ]
        for tags, case in test_patterns:
            self.assertEqual(True, self.tag_matcher.should_exclude_with(tags),
                             "%s: tags=%s" % (case, tags))

    def test_should_exclude_with__returns_true_with_similar_tag(self):
        # "alice2" is not "alice": prefix similarity must not count as a match.
        tags = [ self.similar_tag ]
        self.assertEqual(True, self.tag_matcher.should_exclude_with(tags))

    def test_should_exclude_with__returns_true_with_similar_and_more(self):
        test_patterns = [
            ([ self.similar_tag, "foo" ], "CASE: first"),
            ([ "foo", self.similar_tag ], "CASE: last"),
            ([ "foo", self.similar_tag, "bar" ], "CASE: middle"),
        ]
        for tags, case in test_patterns:
            self.assertEqual(True, self.tag_matcher.should_exclude_with(tags),
                             "%s: tags=%s" % (case, tags))

    def test_should_exclude_with__returns_false_without_category_tag(self):
        # Plain (non-category) tags never cause exclusion.
        test_patterns = [
            ([ ],               "CASE: No tags"),
            ([ "foo" ],         "CASE: One tag"),
            ([ "foo", "bar" ],  "CASE: Two tags"),
        ]
        for tags, case in test_patterns:
            self.assertEqual(False, self.tag_matcher.should_exclude_with(tags),
                             "%s: tags=%s" % (case, tags))

    def test_should_run_with__negates_result_of_should_exclude_with(self):
        # Invariant: should_run_with(tags) == not should_exclude_with(tags).
        test_patterns = [
            ([ ],                   "CASE: No tags"),
            ([ "foo" ],             "CASE: One non-category tag"),
            ([ "foo", "bar" ],      "CASE: Two non-category tags"),
            ([ self.active_tag ],   "CASE: active tag"),
            ([ self.active_tag, self.other_tag ],   "CASE: active and other tag"),
            ([ self.active_tag, "foo" ],            "CASE: active and foo tag"),
            ([ self.other_tag ],                    "CASE: other tag"),
            ([ self.other_tag, "foo" ],             "CASE: other and foo tag"),
            ([ self.similar_tag ],                  "CASE: similar tag"),
            ([ "foo", self.similar_tag ],           "CASE: foo and similar tag"),
        ]
        for tags, case in test_patterns:
            result1 = self.tag_matcher.should_run_with(tags)
            result2 = self.tag_matcher.should_exclude_with(tags)
            self.assertEqual(result1, not result2, "%s: tags=%s" % (case, tags))
            self.assertEqual(not result1, result2, "%s: tags=%s" % (case, tags))

    def test_make_category_tag__returns_category_tag_prefix_without_value(self):
        category = "xxx"
        tag1 = OnlyWithCategoryTagMatcher.make_category_tag(category)
        tag2 = OnlyWithCategoryTagMatcher.make_category_tag(category, None)
        tag3 = OnlyWithCategoryTagMatcher.make_category_tag(category, value=None)
        self.assertEqual("only.with_xxx=", tag1)
        self.assertEqual("only.with_xxx=", tag2)
        self.assertEqual("only.with_xxx=", tag3)
        self.assertTrue(tag1.startswith(OnlyWithCategoryTagMatcher.tag_prefix))

    def test_make_category_tag__returns_category_tag_with_value(self):
        category = "xxx"
        tag1 = OnlyWithCategoryTagMatcher.make_category_tag(category, "alice")
        tag2 = OnlyWithCategoryTagMatcher.make_category_tag(category, "bob")
        self.assertEqual("only.with_xxx=alice", tag1)
        self.assertEqual("only.with_xxx=bob", tag2)

    def test_make_category_tag__returns_category_tag_with_tag_prefix(self):
        # A custom tag prefix replaces the default "only.with_" prefix.
        my_tag_prefix = "ONLY_WITH."
        category = "xxx"
        TagMatcher = OnlyWithCategoryTagMatcher
        tag0 = TagMatcher.make_category_tag(category, tag_prefix=my_tag_prefix)
        tag1 = TagMatcher.make_category_tag(category, "alice", my_tag_prefix)
        tag2 = TagMatcher.make_category_tag(category, "bob", tag_prefix=my_tag_prefix)
        self.assertEqual("ONLY_WITH.xxx=", tag0)
        self.assertEqual("ONLY_WITH.xxx=alice", tag1)
        self.assertEqual("ONLY_WITH.xxx=bob", tag2)
        self.assertTrue(tag1.startswith(my_tag_prefix))

    def test_ctor__with_tag_prefix(self):
        # Only tags carrying the configured prefix are selected as category tags.
        tag_prefix = "ONLY_WITH."
        tag_matcher = OnlyWithCategoryTagMatcher("xxx", "alice", tag_prefix)
        tags = ["foo", "ONLY_WITH.xxx=foo", "only.with_xxx=bar", "bar"]
        actual_tags = tag_matcher.select_category_tags(tags)
        self.assertEqual(["ONLY_WITH.xxx=foo"], actual_tags)
class TestOnlyWithAnyCategoryTagMatcher(TestCase):
    """Tests for OnlyWithAnyCategoryTagMatcher with two known categories:
    "foo" (active value "alice") and "bar" (active value "BOB").

    A scenario should run only if ALL of its category tags match the current
    values; tags from categories unknown to the value provider are ignored.
    """
    TagMatcher = OnlyWithAnyCategoryTagMatcher

    def setUp(self):
        category_value_provider = {
            "foo": "alice",
            "bar": "BOB",
        }
        TagMatcher = OnlyWithCategoryTagMatcher
        self.tag_matcher = OnlyWithAnyCategoryTagMatcher(category_value_provider)
        self.category1_active_tag = TagMatcher.make_category_tag("foo", "alice")
        self.category1_similar_tag = TagMatcher.make_category_tag("foo", "alice2")
        self.category1_other_tag = TagMatcher.make_category_tag("foo", "bob")
        self.category2_active_tag = TagMatcher.make_category_tag("bar", "BOB")
        self.category2_similar_tag = TagMatcher.make_category_tag("bar", "BOB2")
        self.category2_other_tag = TagMatcher.make_category_tag("bar", "CHARLY")
        # Category not present in the value provider at all.
        self.unknown_category_tag = TagMatcher.make_category_tag("UNKNOWN", "one")

    def test_should_exclude_with__returns_false_with_active_tag(self):
        tags1 = [ self.category1_active_tag ]
        tags2 = [ self.category2_active_tag ]
        self.assertEqual(False, self.tag_matcher.should_exclude_with(tags1))
        self.assertEqual(False, self.tag_matcher.should_exclude_with(tags2))

    def test_should_exclude_with__returns_false_with_active_tag_and_more(self):
        test_patterns = [
            ([ self.category1_active_tag, self.category1_other_tag ], "CASE: first"),
            ([ self.category1_other_tag, self.category1_active_tag ], "CASE: last"),
            ([ "foo", self.category1_active_tag, self.category1_other_tag, "bar" ], "CASE: middle"),
        ]
        for tags, case in test_patterns:
            self.assertEqual(False, self.tag_matcher.should_exclude_with(tags),
                             "%s: tags=%s" % (case, tags))

    def test_should_exclude_with__returns_true_with_other_tag(self):
        tags = [ self.category1_other_tag ]
        self.assertEqual(True, self.tag_matcher.should_exclude_with(tags))

    def test_should_exclude_with__returns_true_with_other_tag_and_more(self):
        test_patterns = [
            ([ self.category1_other_tag, "foo" ], "CASE: first"),
            ([ "foo", self.category1_other_tag ], "CASE: last"),
            ([ "foo", self.category1_other_tag, "bar" ], "CASE: middle"),
        ]
        for tags, case in test_patterns:
            self.assertEqual(True, self.tag_matcher.should_exclude_with(tags),
                             "%s: tags=%s" % (case, tags))

    def test_should_exclude_with__returns_true_with_similar_tag(self):
        # "alice2" must not be treated as a match for value "alice".
        tags = [ self.category1_similar_tag ]
        self.assertEqual(True, self.tag_matcher.should_exclude_with(tags))

    def test_should_exclude_with__returns_true_with_similar_and_more(self):
        test_patterns = [
            ([ self.category1_similar_tag, "foo" ], "CASE: first"),
            ([ "foo", self.category1_similar_tag ], "CASE: last"),
            ([ "foo", self.category1_similar_tag, "bar" ], "CASE: middle"),
        ]
        for tags, case in test_patterns:
            self.assertEqual(True, self.tag_matcher.should_exclude_with(tags),
                             "%s: tags=%s" % (case, tags))

    def test_should_exclude_with__returns_false_without_category_tag(self):
        test_patterns = [
            ([ ],               "CASE: No tags"),
            ([ "foo" ],         "CASE: One tag"),
            ([ "foo", "bar" ],  "CASE: Two tags"),
        ]
        for tags, case in test_patterns:
            self.assertEqual(False, self.tag_matcher.should_exclude_with(tags),
                             "%s: tags=%s" % (case, tags))

    def test_should_exclude_with__returns_false_with_unknown_category_tag(self):
        """Tags from unknown categories, not supported by category_value_provider,
        should not be excluded.
        """
        tags = [ self.unknown_category_tag ]
        self.assertEqual("only.with_UNKNOWN=one", self.unknown_category_tag)
        self.assertEqual(None, self.tag_matcher.category_value_provider.get("UNKNOWN"))
        self.assertEqual(False, self.tag_matcher.should_exclude_with(tags))

    def test_should_exclude_with__combinations_of_2_categories(self):
        # Truth table: a scenario runs only when every known category tag is
        # active; unknown-category tags are neutral.
        test_patterns = [
            ("CASE 00: 2 inactive category tags", True,
             [ self.category1_other_tag, self.category2_other_tag]),
            ("CASE 01: inactive and active category tags", True,
             [ self.category1_other_tag, self.category2_active_tag]),
            ("CASE 10: active and inactive category tags", True,
             [ self.category1_active_tag, self.category2_other_tag]),
            ("CASE 11: 2 active category tags", False,  # -- SHOULD-RUN
             [ self.category1_active_tag, self.category2_active_tag]),
            # -- SPECIAL CASE: With unknown category
            ("CASE 0x: inactive and unknown category tags", True,
             [ self.category1_other_tag, self.unknown_category_tag]),
            ("CASE 1x: active and unknown category tags", False,  # SHOULD-RUN
             [ self.category1_active_tag, self.unknown_category_tag]),
        ]
        for case, expected, tags in test_patterns:
            actual_result = self.tag_matcher.should_exclude_with(tags)
            self.assertEqual(expected, actual_result,
                             "%s: tags=%s" % (case, tags))

    def test_should_run_with__negates_result_of_should_exclude_with(self):
        # Invariant: should_run_with(tags) == not should_exclude_with(tags).
        test_patterns = [
            ([ ],                   "CASE: No tags"),
            ([ "foo" ],             "CASE: One non-category tag"),
            ([ "foo", "bar" ],      "CASE: Two non-category tags"),
            ([ self.category1_active_tag ], "CASE: active tag"),
            ([ self.category1_active_tag, self.category1_other_tag ], "CASE: active and other tag"),
            ([ self.category1_active_tag, "foo" ], "CASE: active and foo tag"),
            ([ self.category1_other_tag ], "CASE: other tag"),
            ([ self.category1_other_tag, "foo" ], "CASE: other and foo tag"),
            ([ self.category1_similar_tag ], "CASE: similar tag"),
            ([ "foo", self.category1_similar_tag ], "CASE: foo and similar tag"),
        ]
        for tags, case in test_patterns:
            result1 = self.tag_matcher.should_run_with(tags)
            result2 = self.tag_matcher.should_exclude_with(tags)
            self.assertEqual(result1, not result2, "%s: tags=%s" % (case, tags))
            self.assertEqual(not result1, result2, "%s: tags=%s" % (case, tags))
class TestPredicateTagMatcher(TestCase):
    """Tests for PredicateTagMatcher: exclusion is delegated verbatim to a
    user-supplied predicate function called with the tag list.
    """

    def test_exclude_with__mechanics(self):
        # Verify the predicate is called exactly once with the tag list and
        # that its return value is passed through unchanged.
        predicate_function_blueprint = lambda tags: False
        predicate_function = Mock(predicate_function_blueprint)
        predicate_function.return_value = True
        tag_matcher = PredicateTagMatcher(predicate_function)
        tags = [ "foo", "bar" ]
        self.assertEqual(True, tag_matcher.should_exclude_with(tags))
        predicate_function.assert_called_once_with(tags)
        self.assertEqual(True, predicate_function(tags))

    def test_should_exclude_with__returns_true_when_predicate_is_true(self):
        predicate_always_true = lambda tags: True
        tag_matcher1 = PredicateTagMatcher(predicate_always_true)
        tags = [ "foo", "bar" ]
        self.assertEqual(True, tag_matcher1.should_exclude_with(tags))
        self.assertEqual(True, predicate_always_true(tags))

    def test_should_exclude_with__returns_true_when_predicate_is_true2(self):
        # -- CASE: Use predicate function instead of lambda.
        def predicate_contains_foo(tags):
            return any(x == "foo" for x in tags)
        tag_matcher2 = PredicateTagMatcher(predicate_contains_foo)
        tags = [ "foo", "bar" ]
        self.assertEqual(True, tag_matcher2.should_exclude_with(tags))
        self.assertEqual(True, predicate_contains_foo(tags))

    def test_should_exclude_with__returns_false_when_predicate_is_false(self):
        predicate_always_false = lambda tags: False
        tag_matcher1 = PredicateTagMatcher(predicate_always_false)
        tags = [ "foo", "bar" ]
        self.assertEqual(False, tag_matcher1.should_exclude_with(tags))
        self.assertEqual(False, predicate_always_false(tags))
class TestCompositeTagMatcher(TestCase):
    """Tests for CompositeTagMatcher: excludes when ANY of its contained
    tag matchers excludes (logical OR over should_exclude_with results).
    """

    @staticmethod
    def count_tag_matcher_with_result(tag_matchers, tags, result_value):
        # Helper: how many of the given matchers produce ``result_value``
        # for this tag list.
        count = 0
        for tag_matcher in tag_matchers:
            current_result = tag_matcher.should_exclude_with(tags)
            if current_result == result_value:
                count += 1
        return count

    def setUp(self):
        # Composite of: a matcher that never excludes, and one that excludes
        # when the exact tag "foo" is present.
        predicate_false = lambda tags: False
        predicate_contains_foo = lambda tags: any(x == "foo" for x in tags)
        self.tag_matcher_false = PredicateTagMatcher(predicate_false)
        self.tag_matcher_foo = PredicateTagMatcher(predicate_contains_foo)
        tag_matchers = [
            self.tag_matcher_foo,
            self.tag_matcher_false
        ]
        self.ctag_matcher = CompositeTagMatcher(tag_matchers)

    def test_should_exclude_with__returns_true_when_any_tag_matcher_returns_true(self):
        test_patterns = [
            ("CASE: with foo", ["foo", "bar"]),
            ("CASE: with foo2", ["foozy", "foo", "bar"]),
        ]
        for case, tags in test_patterns:
            actual_result = self.ctag_matcher.should_exclude_with(tags)
            self.assertEqual(True, actual_result,
                             "%s: tags=%s" % (case, tags))
            # Exactly one inner matcher (the "foo" one) should have excluded.
            actual_true_count = self.count_tag_matcher_with_result(
                self.ctag_matcher.tag_matchers, tags, True)
            self.assertEqual(1, actual_true_count)

    def test_should_exclude_with__returns_false_when_no_tag_matcher_return_true(self):
        # "fool"/"foozy" contain "foo" as a substring but are not equal to it.
        test_patterns = [
            ("CASE: without foo", ["fool", "bar"]),
            ("CASE: without foo2", ["foozy", "bar"]),
        ]
        for case, tags in test_patterns:
            actual_result = self.ctag_matcher.should_exclude_with(tags)
            self.assertEqual(False, actual_result,
                             "%s: tags=%s" % (case, tags))
            actual_true_count = self.count_tag_matcher_with_result(
                self.ctag_matcher.tag_matchers, tags, True)
            self.assertEqual(0, actual_true_count)
| [
"jenisys@users.noreply.github.com"
] | jenisys@users.noreply.github.com |
f8bcbca4ba2bd7e3422b4fb0acf48f8fde4128ae | 5730110af5e4f0abe538ed7825ddd62c79bc3704 | /pacu/pacu/core/svc/vstim/clock/tcp/__init__.py | e5209a96fc0aebe9b6cdcd813c6e40f09bc677ce | [] | no_license | jzeitoun/pacu-v2 | bdbb81def96a2d87171ca20b89c878b2f66975e7 | 0ccb254a658263b4fe8c80ea623f860cb7dc1428 | refs/heads/master | 2021-06-03T18:50:50.890399 | 2020-04-27T16:31:59 | 2020-04-27T16:31:59 | 110,889,657 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 128 | py | from pacu.core.svc.vstim.clock.tcp.oneway import OnewayTCPClock
from pacu.core.svc.vstim.clock.tcp.twoway import TwowayTCPClock
| [
"jzeitoun@uci.edu"
] | jzeitoun@uci.edu |
2ef0ef2379ebf4494a924278b069c3642cd630f3 | de428c011b56db862f05ec0ceab17b85f83f94b1 | /pythongame/game_data/abilities/ability_fireball.py | 34724c8299dfd5c68821f72061ffdb217bf7c69f | [] | no_license | risooonho/python-2d-game | c6d1fceaf09c72a6f7573230a4a899bf79164b7f | 24b02646ed56f9017069b243b774e0ee46951aea | refs/heads/master | 2021-05-17T06:02:13.538699 | 2020-02-15T23:59:54 | 2020-02-15T23:59:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,794 | py | import random
from pythongame.core.ability_effects import register_ability_effect, AbilityWasUsedSuccessfully, AbilityResult
from pythongame.core.buff_effects import get_buff_effect, AbstractBuffEffect, register_buff_effect
from pythongame.core.common import Sprite, ProjectileType, AbilityType, Millis, \
Direction, SoundId, BuffType, PeriodicTimer, HeroUpgradeId
from pythongame.core.damage_interactions import deal_player_damage_to_enemy, DamageType
from pythongame.core.game_data import register_ability_data, AbilityData, UiIconSprite, \
register_ui_icon_sprite_path, register_entity_sprite_map, ABILITIES
from pythongame.core.game_state import GameState, WorldEntity, Projectile, NonPlayerCharacter
from pythongame.core.hero_upgrades import register_hero_upgrade_effect
from pythongame.core.math import get_position_from_center_position, translate_in_direction
from pythongame.core.projectile_controllers import create_projectile_controller, AbstractProjectileController, \
register_projectile_controller
from pythongame.core.sound_player import play_sound
from pythongame.core.view.image_loading import SpriteSheet
from pythongame.core.visual_effects import VisualCircle, VisualParticleSystem
# Note: Projectile size must be smaller than hero entity size (otherwise you get a collision when shooting next to wall)
FIREBALL_MANA_COST = 4
# Mana cost after the ABILITY_FIREBALL_MANA_COST hero upgrade is applied.
FIREBALL_UPGRADED_MANA_COST = 3
PROJECTILE_SIZE = (28, 28)
# Impact damage is drawn uniformly from [MIN_DMG, MAX_DMG).
MIN_DMG = 3
MAX_DMG = 4
# Burn talent: the buff ticks once per interval for the whole duration,
# so total burn damage equals duration / interval ticks (1 damage each).
FIREBALL_TALENT_BURN_DURATION = Millis(2500)
FIREBALL_TALENT_BURN_INTERVAL = Millis(500)
FIREBALL_TALENT_BURN_TOTAL_DAMAGE = int(round(FIREBALL_TALENT_BURN_DURATION / FIREBALL_TALENT_BURN_INTERVAL))
BUFF_TYPE = BuffType.BURNT_BY_FIREBALL
def _create_visual_splash(effect_position, game_state):
    """Spawn the fireball impact visuals at *effect_position*.

    Appends two effects to ``game_state.visual_effects``: a short-lived
    expanding circle (the impact flash) followed by a burst of reddish
    particles.
    """
    effects = game_state.visual_effects
    # Impact flash: circle growing from radius 22 to 45 over 100 ms.
    impact_flash = VisualCircle((250, 100, 50), effect_position, 22, 45, Millis(100), 0)
    effects.append(impact_flash)
    # Particle burst in three red/orange shades around the impact point.
    particle_burst = VisualParticleSystem(
        num_particles=10,
        position=effect_position,
        colors=[(250, 100, 100), (250, 50, 100), (250, 100, 50)],
        alpha=100,
        duration_interval=(Millis(50), Millis(200)))
    effects.append(particle_burst)
class ProjectileController(AbstractProjectileController):
    """Controls the player's fireball projectile: damage on enemy hit,
    splash visuals, optional burn debuff, and removal on any collision.
    """

    def __init__(self):
        # 1500 is passed to the base controller; presumably the projectile's
        # max lifetime in milliseconds -- confirm against
        # AbstractProjectileController.
        super().__init__(1500)

    def apply_enemy_collision(self, npc: NonPlayerCharacter, game_state: GameState, projectile: Projectile):
        # Uniform random damage in [MIN_DMG, MAX_DMG).
        damage_amount: float = MIN_DMG + random.random() * (MAX_DMG - MIN_DMG)
        deal_player_damage_to_enemy(game_state, npc, damage_amount, DamageType.MAGIC)
        _create_visual_splash(npc.world_entity.get_center_position(), game_state)
        # With the burn talent, the hit also applies a damage-over-time buff.
        has_burn_upgrade = game_state.player_state.has_upgrade(HeroUpgradeId.ABILITY_FIREBALL_BURN)
        if has_burn_upgrade:
            npc.gain_buff_effect(get_buff_effect(BUFF_TYPE), FIREBALL_TALENT_BURN_DURATION)
        play_sound(SoundId.ABILITY_FIREBALL_HIT)
        # The projectile is consumed by the first enemy it hits.
        projectile.has_collided_and_should_be_removed = True

    def apply_wall_collision(self, game_state: GameState, projectile: Projectile):
        # Walls only produce the splash + sound; no damage.
        _create_visual_splash(projectile.world_entity.get_center_position(), game_state)
        play_sound(SoundId.ABILITY_FIREBALL_HIT)
        projectile.has_collided_and_should_be_removed = True
class BurntByFireball(AbstractBuffEffect):
    """Damage-over-time debuff applied by the fireball burn talent:
    deals 1 magic damage per FIREBALL_TALENT_BURN_INTERVAL while active,
    with a small red circle visual on each tick.
    """

    def __init__(self):
        self.timer = PeriodicTimer(FIREBALL_TALENT_BURN_INTERVAL)

    def apply_middle_effect(self, game_state: GameState, buffed_entity: WorldEntity, buffed_npc: NonPlayerCharacter,
                            time_passed: Millis):
        # Tick once per interval, not every frame.
        if self.timer.update_and_check_if_ready(time_passed):
            deal_player_damage_to_enemy(game_state, buffed_npc, 1, DamageType.MAGIC)
            game_state.visual_effects.append(
                VisualCircle((180, 50, 50), buffed_npc.world_entity.get_center_position(), 10, 20, Millis(50), 0,
                             buffed_entity))

    def get_buff_type(self):
        return BUFF_TYPE
def _apply_ability(game_state: GameState) -> AbilityResult:
    """Fire the fireball ability: spawn a projectile in front of the player,
    show a muzzle-flash visual, and (unless the mage "light-footed" upgrade
    is owned) briefly slow the player with a recovery buff.

    Always succeeds; mana cost/cooldown are handled by the ability framework.
    """
    player_entity = game_state.player_entity
    # Spawn the projectile 35 units ahead of the player, in the facing direction.
    distance_from_player = 35
    projectile_pos = translate_in_direction(
        get_position_from_center_position(player_entity.get_center_position(), PROJECTILE_SIZE),
        player_entity.direction,
        distance_from_player)
    projectile_speed = 0.3
    entity = WorldEntity(projectile_pos, PROJECTILE_SIZE, Sprite.PROJECTILE_PLAYER_FIREBALL, player_entity.direction,
                         projectile_speed)
    projectile = Projectile(entity, create_projectile_controller(ProjectileType.PLAYER_FIREBALL))
    game_state.projectile_entities.append(projectile)
    # Muzzle-flash circle centered on the newly spawned projectile.
    effect_position = (projectile_pos[0] + PROJECTILE_SIZE[0] // 2,
                       projectile_pos[1] + PROJECTILE_SIZE[1] // 2)
    game_state.visual_effects.append(VisualCircle((250, 150, 50), effect_position, 15, 5, Millis(300), 0))
    # The "light-footed" upgrade removes the post-cast recovery penalty.
    has_lightfooted_upgrade = game_state.player_state.has_upgrade(HeroUpgradeId.MAGE_LIGHT_FOOTED)
    if not has_lightfooted_upgrade:
        game_state.player_state.gain_buff_effect(get_buff_effect(BuffType.RECOVERING_AFTER_ABILITY), Millis(300))
    return AbilityWasUsedSuccessfully()
def _upgrade_fireball_mana_cost(_game_state: GameState):
    # Hero-upgrade hook: permanently lower the fireball's mana cost by
    # mutating the global ability registry. The game state is unused.
    ABILITIES[AbilityType.FIREBALL].mana_cost = FIREBALL_UPGRADED_MANA_COST
def register_fireball_ability():
    """Register everything the fireball ability needs with the game's
    global registries: effect callback, UI data/icon, projectile controller,
    sprite animations, the burn debuff, and the mana-cost upgrade hook.

    Intended to be called once at startup (module registration phase).
    """
    register_ability_effect(AbilityType.FIREBALL, _apply_ability)
    description = "Shoot a fireball, dealing " + str(MIN_DMG) + "-" + str(MAX_DMG) + \
                  " magic damage to the first enemy that it hits."
    ability_data = AbilityData("Fireball", UiIconSprite.ABILITY_FIREBALL, FIREBALL_MANA_COST, Millis(500), description,
                               SoundId.ABILITY_FIREBALL)
    register_ability_data(AbilityType.FIREBALL, ability_data)
    register_ui_icon_sprite_path(UiIconSprite.ABILITY_FIREBALL, "resources/graphics/icon_fireball.png")
    register_projectile_controller(ProjectileType.PLAYER_FIREBALL, ProjectileController)
    # Sprite sheet layout: 8 animation frames per direction, one row pair
    # per direction (rows 0/2/4/6 for left/up/right/down).
    sprite_sheet = SpriteSheet("resources/graphics/projectile_player_fireball.png")
    original_sprite_size = (64, 64)
    indices_by_dir = {
        Direction.LEFT: [(x, 0) for x in range(8)],
        Direction.UP: [(x, 2) for x in range(8)],
        Direction.RIGHT: [(x, 4) for x in range(8)],
        Direction.DOWN: [(x, 6) for x in range(8)]
    }
    scaled_sprite_size = (48, 48)
    # (-9, -9) offsets the 48x48 sprite so it stays centered on the
    # 28x28 projectile entity.
    register_entity_sprite_map(Sprite.PROJECTILE_PLAYER_FIREBALL, sprite_sheet, original_sprite_size,
                               scaled_sprite_size, indices_by_dir, (-9, -9))
    register_buff_effect(BUFF_TYPE, BurntByFireball)
    register_hero_upgrade_effect(HeroUpgradeId.ABILITY_FIREBALL_MANA_COST, _upgrade_fireball_mana_cost)
| [
"jonte.murray@gmail.com"
] | jonte.murray@gmail.com |
82530bb5148f663c6037aa9b33d89c402bc72c63 | ee66a0e854e196300f7d88bdb2bb2e88530a2571 | /code/data.py | dff0715c34d181d708993ec98ff4082054978bc0 | [
"MIT"
] | permissive | RuthAngus/kinematics-and-rotation | 343532d70ba79466f559a9abea7c525ce2d28b47 | 7cad283612bc70ca9d12c79978561b938f527198 | refs/heads/master | 2022-07-31T20:14:37.090658 | 2020-05-19T12:16:50 | 2020-05-19T12:16:50 | 187,388,579 | 2 | 2 | MIT | 2020-04-23T15:10:45 | 2019-05-18T17:47:54 | Jupyter Notebook | UTF-8 | Python | false | false | 6,758 | py | # Converting Exploring data into a script.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from astropy.io import fits
import astropy.utils as au
from astropy.coordinates import SkyCoord
from dustmaps.bayestar import BayestarQuery
import astropy.units as units
from tools import getDust
from stardate.lhf import age_model
from calc_velocities import calc_vb, calc_vz, calc_vl
import astropy.units as u
from astropy.coordinates import ICRS
from astropy.coordinates import Galactic
from astropy.table import Table
from pyia import GaiaData
import astropy.coordinates as coord
from photometric_teff import bprp_to_teff
plotpar = {'axes.labelsize': 30,
'font.size': 30,
'legend.fontsize': 15,
'xtick.labelsize': 30,
'ytick.labelsize': 30,
'text.usetex': True}
plt.rcParams.update(plotpar)
print("Load McQuillan data")
mc = pd.read_csv("../Table_1_Periodic.txt")
print("Loading Gaia catalog")
with fits.open("../kepler_dr2_1arcsec.fits") as data:
gaia = pd.DataFrame(data[1].data, dtype="float64")
gaia_mc = pd.merge(mc, gaia, on="kepid", how="left")
print(len(gaia_mc), "stars")
# S/N cuts
sn = gaia_mc.parallax.values/gaia_mc.parallax_error.values
m = (sn > 10)
m &= (gaia_mc.parallax.values > 0) * np.isfinite(gaia_mc.parallax.values)
m &= gaia_mc.astrometric_excess_noise.values < 5
print(len(gaia_mc.iloc[m]), "stars after S/N cuts")
# Jason's wide binary cuts
# m &= gaia_mc.astrometric_excess_noise.values > 0
# m &= gaia_mc.astrometric_excess_noise_sig.values > 6
# Jason's short-period binary cuts
# m &= radial_velocity_error < 4
# print(len(gaia_mc.iloc[m]), "stars after Jason's binary cuts")
# assert 0
gaia_mc = gaia_mc.iloc[m]
print("Loading Dustmaps")
bayestar = BayestarQuery(max_samples=2, version='bayestar2019')
print("Calculating Ebv")
coords = SkyCoord(gaia_mc.ra.values*units.deg, gaia_mc.dec.values*units.deg,
distance=gaia_mc.r_est.values*units.pc)
ebv, flags = bayestar(coords, mode='percentile', pct=[16., 50., 84.],
return_flags=True)
# Calculate Av
Av_bayestar = 2.742 * ebv
print(np.shape(Av_bayestar), "shape")
Av = Av_bayestar[:, 1]
Av_errm = Av - Av_bayestar[:, 0]
Av_errp = Av_bayestar[:, 2] - Av
Av_std = .5*(Av_errm + Av_errp)
# Catch places where the extinction uncertainty is zero and default to an
# uncertainty of .05
m = Av_std == 0
Av_std[m] = .05
gaia_mc["ebv"] = ebv[:, 1] # The median ebv value.
gaia_mc["Av"] = Av
gaia_mc["Av_errp"] = Av_errp
gaia_mc["Av_errm"] = Av_errm
gaia_mc["Av_std"] = Av_std
# Calculate dereddened photometry
AG, Abp, Arp = getDust(gaia_mc.phot_g_mean_mag.values,
gaia_mc.phot_bp_mean_mag.values,
gaia_mc.phot_rp_mean_mag.values, gaia_mc.ebv.values)
gaia_mc["bp_dered"] = gaia_mc.phot_bp_mean_mag.values - Abp
gaia_mc["rp_dered"] = gaia_mc.phot_rp_mean_mag.values - Arp
gaia_mc["bprp_dered"] = gaia_mc["bp_dered"] - gaia_mc["rp_dered"]
gaia_mc["G_dered"] = gaia_mc.phot_g_mean_mag.values - AG
# Calculate Absolute magntitude
def mM(m, D):
    """Absolute magnitude from apparent magnitude *m* and distance *D* [pc].

    Inverts the distance modulus: M = m - 5*log10(D) + 5.
    """
    distance_modulus = 5 * np.log10(D) - 5
    return m - distance_modulus
abs_G = mM(gaia_mc.G_dered.values, gaia_mc.r_est)
gaia_mc["abs_G"] = abs_G
# Remove NaNs
m2 = np.isfinite(gaia_mc.abs_G.values)
gaia_mc = gaia_mc.iloc[m2]
# Remove binaries: fit the main sequence in the dereddened CMD and flag
# stars sitting well above it (over-luminous => likely photometric binaries
# or evolved stars).
x = gaia_mc.bp_dered - gaia_mc.rp_dered
y = gaia_mc.abs_G
# 6th-degree polynomial least-squares fit of abs_G vs color via the normal
# equations (A^T A) w = A^T y.
AT = np.vstack((x**6, x**5, x**4, x**3, x**2, x, np.ones_like(x)))
ATA = np.dot(AT, AT.T)
w = np.linalg.solve(ATA, np.dot(AT, y))
minb, maxb, extra = 0, 2.2, .27
xs = np.linspace(minb, maxb, 1000)  # NOTE(review): unused; presumably left over from plotting the fit
subcut = 4.
# m selects the stars to EXCLUDE: within the fitted color range AND either
# more than `extra` mag brighter than the main-sequence fit, or brighter
# than the absolute subgiant cut (abs_G < 4).
m = (minb < x) * (x < maxb)
m &= (y < np.polyval(w, x) - extra) + (subcut > y)
# flag == 1 marks the retained ("single") stars; flag == 0 the excluded ones.
flag = np.zeros(len(gaia_mc))
flag[~m] = np.ones(len(flag[~m]))
gaia_mc["flag"] = flag
test = gaia_mc.iloc[gaia_mc.flag.values == 1]
# Sanity-check plot: full sample vs retained "singles" in the CMD.
plt.plot(gaia_mc.bp_dered - gaia_mc.rp_dered, gaia_mc.abs_G, ".", alpha=.1)
plt.plot(test.bp_dered - test.rp_dered, test.abs_G, ".", alpha=.1)
plt.ylim(10, 1)
plt.savefig("test")
# Calculate photometric Teff
teffs = bprp_to_teff(gaia_mc.bp_dered - gaia_mc.rp_dered)
gaia_mc["color_teffs"] = teffs
print("Calculating gyro ages")
logages = []
for i, p in enumerate(gaia_mc.Prot.values):
logages.append(age_model(np.log10(p), gaia_mc.phot_bp_mean_mag.values[i] -
gaia_mc.phot_rp_mean_mag.values[i]))
gaia_mc["log_age"] = np.array(logages)
gaia_mc["age"] = (10**np.array(logages))*1e-9
plt.figure(figsize=(16, 9), dpi=200)
singles = gaia_mc.flag.values == 1
plt.scatter(gaia_mc.bprp_dered.values[singles], gaia_mc.abs_G.values[singles],
c=gaia_mc.age.values[singles], vmin=0, vmax=5, s=50, alpha=.2,
cmap="viridis", rasterized=True, edgecolor="none")
plt.xlabel("$\mathrm{G_{BP}-G_{RP}~[dex]}$")
plt.ylabel("$\mathrm{G~[dex]}$")
plt.colorbar(label="$\mathrm{Gyrochronal~age~[Gyr]}$")
plt.ylim(11, 5.5)
plt.xlim(.8, 2.7);
plt.savefig("age_gradient.pdf")
print("Calculating vb")
pmb_samples, vb_samples = calc_vb(gaia_mc)
pmb, vb = np.median(pmb_samples, axis=1), np.median(vb_samples, axis=1)
pmb_err, vb_err = np.std(pmb_samples, axis=1), np.std(vb_samples, axis=1)
vb_errp = np.percentile(vb_samples, 84, axis=1) - vb
vb_errm = vb - np.percentile(vb_samples, 16, axis=1)
gaia_mc["vb"] = vb
gaia_mc["vb_err"] = vb_err
# print("Calculating vl")
# vl_samples = calc_vl(gaia_mc)
# vl, vl_err = np.median(vl_samples, axis=1), np.std(vl_samples, axis=1)
# vl_errp = np.percentile(vl_samples, 84, axis=1) - vl
# vl_errm = vl - np.percentile(vl_samples, 16, axis=1)
# gaia_mc["vl"] = vl
# gaia_mc["vl_err"] = vl_err
# Calculate b
icrs = ICRS(ra=gaia_mc.ra.values*u.degree,
dec=gaia_mc.dec.values*u.degree)
lb = icrs.transform_to(Galactic)
b = lb.b*u.degree
l = lb.l*u.degree
gaia_mc["b"] = b.value
gaia_mc["l"] = l.value
print("Calculating VZ")
mrv = gaia_mc.radial_velocity.values != 0.00
vz, vz_err = calc_vz(gaia_mc)
vz[~mrv] = np.ones(len(vz[~mrv]))*np.nan
vz_err[~mrv] = np.ones(len(vz_err[~mrv]))*np.nan
gaia_mc["vz"] = vz
gaia_mc["vz_err"] = vz_err
# Calculate v_ra and v_dec
d = gaia_mc.r_est.values*u.pc
vra = (gaia_mc.pmra.values*u.mas/u.yr * d).to(u.km/u.s,
u.dimensionless_angles())
vdec = (gaia_mc.pmdec.values*u.mas/u.yr * d).to(u.km/u.s,
u.dimensionless_angles())
c = coord.SkyCoord(ra=gaia_mc.ra.values*u.deg, dec=gaia_mc.dec.values*u.deg,
distance=d, pm_ra_cosdec=gaia_mc.pmra.values*u.mas/u.yr,
pm_dec=gaia_mc.pmdec.values*u.mas/u.yr)
gal = c.galactic
v_b = (gal.pm_b * gal.distance).to(u.km/u.s, u.dimensionless_angles())
gaia_mc["v_ra"] = vra.value
gaia_mc["v_dec"] = vdec.value
gaia_mc["v_b"] = v_b
print("Saving file")
gaia_mc.to_csv("gaia_mc5.csv")
| [
"ruth.angus@astro.ox.ac.uk"
] | ruth.angus@astro.ox.ac.uk |
1ad6a6fe4e509cff53c845770777678f0b1d73f9 | 25b4fc4a54faf0f4217f3661477fa8f26cd60164 | /Orders/migrations/0004_rename_city_order_town_city.py | f8f82d71f368704c408a0e1eeaf7fb9050510edf | [] | no_license | AshtiNematian/Book_Store_Nematian_ | 6f601f69f0a25522ac351e4ad963f17011254289 | b83ea7319dbead2be5812e2d001c58e7d906fff9 | refs/heads/master | 2023-07-21T03:56:48.386869 | 2021-09-03T17:03:17 | 2021-09-03T17:04:24 | 402,333,026 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 362 | py | # Generated by Django 3.2.6 on 2021-08-26 09:20
from django.db import migrations
class Migration(migrations.Migration):
    """Rename ``Order.city`` to ``Order.town_city`` (auto-generated by Django)."""
    # Must run after the previous migration of the 'Orders' app.
    dependencies = [
        ('Orders', '0003_auto_20210826_1349'),
    ]
    # Pure column rename: the database column is renamed in place, no data loss.
    operations = [
        migrations.RenameField(
            model_name='order',
            old_name='city',
            new_name='town_city',
        ),
    ]
| [
"you@example.com"
] | you@example.com |
404fb6aeb98af6e4b45d47570ca4d57c33888f8c | acbfcd794191dc4919c6ab18a41dab128c9546ad | /setup.py | 122029909f03ba261a77e0373140abbf19ae7cc5 | [
"MIT"
] | permissive | nigma/django-request-id | 9b9562a7658d842557416c1cdf51c8dd6f5fa2ea | 0850e04e91b616b9aa8443959edf4a73a62289e4 | refs/heads/master | 2021-05-24T04:21:32.339021 | 2016-10-23T15:27:48 | 2016-10-23T15:27:48 | 16,383,094 | 28 | 13 | MIT | 2021-01-11T19:42:34 | 2014-01-30T16:36:59 | Python | UTF-8 | Python | false | false | 1,854 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Single source of truth for the package version; also used by the
# "publish" shortcut below when suggesting a git tag.
version = "1.0.0"

# `python setup.py publish` builds and uploads a release, then reminds the
# maintainer to tag it.  Exits before reaching setup() below.
if sys.argv[-1] == "publish":
    os.system("python setup.py sdist bdist_wheel upload")
    print("You probably want to also tag the version now:")
    print(" git tag -a %s -m 'version %s'" % (version, version))
    print(" git push --tags")
    sys.exit()

# Build the PyPI long description from the README plus the changelog.
# Use context managers so the handles are closed deterministically; the
# original `open(...).read()` chains leaked the file objects until GC.
with open("README.rst") as f:
    readme = f.read()
with open("HISTORY.rst") as f:
    history = f.read().replace(".. :changelog:", "")

setup(
    name="django-request-id",
    version=version,
    description="""Augment each request with unique id for logging purposes""",
    license="MIT",
    author="Filip Wasilewski",
    author_email="en@ig.ma",
    url="https://github.com/nigma/django-request-id",
    long_description=readme + "\n\n" + history,
    packages=[
        "request_id",
    ],
    include_package_data=True,
    install_requires=[
        "django",
    ],
    zip_safe=True,
    keywords="django request-id",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Web Environment",
        "Framework :: Django",
        "Framework :: Django :: 1.8",
        "Framework :: Django :: 1.9",
        "Framework :: Django :: 1.10",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Topic :: Software Development",
        "Topic :: Software Development :: Libraries :: Python Modules"
    ],
)
| [
"en@ig.ma"
] | en@ig.ma |
886ec42f03ae959f306ac16499444fbbe5fbec73 | 270363be5ea94d33469fe4271eccb343357d4fa6 | /linalg/optim/conjugate_gradient.py | 49f095c2ebeba493d88fd167c9316edb9ddffc69 | [] | no_license | tkelestemur/learn-linalg | c487389e9802b0223232bcb8c9ec0003cc7df091 | a6e04e903e5c9e00801b56a228c56fd8b8ba8c71 | refs/heads/master | 2023-03-19T05:53:34.407780 | 2021-01-02T13:54:40 | 2021-01-02T14:26:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 929 | py | import numpy as np
from .base import IterativeSolver
class ConjugateGradient(IterativeSolver):
    """A conjugate gradient (CG) solver for ``A x = b``.

    Unlike gradient descent, CG is guaranteed to converge in at most `n`
    steps (in exact arithmetic, for an ``n x n`` system).
    NOTE(review): CG is only valid for symmetric positive-definite ``A``;
    this implementation does not check that — confirm against callers.
    """
    def __init__(self, max_iters: int, tol: float = 1e-6):
        # Iteration cap and relative tolerance for the per-step stopping
        # test applied to squared residual norms in `_solve`.
        self.max_iters = max_iters
        self.tol = tol
    def _solve(self, A, b):
        # Size of the (square) system.
        n = A.shape[0]
        x = np.random.randn(n)  # Randomly initialize an estimate of x (no seed: nondeterministic).
        r = b - A @ x  # Initial residual.
        v = np.array(r, copy=True)  # First search direction is the residual itself.
        beta = 0
        for i in range(self.max_iters):
            v = r + beta*v  # Search direction: residual plus beta-weighted previous direction.
            alpha = r@r / (v.T @ A @ v)  # Line search: optimal step length along v.
            x = x + alpha*v  # Update estimate.
            r_old = r  # Save the old residual for beta and the stopping test.
            r = r - alpha*A@v  # Update the residual incrementally (avoids recomputing b - A @ x).
            if (r@r) < self.tol*(r_old@r_old):
                # Stop once the squared residual shrank by a factor `tol`
                # relative to the previous iterate's squared residual.
                print("Converged in {} iterations.".format(i))
                break
            beta = (r@r) / (r_old@r_old)  # Direction step (Fletcher-Reeves-style ratio).
        return x
| [
"kevinarmandzakka@gmail.com"
] | kevinarmandzakka@gmail.com |
bf34a1e3f50cd8ba8fade62d4a408ed0f8953a02 | 36dfe29989de2005f5ad8f61b7575e16bc7f46ed | /Blue/BFS/JumpingNumber.py | 5a79cc610740b1fee81ec5e97ffb8142d69c7881 | [] | no_license | chrislevn/Coding-Challenges | ca17e26476a721ca855fef6e2e1587148758dde5 | dee52baaf5d15046a901b174e8b319deaa1c6107 | refs/heads/master | 2022-03-26T00:05:09.817752 | 2022-02-03T17:37:53 | 2022-02-03T17:37:53 | 242,436,335 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,786 | py | # Check if the value is a jumping number or not
def checkJumping(value):
    """Return True when the digits of ``value`` change direction.

    The first two digits fix the trend (rising or falling); the number
    qualifies as soon as any later adjacent digit pair steps against that
    trend.  A flat start (first two digits equal) never qualifies.
    """
    digits = [int(ch) for ch in str(value)]
    lead_step = digits[1] - digits[0]
    if lead_step == 0:
        return False
    rising = lead_step > 0
    # Walk the remaining adjacent pairs (right to left, like the original
    # implementation) and report any reversal against the leading trend.
    for idx in range(len(digits) - 1, 1, -1):
        step = digits[idx] - digits[idx - 1]
        if rising and step < 0:
            return True
        if not rising and step > 0:
            return True
    return False
# Accumulate the sum of the first 500 qualifying numbers, scanning up from 100.
count = 100
realCounter = 0
total = 0
while realCounter < 500:
    if checkJumping(count):
        realCounter += 1
        total += count
    count += 1
print(total) | [
"locvicvn1234@gmail.com"
] | locvicvn1234@gmail.com |
03c0b569271add7aed620e1f3b32dea58117f35c | ed15e441d4cd7a54d989610b8070a5d14bfda4c8 | /1805/python高级/5/8-重写.py | e395db85cdba412a97661b2bab7bab65a2cf3a0a | [] | no_license | jmh9876/p1804_jmh | 24593af521749913b65685e21ffc37281c43998f | a52a6366c21ad7598e71d8e82aeee746ecee7c6b | refs/heads/master | 2020-03-15T23:30:02.769818 | 2018-08-02T09:10:20 | 2018-08-02T09:10:20 | 132,395,104 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 288 | py | class Animal():
    def wark(self):
        # Generic animal cry; subclasses override this with their own sound.
        print("叫")
class Dog(Animal):
    # Overrides Animal.wark with a dog-specific bark.
    def wark(self):
        print("汪汪叫")
class xiaotianquan(Dog):
    # "Xiaotianquan" (Howling Celestial Dog): overrides Dog.wark once more.
    def wark(self):
        print("狂叫")
# Demo of method overriding: each call dispatches to the most-derived wark().
dog = Dog()
dog.wark()
howler = xiaotianquan()
howler.wark()
| [
"2210744940@qq.com"
] | 2210744940@qq.com |
2eb1a75ee9bf27075c117540d1608c6a2117b96b | e9ef3cd143478660d098668a10e67544a42b5878 | /Lib/corpuscrawler/crawl_enx.py | 0006cc37449f7bf8345db34670223292315b5b45 | [
"Apache-2.0"
] | permissive | google/corpuscrawler | a5c790c19b26e6397b768ce26cf12bbcb641eb90 | 10adaecf4ed5a7d0557c8e692c186023746eb001 | refs/heads/master | 2023-08-26T04:15:59.036883 | 2022-04-20T08:18:11 | 2022-04-20T08:18:11 | 102,909,145 | 119 | 40 | NOASSERTION | 2022-04-20T08:18:12 | 2017-09-08T22:21:03 | Python | UTF-8 | Python | false | false | 809 | py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, print_function, unicode_literals
from corpuscrawler.util import crawl_bibleis
def crawl(crawler):
    """Crawl Bible.is (bible ID 'ENXBSP') into the 'enx' output corpus."""
    # The output sink is fetched first, then handed to the shared helper.
    crawl_bibleis(crawler, crawler.get_output(language='enx'), bible='ENXBSP')
| [
"sascha@brawer.ch"
] | sascha@brawer.ch |
82efc495c463906297ad2423d973ea7de9e5139b | 8c861caac797ecc24d9e907989af3d8b4493a2b4 | /gwt_pt/oanda/order/get.py | 4851450cd58e7821403824d52b0a59a2c9fa6020 | [] | no_license | eggyolktech/gwtPT | b607242ce5bbc13854f05eff69bf98ca6de2d218 | 23d60e54966fcd2ba85a1038ff807921d6b70028 | refs/heads/master | 2021-09-14T21:03:00.347549 | 2018-05-19T16:12:43 | 2018-05-19T16:12:43 | 112,349,960 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,061 | py | #!/usr/bin/env python
import argparse
import gwt_pt.oanda.common.config
import gwt_pt.oanda.common.view
def main():
    """Fetch and display the details of a single Order in an Account."""
    # Command line: the shared v20-config argument plus the order id.
    cli_parser = argparse.ArgumentParser()
    gwt_pt.oanda.common.config.add_argument(cli_parser)
    cli_parser.add_argument(
        "orderid",
        help=(
            "The ID of the Order to get. If prepended "
            "with an '@', this will be interpreted as a client Order ID"
        )
    )
    parsed = cli_parser.parse_args()
    # The parsed v20 config knows how to build an authenticated API context.
    ctx = parsed.config.create_context()
    # Look the Order up in the configured active account.
    resp = ctx.order.get(
        parsed.config.active_account,
        parsed.orderid
    )
    print("Response: {} ({})".format(resp.status, resp.reason))
    print("")
    fetched_order = resp.get("order", 200)
    print(fetched_order)
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
| [
"eggyolktech@gmail.com"
] | eggyolktech@gmail.com |
301053dfb0c8739c06bcc589b77f5b70570ea219 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03024/s649105913.py | 6ec6fb59fbfd609b785114ac42be49f9118c4f89 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 117 | py | m = list(map(str,input()))
maru = m.count('o')
batsu = m.count('x')
if batsu > 7:
print('NO')
else:
print('YES') | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
a078ceb80fb08d947461e11dc935026a821823d3 | 8881a4927d893e1e755c0488f76ba7941b379f26 | /just_django/dashboard/notepad/views.py | 645436b0609ecc27be5307948ddc99273f019660 | [] | no_license | SatishNitk/Django | 6bb839fcf2bc7d70413e3d56ac98124a7a96a5de | d9260c032322a34410d783c39a8f13e8f63b8be4 | refs/heads/master | 2020-05-24T23:01:35.767388 | 2019-07-06T13:56:50 | 2019-07-06T13:56:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,229 | py | from django.shortcuts import render,redirect, get_object_or_404
from notepad.models import *
from notepad.forms import *
def create_view(request):
    """Show the note-creation form; on a valid POST, save and go home."""
    note_form = NoteModelForm(request.POST or None, request.FILES or None)
    if note_form.is_valid():
        # Stamp the new note with the logged-in user before persisting.
        note_form.instance.user = request.user
        note_form.save()
        return redirect('/')
    return render(request, "notepad/create.html", {'form': note_form})
def list_view(request):
    """Render every note in the system."""
    all_notes = Note.objects.all()
    return render(request, "notepad/list.html", {'object_list': all_notes})
def delete_view(request, note_id):
    """Delete note ``note_id`` if the requester owns it, then show the list."""
    candidates = Note.objects.filter(pk=note_id)
    # Unknown ids and other users' notes are ignored silently.
    if candidates.exists() and request.user == candidates[0].user:
        candidates[0].delete()
    return redirect("/notes/list")
def update_view(request, note_id):
    """Edit an existing note; responds 404 when the id is unknown."""
    note = get_object_or_404(Note, id=note_id)
    # Passing instance= pre-populates the form with the note's current data.
    edit_form = NoteModelForm(request.POST or None, request.FILES or None, instance=note)
    if edit_form.is_valid():
        edit_form.instance.user = request.user
        edit_form.save()
        return redirect('/')
    return render(request, "notepad/create.html", {'form': edit_form})
| [
"satishkrgu95@gmail.com"
] | satishkrgu95@gmail.com |
5134d00deabcf5b4c674b1a99748d87fc878a810 | 4918c2450e4f5c74246257e6f77e4443db8594d7 | /src/alchemtest/tests/test_namd.py | 2249eb1cc5fa485d2e96ccd25518334dca809fa5 | [
"BSD-3-Clause"
] | permissive | ttjoseph/alchemtest-broken | 58ab5470cdaf112911a7b08048056a71597147a9 | d106cafcf02570d170f4ac78a11fb07517b5de11 | refs/heads/master | 2023-07-05T19:20:47.586928 | 2021-08-01T22:10:18 | 2021-08-01T22:10:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 468 | py | '''Tests for all the NAMD datasets'''
import pytest
from alchemtest.namd import load_tyr2ala, load_idws
from . import BaseDatasetTest
class TestNAMD(BaseDatasetTest):
    """Parametrised sanity checks for the bundled NAMD datasets.

    Each fixture param is a ``(loader, data keys, files per key)`` triple;
    NOTE(review): presumably consumed by ``BaseDatasetTest`` — confirm there.
    """
    @pytest.fixture(scope="class",
                    params = [(load_tyr2ala, ('forward', 'backward'), (1, 1)),
                              (load_idws, ('forward', ), (2,)),
                              ])
    def dataset(self, request):
        # Delegate to the shared base-class fixture implementation.
        return super(TestNAMD, self).dataset(request)
| [
"orbeckst@gmail.com"
] | orbeckst@gmail.com |
ca667e57f2843a81d8bdb44aa0f217287f0f2a5a | 212ea42ae6425e4a5e4563167d391e8ffe7e090b | /click_app_template_repository_demo/cli.py | 1bdd73daaec7a8a7c7223b2fbfb8613e4ad30338 | [
"Apache-2.0"
] | permissive | simonw/click-app-template-repository-demo-prev | 80563221203a7c6a909be50d5bdc9952dd927700 | 058f5b6fb2e4dee1ab80f7479b390c84c8deb984 | refs/heads/main | 2023-07-05T06:58:28.109577 | 2021-08-30T01:06:11 | 2021-08-30T01:06:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 363 | py | import click
# Root command group for the demo CLI; click.version_option adds --version.
# NOTE: the function docstring below doubles as the group's --help text.
@click.group()
@click.version_option()
def cli():
    "Demo of simonw/click-app-template-repository"
# Subcommand registered under the CLI name "command" (the function name is
# not used as the command name).  Takes one positional EXAMPLE argument and
# an optional -o/--option value; the docstring is the subcommand's help text.
@cli.command(name="command")
@click.argument(
    "example"
)
@click.option(
    "-o",
    "--option",
    help="An example option",
)
def first_command(example, option):
    "Command description goes here"
    click.echo("Here is some output")
| [
"actions@github.com"
] | actions@github.com |
b6f5ac176c14ef3851f5eb729f9346af4e42b399 | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-5/173a9e3eb37826e2524f5dd66aab24fcf203068b-<_get_outer_edges>-bug.py | 6b77e21432d3ffaf2f38b31d8a9f3d2dbeb18926 | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 714 | py | def _get_outer_edges(a, range):
'\n Determine the outer bin edges to use, from either the data or the range\n argument\n '
if (range is not None):
(first_edge, last_edge) = range
elif (a.size == 0):
(first_edge, last_edge) = (0, 1)
else:
(first_edge, last_edge) = (a.min(), a.max())
if (first_edge > last_edge):
raise ValueError('max must be larger than min in range parameter.')
if (not (np.isfinite(first_edge) and np.isfinite(last_edge))):
raise ValueError('range parameter must be finite.')
if (first_edge == last_edge):
first_edge = (first_edge - 0.5)
last_edge = (last_edge + 0.5)
return (first_edge, last_edge) | [
"dg1732004@smail.nju.edu.cn"
] | dg1732004@smail.nju.edu.cn |
e62161aae7741e63193d7fe0fed8497b20197711 | a3b306df800059a5b74975793251a28b8a5f49c7 | /Graphs/LX-2/molecule_otsu = False/BioImageXD-1.0/ITK/lib/InsightToolkit/WrapITK/lib/itkBSplineResampleImageFunctionPython.py | 3de6009b7934c7c391948b53336b3deac2ba1648 | [] | no_license | giacomo21/Image-analysis | dc17ba2b6eb53f48963fad931568576fda4e1349 | ea8bafa073de5090bd8f83fb4f5ca16669d0211f | refs/heads/master | 2016-09-06T21:42:13.530256 | 2013-07-22T09:35:56 | 2013-07-22T09:35:56 | 11,384,784 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 35,840 | py | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 1.3.40
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
# SWIG-generated Python 2/3 compatibility shim: choose how bound methods are
# created and locate/import the compiled extension module.  Hand edits to
# this generated file are lost on regeneration.
if version_info >= (3,0,0):
    new_instancemethod = lambda func, inst, cls: _itkBSplineResampleImageFunctionPython.SWIG_PyInstanceMethod_New(func)
else:
    from new import instancemethod as new_instancemethod
if version_info >= (2,6,0):
    def swig_import_helper():
        # Look for the C extension next to this file; fall back to a plain
        # import if imp.find_module cannot locate it.
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_itkBSplineResampleImageFunctionPython', [dirname(__file__)])
        except ImportError:
            import _itkBSplineResampleImageFunctionPython
            return _itkBSplineResampleImageFunctionPython
        if fp is not None:
            try:
                _mod = imp.load_module('_itkBSplineResampleImageFunctionPython', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _itkBSplineResampleImageFunctionPython = swig_import_helper()
    del swig_import_helper
else:
    import _itkBSplineResampleImageFunctionPython
del version_info
# Prefer the built-in `property`; silently skip on interpreters without it.
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    # Generated attribute setter: routes writes through SWIG's setter table
    # and, when `static` is true, refuses to create brand-new attributes.
    if (name == "thisown"): return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    if (not static) or hasattr(self,name):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    # Non-static variant: passing static=0 always allows new attributes.
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
    # Generated attribute getter: consults SWIG's getter table; unknown
    # names raise AttributeError per the normal Python protocol.
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError(name)
def _swig_repr(self):
    # Generated __repr__: mentions the underlying SWIG proxy pointer when
    # available, e.g. "<module.Class; proxy of ... >".
    try: strthis = "proxy of " + self.this.__repr__()
    except: strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Old/new-style class compatibility flag (generated boilerplate).
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
def _swig_setattr_nondynamic_method(set):
    # Wrap a setattr implementation so instances reject new attribute names
    # (only existing attributes, "this", and "thisown" may be assigned).
    def set_attr(self,name,value):
        if (name == "thisown"): return self.this.own(value)
        if hasattr(self,name) or (name == "this"):
            set(self,name,value)
        else:
            raise AttributeError("You cannot add attributes to %s" % self)
    return set_attr
import ITKCommonBasePython
import itkEventObjectsPython
import pyBasePython
import itkImagePython
import itkFixedArrayPython
import itkCovariantVectorPython
import vnl_vectorPython
import vcl_complexPython
import vnl_matrixPython
import itkVectorPython
import vnl_vector_refPython
import ITKRegionsPython
import itkSizePython
import itkIndexPython
import itkOffsetPython
import itkPointPython
import itkMatrixPython
import vnl_matrix_fixedPython
import itkRGBAPixelPython
import itkSymmetricSecondRankTensorPython
import itkRGBPixelPython
import itkBSplineInterpolateImageFunctionPython
import itkContinuousIndexPython
import itkInterpolateImageFunctionPython
import itkImageFunctionPython
import itkFunctionBasePython
import itkArrayPython
# Generated convenience factories: each `itkX_New()` simply forwards to the
# corresponding proxy class's `New()` (one per pixel type / dimension).
def itkBSplineResampleImageFunctionID3D_New():
  return itkBSplineResampleImageFunctionID3D.New()
def itkBSplineResampleImageFunctionIF3D_New():
  return itkBSplineResampleImageFunctionIF3D.New()
def itkBSplineResampleImageFunctionIUS3D_New():
  return itkBSplineResampleImageFunctionIUS3D.New()
def itkBSplineResampleImageFunctionIUL3D_New():
  return itkBSplineResampleImageFunctionIUL3D.New()
def itkBSplineResampleImageFunctionIUC3D_New():
  return itkBSplineResampleImageFunctionIUC3D.New()
def itkBSplineResampleImageFunctionID2D_New():
  return itkBSplineResampleImageFunctionID2D.New()
def itkBSplineResampleImageFunctionIF2D_New():
  return itkBSplineResampleImageFunctionIF2D.New()
def itkBSplineResampleImageFunctionIUS2D_New():
  return itkBSplineResampleImageFunctionIUS2D.New()
def itkBSplineResampleImageFunctionIUL2D_New():
  return itkBSplineResampleImageFunctionIUL2D.New()
def itkBSplineResampleImageFunctionIUC2D_New():
  return itkBSplineResampleImageFunctionIUC2D.New()
# SWIG proxy for one itk::BSplineResampleImageFunction instantiation (the
# "ID2D" suffix suggests image of double pixels, dimension 2 per WrapITK
# naming — confirm against the wrapped build).  Generated code: do not edit.
class itkBSplineResampleImageFunctionID2D(itkBSplineInterpolateImageFunctionPython.itkBSplineInterpolateImageFunctionID2DD):
    """Proxy of C++ itkBSplineResampleImageFunctionID2D class"""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__():
        """__New_orig__()"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionID2D___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    __swig_destroy__ = _itkBSplineResampleImageFunctionPython.delete_itkBSplineResampleImageFunctionID2D
    def cast(*args):
        """cast(itkLightObject obj) -> itkBSplineResampleImageFunctionID2D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionID2D_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkBSplineResampleImageFunctionID2D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionID2D_GetPointer(self)
    def New(*args, **kargs):
        """New() -> itkBSplineResampleImageFunctionID2D
        Create a new object of the class itkBSplineResampleImageFunctionID2D and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBSplineResampleImageFunctionID2D.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBSplineResampleImageFunctionID2D.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBSplineResampleImageFunctionID2D.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Bind the C-level GetPointer as an instance method, register the proxy with
# the extension module, and expose the generated module-level helpers.
itkBSplineResampleImageFunctionID2D.GetPointer = new_instancemethod(_itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionID2D_GetPointer,None,itkBSplineResampleImageFunctionID2D)
itkBSplineResampleImageFunctionID2D_swigregister = _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionID2D_swigregister
itkBSplineResampleImageFunctionID2D_swigregister(itkBSplineResampleImageFunctionID2D)
def itkBSplineResampleImageFunctionID2D___New_orig__():
    """itkBSplineResampleImageFunctionID2D___New_orig__()"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionID2D___New_orig__()
def itkBSplineResampleImageFunctionID2D_cast(*args):
    """itkBSplineResampleImageFunctionID2D_cast(itkLightObject obj) -> itkBSplineResampleImageFunctionID2D"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionID2D_cast(*args)
# SWIG proxy for the 3-D double-pixel instantiation ("ID3D" per WrapITK
# naming — confirm against the wrapped build).  Generated code: do not edit.
class itkBSplineResampleImageFunctionID3D(itkBSplineInterpolateImageFunctionPython.itkBSplineInterpolateImageFunctionID3DD):
    """Proxy of C++ itkBSplineResampleImageFunctionID3D class"""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__():
        """__New_orig__()"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionID3D___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    __swig_destroy__ = _itkBSplineResampleImageFunctionPython.delete_itkBSplineResampleImageFunctionID3D
    def cast(*args):
        """cast(itkLightObject obj) -> itkBSplineResampleImageFunctionID3D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionID3D_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkBSplineResampleImageFunctionID3D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionID3D_GetPointer(self)
    def New(*args, **kargs):
        """New() -> itkBSplineResampleImageFunctionID3D
        Create a new object of the class itkBSplineResampleImageFunctionID3D and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBSplineResampleImageFunctionID3D.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBSplineResampleImageFunctionID3D.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBSplineResampleImageFunctionID3D.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Bind GetPointer, register the proxy, and expose the module-level helpers.
itkBSplineResampleImageFunctionID3D.GetPointer = new_instancemethod(_itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionID3D_GetPointer,None,itkBSplineResampleImageFunctionID3D)
itkBSplineResampleImageFunctionID3D_swigregister = _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionID3D_swigregister
itkBSplineResampleImageFunctionID3D_swigregister(itkBSplineResampleImageFunctionID3D)
def itkBSplineResampleImageFunctionID3D___New_orig__():
    """itkBSplineResampleImageFunctionID3D___New_orig__()"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionID3D___New_orig__()
def itkBSplineResampleImageFunctionID3D_cast(*args):
    """itkBSplineResampleImageFunctionID3D_cast(itkLightObject obj) -> itkBSplineResampleImageFunctionID3D"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionID3D_cast(*args)
# SWIG proxy for the 2-D float-pixel instantiation ("IF2D" per WrapITK
# naming — confirm against the wrapped build).  Generated code: do not edit.
class itkBSplineResampleImageFunctionIF2D(itkBSplineInterpolateImageFunctionPython.itkBSplineInterpolateImageFunctionIF2DF):
    """Proxy of C++ itkBSplineResampleImageFunctionIF2D class"""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__():
        """__New_orig__()"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIF2D___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    __swig_destroy__ = _itkBSplineResampleImageFunctionPython.delete_itkBSplineResampleImageFunctionIF2D
    def cast(*args):
        """cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIF2D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIF2D_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkBSplineResampleImageFunctionIF2D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIF2D_GetPointer(self)
    def New(*args, **kargs):
        """New() -> itkBSplineResampleImageFunctionIF2D
        Create a new object of the class itkBSplineResampleImageFunctionIF2D and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBSplineResampleImageFunctionIF2D.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBSplineResampleImageFunctionIF2D.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBSplineResampleImageFunctionIF2D.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Bind GetPointer, register the proxy, and expose the module-level helpers.
itkBSplineResampleImageFunctionIF2D.GetPointer = new_instancemethod(_itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIF2D_GetPointer,None,itkBSplineResampleImageFunctionIF2D)
itkBSplineResampleImageFunctionIF2D_swigregister = _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIF2D_swigregister
itkBSplineResampleImageFunctionIF2D_swigregister(itkBSplineResampleImageFunctionIF2D)
def itkBSplineResampleImageFunctionIF2D___New_orig__():
    """itkBSplineResampleImageFunctionIF2D___New_orig__()"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIF2D___New_orig__()
def itkBSplineResampleImageFunctionIF2D_cast(*args):
    """itkBSplineResampleImageFunctionIF2D_cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIF2D"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIF2D_cast(*args)
# SWIG proxy for the 3-D float-pixel instantiation ("IF3D" per WrapITK
# naming — confirm against the wrapped build).  Generated code: do not edit.
class itkBSplineResampleImageFunctionIF3D(itkBSplineInterpolateImageFunctionPython.itkBSplineInterpolateImageFunctionIF3DF):
    """Proxy of C++ itkBSplineResampleImageFunctionIF3D class"""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__():
        """__New_orig__()"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIF3D___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    __swig_destroy__ = _itkBSplineResampleImageFunctionPython.delete_itkBSplineResampleImageFunctionIF3D
    def cast(*args):
        """cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIF3D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIF3D_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkBSplineResampleImageFunctionIF3D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIF3D_GetPointer(self)
    def New(*args, **kargs):
        """New() -> itkBSplineResampleImageFunctionIF3D
        Create a new object of the class itkBSplineResampleImageFunctionIF3D and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBSplineResampleImageFunctionIF3D.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBSplineResampleImageFunctionIF3D.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBSplineResampleImageFunctionIF3D.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Bind GetPointer, register the proxy, and expose the module-level helpers.
itkBSplineResampleImageFunctionIF3D.GetPointer = new_instancemethod(_itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIF3D_GetPointer,None,itkBSplineResampleImageFunctionIF3D)
itkBSplineResampleImageFunctionIF3D_swigregister = _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIF3D_swigregister
itkBSplineResampleImageFunctionIF3D_swigregister(itkBSplineResampleImageFunctionIF3D)
def itkBSplineResampleImageFunctionIF3D___New_orig__():
    """itkBSplineResampleImageFunctionIF3D___New_orig__()"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIF3D___New_orig__()
def itkBSplineResampleImageFunctionIF3D_cast(*args):
    """itkBSplineResampleImageFunctionIF3D_cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIF3D"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIF3D_cast(*args)
# SWIG proxy for the 2-D unsigned-char-pixel instantiation ("IUC2D" per
# WrapITK naming — confirm against the wrapped build).  Generated: do not edit.
class itkBSplineResampleImageFunctionIUC2D(itkBSplineInterpolateImageFunctionPython.itkBSplineInterpolateImageFunctionIUC2DUC):
    """Proxy of C++ itkBSplineResampleImageFunctionIUC2D class"""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__():
        """__New_orig__()"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUC2D___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    __swig_destroy__ = _itkBSplineResampleImageFunctionPython.delete_itkBSplineResampleImageFunctionIUC2D
    def cast(*args):
        """cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIUC2D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUC2D_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkBSplineResampleImageFunctionIUC2D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUC2D_GetPointer(self)
    def New(*args, **kargs):
        """New() -> itkBSplineResampleImageFunctionIUC2D
        Create a new object of the class itkBSplineResampleImageFunctionIUC2D and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBSplineResampleImageFunctionIUC2D.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBSplineResampleImageFunctionIUC2D.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBSplineResampleImageFunctionIUC2D.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Bind GetPointer, register the proxy, and expose the module-level helpers.
itkBSplineResampleImageFunctionIUC2D.GetPointer = new_instancemethod(_itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUC2D_GetPointer,None,itkBSplineResampleImageFunctionIUC2D)
itkBSplineResampleImageFunctionIUC2D_swigregister = _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUC2D_swigregister
itkBSplineResampleImageFunctionIUC2D_swigregister(itkBSplineResampleImageFunctionIUC2D)
def itkBSplineResampleImageFunctionIUC2D___New_orig__():
    """itkBSplineResampleImageFunctionIUC2D___New_orig__()"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUC2D___New_orig__()
def itkBSplineResampleImageFunctionIUC2D_cast(*args):
    """itkBSplineResampleImageFunctionIUC2D_cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIUC2D"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUC2D_cast(*args)
# Auto-generated SWIG/WrapITK proxy; the "IUC3D" suffix presumably names the
# Image<unsigned char, 3> instantiation (per WrapITK naming) -- TODO confirm.
# Do not hand-edit this block; regenerate it instead.
class itkBSplineResampleImageFunctionIUC3D(itkBSplineInterpolateImageFunctionPython.itkBSplineInterpolateImageFunctionIUC3DUC):
    """Proxy of C++ itkBSplineResampleImageFunctionIUC3D class"""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # ITK objects are created via the New() factory, never the constructor.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__():
        """__New_orig__() -- raw factory call into the C extension."""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUC3D___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    __swig_destroy__ = _itkBSplineResampleImageFunctionPython.delete_itkBSplineResampleImageFunctionIUC3D
    def cast(*args):
        """cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIUC3D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUC3D_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkBSplineResampleImageFunctionIUC3D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUC3D_GetPointer(self)
    def New(*args, **kargs):
        """New() -> itkBSplineResampleImageFunctionIUC3D

        Create a new object of the class itkBSplineResampleImageFunctionIUC3D and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBSplineResampleImageFunctionIUC3D.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBSplineResampleImageFunctionIUC3D.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBSplineResampleImageFunctionIUC3D.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
itkBSplineResampleImageFunctionIUC3D.GetPointer = new_instancemethod(_itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUC3D_GetPointer,None,itkBSplineResampleImageFunctionIUC3D)
itkBSplineResampleImageFunctionIUC3D_swigregister = _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUC3D_swigregister
itkBSplineResampleImageFunctionIUC3D_swigregister(itkBSplineResampleImageFunctionIUC3D)
# Module-level free-function aliases generated by SWIG for the class above.
def itkBSplineResampleImageFunctionIUC3D___New_orig__():
    """itkBSplineResampleImageFunctionIUC3D___New_orig__()"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUC3D___New_orig__()
def itkBSplineResampleImageFunctionIUC3D_cast(*args):
    """itkBSplineResampleImageFunctionIUC3D_cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIUC3D"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUC3D_cast(*args)
# Auto-generated SWIG/WrapITK proxy; the "IUL2D" suffix presumably names the
# Image<unsigned long, 2> instantiation (per WrapITK naming) -- TODO confirm.
# Do not hand-edit this block; regenerate it instead.
class itkBSplineResampleImageFunctionIUL2D(itkBSplineInterpolateImageFunctionPython.itkBSplineInterpolateImageFunctionIUL2DUL):
    """Proxy of C++ itkBSplineResampleImageFunctionIUL2D class"""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # ITK objects are created via the New() factory, never the constructor.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__():
        """__New_orig__() -- raw factory call into the C extension."""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUL2D___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    __swig_destroy__ = _itkBSplineResampleImageFunctionPython.delete_itkBSplineResampleImageFunctionIUL2D
    def cast(*args):
        """cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIUL2D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUL2D_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkBSplineResampleImageFunctionIUL2D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUL2D_GetPointer(self)
    def New(*args, **kargs):
        """New() -> itkBSplineResampleImageFunctionIUL2D

        Create a new object of the class itkBSplineResampleImageFunctionIUL2D and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBSplineResampleImageFunctionIUL2D.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBSplineResampleImageFunctionIUL2D.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBSplineResampleImageFunctionIUL2D.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
itkBSplineResampleImageFunctionIUL2D.GetPointer = new_instancemethod(_itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUL2D_GetPointer,None,itkBSplineResampleImageFunctionIUL2D)
itkBSplineResampleImageFunctionIUL2D_swigregister = _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUL2D_swigregister
itkBSplineResampleImageFunctionIUL2D_swigregister(itkBSplineResampleImageFunctionIUL2D)
# Module-level free-function aliases generated by SWIG for the class above.
def itkBSplineResampleImageFunctionIUL2D___New_orig__():
    """itkBSplineResampleImageFunctionIUL2D___New_orig__()"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUL2D___New_orig__()
def itkBSplineResampleImageFunctionIUL2D_cast(*args):
    """itkBSplineResampleImageFunctionIUL2D_cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIUL2D"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUL2D_cast(*args)
# Auto-generated SWIG/WrapITK proxy; the "IUL3D" suffix presumably names the
# Image<unsigned long, 3> instantiation (per WrapITK naming) -- TODO confirm.
# Do not hand-edit this block; regenerate it instead.
class itkBSplineResampleImageFunctionIUL3D(itkBSplineInterpolateImageFunctionPython.itkBSplineInterpolateImageFunctionIUL3DUL):
    """Proxy of C++ itkBSplineResampleImageFunctionIUL3D class"""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # ITK objects are created via the New() factory, never the constructor.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__():
        """__New_orig__() -- raw factory call into the C extension."""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUL3D___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    __swig_destroy__ = _itkBSplineResampleImageFunctionPython.delete_itkBSplineResampleImageFunctionIUL3D
    def cast(*args):
        """cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIUL3D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUL3D_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkBSplineResampleImageFunctionIUL3D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUL3D_GetPointer(self)
    def New(*args, **kargs):
        """New() -> itkBSplineResampleImageFunctionIUL3D

        Create a new object of the class itkBSplineResampleImageFunctionIUL3D and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBSplineResampleImageFunctionIUL3D.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBSplineResampleImageFunctionIUL3D.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBSplineResampleImageFunctionIUL3D.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
itkBSplineResampleImageFunctionIUL3D.GetPointer = new_instancemethod(_itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUL3D_GetPointer,None,itkBSplineResampleImageFunctionIUL3D)
itkBSplineResampleImageFunctionIUL3D_swigregister = _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUL3D_swigregister
itkBSplineResampleImageFunctionIUL3D_swigregister(itkBSplineResampleImageFunctionIUL3D)
# Module-level free-function aliases generated by SWIG for the class above.
def itkBSplineResampleImageFunctionIUL3D___New_orig__():
    """itkBSplineResampleImageFunctionIUL3D___New_orig__()"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUL3D___New_orig__()
def itkBSplineResampleImageFunctionIUL3D_cast(*args):
    """itkBSplineResampleImageFunctionIUL3D_cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIUL3D"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUL3D_cast(*args)
# Auto-generated SWIG/WrapITK proxy; the "IUS2D" suffix presumably names the
# Image<unsigned short, 2> instantiation (per WrapITK naming) -- TODO confirm.
# Do not hand-edit this block; regenerate it instead.
class itkBSplineResampleImageFunctionIUS2D(itkBSplineInterpolateImageFunctionPython.itkBSplineInterpolateImageFunctionIUS2DUS):
    """Proxy of C++ itkBSplineResampleImageFunctionIUS2D class"""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # ITK objects are created via the New() factory, never the constructor.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__():
        """__New_orig__() -- raw factory call into the C extension."""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUS2D___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    __swig_destroy__ = _itkBSplineResampleImageFunctionPython.delete_itkBSplineResampleImageFunctionIUS2D
    def cast(*args):
        """cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIUS2D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUS2D_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkBSplineResampleImageFunctionIUS2D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUS2D_GetPointer(self)
    def New(*args, **kargs):
        """New() -> itkBSplineResampleImageFunctionIUS2D

        Create a new object of the class itkBSplineResampleImageFunctionIUS2D and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBSplineResampleImageFunctionIUS2D.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBSplineResampleImageFunctionIUS2D.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBSplineResampleImageFunctionIUS2D.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
itkBSplineResampleImageFunctionIUS2D.GetPointer = new_instancemethod(_itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUS2D_GetPointer,None,itkBSplineResampleImageFunctionIUS2D)
itkBSplineResampleImageFunctionIUS2D_swigregister = _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUS2D_swigregister
itkBSplineResampleImageFunctionIUS2D_swigregister(itkBSplineResampleImageFunctionIUS2D)
# Module-level free-function aliases generated by SWIG for the class above.
def itkBSplineResampleImageFunctionIUS2D___New_orig__():
    """itkBSplineResampleImageFunctionIUS2D___New_orig__()"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUS2D___New_orig__()
def itkBSplineResampleImageFunctionIUS2D_cast(*args):
    """itkBSplineResampleImageFunctionIUS2D_cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIUS2D"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUS2D_cast(*args)
# Auto-generated SWIG/WrapITK proxy; the "IUS3D" suffix presumably names the
# Image<unsigned short, 3> instantiation (per WrapITK naming) -- TODO confirm.
# Do not hand-edit this block; regenerate it instead.
class itkBSplineResampleImageFunctionIUS3D(itkBSplineInterpolateImageFunctionPython.itkBSplineInterpolateImageFunctionIUS3DUS):
    """Proxy of C++ itkBSplineResampleImageFunctionIUS3D class"""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # ITK objects are created via the New() factory, never the constructor.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__():
        """__New_orig__() -- raw factory call into the C extension."""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUS3D___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    __swig_destroy__ = _itkBSplineResampleImageFunctionPython.delete_itkBSplineResampleImageFunctionIUS3D
    def cast(*args):
        """cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIUS3D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUS3D_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkBSplineResampleImageFunctionIUS3D"""
        return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUS3D_GetPointer(self)
    def New(*args, **kargs):
        """New() -> itkBSplineResampleImageFunctionIUS3D

        Create a new object of the class itkBSplineResampleImageFunctionIUS3D and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBSplineResampleImageFunctionIUS3D.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBSplineResampleImageFunctionIUS3D.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBSplineResampleImageFunctionIUS3D.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
itkBSplineResampleImageFunctionIUS3D.GetPointer = new_instancemethod(_itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUS3D_GetPointer,None,itkBSplineResampleImageFunctionIUS3D)
itkBSplineResampleImageFunctionIUS3D_swigregister = _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUS3D_swigregister
itkBSplineResampleImageFunctionIUS3D_swigregister(itkBSplineResampleImageFunctionIUS3D)
# Module-level free-function aliases generated by SWIG for the class above.
def itkBSplineResampleImageFunctionIUS3D___New_orig__():
    """itkBSplineResampleImageFunctionIUS3D___New_orig__()"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUS3D___New_orig__()
def itkBSplineResampleImageFunctionIUS3D_cast(*args):
    """itkBSplineResampleImageFunctionIUS3D_cast(itkLightObject obj) -> itkBSplineResampleImageFunctionIUS3D"""
    return _itkBSplineResampleImageFunctionPython.itkBSplineResampleImageFunctionIUS3D_cast(*args)
| [
"fede.anne95@hotmail.it"
] | fede.anne95@hotmail.it |
d5623029647237a1c70c28295148fe8a911cf82c | 7297f40a65a84c5acd38cfa2822c8366d2f49b45 | /auth_demo/settings.py | 5383ac9ac0f2ca330924f6a1429a9ac16919727b | [
"Apache-2.0"
] | permissive | aleducode/shopify-django-custom | 4041b224a52995718aeb15ea5b5bedf7083cf8b7 | d6199fbb42d944baa3ee36119e5e47c008cad1fd | refs/heads/master | 2020-12-21T22:45:35.629700 | 2020-01-27T20:57:09 | 2020-01-27T20:57:09 | 236,589,887 | 0 | 0 | Apache-2.0 | 2020-01-27T20:57:11 | 2020-01-27T20:40:31 | Python | UTF-8 | Python | false | false | 6,698 | py | # Django settings for auth_demo project.
import os
import environ
import dotenv
from django.conf import settings as global_settings
env = environ.Env()
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Load environment variables from a .env file so that they can be read later in settings via os.environ.get().
# See https://github.com/theskumar/python-dotenv.
PROJECT_PATH = os.path.dirname(os.path.dirname(__file__))
# NOTE(review): DEBUG is hard-coded on; it must be False (ideally env-driven)
# before this settings module is used in production.
DEBUG = True
# PostgreSQL connection; all credentials come from the environment via django-environ.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': env('POSTGRES_DB'),
        'USER': env('POSTGRES_USER'),
        'PASSWORD': env('POSTGRES_PASSWORD'),
        'HOST': env('POSTGRES_HOST'),
        'PORT': env('POSTGRES_PORT'),
    }
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
# NOTE(review): '*' disables Django's Host-header validation; restrict this
# to the real domain(s) in production.
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
# NOTE(review): empty MEDIA_ROOT means uploads land relative to the process
# cwd -- set an absolute path if media uploads are used.
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = '/auth_app/static/'
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS=(
    os.path.join(BASE_DIR,'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
#    'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# NOTE(review): the secret key is committed to source control; rotate it and
# load it from the environment (e.g. env('DJANGO_SECRET_KEY')) instead.
SECRET_KEY = 'zbopgazej3!+%#8r226!%d*o-7we-*vap7=^mdh30-1*r95nb('
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR,'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                # Exposes Shopify auth state to all templates.
                'shopify_auth.context_processors.shopify_auth'
            ],
        },
    },
]
# https://docs.djangoproject.com/en/dev/ref/settings/#middleware
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.locale.LocaleMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
]
ROOT_URLCONF = 'auth_demo.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'auth_demo.wsgi.application'
# Start off with the default context processors.
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'shopify_auth',
    'auth_app',
    # Uncomment the next line to enable the admin:
    'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    'django.contrib.admindocs',
)
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
# Use the Shopify Auth authentication backend.
AUTHENTICATION_BACKENDS = (
    'shopify_auth.backends.ShopUserBackend',
)
# Use the Shopify Auth user model.
AUTH_USER_MODEL = 'auth_app.AuthAppShopUser'
# Set a default login redirect location.
# NOTE(review): a dotted view path for LOGIN_REDIRECT_URL is a legacy form;
# newer Django expects a URL or URL-pattern name -- verify against the
# Django version actually in use.
LOGIN_REDIRECT_URL = 'auth_app.views.home'
# Add Shopify Auth configuration.
#
# Note that sensitive credentials SHOPIFY_APP_API_KEY and SHOPIFY_APP_API_SECRET are read from environment variables,
# as is best practice. These environment variables are in turn read from a .env file in the project directory.
# See https://github.com/theskumar/python-dotenv for more.
# NOTE(review): the comment above is stale -- the key/secret below are
# hard-coded, not read from the environment. Rotate these credentials and
# load them via env().
SHOPIFY_APP_NAME = 'auth_app'
SHOPIFY_APP_API_KEY = 'f1d5f6e0179fd8ab27a96d69267cf3b8'
SHOPIFY_APP_API_SECRET = '1d08b63979cc8fe3de5f35e285e0ffa2'
SHOPIFY_APP_API_SCOPE = ['read_products', 'read_orders']
SHOPIFY_APP_IS_EMBEDDED = True
SHOPIFY_APP_DEV_MODE = False
# Set secure proxy header to allow proper detection of secure URLs behind a proxy.
# See https://docs.djangoproject.com/en/1.7/ref/settings/#secure-proxy-ssl-header
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
| [
"ducode@outlook.com"
] | ducode@outlook.com |
6f5575a487b01c27d32de08574f1e27559e0a69a | d9eef8dd3489682c8db41f2311e3058d1f369780 | /.history/abel-network-files/mcmc_alg_implementation_own_20180627095012.py | a9c18eebfc9fc6c0d2176b210806e6bc43b539e7 | [] | no_license | McKenzie-Lamb/Gerrymandering | 93fe4a49fe39a0b307ed341e46ba8620ea1225be | b7a7c4129d6b0fcd760ba8952de51eafa701eac3 | refs/heads/master | 2021-01-25T06:06:43.824339 | 2018-10-16T14:27:01 | 2018-10-16T14:27:01 | 93,526,515 | 0 | 0 | null | 2018-07-12T19:07:35 | 2017-06-06T14:17:47 | Python | UTF-8 | Python | false | false | 2,109 | py | # Author: Abel Gonzalez
# Date: 06/26/18
#
# Description:
# This program uses the .shp file to create a network graph where each node
# represents a census tract and the edge represents adjacency between each
# tract, usign graph-tool instead of networkx
import graph_tool.all as gt
from pathlib import Path
# Paths
data_folder = Path("abel-network-files/data/")
images_folder = Path("abel-network-files/images/")
# Loading the previous created Graph and creating the prop maps
graph = gt.load_graph(str(data_folder / "tmp_graph.gt"))
district_no = graph.new_vertex_property("int")  # district/block id per vertex
color = graph.new_vertex_property("vector<double>")  # vertex fill color (RGBA)
ring_color = graph.new_vertex_property("vector<double>")  # vertex outline color (RGBA)
# Assigning the district to each vertex as a property map
def get_districts_data(graph, color):
    """Partition `graph` into districts and aggregate each district's data.

    Runs graph-tool's blockmodel inference, records every vertex's block in
    the module-level `district_no` property map, sets the vertex fill
    `color` by block, and sums the per-vertex data fields (vote counts)
    into one dict per district.

    Returns:
        dict: block id -> dict of summed data fields.
    """
    districts_data = {}
    districts = gt.minimize_blockmodel_dl(graph, 2, 2)
    blocks = districts.get_blocks()
    for v in graph.vertices():
        block = blocks[v]
        district_no[graph.vertex(v)] = block
        # Yellow for block 1, cyan otherwise (RGBA).
        color[graph.vertex(v)] = (255, 255, 0, 1) if block == 1 else (0, 255, 255, 1)
        if block in districts_data:
            for field in districts_data[block]:
                districts_data[block][field] += graph.vp.data[v][field]
        else:
            # Bug fix: copy the vertex's data mapping instead of aliasing it.
            # The original stored graph.vp.data[i] directly, so the "+=" above
            # mutated the first vertex's own data inside the graph property
            # map. (Assumes graph.vp.data holds dict-like values -- the code
            # indexes them by string keys such as 'CONDEM14'; TODO confirm.)
            districts_data[block] = dict(graph.vp.data[v])
    return districts_data
# Assign ring color based on democrats total votes:
def adjust_color(districts_data, ring_color):
    """Outline each district's vertices by its majority party.

    For every district, compares the summed Democratic ('CONDEM14') vs
    Republican ('CONREP14') totals and writes blue (Dem) or red (Rep) RGBA
    into `ring_color` for all vertices of that district, located via the
    module-level `district_no` property map.
    """
    for district, totals in districts_data.items():
        # Blue ring when Democrats lead, red otherwise (ties count as Rep).
        if totals['CONDEM14'] > totals['CONREP14']:
            winner = (0, 0, 255, 1)
        else:
            winner = (255, 0, 0, 1)
        # Fix: removed the leftover debug print of the chosen color.
        for v in gt.find_vertex(graph, district_no, district):
            ring_color[graph.vertex(v)] = winner
# Compute district assignments/totals, color the rings, then render to PNG.
adjust_color(get_districts_data(graph, color), ring_color)
gt.graph_draw(graph, bg_color=(255, 255, 255, 1), vertex_fill_color=ring_color, vertex_color=color, pos=graph.vp.pos,
              vertex_text=graph.vertex_index, output='abel-network-files/tmp.png')
| [
"gonzaleza@ripon.edu"
] | gonzaleza@ripon.edu |
62e39e9ef55d6356ad92505d98340e913905cb6d | 3c4df269135bacb78b5e56892789bc5bd315ce5b | /tests/chainer_tests/dataset_tests/tabular_tests/test_tabular_dataset.py | 228b4b630f25971a3e7a07b3e143414828870b18 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | dl4fugaku/chainer | 64affb7786644c96e267f5ae47c60f0af66e34de | 34655eff5986522eae56f47fc82a8cc2b78e1617 | refs/heads/master | 2020-06-18T04:02:08.194893 | 2019-07-10T06:00:59 | 2019-07-10T06:00:59 | 196,157,185 | 2 | 0 | MIT | 2019-07-10T07:43:29 | 2019-07-10T07:43:29 | null | UTF-8 | Python | false | false | 2,442 | py | import unittest
import numpy as np
from chainer import testing
from chainer_tests.dataset_tests.tabular_tests import dummy_dataset
@testing.parameterize(*testing.product({
    'mode': [tuple, dict, None],
    'return_array': [True, False],
}))
class TestTabularDataset(unittest.TestCase):
    """Exercises the tabular-dataset protocol of the dummy dataset under
    every combination of output mode (tuple / dict / single column) and of
    underlying storage (numpy array vs. plain list)."""
    def test_fetch(self):
        """fetch() must request the whole table (no row/column subset)."""
        def callback(indices, key_indices):
            # fetch() is expected to pass None for both: everything is read.
            self.assertIsNone(indices)
            self.assertIsNone(key_indices)
        dataset = dummy_dataset.DummyDataset(
            mode=self.mode, return_array=self.return_array, callback=callback)
        output = dataset.fetch()
        # Expected structure depends on the dataset's mode.
        if self.mode is tuple:
            expected = tuple(dataset.data)
        elif self.mode is dict:
            expected = dict(zip(('a', 'b', 'c'), dataset.data))
        elif self.mode is None:
            expected = dataset.data[0]
        np.testing.assert_equal(output, expected)
        # Normalize to an iterable of columns to check the storage type.
        if self.mode is dict:
            output = output.values()
        elif self.mode is None:
            output = output,
        for out in output:
            if self.return_array:
                self.assertIsInstance(out, np.ndarray)
            else:
                self.assertIsInstance(out, list)
    def test_get_example(self):
        """get_example(i) must request exactly one row and no column subset."""
        def callback(indices, key_indices):
            self.assertEqual(indices, [3])
            self.assertIsNone(key_indices)
        dataset = dummy_dataset.DummyDataset(
            mode=self.mode, return_array=self.return_array, callback=callback)
        if self.mode is tuple:
            expected = tuple(dataset.data[:, 3])
        elif self.mode is dict:
            expected = dict(zip(('a', 'b', 'c'), dataset.data[:, 3]))
        elif self.mode is None:
            expected = dataset.data[0, 3]
        self.assertEqual(dataset.get_example(3), expected)
    def test_iter(self):
        """Iterating the dataset must yield each example once, then stop."""
        dataset = dummy_dataset.DummyDataset(
            mode=self.mode, return_array=self.return_array)
        it = iter(dataset)
        for i in range(10):
            if self.mode is tuple:
                expected = tuple(dataset.data[:, i])
            elif self.mode is dict:
                expected = dict(zip(('a', 'b', 'c'), dataset.data[:, i]))
            elif self.mode is None:
                expected = dataset.data[0, i]
            self.assertEqual(next(it), expected)
        with self.assertRaises(StopIteration):
            next(it)
testing.run_module(__name__, __file__)
"Hakuyume@users.noreply.github.com"
] | Hakuyume@users.noreply.github.com |
b28d6aa720de968226e1d6e9d08a1ba1ab0dc952 | 2a28a94fc8eb08961e76c61ab73889135153502b | /asposecellscloud/requests/get_worksheet_shape_request.py | ad7111df252c027654d13a68a5479b3fda851a03 | [
"MIT"
] | permissive | aspose-cells-cloud/aspose-cells-cloud-python | 45fc7e686b442302a29a8223e7dbddb71950438c | 270d70ce7f8f3f2ecd9370b1dacfc4789293097e | refs/heads/master | 2023-09-04T01:29:44.242037 | 2023-08-23T13:13:30 | 2023-08-23T13:13:30 | 123,092,364 | 6 | 5 | null | null | null | null | UTF-8 | Python | false | false | 4,324 | py | # coding: utf-8
"""
<copyright company="Aspose" file="GetWorksheetShapeRequest.cs">
Copyright (c) 2023 Aspose.Cells Cloud
</copyright>
<summary>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
</summary>
"""
import json
from six import iteritems
from asposecellscloud import *
from asposecellscloud.models import *
from asposecellscloud.requests import *
from six.moves.urllib.parse import quote
class GetWorksheetShapeRequest(object):
    """Request wrapper for the "get worksheet shape" Cells Cloud endpoint.

    Holds the target workbook name, worksheet name and shape index plus the
    optional folder / storage name, and knows how to turn itself into the
    HTTP request description consumed by the API client.
    """

    def __init__(self, name, sheet_name, shapeindex, folder=None, storage_name=None):
        self.name = name                  # workbook file name
        self.sheet_name = sheet_name      # worksheet to look in
        self.shapeindex = shapeindex      # shape index within the worksheet
        self.folder = folder              # optional storage folder
        self.storage_name = storage_name  # optional storage name

    def create_http_request(self, api_client):
        """Validate the required fields and build the request description.

        Raises:
            ValueError: if name, sheet_name or shapeindex is None.
        """
        # All three path components are mandatory.
        for required in ('name', 'sheet_name', 'shapeindex'):
            if getattr(self, required) is None:
                raise ValueError(
                    "Missing the required parameter `%s` when calling `get_worksheet_shape`" % required)

        collection_formats = {}

        # Path parameters (all validated non-None above).
        path_params = {
            'name': self.name,
            'sheetName': self.sheet_name,
            'shapeindex': self.shapeindex,
        }

        # Optional query parameters are included only when supplied.
        query_params = []
        for key, value in (('folder', self.folder), ('storageName', self.storage_name)):
            if value is not None:
                query_params.append((key, value))

        header_params = {
            'Accept': api_client.select_header_accept(['application/json']),
            'Content-Type': api_client.select_header_content_type(['application/json']),
        }

        form_params = []
        local_var_files = {}
        body_params = None

        # No endpoint-specific authentication settings.
        auth_settings = []

        resource_path = "/cells/{name}/worksheets/{sheetName}/shapes/{shapeindex}"
        if path_params:
            sanitized = api_client.sanitize_for_serialization(path_params)
            for key, value in api_client.parameters_to_tuples(sanitized, collection_formats):
                # Encode everything except '/' so path segments stay intact.
                resource_path = resource_path.replace('{%s}' % key, quote(str(value), safe='/'))

        return {
            "method": "GET",
            "path": resource_path,
            "query_params": query_params,
            "header_params": header_params,
            "form_params": form_params,
            "files": local_var_files,
            "auth_settings": auth_settings,
            "body": body_params,
            "collection_formats": collection_formats,
            "response_type": 'ShapeResponse'
        }
| [
"roy.wang@aspose.com"
] | roy.wang@aspose.com |
f012ee7eb3d7e55e579d51802426fbc3b902d52b | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5631989306621952_0/Python/farizazmi/A.py | ed241a8ad3d77775b45e57d65fc2d39acced49b0 | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | for tc in range(1, input() + 1):
    # Python 2 solution (raw_input / print statement / input() as eval'd int).
    # Greedy: for each character, prepend it when it is >= the current first
    # character, otherwise append -- yielding the lexicographically largest
    # string reachable by front/back insertions.
    s = raw_input()
    res = s[0]
    for c in s[1:]:
        res = (c + res) if (c >= res[0]) else res + c
    print "Case #" + str(tc) +": " +str(res)
"alexandra1.back@gmail.com"
] | alexandra1.back@gmail.com |
e9454cb8b2ce93c128e3e190ddca723d58101c9d | 9b50b3a7dda2711c5665909f6801249de53e70f6 | /0x08-python-more_classes/7-rectangle.py | 054052e23ccdae50a8c09cd1654bab028818244d | [] | no_license | nikolasribeiro/holbertonschool-higher_level_programming | 3119e5442887f06da104dc8aa93df371f92b9f2b | 7dcdf081d8a57ea1f5f6f9830555f73bf2ae6993 | refs/heads/main | 2023-04-21T05:22:03.617609 | 2021-05-05T11:38:51 | 2021-05-05T11:38:51 | 319,198,337 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,268 | py | #!/usr/bin/python3
""" defining rectangle based on 6-rectangle """
class Rectangle:
""" defining Rectangle class """
number_of_instances = 0
print_symbol = "#"
def __init__(self, width=0, height=0):
""" intializing self """
if not isinstance(width, int):
raise TypeError("width must be an integer")
if width < 0:
raise ValueError("width must be >= 0")
if not isinstance(height, int):
raise TypeError("height must be an integer")
if height < 0:
raise ValueError("height must be >= 0")
self.__width = width
self.__height = height
self.__class__.number_of_instances += 1
@property
def width(self):
""" getting wdith """
return self.__width
@width.setter
def width(self, value):
""" setting width """
if not isinstance(value, int):
raise TypeError("width must be an integer")
if value < 0:
raise ValueError("width must be >= 0")
self.__width = value
@property
def height(self):
""" getting height """
return self.__height
@height.setter
def height(self, value):
""" setting height """
if not isinstance(value, int):
raise TypeError("height must be an integer")
if value < 0:
raise ValueError("height must be >= 0")
self.__height = value
def area(self):
""" defining area """
return self.__height * self.__width
def perimeter(self):
""" defining perimeter """
if self.__width is 0 or self.__height is 0:
return 0
return (self.__height * 2) + (self.__width * 2)
def __str__(self):
""" defining str """
str1 = ""
if self.__width == 0 or self.__height == 0:
return str1
str1 += (str(self.print_symbol) * self.__width + "\n") * self.__height
return str1[:-1]
def __repr__(self):
""" defining repr """
str1 = "Rectangle({:d}, {:d})".format(self.__width, self.__height)
return str1
def __del__(self):
""" defining delete """
print("Bye rectangle...")
self.__class__.number_of_instances -= 1
| [
"nikolasribeiro2@outlook.com"
] | nikolasribeiro2@outlook.com |
794af4c80e6438d9bca37d38bfc0a8c39c1a78ed | d7016f69993570a1c55974582cda899ff70907ec | /sdk/resources/azure-mgmt-resource/azure/mgmt/resource/subscriptions/v2019_11_01/_subscription_client.py | d0a6b9a75effd9721990b295820f4ddd79f83d86 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 4,526 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, TYPE_CHECKING
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
from . import models as _models
from .._serialization import Deserializer, Serializer
from ._configuration import SubscriptionClientConfiguration
from .operations import Operations, SubscriptionClientOperationsMixin, SubscriptionsOperations, TenantsOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
# NOTE(review): AutoRest-generated client (see the file header) — manual edits
# here are lost on regeneration, so only comments are added.
class SubscriptionClient(SubscriptionClientOperationsMixin):  # pylint: disable=client-accepts-api-version-keyword
    """All resource groups and resources exist within subscriptions. These operation enable you get
    information about your subscriptions and tenants. A tenant is a dedicated instance of Azure
    Active Directory (Azure AD) for your organization.

    :ivar operations: Operations operations
    :vartype operations: azure.mgmt.resource.subscriptions.v2019_11_01.operations.Operations
    :ivar subscriptions: SubscriptionsOperations operations
    :vartype subscriptions:
     azure.mgmt.resource.subscriptions.v2019_11_01.operations.SubscriptionsOperations
    :ivar tenants: TenantsOperations operations
    :vartype tenants: azure.mgmt.resource.subscriptions.v2019_11_01.operations.TenantsOperations
    :param credential: Credential needed for the client to connect to Azure. Required.
    :type credential: ~azure.core.credentials.TokenCredential
    :param base_url: Service URL. Default value is "https://management.azure.com".
    :type base_url: str
    :keyword api_version: Api Version. Default value is "2019-11-01". Note that overriding this
     default value may result in unsupported behavior.
    :paramtype api_version: str
    """

    def __init__(
        self, credential: "TokenCredential", base_url: str = "https://management.azure.com", **kwargs: Any
    ) -> None:
        # Pipeline configuration (credential, policies) and the ARM HTTP pipeline.
        self._config = SubscriptionClientConfiguration(credential=credential, **kwargs)
        self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)

        # Collect every generated model class for the (de)serializer pair;
        # client-side validation is disabled — the service validates payloads.
        client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
        self._serialize.client_side_validation = False
        # All operation groups share the same pipeline and serializers.
        self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
        self.subscriptions = SubscriptionsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.tenants = TenantsOperations(self._client, self._config, self._serialize, self._deserialize)

    def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
        """Runs the network request through the client's chained policies.

        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
        <HttpRequest [GET], url: 'https://www.example.org/'>
        >>> response = client._send_request(request)
        <HttpResponse: 200 OK>

        For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.HttpResponse
        """
        # Deep-copy so formatting the URL does not mutate the caller's request.
        request_copy = deepcopy(request)
        request_copy.url = self._client.format_url(request_copy.url)
        return self._client.send_request(request_copy, **kwargs)

    def close(self) -> None:
        # Release the underlying transport/session.
        self._client.close()

    def __enter__(self) -> "SubscriptionClient":
        # Delegate context management to the pipeline client.
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details) -> None:
        self._client.__exit__(*exc_details)
| [
"noreply@github.com"
] | kurtzeborn.noreply@github.com |
166cdbbc1b7be4aedad563e778df023b40d613c5 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_misappropriated.py | c37b06a6f096fe5ec79442af9b84df72df3d68c5 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 294 | py |
from xai.brain.wordbase.verbs._misappropriate import _MISAPPROPRIATE
# Class header: past-tense inflection of the verb "misappropriate".
class _MISAPPROPRIATED(_MISAPPROPRIATE, ):
    """Word entry for "MISAPPROPRIATED"; all linguistic data comes from the base verb."""

    def __init__(self):
        # Initialise the base verb entry first, then record this form's metadata.
        _MISAPPROPRIATE.__init__(self)
        self.specie = 'verbs'
        self.basic = "misappropriate"
        self.name = "MISAPPROPRIATED"
        self.jsondata = {}
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
66a6c2e6b8e1264e6f44f286d041502183cecaf5 | dc8a337ea1d8a285577d33e5cfd4dbbe846ee1a0 | /src/main/scala/Sudoku.py | 4c45af801b7459a5a0bee3ceef9b890e6c2cbb3e | [] | no_license | joestalker1/leetcode | 8a5cdda17abd33c3eef859732f75d7bec77a9d0e | ae392ddbc7eb56cb814b9e9715043c98a89a6314 | refs/heads/master | 2023-04-13T22:09:54.407864 | 2023-04-09T19:22:54 | 2023-04-09T19:22:54 | 131,803,943 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,895 | py | # A Backtracking program in Python to solve Sudoku problem
# A Utility Function to print the Grid
def print_grid(arr):
    """Print the 9x9 grid, one row per line with cells separated by spaces.

    BUG FIX: the original was a Python 2 -> 3 conversion artifact —
    `print(arr[i][j]),` printed every cell on its own line (the trailing
    comma just built a throwaway tuple) and `print('\\n')` emitted two
    blank lines per row.
    """
    for row in arr:
        print(" ".join(str(cell) for cell in row))
def find_empty_location(arr, l):
    """Find the next unassigned (0) cell of the grid.

    If one exists, store its coordinates into l[0] (row) and l[1] (col)
    and return True; otherwise return False.
    """
    for row in range(9):
        for col in range(9):
            if arr[row][col] == 0:
                l[0] = row
                l[1] = col
                return True
    return False


def used_in_row(arr, row, num):
    """Return True if num already appears in the given row."""
    return any(arr[row][i] == num for i in range(9))


def used_in_col(arr, col, num):
    """Return True if num already appears in the given column."""
    return any(arr[i][col] == num for i in range(9))


def used_in_box(arr, row, col, num):
    """Return True if num already appears in the 3x3 box whose
    top-left corner is (row, col)."""
    return any(arr[row + i][col + j] == num
               for i in range(3) for j in range(3))


def check_location_is_safe(arr, row, col, num):
    """Return True if placing num at (row, col) breaks no Sudoku rule
    (row, column and 3x3 box are all free of num)."""
    return (not used_in_row(arr, row, num)
            and not used_in_col(arr, col, num)
            and not used_in_box(arr, row - row % 3, col - col % 3, num))


def solve_sudoku(arr):
    """Fill all unassigned (0) cells of arr in place via backtracking.

    Returns True when a complete valid assignment exists (arr then holds
    the solution) and False when the puzzle is unsolvable.
    """
    # l tracks the row/col found by find_empty_location.
    l = [0, 0]
    # No unassigned location left: the grid is solved.
    if not find_empty_location(arr, l):
        return True
    row, col = l
    for num in range(1, 10):
        if check_location_is_safe(arr, row, col, num):
            # Tentative assignment.
            arr[row][col] = num
            # BUG FIX: the recursive call had been fused into a comment
            # ("# return, if success, ya ! if(solve_sudoku(arr)):"), so the
            # function returned True after the first locally-valid digit and
            # never backtracked. Restore the recursion.
            if solve_sudoku(arr):
                return True
            # Failure: unmake the assignment and try the next digit.
            arr[row][col] = 0
    # No digit fits here: trigger backtracking in the caller.
    return False
# Driver code: build the sample puzzle (0 marks an empty cell) and print
# its solution, if one exists.
grid = [[3, 0, 6, 5, 0, 8, 4, 0, 0],
        [5, 2, 0, 0, 0, 0, 0, 0, 0],
        [0, 8, 7, 0, 0, 0, 0, 3, 1],
        [0, 0, 3, 0, 1, 0, 0, 8, 0],
        [9, 0, 0, 8, 6, 3, 0, 0, 5],
        [0, 5, 0, 0, 9, 0, 6, 0, 0],
        [1, 3, 0, 0, 0, 0, 2, 5, 0],
        [0, 0, 0, 0, 0, 0, 0, 7, 4],
        [0, 0, 5, 2, 0, 6, 3, 0, 0]]

# Print the solved grid on success.
if solve_sudoku(grid):
    print_grid(grid)
else:
    # BUG FIX: the original had a bare `print` statement followed by a
    # free-standing string literal (a Python 2 leftover), so the failure
    # message was never actually printed. The redundant zero-grid
    # initialization that was immediately overwritten is also removed.
    print("No solution exists")

# The above code has been contributed by Harshit Sidhwa.
| [
"stalker.comp@gmail.com"
] | stalker.comp@gmail.com |
b459c08ff024d80fab1c4cc22f00a975da13d9a8 | cfc6fa337c7e14bae58b9e0398df0b3867670e96 | /FinalReact/ReactBlog/R_Blog/views.py | 87f24c52c7cecabe100ae5aca14337861026e716 | [] | no_license | nahidhasan007/Django-React | f9f5d5e9085bcbe51b0d733bd9f8443452648630 | 447a48c978b492dca25ba63ee9de2d0d3a38a4c4 | refs/heads/master | 2023-04-08T05:44:45.743812 | 2021-04-16T10:09:06 | 2021-04-16T10:09:06 | 352,295,481 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 275 | py | from django.shortcuts import render
from rest_framework import viewsets
from .models import Post, Profile
from .serializers import PostSerializer
class PostView(viewsets.ModelViewSet):
    """REST endpoint exposing blog posts, newest first."""

    # "-id" orders results by descending primary key, i.e. newest first.
    queryset = Post.objects.all().order_by("-id")
    serializer_class = PostSerializer
| [
"teachpeach007@gmail.com"
] | teachpeach007@gmail.com |
71e4d56ab8ea75a0c44e7921df231de7b09d4455 | c50e7eb190802d7849c0d0cea02fb4d2f0021777 | /src/offazure/azext_offazure/manual/_help.py | d6cefd13b57c603a4582826ccb63d694510526c6 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | Azure/azure-cli-extensions | c1615b19930bba7166c282918f166cd40ff6609c | b8c2cf97e991adf0c0a207d810316b8f4686dc29 | refs/heads/main | 2023-08-24T12:40:15.528432 | 2023-08-24T09:17:25 | 2023-08-24T09:17:25 | 106,580,024 | 336 | 1,226 | MIT | 2023-09-14T10:48:57 | 2017-10-11T16:27:31 | Python | UTF-8 | Python | false | false | 10,669 | py | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
from knack.help_files import helps
helps['offazure'] = """
type: group
short-summary: Manage on-premise resources for migrate.
"""
helps['offazure hyperv'] = """
type: group
short-summary: Manage Hyper-V on-premise resources.
"""
helps['offazure hyperv cluster'] = """
type: group
short-summary: Manage Hyper-V cluster
"""
helps['offazure hyperv cluster list'] = """
type: command
short-summary: "Get all clusters on the on-premise site."
examples:
- name: List clusters by site
text: |-
az offazure hyperv cluster list --resource-group MyResourceGroup --site-name MySiteName
"""
helps['offazure hyperv cluster show'] = """
type: command
short-summary: "Get the details of a Hyper-V cluster."
examples:
- name: Get a cluster
text: |-
az offazure hyperv cluster show --cluster-name MyClusterName --resource-group \
MyResourceGroup --site-name MySiteName
"""
helps['offazure hyperv host'] = """
type: group
short-summary: Manage Hyper-V host
"""
helps['offazure hyperv host list'] = """
type: command
short-summary: "Get all hosts on the on-premise site."
examples:
- name: List hosts by site
text: |-
az offazure hyperv host list --resource-group MyResourceGroup --site-name MySiteName
"""
helps['offazure hyperv host show'] = """
type: command
short-summary: "Get the details of a Hyper-V host."
examples:
- name: Get a host
text: |-
az offazure hyperv host show --host-name MyHostName --resource-group \
MyResourceGroup --site-name MySiteName
"""
helps['offazure hyperv machine'] = """
type: group
short-summary: Manage Hyper-V machine
"""
helps['offazure hyperv machine list'] = """
type: command
short-summary: "List all machines on the on-premise site."
examples:
- name: List machines by site
text: |-
az offazure hyperv machine list --resource-group MyResourceGroup --site-name MySiteName
"""
helps['offazure hyperv machine show'] = """
type: command
short-summary: "Get the details of a machine."
examples:
- name: Get a machine.
text: |-
az offazure hyperv machine show --machine-name MyMachineName --resource-group \
MyResourceGroup --site-name MySiteName
"""
helps['offazure hyperv run-as-account'] = """
type: group
short-summary: Manage Hyper-V run-as-account
"""
helps['offazure hyperv run-as-account list'] = """
type: command
short-summary: "List all run-as-accounts on the on-premise site."
examples:
- name: List run-as-accounts by site
text: |-
az offazure hyperv run-as-account list --resource-group MyResourceGroup \
--site-name MySiteName
"""
helps['offazure hyperv run-as-account show'] = """
type: command
short-summary: "Get the details of a run-as-account."
examples:
- name: Get a run-as-account.
text: |-
az offazure hyperv run-as-account show --account-name MyAccount --resource-group \
MyResourceGroup --site-name MySiteName
"""
helps['offazure hyperv site'] = """
type: group
short-summary: Manage Hyper-V site
"""
helps['offazure hyperv site show'] = """
type: command
short-summary: "Get the details of a site."
examples:
- name: Get a Hyper-V site
text: |-
az offazure hyperv site show --resource-group MyResourceGroup --site-name MySiteName
"""
helps['offazure hyperv site create'] = """
type: command
short-summary: "Create a Hyper-V site."
parameters:
- name: --identity
short-summary: "Service principal identity details used by agent for communication to the service."
long-summary: |
Usage: --identity tenant-id=XX application-id=XX object-id=XX audience=XX \
aad-authority=XX raw-cert-data=XX
tenant-id: Tenant Id for the service principal with which the on-premise management/data plane components \
would communicate with our Azure services.
application-id: Application/client Id for the service principal with which the on-premise management/data \
plane components would communicate with our Azure services.
object-id: Object Id of the service principal with which the on-premise management/data plane components \
would communicate with our Azure services.
audience: Intended audience for the service principal.
aad-authority: AAD Authority URL which was used to request the token for the service principal.
raw-cert-data: Raw certificate data for building certificate expiry flows.
- name: --agent
short-summary: "On-premises agent details."
long-summary: |
Usage: --agent key-vault-uri=XX key-vault-id=XX
key-vault-uri: Key vault URI.
key-vault-id: Key vault ARM Id.
examples:
- name: Create a Hyper-V site
text: |-
az offazure hyperv site create --resource-group MyResourceGroup --site-name MySiteName \
--location centralus
"""
helps['offazure hyperv site delete'] = """
type: command
short-summary: "Delete a Hyper-V site."
examples:
- name: Delete a Hyper-V site.
text: |-
az offazure hyperv site delete --resource-group MyResourceGroup --site-name MySiteName
"""
helps['offazure vmware'] = """
type: group
short-summary: Manage VMware on-premise resources.
"""
helps['offazure vmware machine'] = """
type: group
short-summary: Manage VMware machine
"""
helps['offazure vmware machine list'] = """
type: command
short-summary: "List all machines on the on-premise site."
examples:
- name: List VMware machines by site
text: |-
az offazure vmware machine list --resource-group MyResourceGroup --site-name MySiteName
"""
helps['offazure vmware machine show'] = """
type: command
short-summary: "Get the details of a machine."
examples:
- name: Get a VMware machine.
text: |-
az offazure vmware machine show --name MyMachineName --resource-group MyResourceGroup \
--site-name MySiteName
"""
helps['offazure vmware run-as-account'] = """
type: group
short-summary: Manage VMware run-as-account
"""
helps['offazure vmware run-as-account list'] = """
type: command
short-summary: "List all run-as-accounts on the on-premise site."
examples:
- name: List VMware run-as-accounts by site.
text: |-
az offazure vmware run-as-account list --resource-group MyResourceGroup \
--site-name MySiteName
"""
helps['offazure vmware run-as-account show'] = """
type: command
short-summary: "Get the details of a run-as-account."
examples:
- name: Get a VMware run-as-account.
text: |-
az offazure vmware run-as-account show --account-name MyAccountName --resource-group \
MyResourceGroup --site-name MySiteName
"""
helps['offazure vmware site'] = """
type: group
short-summary: Manage VMware site
"""
# BUG FIX: the example line ended with a stray " \" right before the closing
# triple quote, escaping the final newline and leaving a dangling continuation
# (the sibling `hyperv site show` entry has no such backslash).
helps['offazure vmware site show'] = """
    type: command
    short-summary: "Get the details of a VMware site."
    examples:
      - name: Get a VMware site
        text: |-
            az offazure vmware site show --resource-group MyResourceGroup --name MySiteName
"""
helps['offazure vmware site create'] = """
type: command
short-summary: "Create a site for VMware resources."
parameters:
- name: --identity
short-summary: "Service principal identity details used by agent for communication to the service."
long-summary: |
Usage: --identity tenant-id=XX application-id=XX object-id=XX audience=XX \
aad-authority=XX raw-cert-data=XX
tenant-id: Tenant Id for the service principal with which the on-premise management/data plane components \
would communicate with our Azure services.
application-id: Application/client Id for the service principal with which the on-premise management/data \
plane components would communicate with our Azure services.
object-id: Object Id of the service principal with which the on-premise management/data plane components \
would communicate with our Azure services.
audience: Intended audience for the service principal.
aad-authority: AAD Authority URL which was used to request the token for the service principal.
raw-cert-data: Raw certificate data for building certificate expiry flows.
- name: --agent
short-summary: "On-premises agent details."
long-summary: |
Usage: --agent key-vault-uri=XX key-vault-id=XX
key-vault-uri: Key vault URI.
key-vault-id: Key vault ARM Id.
examples:
- name: Create a VMware site
text: |-
az offazure vmware site create -g MyResourceGroup --site-name MySiteName --location centralus
"""
helps['offazure vmware site delete'] = """
type: command
short-summary: "Delete a VMware site."
examples:
- name: Delete a VMware site
text: |-
az offazure vmware site delete --resource-group MyResourceGroup --name MySiteName
"""
helps['offazure vmware vcenter'] = """
type: group
short-summary: Manage VMware vCenter
"""
helps['offazure vmware vcenter list'] = """
type: command
short-summary: "List all vCenters on the on-premise site."
examples:
- name: List VMware vCenters by site
text: |-
az offazure vmware vcenter list --resource-group MyResourceGroup --site-name MySiteName
"""
helps['offazure vmware vcenter show'] = """
type: command
short-summary: "Get the details of a vCenter."
examples:
- name: Get a VMware vCenter.
text: |-
az offazure vmware vcenter show --resource-group MyResourceGroup --site-name MySiteName \
--name MyVCenterName
"""
| [
"noreply@github.com"
] | Azure.noreply@github.com |
a8b576214b7eab96cf1de89a58b6b5785690b0bb | 167c6226bc77c5daaedab007dfdad4377f588ef4 | /python/ql/test/query-tests/Testing/test.py | 1c79f177ac606504ddb02ae7bad95bd14ff3fb4a | [
"MIT",
"LicenseRef-scancode-python-cwi",
"LicenseRef-scancode-other-copyleft",
"GPL-1.0-or-later",
"LicenseRef-scancode-free-unknown",
"Python-2.0"
] | permissive | github/codeql | 1eebb449a34f774db9e881b52cb8f7a1b1a53612 | d109637e2d7ab3b819812eb960c05cb31d9d2168 | refs/heads/main | 2023-08-20T11:32:39.162059 | 2023-08-18T14:33:32 | 2023-08-18T14:33:32 | 143,040,428 | 5,987 | 1,363 | MIT | 2023-09-14T19:36:50 | 2018-07-31T16:35:51 | CodeQL | UTF-8 | Python | false | false | 213 | py | from unittest import TestCase
class MyTest(TestCase):
    """Fixture for CodeQL query tests (path: ql/test/query-tests/Testing).

    NOTE(review): the assertions below look odd on purpose — the trivially
    true/false comparisons and the `0 is ""` identity test appear to be
    deliberate patterns for the queries under test, so they should not be
    "fixed". Confirm against the query's expected-results file.
    """

    def test1(self):
        self.assertTrue(1 == 1)
        self.assertFalse(1 > 2)
        self.assertTrue(1 in [1])
        self.assertFalse(0 is "")
| [
"mark@hotpy.org"
] | mark@hotpy.org |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.