"""
Tests for :mod:`nova_api` and :mod:`nova_objects`.
"""
from __future__ import absolute_import, division, unicode_literals
import json
from six import text_type
from six.moves.urllib.parse import urlencode, parse_qs
from testtools.matchers import (
ContainsDict, Equals, MatchesDict, MatchesListwise, StartsWith)
import treq
from twisted.trial.unittest import SynchronousTestCase
from mimic.test.helpers import (
    json_request, request, request_with_content, validate_link_json)
from mimic.rest.nova_api import NovaApi, NovaControlApi
from mimic.test.behavior_tests import (
behavior_tests_helper_class,
register_behavior)
from mimic.test.fixtures import APIMockHelper, TenantAuthentication
from mimic.util.helper import seconds_to_timestamp
from mimic.model.nova_objects import (
RegionalServerCollection, Server, IPv4Address)
import random
def status_of_server(test_case, server_id):
"""
Retrieve the status of a server.
"""
get_server = request(test_case, test_case.root, b"GET",
test_case.uri + '/servers/' + server_id)
get_server_response = test_case.successResultOf(get_server)
get_server_response_body = test_case.successResultOf(
treq.json_content(get_server_response))
return get_server_response_body['server']['status']
def create_server(helper, name=None, imageRef=None, flavorRef=None,
metadata=None, diskConfig=None, body_override=None,
region="ORD", key_name=None, request_func=json_request):
"""
    Create a server with the given body and return the response object and
    body.
:param name: Name of the server - defaults to "test_server"
:param imageRef: Image of the server - defaults to "test-image"
:param flavorRef: Flavor size of the server - defaults to "test-flavor"
:param metadata: Metadata of the server - optional
:param diskConfig: the "OS-DCF:diskConfig" setting for the server -
optional
:param str body_override: String containing the server args to
override the default server body JSON.
:param str region: The region in which to create the server
    :param callable request_func: the function used to make the request -
        defaults to ``json_request`` (``request_with_content`` may be used
        instead)
    :return: the response object and body, as returned by ``request_func``
"""
body = body_override
if body is None:
data = {
"name": name if name is not None else 'test_server',
"key_name": key_name if key_name is not None else 'test_key',
"imageRef": imageRef if imageRef is not None else "test-image",
"flavorRef": flavorRef if flavorRef is not None else "test-flavor"
}
if metadata is not None:
data['metadata'] = metadata
if diskConfig is not None:
data["OS-DCF:diskConfig"] = diskConfig
body = json.dumps({"server": data}).encode("utf-8")
create_server = request_func(
helper.test_case,
helper.root,
b"POST",
'{0}/servers'.format(helper.get_service_endpoint(
"cloudServersOpenStack", region)).encode("ascii"),
body
)
return helper.test_case.successResultOf(create_server)
def quick_create_server(helper, **create_server_kwargs):
"""
Quickly create a server with a bunch of default parameters, retrieving its
server ID.
    :param create_server_kwargs: keyword arguments passed through to
        :func:`create_server` (e.g. ``name``)
:return: the server ID of the created server
"""
resp, body = create_server(helper, request_func=json_request,
**create_server_kwargs)
helper.test_case.assertEqual(resp.code, 202)
return body["server"]["id"]
def delete_server(helper, server_id):
"""
    Delete the given server and assert that the deletion succeeds.
"""
d = request_with_content(
helper.test_case, helper.root, b"DELETE",
'{0}/servers/{1}'.format(helper.uri, server_id))
resp, body = helper.test_case.successResultOf(d)
helper.test_case.assertEqual(resp.code, 204)
def update_metadata_item(helper, server_id, key, value):
    """
    Update a single metadata item on the given server.
"""
d = request_with_content(
helper.test_case, helper.root, b"PUT",
'{0}/servers/{1}/metadata/{2}'.format(helper.uri, server_id, key),
json.dumps({'meta': {key: value}}).encode("utf-8"))
resp, body = helper.test_case.successResultOf(d)
helper.test_case.assertEqual(resp.code, 200)
def update_metadata(helper, server_id, metadata):
    """
    Replace the metadata of the given server.
"""
d = request_with_content(
helper.test_case, helper.root, b"PUT",
'{0}/servers/{1}/metadata'.format(helper.uri, server_id),
json.dumps({'metadata': metadata}).encode("utf-8"))
resp, body = helper.test_case.successResultOf(d)
helper.test_case.assertEqual(resp.code, 200)
def update_status(helper, control_endpoint, server_id, status):
"""
    Update a server's status via the Nova control API.
"""
d = request_with_content(
helper.test_case, helper.root, b"POST",
control_endpoint + "/attributes/",
json.dumps({"status": {server_id: status}}).encode("utf-8"))
resp, body = helper.test_case.successResultOf(d)
helper.test_case.assertEqual(resp.code, 201)
def use_creation_behavior(helper, name, parameters, criteria):
"""
Use the given behavior for server creation.
"""
return register_behavior(
helper.test_case, helper.root,
"{0}/behaviors/creation".format(
helper.auth.get_service_endpoint("cloudServersBehavior")),
name, parameters, criteria)
class NovaAPITests(SynchronousTestCase):
"""
    Tests for the Nova API plugin.
"""
def setUp(self):
"""
        Create a :obj:`MimicCore` with :obj:`NovaApi` and
        :obj:`NovaControlApi` plugins, and create a server
"""
nova_api = NovaApi(["ORD", "MIMIC"])
        self.helper = APIMockHelper(
self, [nova_api, NovaControlApi(nova_api=nova_api)]
)
self.root = self.helper.root
self.clock = self.helper.clock
self.uri = self.helper.uri
self.server_name = 'test_server'
self.create_server_response, self.create_server_response_body = (
create_server(self.helper, name=self.server_name))
self.server_id = self.create_server_response_body['server']['id']
def test_create_server_with_manual_diskConfig(self):
"""
Servers should respect the provided OS-DCF:diskConfig setting if
supplied.
"""
create_server_response, response_body = create_server(
self.helper, name=self.server_name + "A", diskConfig="MANUAL")
self.assertEqual(
response_body['server']['OS-DCF:diskConfig'], 'MANUAL')
# Make sure we report on proper state.
server_id = response_body['server']['id']
get_server = request(
self, self.root, b"GET", self.uri + '/servers/' + server_id
)
get_server_response = self.successResultOf(get_server)
response_body = self.successResultOf(
treq.json_content(get_server_response))
self.assertEqual(
response_body['server']['OS-DCF:diskConfig'], 'MANUAL')
def test_create_server_with_bad_diskConfig(self):
"""
        When ``create_server`` is passed an invalid ``OS-DCF:diskConfig``
        value (i.e., one which is neither AUTO nor MANUAL), it should return
        an HTTP status code of 400.
"""
create_server_response, _ = create_server(
self.helper, name=self.server_name + "A",
diskConfig="AUTO-MANUAL")
self.assertEqual(create_server_response.code, 400)
def validate_server_detail_json(self, server_json):
"""
Tests to validate the server JSON.
"""
validate_link_json(self, server_json)
        # 'id' and 'links' have already been checked; the remaining keys are
        # either optional or not yet implemented in mimic
response_keys = ("accessIPv4", "accessIPv6", "addresses", "created",
"flavor", "image", "metadata", "name", "status",
"tenant_id", "updated", "OS-EXT-STS:task_state",
"OS-DCF:diskConfig")
for key in response_keys:
self.assertIn(key, server_json)
validate_link_json(self, server_json['image'])
validate_link_json(self, server_json['flavor'])
self.assertIsInstance(server_json['addresses'], dict)
for addresses in server_json['addresses'].values():
self.assertIsInstance(addresses, list)
for address in addresses:
self.assertIn('addr', address)
self.assertIn('version', address)
self.assertIn(address['version'], (4, 6),
"Address version must be 4 or 6: {0}"
.format(address))
def test_create_server(self):
"""
Test to verify :func:`create_server` on ``POST /v2.0/<tenant_id>/servers``
"""
self.assertEqual(self.create_server_response.code, 202)
self.assertIsInstance(self.server_id, text_type)
self.assertNotEqual(
self.create_server_response_body['server']['adminPass'],
"testpassword"
)
validate_link_json(self, self.create_server_response_body['server'])
def test_create_server_with_keypair_name(self):
"""
Test to verify creating a server with a named keypair works
"""
keypair_name = "server_keypair"
resp, body = create_server(self.helper, key_name=keypair_name)
self.assertEqual(resp.code, 202)
server_id = body['server']['id']
get_server = request(
self, self.root, b"GET", self.uri + '/servers/' + server_id
)
get_server_response = self.successResultOf(get_server)
response_body = self.successResultOf(
treq.json_content(get_server_response))
self.assertEqual(
response_body['server']['key_name'], keypair_name)
def test_create_server_without_keypair_name(self):
"""
        Test to verify that creating a server without a named keypair
        results in a ``key_name`` of None
"""
data = {
"name": "fake_server",
"imageRef": "test-image",
"flavorRef": "test-flavor"
}
body = json.dumps({"server": data}).encode("utf-8")
create_resp, create_body = create_server(self.helper,
body_override=body)
server_id = create_body['server']['id']
get_server = request(
self, self.root, b"GET", self.uri + '/servers/' + server_id
)
get_server_response = self.successResultOf(get_server)
response_body = self.successResultOf(
treq.json_content(get_server_response))
self.assertEqual(
response_body['server']['key_name'], None)
def test_created_servers_have_dissimilar_admin_passwords(self):
"""
Two (or more) servers created should not share passwords.
"""
other_response, other_response_body = create_server(
self.helper, name=self.server_name)
self.assertNotEqual(
self.create_server_response_body['server']['adminPass'],
other_response_body['server']['adminPass']
)
def test_list_servers(self):
"""
Test to verify :func:`list_servers` on ``GET /v2.0/<tenant_id>/servers``
"""
list_servers = request(self, self.root, b"GET", self.uri + '/servers')
list_servers_response = self.successResultOf(list_servers)
list_servers_response_body = self.successResultOf(
treq.json_content(list_servers_response))
self.assertEqual(list_servers_response.code, 200)
self.assertEqual(list_servers_response_body['servers'][0]['id'],
self.server_id)
self.assertEqual(len(list_servers_response_body['servers']), 1)
validate_link_json(self, list_servers_response_body['servers'][0])
def test_list_servers_with_args(self):
"""
        Test to verify :func:`list_servers` on ``GET /v2.0/<tenant_id>/servers?name=<name>``,
when a server with that name exists
"""
list_servers = request(
self, self.root, b"GET", self.uri + '/servers?name=' + self.server_name)
list_servers_response = self.successResultOf(list_servers)
list_servers_response_body = self.successResultOf(
treq.json_content(list_servers_response))
self.assertEqual(list_servers_response.code, 200)
self.assertEqual(list_servers_response_body['servers'][0]['id'],
self.server_id)
self.assertEqual(len(list_servers_response_body['servers']), 1)
def test_list_servers_with_args_negative(self):
"""
        Test to verify :func:`list_servers` on ``GET /v2.0/<tenant_id>/servers?name=<name>``
when a server with that name does not exist
"""
list_servers = request(
self, self.root, b"GET", self.uri + '/servers?name=no_server')
list_servers_response = self.successResultOf(list_servers)
list_servers_response_body = self.successResultOf(
treq.json_content(list_servers_response))
self.assertEqual(list_servers_response.code, 200)
self.assertEqual(len(list_servers_response_body['servers']), 0)
def test_get_server(self):
"""
Test to verify :func:`get_server` on ``GET /v2.0/<tenant_id>/servers/<server_id>``,
when the server_id exists
"""
get_server = request(
self, self.root, b"GET", self.uri + '/servers/' + self.server_id)
get_server_response = self.successResultOf(get_server)
get_server_response_body = self.successResultOf(
treq.json_content(get_server_response))
self.assertEqual(get_server_response.code, 200)
self.assertEqual(get_server_response_body['server']['id'],
self.server_id)
self.assertEqual(
get_server_response_body['server']['status'], 'ACTIVE')
admin_password = get_server_response_body['server'].get('adminPass', None)
self.assertEqual(admin_password, None)
self.validate_server_detail_json(get_server_response_body['server'])
def test_get_server_negative(self):
"""
Test to verify :func:`get_server` on ``GET /v2.0/<tenant_id>/servers/<server_id>``,
when the server_id does not exist
"""
response, body = self.successResultOf(json_request(
self, self.root, b"GET", self.uri + '/servers/test-server-id'))
self.assertEqual(response.code, 404)
self.assertEqual(body, {
"itemNotFound": {
"message": "Instance could not be found",
"code": 404
}
})
def test_list_servers_with_details(self):
"""
Test to verify :func:`list_servers_with_details` on ``GET /v2.0/<tenant_id>/servers/detail``
"""
list_servers_detail = request(
self, self.root, b"GET", self.uri + '/servers/detail')
list_servers_detail_response = self.successResultOf(
list_servers_detail)
list_servers_detail_response_body = self.successResultOf(
treq.json_content(list_servers_detail_response))
self.assertEqual(list_servers_detail_response.code, 200)
self.assertEqual(list_servers_detail_response_body['servers'][0]['id'],
self.server_id)
self.assertEqual(len(list_servers_detail_response_body['servers']), 1)
self.assertEqual(
list_servers_detail_response_body['servers'][0]['status'], 'ACTIVE')
self.validate_server_detail_json(
list_servers_detail_response_body['servers'][0])
def test_list_servers_with_details_with_args(self):
"""
:func:`list_servers_with_details`, used by
``GET /v2.0/<tenant_id>/servers/detail``, returns the server details
for only the servers of a given name
"""
create_server(self.helper, name="non-matching-name")
response, body = self.successResultOf(json_request(
self, self.root, b"GET",
"{0}/servers/detail?name={1}".format(self.uri, self.server_name)))
self.assertEqual(response.code, 200)
self.assertIsNot(body['servers'], None)
self.assertIsNot(body['servers'][0], None)
self.assertEqual(body['servers'][0]['id'], self.server_id)
self.assertEqual(len(body['servers']), 1)
self.assertEqual(body['servers'][0]['status'], 'ACTIVE')
self.validate_server_detail_json(body['servers'][0])
def test_list_servers_with_details_with_args_negative(self):
"""
:func:`list_servers_with_details`, used by
``GET /v2.0/<tenant_id>/servers/detail``, returns no servers when
there aren't any that match the given name
"""
response, body = self.successResultOf(json_request(
self, self.root, b"GET",
'{0}/servers/detail?name=no_server'.format(self.uri)
.encode('utf-8')))
self.assertEqual(response.code, 200)
self.assertEqual(len(body['servers']), 0)
def test_delete_server(self):
"""
Test to verify :func:`delete_server` on ``DELETE /v2.0/<tenant_id>/servers/<server_id>``
"""
delete_server = request(
self, self.root, b"DELETE", self.uri + '/servers/' + self.server_id)
delete_server_response = self.successResultOf(delete_server)
self.assertEqual(delete_server_response.code, 204)
self.assertEqual(self.successResultOf(treq.content(delete_server_response)),
b"")
# Get and see if server actually got deleted
get_server = request(
self, self.root, b"GET", self.uri + '/servers/' + self.server_id)
get_server_response = self.successResultOf(get_server)
self.assertEqual(get_server_response.code, 404)
def test_delete_server_negative(self):
"""
Test to verify :func:`delete_server` on ``DELETE /v2.0/<tenant_id>/servers/<server_id>``,
when the server_id does not exist
"""
delete_server = request(
self, self.root, b"DELETE", self.uri + '/servers/test-server-id')
delete_server_response = self.successResultOf(delete_server)
self.assertEqual(delete_server_response.code, 404)
def test_get_server_limits(self):
"""
Test to verify :func:`get_limit` on ``GET /v2.0/<tenant_id>/limits``
"""
get_server_limits = request(
self, self.root, b"GET", self.uri + '/limits')
get_server_limits_response = self.successResultOf(get_server_limits)
self.assertEqual(get_server_limits_response.code, 200)
self.assertTrue(
self.successResultOf(treq.json_content(get_server_limits_response)))
def test_get_server_ips(self):
"""
Test to verify :func:`get_ips` on ``GET /v2.0/<tenant_id>/servers/<server_id>/ips``
"""
get_server_ips = request(self, self.root, b"GET",
self.uri + '/servers/' + self.server_id + '/ips')
get_server_ips_response = self.successResultOf(get_server_ips)
get_server_ips_response_body = self.successResultOf(
treq.json_content(get_server_ips_response))
self.assertEqual(get_server_ips_response.code, 200)
list_servers_detail = request(
self, self.root, b"GET", self.uri + '/servers/detail')
list_servers_detail_response = self.successResultOf(
list_servers_detail)
list_servers_detail_response_body = self.successResultOf(
treq.json_content(list_servers_detail_response))
self.assertEqual(get_server_ips_response_body['addresses'],
list_servers_detail_response_body['servers'][0]['addresses'])
def test_get_server_ips_negative(self):
"""
Test to verify :func:`get_ips` on ``GET /v2.0/<tenant_id>/servers/<server_id>/ips``,
when the server_id does not exist
"""
get_server_ips = request(self, self.root, b"GET",
                                 self.uri + '/servers/non-existent-server/ips')
get_server_ips_response = self.successResultOf(get_server_ips)
self.assertEqual(get_server_ips_response.code, 404)
def test_different_region_same_server(self):
"""
Creating a server in one nova region should not create it in other nova
regions.
"""
# NB: the setUp creates a server in ORD.
service_uri = self.helper.get_service_endpoint("cloudServersOpenStack",
"MIMIC")
other_region_servers = self.successResultOf(
treq.json_content(
self.successResultOf(request(self, self.root, b"GET",
service_uri + "/servers/")))
)["servers"]
self.assertEqual(other_region_servers, [])
def test_different_tenants_same_region(self):
"""
Creating a server for one tenant in a particular region should not
create it for other tenants in the same region.
"""
other_tenant = TenantAuthentication(self, self.root, "other", "other")
service_endpoint = other_tenant.get_service_endpoint(
"cloudServersOpenStack", "ORD")
response, response_body = self.successResultOf(
json_request(
self, self.root, b"GET",
service_endpoint + '/servers'))
self.assertEqual(response.code, 200)
self.assertEqual(response_body, {'servers': []})
def test_modify_existing_server_status(self):
"""
An HTTP ``POST`` to ``.../<control-endpoint>/attributes/`` with a JSON
mapping of attribute type to the server ID and its given server's
status will change that server's status.
"""
nova_control_endpoint = self.helper.auth.get_service_endpoint(
"cloudServersBehavior", "ORD")
server_id = self.create_server_response_body["server"]["id"]
status_modification = {
"status": {server_id: "ERROR"}
}
status = status_of_server(self, server_id)
self.assertEqual(status, "ACTIVE")
set_status = request(
self, self.root, b"POST",
nova_control_endpoint + "/attributes/",
json.dumps(status_modification).encode("utf-8")
)
set_status_response = self.successResultOf(set_status)
self.assertEqual(set_status_response.code, 201)
status = status_of_server(self, server_id)
self.assertEqual(status, "ERROR")
def test_modify_multiple_server_status(self):
"""
An HTTP ``POST`` to ``.../<control-endpoint>/attributes/`` with a JSON
mapping of attribute type to several server IDs and each given server's
status will change each server's status.
"""
nova_control_endpoint = self.helper.auth.get_service_endpoint(
"cloudServersBehavior", "ORD")
second_server_id = quick_create_server(self.helper, region="ORD")
server_id = self.create_server_response_body["server"]["id"]
status_modification = {
"status": {server_id: "ERROR",
second_server_id: "BUILD"}
}
status = status_of_server(self, server_id)
second_status = status_of_server(self, second_server_id)
self.assertEqual(status, "ACTIVE")
self.assertEqual(second_status, "ACTIVE")
set_status = request(
self, self.root, b"POST",
nova_control_endpoint + "/attributes/",
json.dumps(status_modification).encode("utf-8")
)
set_status_response = self.successResultOf(set_status)
self.assertEqual(set_status_response.code, 201)
status = status_of_server(self, server_id)
second_status = status_of_server(self, second_server_id)
self.assertEqual(status, "ERROR")
self.assertEqual(second_status, "BUILD")
def test_server_resize(self):
"""
        Resizing a server that does not exist should respond with a 404.
        Resizing a server that does exist should respond with a 202, and
        the server should then have the updated flavor.
http://docs.rackspace.com/servers/api/v2/cs-devguide/cs-devguide-20150727.pdf
"""
resize_request = {"resize": {"flavorRef": "2"}}
response, body = self.successResultOf(json_request(
self, self.root, b"POST", self.uri + '/servers/nothing/action', resize_request))
self.assertEqual(response.code, 404)
self.assertEqual(body, {
"itemNotFound": {
"message": "Instance nothing could not be found",
"code": 404
}
})
existing_server = request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action',
json.dumps(resize_request).encode("utf-8")
)
existing_server_response = self.successResultOf(existing_server)
self.assertEqual(existing_server_response.code, 202)
get_resized_server = request(
self, self.root, b"GET", self.uri + '/servers/' + self.server_id)
get_server_response = self.successResultOf(get_resized_server)
get_server_response_body = self.successResultOf(
treq.json_content(get_server_response))
self.assertEqual(get_server_response_body['server']['flavor']['id'], '2')
no_resize_request = {"non_supported_action": {"flavorRef": "2"}}
response, body = self.successResultOf(json_request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action',
no_resize_request)
)
self.assertEqual(response.code, 400)
self.assertEqual(body, {
"badRequest": {
"message": "There is no such action currently supported",
"code": 400
}
})
no_flavorref_request = {"resize": {"missingflavorRef": "5"}}
response, body = self.successResultOf(json_request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action',
no_flavorref_request)
)
self.assertEqual(response.code, 400)
self.assertEqual(body, {
"badRequest": {
"message": "Resize requests require 'flavorRef' attribute",
"code": 400
}
})
def test_confirm_and_revert_server_resize(self):
"""
        After a server finishes resizing, the resize must be confirmed or
        reverted.
        A confirmation action should make the server ACTIVE and return a 204.
        A revert action should restore the previous flavor and return a 202.
        Attempting to revert or confirm a server that is not in
        VERIFY_RESIZE state returns a 409.
http://docs.rackspace.com/servers/api/v2/cs-devguide/cs-devguide-20150727.pdf
"""
confirm_request = json.dumps({"confirmResize": "null"}).encode("utf-8")
revert_request = json.dumps({"revertResize": "null"}).encode("utf-8")
resize_request = json.dumps(
{"resize": {"flavorRef": "2"}}).encode("utf-8")
response, body = self.successResultOf(json_request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action', confirm_request
))
self.assertEqual(response.code, 409)
self.assertEqual(body, {
"conflictingRequest": {
"message": "Cannot 'confirmResize' instance " + self.server_id +
" while it is in vm_state active",
"code": 409
}
})
response, body = self.successResultOf(json_request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action', revert_request))
self.assertEqual(response.code, 409)
self.assertEqual(body, {
"conflictingRequest": {
"message": "Cannot 'revertResize' instance " + self.server_id +
" while it is in vm_state active",
"code": 409
}
})
request(self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action', resize_request)
confirm = request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action', confirm_request)
confirm_response = self.successResultOf(confirm)
self.assertEqual(confirm_response.code, 204)
resize_request = json.dumps(
{"resize": {"flavorRef": "10"}}).encode("utf-8")
request(self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action', resize_request)
resized_server = request(
self, self.root, b"GET", self.uri + '/servers/' + self.server_id)
resized_server_response = self.successResultOf(resized_server)
resized_server_response_body = self.successResultOf(
treq.json_content(resized_server_response))
self.assertEqual(resized_server_response_body['server']['flavor']['id'], '10')
revert = request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action', revert_request)
revert_response = self.successResultOf(revert)
self.assertEqual(revert_response.code, 202)
reverted_server = request(
self, self.root, b"GET", self.uri + '/servers/' + self.server_id)
reverted_server_response = self.successResultOf(reverted_server)
reverted_server_response_body = self.successResultOf(
treq.json_content(reverted_server_response))
self.assertEqual(reverted_server_response_body['server']['flavor']['id'], '2')
def test_rescue(self):
"""
        Attempting to rescue a server that is not in ACTIVE state returns
        a conflictingRequest error with response code 409.
        If the server is in ACTIVE state, a new admin password is returned
        for the server with a response code of 200.
http://docs.rackspace.com/servers/api/v2/cs-devguide/content/rescue_mode.html
"""
metadata = {"server_error": "1"}
server_id = quick_create_server(self.helper, metadata=metadata)
rescue_request = {"rescue": "none"}
response, body = self.successResultOf(json_request(
self, self.root, b"POST",
self.uri + '/servers/' + server_id + '/action', rescue_request)
)
self.assertEqual(response.code, 409)
self.assertEqual(body, {
"conflictingRequest": {
"message": "Cannot 'rescue' instance " + server_id +
" while it is in task state other than active",
"code": 409
}
})
rescue = request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action',
json.dumps(rescue_request).encode("utf-8"))
rescue_response = self.successResultOf(rescue)
rescue_response_body = self.successResultOf(
treq.json_content(rescue_response))
self.assertEqual(rescue_response.code, 200)
self.assertTrue('"adminPass":' in json.dumps(rescue_response_body))
def test_unrescue(self):
"""
        Attempting to unrescue a server that is not in RESCUE state returns
        a conflictingRequest error with a response code of 409.
        Unrescuing a server that is in RESCUE state returns a 200.
http://docs.rackspace.com/servers/api/v2/cs-devguide/content/exit_rescue_mode.html
"""
rescue_request = {"rescue": "none"}
unrescue_request = {"unrescue": "null"}
response, body = self.successResultOf(json_request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action',
unrescue_request)
)
self.assertEqual(response.code, 409)
self.assertEqual(body, {
"conflictingRequest": {
"message": "Cannot 'unrescue' instance " + self.server_id +
" while it is in task state other than rescue",
"code": 409
}
})
# Put a server in rescue status
json_request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action',
rescue_request
)
unrescue = request(self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action',
json.dumps(unrescue_request).encode("utf-8"))
unrescue_response = self.successResultOf(unrescue)
self.assertEqual(unrescue_response.code, 200)
def test_reboot_server(self):
"""
        A hard reboot of a server sets the server status to HARD_REBOOT and
        returns a 202.
        A soft reboot of a server sets the server status to REBOOT and
        returns a 202.
        After some amount of time the server goes back to ACTIVE state.
        The clock is used to advance time and verify that the status changes
        from a reboot state back to ACTIVE. The time interval is currently
        hardcoded in the route; in the future this should be refactored to
        allow different durations to be set, including a zero duration that
        would let the server skip the intermediary HARD_REBOOT or REBOOT
        state and go straight to ACTIVE.
        If the 'type' attribute is missing or invalid in the request, a
        badRequest response body is returned with code 400.
http://docs.rackspace.com/servers/api/v2/cs-devguide/content/Reboot_Server-d1e3371.html
"""
no_reboot_type_request = json.dumps(
{"reboot": {"missing_type": "SOFT"}}).encode("utf-8")
response, body = self.successResultOf(json_request(
self, self.root, b"POST",
self.uri.encode("ascii") + b'/servers/' +
self.server_id.encode("ascii") +
b'/action',
no_reboot_type_request)
)
self.assertEqual(response.code, 400)
self.assertEqual(body, {
"badRequest": {
"message": "Missing argument 'type' for reboot",
"code": 400
}
})
response, body = self.successResultOf(json_request(
self, self.root, b"POST",
(self.uri + '/servers/' + self.server_id + '/action')
.encode("ascii"),
{"reboot": {"type": "FIRM"}})
)
self.assertEqual(response.code, 400)
self.assertEqual(body, {
"badRequest": {
"message": "Argument 'type' for reboot is not HARD or SOFT",
"code": 400
}
})
# Soft reboot tests
soft_reboot_request = (
json.dumps({"reboot": {"type": "SOFT"}}).encode("ascii")
)
soft_reboot = request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action',
soft_reboot_request
)
soft_reboot_response = self.successResultOf(soft_reboot)
self.assertEqual(soft_reboot_response.code, 202)
response, body = self.successResultOf(json_request(
self, self.root, b"GET", self.uri + '/servers/' + self.server_id))
self.assertEqual(body['server']['status'], 'REBOOT')
# Advance the clock 3 seconds and check status
self.clock.advance(3)
rebooted_server = request(
self, self.root, b"GET", self.uri + '/servers/' + self.server_id)
rebooted_server_response = self.successResultOf(rebooted_server)
rebooted_server_response_body = self.successResultOf(
treq.json_content(rebooted_server_response))
self.assertEqual(rebooted_server_response_body['server']['status'], 'ACTIVE')
# Hard Reboot Tests
hard_reboot_request = (
json.dumps({"reboot": {"type": "HARD"}}).encode("ascii")
)
hard_reboot = request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action',
hard_reboot_request
)
hard_reboot_response = self.successResultOf(hard_reboot)
self.assertEqual(hard_reboot_response.code, 202)
hard_reboot_server = request(
self, self.root, b"GET", self.uri + '/servers/' + self.server_id)
hard_reboot_server_response = self.successResultOf(hard_reboot_server)
hard_reboot_server_response_body = self.successResultOf(
treq.json_content(hard_reboot_server_response))
self.assertEqual(hard_reboot_server_response_body['server']['status'], 'HARD_REBOOT')
# Advance clock 6 seconds and check server status
self.clock.advance(6)
rebooted_server = request(
self, self.root, b"GET", self.uri + '/servers/' + self.server_id)
rebooted_server_response = self.successResultOf(rebooted_server)
rebooted_server_response_body = self.successResultOf(
treq.json_content(rebooted_server_response))
self.assertEqual(rebooted_server_response_body['server']['status'], 'ACTIVE')
def test_change_password(self):
"""
        Resetting the password on a server that is not ACTIVE responds
        with a conflictingRequest error and response code 409.
        adminPass is required in the request body; if it is missing, a
        badRequest is returned with response code 400.
        A successful password reset returns a 202.
http://docs.rackspace.com/servers/api/v2/cs-devguide/content/Change_Password-d1e3234.html
"""
password_request = json.dumps(
{"changePassword": {"adminPass": "password"}}).encode("utf-8")
bad_password_request = json.dumps(
{"changePassword": {"Pass": "password"}}).encode("utf-8")
response, body = self.successResultOf(json_request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action',
bad_password_request))
self.assertEqual(response.code, 400)
self.assertEqual(body, {
"badRequest": {
"message": "No adminPass was specified",
"code": 400
}
})
password_reset = request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action', password_request)
password_reset_response = self.successResultOf(password_reset)
self.assertEqual(password_reset_response.code, 202)
# Create server in error state and test response when changing password
# in state other than ACTIVE
metadata = {"server_error": "1"}
server_id = quick_create_server(self.helper, metadata=metadata)
response, body = self.successResultOf(json_request(
self, self.root, b"POST",
self.uri + '/servers/' + server_id + '/action', password_request))
self.assertEqual(response.code, 409)
self.assertEqual(body, {
"conflictingRequest": {
"message": "Cannot 'changePassword' instance " + server_id +
" while it is in task state other than active",
"code": 409
}
})
    def test_rebuild(self):
        """
        A rebuild request without an ``imageRef`` returns a 400. A valid
        rebuild request returns a 202 with an adminPass, puts the server
        into REBUILD status, and the server becomes ACTIVE again after a
        delay. Rebuilding a server that is not ACTIVE returns a 409.
        """
rebuild_request = {
"rebuild": {"imageRef": "d5f916f8-03a4-4392-9ec2-cc6e5ad41cf0"}
}
no_imageRef_request = {"rebuild": {"name": "new_server"}}
response, body = self.successResultOf(json_request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action',
no_imageRef_request)
)
self.assertEqual(response.code, 400)
self.assertEqual(body, {
"badRequest": {
"message": "Could not parse imageRef from request.",
"code": 400
}
})
response, body = self.successResultOf(json_request(
self, self.root, b"POST",
self.uri + '/servers/' + self.server_id + '/action',
rebuild_request)
)
self.assertEqual(response.code, 202)
self.assertTrue('adminPass' in json.dumps(body))
self.assertEqual(body['server']['id'], self.server_id)
self.assertEqual(body['server']['status'], 'REBUILD')
self.clock.advance(5)
rebuilt_server = request(
self, self.root, b"GET", self.uri + '/servers/' + self.server_id)
rebuilt_server_response = self.successResultOf(rebuilt_server)
rebuilt_server_response_body = self.successResultOf(
treq.json_content(rebuilt_server_response))
self.assertEqual(rebuilt_server_response_body['server']['status'], 'ACTIVE')
# Create server in error state and test response when an attempt to
# rebuild the server when it is in state other than ACTIVE
metadata = {"server_error": "1"}
server_id = quick_create_server(self.helper, metadata=metadata)
response, body = self.successResultOf(json_request(
self, self.root, b"POST",
self.uri + '/servers/' + server_id + '/action', rebuild_request))
self.assertEqual(response.code, 409)
self.assertEqual(body, {
"conflictingRequest": {
"message": "Cannot 'rebuild' instance " + server_id +
" while it is in task state other than active",
"code": 409
}
})
    def test_create_image(self):
        """
        A ``createImage`` action returns a 202 and adds an image with the
        requested name to the image list.
        """
create_image_request = json.dumps({"createImage": {"name": "CreatedImage"}}).encode("utf-8")
nova_api = NovaApi(["ORD", "MIMIC"])
helper = APIMockHelper(
self, [nova_api, NovaControlApi(nova_api=nova_api)]
)
root = helper.root
uri = helper.uri
image_list = request(self, root, b"GET", uri + '/images')
image_list_response = self.successResultOf(image_list)
image_list_response_body = self.successResultOf(treq.json_content(image_list_response))
image_list_size = len(image_list_response_body['images'])
random_image_choice = random.randint(0, (len(image_list_response_body['images'])) - 1)
image_id = image_list_response_body['images'][random_image_choice]['id']
server_name = 'createdFromImage'
self.create_server_response, self.create_server_response_body = (
create_server(helper, name=server_name, imageRef=image_id))
server_id = self.create_server_response_body['server']['id']
create_image = request(self, root, b"POST", uri + '/servers/' + server_id + '/action',
create_image_request)
create_image_response = self.successResultOf(create_image)
self.assertEqual(create_image_response.code, 202)
image_list = request(self, root, b"GET", uri + '/images/detail')
image_list_response = self.successResultOf(image_list)
image_list_response_body = self.successResultOf(treq.json_content(image_list_response))
image = [image for image in image_list_response_body['images']
if image['name'] == 'CreatedImage']
self.assertEqual((image[0]['name']), "CreatedImage")
self.assertEqual(image_list_size + 1, len(image_list_response_body['images']))
class NovaAPIChangesSinceTests(SynchronousTestCase):
"""
Tests for listing servers with changes-since filter
"""
def setUp(self):
"""
        Create a :obj:`MimicCore` with :obj:`NovaApi` and
        :obj:`NovaControlApi` plugins, and create a server
"""
nova_api = NovaApi(["ORD", "MIMIC"])
helper = self.helper = APIMockHelper(
self, [nova_api, NovaControlApi(nova_api=nova_api)]
)
self.root = helper.root
self.uri = helper.uri
self.clock = helper.clock
self.control_endpoint = helper.auth.get_service_endpoint(
"cloudServersBehavior",
"ORD")
self.server1 = quick_create_server(helper)
self.clock.advance(1)
self.server2 = quick_create_server(helper)
def list_servers_detail(self, since):
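        # ``changes-since`` expects a timestamp; seconds_to_timestamp turns
        # seconds on the test clock into that representation.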
changes_since = seconds_to_timestamp(since)
params = urlencode({"changes-since": changes_since})
resp, body = self.successResultOf(
json_request(
self, self.root, b"GET",
'{0}/servers/detail?{1}'.format(self.uri, params)))
self.assertEqual(resp.code, 200)
return body['servers']
def list_servers(self, since):
servers = self.list_servers_detail(since)
return [s['id'] for s in servers]
def test_no_changes(self):
"""
        Returns no servers if nothing has changed since the given time
"""
self.clock.advance(3)
self.assertEqual(self.list_servers(2), [])
def test_returns_created_servers(self):
"""
        Returns servers created after the given time
"""
self.assertEqual(self.list_servers(0.5), [self.server2])
self.assertEqual(self.list_servers(1.0), [self.server2])
def test_returns_deleted_servers(self):
"""
Returns DELETED servers if they've been deleted since the time given
"""
self.clock.advance(1)
delete_server(self.helper, self.server1)
self.clock.advance(2)
matcher = MatchesListwise(
[ContainsDict({"status": Equals(u"DELETED"), "id": Equals(self.server1)})])
mismatch = matcher.match(self.list_servers_detail(1.5))
self.assertIs(mismatch, None)
def test_returns_updated_status_servers(self):
"""
        Returns servers whose status has been updated since the given time
"""
self.clock.advance(1)
update_status(self.helper, self.control_endpoint, self.server2, u"ERROR")
self.assertEqual(self.list_servers(1.5), [self.server2])
def test_returns_updated_metadata_servers(self):
"""
        Returns servers whose metadata has changed since the given time
"""
self.clock.advance(1)
        update_metadata_item(self.helper, self.server1, "a", "b")
self.assertEqual(self.list_servers(1.5), [self.server1])
def test_returns_replaced_metadata_servers(self):
"""
Returns servers whose metadata has been replaced since given time
"""
self.clock.advance(1)
        update_metadata(self.helper, self.server1, {"a": "b"})
self.assertEqual(self.list_servers(1.5), [self.server1])
class NovaAPIListServerPaginationTests(SynchronousTestCase):
"""
Tests for the Nova plugin API for paginating while listing servers,
both with and without details.
"""
def make_nova_app(self):
"""
        Create a :obj:`MimicCore` with :obj:`NovaApi` as the only plugin.
"""
self.helper = APIMockHelper(self, [NovaApi(["ORD", "MIMIC"])])
self.root = self.helper.root
self.uri = self.helper.uri
def create_servers(self, n, name_generation=None):
"""
Create ``n`` servers, returning a list of their server IDs.
"""
return [
quick_create_server(
self.helper,
name=("{0}".format(i) if name_generation is None
else name_generation(i))
) for i in range(n)
]
def list_servers(self, path, params=None, code=200):
"""
List all servers using the given path and parameters. Return the
entire response body.
"""
url = self.uri + path
if params is not None:
url = "{0}?{1}".format(url, urlencode(params))
resp, body = self.successResultOf(
json_request(self, self.root, b"GET", url))
self.assertEqual(resp.code, code)
return body
def match_body_with_links(self, result, expected_servers, expected_path,
expected_query_params):
"""
Given the result from listing servers, matches it against an expected
value that includes the next page links.
"""
self.assertEqual(expected_servers, result['servers'])
expected_matcher = MatchesDict({
'servers': Equals(expected_servers),
'servers_links': MatchesListwise([
MatchesDict({
'rel': Equals('next'),
'href': StartsWith(
"{0}{1}?".format(self.uri, expected_path))
})
])
})
mismatch = expected_matcher.match(result)
if mismatch is not None:
self.fail(mismatch.describe())
link = result['servers_links'][0]['href']
query_string = link.split('?', 1)[-1]
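        # parse_qs maps each key to a list of values, which is why
        # expected_query_params uses single-element lists.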
self.assertEqual(expected_query_params, parse_qs(query_string))
def test_with_invalid_marker(self):
"""
        If an invalid marker is passed, no matter what other parameters are
        given, return a 400 bad request.
"""
self.make_nova_app()
self.create_servers(2)
combos = ({}, {'limit': 1}, {'name': '0'}, {'limit': 1, 'name': '0'})
for path in ('/servers', '/servers/detail'):
for combo in combos:
combo['marker'] = '9000'
error_body = self.list_servers(path, combo, code=400)
self.assertEqual(
{
"badRequest": {
"message": "marker [9000] not found",
"code": 400
}
},
error_body)
def _check_invalid_limit(self, limit, message):
"""
Make a request with an invalid limit against every possible
combination of parameters, and assert that a 400 bad request is
returned with the given message.
"""
self.make_nova_app()
self.create_servers(2, lambda i: 'server')
servers = self.list_servers('/servers')['servers']
combos = ({}, {'marker': servers[0]['id']}, {'name': 'server'},
{'marker': servers[0]['id'], 'name': 'server'})
for path in ('/servers', '/servers/detail'):
for combo in combos:
combo['limit'] = limit
error_body = self.list_servers(path, combo, code=400)
self.assertEqual(
{
"badRequest": {
"message": message,
"code": 400
}
},
error_body)
def test_with_non_int_limit(self):
"""
If a limit that can't be converted into an integer is passed, no
matter what other parameters there are, return with a 400 bad request.
"""
for non_int in ('a', '0.1', '[]'):
self._check_invalid_limit(
non_int, "limit param must be an integer")
def test_with_negative_limit(self):
"""
If a negative limit is passed, no matter what other parameters there
        are, return a 400 bad request.
"""
self._check_invalid_limit('-1', "limit param must be positive")
def test_with_limit_as_0(self):
"""
If a limit of 0 is passed, no matter what other parameters there are,
return no servers and do not include the next page link.
"""
self.make_nova_app()
self.create_servers(2, lambda i: 'server')
servers = self.list_servers('/servers')['servers']
combos = ({}, {'marker': servers[0]['id']}, {'name': 'server'},
{'marker': servers[0]['id'], 'name': 'server'})
for path in ('/servers', '/servers/detail'):
for combo in combos:
combo['limit'] = 0
with_params = self.list_servers(path, combo)
self.assertEqual({'servers': []}, with_params)
def test_with_valid_marker_only(self):
"""
If just the marker is passed, and it's a valid marker, list all
servers after that marker without any kind of limit.
Do not return a next page link.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
self.create_servers(5)
servers = self.list_servers(path)['servers']
with_params = self.list_servers(path, {'marker': servers[0]['id']})
self.assertEqual({'servers': servers[1:]}, with_params)
def test_with_marker_and_name(self):
"""
If just the marker and name are passed, list all servers after that
marker that have that particular name. There is no number of servers
limit. Do not return a next page link.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
self.create_servers(5, lambda i: "{0}".format(0 if i == 1 else 1))
servers = self.list_servers(path)['servers']
self.assertEqual(['1', '0', '1', '1', '1'],
[server['name'] for server in servers],
"Assumption about server list ordering is wrong")
with_params = self.list_servers(
path, {'marker': servers[0]['id'], 'name': "1"})
self.assertEqual({'servers': servers[2:]}, with_params)
def test_with_limit_lt_servers_only(self):
"""
If just the limit is passed, and the limit is less than the number of
servers, list only that number of servers in the limit, starting with
the first server in the list. Include the next page link.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
self.create_servers(2)
servers = self.list_servers(path)['servers']
with_params = self.list_servers(path, {'limit': 1})
self.match_body_with_links(
with_params,
expected_servers=[servers[0]],
expected_path=path,
expected_query_params={
'limit': ['1'], 'marker': [servers[0]['id']]
}
)
def test_with_limit_eq_servers_only(self):
"""
If just the limit is passed, and the limit is equal to the number
of servers, list all the servers starting with the first server in
the list. Include the next page link.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
self.create_servers(2)
servers = self.list_servers(path)['servers']
with_params = self.list_servers(path, {'limit': 2})
self.match_body_with_links(
with_params,
expected_servers=servers,
expected_path=path,
expected_query_params={
'limit': ['2'], 'marker': [servers[1]['id']]
}
)
def test_with_limit_gt_servers_only(self):
"""
If just the limit is passed, and the limit is greater than the number
of servers, list all the servers starting with the first server in
the list. Do not include the next page link.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
self.create_servers(2)
servers = self.list_servers(path)['servers']
with_params = self.list_servers(path, {'limit': 5})
self.assertEqual({'servers': servers}, with_params)
def test_with_limit_lt_servers_with_name(self):
"""
If the limit and name are passed, and the limit is less than the
number of servers that match that name, list only that number of
servers with that name in the limit, starting with
the first server with that name. Include the next page link.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
self.create_servers(3, lambda i: "{0}".format(0 if i == 0 else 1))
servers = self.list_servers(path)['servers']
self.assertEqual(['0', '1', '1'],
[server['name'] for server in servers],
"Assumption about server list ordering is wrong")
with_params = self.list_servers(path, {'limit': 1, 'name': '1'})
self.match_body_with_links(
with_params,
expected_servers=[servers[1]],
expected_path=path,
expected_query_params={
'limit': ['1'],
'marker': [servers[1]['id']],
'name': ['1']
}
)
def test_with_limit_eq_servers_with_name(self):
"""
If the limit and name are passed, and the limit is equal to the
number of servers that match the name, list all the servers that match
that name starting with the first server that matches. Include the
next page link.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
self.create_servers(3, lambda i: "{0}".format(0 if i == 0 else 1))
servers = self.list_servers(path)['servers']
self.assertEqual(['0', '1', '1'],
[server['name'] for server in servers],
"Assumption about server list ordering is wrong")
with_params = self.list_servers(path, {'limit': 2, 'name': '1'})
self.match_body_with_links(
with_params,
expected_servers=servers[1:],
expected_path=path,
expected_query_params={
'limit': ['2'],
'marker': [servers[2]['id']],
'name': ['1']
}
)
def test_with_limit_gt_servers_with_name(self):
"""
If the limit and name are passed, and the limit is greater than the
number of servers that match the name, list all the servers that match
that name starting with the first server that matches. Do not
include the next page link.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
self.create_servers(3, lambda i: "{0}".format(0 if i == 0 else 1))
servers = self.list_servers(path)['servers']
self.assertEqual(['0', '1', '1'],
[server['name'] for server in servers],
"Assumption about server list ordering is wrong")
with_params = self.list_servers(path, {'limit': 5, 'name': '1'})
self.assertEqual({'servers': servers[1:]}, with_params)
def test_with_limit_lt_servers_with_marker(self):
"""
If the limit and marker are passed, and the limit is less than the
number of servers, list only that number of servers after the one
with the marker ID. Include the next page link.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
self.create_servers(3)
servers = self.list_servers(path)['servers']
with_params = self.list_servers(
path, {'limit': 1, 'marker': servers[0]['id']})
self.match_body_with_links(
with_params,
expected_servers=[servers[1]],
expected_path=path,
expected_query_params={
'limit': ['1'], 'marker': [servers[1]['id']]
}
)
def test_with_limit_eq_servers_with_marker(self):
"""
If the limit and marker are passed, and the limit is equal to the
number of servers, list all the servers after the one with the marker
ID. Include the next page link.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
self.create_servers(3)
servers = self.list_servers(path)['servers']
with_params = self.list_servers(
path, {'limit': 2, 'marker': servers[0]['id']})
self.match_body_with_links(
with_params,
expected_servers=servers[1:],
expected_path=path,
expected_query_params={
'limit': ['2'], 'marker': [servers[2]['id']]
}
)
def test_with_limit_gt_servers_with_marker(self):
"""
If the limit and marker are passed, and the limit is greater than the
number of servers, list all the servers after the one with the marker
ID. Do not include the next page link.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
self.create_servers(3)
servers = self.list_servers(path)['servers']
with_params = self.list_servers(
path, {'limit': 5, 'marker': servers[0]['id']})
self.assertEqual({'servers': servers[1:]}, with_params)
def test_with_limit_lt_servers_with_marker_and_name(self):
"""
If the limit, marker, and name are passed, and the limit is less than
the number of servers that match that name, list only that number of
servers with that name in the limit, after the one with the marker ID.
The marker ID does not even have to belong to a server that matches
the given name.
Include the next page link.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
self.create_servers(6, lambda i: "{0}".format(i % 2))
servers = self.list_servers(path)['servers']
self.assertEqual(['0', '1', '0', '1', '0', '1'],
[server['name'] for server in servers],
"Assumption about server list ordering is wrong")
with_params = self.list_servers(
path, {'limit': 1, 'name': '1', 'marker': servers[2]['id']})
self.match_body_with_links(
with_params,
expected_servers=[servers[3]],
expected_path=path,
expected_query_params={
'limit': ['1'],
'marker': [servers[3]['id']],
'name': ['1']
}
)
def test_with_limit_eq_servers_with_marker_and_name(self):
"""
If the limit, marker, and name are passed, and the limit is equal to
the number of servers that match the name, list all the servers that
match that name after the one with the marker ID.
The marker ID does not even have to belong to a server that matches
the given name.
Include the next page link.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
self.create_servers(6, lambda i: "{0}".format(i % 2))
servers = self.list_servers(path)['servers']
self.assertEqual(['0', '1', '0', '1', '0', '1'],
[server['name'] for server in servers],
"Assumption about server list ordering is wrong")
with_params = self.list_servers(
path, {'limit': 2, 'name': '1', 'marker': servers[2]['id']})
self.match_body_with_links(
with_params,
expected_servers=[servers[3], servers[5]],
expected_path=path,
expected_query_params={
'limit': ['2'],
'marker': [servers[5]['id']],
'name': ['1']
}
)
def test_with_limit_gt_servers_with_marker_and_name(self):
"""
If the limit, marker, and name are passed, and the limit is greater
than the number of servers that match the name, list all the servers
that match that name after the one with the marker ID.
The marker ID does not even have to belong to a server that matches
the given name.
Do not include the next page link.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
self.create_servers(6, lambda i: "{0}".format(i % 2))
servers = self.list_servers(path)['servers']
self.assertEqual(['0', '1', '0', '1', '0', '1'],
[server['name'] for server in servers],
"Assumption about server list ordering is wrong")
with_params = self.list_servers(
path, {'limit': 5, 'name': '1', 'marker': servers[2]['id']})
self.assertEqual({'servers': [servers[3], servers[5]]},
with_params)
def test_deleted_servers_do_not_affect_pagination_no_changes_since(self):
"""
If a bunch of servers are deleted, they do not impact pagination if
changes-since is not passed.
"""
for path in ('/servers', '/servers/detail'):
self.make_nova_app()
server_ids = self.create_servers(5)
for server_id in server_ids:
delete_server(self.helper, server_id)
server_ids = self.create_servers(5)
servers = self.list_servers(path, {'limit': 5})
self.assertEqual(set([s['id'] for s in servers['servers']]),
set(server_ids))
self.assertIn('servers_links', servers)
class NovaAPINegativeTests(SynchronousTestCase):
"""
    Tests for error injection in the Nova API plugin
"""
def setUp(self):
"""
        Create a :obj:`MimicCore` with :obj:`NovaApi` and
        :obj:`NovaControlApi` plugins.
"""
nova_api = NovaApi(["ORD", "MIMIC"])
nova_control_api = NovaControlApi(nova_api=nova_api)
self.helper = APIMockHelper(self, [nova_api, nova_control_api])
self.nova_control_endpoint = self.helper.auth.get_service_endpoint(
"cloudServersBehavior",
"ORD")
self.root = self.helper.root
self.uri = self.helper.uri
def test_create_server_request_with_no_body_causes_bad_request(self):
"""
        Test to verify :func:`create_server` responds with a 400 when it
        receives a request with no body.
"""
create_server_response, _ = create_server(
self.helper, body_override=b"")
self.assertEquals(create_server_response.code, 400)
def test_create_server_request_with_invalid_body_causes_bad_request(self):
"""
        Test to verify :func:`create_server` responds with a 400 when it
        receives a request with a malformed JSON body.
"""
create_server_response, _ = create_server(
self.helper, body_override=b'{ bad request: }')
self.assertEquals(create_server_response.code, 400)
def test_create_server_failure(self):
"""
Test to verify :func:`create_server` fails with given error message
and response code in the metadata, and the given failure type.
"""
serverfail = {"message": "Create server failure", "code": 500,
"type": "specialType"}
metadata = {"create_server_failure": json.dumps(serverfail)}
create_server_response, create_server_response_body = create_server(
self.helper, metadata=metadata)
self.assertEquals(create_server_response.code, 500)
self.assertEquals(
create_server_response_body['specialType']['message'],
"Create server failure")
self.assertEquals(
create_server_response_body['specialType']['code'], 500)
def test_create_server_failure_string_type(self):
"""
Test to verify :func:`create_server` fails with string body
and response code in the metadata, if the failure type is "string".
"""
serverfail = {"message": "Create server failure", "code": 500,
"type": "string"}
metadata = {"create_server_failure": json.dumps(serverfail)}
create_server_response, create_server_response_body = create_server(
self.helper, metadata=metadata, request_func=request_with_content)
self.assertEquals(create_server_response.code, 500)
self.assertEquals(create_server_response_body.decode("utf-8"),
"Create server failure")
def test_create_server_failure_and_list_servers(self):
"""
Test to verify :func:`create_server` fails with given error message
and response code in the metadata and does not actually create a server.
"""
serverfail = {"message": "Create server failure", "code": 500}
metadata = {"create_server_failure": json.dumps(serverfail)}
create_server_response, create_server_response_body = create_server(
self.helper, metadata=metadata)
self.assertEquals(create_server_response.code, 500)
self.assertEquals(
create_server_response_body['computeFault']['message'],
"Create server failure")
self.assertEquals(
create_server_response_body['computeFault']['code'], 500)
# List servers
list_servers = request(self, self.root, b"GET", self.uri + '/servers')
list_servers_response = self.successResultOf(list_servers)
self.assertEquals(list_servers_response.code, 200)
list_servers_response_body = self.successResultOf(
treq.json_content(list_servers_response))
self.assertEquals(list_servers_response_body['servers'], [])
def test_server_in_building_state_for_specified_time(self):
"""
Test to verify :func:`create_server` creates a server in BUILD
status for the time specified in the metadata.
"""
self.do_timing_test(metadata={"server_building": "1"},
before=u"BUILD",
delay=2.0,
after=u"ACTIVE")
def test_server_building_behavior(self):
"""
Like :obj:`test_server_in_building_state_for_specified_time`, but by
creating a behavior via the behaviors API ahead of time, rather than
passing metadata.
"""
use_creation_behavior(self.helper, "build", {"duration": 4.0}, [])
self.do_timing_test(metadata={},
before=u"BUILD",
delay=5.0,
after=u"ACTIVE")
def test_server_active_then_error_behavior(self):
"""
When a server is created with the :obj:`active-then-error` behavior, it
will go into the "error" state after the specified ``duration`` number
of seconds.
"""
use_creation_behavior(
self.helper, "active-then-error", {"duration": 7.0}, [])
self.do_timing_test(metadata={},
before=u"ACTIVE",
delay=8.0,
after=u"ERROR")
def do_timing_test(self, metadata, before, delay, after):
"""
Do a test where a server starts in one status and then transitions to
another after a period of time.
"""
# create a server with metadata that controls its status transitions
server_id = quick_create_server(self.helper, metadata=metadata)
def get_server_status():
return status_of_server(self, server_id)
# get server and verify it is in the expected starting status
self.assertEquals(get_server_status(), before)
# List servers with details and verify the server is in the starting status
list_servers = request(
self, self.root, b"GET", self.uri + '/servers/detail')
list_servers_response = self.successResultOf(list_servers)
self.assertEquals(list_servers_response.code, 200)
list_servers_response_body = self.successResultOf(
treq.json_content(list_servers_response))
self.assertEquals(len(list_servers_response_body['servers']), 1)
building_server = list_servers_response_body['servers'][0]
self.assertEquals(building_server['status'], before)
# Time Passes...
self.helper.clock.advance(delay)
# get server and verify the status changed to the expected final status
self.assertEquals(get_server_status(), after)
def test_server_in_error_state(self):
"""
Test to verify :func:`create_server` creates a server in ERROR state.
"""
metadata = {"server_error": "1"}
# create server with metadata to set status in ERROR
server_id = quick_create_server(self.helper, metadata=metadata)
# get server and verify status is ERROR
get_server = request(self, self.root, b"GET", self.uri + '/servers/' +
server_id)
get_server_response = self.successResultOf(get_server)
get_server_response_body = self.successResultOf(
treq.json_content(get_server_response))
self.assertEquals(
get_server_response_body['server']['status'], "ERROR")
def test_delete_server_fails_specified_number_of_times(self):
"""
Test to verify :func:`delete_server` does not delete the server, and
returns the given response code, the number of times specified in
the metadata.
"""
deletefail = {"times": 1, "code": 500}
metadata = {"delete_server_failure": json.dumps(deletefail)}
# create server and verify it was successful
server_id = quick_create_server(self.helper, metadata=metadata)
# delete server and verify the response
delete_server = request(
self, self.root, b"DELETE", self.uri + '/servers/' + server_id)
delete_server_response = self.successResultOf(delete_server)
self.assertEqual(delete_server_response.code, 500)
# get server and verify the server was not deleted
get_server = request(self, self.root, b"GET", self.uri + '/servers/' +
server_id)
get_server_response = self.successResultOf(get_server)
self.assertEquals(get_server_response.code, 200)
# delete server again and verify the response
delete_server = request(
self, self.root, b"DELETE", self.uri + '/servers/' + server_id)
delete_server_response = self.successResultOf(delete_server)
self.assertEqual(delete_server_response.code, 204)
self.assertEqual(self.successResultOf(treq.content(delete_server_response)),
b"")
# get server and verify the server was deleted this time
get_server = request(
self, self.root, b"GET", self.uri + '/servers/' + server_id)
get_server_response = self.successResultOf(get_server)
self.assertEquals(get_server_response.code, 404)
def test_create_server_failure_using_behaviors(self):
"""
:func:`create_server` fails with the given error message and response
code when a behavior is registered whose criteria match its server name.
"""
use_creation_behavior(
self.helper,
"fail",
{"message": "Create server failure", "code": 500},
[{"server_name": "failing_server_name"}]
)
create_server_response, create_server_response_body = create_server(
self.helper, name="failing_server_name")
self.assertEquals(create_server_response.code, 500)
self.assertEquals(
create_server_response_body['computeFault']['message'],
"Create server failure")
self.assertEquals(
create_server_response_body['computeFault']['code'], 500)
def test_create_server_failure_based_on_metadata(self):
"""
:func:`create_server` fails with the given error message and response
code when a behavior is registered that matches its metadata.
"""
use_creation_behavior(
self.helper,
"fail",
{"message": "Sample failure message",
"type": "specialType", "code": 503},
[{"metadata": {"field1": "value1",
"field2": "reg.*ex"}}]
)
create_server_response, _ = create_server(
self.helper, name="failing_server_name")
self.assertEquals(create_server_response.code, 202)
failing_create_response, failing_create_response_body = create_server(
self.helper,
metadata={"field1": "value1",
"field2": "regular expression"}
)
self.assertEquals(
failing_create_response_body['specialType']['message'],
"Sample failure message")
self.assertEquals(
failing_create_response_body['specialType']['code'], 503)
def _try_false_negative_failure(self, failure_type=None):
"""
Helper function to list servers and verify that there are no servers,
then trigger a false-negative create and verify that it created a
server. Returns the failure response so it can be further verified.
"""
# List servers with details and verify there are no servers
resp, list_body = self.successResultOf(json_request(
self, self.root, b"GET", self.uri + '/servers'))
self.assertEqual(resp.code, 200)
self.assertEqual(len(list_body['servers']), 0)
params = {"message": "Create server failure", "code": 500}
if failure_type is not None:
params["type"] = failure_type
# Get a 500 creating a server
use_creation_behavior(
self.helper,
"false-negative", params, [{"server_name": "failing_server_name"}]
)
create_server_response, body = create_server(
self.helper,
name="failing_server_name",
request_func=request_with_content
)
self.assertEquals(create_server_response.code, 500)
# List servers again and verify the server was created anyway
resp, list_body = self.successResultOf(json_request(
self, self.root, b"GET", self.uri + '/servers'))
self.assertEqual(resp.code, 200)
self.assertEqual(len(list_body['servers']), 1)
return create_server_response, body
def test_create_false_negative_failure_using_behaviors(self):
"""
:func:`create_server` fails with the given error message, type, and
response code, but creates the server anyway, when a behavior is
registered whose criteria match its server name. The type is
'computeFault' by default.
"""
response, body = self._try_false_negative_failure()
body = json.loads(body.decode("utf-8"))
self.assertEquals(body['computeFault']['message'],
"Create server failure")
self.assertEquals(body['computeFault']['code'], 500)
def test_create_false_negative_failure_with_specific_type(self):
"""
:func:`create_server` fails with the given error message, type, and
response code, but creates the server anyway, when a behavior is
registered whose criteria match its server name. The type is
whatever is specified if it's not "string".
"""
response, body = self._try_false_negative_failure('specialType')
body = json.loads(body.decode("utf-8"))
self.assertEquals(body['specialType']['message'],
"Create server failure")
self.assertEquals(body['specialType']['code'], 500)
def test_create_false_negative_failure_with_string_type(self):
"""
:func:`create_server` fails with the given error body and
response code, but creates the server anyway, when a behavior is
registered whose criteria match its server name. The body is just a
string when the type is "string".
"""
response, body = self._try_false_negative_failure("string")
self.assertEquals(body, b"Create server failure")
def test_modify_status_non_existent_server(self):
"""
When using the ``.../attributes`` endpoint, if a non-existent server is
specified, the server will respond with a "bad request" status code and
not modify the status of any server.
"""
nova_control_endpoint = self.helper.auth.get_service_endpoint(
"cloudServersBehavior", "ORD")
server_id_1 = quick_create_server(self.helper)
server_id_2 = quick_create_server(self.helper)
server_id_3 = quick_create_server(self.helper)
status_modification = {
"status": {
server_id_1: "ERROR",
server_id_2: "ERROR",
server_id_3: "ERROR",
"not_a_server_id": "BUILD",
}
}
set_status = request(
self, self.root, b"POST",
nova_control_endpoint + "/attributes/",
json.dumps(status_modification).encode("utf-8")
)
set_status_response = self.successResultOf(set_status)
self.assertEqual(status_of_server(self, server_id_1), "ACTIVE")
self.assertEqual(status_of_server(self, server_id_2), "ACTIVE")
self.assertEqual(status_of_server(self, server_id_3), "ACTIVE")
self.assertEqual(set_status_response.code, 400)
@behavior_tests_helper_class
class NovaCreateServerBehaviorControlPlane(object):
"""
Helper object used to generate tests for Nova create server behavior
CRUD operations.
"""
criteria = [{"server_name": "failing_server_name"}]
names_and_params = (
("fail",
{"message": "Create server failure", "code": 500, "type": "string"}),
("fail",
{"message": "Invalid creation", "code": 400, "type": "string"})
)
def __init__(self, test_case):
"""
Set up the criteria, api mock, etc.
"""
nova_api = NovaApi(["ORD", "MIMIC"])
self.api_helper = APIMockHelper(
test_case, [nova_api, NovaControlApi(nova_api=nova_api)])
self.root = self.api_helper.root
self.behavior_api_endpoint = "{0}/behaviors/creation".format(
self.api_helper.get_service_endpoint("cloudServersBehavior"))
def trigger_event(self):
"""
Create a server with the name "failing_server_name".
"""
return create_server(self.api_helper, name="failing_server_name",
request_func=request_with_content)
def validate_injected_behavior(self, name_and_params, response, body):
"""
Given the behavior that is expected, validate the response and body.
"""
name, params = name_and_params
self.api_helper.test_case.assertEquals(response.code, params['code'])
self.api_helper.test_case.assertEquals(body.decode("utf-8"),
params['message'])
def validate_default_behavior(self, response, body):
"""
Validate the response and body of a successful server create.
"""
self.api_helper.test_case.assertEquals(response.code, 202)
body = json.loads(body.decode("utf-8"))
self.api_helper.test_case.assertIn('server', body)
class NovaAPIMetadataTests(SynchronousTestCase):
"""
Tests for the Nova Api plugin handling metadata.
"""
def setUp(self):
"""
Create a :obj:`MimicCore` with :obj:`NovaApi` as the only plugin,
and create a server
"""
self.helper = APIMockHelper(self, [NovaApi(["ORD", "MIMIC"])])
self.root = self.helper.root
self.uri = self.helper.uri
def get_server_url(self, metadata):
"""
Create a server with the given metadata, and return the URL of
the server.
"""
response, body = create_server(self.helper, metadata=metadata)
self.assertEqual(response.code, 202)
return [
link['href'] for link in body['server']['links']
if link['rel'] == 'self'][0]
def set_metadata(self, request_body):
"""
Create a server with null metadata, then hit the set metadata endpoint
with the given request body.
"""
return self.successResultOf(json_request(
self, self.root, b"PUT", self.get_server_url(None) + '/metadata',
request_body))
def set_metadata_item(self, create_metadata, key, request_body):
"""
Create a server with given metadata, then hit the set metadata item
endpoint with the given request body.
"""
return self.successResultOf(json_request(
self, self.root, b"PUT",
self.get_server_url(create_metadata) + '/metadata/' + key,
request_body))
def get_created_server_metadata(self):
"""
Ok, we've lost the link to the original server. But there should
just be the one. Get its metadata.
"""
resp, body = self.successResultOf(json_request(
self, self.root, b"GET", self.uri + '/servers/detail'))
self.assertEqual(resp.code, 200)
return body['servers'][0]['metadata']
def assert_malformed_body(self, response, body):
"""
Assert that the response and body are 400:malformed request body.
"""
self.assertEqual(response.code, 400)
self.assertEqual(body, {"badRequest": {
"message": "Malformed request body",
"code": 400
}})
def assert_maximum_metadata(self, response, body):
"""
Assert that the response and body are 403:max metadata.
"""
self.assertEqual(response.code, 403)
self.assertEqual(body, {"forbidden": {
"message": "Maximum number of metadata items exceeds 40",
"code": 403
}})
def assert_metadata_not_string(self, response, body):
"""
Assert that the response and body are 400:metadata value not string.
"""
self.assertEqual(response.code, 400)
self.assertEqual(body, {"badRequest": {
"message": (
"Invalid metadata: The input is not a string or unicode"),
"code": 400
}})
def assert_no_such_server(self, response, body):
"""
Assert that the response and body are 404:server does not exist.
"""
self.assertEqual(response.code, 404)
self.assertEqual(body, {
'itemNotFound': {
'message': 'Server does not exist',
'code': 404
}
})
def test_create_server_with_invalid_metadata_object(self):
"""
When ``create_server`` is passed an invalid metadata object (a
string), it should return an HTTP status code of 400: malformed
request body.
"""
self.assert_malformed_body(
*create_server(self.helper, metadata="not metadata"))
def test_create_server_with_too_many_metadata_items(self):
"""
When ``create_server`` is passed metadata with too many items, it
should return an HTTP status code of 403 and an error message saying
there are too many items.
"""
metadata = {"key{0}".format(i): "value{0}".format(i)
for i in range(100)}
self.assert_maximum_metadata(
*create_server(self.helper, metadata=metadata))
def test_create_server_with_invalid_metadata_values(self):
"""
When ``create_server`` is passed metadata with non-string-type values,
it should return an HTTP status code of 400 and an error message
saying that values must be strings or unicode.
"""
self.assert_metadata_not_string(
*create_server(self.helper, metadata={"key": []}))
def test_create_server_too_many_metadata_items_takes_precedence(self):
"""
When ``create_server`` is passed metadata with too many items and
invalid metadata values, the too many items error takes precedence.
"""
metadata = {"key{0}".format(i): [] for i in range(100)}
self.assert_maximum_metadata(
*create_server(self.helper, metadata=metadata))
def test_create_server_null_metadata_succeeds(self):
"""
When ``create_server`` is passed null metadata, it successfully
creates a server.
"""
response, body = create_server(self.helper, metadata=None)
self.assertEqual(response.code, 202)
def test_get_metadata(self):
"""
Getting metadata gets whatever metadata the server has.
"""
metadata = {'key': 'value', 'key2': 'anothervalue'}
response, body = self.successResultOf(json_request(
self, self.root, b"GET",
self.get_server_url(metadata) + '/metadata'))
self.assertEqual(response.code, 200)
self.assertEqual(body, {'metadata': metadata})
# double check against server details
self.assertEqual(
body, {'metadata': self.get_created_server_metadata()})
def test_get_metadata_on_nonexistant_server_404(self):
"""
Getting metadata on a non-existing server results in a 404.
"""
response, body = self.successResultOf(json_request(
self, self.root, b"GET",
self.uri + '/servers/1234/metadata'))
self.assert_no_such_server(response, body)
def test_set_metadata_on_nonexistant_server_404(self):
"""
Setting metadata on a non-existing server results in a 404.
"""
response, body = self.successResultOf(json_request(
self, self.root, b"PUT",
self.uri + '/servers/1234/metadata',
{'metadata': {}}))
self.assert_no_such_server(response, body)
def test_set_metadata_with_only_metadata_body_succeeds(self):
"""
When setting metadata with a body that looks like
``{'metadata': {<valid metadata>}}``, a 200 is received with a valid
response body.
"""
response, body = self.set_metadata({"metadata": {}})
self.assertEqual(response.code, 200)
self.assertEqual(body, {'metadata': {}})
self.assertEqual(self.get_created_server_metadata(), {})
def test_set_metadata_with_extra_keys_succeeds(self):
"""
When setting metadata with a body that contains extra garbage keys,
a 200 is received with a valid response body.
"""
response, body = self.set_metadata({"metadata": {}, "extra": "junk"})
self.assertEqual(response.code, 200)
self.assertEqual(body, {'metadata': {}})
self.assertEqual(self.get_created_server_metadata(), {})
def test_set_metadata_to_null_fails(self):
"""
When setting metadata to null, a 400 with a specific message is
received.
"""
response, body = self.set_metadata({"metadata": None})
self.assertEqual(response.code, 400)
self.assertEqual(body, {
"badRequest": {
"message": "Malformed request body. metadata must be object",
"code": 400
}
})
def test_set_metadata_with_invalid_json_body_fails(self):
"""
When setting metadata with an invalid request body (not a dict), it
should return an HTTP status code of 400: malformed request body.
"""
self.assert_malformed_body(*self.set_metadata(b'meh'))
def test_set_metadata_with_invalid_metadata_object(self):
"""
When ``set_metadata`` is passed a dictionary with the metadata key,
but the metadata is not a dict, it should return an HTTP status code
of 400: malformed request body.
"""
self.assert_malformed_body(
*self.set_metadata({"metadata": "not metadata"}))
def test_set_metadata_without_metadata_key(self):
"""
When ``set_metadata`` is passed metadata with the wrong key, it
should return an HTTP status code of 400: malformed request body.
"""
self.assert_malformed_body(
*self.set_metadata({"meta": {"wrong": "metadata key"}}))
def test_set_metadata_with_too_many_metadata_items(self):
"""
When ``set_metadata`` is passed metadata with too many items, it
should return an HTTP status code of 403 and an error message saying
there are too many items.
"""
metadata = {"key{0}".format(i): "value{0}".format(i)
for i in range(100)}
self.assert_maximum_metadata(
*self.set_metadata({"metadata": metadata}))
def test_set_metadata_with_invalid_metadata_values(self):
"""
When ``set_metadata`` is passed metadata with non-string-type values,
it should return an HTTP status code of 400 and an error message
saying that values must be strings or unicode.
"""
self.assert_metadata_not_string(
*self.set_metadata({"metadata": {"key": []}}))
def test_set_metadata_on_nonexistant_server_404_takes_precedence(self):
"""
Setting metadata on a non-existing server results in a 404, no matter
how broken the metadata is.
"""
response, body = self.successResultOf(json_request(
self, self.root, b"PUT",
self.uri + '/servers/1234/metadata',
b'meh'))
self.assert_no_such_server(response, body)
def test_set_metadata_too_many_metadata_items_takes_precedence(self):
"""
When ``set_metadata`` is passed metadata with too many items and
invalid metadata values, the too many items error takes precedence.
"""
metadata = {"key{0}".format(i): [] for i in range(100)}
self.assert_maximum_metadata(
*self.set_metadata({"metadata": metadata}))
def test_set_metadata_item_on_nonexistant_server_404(self):
"""
Setting metadata item on a non-existing server results in a 404.
"""
response, body = self.successResultOf(json_request(
self, self.root, b"PUT",
self.uri + '/servers/1234/metadata/key',
{'meta': {'key': 'value'}}))
self.assert_no_such_server(response, body)
def test_set_metadata_item_with_only_meta_body_succeeds(self):
"""
When setting a metadata item with a body that looks like
``{'meta': {<valid key>: <valid value>}}``, a 200 is received with a
valid response body.
"""
response, body = self.set_metadata_item(
{}, 'key', {"meta": {'key': 'value'}})
self.assertEqual(response.code, 200)
self.assertEqual(body, {'meta': {'key': 'value'}})
self.assertEqual(self.get_created_server_metadata(), {'key': 'value'})
def test_set_metadata_item_with_extra_keys_succeeds(self):
"""
When setting metadata with a body that contains extra garbage keys,
a 200 is received with a valid response body.
"""
response, body = self.set_metadata_item(
{}, 'key', {"meta": {'key': 'value'}, "extra": "junk"})
self.assertEqual(response.code, 200)
self.assertEqual(body, {'meta': {'key': 'value'}})
self.assertEqual(self.get_created_server_metadata(), {'key': 'value'})
def test_set_metadata_item_with_invalid_json_body_fails(self):
"""
When setting a metadata item with an invalid request body, it should
return an HTTP status code of 400: malformed request body.
"""
self.assert_malformed_body(*self.set_metadata_item({}, "meh", b"meh"))
def test_set_metadata_item_with_wrong_key_fails(self):
"""
Setting a metadata item without a 'meta' key should return an HTTP
status code of 400: malformed request body.
"""
self.assert_malformed_body(
*self.set_metadata_item({}, "meh",
{"metadata": {"meh": "value"}}))
def test_set_metadata_item_with_mismatching_key_and_body(self):
"""
When setting metadata item, the key in the 'meta' dictionary needs to
match the key in the URL, or a special 400 response is returned.
"""
response, body = self.set_metadata_item(
{}, "key", {"meta": {"notkey": "value"}})
self.assertEqual(response.code, 400)
self.assertEqual(body, {
"badRequest": {
"message": "Request body and URI mismatch",
"code": 400
}
})
def test_set_metadata_item_with_wrong_meta_type_fails(self):
"""
Setting a metadata item whose 'meta' key is not mapped to a
dictionary should return an HTTP status code of 400: malformed
request body.
"""
self.assert_malformed_body(
*self.set_metadata_item({}, "meh", {"meta": "wrong"}))
def test_set_metadata_item_with_too_many_keys_and_values(self):
"""
When ``set_metadata_item`` is passed too many keys and values, it
should return an HTTP status code of 400 and a special
metadata-item-only error message saying there are too many items.
"""
response, body = self.set_metadata_item(
{}, 'key', {"meta": {"key": "value", "otherkey": "otherval"}})
self.assertEqual(response.code, 400)
self.assertEqual(body, {
"badRequest": {
"message": "Request body contains too many items",
"code": 400
}
})
def test_set_metadata_item_with_too_many_metadata_items_already(self):
"""
When ``set_metadata_item`` is called with a new key and there are
already the maximum number of metadata items on the server already,
it should return an HTTP status code of 403 and an error message
saying there are too many items.
"""
metadata = {"key{0}".format(i): "value{0}".format(i)
for i in range(40)}
self.assert_maximum_metadata(
*self.set_metadata_item(metadata, 'newkey',
{"meta": {"newkey": "newval"}}))
def test_set_metadata_item_replace_existing_metadata(self):
"""
If there are already the maximum number of metadata items on the
server, but ``set_metadata_item`` is called with an already existing
key, it should succeed (because it replaces the original metadata
item).
"""
metadata = {"key{0}".format(i): "value{0}".format(i)
for i in range(40)}
response, body = self.set_metadata_item(
metadata, 'key0', {"meta": {"key0": "newval"}})
self.assertEqual(response.code, 200)
self.assertEqual(body, {"meta": {"key0": "newval"}})
expected = {"key{0}".format(i): "value{0}".format(i)
for i in range(1, 40)}
expected['key0'] = 'newval'
self.assertEqual(self.get_created_server_metadata(), expected)
def test_set_metadata_item_with_invalid_metadata_values(self):
"""
When ``set_metadata_item`` is passed metadata with non-string-type values,
it should return an HTTP status code of 400 and an error message
saying that values must be strings or unicode.
"""
self.assert_metadata_not_string(
*self.set_metadata_item({}, 'key', {"meta": {"key": []}}))
def test_set_metadata_item_on_nonexistant_server_404_takes_precedence(
self):
"""
Setting metadata item on a non-existing server results in a 404, and
takes precedence over other errors.
"""
response, body = self.successResultOf(json_request(
self, self.root, b"PUT",
self.uri + '/servers/1234/metadata/key',
b'meh'))
self.assert_no_such_server(response, body)
def test_set_metadata_item_too_many_metadata_items_takes_precedence(self):
"""
When ``set_metadata_item`` is passed metadata with too many items and
invalid metadata values, the too many items error takes precedence.
"""
metadata = {"key{0}".format(i): "value{0}".format(i)
for i in range(40)}
self.assert_maximum_metadata(
*self.set_metadata_item(metadata, 'key', {"meta": {"key": []}}))
class NovaServerTests(SynchronousTestCase):
    """
    Tests for :obj:`Server` instances created directly from request JSON.
    """
def test_unique_ips(self):
"""
The private IP address of generated servers will be unique even if
the given ``ipsegment`` factory generates non-unique pairs.
"""
nova_api = NovaApi(["ORD", "MIMIC"])
self.helper = APIMockHelper(
self, [nova_api, NovaControlApi(nova_api=nova_api)]
)
coll = RegionalServerCollection(
tenant_id='abc123', region_name='ORD', clock=self.helper.clock,
servers=[])
creation_json = {
'server': {'name': 'foo', 'flavorRef': 'bar', 'imageRef': 'baz'}}
def ipsegment():
yield 1
yield 1
yield 2
yield 2
yield 3
yield 3
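# each lambda below binds a fresh generator instance as a default argument,
# so both creations draw from the same non-unique 1,1 / 2,2 / 3,3 sequence;
# the distinct private IPs asserted below are enforced by the creation logic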
Server.from_creation_request_json(
coll, creation_json,
ipsegment=lambda ips=ipsegment(): next(ips))
Server.from_creation_request_json(
coll, creation_json,
ipsegment=lambda ips=ipsegment(): next(ips))
self.assertEqual(coll.servers[0].private_ips,
[IPv4Address(address='10.180.1.1')])
self.assertEqual(coll.servers[1].private_ips,
[IPv4Address(address='10.180.2.2')])
|
from _typeshed import Incomplete
def bethe_hessian_matrix(
G, r: Incomplete | None = None, nodelist: Incomplete | None = None
): ...
|
from datetime import date
atual = date.today()
menor = 0
maior = 0
for c in range(1, 8, 1):
nasc = int(input(f'In what year was person {c} born? '))
if (atual.year - nasc) < 18:
menor += 1
else:
maior += 1
print(f'\nIn total we had {maior} people of legal age')
print(f'And we also had {menor} people under age')
|
import numpy as np
import cv2 as cv
import dlib
img = cv.imread('pic.jpg')
ogimg = img.copy()  # copy so drawing contours later does not alter the source image
gray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
canny = cv.Canny(gray, 125, 150)
cv.imshow('cannys', canny)
contours, hierarchy = cv.findContours(canny.copy(), cv.RETR_TREE, cv.CHAIN_APPROX_NONE)
print(len(contours))
areas = []
for cnt in contours:
    area = cv.contourArea(cnt)
    areas.append(area)
sorted_contours = sorted(contours, key=cv.contourArea, reverse=True)
largest = sorted_contours[0]
cv.drawContours(ogimg, largest, -1, (255, 0, 0), 15)
cv.imshow('Largest',ogimg)
cv.waitKey(0)
|
# -*- coding: utf-8 -*-
"""
Created on Wed May 6 11:33:29 2020
@author: juanc
"""
# Import needed functionality
import matplotlib.pyplot as plt
from collections import Counter
def plot_counter(counter, n_most_common=5):
# Subset the n_most_common items from the input counter
top_items = counter.most_common(n_most_common)
# Plot `top_items`
plot_counter_most_common(top_items)
def plot_counter_most_common(top_items):
top_items_dict = dict(top_items)
plt.figure()
plt.bar(range(len(top_items_dict)), list(top_items_dict.values()), align='center')
plt.xticks(range(len(top_items_dict)), list(top_items_dict.keys()), rotation='vertical')
plt.tight_layout()
plt.show()
def sum_counters(counters):
# Sum the inputted counters
return sum(counters, Counter())
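# A minimal usage sketch (sample counts are illustrative only): combine two
# word counters and plot the most common items of the combined counter.
daily_counts = [Counter({'cat': 3, 'dog': 1}), Counter({'cat': 2, 'fish': 4})]
total = sum_counters(daily_counts)  # Counter({'cat': 5, 'fish': 4, 'dog': 1})
plot_counter(total, n_most_common=3)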
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'target2',
'type': 'none',
'sources': [
'../touch.py'
],
'rules': [
{
'rule_name': 'rule2',
'extension': 'py',
'inputs': [],
'outputs': [
'rule.txt',
],
'action': [
'python', '../touch.py', '<(_outputs)',
],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
],
},
],
}
|
"""
Testing CSVLogger
"""
class CSVLogger(BaseLogger):
def __init__(
self,
evaluator,
log_dicts=None,
train_log_dicts=None,
val_log_dicts=None,
log_dir="./logs",
filename="logs.csv",
**kwargs,
):
"""Initiate a CSV logger.
Summary operators are created according to the parameters specified
in the `log_dict`, `train_log_dict` and `val_log_dict` dictionaries.
The `log_dict` dictionary contains the parameters that should be
logged both with training and validation data, whereas the
`train_log_dict` and the `val_log_dict` specifies the summaries that
should be created for only the training data and validation data
respectively. The structure of the dictionaries are as follows:
```
[
    {
        'log_name': 'Name of log 1',
        'log_var': 'first_log_var'
    },
    {
        'log_name': 'Name of log 2',
        'log_var': 'second_log_var'
    }
]
```
The `log_var` entries name the variables that we want to log. For
example, if you want to log the loss of the network, the value should
simply be `'loss'`. First, the evaluator instance is scanned for a
variable with the specified name (in this case, `loss`); then, if no
variable with that name is found, the network instance is scanned.
Finally, if there is no variable with the specified name in the
network instance, the trainable parameters of the network are scanned.
Below is an example of how the
`log_dict` dictionary might look.
```
[
    {
        'log_name': 'Loss',
        'log_var': 'loss'
    },
    {
        'log_name': 'Accuracy',
        'log_var': 'accuracy'
    }
]
```
Parameters:
-----------
evaluator : utils.Evaluator
The network evaluator to log from.
log_dicts : list
    Logging dictionaries used for both training and validation logs.
train_log_dicts : list
    Logging dictionaries used for training logs.
val_log_dicts : list
    Logging dictionaries used for validation logs.
"""
super().__init__(
evaluator=evaluator,
log_dicts=log_dicts,
train_log_dicts=train_log_dicts,
val_log_dicts=val_log_dicts,
)
self.log_dir = Path(log_dir) / self.network.name
self.filename = filename
self.filepath = self.log_dir / filename
self._init_logfile()
both_summary_ops = self._init_logs(self.log_dicts)
self.train_summary_op = self._join_summaries(
self._init_logs(self.train_log_dicts), both_summary_ops
)
self.val_summary_op = self._join_summaries(
self._init_logs(self.val_log_dicts), both_summary_ops
)
def _init_logfile(self):
"""Initiate an empty dataframe with the correct clumns to write the logs in.
"""
if not self.log_dir.is_dir():
self.log_dir.mkdir(parents=True)
self.logs = pd.DataFrame(columns=['train', 'val', 'var_name'])
def _join_summaries(self, *args):
"""Join the summaries to one summary list with one dict.
The input is a series of lists containing one dictionary,
and the output is a single list with one element which is a joined
version of all input dictionaries.
"""
return dict(ChainMap(*args))
def _init_logs(self, log_dict):
"""Initiate the logging operators specified in `log_dicts`.
The logging operator is a single dictionary with variable name as keys
and the corresponding tensorflow operators as values.
Parameters:
-----------
log_dicts : list
List of dictionaries specifying the kind of logs to create.
See `__init__` docstring for examples.
Returns:
--------
dict : The logging operator
"""
logs = tuple(super()._init_logs(log_dict))
return dict(ChainMap(*logs))
def _init_log(self, log_var, var_name, *args, **kwargs):
"""Create a specific log operator.
`*args` and `**kwargs` are ignored.
Attributes
----------
log_var : tensorflow.Tensor
var_name : str
"""
# record the variable name as a row in the log dataframe
self.logs = self.logs.append({'var_name':var_name}, ignore_index=True)
return {var_name: log_var}
def _log(self, summaries, it_num, log_type):
"""Logs a single time step.
"""
# this is where it stopped:
self.logs = self.logs.set_index('var_name')
for name, s in summaries.items():
self.logs[log_type].loc[name] = np.mean(s)
# save the dataframe as a csv-file
self.logs.to_csv(self.filepath, sep='\t', encoding='utf-8')
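# A minimal usage sketch, assuming a project-specific `evaluator` instance
# (its interface is not shown in this file); the log dictionaries follow the
# structure documented in `__init__` above.
# logger = CSVLogger(
#     evaluator,
#     log_dicts=[{'log_name': 'Loss', 'log_var': 'loss'}],
#     val_log_dicts=[{'log_name': 'Accuracy', 'log_var': 'accuracy'}],
#     log_dir='./logs',
#     filename='run1.csv',
# )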
|
import tensorflow as tf
from tensorflow.python.ops import math_ops
from tensorflow.contrib.cudnn_rnn.python.ops import cudnn_rnn_ops
from my.tensorflow.cudnn_recurrent_layers import CudnnLstm, CudnnGru
VERY_NEGATIVE_NUMBER = -1e29
def exp_mask(val, mask):
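# `mask` holds true sequence lengths; build a [batch, time] 0/1 mask and push
# the padded positions toward -inf so a downstream softmax ignores them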
mask = tf.cast(tf.sequence_mask(mask, tf.shape(val)[1]), tf.float32)
return val * mask + (1 - mask) * VERY_NEGATIVE_NUMBER
def get_cudnn_rnn(cell_type):
if cell_type == 'rnn':
    return tf.contrib.cudnn_rnn.CudnnRNNRelu
elif cell_type == 'gru':
    return tf.contrib.cudnn_rnn.CudnnGRU
elif cell_type == 'lstm':
    return tf.contrib.cudnn_rnn.CudnnLSTM
else:
raise ValueError("Invalid cell type! Got %s" % (cell_type))
def create_multi_rnn(cell_type, hidden_size, layers, keep_prob):
is_cudnn = False
if cell_type == 'rnn':
create_cell = tf.contrib.rnn.BasicRNNCell
elif cell_type == 'gru':
create_cell = tf.contrib.rnn.GRUCell
elif cell_type == 'basic_lstm':
create_cell = tf.contrib.rnn.BasicLSTMCell
elif cell_type == 'lstm':
create_cell = tf.contrib.rnn.LSTMCell
elif cell_type == 'cudnn_lstm':
create_cell = tf.contrib.cudnn_rnn.CudnnCompatibleLSTMCell
elif cell_type == 'cudnn_gru':
create_cell = tf.contrib.cudnn_rnn.CudnnCompatibleGRUCell
else:
raise ValueError("Invalid cell type! Got %s" % (cell_type))
cell = lambda : create_cell(num_units = hidden_size)
add_dropout = lambda cell: tf.contrib.rnn.DropoutWrapper(
cell,
input_keep_prob=keep_prob,
seed=11235)
if layers == 1 and not is_cudnn:
return add_dropout(cell())
cells = [cell() for _ in range(layers)]
return add_dropout(tf.contrib.rnn.MultiRNNCell(cells))
def dot_product_attention(tensor1, tensor2, with_bias=True):
'''a = t1 * t2 + b'''
dots = tf.matmul(tensor1, tensor2, transpose_b = True)
if with_bias:
bias = tf.get_variable("bias", shape=(), dtype=tf.float32)
dots += bias
return dots
def tri_linear_attention(x, keys, with_bias=True):
'''a = w1t1 + w2t2 + t1w3t2 + b'''
init = tf.contrib.layers.xavier_initializer()
key_w = tf.get_variable("key_w", shape=keys.shape.as_list()[-1], initializer=init, dtype=tf.float32)
key_logits = tf.tensordot(keys, key_w, axes=[[2], [0]]) # (batch, key_len)
x_w = tf.get_variable("input_w", shape=x.shape.as_list()[-1], initializer=init, dtype=tf.float32)
x_logits = tf.tensordot(x, x_w, axes=[[2], [0]]) # (batch, x_len)
dot_w = tf.get_variable("dot_w", shape=x.shape.as_list()[-1], initializer=init, dtype=tf.float32)
# Compute x * dot_weights first, the batch mult with x
x_dots = x * tf.expand_dims(tf.expand_dims(dot_w, 0), 0)
dot_logits = tf.matmul(x_dots, keys, transpose_b=True)
out = dot_logits + tf.expand_dims(key_logits, 1) + tf.expand_dims(x_logits, 2)
if with_bias:
bias = tf.get_variable("bias", shape=(), dtype=tf.float32)
out += bias
return out
def bahdanau_attention(query, keys, num_units, with_bias=True):
'''
a = v * tanh(Wq q + Wk k + b)
'''
hq = query.get_shape().as_list()[-1]
Wq = tf.get_variable("Wq", [hq, num_units], query.dtype)
print(query.get_shape().as_list())
print(Wq.get_shape().as_list())
aq = tf.einsum("bij,jk->bik", query, Wq) # [b, 1, h_attn]
hk = keys.get_shape().as_list()[-1]
Wk = tf.get_variable("Wk", [hk, num_units], keys.dtype)
ak = tf.einsum("bij,jk->bik", keys, Wk) # [b, n, h_attn]
pre_tanh = aq + ak # [b x n x h_attn]
if with_bias:
b = tf.get_variable("bias", shape=(), dtype=tf.float32)
pre_tanh += b
out = math_ops.tanh(pre_tanh) # [b x n x h_attn]
v = tf.get_variable("V", [num_units, 1], dtype=tf.float32)
return tf.squeeze(tf.einsum("bij,jk->bik", out, v), 2) # [b x n]
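# A minimal shape sketch for bahdanau_attention (TF 1.x, tensors illustrative):
# with tf.variable_scope("attn_demo"):
#     query = tf.placeholder(tf.float32, [None, 1, 64])   # [b, 1, h_q]
#     keys = tf.placeholder(tf.float32, [None, 10, 128])  # [b, n, h_k]
#     scores = bahdanau_attention(query, keys, num_units=32)  # [b, 10]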
def compute_attention_mask(x_mask, mem_mask, x_word_dim, key_word_dim):
""" computes a (batch, x_word_dim, key_word_dim) bool mask for clients that want masking """
if x_mask is None and mem_mask is None:
return None
elif x_mask is None or mem_mask is None:
raise NotImplementedError()
x_mask = tf.sequence_mask(x_mask, x_word_dim)
mem_mask = tf.sequence_mask(mem_mask, key_word_dim)
join_mask = tf.logical_and(tf.expand_dims(x_mask, 2), tf.expand_dims(mem_mask, 1))
return join_mask
def attention_layer(x, a, x_mask, a_mask, sim_func, scope="",
output_alignment=False):
'''
computes enhanced representation of x attending on a
x: tensor, shape [b x n x h]
a: tensor, shape [b x m x h]
x_mask: tensor, true length of x, shape [b]
a_mask: tensor, true length of a, shape [b]
sim_func: similarity function used to compute attention scores, has
signature sim_func(tensor1, tensor2) -> attn score
'''
n = tf.shape(x)[1]
m = tf.shape(a)[1]
dist_matrix = sim_func(x, a)
#print("Sim matrix:")
#print(dist_matrix.get_shape().as_list()) # b x n x m
joint_mask = compute_attention_mask(x_mask, a_mask, n, m)
if joint_mask is not None:
dist_matrix += VERY_NEGATIVE_NUMBER * \
(1 - tf.cast(joint_mask, dist_matrix.dtype))
probs = tf.nn.softmax(dist_matrix) # b x n x m
#print("Probs:")
#print(probs.get_shape().as_list())
attention_vector = tf.matmul(probs, a) # b x n x h
#print("Attn vect:")
#print(attention_vector.get_shape().as_list())
if output_alignment:
return attention_vector, probs
else:
return attention_vector
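# Example wiring (hypothetical tensors): enhance a context representation by
# attending over a question; `dot_product_attention` above is one valid
# sim_func, since it matches the sim_func(tensor1, tensor2) signature.
# ctx_aware = attention_layer(context_h, question_h, context_len,
#                             question_len, sim_func=dot_product_attention)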
def bidaf_attention(context_h, query_h, context_mask, query_mask, sim_func,
output_alignment = False):
'''
slightly modified version of c. clark's bidaf code
output_alignment: boolean, whether to print out the alignment matrix
'''
context_word_dim = tf.shape(context_h)[1]
query_word_dim = tf.shape(query_h)[1]
dist_matrix = sim_func(context_h, query_h)
joint_mask = compute_attention_mask(context_mask, query_mask,
context_word_dim, query_word_dim)
if joint_mask is not None:
dist_matrix += VERY_NEGATIVE_NUMBER * \
(1 - tf.cast(joint_mask, dist_matrix.dtype))
query_probs = tf.nn.softmax(dist_matrix)
# probability of each query_word per context_word
# Batch matrix multiplication to get the attended vectors
select_query = tf.matmul(query_probs, query_h) # (batch, context_words, q_dim)
# select query-to-context
context_dist = tf.reduce_max(dist_matrix, axis=2)  # (batch, context_words)
context_probs = tf.nn.softmax(context_dist) # (batch, context_words)
select_context = tf.einsum("ai,aik->ak", context_probs, context_h) # (batch, context_dim)
select_context = tf.expand_dims(select_context, 1)
output = tf.concat([context_h, select_query, context_h * select_query,
context_h * select_context], axis=2)
if output_alignment:
return output, query_probs
return output
def mem_nn_hop(input, memories):
'''
mem_nn_hop: one hop of a memory network (https://www.arxiv.org/pdf/1503.08895.pdf)
input: tensor, the input (question), shape [b x h]
memories: tensor, memories to look at (context), shape [b x n x h]
'''
u_k = input
# hack to get around no reduce_dot
u_temp = tf.transpose(tf.expand_dims(u_k, -1), [0, 2, 1]) # [b x 1 x h]
dotted = tf.reduce_sum(memories * u_temp, 2) # dot product --> b x n
# Calculate probabilities
probs = tf.nn.softmax(dotted) # b x n
probs_temp = tf.transpose(tf.expand_dims(probs, -1), [0, 2, 1]) # b x 1 x n
c_temp = tf.transpose(memories, [0, 2, 1]) # b x h x n
o_k = tf.reduce_sum(c_temp * probs_temp, 2) # b x h
u_k = u_k + o_k
return u_k
def mem_nn(input, memories, input_pos_enc, n):
'''
mem_nn: memory network
input: tensor, the input (question), shape [b x m x h]
memories: tensor, memories to look at (context), shape [b x n x h]
input_pos_enc: tensor, pos. encoding of the input, shape [b x m x h]
n: int, # of hops to perform
'''
u = tf.reduce_sum(input_pos_enc * input, 1) # [b x h]
print(u.get_shape().as_list())
for _ in range(n):
u = mem_nn_hop(u, memories)
return u
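# e.g. a 3-hop read (hypothetical tensors): question embedding [b x m x h],
# context memories [b x n x h], positional encoding [b x m x h]:
# answer_state = mem_nn(q_emb, mem_emb, pos_enc, n=3)  # -> [b x h]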
def fuse_gate(lhs, rhs):
dim = lhs.shape.as_list()[-1]
assert rhs.shape.as_list()[-1] == dim
lhs1 = tf.layers.dense(lhs, dim, activation=None, name='lhs1')
rhs1 = tf.layers.dense(rhs, dim, activation=None, name='rhs1')
print(lhs1.get_shape().as_list())
z = tf.sigmoid(lhs1 + rhs1)
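# z is a learned elementwise gate: z -> 1 keeps lhs, z -> 0 keeps rhs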
return z * lhs + (1-z) * rhs
def diin_fuse_gate(lhs, rhs):
dim = lhs.shape.as_list()[-1]
assert rhs.shape.as_list()[-1] == dim
lhs1 = tf.layers.dense(lhs, dim, activation=None, name='lhs1')
rhs1 = tf.layers.dense(rhs, dim, activation=None, name='rhs1')
print(lhs1.get_shape().as_list())
z = tf.tanh(lhs1 + rhs1)
lhs2 = tf.layers.dense(lhs, dim, activation=None, name='lhs2')
rhs2 = tf.layers.dense(rhs, dim, activation=None, name='rhs2')
f = tf.sigmoid(lhs2 + rhs2)
print(f.get_shape().as_list())
lhs3 = tf.layers.dense(lhs, dim, activation=None, name='lhs3')
rhs3 = tf.layers.dense(rhs, dim, activation=None, name='rhs3')
r = tf.sigmoid(lhs3 + rhs3)
print(r.get_shape().as_list())
out = f * lhs + r * z
print(out.get_shape().as_list())
return out
def concat_with_product(t1, t2):
return tf.concat([t1, t2, t1 * t2], axis = len(t1.shape) - 1)
def self_attention_encoder(x, sim_func, mask=None, merge_function = None,
output_alignment=False):
'''
self attention encoder
x: tensor, thing to encode, shape [b x n x h]
sim_func: similarity function of two tensors
mask: length of x, tensor, shape [b]
merge_function: function of two inputs to merge x with x_self_attn, often a
fuse gate
output_alignment: boolean, whether to return the alignment matrix
'''
x_dim = tf.shape(x)[1]
dist = sim_func(x, x) # [b x n x n]
joint_mask = compute_attention_mask(mask, mask, x_dim, x_dim)
if joint_mask is not None:
dist += VERY_NEGATIVE_NUMBER * (1 - tf.cast(joint_mask,
dist.dtype))
dist = tf.nn.softmax(dist) # [b x n x n]
print("[b x n x n]")
print(dist.get_shape().as_list())
out = tf.matmul(dist, x) # [b x n x h]
print("[b x n x h]")
print(out.get_shape().as_list())
if merge_function is not None:
out = merge_function(x, out)
if output_alignment:
return out, dist
return out
def bi_rnn_encoder(cell_type, hidden_size, num_layers, keep_prob, inputs,
input_lengths, output_layer = None):
fw_cell = create_multi_rnn(cell_type, hidden_size, num_layers, keep_prob)
bw_cell = create_multi_rnn(cell_type, hidden_size, num_layers, keep_prob)
if input_lengths is not None:
outputs, final_state = tf.nn.bidirectional_dynamic_rnn(
fw_cell,
bw_cell,
inputs,
input_lengths,
dtype=tf.float32)
else:
outputs, final_state = tf.nn.bidirectional_dynamic_rnn(
fw_cell,
bw_cell,
inputs,
dtype=tf.float32)
outputs = tf.concat(outputs, axis=2)
if output_layer is not None:
outputs = output_layer(outputs)
return outputs, final_state
def rnn_encoder(cell_type, hidden_size, num_layers, keep_prob, inputs,
input_lengths, output_layer=None):
cell = create_multi_rnn(cell_type, hidden_size, num_layers, keep_prob)
outputs, final_state = tf.nn.dynamic_rnn(
cell,
inputs,
input_lengths,
dtype=tf.float32)
if output_layer is not None:
outputs = output_layer(outputs)
return outputs, final_state
def bi_cudnn_rnn_encoder(cell_type, hidden_size, num_layers, dropout_rate, inputs,
input_lengths, is_train, output_layer = None):
if cell_type == 'lstm':
RnnLayer = CudnnLstm
elif cell_type == 'gru':
RnnLayer = CudnnGru
else:
raise ValueError()
layer = RnnLayer(
n_units = hidden_size,
n_layers = num_layers)
inputs = tf.layers.dropout(inputs, dropout_rate, training=is_train)
outputs = layer.apply(is_train, inputs, input_lengths)
print(outputs.get_shape().as_list())
if output_layer is not None:
outputs = output_layer(outputs)
return outputs, None
def bi_cudnn_maxout_rnn_encoder(cell_type, hidden_size, num_layers,
dropout_rate, inputs, input_lengths, is_train, output_layer=None,
num_rnns=2):
outputs = []
for i in range(num_rnns):
with tf.variable_scope("worker_%d" % i):
cur_output, _ = bi_cudnn_rnn_encoder(
cell_type,
hidden_size,
num_layers,
dropout_rate,
inputs,
input_lengths,
is_train,
output_layer)
outputs.append(cur_output)
out = tf.reduce_max(tf.stack(outputs, -1), -1)
print(out.get_shape().as_list())
return out, None
#def bi_cudnn_rnn_encoder(cell_type, hidden_size, num_layers, keep_prob, inputs,
# input_lengths, output_layer = None):
#
# if cell_type == 'lstm':
# RnnLayer = tf.contrib.cudnn_rnn.CudnnLSTM
# elif cell_type == 'gru':
# RnnLayer = tf.contrib.cudnn_rnn.CudnnGRU
# elif cell_type == 'rnn':
# RnnLayer = tf.contrib.cudnn_rnn.CudnnRNNRelu
# else:
# raise ValueError("Invalid RNN type! Got %s" % (cell_type))
#
# input_h = inputs.get_shape().as_list()[-1]
# batch_size = inputs.get_shape().as_list()[0]
#
# layer = RnnLayer(
# num_layers=num_layers,
# num_units=hidden_size,
# input_size=input_h,
# direction=cudnn_rnn_ops.CUDNN_RNN_BIDIRECTION)
#
# param_shape = layer.params_size().eval()
#
# params = tf.get_variable(
# "rnn_parameters",
# param_shape,
# tf.float32)
#
# input_transposed = tf.transpose(inputs, [1, 0, 2])
# print(input_transposed.get_shape().as_list())
# print("Should be [None, batch_sz, h]")
#
# input_processed = tf.layers.dropout(input_transposed, keep_prob)
#
# if cell_type == 'lstm':
# if num_layers == 1:
# initial_state_h = tf.zeros(
# (batch_size, hidden_size), tf.float32)
# initial_state_c = tf.zeros(
# (batch_size, hidden_size), tf.float32)
#
# else:
# raise ValueError()
#
# out = layer(input_processed, initial_state_h, initial_state_c,
# params, True)
# else:
# if num_layers == 1:
# initial_state = tf.zeros(
# (batch_size, hidden_size), tf.float32)
# else:
# raise ValueError()
#
# out = layer(input_processed, initial_state, params, True)
#
# output, out_states_h, out_states_c = out
#
# output = tf.transpose(output, [1, 0, 2])
# print(output.get_shape().as_list())
# print("Should be [batch_sz, None, h]")
#
# out_states = tf.contrib.rnn.LSTMStateTuple(out_states_c, out_states_h)
#
# return output, out_states
#def cross_attention(mem_states, query_states, max_mem_steps, max_query_steps, scope):
# # first calculate the similarity matrix
# # mem state size --> batch_size, max_mem_enc_steps, 2*dim_hidden
# # query state size --> batch_size, max_query_enc_steps, 2*dim_hidden
# # size of simialrity matrix = batch_size, max_context_enc_steps,max_query_enc_steps
# max_query_steps = tf.shape(query_states)[1]
# max_mem_steps = tf.shape(mem_states)[1]
# batch_size = self.opt.batch_size
# similarity_matrix = []
# with tf.variable_scope(scope+'/similarity_matrix', reuse=False):
# weight = tf.get_variable('weights',[6*enc_hidden_sz,1])
#
# for i in range(max_mem_steps):
# repeat_vc = tf.tile(tf.expand_dims(mem_states[:,i],0),[max_query_steps,1,1])
# repeat_vc = tf.transpose(repeat_vc,[1,0,2])
# h = tf.concat([repeat_vc,query_states,repeat_vc*query_states],axis=2)
# score = tf.matmul(h,tf.tile(tf.expand_dims(weight,0),[batch_size,1,1]))
# similarity_matrix.append(score)
#
# similarity_matrix = tf.stack(similarity_matrix) # size = max_context_enc_steps,batch_size,max_query_enc_steps, 1
# similarity_matrix = tf.reshape(similarity_matrix,[max_mem_steps,batch_size,max_query_steps])
# similarity_matrix = tf.transpose(similarity_matrix,[1,0,2])
#
#
# '''renormalize attention'''
#
# query_on_mem_context = tf.matmul(tf.nn.softmax(similarity_matrix),query_states)
# print ("query on mem", query_on_mem_context)
# mem_on_query_context = tf.matmul(tf.nn.softmax(tf.transpose(similarity_matrix,[0,2,1])),mem_states)
# print ("mem on query", mem_on_query_context)
# #mem_on_query_context = tf.reduce_max(mem_on_query_context,1)
# #mem_on_query_context = tf.tile(tf.expand_dims(mem_on_query_context,1), [1, max_mem_steps])
# print ("mem on query after reduction and tiling", mem_on_query_context)
# return query_on_mem_context,mem_on_query_context
|
from arcgis_terrain import get_terrain_map
from arcgis.features import FeatureLayer
from arcgis.gis import GIS
from arcgis_terrain import lat_lon2meters
from arcgis_terrain import meters2lat_lon
import time
from arcgis.geometry.filters import envelope_intersects
import arcgis.geometry
import numpy as np
import plotly.graph_objects as go
import plotly.express as px
from arcgis_terrain import point_rotation
from scipy import interpolate
from matplotlib import path
import matplotlib.pyplot as plt
import math
import json
import sys
import csv
import os
import glob
import matlab.engine
def trim_extent(x_pts, y_pts, scaled_extent):
# trim data to only within extents
rm_mask = np.logical_or(x_pts < 0, x_pts > scaled_extent) # mask to remove points (x axis)
rm_mask = np.logical_or(rm_mask,np.logical_or(y_pts < 0, y_pts > scaled_extent)) # other axis mask
x_pts_trimmed = x_pts[np.invert(rm_mask)]
y_pts_trimmed = y_pts[np.invert(rm_mask)] # trim points
return [x_pts_trimmed, y_pts_trimmed]
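# e.g. (hypothetical values) trim_extent(np.array([-5., 3.]), np.array([2., 4.]), 10)
# returns [array([3.]), array([4.])], since the x = -5 point falls outside [0, 10]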
def grab_features(anchor_point, extent, sample_dist = 10, case_name = 'blah', heading = 0, save_files = False, save_to_folder = False, file_id = 'temp', plot_data = False):
roads_url = "https://carto.nationalmap.gov/arcgis/rest/services/transportation/MapServer/30"
river_url = "https://hydro.nationalmap.gov/arcgis/rest/services/nhd/MapServer/6"
riverw_url = "https://hydro.nationalmap.gov/arcgis/rest/services/nhd/MapServer/8"
water_url = "https://hydro.nationalmap.gov/arcgis/rest/services/nhd/MapServer/9"
powerlines_url = "https://services1.arcgis.com/Hp6G80Pky0om7QvQ/ArcGIS/rest/services/Electric_Power_Transmission_Lines/FeatureServer/0"
railroads_url = "https://carto.nationalmap.gov/arcgis/rest/services/transportation/MapServer/35"
trails_url = "https://partnerships.nationalmap.gov/arcgis/rest/services/USGSTrails/MapServer/0"
# adding water_url twice, once for boundaries and once for linear features
# the layer named 'lakes' gets boundary treatment
url_list = [riverw_url, river_url, roads_url, water_url, powerlines_url, railroads_url, trails_url]
name_list = ['rivers_bdd', 'rivers', 'roads', 'lakes', 'powerlines', 'railroads', 'trails']
inac_layers = ['rivers_bdd', 'lakes']
gis = GIS(username="larkinheintzman",password="Meepp97#26640") # linked my arcgis pro account
ap_meters = lat_lon2meters(anchor_point[0], anchor_point[1])
scale_factor = 3/20 # factor to get 6.66667m mapping from 1m mapping (1/6.6667)
scaled_extent = np.ceil(scale_factor*extent).astype(int)
viz_cnt = 0
viz_map = np.zeros([scaled_extent,scaled_extent,len(name_list)+len(inac_layers)])
for i,url in enumerate(url_list):
# binary map, will use feature coords to populate (one per layer)
bin_map = np.zeros([scaled_extent,scaled_extent])
inac_bin_map = np.zeros([scaled_extent,scaled_extent])
geom = arcgis.geometry.Polygon({'spatialReference': {"wkid" : 3857},
'rings': [[
[ap_meters[0] - (extent/2), ap_meters[1] + (extent/2)],
[ap_meters[0] + (extent/2), ap_meters[1] + (extent/2)],
[ap_meters[0] + (extent/2), ap_meters[1] - (extent/2)],
[ap_meters[0] - (extent/2), ap_meters[1] - (extent/2)]
]]})
lyr = FeatureLayer(url = url, gis = gis)
geom_filter = envelope_intersects(geom, sr=geom['spatialReference'])
q = []
query_cnt = 0
while type(q) == list and query_cnt <= 30:  # retry: the arcgis service is flaky and does not always respond
    try:
        print("querying {} layer...".format(name_list[i]))
        query_starttime = time.time()
        q = lyr.query(return_count_only=False, return_ids_only=False, return_geometry=True,
                      out_sr='3857', geometry_filter=geom_filter)
        query_endtime = time.time()
        print("query time {}".format(query_endtime - query_starttime))
    except (json.decoder.JSONDecodeError, TypeError) as e:
        if type(e) != TypeError:
            query_cnt = query_cnt + 1
        print("error on query: {}".format(e))
        print("{} layer failed on query, trying again ...".format(name_list[i]))
        gis = GIS(username="larkinheintzman", password="Meepp97#26640")  # linked my arcgis pro account
        lyr = FeatureLayer(url=url, gis=gis)
if query_cnt > 30 and not q:
print("{} layer failed too many times, leaving empty".format(name_list[i]))
if save_files:
if save_to_folder:
fn = "map_layers\\" + case_name + "\\"+name_list[i]+"_data_"+file_id+".csv"
np.savetxt(fn,bin_map,delimiter=",", fmt='%f')
if name_list[i] in inac_layers:
fn = "map_layers\\" + case_name + "\\" + name_list[i] + "_inac_data_" + file_id + ".csv"
np.savetxt(fn, bin_map, delimiter=",", fmt='%f')
else:
fn = "map_layers\\" + name_list[i]+"_data_"+file_id+".csv"
np.savetxt(fn,bin_map,delimiter=",", fmt='%f')
if name_list[i] in inac_layers:
fn = "map_layers\\" + name_list[i] + "_inac_data_" + file_id + ".csv"
np.savetxt(fn, bin_map, delimiter=",", fmt='%f')
continue
print("{} layer sucessfully queried".format(name_list[i]))
# re-build into list of x-y values
# feat_points = []
query_dict = q.to_dict()
for j,feat in enumerate(query_dict['features']):
# print("starting feature {} ...".format(j))
# pull feature points out of query, they have different keys...
if 'paths' in feat['geometry'].keys():
x_pts = [pt[0] for pt in feat['geometry']['paths'][0]]
y_pts = [pt[1] for pt in feat['geometry']['paths'][0]]
# plot_points = np.array(feat['geometry']['paths'][0])
else:
x_pts = [pt[0] for pt in feat['geometry']['rings'][0]]  # ring features use 'rings' instead of 'paths'
y_pts = [pt[1] for pt in feat['geometry']['rings'][0]]
# plot_points = np.array(feat['geometry']['rings'][0])
# re-center on 0,0 at center
x_pts = (np.array(x_pts) - (ap_meters[0] - (extent/2)))*scale_factor # reduces number of interpolants
y_pts = (np.array(y_pts) - (ap_meters[1] - (extent/2)))*scale_factor
# rotate points about origin to establish heading
[x_pts, y_pts] = point_rotation(origin = [scaled_extent/2,scaled_extent/2],pt = [x_pts, y_pts],ang = heading)
# quick check for a feature that does not enter the extent
[x_pts_trimmed, y_pts_trimmed] = trim_extent(x_pts, y_pts, scaled_extent)
if x_pts_trimmed.shape[0] == 0:
continue
# # treat each section of a feature intersecting the extent as separate
# dists = np.sqrt(np.sum(np.diff(np.array([x_pts, y_pts]).T, axis = 0)**2, axis=1))
# breaks = list(np.where(dists >= dists.mean() + 5*dists.std())[0])
# x_pts_full = x_pts # save full coordinate set
# y_pts_full = y_pts
# breaks = [-1] + breaks + [x_pts.shape[0]-1]
# for br in range(len(breaks) - 1):
# x_pts = x_pts_full[(breaks[br]+1):(breaks[br+1]+1)]
# y_pts = y_pts_full[(breaks[br]+1):(breaks[br+1]+1)]
#
# if x_pts.shape[0] <= 1: # ignore tiny chops
# continue
# if data is too short, add some points in the middle
# while x_pts.shape[0] < 4:
# x_pt = (x_pts[0] + x_pts[1]) / 2 # average between first and second point
# y_pt = (y_pts[0] + y_pts[1]) / 2
# x_pts = np.insert(x_pts, 1, x_pt)
# y_pts = np.insert(y_pts, 1, y_pt)
# total length of feature ring/path, for interpolation along features
total_len = np.sum(np.sqrt(np.sum(np.diff(np.array([x_pts, y_pts]).T, axis=0) ** 2, axis=1)))
interp_starttime = time.time()
tck, u = interpolate.splprep([x_pts, y_pts], s=0, k=1) # parametric interpolation
u_new = np.arange(0, 1 + 1 / total_len, 1 / total_len) # scaled discretization
pts_interp = interpolate.splev(u_new, tck)
interp_endtime = time.time()
print("{} interpolation took {}".format(j,interp_endtime - interp_starttime))
x_pts = pts_interp[0]
y_pts = pts_interp[1]
if name_list[i] in inac_layers:
inac_starttime = time.time()
# do boundary calculation for binary matrix (slow for large boundaries)
ring = path.Path(np.array([x_pts, y_pts]).T)
# test_pts is the rectangular matrix covering ring for boundary calculation
# trim test_pts rectangle to only consider points within the scaled extent
[x_pts_trimmed, y_pts_trimmed] = trim_extent(x_pts, y_pts, scaled_extent)
x_test, y_test = np.meshgrid(np.arange(np.min(x_pts_trimmed), np.max(x_pts_trimmed), 1),
np.arange(np.min(y_pts_trimmed), np.max(y_pts_trimmed), 1))
test_pts = np.array([x_test.flatten(), y_test.flatten()]).T
mask = ring.contains_points(test_pts, radius=1)
# instead of filling gaps, we want to save filled in areas separately
# so we need to re-create the bin_map here but on inac. points
x_pts_inac = test_pts[mask,0]
y_pts_inac = test_pts[mask,1]
inac_endtime = time.time()
print("{} inac took {}".format(j,inac_endtime - inac_starttime))
pts_inac = np.stack([x_pts_inac,y_pts_inac]).T
# remove points being used as linear features
for pt in np.stack([x_pts_trimmed,y_pts_trimmed]).T:
pts_inac = np.delete(pts_inac, np.where(np.equal(pt,pts_inac).all(1)), axis = 0)
# binarization step
pts_inac = np.round(pts_inac).astype(int)
# flip y axis
pts_inac[:,1] = inac_bin_map.shape[1] - pts_inac[:,1]
# remove any points outside limits of binary map (fixes round versus ceil issues)
rm_mask = np.logical_or(pts_inac[:,0] < 0, pts_inac[:,0] >= inac_bin_map.shape[1])
rm_mask = np.logical_or(rm_mask, np.logical_or(pts_inac[:,1] < 0, pts_inac[:,1] >= inac_bin_map.shape[0]))
pts_inac = pts_inac[np.invert(rm_mask),:]
inac_bin_map[pts_inac[:,1], pts_inac[:,0]] = 1 # set indices to 1
# print("looped inac calculation time = {} sec".format(time.time() - s_time))
# trim features to scaled extent
[x_pts, y_pts] = trim_extent(x_pts, y_pts, scaled_extent)
# binarization step
x_pts_idx = np.round(x_pts).astype(int)
y_pts_idx = np.round(y_pts).astype(int)
# flip the y axis because the row indices are flipped
y_pts_idx = bin_map.shape[1] - y_pts_idx
# remove any points outside limits of binary map (fixes round versus ceil issues)
rm_mask = np.logical_or(x_pts_idx < 0, x_pts_idx >= bin_map.shape[1])
rm_mask = np.logical_or(rm_mask, np.logical_or(y_pts_idx < 0, y_pts_idx >= bin_map.shape[0]))
x_pts_idx = x_pts_idx[np.invert(rm_mask)]
y_pts_idx = y_pts_idx[np.invert(rm_mask)]
bin_map[y_pts_idx, x_pts_idx] = 1 # set indices to 1
# print("done with feature {}".format(j))
# add to viz map
if name_list[i] in inac_layers:
viz_map[:, :, viz_cnt] = inac_bin_map
viz_cnt = viz_cnt + 1
if save_files:
if save_to_folder:
fn = "map_layers\\" + case_name + "\\" + name_list[i] + "_inac_data_" + file_id + ".csv"
np.savetxt(fn, inac_bin_map, delimiter=",", fmt='%f')
else:
fn = "map_layers\\" + name_list[i] + "_inac_data_" + file_id + ".csv"
np.savetxt(fn, inac_bin_map, delimiter=",", fmt='%f')
viz_map[:, :, viz_cnt] = bin_map
viz_cnt = viz_cnt + 1
if save_files:
if save_to_folder:
fn = "map_layers\\" + case_name + "\\" + name_list[i]+"_data_"+file_id+".csv"
np.savetxt(fn,bin_map,delimiter=",", fmt='%f')
else:
fn = "map_layers\\" + name_list[i]+"_data_"+file_id+".csv"
np.savetxt(fn,bin_map,delimiter=",", fmt='%f')
# save terrain as csv file (this method is pretty slow, but can compensate with interp)
[e,e_interp,x,y,data,ll_pt] = get_terrain_map(lat_lon=anchor_point,
sample_dist = sample_dist,
extent = extent,
heading = -heading) # because flipping
if save_files:
if save_to_folder:
elv_filename = "map_layers\\" + case_name + "\\elv_data_" + file_id + ".csv"
np.savetxt(elv_filename,e_interp,delimiter=",", fmt='%f')
else:
elv_filename = "map_layers\\elv_data_" + file_id + ".csv"
np.savetxt(elv_filename,e_interp,delimiter=",", fmt='%f')
if plot_data:
plt.imshow(e_interp)
plt.show()
# plt_list = []
# fix stupid names
# for nme in inac_layers:
# name_list.insert(name_list.index(nme), nme+' inac')
for i in range(viz_map.shape[-1]):
# row_idx, col_idx = np.where(viz_map[:,:,i] != 0)
# flip y values
# col_idx = viz_map.shape[0] - col_idx
# plt_list.append(go.Scatter(x=row_idx, y=col_idx, mode='markers', name=name_list[i]))
plt.imshow(viz_map[:,:,i])
# plt.title(name_list[i])
plt.show()
# fig = go.Figure(data=plt_list)
# fig.show()
if __name__ == "__main__":
ics = [
[37.197730, -80.585233,'kentland'],
[36.891640, -81.524214,'hmpark'],
# [38.29288, -78.65848, 'brownmountain'],
# [38.44706, -78.46993, 'devilsditch'],
# [37.67752, -79.33887, 'punchbowl'],
# [37.99092, -78.52798, 'biscuitrun'],
# [37.82520, -79.081910, 'priest'] ,
# [34.12751, -116.93247, 'sanbernardino'] ,
# [31.92245, -109.9673,'[31.92245, -109.9673]'],
# [31.9024, -109.2785,'[31.9024, -109.2785]'],
# [31.42903, -110.2933,'[31.42903, -110.2933]'],
# [34.55, -111.6333,'[34.55, -111.6333]'],
# [34.6, -112.55,'[34.6, -112.55]'],
# [34.82167, -111.8067,'[34.82167, -111.8067]'],
# [33.3975, -111.3478,'[33.3975, -111.3478]'],
# [33.70542, -111.338,'[33.70542, -111.338]'],
# [31.39708, -111.2064,'[31.39708, -111.2064]'],
# [32.4075, -110.825,'[32.4075, -110.825]'],
# [34.89333, -111.8633,'[34.89333, -111.8633]'],
# [34.94833, -111.795,'[34.94833, -111.795]'],
# [31.72262, -110.1878,'[31.72262, -110.1878]'],
# [33.39733, -111.348,'[33.39733, -111.348]'],
# [34.63042, -112.5553,'[34.63042, -112.5553]'],
# [34.55977, -111.6539,'[34.55977, -111.6539]'],
# [34.90287, -111.8131,'[34.90287, -111.8131]'],
# [34.86667, -111.8833,'[34.86667, -111.8833]'],
# [32.43543, -110.7893,'[32.43543, -110.7893]'],
# [32.40917, -110.7098,'[32.40917, -110.7098]'],
# [35.33068, -111.7111,'[35.33068, -111.7111]'],
# [32.01237, -109.3157,'[32.01237, -109.3157]'],
# [31.85073, -109.4219,'[31.85073, -109.4219]'],
# [34.88683, -111.784,'[34.88683, -111.784]'],
# [32.41977, -110.7473,'[32.41977, -110.7473]'],
# [33.60398, -112.5151,'[33.60398, -112.5151]'],
# [33.3968, -111.3481,'[33.3968, -111.3481]'],
# [33.52603, -111.3905,'[33.52603, -111.3905]'],
# [32.33333, -110.8528,'[32.33333, -110.8528]'],
# [32.33583, -110.9102,'[32.33583, -110.9102]'],
# [32.337, -110.9167,'[32.337, -110.9167]'],
# [35.08133, -111.0711,'[35.08133, -111.0711]'],
# [33.25, -113.5093,'[33.25, -113.5093]'],
# [31.50572, -110.6762,'[31.50572, -110.6762]'],
# [34.91667, -111.8,'[34.91667, -111.8]'],
# [35.1938, -114.057,'[35.1938, -114.057]'],
# [33.39715, -111.3479,'[33.39715, -111.3479]'],
# [33.37055, -111.1152,'[33.37055, -111.1152]'],
# [34.0927, -111.4246,'[34.0927, -111.4246]'],
# [31.83522, -110.3567,'[31.83522, -110.3567]'],
# [35.24375, -111.5997,'[35.24375, -111.5997]'],
# [34.82513, -111.7875,'[34.82513, -111.7875]'],
# [33.39705, -111.3479,'[33.39705, -111.3479]'],
# [33.38885, -111.3657,'[33.38885, -111.3657]'],
# [32.82142, -111.2021,'[32.82142, -111.2021]'],
# [34.97868, -111.8964,'[34.97868, -111.8964]'],
# [33.47802, -111.4377,'[33.47802, -111.4377]'],
# [34.82387, -111.7751,'[34.82387, -111.7751]'],
# [34.9253, -111.7341,'[34.9253, -111.7341]'],
# [34.09278, -111.421,'[34.09278, -111.421]'],
# [36.23878, -112.6892,'[36.23878, -112.6892]'],
# [31.33447, -109.8186,'[31.33447, -109.8186]'],
# [36.37473, -106.6795,'[36.37473, -106.6795]'],
# [42.02893, -74.33659,'[42.02893, -74.33659]'],
# [41.54819, -80.33056,'[41.54819, -80.33056]'],
# [42.0097, -74.42595,'[42.0097, -74.42595]'],
# [42.17965, -74.21362,'[42.17965, -74.21362]'],
# [42.55121, -73.4874,'[42.55121, -73.4874]'],
# [42.01362, -80.35002,'[42.01362, -80.35002]'],
# [42.74271, -73.45475,'[42.74271, -73.45475]'],
# [42.1549, -74.20523,'[42.1549, -74.20523]'],
# [42.90324, -73.8047,'[42.90324, -73.8047]'],
# [44.16065, -73.85545,'[44.16065, -73.85545]'],
# [43.42736, -74.4481,'[43.42736, -74.4481]'],
# [44.18, -72.99531,'[44.18, -72.99531]'],
# [43.52022, -73.59601,'[43.52022, -73.59601]'],
# [44.12531, -73.78635,'[44.12531, -73.78635]'],
# [43.73413, -74.25577,'[43.73413, -74.25577]'],
# [43.06902, -74.48481,'[43.06902, -74.48481]'],
# [43.8756, -74.43076,'[43.8756, -74.43076]'],
# [43.41544, -74.4148,'[43.41544, -74.4148]'],
# [43.42473, -73.73209,'[43.42473, -73.73209]'],
# [43.59779, -74.55354,'[43.59779, -74.55354]'],
# [43.4449, -74.4086,'[43.4449, -74.4086]'],
# [43.4332, -74.41433,'[43.4332, -74.41433]'],
# [43.4539, -74.52317,'[43.4539, -74.52317]'],
# [43.63354, -74.55927,'[43.63354, -74.55927]'],
# [44.19204, -74.26329,'[44.19204, -74.26329]'],
# [44.31349, -74.56818,'[44.31349, -74.56818]'],
# [43.42498, -74.41496,'[43.42498, -74.41496]'],
# [43.33195, -74.49095,'[43.33195, -74.49095]'],
# [43.95385, -75.15748,'[43.95385, -75.15748]'],
# [44.12993, -74.58844,'[44.12993, -74.58844]'],
# [44.28656, -74.61429,'[44.28656, -74.61429]'],
# [43.51063, -74.57393,'[43.51063, -74.57393]'],
# [44.19013, -74.81336,'[44.19013, -74.81336]'],
# [43.65649, -76.00019,'[43.65649, -76.00019]'],
# [42.42501, -76.47478,'[42.42501, -76.47478]'],
# [42.31735, -76.47791,'[42.31735, -76.47791]'],
# [42.34432, -77.47638,'[42.34432, -77.47638]'],
# [42.25618, -77.78988,'[42.25618, -77.78988]'],
# [42.39831, -72.88675,'[42.39831, -72.88675]'],
# [48.1103, -121.4917,'[48.1103, -121.4917]'],
# [48.64606, -122.4247,'[48.64606, -122.4247]']
]
base_dir = 'C:/Users/Larkin/ags_grabber'
# for item in ics:
# try:
# os.mkdir(base_dir + '/map_layers/' + item[2])
# except FileExistsError:
# pass
#
eng = matlab.engine.start_matlab() # engine for running matlab
for i,ics_pt in enumerate(ics):
# try:
# os.mkdir(base_dir + '/map_layers/' + str(ics_pt[2]))
# except FileExistsError:
# pass
anchor_point = [float(ics_pt[0]), float(ics_pt[1])]
extent = 10e3
save_flag = True
plot_flag = False
file_extension = 'temp'
sample_dist = int(extent/100)
heading = 0
start_time = time.time()
grab_features(anchor_point = anchor_point, extent = extent, sample_dist = sample_dist, case_name = str(ics_pt[2]),
heading = heading, save_files = save_flag, save_to_folder = False, file_id = file_extension, plot_data = plot_flag)
        time.sleep(1)  # brief pause so the output files finish writing before MATLAB reads them
# run matlab
if save_flag:
res = eng.importmap_py(str(ics_pt[2]), base_dir)
print("------- total time = {} seconds, iteration {}/{} ------".format(time.time() - start_time,i,len(ics)))
eng.quit()
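    # A minimal standalone sketch of the boundary-fill idea used in grab_features above:
    # build a Path from polygon vertices, rasterize a meshgrid over its bounding box, and
    # keep the grid points the polygon contains (the demo names below are illustrative only).
    # demo_ring = path.Path(np.array([[0, 0], [10, 0], [10, 10], [0, 10]]))
    # gx, gy = np.meshgrid(np.arange(0, 11), np.arange(0, 11))
    # demo_pts = np.array([gx.flatten(), gy.flatten()]).T
    # inside = demo_ring.contains_points(demo_pts)  # boolean mask of interior grid points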
|
__author__ = 'Justin'
import os
import json
import matplotlib.pyplot as plt
import numpy as np
from collections import deque
# matplotlib.mlab.normpdf was removed in matplotlib 3.1; this helper computes the
# same Gaussian pdf with plain numpy
def normpdf(x, mean, std):
    return np.exp(-0.5 * ((np.asarray(x) - mean) / std) ** 2) / (std * np.sqrt(2.0 * np.pi))
# DESCRIPTION: this script will provide a simulation of the gradient ascent/descent method's performance
#
# Load Characterization from file
# Load Data
cwd = os.getcwd()
folder = filepath = os.path.abspath(os.path.join(cwd, '..', 'Project Data','GradientOptimization'))
filename = "ErrorDistribution3.json"
filepath = os.path.abspath(os.path.join(folder,filename))
with open(filepath) as data:
errorProbs = json.load(data)
filename = "zenweights3.json"
filepath = os.path.abspath(os.path.join(folder,filename))
with open(filepath) as data:
zenweights = json.load(data)
MINweight = zenweights[errorProbs.index(min(errorProbs))]
# Fit to Gaussian
mean = 0.32; std = 0.16 # Experimentally Determined
x_range = np.linspace(0,1,100)
gauss_errorprobs = normpdf(x_range, mean, std)
MAXgauss_error = max(gauss_errorprobs)
scale = (1.0-min(errorProbs))/max(gauss_errorprobs)
gauss_errorprobs = np.add(np.multiply(gauss_errorprobs,-scale),1.0)
# Plot Data
acceptanceProbs = [1-value for value in errorProbs]
fig,ax = plt.subplots()
ax.plot(zenweights,errorProbs)
ax.plot(x_range,gauss_errorprobs)
ax.set_xlim([0,1])
plt.title('Probability of Error vs. Zenweight')
plt.xlabel('Zenweight')
plt.ylabel('Prob. of Error')
# plt.show()
# Assume Distribution on Ideal Weight
a = 0.1; b = 0.4; average_weight = (a+b)/2.0  # mean of Uniform(a, b)
randval = np.random.uniform(a,b,1)
shift = randval-MINweight
# Generate Specific Pr(error|weight) Plot
gauss_errorprobs = normpdf(x_range, mean+shift, std)
scale = (1.0-min(errorProbs))/MAXgauss_error
gauss_errorprobs = np.add(np.multiply(gauss_errorprobs,-scale),1.0)
# Plot Data
fig,ax = plt.subplots()
ax.plot(x_range,gauss_errorprobs)
ax.set_xlim([0,1])
plt.title('Probability of Error vs. Zenweight')
plt.xlabel('Zenweight')
plt.ylabel('Prob. of Error')
# plt.show()
# Iterative Ascent/Descent Algorithm
delta = 0.1
# Initialize
weighthistory = []
wzen = average_weight
randval = np.random.uniform(a,b,1)
shift = randval-MINweight
scale = (1.0-min(errorProbs))/MAXgauss_error
best_weight = mean+shift
maxiter = 1000
threshold = 0.1
MAlen = 20
# Algorithm Loop START
y = []
MAlist = deque([wzen for i in range(0,MAlen,1)])
iter = 0
converged = False
while(not converged and iter < maxiter):
weighthistory.append(wzen)
    feedback_prob = 1-scale*normpdf(wzen, mean+shift, std)
y.append(feedback_prob[0])
result = np.random.binomial(n=1,p=feedback_prob)
diff = best_weight-wzen
sign = np.sign(diff)
if(wzen <= 0 or wzen >= 1):
wzen += sign*delta
else:
if(result==1):
wzen += sign*delta
elif(result==0):
wzen += sign*-delta
wzen = wzen[0]
MAlist.appendleft(np.mean(weighthistory))
MAlist.pop()
# Check for Convergence
converged = True
for value in list(MAlist):
test = abs(value-best_weight)/best_weight < threshold
converged = converged and test
iter += 1
# Plot Wzen Changes
gauss_errorprobs = normpdf(x_range, mean+shift, std)
scale = (1.0-min(errorProbs))/MAXgauss_error
gauss_errorprobs = np.add(np.multiply(gauss_errorprobs,-scale),1.0)
fig,ax = plt.subplots()
ax.plot(x_range,gauss_errorprobs)
ax.plot(weighthistory,y)
ax.set_xlim([0,1])
plt.title('Zen Weight Changes')
plt.xlabel('Zenweight')
plt.ylabel('Prob. of Error')
# plt.show()
# Plot Convergence of Average to Ideal
averages = []
for index in range(0,len(weighthistory),1):
average = np.mean(weighthistory[0:index+1])
averages.append(average)
fig,ax = plt.subplots()
ax.plot(range(0,len(averages)),averages)
ax.plot(range(0,len(averages)),[best_weight for i in range(0,len(averages))] )
plt.title('Ascent / Descent Algorithm')
plt.xlabel('Iteration')
plt.ylabel('Lambda Estimation')
plt.show()
# Iterative Ascent/Descent Algorithm (Sweep Deltas)
if False:  # disabled by default; set to True to run the step-size sweep
AVGiters = []
deltas = np.linspace(0.03,.22,20)
for delta in deltas:
numtrials = 50
iters = []
for _ in range(0,numtrials,1):
# Initialize
weighthistory = []
wzen = average_weight
randval = np.random.uniform(a,b,1)
shift = randval-MINweight
scale = (1.0-min(errorProbs))/MAXgauss_error
best_weight = mean+shift
maxiter = 1000
threshold = 0.1
MAlen = 5
# Algorithm Loop START
y = []
MAlist = deque([wzen for i in range(0,MAlen,1)])
iter = 0
converged = False
while(not converged and iter < maxiter):
weighthistory.append(wzen)
                feedback_prob = 1-scale*normpdf(wzen, mean+shift, std)
y.append(feedback_prob[0])
result = np.random.binomial(n=1,p=feedback_prob)
diff = best_weight-wzen
sign = np.sign(diff)
if(wzen <= 0 or wzen >= 1):
wzen += sign*delta
else:
if(result==1):
wzen += sign*delta
elif(result==0):
wzen += sign*-delta
wzen = wzen[0]
MAlist.appendleft(np.mean(weighthistory))
MAlist.pop()
# Check for Convergence
converged = True
for value in list(MAlist):
test = abs(value-best_weight)/best_weight < threshold
converged = converged and test
iter += 1
if(iter != 0 and iter != 1000 and iter != 1):
iters.append(iter)
# else:
# iters.append(maxiter)
print('Delta',delta)
print(iters)
AVGiters.append(np.mean(iters))
# Plot Iterations till Convergence vs. Step Size
fig,ax = plt.subplots()
ax.plot(deltas,AVGiters)
plt.show()
# # Normalize
# acceptanceProbs = [1-value for value in errorProbs]
# deltazen = zenweights[1]-zenweights[0]
# norm = np.sum(np.multiply(acceptanceProbs,deltazen))
# print('Initial Area: ',norm)
# areapresent = 0.9
# acceptanceProbs = np.multiply(acceptanceProbs,areapresent/norm)
#
#
# # Plot Data
# fig,ax = plt.subplots()
# ax.plot(zenweights,acceptanceProbs)
# ax.set_xlim([0,1])
# plt.title('Normalized Probability of Acceptance vs. Zenweight')
# plt.xlabel('Zenweight')
# plt.ylabel('Norm. Prob. of Acceptance')
#
# # Fit Data to Gaussian
# mu = np.sum(np.multiply(np.multiply(acceptanceProbs,deltazen),zenweights))
# print('Mu: ',mu)
# variance = np.sum(np.multiply(np.multiply(np.power(acceptanceProbs,2),deltazen),zenweights))-pow(mu,2)
# print('Var: ',variance)
# sigma = math.sqrt(variance)
# print('Sigma: ',sigma)
#
#
# # Plot Gaussian Fit
# gaussprobs = mlab.normpdf(np.array(zenweights), 0.32, 0.17)
# # gaussprobs = mlab.normpdf(np.array(zenweights), mu, sigma)
# plt.plot(zenweights,gaussprobs)
#
# norm = np.sum(np.multiply(gaussprobs,deltazen))
# print('Final Area: ',norm)
# mu = np.sum(np.multiply(np.multiply(gaussprobs,deltazen),zenweights))
# print('Approx mu: ',mu)
#
# plt.show()
|
import datetime
import programs
import unittest
class TestLightProgram(unittest.TestCase):
def setUp(self):
self.morning_program = programs.LightProgram("weekday", "morning", {"start_at": "08:15", "duration": 3600, "brightness": 100})
self.evening_program = programs.LightProgram("weekday", "evening", {"start_at": "22:15", "duration": 1800})
self.lightprograms = programs.LightPrograms(force_defaults=False)
def test_duration(self):
now = datetime.datetime(2016, 3, 30, 8, 34, 5, 690085)
percent_done = self.morning_program.percent_done(now)
self.assertIsNotNone(percent_done)
self.assertAlmostEqual(percent_done, 0.318247245833)
now = datetime.datetime(2016, 3, 30, 9, 34, 5, 690085)
self.assertIsNone(self.morning_program.percent_done(now))
now = datetime.datetime(2016, 3, 30, 6, 34, 5, 690085)
self.assertIsNone(self.morning_program.percent_done(now))
def test_dump(self):
data = self.morning_program.dump()
self.assertEqual(data["start_at"], "08:15")
self.assertEqual(data["duration"], 3600)
self.assertEqual(data["brightness"], 100)
def test_next_start_running_now(self):
now = datetime.datetime(2016, 3, 28, 8, 34, 5, 690085)
self.assertEqual(self.morning_program.start_datetime(now), datetime.datetime(2016, 3, 28, 8, 15, 0))
def test_next_end_running_now(self):
now = datetime.datetime(2016, 3, 28, 8, 34, 5, 690085)
self.assertEqual(self.morning_program.end_datetime(now), datetime.datetime(2016, 3, 28, 9, 15, 0))
def test_next_start_running_done(self):
now = datetime.datetime(2016, 3, 28, 9, 34, 5, 690085)
self.assertEqual(self.morning_program.start_datetime(now), datetime.datetime(2016, 3, 29, 8, 15, 0))
def test_next_end_running_done(self):
now = datetime.datetime(2016, 3, 28, 9, 34, 5, 690085)
self.assertEqual(self.morning_program.end_datetime(now), datetime.datetime(2016, 3, 29, 9, 15, 0))
def test_next_wrapup(self):
now = datetime.datetime(2016, 4, 2, 10, 0, 0)
self.assertEqual(self.morning_program.end_datetime(now), datetime.datetime(2016, 4, 4, 9, 15))
def test_calc_to_days(self):
self.assertEqual(self.morning_program.calc_days_to(6, (0,)), 1)
self.assertEqual(self.morning_program.calc_days_to(5, (0, 1)), 2)
self.assertEqual(self.morning_program.calc_days_to(0, (5, 6)), 5)
self.assertEqual(self.morning_program.calc_days_to(2, (0, 1, 2, 3, 4)), 0)
def test_is_running(self):
now = datetime.datetime(2016, 3, 30, 8, 34, 5, 690085)
self.assertTrue(self.lightprograms.is_program_running(now, self.morning_program))
class TestLightPrograms(unittest.TestCase):
def setUp(self):
self.morning_program = programs.LightProgram("weekday", "morning", {"start_at": "08:15", "duration": 60, "brightness": 100})
self.evening_program = programs.LightProgram("weekday", "evening", {"start_at": "22:15", "duration": 45})
self.lightprograms = programs.LightPrograms(force_defaults=True)
def test_get_day_programs(self):
morning, evening = self.lightprograms.get_day_programs(0)
self.assertIsNotNone(morning)
self.assertIsNotNone(evening)
self.assertEqual(morning.tod, "morning")
self.assertEqual(evening.tod, "evening")
self.assertEqual(morning.period, "weekday")
self.assertEqual(evening.period, "weekday")
morning, evening = self.lightprograms.get_day_programs(4)
self.assertEqual(morning.period, "weekday")
self.assertEqual(evening.period, "weekend")
morning, evening = self.lightprograms.get_day_programs(5)
self.assertEqual(morning.period, "weekend")
self.assertEqual(evening.period, "weekend")
morning, evening = self.lightprograms.get_day_programs(6)
self.assertEqual(morning.period, "weekend")
self.assertEqual(evening.period, "weekday")
def test_is_day_or_night(self):
now = datetime.datetime(2016, 3, 30, 8, 34, 5, 690085)
self.assertTrue(self.lightprograms.is_day(now))
self.assertFalse(self.lightprograms.is_night(now))
now = datetime.datetime(2016, 3, 30, 7, 34, 5, 690085)
self.assertFalse(self.lightprograms.is_day(now))
self.assertTrue(self.lightprograms.is_night(now))
now = datetime.datetime(2016, 3, 30, 16, 34, 5, 690085)
self.assertTrue(self.lightprograms.is_day(now))
self.assertFalse(self.lightprograms.is_night(now))
now = datetime.datetime(2016, 3, 30, 23, 0, 5, 690085)
self.assertFalse(self.lightprograms.is_day(now))
self.assertTrue(self.lightprograms.is_night(now))
now = datetime.datetime(2016, 3, 30, 1, 0, 5, 690085)
self.assertFalse(self.lightprograms.is_day(now))
self.assertTrue(self.lightprograms.is_night(now))
def test_timer_length(self):
now = datetime.datetime(2016, 3, 30, 8, 34, 5, 690085)
self.assertEqual(self.lightprograms.set_default_timer_length(now), 15)
def test_get_running_program(self):
now = datetime.datetime(2016, 3, 30, 8, 34, 5, 690085)
data = self.lightprograms.get_running_program(now)
self.assertIsNotNone(data)
self.assertEqual(data.period, "weekday")
self.assertEqual(data.tod, "morning")
def test_no_program_running(self):
now = datetime.datetime(2016, 3, 30, 9, 34, 5, 690085)
data = self.lightprograms.get_running_program(now)
self.assertIsNone(data)
class TestRunningMorning(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
|
import sys
sys.path.append('.')
from django.db import models
# Create your models here.
class Team(models.Model):
name = models.CharField(max_length=100, null=False)
description = models.CharField(max_length=250, null=True)
class Sport(models.Model):
name = models.CharField(max_length=100, null=False)
description = models.CharField(max_length=250, null=True)
class Product(models.Model):
name = models.CharField(max_length=100, null=False)
description = models.CharField(max_length=250, null=True)
price = models.FloatField(null=False)
class Customer(models.Model):
name = models.CharField(max_length=100, null=False)
num_doc = models.CharField(max_length=50, null=False)
phone = models.CharField(max_length=20, null=True)
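# A hypothetical usage sketch (shown commented out, since it needs a configured Django
# project with migrations applied); the names and values below are illustrative only:
# team = Team.objects.create(name='Lions', description='City league team')
# product = Product.objects.create(name='Match ball', description='Size 5', price=19.99)
# Customer.objects.create(name='Ana', num_doc='12345678', phone='555-0100')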
|
# @Time : 2018-10-24
# @Author : zxh
# Quick test
import requests
r = requests.get('http://192.168.213.51:8899/test1')
print(r.json())
r = requests.post('http://192.168.213.51:8899/test1', json={'key': 1})
print(r.json())
|
import pandas as pd
import time
import os
import multiprocessing as mp
import traceback
import logging
import sys
logging.basicConfig(filename='logs.log',
filemode='a',
format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',
datefmt='%H:%M:%S',
level=logging.DEBUG)
# Merge old and new history files
def merge_old_new_history(old_history_path, new_history_path):
    old_history = pd.read_csv(old_history_path)
    new_history = pd.read_csv(new_history_path)
    # DataFrame.append was removed in pandas 2.0; concat is the supported equivalent
    return pd.concat([old_history, new_history], ignore_index=True).to_csv(old_history_path)
# partial payments removal
def removing_partial_payments(dataset,acct_id):
#logging.info(str(len(dataset['payment_hdr_id'].unique())) + ' Initial Payments')
initial_payment = str(len(dataset['payment_hdr_id'].unique()))
    if dataset['invoice_amount_norm'].isnull().any():
        dataset = dataset[dataset['invoice_amount_norm'].notnull()]
    if dataset['payment_hdr_id'].isnull().any():
        dataset = dataset[dataset['payment_hdr_id'].notnull()]
    #valid_payment = str(len(dataset['payment_hdr_id'].unique()))
    dataset['invoice_amount_norm'] = dataset['invoice_amount_norm'].astype('float64')
    # Aggregate per payment: the sum of invoice_amount_norm and the max payment_amount
    temp = dataset.groupby('payment_hdr_id').agg({'invoice_amount_norm': 'sum', 'payment_amount': 'max'}).reset_index()
    # A payment is valid (non-partial) when the payment amount equals the sum of its invoice amounts
    valid_payments = temp[temp['payment_amount'] == temp['invoice_amount_norm']]['payment_hdr_id'].unique()
    dataset = dataset[dataset['payment_hdr_id'].isin(valid_payments)]
    return dataset, initial_payment
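# A toy illustration of the validity rule above (commented out; the data is made up):
# payment P1 covers its two invoices exactly (60 + 40 == 100) and survives, while P2
# is partial (30 < 50) and is dropped.
# toy = pd.DataFrame({'payment_hdr_id': ['P1', 'P1', 'P2'],
#                     'invoice_amount_norm': [60.0, 40.0, 30.0],
#                     'payment_amount': [100.0, 100.0, 50.0]})
# toy_valid, _ = removing_partial_payments(toy, acct_id='demo')
# toy_valid['payment_hdr_id'].unique()  # -> ['P1']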
# To find the open invoices that are before the payment date
def filter_before_payment_date(customer_name, data, recent_payments_list, mode_flag):
try:
sdk = pd.DataFrame()
unique_payments = data['payment_hdr_id'].unique()
# if len(unique_payments)>25:
# logging.info('Customer ' + str(customer_name) + ' with payment count ' + str(len(unique_payments)) + ' has been removed.')
# return sdk.reset_index(drop=True)
logging.info('Preparing history for customer ' + str(customer_name) + ' with payment count '+str(len(unique_payments)))
for payment in unique_payments:
            # iterate over every unique payment id in this customer's data
if (mode_flag == 'predictions') & (payment not in recent_payments_list):
continue
payment_date = data[data['payment_hdr_id'] == payment].iloc[0]['effective_date']
            # payment_date is the effective date (= invoice payment date) of this payment
            open_invoices_data = data[((data['isOpen'] == 1) & (data['invoice_date_norm'] <= payment_date))
                                      | ((data['effective_date'] >= payment_date) & (data['invoice_date_norm'] <= payment_date))].copy()
            # Snapshot of the invoices that were open when this payment arrived; an invoice qualifies if either:
            # 1) it is still open (isOpen == 1) and was issued on or before the payment date, or
            # 2) it was paid on or after the payment date and was issued on or before the payment date
            #    (effective_date and the invoice payment date were made identical in an earlier step)
            # .copy() avoids a pandas SettingWithCopyWarning on the assignments below
open_invoices_data['payment_id'] = payment
open_invoices_data['customer_name'] = customer_name
sdk = pd.concat([sdk, open_invoices_data.reset_index(drop=True)], ignore_index=True)
return sdk.reset_index(drop=True)
except Exception as e:
print(e)
# Multiprocessing code for history generation
def history_generation(retraining_data_path,open_invoices_path, recent_payments_path, new_history_path,mode_flag, number_of_process,acct_id,path,log_path):
logging.info('History Generation Started')
try:
error_flag = 0
transformed_dataframe = pd.DataFrame()
if mode_flag=='predictions':
open_invoices_data = pd.read_csv(r''+ open_invoices_path, encoding='cp1256')
recent_payments_data = pd.read_csv(r''+ recent_payments_path, encoding='cp1256')
if open_invoices_data['payment_hdr_id'].isnull().any() == True:
open_invoices_data = open_invoices_data[open_invoices_data['payment_hdr_id'].notnull()]
open_invoices_data = open_invoices_data[open_invoices_data['invoice_number_norm'].notnull()]
recent_payments_data = recent_payments_data[recent_payments_data['invoice_number_norm'].notnull()]
if 'isOpen' not in open_invoices_data.columns:
open_invoices_data['isOpen'] = open_invoices_data['isopen']
if 'isOpen' not in recent_payments_data.columns:
recent_payments_data['isOpen'] = recent_payments_data['isopen']
recent_payments_data, initial_payment = removing_partial_payments(recent_payments_data, acct_id)
valid_payment=str(len(recent_payments_data['payment_hdr_id'].unique()))
hist = {'Account No.': [acct_id],
'Total valid payment(initially)': [initial_payment],
'Payments (after the partial payments are removed)': [valid_payment]}
final_report = pd.DataFrame.from_dict(hist)
final_report.to_csv(path+'/account_'+acct_id+'/summary.csv',index = False)
            dataset = pd.concat([open_invoices_data, recent_payments_data], ignore_index=True)
recent_payments_list = recent_payments_data['payment_hdr_id'].unique()
elif mode_flag=='retraining':
dataset = pd.read_csv(r''+ retraining_data_path,encoding='cp1256')
uni_cust = dataset.customer_number_norm.nunique()
            # cp1256 is the Windows-1256 (Arabic) code page; it matches the encoding the source extracts were written in
dataset = dataset[dataset['invoice_number_norm'].notnull()]
if 'isOpen' not in dataset.columns:
dataset['isOpen'] = dataset['isopen']
dataset, initial_payment = removing_partial_payments(dataset, acct_id)
valid_payment = str(len(dataset['payment_hdr_id'].unique()))
hist = {'Account No.': [acct_id],
'Total valid payment(initially)': [initial_payment],
'Payments (after the partial payments are removed)': [valid_payment],
'Unique_Customer': [uni_cust]
}
final_report = pd.DataFrame.from_dict(hist)
final_report.to_csv(path+'/account_'+acct_id+'/summary.csv',index = False)
recent_payments_list = dataset['payment_hdr_id'].unique()
# Formatting dates
dataset['effective_date'] = dataset['effective_date'].astype(str)
dataset['effective_date'] = dataset['effective_date'].str.strip()
dataset['effective_date'] = pd.to_datetime(dataset['effective_date'],format='%d-%m-%Y')
# If invoice date is not present in the db use document date
if dataset['invoice_date_norm'].isnull().all():
# if document_date_norm is null
if dataset['document_date_norm'].isnull().all():
dataset['create_date'] = dataset['create_date'].astype(str)
dataset['create_date'] = dataset['create_date'].str.strip()
dataset['create_date'] = pd.to_datetime(dataset['create_date'],format='%d-%m-%Y')
dataset['document_date_norm'] = dataset['create_date']
dataset['document_date_norm'] = pd.to_datetime(dataset['document_date_norm'],format='%d-%m-%Y')
dataset['document_date_norm'] = dataset['document_date_norm'].astype(str)
dataset['document_date_norm'] = dataset['document_date_norm'].str.strip()
dataset['document_date_norm'] = pd.to_datetime(dataset['document_date_norm'],format='%d-%m-%Y')
dataset['invoice_date_norm'] = dataset['document_date_norm']
dataset['invoice_date_norm'] = pd.to_datetime(dataset['invoice_date_norm'],format='%d-%m-%Y')
dataset['customer_number_norm'] = dataset['customer_number_norm'].astype(str)
dataset['customer_number_norm'] = dataset['customer_number_norm'].str.strip()
dataset = dataset[dataset['customer_number_norm'] != 'NULL']
# if mode_flag=='retraining':
# # removing the top customer
# uni = dataset['customer_number_norm'].value_counts().keys()
# dataset = dataset[dataset['customer_number_norm'] != uni[0]]
dataset.drop_duplicates(subset=['invoice_number_norm','customer_number_norm'],keep='first',inplace=True)
customer_groups = dataset.groupby(by=['customer_number_norm'])
pool = mp.Pool(processes = number_of_process)
start = time.time()
results = [pool.apply_async(filter_before_payment_date, args=(customer_name_, data_, recent_payments_list, mode_flag)) for customer_name_, data_ in customer_groups]
results = [res.get() for res in results]
for r in results:
transformed_dataframe = pd.concat([transformed_dataframe, r] , ignore_index=True)
end = time.time()
print("History Generation "+"------ Took {0} seconds -----".format(end - start))
pool.close()
pool.join()
transformed_dataframe.to_csv(new_history_path, encoding='cp1256')
logging.info('History Generation Done with ' + str(len(transformed_dataframe['payment_id'].unique()))+' payments.')
progress=pd.read_csv(log_path+"progress.csv")
progress['Status']='History Generation'
progress.to_csv(log_path+"progress.csv",index=False)
except Exception as e:
logging.info(str(e))
error_flag = 1
finally:
if error_flag == 1:
transformed_dataframe.to_csv(new_history_path, encoding='cp1256')
logging.info('History Generation Done with ' + str(len(transformed_dataframe['payment_id'].unique())) + ' payments.')
if __name__ == '__main__':
mode_flag = 'retraining'
acct_id = str(sys.argv[1])
path = str(sys.argv[2])
number_of_process = 2
retraining_data_path = path+"/account_"+acct_id+"/data_extracted/retraining_data.csv"
new_history_path = path+"/account_"+acct_id+"/history_generated/history.csv"
open_invoices_path = ' '
recent_payments_path = ' '
log_path=path+'/account_'+str(acct_id)+'/logs/'
#History Generation
history_generation(retraining_data_path, open_invoices_path,recent_payments_path, new_history_path, mode_flag, number_of_process,acct_id,path,log_path)
|
import math
def countBits(n):
counter = 0
    while n > 0:
        num = n % 2
        n = math.floor(n / 2)
        if num == 1:
            counter += 1
return counter
#clever solution: return bin(n).count("1") :(
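# A quick sanity check (not part of the original kata): the loop above should agree
# with the bin(n).count("1") one-liner for a few sample values.
if __name__ == '__main__':
    for sample in (0, 1, 7, 10, 1234):
        assert countBits(sample) == bin(sample).count("1")
    print("countBits agrees with bin(n).count('1') on all samples")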
|
# Write a Python program to replace the last element in a list with another list
def replace(list1,list2):
list1[-1]= list2
return list1
list1 = [1,2,3]
list2 = [4,5]
output = replace(list1,list2)
print(output)
|
import tensorflow as tf
from tensorflow.keras import datasets, layers, models
class CnnModel:
def __init__(self):
self.train_images = None
self.train_labels = None
self.test_images = None
self.test_labels = None
self.model = None
def execute(self):
self.download_data()
self.create_model()
self.train_model()
self.eval_model()
def download_data(self):
(self.train_images, self.train_labels), (self.test_images, self.test_labels) = datasets.mnist.load_data()
self.train_images = self.train_images.reshape((60000, 28, 28, 1))
self.test_images = self.test_images.reshape((10000, 28, 28, 1))
        # Normalize pixel values to the [0, 1] range.
self.train_images, self.test_images = self.train_images / 255.0, self.test_images / 255.0
def create_model(self):
self.model = models.Sequential()
self.model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)))
self.model.add(layers.MaxPooling2D((2, 2)))
self.model.add(layers.Conv2D(64, (3, 3), activation='relu'))
self.model.add(layers.MaxPooling2D((2, 2)))
self.model.add(layers.Conv2D(64, (3, 3), activation='relu'))
self.model.add(layers.Flatten())
self.model.add(layers.Dense(64, activation='relu'))
self.model.add(layers.Dense(10, activation='softmax'))
print(self.model.summary())
self.model.compile(optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
def train_model(self):
self.model.fit(self.train_images, self.train_labels, epochs=5)
def eval_model(self):
test_loss, test_acc = self.model.evaluate(self.test_images, self.test_labels, verbose=2)
print(test_acc)
if __name__ == '__main__':
c = CnnModel()
c.execute()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2020-04-15 09:01:45
# @Author : Fallen (xdd043@qq.com)
# @Link : https://github.com/fallencrasher/python-learning
# @Version : $Id$
# the random module
import random
# Returns a float in the half-open interval [0.0, 1.0); takes no arguments
# random.random()
a = random.random()
print(a)
# Returns a float N with a <= N <= b (the upper endpoint may or may not be included, due to rounding)
# random.uniform()
b = random.uniform(3,5)
print(b)
# Closed interval: returns an integer N with a <= N <= b
# random.randint(a,b)
c = random.randint(3,10)
print(c)
# Shuffle: randomly reorders the elements in place; the argument must be a mutable sequence such as a list
# random.shuffle(x)
lst1 = list(range(10))
print(lst1)
random.shuffle(lst1)
print(lst1)
# Sampling: picks k unique elements from x and returns them as a new list
# random.sample(x,k)
tu1 = (1,2,3,4,5,6,)
lst2 = random.sample(tu1,5)
print(lst2)
|
#!/usr/bin/env python
"""
* asas_catalog.R in Python
** input parameters:
- deboss arff fpath
- asas arff fpath
- features to exclude
** output:
- asas_randomForest.Rdat fpath
  - classifier efficiency metrics
** I want to call the full AL R script, but be able to
modify some bits.
*** wrapping the R code in a python string is less ideal
- but it could just be for a specific version of the AL/MACC code
"""
import os, sys
from rpy2.robjects.packages import importr
from rpy2 import robjects
if __name__ == '__main__':
# TODO: do some popen of the asas_catalog.R script
# source a file which initializes variables
# source asas_catalog.R
pars = {'root_dirpath':"/Data/dstarr/src/ASASCatalog/",
'deboss_srcids_arff_fpath':"/Data/dstarr/src/ASASCatalog/data/debosscher_feats_20120305.arff",
'deboss_train_arff_fpath':"/Data/dstarr/src/ASASCatalog/data/train_20120327_10ntree_5mtry.arff",
'asas_test_arff_fpath':"/Data/dstarr/src/ASASCatalog/data/test_20120327_10ntree_5mtry.arff",
'rf_clfr_fpath':"/home/dstarr/scratch/macc_wrapper_rfclfr.rdat",
}
### Initialize:
r_str = '''
set.seed(1)
source("{root_dirpath}R/utils_classify.R")
source("{root_dirpath}R/class_cv.R")
source("{root_dirpath}R/missForest.R")
source("{root_dirpath}R/utils_PTF.R")
source("{root_dirpath}R/runJunkClass.R")
library(randomForest)
library(nnet)
library(foreign)
'''.format(root_dirpath=pars['root_dirpath'])
robjects.r(r_str)
r_str = '''
path = "{root_dirpath}"
asas_test_arff_fpath = "{asas_test_arff_fpath}"
rf_clfr_fpath="{rf_clfr_fpath}"
# Load Debosscher data
debdat=read.arff(file="{deboss_srcids_arff_fpath}")
ID.use = debdat$source_id
debdat=read.arff(file="{deboss_train_arff_fpath}")
use = which(debdat$source_id {isin} ID.use)
debdat = debdat[use,]
ID = debdat$source_id
debdat$class = paste(debdat$class)
deb.reclassify = read.table(paste(path,"data/reclassified_debosscher_eclipsing.dat",sep=""))
debdat$class[which(ID {isin} deb.reclassify[,1])] = deb.reclassify[,2]
# straighten out T Tauri subclasses (AAM visual reclassifications)
ttau.cl = c(163375,163434,163445,163480,163585,163762,163907,164145,164355)
debdat$class[debdat$source_id {isin} ttau.cl] = 201
ttau.wl = c(163981,164277)
debdat$class[debdat$source_id {isin} ttau.wl] = 202
class.deb = class.debos(debdat$class)
# re-label the source that Nat found to be wrong
class.deb[ID==164154] = "y. W Ursae Maj."
p = dim(debdat)[2]
feat.debos = data.frame(debdat)[,-c(1,p)] # Deb features
'''.format(isin="%in%",
root_dirpath=pars['root_dirpath'],
rf_clfr_fpath=pars['rf_clfr_fpath'],
deboss_srcids_arff_fpath=pars['deboss_srcids_arff_fpath'],
deboss_train_arff_fpath=pars['deboss_train_arff_fpath'],
asas_test_arff_fpath=pars['asas_test_arff_fpath'])
robjects.r(r_str)
### Remove useless features from the training data:
r_str = '''
rem = c(which(substr(names(feat.debos),1,7) == "eclpoly"))
rem = c(rem,which(names(feat.debos)=="color_bv_extinction"))
rem = c(rem,which(names(feat.debos)=="color_diff_bj"))
rem = c(rem,which(names(feat.debos)=="color_diff_hk"))
rem = c(rem,which(names(feat.debos)=="color_diff_jh"))
rem = c(rem,which(names(feat.debos)=="color_diff_rj"))
rem = c(rem,which(names(feat.debos)=="color_diff_vj"))
rem = c(rem,which(names(feat.debos)=="n_points"))
rem = c(rem,which(names(feat.debos)=="freq_rrd"))
rem = c(rem,which(substr(names(feat.debos),17,27)=="rel_phase_0"))
feat.debos = feat.debos[,-rem]
'''.format()
robjects.r(r_str)
#import pdb; pdb.set_trace()
#print
### NOTE: must use the local version of asas_catalog.R:
r_str = 'source("asas_catalog.R")'
#r_str = 'source("{root_dirpath}R/asas_catalog.R")'.format(root_dirpath=pars['root_dirpath'])
robjects.r(r_str)
#import pdb; pdb.set_trace()
#print
### TODO: ensure .R files are coming from current path
### TODO: retrieve resulting metrics
|
import numpy as np
from getmem import GetMem
from plot_dirSpec import plot_dirSpec
data = np.loadtxt('teste_spec.txt')
freq = data[:,0]
energy = data[:,2]
a1 = data[:,4]
b1 = data[:,5]
a2 = data[:,6]
b2 = data[:,7]
E = np.tile(energy, (360,1)).T
norm_mem= GetMem(a1,b1,a2,b2)
dirSpec = E*norm_mem
dirSpec = dirSpec.T
plot_dirSpec(dirSpec, freq, directions=None, vmin=0,filename=None)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, with_statement
import random
import string
from revolver.core import sudo
from revolver import command, package, file, service
from revolver import contextmanager as ctx
from revolver import directory as dir
def _preseed_server(root_password):
seed_config = """# Mysql preseed generated by revolver
mysql-server mysql-server/root_password password %(root_password)s
mysql-server mysql-server/root_password_again password %(root_password)s
mysql-server mysql-server/start_on_boot boolean true
""" % {"root_password": root_password}
seed_dir = "/var/cache/local/preseeding/"
seed_file = seed_dir + "mysql-server.seed"
if file.exists(seed_file):
return
with ctx.sudo():
dir.create(seed_dir, recursive=True)
file.write(seed_file, seed_config)
sudo("debconf-set-selections %s" % seed_file)
def _store_root_cnf(password):
cnf_config = """# Config generated by revolver
[client]
host = localhost
user = root
password = %s
""" % password
cnf_dir = "/etc/mysql/"
cnf_file = cnf_dir + "root.cnf"
if file.exists(cnf_file):
return
with ctx.sudo():
file.write(cnf_file, cnf_config)
file.link(cnf_file, "/root/.my.cnf")
def _generate_password(length=15):
chars = string.ascii_lowercase + string.ascii_uppercase + string.digits
return "".join(random.choice(chars) for x in xrange(length))
def _execute_mysql(sql):
sudo("mysql --defaults-file=/etc/mysql/root.cnf -e '%s'" % sql)
def install(server=False, password=None):
packages = ["mysql-client", "libmysqlclient-dev"]
if server:
if not password:
password = _generate_password()
_preseed_server(password)
_store_root_cnf(password)
packages.append("mysql-server")
# TODO Use this always on package.install?
with ctx.prefix("export DEBIAN_FRONTEND=noninteractive"):
package.install(packages)
if server:
service.start("mysql")
def ensure(server=False, password=None):
commands_found = True
if not command.exists("mysql"):
commands_found = False
if server and not command.exists("mysqld"):
commands_found = False
if not commands_found:
install(server=server, password=password)
def ensure_database(name, charset=None, collate=None):
sql = "CREATE DATABASE IF NOT EXISTS `%s`" % name
if charset:
sql += " DEFAULT CHARACTER SET %s" % charset
if collate:
sql += " DEFAULT COLLATE %s" % collate
_execute_mysql(sql)
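# A hypothetical usage sketch (commented out, since these helpers run privileged
# commands on the target host via sudo); the database name and charset are examples:
# install(server=True)  # preseeds debconf, installs mysql-server, stores root creds
# ensure_database("app_db", charset="utf8mb4", collate="utf8mb4_unicode_ci")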
|
from apps.api.stats.serializers import type as serializers
from apps.games.models import Game
from django.core.cache import cache
from django.db.models import Avg, Max, Min
from utils import constants
def get_count_per_year(request):
key = 'stats_v2_count_per_year'
context = cache.get(key)
if context is None:
context = []
for year in range(1990, 2015):
games = Game.objects.filter(year_published=year)
best_games = games[:3]
top3 = []
for game in best_games:
top3.append(serializers.GameMediumSerializer(game, context={'request': request}).data)
context.append(dict(all=games.count(),
games=games.filter(expansion=False).count(),
expansion=games.filter(expansion=True).count(),
rank=games.filter(rank__lt=constants.MAX_RANK, expansion=False).count(),
kickstarter=games.filter(family__slug='kickstarter').count(),
year=year,
top3=top3))
time = 30 * 24 * 60 * 60
cache.set(key, context, time)
return context
def get_details(request, slug, object):
key = 'stats_details_%s_%s_%s' % ('v1', object.__name__.lower(), slug)
context = cache.get(key)
if context is None:
try:
query = object.objects.get(slug=slug)
except object.DoesNotExist:
return None
games_all = query.all
games_rank = query.rank
games_wrong = query.games.filter(year_published=0).count()
games_expansion = query.expansion
info = query.games.filter(year_published__gt=0).aggregate(
Min('year_published'),
Max('year_published'),
Min('rank'),
)
info.update(
dict(games_all=games_all, games_rank=games_rank, games_wrong=games_wrong,
games_expansion=games_expansion, name=query.name, slug=query.slug, type=query.type))
year_published_max = info.get('year_published__max')
year_published_min = info.get('year_published__min')
context = dict()
context['info'] = info
context['best_game'] = serializers.GameNormalSerializer(query.games[0],
context={'request': request}).data
items = []
if year_published_min < 1990:
year_published_min = 1990
for year in range(year_published_min, year_published_max + 1):
games_per_year = query.games.filter(year_published=year)
count = games_per_year.count()
if count:
count_rank = query.games_rank.filter(year_published=year).count()
count_expansion = query.games_expansion.filter(year_published=year).count()
rank = games_per_year.aggregate(Min('rank'), Max('rank'), Avg('rank'))
if rank.get('rank__min') < constants.MAX_RANK:
best_game_year = games_per_year[0]
best_game_year = serializers.GameNormalSerializer(best_game_year,
context={'request': request}).data
else:
best_game_year = None
else:
count_rank = 0
count_expansion = 0
rank = None
best_game_year = None
items.append(
dict(year=year, count=count, count_rank=count_rank, count_expansion=count_expansion,
rank=rank, best_game=best_game_year))
context['data'] = items
cache.set(key, context, 30 * 24 * 60 * 60)
return context
|
"""百度ip地址定位api简单封装
详细请参考文档:http://developer.baidu.com/map/ip-location-api.htm
"""
import json
from urllib.parse import urlencode
from urllib.request import urlopen
class BaiduIp():
url = 'http://api.map.baidu.com/location/ip?'
def __init__(self, ak, ip='', coor='bd09ll'):
self.ak = ak
self.ip = ip
self.coor = coor
def get_url(self):
data = {
'ip': self.ip,
'ak': self.ak,
'coor': self.coor
}
return BaiduIp.url + urlencode(data)
def get_by_ip(self, ip=''):
self.ip = ip
response = urlopen(self.get_url())
result = json.loads(response.read().decode())
return result
if __name__ == '__main__':
    p = BaiduIp('<your-ak>')  # fill in your Baidu API key (ak)
print(p.get_by_ip())
|
from django.urls import path
from rest_framework_simplejwt import views as jwt_views
from .views import CustomUserCreate, Hello
urlpatterns = [
path('user/create/', CustomUserCreate.as_view(), name="create_user"),
path('user/hello/', Hello.as_view(), name="hello_user"),
path('token/obtain/', jwt_views.TokenObtainPairView.as_view(), name='token_create'),
path('token/refresh/', jwt_views.TokenRefreshView.as_view(), name='token_refresh'),
]
|
from person import person
class Agent(person):
    def __init__(self, age: int, sex: chr, receptivity: float, infected_with: dict = None,
                 visited_nodes: list = None):
        # Mutable default arguments are shared across all calls in Python, so default to
        # None and create a fresh container per instance instead.
        super().__init__(age, sex, receptivity, infected_with if infected_with is not None else {})
        self.visited_nodes = visited_nodes if visited_nodes is not None else []
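# A minimal usage sketch, assuming person.__init__ accepts the four arguments the
# super() call above forwards (the person class itself is not shown in this file):
# agent_a = Agent(age=30, sex='F', receptivity=0.7)
# agent_b = Agent(age=42, sex='M', receptivity=0.3)
# agent_a.visited_nodes.append('node-1')  # no longer leaks into agent_b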
|
# -*- coding: utf-8 -*-
from plone.dexterity.content import Container
from sc.photogallery.interfaces import IPhotoGallery
from zope.interface import implementer
# BBB: for content created with version 1.0a1
import sys
sys.modules['sc.photogallery.content.photogallery'] = sys.modules[__name__] # noqa: I003
@implementer(IPhotoGallery)
class PhotoGallery(Container):
"""A Photo Gallery content type with a slideshow view."""
# FIXME: @property
def image(self):
"""Return the first image on a Photo Gallery."""
# photo gallery contain only images; we don't need to filter the list
images = self.listFolderContents()
return images[0] if len(images) > 0 else None
# FIXME: @property
def image_caption(self):
"""Return the description of the first image in a Photo Gallery."""
try:
return self.image().Description()
except AttributeError:
return u''
# XXX: compatibility with folder_summary_view in folders
image_thumb = image
# XXX: this probably should not be necessary if we can define the
# image method of the class as a property
def tag(self, **kwargs):
"""Return a tag for the first image in a Photo Gallery."""
try:
scales = self.image().restrictedTraverse('@@images')
return scales.tag('image', **kwargs)
except AttributeError:
return None
|
'''
Created on 01/02/2013
@author: rafael.cunha
'''
#from model import
from appController import SessionManager
def test_check_credentials_ok():
    # the Python 2-only '<>' operator is invalid in Python 3; use identity checks instead
    assert SessionManager.check_credentials('Rafael') is not None
def test_check_credentials_nok():
    assert SessionManager.check_credentials('rafael') is None
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
""" This module exports the db meta data."""
__version__ = '1.0.1'
from sqlalchemy import MetaData
# from sqlalchemy import create_engine
# from web_backend.config import DATABASE
# engine = create_engine('{0}://{1}:{2}@{3}:{4}/{5}'.format(
# DATABASE['type'],
# DATABASE['user'], DATABASE['password'], DATABASE['host'],
# DATABASE['port'], DATABASE['database'])
# # pool_size=250, max_overflow=20
# )
# nvl_meta = MetaData(bind=engine)
nvl_meta = MetaData()
|
import kivy
from kivy.uix.label import Label as lb
from kivy.uix.gridlayout import GridLayout as gl
from kivy.uix.button import Button as btn
from kivy.clock import Clock
from kivy.app import App
from kivy.uix.image import Image
from kivy.uix.floatlayout import FloatLayout as fl
import time as dt
start=dt.time_ns()
con=0
class Main(App):
def build(self):
self.c=1
home=fl()
self.icon="Images/Bub.ico"
back=Image(source="Images/Jerusalem.jpeg",allow_stretch=True,keep_ratio=False)
self.clan=gl(cols=1,rows=2)
self.mn=lb(text=str(0),font_size=92,color=(0,1,0,1),font_name=r"Fonts/digital_7/digital-7.ttf")
self.clan.add_widget(self.mn)
self.clan.add_widget(btn(text='click',background_color=(1,2,2,1),size_hint=(.6,.14),on_press=self.free))
home.add_widget(back)
home.add_widget(lb(text="Digital Tasbeeh",font_name='Fonts/valentica/valentina',font_size=74))
home.add_widget(self.clan)
return home
def free(self,dtm):
self.mn.text=str(self.c)
self.c+=1
Main().run()
|
import fileModule
def main(args):
filename = args['fileid']
fm = fileModule.FileManager()
data = fm.loadFile(filename)
return{"return": data.read()}
|
DAYS = {'Monday': lambda a: a == 12,
'Tuesday': lambda b: b > 95,
'Wednesday': lambda c: c == 34,
'Thursday': lambda d: d == 34,
'Friday': lambda e: e % 2 == 0,
'Saturday': lambda f: f == 56,
'Sunday': lambda g: abs(g) == 666}
def am_I_afraid(day, num):
""" am_i_afraid == PEP8, forced camelCase by CodeWars """
return DAYS[day](num)
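# A few example calls (not part of the original kata, but consistent with the table
# of predicates above):
if __name__ == '__main__':
    assert am_I_afraid('Friday', 2) is True     # any even number is scary on Friday
    assert am_I_afraid('Monday', 11) is False   # only exactly 12 is scary on Monday
    assert am_I_afraid('Sunday', -666) is True  # |g| == 666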
|
from behave import *
from behave_webdriver.transformers import matcher_mapping
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
if 'transform-parse' not in matcher_mapping:
use_step_matcher('re')
else:
use_step_matcher('transform-re')
@given('the element "([^"]*)?" is( not)* visible')
@then('I expect that element "([^"]*)?" becomes( not)* visible')
@then('I expect that element "([^"]*)?" is( not)* visible')
def check_element_visibility(context, element, negative):
element_is_visible = context.behave_driver.element_visible(element)
if negative:
assert not element_is_visible, 'Expected element to not be visible, but it was'
else:
assert element_is_visible, 'Expected element to be visible, but it was not visible'
@given('the title is( not)* "([^"]*)?"')
@then('I expect that the title is( not)* "([^"]*)?"')
def title(context, negative, value):
if negative:
assert context.behave_driver.title != value, 'Title was "{}"'.format(context.behave_driver.title)
else:
assert context.behave_driver.title == value, 'Title was "{}"'.format(context.behave_driver.title)
@then('I expect that element "([^"]*)?" is( not)* within the viewport')
def check_element_within_viewport(context, element, negative):
element_in_viewport = context.behave_driver.element_in_viewport(element)
if negative:
assert not element_in_viewport, 'Element was completely within the viewport'
else:
assert element_in_viewport, 'Element was not completely within viewport'
@given('the element "([^"]*)?" is( not)* enabled')
@then('I expect that element "([^"]*)?" is( not)* enabled')
def element_enabled(context, element, negative):
enabled = context.behave_driver.element_enabled(element)
if negative:
assert not enabled
else:
assert enabled
@given('the element "([^"]*)?" is( not)* selected')
@then('I expect that element "([^"]*)?" is( not)* selected')
def element_selected(context, element, negative):
selected = context.behave_driver.element_selected(element)
if negative:
assert not selected
else:
assert selected
@given('the checkbox "([^"]*)?" is( not)* checked')
@then('I expect that checkbox "([^"]*)?" is( not)* checked')
def element_checked(context, element, negative):
checked = context.behave_driver.element_selected(element)
if negative:
assert not checked
else:
assert checked
@given('there is (an|no) element "([^"]*)?" on the page')
def element_exists(context, an_no, element):
negative = an_no == 'no'
exists = context.behave_driver.element_exists(element)
if negative:
assert not exists
else:
assert exists
@then('I expect that element "([^"]*)?" does( not)* exist')
def check_element_exists(context, element, negative):
exists = context.behave_driver.element_exists(element)
if negative:
assert not exists, 'Expected the element does not exist, but element "{}" was located'.format(element)
else:
assert exists, 'Expected element to exist, but no element "{}" was located'.format(element)
@given('the element "([^"]*)?" contains( not)* the same text as element "([^"]*)?"')
@then('I expect that element "([^"]*)?"( not)* contains the same text as element "([^"]*)?"')
def elements_same_text(context, first_element, negative, second_element):
first_elem_text = context.behave_driver.get_element_text(first_element)
second_elem_text = context.behave_driver.get_element_text(second_element)
same = first_elem_text == second_elem_text
if negative:
assert not same, 'Element "{}" text "{}" is same as element "{}"'.format(first_element,
first_elem_text,
second_element)
else:
assert same, 'Element "{}" text "{}" is not same as element "{}" text "{}"'.format(first_element,
first_elem_text,
second_element,
second_elem_text)
@given('the element "([^"]*)?"( not)* matches the text "([^"]*)?"')
@then('I expect that element "([^"]*)?"( not)* matches the text "([^"]*)?"')
def element_matches_text(context, element, negative, text):
elem_text = context.behave_driver.get_element_text(element)
matches = elem_text == text
if negative:
assert not matches, 'Element "{}" text matches "{}"'.format(element,
text)
else:
assert matches, 'The text "{}" did not match the element text "{}"'.format(text, elem_text)
@given('the element "([^"]*)?"( not)* contains the text "([^"]*)?"')
@then('I expect that element "([^"]*)?"( not)* contains the text "([^"]*)?"')
def check_element_contains_text(context, element, negative, text):
contains = context.behave_driver.element_contains(element, text)
if negative:
assert not contains, 'Element text does contain "{}"'.format(text)
else:
assert contains, 'Element text does not contain "{}"'.format(text)
@given('the element "([^"]*)?"( not)* contains any text')
@then('I expect that element "([^"]*)?"( not)* contains any text')
def element_any_text(context, element, negative):
any_text = bool(context.behave_driver.get_element_text(element))
if negative:
assert not any_text
else:
assert any_text
@given('the element "([^"]*)?" is( not)* empty')
@then('I expect that element "([^"]*)?" is( not)* empty')
def check_element_empty(context, element, negative):
elem_text = context.behave_driver.get_element_text(element)
any_text = bool(elem_text)
if negative:
assert any_text is True
else:
assert any_text is False
@given('the page url is( not)* "([^"]*)?"')
@then('I expect that the url is( not)* "([^"]*)?"')
def check_url(context, negative, value):
current_url = context.behave_driver.current_url
if negative:
assert current_url != value, 'The url was "{}"'.format(current_url)
else:
assert current_url == value, 'Expected url to be "{}", but saw the url was "{}"'.format(value, current_url)
@then('I expect the url to( not)* contain "([^"]*)?"')
def check_url_contains(context, negative, value):
current_url = context.behave_driver.current_url
if negative:
assert value not in current_url, 'url was "{}"'.format(current_url)
else:
assert value in current_url, 'url was "{}"'.format(current_url)
@given('the( css)* attribute "([^"]*)?" from element "([^"]*)?" is( not)* "([^"]*)?"')
@then('I expect that the( css)* attribute "([^"]*)?" from element "([^"]*)?" is( not)* "([^"]*)?"')
def check_element_attribute(context, is_css, attr, element, negative, value):
if is_css:
attribute_value, value = context.behave_driver.get_element_attribute(element, attr, is_css, value)
else:
attribute_value = context.behave_driver.get_element_attribute(element, attr)
if negative:
assert attribute_value != value, 'Attribute value was "{}"'.format(attribute_value)
else:
assert attribute_value == value, 'Attribute value was "{}"'.format(attribute_value)
@given('the cookie "([^"]*)?" contains( not)* the value "([^"]*)?"')
@then('I expect that cookie "([^"]*)?"( not)* contains "([^"]*)?"')
def check_cookie_value(context, cookie_key, negative, value):
cookie = context.behave_driver.get_cookie(cookie_key)
cookie_value = cookie.get('value')
if negative:
assert cookie_value != value, 'Cookie value was "{}"'.format(cookie_value)
else:
assert cookie_value == value, 'Cookie value was "{}"'.format(cookie_value)
@given('the cookie "([^"]*)?" does( not)* exist')
def cookie_exists(context, cookie_key, negative):
cookie = context.behave_driver.get_cookie(cookie_key)
if negative:
assert cookie is None, 'Cookie exists: {}'.format(cookie)
else:
assert cookie is not None
@then('I expect that cookie "([^"]*)?"( not)* exists')
def check_cookie_exists(context, cookie_key, negative):
cookie = context.behave_driver.get_cookie(cookie_key)
if negative:
assert cookie is None, u'Cookie was present: "{}"'.format(cookie)
else:
assert cookie is not None, 'Cookie was not found'
@given(r'the element "([^"]*)?" is( not)* ([\d]+)px (broad|tall)')
@then(r'I expect that element "([^"]*)?" is( not)* ([\d]+)px (broad|tall)')
def check_element_size(context, element, negative, pixels, how):
elem_size = context.behave_driver.get_element_size(element)
if how == 'tall':
axis = 'height'
else:
axis = 'width'
if negative:
assert elem_size[axis] != int(pixels), 'Element size was "{}"'.format(elem_size)
else:
assert elem_size[axis] == int(pixels), 'Element size was "{}"'.format(elem_size)
@given(r'the element "([^"]*)?" is( not)* positioned at ([\d]+)px on the (x|y) axis')
@then(r'I expect that element "([^"]*)?" is( not)* positioned at ([\d]+)px on the (x|y) axis')
def check_element_position(context, element, negative, pos, axis):
element_position = context.behave_driver.get_element_location(element)
if negative:
assert element_position[axis] != int(pos), 'Position was {} on the {} axis'.format(element_position[axis], axis)
else:
assert element_position[axis] == int(pos), 'Position was {} on the {} axis'.format(element_position[axis], axis)
@given('a (alertbox|confirmbox|prompt) is( not)* opened')
@then('I expect that a (alertbox|confirmbox|prompt) is( not)* opened')
def check_modal(context, modal, negative):
if negative:
assert context.behave_driver.has_alert is False
else:
assert context.behave_driver.has_alert is True
@then('I expect that the path is( not)* "([^"]*)?"')
def check_path(context, negative, value):
current_url = context.behave_driver.current_url
path = urlparse(current_url).path
if negative:
assert path != value, 'The path was "{}"'.format(path)
else:
assert path == value, 'Expected the path to be "{}", but saw the path "{}"'.format(value, path)
@then('I expect that element "([^"]*)?" (has|does not have) the class "([^"]*)?"')
def check_element_has_class(context, element, has, classname):
    negative = 'not' in has
has_class = context.behave_driver.element_has_class(element, classname)
if negative:
assert not has_class, 'Classes were {}'.format(context.behave_driver.get_element_attribute(element, 'class'))
else:
assert has_class, 'Classes were {}'.format(context.behave_driver.get_element_attribute(element, 'class'))
@then('I expect a new (window|tab) has( not)* been opened')
def check_window_opened(context, _, negative):
if negative:
assert not context.behave_driver.secondary_handles
else:
assert bool(context.behave_driver.secondary_handles)
@then('I expect the url "([^"]*)?" is opened in a new (tab|window)')
def check_url_new_window(context, url, _):
current_handle = context.behave_driver.primary_handle
for handle in context.behave_driver.secondary_handles:
context.behave_driver.switch_to_window(handle)
if context.behave_driver.current_url == url:
context.behave_driver.switch_to_window(current_handle)
break
else:
context.behave_driver.switch_to_window(current_handle)
    if len(context.behave_driver.secondary_handles) < 1:
        raise AssertionError('No secondary handles found!')
    raise AssertionError("The url '{}' was not found in any handle".format(url))
@then('I expect that element "([^"]*)?" is( not)* focused')
def check_element_focused(context, element, negative):
element_focused = context.behave_driver.element_focused(element)
if negative:
assert not element_focused
else:
assert element_focused
@then('I expect that a (alertbox|confirmbox|prompt)( not)* contains the text "([^"]*)?"')
def check_modal_text_contains(context, modal_type, negative, text):
alert_text = context.behave_driver.alert.text
if negative:
        assert text not in alert_text
else:
assert text in alert_text
@then(r'I wait on element "([^"]*)?"(?: for (\d+)ms)*(?: to( not)* (be checked|be enabled|be selected|be visible|contain a text|contain a value|exist))*')
def wait_for_element_condition(context, element, milliseconds, negative, condition):
if milliseconds:
digits = ''.join(char for char in milliseconds if char.isdigit())
milliseconds = int(digits)
result = context.behave_driver.wait_for_element_condition(element, milliseconds, negative, condition)
if not negative:
negative = ''
assert result, 'was expecting element "{element}" to {negative} {condition}, but the result was {result}'.format(
element=element,
negative=negative,
condition=condition,
result=result)
@then("I expect the screen is ([\d]+) by ([\d]+) pixels")
def check_screen_size(context, x, y):
    screen_x, screen_y = context.behave_driver.screen_size
    assert (screen_x, screen_y) == (int(x), int(y)), \
        'Screen size was {}x{}'.format(screen_x, screen_y)
use_step_matcher('parse')
|
Objects and Data Structures Assessment Test
Test your knowledge.
** Answer the following questions **
Write a brief description of all the following Object Types and Data Structures we've learned about:
Numbers: they represent integers and floats in Python. Integers are whole numbers and floats are decimals. They can be used to perform arithmetic.
Strings: any data that is wrapped in quotes. Usually words, but can be anything.
Lists: different or similar data types that are separated by commas and bound together by square brackets. Contents inside of a list can be called upon through their index. Lists can be mutated and have many methods that can add or remove things in a list.
Tuples: are like lists in that they hold an ordered sequence of items, but unlike lists they are immutable, so they cannot be changed after creation.
Dictionaries: are sets of data separated by commas and held together by curly braces. Data inside comes in pairs known as key/value pairs. The key must be hashable (commonly a string) while the value can be anything. Dictionaries are mappings rather than sequences, so we reference the specific value we want by knowing its key.
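A quick illustration of each of these types:
num = 3.14                 # float (a Number)
greeting = 'hello'         # str
items = [1, 'two', 3.0]    # list (ordered, mutable)
point = (1, 2)             # tuple (ordered, immutable)
ages = {'alice': 30}       # dict (key/value pairs)
print(type(num), type(greeting), type(items), type(point), type(ages))
<class 'float'> <class 'str'> <class 'list'> <class 'tuple'> <class 'dict'>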
Numbers
Write an equation that uses multiplication, division, an exponent, addition, and subtraction that is equal to 100.25.
Hint: This is just to test your memory of the basic arithmetic commands, work backwards from 100.25
100**1*4/4+.50-.25
100.25
Answer these 3 questions without typing code. Then type code to check your answer.
What is the value of the expression 4 * (6 + 5)
What is the value of the expression 4 * 6 + 5
What is the value of the expression 4 + 6 * 5
4 * (6 + 5)
44
4 * 6 + 5
29
4 + 6 * 5
34
What is the type of the result of the expression 3 + 1.5 + 4?
==> float b/c it evaluates to 8.50
What would you use to find a number’s square root, as well as its square?
# Square root: (number**(1/2))
print(4**(1/2))
2.0
4**2
# Square: (number**2)
print(4**2)
16
Strings
Given the string 'hello' give an index command that returns 'e'. Enter your code in the cell below:
s = 'hello'
# Print out 'e' using indexing
print(s[1])
e
Reverse the string 'hello' using slicing:
s ='hello'
# Reverse the string using slicing
print(s[::-1])
olleh
Given the string hello, give two methods of producing the letter 'o' using indexing.
s ='hello'
# Print out the 'o'
# Method 1:
print(s[4])
o
# Method 2:
print(s[-1])
o
Lists
Build this list [0,0,0] two separate ways.
# Method 1:
my_list=[0,0,0]
print(my_list)
[0, 0, 0]
# Method 2:
my_list2 = [0]*3
print(my_list2)
[0, 0, 0]
Reassign 'hello' in this nested list to say 'goodbye' instead:
list3 = [1,2,[3,4,'hello']]
list3[2][2]='goodbye'
print(list3)
[1, 2, [3, 4, 'goodbye']]
Sort the list below:
list4 = [5,3,4,6,1]
list4.sort()
print(list4)
[1, 3, 4, 5, 6]
Dictionaries
Using keys and indexing, grab the 'hello' from the following dictionaries:
d = {'simple_key':'hello'}
# Grab 'hello'
print(d['simple_key'])
hello
d = {'k1':{'k2':'hello'}}
# Grab 'hello'
print(d['k1']['k2'])
hello
# Getting a little trickier
d = {'k1':[{'nest_key':['this is deep',['hello']]}]}
#Grab hello
print(d['k1'][0]['nest_key'][1][0])
hello
# This will be hard and annoying!
d = {'k1':[1,2,{'k2':['this is tricky',{'tough':[1,2,['hello']]}]}]}
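# One way to grab 'hello' (walking the nesting one step at a time):
print(d['k1'][2]['k2'][1]['tough'][2][0])
hello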
Can you sort a dictionary? Why or why not?
==> No, because a dictionary is unordered. That is why we use keys to obtain the values from within it. (You can, however, produce a sorted list of its keys or items with sorted().)
Tuples
What is the major difference between tuples and lists?
==> Tuples cannot be mutated, while lists can.
How do you create a tuple?
==> my_tuple=('a','b') will create a new tuple.
Sets
What is unique about a set? Sets can only hold unique values, meaning the values inside a set will never repeat.
Use a set to find the unique values of the list below:
list5 = [1,2,2,33,4,4,11,22,3,3,2]
print(set(list5))
{1, 2, 33, 4, 3, 11, 22}
Booleans
For the following quiz questions, we will get a preview of comparison operators. In the table below, a=3 and b=4.
Operator Description Example
== If the values of two operands are equal, then the condition becomes true. (a == b) is not true.
!= If values of two operands are not equal, then condition becomes true. (a != b) is true.
> If the value of left operand is greater than the value of right operand, then condition becomes true. (a > b) is not true.
< If the value of left operand is less than the value of right operand, then condition becomes true. (a < b) is true.
>= If the value of left operand is greater than or equal to the value of right operand, then condition becomes true. (a >= b) is not true.
<= If the value of left operand is less than or equal to the value of right operand, then condition becomes true. (a <= b) is true.
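A quick cell to check the table above with a = 3 and b = 4:
a, b = 3, 4
print(a == b, a != b, a > b, a < b, a >= b, a <= b)
False True False True False True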
What will be the resulting Boolean of the following pieces of code? (Answer first, then check by typing it in!)
# Answer before running cell
2 > 3
# False
False
# Answer before running cell
3 <= 2
# False
False
# Answer before running cell
3 == 2.0
# False
False
# Answer before running cell
3.0 == 3
# True
True
# Answer before running cell
4**0.5 != 2
# False
False
Final Question: What is the boolean output of the cell block below?
# two nested lists
l_one = [1,2,[3,4]]
l_two = [1,2,{'k1':4}]
# True or False?
l_one[2][0] >= l_two[2]['k1']
# 3 >= 4
# False
False
|
vocales = "aeiouAEIOU"
# str.maketrans requires both strings to have the same length,
# so map every vowel to a space, then strip the spaces afterwards.
cambio = " " * len(vocales)
result = str.maketrans(vocales, cambio)
palabra = input("Enter the word: ")
sin_espacios = palabra.translate(result)
print(sin_espacios.replace(' ', ''))
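# Example: for the input "murcielago" this prints "mrclg" (every unaccented
# vowel is removed; accented vowels are not in the translation table).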
|
from situation import Situation
from evaluate import pivotal, pivotality, criticality, prob_pivotal
from draw import draw, highlight_cause_effect
import matplotlib.pyplot as plt
def simulate(file, **attr):
    '''
    Builds the Situation for a case.
    parameters:
        dict loaded input file
        optional keyword attributes (e.g. names)
    return:
        Situation
    '''
    data = {}
    if 'ID' in file:
        data['ID'] = file['ID']
    data['structure'] = [(str(u), str(v)) for u, v in file['hierarchy']['structure']]
    if 'priors' in file['hierarchy']:
        data['priors'] = [(str(u), float(v)) for u, v in file['hierarchy']['priors']]
    if 'situation' in file:
situation = {}
situation['values'] = [(str(u), int(v)) for u,v in file['situation']['values']]
situation['thresholds'] = [(str(u), int(v)) for u,v in file['situation']['thresholds']]
if 'names' in attr:
hierarchy = Situation(hierarchy=data, situation=situation, names=attr['names'])
else:
hierarchy = Situation(hierarchy=data, situation=situation)
else:
if 'names' in attr:
hierarchy = Situation(data, names=attr['names'])
else:
hierarchy = Situation(data)
return hierarchy
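# Minimal usage sketch; the input layout below is an assumption inferred from
# the key accesses above, not a documented format:
#   case = {
#       'ID': 1,
#       'hierarchy': {'structure': [('A', 'C'), ('B', 'C')],
#                     'priors': [('A', 0.5), ('B', 0.5)]},
#       'situation': {'values': [('A', 1), ('B', 0)],
#                     'thresholds': [('C', 1)]},
#   }
#   hierarchy = simulate(case)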
|
import string
from itertools import chain
def split_text_by_sentences(text):
    """Split a text into sentences, each a list of lowercased words.
    Another approach to the word cleanup:
    `[word.strip(string.punctuation) for word in input_text.split() if word not in string.punctuation]`
    """
sentences = []
for sent in text.split('.'):
new_sent = ''
for char in sent:
if char in string.punctuation:
new_sent += " "
continue
new_sent += char.lower()
new_sent = new_sent.split()
if new_sent:
sentences.append(new_sent)
return sentences
def generate_tags(sent, tags, word, mg_tag, ctag='', added_words=None):
    """Generate tags for the tree and its subtrees."""
    if added_words is None:  # avoid the shared-mutable-default-argument pitfall
        added_words = []
    new_sent = sent[1:]
    if word in tags:
        new_word = ctag + ' ' + word if ctag else word
        if tags[word] == {False: {}}:
            added_words.append(new_word)
            return added_words
        if False in tags[word]:
            added_words.append(new_word)
        for i, w in enumerate(new_sent[0:mg_tag+1]):
            generate_tags(new_sent[i:], tags[word], w, mg_tag, new_word, added_words)
    # always return the list so callers can safely chain the results
    return added_words
def get_tags_for_sentence(sent, tags, mg_tag):
"""Move trough tree and subtrees."""
while sent:
yield generate_tags(sent, tags, sent[0], mg_tag, ctag='', added_words=[])
sent = sent[1:]
def get_tags_from_text(input_text, tags, mg_tag):
"""Generate list of tags for text by existing tags."""
text = split_text_by_sentences(input_text)
result_tags = [t for sent in text for t in get_tags_for_sentence(sent, tags, mg_tag)]
return {'tags': list(set(chain.from_iterable(result_tags)))}
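# Minimal usage sketch. The tag-tree layout is an assumption inferred from the
# membership checks above: a nested dict where a `False` key marks the end of
# a complete tag phrase.
if __name__ == '__main__':
    example_tags = {'new': {False: {}, 'york': {False: {}}}}
    print(get_tags_from_text('I love New York.', example_tags, mg_tag=1))
    # -> {'tags': [...]} containing 'new' and 'new york' (set order varies)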
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("organisations", "0053_auto_20180705_1041")]
operations = [
migrations.RunSQL(
"""
UPDATE organisations_organisationgeography
SET source='unknown';
"""
),
migrations.RunSQL(
"""
UPDATE organisations_divisiongeography
SET source='unknown';
"""
),
migrations.RunSQL(
"""
UPDATE public.organisations_divisiongeography AS dg
SET source='lgbce'
FROM organisations_organisationdivision od
WHERE od.id=dg.division_id
AND LEFT(od.official_identifier,4) != 'gss:'
AND LEFT(od.official_identifier,8) != 'unit_id:'
AND LEFT(od.official_identifier,9) != 'osni_oid:';
"""
),
]
|
name = input("请输入你的名字:")
print("你好" + name + "!")
|
from sys import argv  # run with e.g. kkk.txt as the argument
script, input_file = argv

def print_all(f):
    print(f.read())

def rewind(f):
    # read() leaves the file pointer at the end of the file, so we call
    # seek(0) to move it back to the start before reading again.
    # readline() does not need this: it reads sequentially, leaving the
    # pointer at the end of the current line, ready for the next one.
    # seek() only moves the pointer; it never changes the file's contents,
    # it just changes where the next read on this file object starts.
    f.seek(0)

def print_a_line(line_count, f):
    # In Python 3 the extra newline is suppressed with end="" inside print(),
    # not with a trailing comma as in Python 2; a comma at the call site
    # would not help, the fix has to be in the print call itself.
    print(line_count, f.readline(), end="")

current_file = open(input_file)

print("1st we print the whole:")
print_all(current_file)

print("\nrewind aha!\n")
rewind(current_file)

print("print 3 lines!")
current_line = 1
print_a_line(current_line, current_file)

current_line += 1  # same as current_line = current_line + 1
print_a_line(current_line, current_file)

current_line = current_line + 1
print_a_line(current_line, current_file)
|
# -*- coding: utf-8 -*-
import pytest
from django.core.management import call_command
from django import forms
from chloroform.models import (
Configuration,
Metadata,
)
@pytest.mark.django_db
def test_configuration_get_default():
call_command('loaddata', 'chloroform/tests/test_models.yaml')
c = Configuration.objects.get_default()
assert c.pk == 1
@pytest.mark.django_db
def test_configuration_get_target():
call_command('loaddata', 'chloroform/tests/test_models.yaml')
c = Configuration.objects.get(pk=1)
assert c.get_targets() == ['chloroform@emencia.com', 'chloroform@emencia.io']
c = Configuration.objects.get(pk=2)
assert c.get_targets() == []
def test_metadata_get_field_class():
m = Metadata(type='bool')
assert m.get_field_class() == forms.BooleanField
m = Metadata(type='phone')
assert m.get_field_class() == forms.CharField
def test_metadata_get_field_kwargs():
m = Metadata(type='bool',
verbose_name='abc',
description='def')
assert m.get_field_kwargs() == {
'label': 'abc',
'help_text': 'def',
}
m = Metadata(type='phone')
assert m.get_field_class() == forms.CharField
assert m.get_field_kwargs() == {
'label': '',
'help_text': '',
}
|
def BubbleSort(ara):
for i in range(0,len(ara)-1):
for j in range(0,len(ara)-1-i):
if (ara[j+1]<ara[j]):
ara[j],ara[j+1] = ara[j+1],ara[j]#ShortCut Swap Technique
'''tem = ara[j]
ara[j] = ara[j+1]
ara[j+1] = tem'''
return ara
ara=[3,9,12,3,4,1]
blb = BubbleSort(ara)
print(blb)
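# A common refinement (a sketch): stop as soon as a full pass makes no swaps,
# which makes the best case (an already-sorted input) run in O(n).
def BubbleSortEarlyExit(ara):
    for i in range(0, len(ara) - 1):
        swapped = False
        for j in range(0, len(ara) - 1 - i):
            if ara[j + 1] < ara[j]:
                ara[j], ara[j + 1] = ara[j + 1], ara[j]
                swapped = True
        if not swapped:  # no swaps means the array is already sorted
            break
    return ara

print(BubbleSortEarlyExit([3, 9, 12, 3, 4, 1]))  # [1, 3, 3, 4, 9, 12]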
|
value_in_meters = int(input('Enter value in meters:'))
value_in_kilometers = float(value_in_meters / 1000)
print(value_in_kilometers, 'km')
|
import torch
import torch.nn.functional as F
class AttenNet(torch.nn.Module):
"""
Args:
"""
def __init__(self, dim_embeddings,similarity='inner_product'):
super(AttenNet, self).__init__()
self.hidden_size = 256
self.lstm1 = torch.nn.LSTM(dim_embeddings, self.hidden_size, batch_first=True, bidirectional=True)
self.lstm2 = torch.nn.LSTM(self.hidden_size*8, self.hidden_size, batch_first=True, bidirectional=True)
self.Linear_Q = torch.nn.Linear(self.hidden_size*2, self.hidden_size*2)
self.Linear_A = torch.nn.Linear(dim_embeddings, self.hidden_size*2)
def forward(self, context, context_lens, options, option_lens):
'''context feed in lstm and pass a linear'''
context_lstm, (h_n, c_n) = self.lstm1(context)
context_max = context_lstm.max(1)[0] #max pooling of context
context_max_linear = self.Linear_Q(context_max)
context_max_linear = torch.unsqueeze(context_max_linear, 1)#add a dimension of context_lstm
logits = []
for i, option in enumerate(options.transpose(1, 0)):
option_lstm, (h_n, c_n) = self.lstm1(option)
atten = torch.bmm(option_lstm, context_lstm.transpose(1, 2))
atten_soft = F.softmax(atten, dim=2)
mix = torch.bmm(atten_soft, context_lstm)
cat1 = option_lstm
cat2 = mix
cat3 = option_lstm * mix
cat4 = option_lstm - mix
concate = torch.cat((cat1, cat2, cat3, cat4), dim=2)
option_lstm2, (h_n, c_n) = self.lstm2(concate)
option_lstm2 = option_lstm2.max(1)[0]
option_lstm2 = torch.unsqueeze(option_lstm2, 2)
logit = torch.bmm(context_max_linear, option_lstm2)
logit = torch.squeeze(logit)
logits.append(logit)
logits = torch.stack(logits, 1)
return logits
|
from django.db import models
# Create your models here.
class EnterName(models.Model):
first_name = models.CharField(max_length = 120)
last_name = models.CharField(max_length = 120)
def __unicode__(self):
return self.first_name
class Profile(models.Model):
food = models.CharField(max_length = 120)
first_name = models.CharField(max_length = 120)
last_name = models.CharField(max_length = 120)
age = models.BigIntegerField()
picture = models.ImageField(max_length = None)
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
print('\n', '-' * 20, ' String formatting ', '-' * 20)
# 1. No positions given: arguments are substituted in their default order
print('{} {}!'.format('hello', 'world'))
# 2. Explicit positions: arguments can be reordered and reused
print('My name is {1}{0}: surname {1}, given name {0}.'.format('令珂', '孟'))
# 3. Named parameters
print('Site name: {name}, URL: {url}'.format(name='runnoob', url='www.runnoob.com'))
# 4. Parameters from a dict, unpacked with **
dic = {'name': 'runnoob', 'url': 'www.runnoob.com'}
print('Site name: {name}, URL: {url}'.format(**dic))
# 5. Parameters from lists, indexed inside the placeholders
li = ['runnoob', 'www.runnoob.com']
print('Site name: {0[0]}, URL: {0[1]}'.format(li))
li2 = ['菜鸟教程', 'admin']
print('Site name: {0[0]}, URL: {0[1]}. User: {1[0]}, role: {1[1]}.'.format(li, li2))
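# 6. Since Python 3.6, f-strings express the same substitution inline:
name, url = 'runnoob', 'www.runnoob.com'
print(f'Site name: {name}, URL: {url}')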
|
# Copyright 2016 Michael Rice <michael@michaelrice.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sqlite3
from pitmaster.exceptions import *
class DBObject(object):
def __init__(self, filename=None):
"""
:param filename:
:return:
"""
self.filename = filename
self.db = sqlite3.connect(self.filename)
self.db.row_factory = sqlite3.Row
def _renew_connection(self):
self.db = sqlite3.connect(self.filename)
def save(self, info=None):
"""
:param info:
:return:
"""
if info is None:
raise MissingPropertyException("info must not be None!")
conn = self.db.cursor()
conn.execute("Insert into cook_data_entry VALUES (?,?,?,?,?,?)", (
None,
info["date"],
info["temp_f"],
info["temp_c"],
info["probe_name"],
info["cook_name"]
))
self.db.commit()
def list_all_by_cook(self, cook_name=None):
"""
Return all entries in the database for a given cook.
:param cook_name:
:return:
"""
conn = self.db.cursor()
query = conn.execute("Select * from cook_data_entry where cook_name=?",
cook_name)
info = query.fetchall()
return info
def get(self, entry_id=None):
"""
Return a temp_event entry based on its id.
:param entry_id:
:param max_results:
:return:
"""
conn = self.db.cursor()
query = conn.execute("SELECT * from cook_data_entry where id=?", entry_id)
info = query.fetchone()
return info
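# Example usage (a sketch; assumes a SQLite file that already contains the
# cook_data_entry table, whose column layout is inferred from save() above):
#   db = DBObject("pitmaster.db")
#   db.save({"date": "2016-07-04", "temp_f": 225.0, "temp_c": 107.2,
#            "probe_name": "pit", "cook_name": "brisket"})
#   rows = db.list_all_by_cook("brisket")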
|
from selenium import webdriver
link = "http://suninjuly.github.io/registration1.html"
try:
browser = webdriver.Chrome()
browser.get(link)
input1 = browser.find_element_by_css_selector('.first_block .first')
input1.send_keys("Ivan")
input2 = browser.find_element_by_css_selector('.first_block .second')
input2.send_keys("Petrov")
input3 = browser.find_element_by_css_selector('.first_block .third')
input3.send_keys("Smolensk@test.com")
input4 = browser.find_element_by_css_selector('.second_block .first')
input4.send_keys("+79999999999")
input5 = browser.find_element_by_css_selector('.second_block .second')
input5.send_keys("Russia")
button = browser.find_element_by_tag_name('button')
button.click()
finally:
    # close the browser after all the steps are done
browser.quit()
|
a,b=[int(x) for x in raw_input().split(":")]
c,d=[int(x) for x in raw_input().split(":")]
# minutes first: borrow an hour when the second minute count is larger
if b >= d:
    b = b - d
else:
    b = 60 + b - d
    a = a - 1
# hours next: wrap around midnight when the first time is earlier
if a >= c:
    a = a - c
else:
    a = 24 + a - c
# zero-pad both fields to two digits
print "%02d:%02d" % (a, b)
|
# Generated by Django 2.2.20 on 2021-07-20 14:38
from django.db import migrations
from django.db.models import JSONField
class Migration(migrations.Migration):
dependencies = [
("elections", "0059_election_tags"),
]
operations = [
migrations.AlterField(
model_name="election",
name="tags",
field=JSONField(blank=True, default=dict),
),
]
|
n = int(input())
for i in range(n):
arr = list(map(int,input().strip().split()))[:n]
print(arr)
|
# This is a Testcase for Decred, see https://wiki.decred.org/Block_Header_Specifications
#
# ocminer - admin AT suprnova.cc 16/02/01
#
# Teststart refers to the original block header bytes from the example given on the page
# The Hash must return df03ea8cb4a6f201c3e726f2f922a9249b39129bb59fa593ceb172e0f7c14d6e if your module is hashing correctly
import blake_hash
from binascii import unhexlify, hexlify
#teststart = '000000ba5cae4648b1a2b823f84cc3424e5d96d7234b39c6bb42800b2c7639be';
teststart = '010000006fe28c0ab6f1b372c1a6a246ae63f74f931e8365e15a089c68d61900000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a3ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a00000000000000000000000000000000ffff001d0000000000000000000000000000000029ab5f49f3e00100000000000000000000000000000000000000000000000000000000000000000000000000'
testbin = unhexlify(teststart)
hash_bin = blake_hash.getPoWHash(testbin)
out = hexlify(hash_bin[::-1])
print(out) # DEBUG
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 23 20:26:06 2017
@author: varar
"""
print("HelloWorld")
|
# -*- coding: utf-8 -*-
from datetime import datetime
from app.database.models import GitHubCommit
REPO = 'teradici/deploy'
commits = [
GitHubCommit('user1', 'user1@mock,com', 1, datetime.now(), '0001'),
GitHubCommit('user1', 'user1@mock,com', 1, datetime.now(), '0002'),
GitHubCommit('user2', 'user2@mock,com', 1, datetime.now(), '0003'),
]
content = [
{
"sha": "00003",
"commit": {
"author": {
"name": "user1",
"email": "user1@users.noreply.github.com",
"date": "2020-12-01T00:00:00Z"
}
},
"author": {
"login": "user1",
"id": 1
}
},
{
"sha": "00002",
"commit": {
"author": {
"name": "user1",
"email": "user1@users.noreply.github.com",
"date": "2020-11-01T00:00:00Z"
}
},
"author": {
"login": "user1",
"id": 1
}
},
{
"sha": "00001",
"commit": {
"author": {
"name": "user2",
"email": "user2@users.noreply.github.com",
"date": "2020-10-01T00:00:00Z"
}
},
"author": {
"login": "user2",
"id": 2
}
}
]
class MockResponse:
def __init__(self, status_code, content):
self.status_code = status_code
self.content = content
self.history = None
self.url = 'http://mock'
def json(self):
self.status_code = 403
return self.content
class MockRedis:
def __init__(self, conn):
self.conn = conn
self.cache = {}
def client(self):
if self.conn is not None:
return self
raise ValueError("connection not setup")
def set(self, key, val, ex):
if self.conn is None:
raise ValueError("connection not setup")
self.cache[key] = (val, datetime.now() + ex)
def get(self, key):
if self.conn is None:
raise ValueError("connection not setup")
if key not in self.cache:
return None
val, ex = self.cache[key]
if ex < datetime.now():
del self.cache[key]
return None
return val
def delete(self, key):
if self.conn is None:
raise ValueError("connection not setup")
if key in self.cache:
del self.cache[key]
def exists(self, key):
if self.conn is None:
raise ValueError("connection not setup")
return key in self.cache
def check_health(self):
if self.conn is None:
raise ValueError("connection not setup")
return True
def get_session(self):
return self
|
from .uppercase import uppercase_filter as uppercase
from .urlencode import urlencode_filter as urlencode
|
import os
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import optim
from scipy.misc import imread, imsave,imresize
from torchvision import transforms
from argparse import ArgumentParser
from networks import *
from fp16Optimizer import Fp16Optimizer
# from apex.fp16_utils import FP16_Optimizer
def build_parser():
parser = ArgumentParser()
parser.add_argument('--content', type=str,
dest='content', help='content image path',
metavar='CONTENT', required=True)
parser.add_argument('--style', type=str,
dest='style', help='style image path',
metavar='STYLE', required=True)
parser.add_argument('--output', type=str,
dest='output', help='output image path',
metavar='OUTPUT', required=True)
parser.add_argument('--fp16_mode',type=bool, help='mixed precision training',default=True)
return parser
def transform():
# pre and post processing for images
prep = transforms.Compose([
transforms.ToTensor(),
transforms.Lambda(lambda x: x[torch.LongTensor([2, 1, 0])]), # turn to BGR
transforms.Normalize(mean=[0.40760392, 0.45795686, 0.48501961], # subtract imagenet mean
std=[1, 1, 1]),
transforms.Lambda(lambda x: x.mul_(255)),
])
postpa = transforms.Compose([transforms.Lambda(lambda x: x.mul_(1. / 255)),
transforms.Normalize(mean=[-0.40760392, -0.45795686, -0.48501961], # add imagenet mean
std=[1, 1, 1]),
transforms.Lambda(lambda x: x[torch.LongTensor([2, 1, 0])]), # turn to RGB
])
postpb = transforms.Compose([transforms.ToPILImage()])
return prep,postpa,postpb
def postp(tensor,postpa,postpb): # to clip results in the range [0,1]
t = postpa(tensor)
t[t > 1] = 1
t[t < 0] = 0
img = postpb(t)
return img
# def cut_image(image, cut_num, width_range, pad_size):
# images = list()
# for i in range(cut_num):
# sub_image = image[ :, width_range[i][0]:width_range[i][1] + pad_size * 2,:]
# # tmp = np.reshape(sub_image, (1,) + sub_image.shape)
# images.append(Image.fromarray(sub_image.astype('uint8')))
# return images
def pad_image(image, height, width):
unit_size = 64
pad_height = height + (unit_size - height % unit_size) + unit_size
pad_width = width + (unit_size - width % unit_size) + unit_size
print(pad_height, pad_width)
pad_t_size = (pad_height - height) // 2
pad_b_size = pad_height - height - pad_t_size
pad_l_size = (pad_width - width) // 2
pad_r_size = pad_width - width - pad_l_size
pad_t = image[height - pad_t_size:, :, :]
pad_b = image[:pad_b_size, :, :]
image = np.concatenate([pad_t, image, pad_b], 0)
pad_l = image[:, width - pad_l_size:, :]
pad_r = image[:, :pad_r_size, :]
image = np.concatenate([pad_l, image, pad_r], 1)
return image
def unpad_image(image, org_height, org_width):
height, width, channel = image.shape
pad_t_size = (height - org_height) // 2
pad_l_size = (width - org_width) // 2
image = image[pad_t_size:pad_t_size + org_height, :, :]
image = image[:, pad_l_size:pad_l_size + org_width, :]
return image
def scale_img(img,max_dim=2000):
h,w,_=img.shape
scale=max_dim/max(h,w)
scale=scale if scale<1 else 1
img=imresize(img,(int(h*scale),int(w*scale)))
return img
def stylize():
model_dir = os.path.abspath(os.path.dirname(os.getcwd()))+'/Models/'
parser = build_parser()
options = parser.parse_args()
fp16_mode = options.fp16_mode
show_iter = 50
level = 3 #3
max_iter = [300, 200, 200] ## low_dim ... high_dim
vgg = VGG(fp16_mode=fp16_mode)
grammatrix = GramMatrix()
grammseloss = GramMSELoss()
mseloss = nn.MSELoss()
vgg.load_state_dict(torch.load(model_dir + 'vgg_conv.pth'))
for param in vgg.parameters():
param.requires_grad = False
prep, postpa, postpb=transform()
content_image = imread(options.content, mode='RGBA')
content_image = scale_img(content_image)
height, width, _ = content_image.shape
c_image = content_image[:, :, :3]
alpha = content_image[:, :, 3]
alpha = alpha[..., np.newaxis]
# preprocess large content images(padding and division)
# c_image = pad_image(c_image, height, width)
content_image = prep(c_image)
content_image = content_image.unsqueeze(0)
opt_img = content_image.clone() # .clone().detach()
style_image = imread(options.style, mode='RGB')
style_image = scale_img(style_image)
style_image = prep(style_image)
style_image = style_image.unsqueeze(0)
if torch.cuda.is_available():
vgg = vgg.cuda()
grammatrix = grammatrix.cuda()
grammseloss = grammseloss.cuda()
mseloss = mseloss.cuda()
content_image = content_image.cuda()
style_image = style_image.cuda()
opt_img = opt_img.cuda()
if fp16_mode:
vgg.half()
loss_scale = 0.01
_, _, content_h, content_w = content_image.shape
content_down_h, content_down_w = content_h // 2 ** (level - 1), content_w // 2 ** (level - 1)
opt_img = F.interpolate(opt_img, size=(content_down_h, content_down_w))
temp_content_image = F.interpolate(content_image, size=(content_down_h, content_down_w))
_, _, style_h, style_w = style_image.shape
style_down_h, style_down_w = style_h // 2 ** (level - 1), style_w // 2 ** (level - 1)
temp_style_image = F.interpolate(style_image, size=(style_down_h, style_down_w))
# define layers, loss functions, weights and compute optimization targets
style_layers = ['r11', 'r21', 'r31', 'r41', 'r51']
content_layers = ['r42']
loss_layers = style_layers + content_layers
loss_fns = [grammseloss] * len(style_layers) + [mseloss] * len(content_layers)
    if torch.cuda.is_available():
        loss_fns = [loss_fn.cuda() for loss_fn in loss_fns]
# these are good weights settings:
style_weights = [1e3 / n ** 2 for n in [64, 128, 256, 512, 512]]
content_weights = [1e0]
weights = style_weights + content_weights
# style_targets = [grammatrix(A.float()).detach() for A in vgg(style_image , style_layers)]
for i in range(level - 1, -1, -1):
_max_iter = max_iter[level - 1 - i]
if i == 0:
opt_img = F.interpolate(opt_img, size=(content_h, content_w))
temp_content_image = content_image
temp_style_image = style_image
elif i != level - 1:
opt_img = F.interpolate(opt_img, scale_factor=2)
_, _, content_down_h, content_down_w = opt_img.shape
temp_content_image = F.interpolate(content_image, size=(content_down_h, content_down_w))
style_down_h, style_down_w = style_h // 2 ** i, style_w // 2 ** i
temp_style_image = F.interpolate(style_image, size=(style_down_h, style_down_w))
style_targets = [grammatrix(A).detach() for A in vgg(temp_style_image, style_layers)]
opt_img=opt_img.requires_grad_(True) ########################
content_targets = [A.detach() for A in vgg(temp_content_image, content_layers)]
targets = style_targets + content_targets
optimizer = optim.LBFGS([opt_img], history_size=10)
if fp16_mode:
optimizer = optim.LBFGS([opt_img], lr=0.00001, history_size=10) ###0.00001,0.000001
optimizer = Fp16Optimizer(optimizer, loss_scale=loss_scale,fp16=False)
#optimizer=FP16_Optimizer(optimizer,loss_scale)
n_iter = [0]
while n_iter[0] <= _max_iter:
def closure():
optimizer.zero_grad()
out = vgg(opt_img, loss_layers)
layer_losses = [weights[a] * loss_fns[a](A.cuda(), targets[a].cuda()) for a, A in enumerate(out)]
loss = sum(layer_losses)
if fp16_mode:
optimizer.backward(loss)
else:
loss.backward()
#print (n_iter[0]," opt grad ",opt_img.grad)
n_iter[0] += 1
if n_iter[0] % show_iter == (show_iter - 1):
print('Iteration: {}, loss: {}'.format(n_iter[0] + 1, loss.item()))
del out, layer_losses
torch.cuda.empty_cache()
return loss
optimizer.step(closure)
if fp16_mode and n_iter[0]>=10:
optimizer.optimizer.param_groups[0]['lr']=1
# if fp16_mode:
# opt_img = optimizer.fp32_param_groups[0][0].float().data.clone()
opt_img=opt_img.float().data.clone()
out_img = postp(opt_img[0].cpu().squeeze().float(),postpa,postpb)
output = np.array(out_img)
imsave("../style_output/" + "{}.jpg".format(i), output)
# save result
out_img = postp(opt_img[0].cpu().squeeze().float(),postpa,postpb)
output = np.array(out_img)
# output = unpad_image(out_img, height, width)
output = np.concatenate([output, alpha], 2)
imsave(options.output, output)
if __name__=='__main__':
stylize()
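# Example invocation (a sketch; the script name and image paths are placeholders):
#   python stylize.py --content content.png --style style.jpg --output out.png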
|
import matplotlib.pyplot as plt
import numpy as np
from sklearn.utils import shuffle

def compute_y(x, W, bias):
    # decision line: [x, y] * [W[0], W[1]] + b = 0
    return (-x * W[0] - bias) / (W[1] + 1e-10)

def sigmoid(x):
    # np.exp is element-wise, so this works for single samples and batches alike
    return 1 / (1 + np.exp(-x))

def tanh_derivative(x):
    # d/dx tanh(x) = 1 - tanh(x)^2, needed by the backward pass below
    return 1 - np.tanh(x) ** 2
def plot_decision(X_, W_1, W_2, b_1, b_2):
    # clear the figure
    plt.clf()
    # plot the training set on top of a dense random sample of the plane
    plt.ylim((-0.5, 1.5))
    plt.xlim((-0.5, 1.5))
    xx = np.random.normal(0, 1, (100000))
    yy = np.random.normal(0, 1, (100000))
    X = np.array([xx, yy]).transpose()
    X = np.concatenate((X, X_))
    _, _, _, output = forward(X, W_1, b_1, W_2, b_2)
    y = np.squeeze(np.round(output))
    plt.plot(X[y == 0, 0], X[y == 0, 1], "b+")
    plt.plot(X[y == 1, 0], X[y == 1, 1], "r+")
    plt.show(block=False)
    plt.pause(0.1)
def forward(X, W_1, b_1, W_2, b_2):
    # X is the input data; W_1, b_1, W_2 and b_2 are the network's weights
    z_1 = X.dot(W_1) + b_1
    a_1 = np.tanh(z_1)
    z_2 = a_1.dot(W_2) + b_2
    a_2 = sigmoid(z_2)
    return z_1, a_1, z_2, a_2  # return all intermediate values
def backward(a_1, a_2, z_1, W_2, X, y, num_samples=1):
    # output layer
    dz_2 = a_2 - y  # derivative of the loss (logistic loss) w.r.t. z_2
    dw_2 = np.outer(a_1, dz_2) / num_samples
    # der(L/w_2) = der(L/z_2) * der(z_2/w_2) = dz_2 * der((a_1 * W_2 + b_2)/ W_2)
    db_2 = np.sum(dz_2) / num_samples
    # der(L/b_2) = der(L/z_2) * der(z_2/b_2) = dz_2 * der((a_1 * W_2 + b_2)/ b_2)
    # first layer
    da_1 = W_2.dot(dz_2)
    # der(L/a_1) = der(L/z_2) * der(z_2/a_1) = dz_2 * der((a_1 * W_2 + b_2)/ a_1)
    dz_1 = da_1 * tanh_derivative(z_1)
    # der(L/z_1) = der(L/a_1) * der(a_1/z_1) = da_1 .* der((tanh(z_1))/ z_1)
    dw_1 = np.outer(X, dz_1) / num_samples
    # der(L/w_1) = der(L/z_1) * der(z_1/w_1) = dz_1 * der((X * W_1 + b_1)/ W_1)
    db_1 = dz_1 / num_samples
    # der(L/b_1) = der(L/z_1) * der(z_1/b_1) = dz_1 * der((X * W_1 + b_1)/ b_1)
    return dw_1, db_1, dw_2, db_2
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
Y = np.array([0, 1, 1, 0])  # XOR labels; the logistic loss below expects 0/1 targets
num_epochs = 20
lr = 0.5
miu = 0.0
sigma = 1.0
num_hidden_neurons = 5
W_1 = np.random.normal(miu, sigma, (2, num_hidden_neurons))
# randomly initialize the hidden-layer weight matrix (2 = input dimension,
# num_hidden_neurons = number of hidden units) with mean miu and std sigma
b_1 = np.zeros(num_hidden_neurons)  # initialize the bias with 0
W_2 = np.random.normal(miu, sigma, (num_hidden_neurons, 1))
# randomly initialize the output-layer weight matrix (num_hidden_neurons hidden
# units, one output neuron) with mean miu and std sigma
b_2 = np.zeros(1)  # initialize the bias with 0
for epoch in range(num_epochs):
    X_shuffled, Y_shuffled = shuffle(X, Y, random_state=0)
    # one SGD step on the first shuffled sample
    x, y_true = X_shuffled[0], Y_shuffled[0]
    z_1, a_1, z_2, a_2 = forward(x, W_1, b_1, W_2, b_2)
    loss = (-y_true * np.log(a_2) - (1 - y_true) * np.log(1 - a_2)).mean()
    accuracy = (np.round(a_2) == y_true).mean()
    print(f"Loss: {loss}")
    print(f"Accuracy: {accuracy}")
    dw_1, db_1, dw_2, db_2 = backward(a_1, a_2, z_1, W_2, x, y_true)
    W_1 -= lr * dw_1  # lr is the learning rate
    b_1 -= lr * db_1
    W_2 -= lr * dw_2
    b_2 -= lr * db_2
# print(*forward(X[0], W_1, b_1, W_2, b_2), sep="\n\n")
|
from tkinter import *
from tkinter import ttk
root = Tk()
root.title('Jwngdaocrypto')
root.resizable(True,True)
root.configure(background='blue')
root.frame_header = ttk.Frame()
ttk.Label(root.frame_header, text = 'CRYPTOGRAPHY', style = 'Header.TLabel').grid(row = 0, column = 1)
##################################
ttk.Label(root.frame_header, text='Shift:', style='Header.TLabel').grid(row=1, column=0)
ttk.Label(root.frame_header, text='Text:', style='Header.TLabel').grid(row=2, column=0)
cipher_shift_menu = StringVar()
Spinbox(root.frame_header, from_=1, to=25, textvariable=cipher_shift_menu).grid(row=1, column=1)
text_entry = ttk.Entry(root.frame_header, width=100)
text_entry.grid(row=2, column=1)
def caesar():
##################################
ttk.Label(root.frame_header, text='CAESAR CIPHER', style='Header.TLabel').grid(row=0, column=1)
key = 'abcdefghijklmnopqrstuvwxyz'
def encrypt(n, plaintext):
result = ''
for l in plaintext.lower():
try:
i = (key.index(l) + n) % 26
result += key[i]
except ValueError:
result += l
return result.lower()
def decrypt(n, ciphertext):
result = ''
for l in ciphertext:
try:
i = (key.index(l) - n) % 26
result += key[i]
except ValueError:
result += l
return result
shift = cipher_shift_menu.get()
shift = int(shift)
text = text_entry.get()
text=str(text)
encrypted = encrypt(shift, text)
decrypted = decrypt(shift,text)
def finalencrypted():
enc_dec_text.insert(0, encrypted)
def finaldecrypted():
enc_dec_text.insert(0, decrypted)
choose =ttk.Label(root.frame_header, text='select->', style='Header.TLabel').grid(row=3, column=0)
encrypt_button = ttk.Button(root.frame_header, text='Encrypt', command=lambda:finalencrypted()).grid(row=3, column=1)
decrypt_button = ttk.Button(root.frame_header, text='Decrypt', command=lambda: finaldecrypted()).grid(row=3, column=2)
ttk.Label(root.frame_header, text='Encrypted/Decrypted Text:', style='Header.TLabel').grid(row=4, column=0)
enc_dec_text = ttk.Entry(root.frame_header, width=110)
enc_dec_text.grid(row=4, column=1)
root.frame_header.pack()
root.mainloop()
#####################################################################
def playfair():
ttk.Label(root.frame_header, text='PLAYFAIR CIPHER', style='Header.TLabel').grid(row=0, column=1)
ttk.Label(root.frame_header, text='__________________NOT REQUIRED__________________', style='Header.TLabel').grid(row=1, column=1)
key = "abcdefghijklmnopqrstuvwxyz"
key = key.replace(" ", "")
key = key.upper()
def matrix(x, y, initial):
return [[initial for i in range(x)] for j in range(y)]
result = list()
for c in key:
if c not in result:
if c == 'J':
result.append('I')
else:
result.append(c)
flag = 0
for i in range(65, 91):
if chr(i) not in result:
if i == 73 and chr(74) not in result:
result.append("I")
flag = 1
elif flag == 0 and i == 73 or i == 74:
pass
else:
result.append(chr(i))
k = 0
my_matrix = matrix(5, 5, 0)
for i in range(0, 5):
for j in range(0, 5):
my_matrix[i][j] = result[k]
k += 1
def locindex(c):
loc = list()
if c == 'J':
c = 'I'
for i, j in enumerate(my_matrix):
for k, l in enumerate(j):
if c == l:
loc.append(i)
loc.append(k)
return loc
def encrypt():
msg =text_entry.get()
msg=str(msg)
msg = msg.upper()
msg = msg.replace(" ", "")
end=''
i = 0
for s in range(0, len(msg) + 1, 2):
if s < len(msg) - 1:
if msg[s] == msg[s + 1]:
msg = msg[:s + 1] + 'X' + msg[s + 1:]
if len(msg) % 2 != 0:
msg = msg[:] + 'X'
encrypted=ans=''
enc_dec_text.insert(0,encrypted)
while i < len(msg):
loc = list()
loc = locindex(msg[i])
loc1 = list()
loc1 = locindex(msg[i + 1])
if loc[1] == loc1[1]:
enc_dec_text.insert(0,"{}{}".format(my_matrix[(loc[0] + 1) % 5][loc[1]], my_matrix[(loc1[0] + 1) % 5][loc1[1]]))
elif loc[0] == loc1[0]:
enc_dec_text.insert(0,"{}{}".format(my_matrix[loc[0]][(loc[1] + 1) % 5], my_matrix[loc1[0]][(loc1[1] + 1) % 5]))
else:
enc_dec_text.insert(0,"{}{}".format(my_matrix[loc[0]][loc1[1]], my_matrix[loc1[0]][loc[1]]))
i = i + 2
def decrypt():
msg = text_entry.get()
msg = str(msg)
msg = msg.upper()
msg = msg.replace(" ", "")
ans = ''
i = 0
for s in range(0, len(msg) + 1, 2):
if s < len(msg) - 1:
if msg[s] == msg[s + 1]:
msg = msg[:s + 1] + 'X' + msg[s + 1:]
if len(msg) % 2 != 0:
msg = msg[:] + 'X'
decrypted = ans = ''
enc_dec_text.insert(0, decrypted)
while i < len(msg):
loc = list()
loc = locindex(msg[i])
loc1 = list()
loc1 = locindex(msg[i + 1])
if loc[1] == loc1[1]:
enc_dec_text.insert(0,"{}{}".format(my_matrix[(loc[0] - 1) % 5][loc[1]], my_matrix[(loc1[0] - 1) % 5][loc1[1]]))
elif loc[0] == loc1[0]:
enc_dec_text.insert(0,"{}{}".format(my_matrix[loc[0]][(loc[1] - 1) % 5], my_matrix[loc1[0]][(loc1[1] - 1) % 5]))
else:
enc_dec_text.insert(0,"{}{}".format(my_matrix[loc[0]][loc1[1]], my_matrix[loc1[0]][loc[1]]))
i = i + 2
choose = ttk.Label(root.frame_header, text='select->', style='Header.TLabel').grid(row=3, column=0)
encrypt_button = ttk.Button(root.frame_header, text='Encrypt', command=lambda:encrypt()).grid(row=3,column=1)
decrypt_button = ttk.Button(root.frame_header, text='Decrypt', command=lambda:decrypt()).grid(row=3,column=2)
ttk.Label(root.frame_header, text='Encrypted/Decrypted Text:', style='Header.TLabel').grid(row=4, column=0)
enc_dec_text = ttk.Entry(root.frame_header, width=110)
enc_dec_text.grid(row=4, column=1)
root.frame_header.pack()
root.mainloop()
#################################################################################
def des():
import pyDes
ttk.Label(root.frame_header, text='DATA ENCRYPTION STANDARD', style='Header.TLabel').grid(row=0, column=1)
ttk.Label(root.frame_header, text='__________________NOT REQUIRED__________________', style='Header.TLabel').grid(row=1, column=1)
# For Python3, you'll need to use bytes, i.e.:
msg = text_entry.get()
data = str(msg)
k = pyDes.des(b"DESCRYPT", pyDes.CBC, b"\0\0\0\0\0\0\0\0", pad=None, padmode=pyDes.PAD_PKCS5)
d = k.encrypt(data)
def finalencrypted():
enc_dec_text.insert(0,d)
def finaldecrypted():
enc_dec_text.insert(0,k.decrypt(d))
choose = ttk.Label(root.frame_header, text='Select->:', style='Header.TLabel').grid(row=3, column=0)
encrypt_button = ttk.Button(root.frame_header, text='Encrypt', command=lambda:finalencrypted()).grid(row=3, column=1)
decrypt_button = ttk.Button(root.frame_header, text='Decrypt', command=lambda:finaldecrypted()).grid(row=3, column=2)
ttk.Label(root.frame_header, text='Encrypted/Decrypted Text:', style='Header.TLabel').grid(row=4, column=0)
enc_dec_text = ttk.Entry(root.frame_header, width=110)
enc_dec_text.grid(row=4, column=1)
root.frame_header.pack()
root.mainloop()
caesar_button = ttk.Button(root.frame_header,text='Caesar',command = lambda: caesar()).grid(row=3,column=0)
playfair_button = ttk.Button(root.frame_header,text='Playfair',command = lambda: playfair()).grid(row=3,column=1)
DES_button = ttk.Button(root.frame_header,text='DES',command = lambda: des()).grid(row=3,column=2)
root.frame_header.pack()
root.mainloop()
|
#!/usr/bin/python3
from app import app
import os
app.debug = True
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
|
import pytest
from unittest import mock
import builtins
def any_or_all():
n = input('')
num=input('')
numbers = list(str(num).split())
return all(int(x) >= 0 for x in numbers) and any(int(x) == int(str(x)[::-1]) for x in numbers)
def test_any_or_all():
    # side_effect feeds the two successive input() calls; a third positional
    # argument to patch.object would be bound to `spec`, not to a second value
    with mock.patch.object(builtins, 'input', mock.Mock(side_effect=['5', '12 9 61 5 14'])):
        assert any_or_all() == True

def test_any_or_all_2():
    with mock.patch.object(builtins, 'input', mock.Mock(side_effect=['5', '1 9 5 5 4'])):
        assert any_or_all() == True

def test_any_or_all_3():
    with mock.patch.object(builtins, 'input', mock.Mock(side_effect=['5', '4 6 5 7 8'])):
        assert any_or_all() == True
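# A direct sanity check of the all()/any() predicate logic, without any input
# mocking (the values mirror the first mocked test above):
def test_predicate_logic_directly():
    numbers = "12 9 61 5 14".split()
    assert all(int(x) >= 0 for x in numbers)
    assert any(x == x[::-1] for x in numbers)  # "5" reads the same reversed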
|
from rest_framework import serializers
from .models import Movie, ShowingRoom, Showing, Order
from django.db.models import Q, F
from .models import Status
class MovieSerializer(serializers.ModelSerializer):
class Meta:
model = Movie
fields = ['id', 'name', 'description']
class ShowingRoomSerializer(serializers.ModelSerializer):
class Meta:
model = ShowingRoom
fields = ['id', 'showing_room_name', 'capacity']
class ShowingSerializer(serializers.ModelSerializer):
movie_name = serializers.ReadOnlyField(source='movie.name')
remaining_seats = serializers.SerializerMethodField()
class Meta:
model = Showing
fields = ['id', 'price_per_ticket', 'movie', 'movie_name', 'showing_room', 'remaining_seats', 'start', 'end']
    def get_remaining_seats(self, obj):
        # serializer method field: exposes the showing's remaining seats
        return obj.remaining_seats
def validate(self, data):
# need this to catch overlapping: we do not want to schedule
# different showings at the same place and time
if Showing.objects.filter(showing_room=data['showing_room'])\
.filter(
Q(start__gte=data['start'], start__lt=data['end']) |
Q(end__gt=data['start'], end__lte=data['end']))\
.exists():
raise serializers.ValidationError(f"This date and time is already booked at {data['showing_room']}!")
        if data['start'] == data['end']:
            raise serializers.ValidationError("Start and end should not be the same!")
        if data['end'] < data['start']:
            raise serializers.ValidationError("End must not be earlier than start!")
return data
class OrderSerializer(serializers.ModelSerializer):
class Meta:
model = Order
fields = ['email', 'showing', 'quantity']
def create(self, validated_data):
# An alternative way to change remaining_seats without signals
showing = Showing.objects.get(id=validated_data['showing'].id)
showing.remaining_seats = showing.remaining_seats - validated_data['quantity']
if showing.remaining_seats == 0:
showing.status = Status.SOLD_OUT.value
showing.save()
return Order.objects.create(**validated_data)
def validate(self, data):
# Check if we still have that amount of tickets for the showing.
showing = Showing.objects.get(id=data['showing'].id)
if data['quantity'] > showing.remaining_seats:
raise serializers.ValidationError(f"Njet, izvinite! No, we do not have so many tickets, sorry!")
return data
|
from google.cloud import bigquery
import json
class BigQueryClient:
""" Extracts data from Google BigQuery
This class will not function without the json key.
"""
query = 'SELECT * FROM `fh-bigquery.reddit_comments.2015_01` '
def __init__(self, key_path = None):
if key_path is None:
key_path = 'key.json'
self.bigquery_client = bigquery.Client.from_service_account_json(key_path)
def run_query(self, query, limit = None):
"""
Performs a BigQuery query.
"""
        if limit is not None:
            query += ' LIMIT ' + str(limit)
query_job = self.bigquery_client.query(query) # API request
return query_job.result() # Waits for query to finish
def produce_json_data(self, producer, limit = None):
"""
Runs a query and converts the result into a json object.
"""
raw_data = self.run_query(BigQueryClient.query, limit)
for row in raw_data:
json_data = self.__data_row_to_json(row)
producer.send_data(json_data)
def __data_row_to_json(self, row):
"""
Converts data inside a row iterator to a json object.
"""
raw_data = {}
raw_data["body"] = row.body
raw_data["score_hidden"] = row.score_hidden
raw_data["archived"] = row.archived
raw_data["name"] = row.name
raw_data["author"] = row.author
raw_data["author_flair_text"] = row.author_flair_text
raw_data["downs"] = row.downs
raw_data["created_utc"] = row.created_utc
raw_data["subreddit_id"] = row.subreddit_id
raw_data["link_id"] = row.link_id
raw_data["parent_id"] = row.parent_id
raw_data["score"] = row.score
raw_data["retrieved_on"] = row.retrieved_on
raw_data["controversiality"] = row.controversiality
raw_data["gilded"] = row.gilded
raw_data["id"] = row.id
raw_data["subreddit"] = row.subreddit
raw_data["ups"] = row.ups
raw_data["distinguished"] = row.distinguished
raw_data["author_flair_css_class"] = row.author_flair_css_class
return json.dumps(raw_data)
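# Example usage (a sketch; requires a valid service-account key file, and
# `producer` is assumed to expose a send_data(json_str) method, as used above):
#   client = BigQueryClient("key.json")
#   rows = client.run_query(BigQueryClient.query, limit=10)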
|
'''
Fizz Buzz
'''
three = False
five = False
x =0
while x <16:
x += 1
three = False
five = False
if x%3 == 0:
three = True
if x%5 == 0:
five = True
if three and five:
print "Fizz Buzz"
elif three:
print "Fizz"
elif five:
print "Buzz"
else:
print x
|
from django.contrib import admin
import shop.models
admin.site.register(shop.models.Category)
admin.site.register(shop.models.Item)
|
from django.conf.urls import url
from .views import *
urlpatterns = [
url(r'upload',UploadPhoto.as_view())
]
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os
version = open(os.path.join("sc", "newsletter", "creator", "version.txt")).read().strip()
setup(name='sc.newsletter.creator',
version=version,
description="Creates HTML for sending newsletter",
long_description=open(os.path.join("sc", "newsletter", "creator", "README.txt")).read() + "\n" +
open(os.path.join("docs", "HISTORY.txt")).read(),
# Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Framework :: Plone :: 4.1",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='newsletter simples creator',
author='Simples Consultoria',
author_email='products@simplesconsultoria.com.br',
url='http://www.simplesconsultoria.com.br/',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
namespace_packages=['sc', 'sc.newsletter'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'Products.CMFPlone',
'five.grok>=1.3.0',
'plone.resource>=1.0b5',
],
entry_points="""
[z3c.autoinclude.plugin]
target = plone
""",
)
|
num_tests = int(raw_input())
for test_index in range(num_tests):
array_size = int(raw_input())
array = map(int, raw_input().split())
max_flips = int(raw_input())
stripes = []
gaps = []
total_zeroes = 0
max_stripe = 0
firstOneFound = False
curr_gap = 0
curr_stripe = 0
old_bit = 0
for bit in array:
if (bit == 0):
total_zeroes += 1
if (old_bit == 0):
curr_gap += 1
else:
stripes.append(curr_stripe)
if (curr_stripe > max_stripe):
max_stripe = curr_stripe
curr_gap = 1
else:
if (old_bit == 0):
if (firstOneFound):
gaps.append(curr_gap)
else:
firstOneFound = True
curr_stripe = 1
else:
curr_stripe += 1
old_bit = bit
if old_bit == 1:
stripes.append(curr_stripe)
if (curr_stripe > max_stripe):
max_stripe = curr_stripe
# max_stripe_available_flips = max_flips
for start_gap_index in range(len(gaps)):
gap_index = start_gap_index
available_flips = max_flips
curr_stripe = stripes[start_gap_index]
while (gap_index < len(gaps) and gaps[gap_index] <= available_flips):
curr_stripe += stripes[gap_index + 1]
available_flips -= gaps[gap_index]
gap_index += 1
if (curr_stripe > max_stripe):
max_stripe = curr_stripe
# max_stripe_available_flips = available_flips
result = max_stripe + min(max_flips, total_zeroes)
# print("Case #{0}: {1}".format(test_index + 1, result))
print(result)
|
c.tabs.position = "top"
c.completion.shrink = True
c.url.searchengines = {
"DEFAULT": "https://duckduckgo.com/?q={}",
"aw": "https://wiki.archlinux.org/?search={}",
"red": "https://reddit.com/r/{}"
}
|
"""This program checks for palindrom"""
import string
original_str = input("Enter string to check: ").lower()
#change original case to lower case
# modified_str = original_str.lower()
bad_chars = string.whitespace + string.punctuation
modified_str = ''
#check and store good characters
for char in original_str:
    if char in bad_chars:
        continue
    else:
        modified_str = modified_str + char
#check if palindrome
if modified_str == modified_str[::-1]:
    print(\
'The original string is: {:<30s}\n\
the modified string is: {:<30s}\n\
the reversal is: {:37s}\n\
String is a palindrome'.format(original_str, modified_str, modified_str[::-1]))
else:
    print(\
'The original string is: {:<30s}\n\
the modified string is: {:<30s}\n\
the reversal is: {:37s}\n\
String is not a palindrome'.format(original_str, modified_str, modified_str[::-1]))
|
import pandas as pd
import numpy as np
import sys
def parse(infile, outfile):
df = pd.read_csv(infile, sep="\t")
df = df.dropna(axis='columns', how='all')
if len(df.columns) != 22: return
names = [
"time", # sec
"q1", "q2", "q3", # rad
"dq1", "dq2", "dq3", # rad/sec
"qd1", "qd2", "qd3", # rad
"tau1", "tau2", "tau3", # newton-meter
"x1", "x2", "x3",
"dx1", "dx2", "dx3",
"xd1", "xd2", "xd3"
]
df.columns = names
# normalize timestamps
start_time = df["time"][0]
df["time"] -= start_time
# convert rad- to deg-values
for c in names[1:10]:
df[c] = np.rad2deg(df[c])
df.to_csv(outfile, decimal = ",", sep=";", line_terminator=";\n", header=False, index=False)
if __name__ == "__main__":
    if len(sys.argv) == 3:
        parse(sys.argv[1], sys.argv[2])
    else:
        print("usage: python %s <infile> <outfile>" % sys.argv[0])
|
# Implementation of classic arcade game Pong
# works but still needs some changes, run in codeskulptor
import simplegui
import random
# initialize globals - pos and vel encode vertical info for paddles
WIDTH = 600
HEIGHT = 400
BALL_RADIUS = 20
PAD_WIDTH = 8
PAD_HEIGHT = 80
HALF_PAD_WIDTH = PAD_WIDTH / 2
HALF_PAD_HEIGHT = PAD_HEIGHT / 2
LEFT = False
RIGHT = True
ball_pos = [WIDTH / 2, HEIGHT / 2]
ball_vel = [1, 1]
paddle1_pos=HEIGHT/2
paddle2_pos=HEIGHT/2
paddle1_vel=0
paddle2_vel=0
score_left=0
score_right=0
# initialize ball_pos and ball_vel for new bal in middle of table
# if direction is RIGHT, the ball's velocity is upper right, else upper left
def spawn_ball(direction):
global ball_pos, ball_vel,HEIGHT,BALL_RADIUS # these are vectors stored as lists
ball_pos=[WIDTH/2,HEIGHT/2]
if direction==RIGHT:
ball_vel[0]=2
ball_vel[1]=-2
elif direction==LEFT:
ball_vel[0]=-2
ball_vel[1]=-2
# define event handlers
def new_game():
global paddle1_pos, paddle2_pos, paddle1_vel, paddle2_vel # these are numbers
global score_left, score_right # these are ints
score_left=0
score_right=0
paddle1_pos=HEIGHT/2
paddle2_pos=HEIGHT/2
paddle1_vel=0
paddle2_vel=0
spawn_ball(RIGHT)
def draw(canvas):
    global score_left, score_right, paddle1_pos, paddle2_pos, ball_pos, ball_vel, HEIGHT, BALL_RADIUS, PAD_WIDTH, paddle1_vel, paddle2_vel
# update paddle's vertical position, keep paddle on the screen
if paddle1_pos>PAD_HEIGHT/2 and paddle1_pos<HEIGHT-(PAD_HEIGHT/2):
paddle1_pos+=paddle1_vel
elif paddle1_pos<=PAD_HEIGHT/2 :
paddle1_pos+=1
elif paddle1_pos>=HEIGHT-(PAD_HEIGHT/2):
paddle1_pos-=1
if paddle2_pos>PAD_HEIGHT/2 and paddle2_pos<HEIGHT-(PAD_HEIGHT/2) :
paddle2_pos+=paddle2_vel
elif paddle2_pos<=PAD_HEIGHT/2:
paddle2_pos+=1
elif paddle2_pos>=HEIGHT-(PAD_HEIGHT/2):
paddle2_pos-=1
# determine whether paddle and ball collide
if ball_pos[0]==BALL_RADIUS+PAD_WIDTH and ball_pos[1]<=paddle1_pos+PAD_HEIGHT/2 and ball_pos[1]>=paddle1_pos-PAD_HEIGHT/2 :
ball_vel[0]=-ball_vel[0]
elif ball_pos[0]==WIDTH-BALL_RADIUS-PAD_WIDTH and ball_pos[1]<=paddle2_pos+PAD_HEIGHT/2 and ball_pos[1]>=paddle2_pos-PAD_HEIGHT/2:
ball_vel[0]=-ball_vel[0]
elif ball_pos[0]==BALL_RADIUS:
score_right+=1
spawn_ball(RIGHT)
elif ball_pos[0]==WIDTH-BALL_RADIUS:
score_left+=1
spawn_ball(LEFT)
# update ball
if ball_pos[1]==BALL_RADIUS:
ball_vel[1]=-ball_vel[1]
elif ball_pos[1]==HEIGHT-BALL_RADIUS:
ball_vel[1]=-ball_vel[1]
ball_pos[0] += ball_vel[0]
ball_pos[1] += ball_vel[1]
# draw mid line and gutters
canvas.draw_line([WIDTH / 2, 0],[WIDTH / 2, HEIGHT], 1, "White")
canvas.draw_line([PAD_WIDTH, 0],[PAD_WIDTH, HEIGHT], 1, "White")
canvas.draw_line([WIDTH - PAD_WIDTH, 0],[WIDTH - PAD_WIDTH, HEIGHT], 1, "White")
# draw ball
canvas.draw_circle(ball_pos, BALL_RADIUS, 2, "RED", "White")
# draw paddles
canvas.draw_polygon([[0,paddle1_pos-PAD_HEIGHT/2 ], [PAD_WIDTH,paddle1_pos-PAD_HEIGHT/2 ], [PAD_WIDTH,paddle1_pos+PAD_HEIGHT/2 ],[0,paddle1_pos+PAD_HEIGHT/2 ]], 1,'Blue', 'WHITE')
canvas.draw_polygon([[WIDTH-PAD_WIDTH,paddle2_pos-PAD_HEIGHT/2 ], [WIDTH,paddle2_pos-PAD_HEIGHT/2 ], [WIDTH,paddle2_pos+PAD_HEIGHT/2 ], [WIDTH-PAD_WIDTH,paddle2_pos+PAD_HEIGHT/2 ]], 1,'Blue', 'WHITE')
# draw scores
canvas.draw_text(str(score_left), ((WIDTH / 2)-20, 30), 30, 'Green')
canvas.draw_text(str(score_right), ((WIDTH / 2)+10, 30), 30, 'Green')
def keydown(key):
global paddle1_vel, paddle2_vel, paddle1_pos, paddle2_pos
if key == simplegui.KEY_MAP["w"]:
paddle1_vel-=3
elif key == simplegui.KEY_MAP["s"]:
paddle1_vel+=3
elif key == simplegui.KEY_MAP["up"]:
paddle2_vel-=3
elif key == simplegui.KEY_MAP["down"]:
paddle2_vel+=3
def keyup(key):
global paddle1_vel, paddle2_vel, paddle1_pos, paddle2_pos
if key == simplegui.KEY_MAP["w"]:
paddle1_vel=0
elif key == simplegui.KEY_MAP["s"]:
paddle1_vel=0
elif key == simplegui.KEY_MAP["up"]:
paddle2_vel=0
elif key == simplegui.KEY_MAP["down"]:
paddle2_vel=0
# create frame
frame = simplegui.create_frame("Pong", WIDTH, HEIGHT)
frame.set_keydown_handler(keydown)
frame.set_keyup_handler(keyup)
frame.set_draw_handler(draw)
frame.add_button("Restart", new_game,100)
# start frame
new_game()
frame.start()
|
#!/usr/bin/python -tt
#
# Copyright (c) 2009, 2010, 2011 Intel, Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; version 2 of the License
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os, sys
import string
import shutil
import re
from mic import bootstrap
from mic import msger
from mic.conf import configmgr
from mic.utils import errors
import mic.utils.misc as misc
from mic.utils.proxy import get_proxy_for
BOOTSTRAP_URL="http://download.tizen.org/tools/micbootstrap"
def runmic_in_runtime(runmode, opts, ksfile, argv=None):
dist = misc.get_distro()[0]
if not runmode or not dist or "MeeGo" == dist:
return
if not argv:
argv = sys.argv
else:
argv = argv[:]
if runmode == 'bootstrap':
msger.info("Use bootstrap runtime environment")
name = "micbootstrap"
try:
repostrs = configmgr.bootstraps[name]
except:
repostrs = "name:%s,baseurl:%s," (name, BOOTSTRAP_URL)
proxy = get_proxy_for(BOOTSTRAP_URL)
if proxy:
repostrs += "proxy:%s" % proxy
repolist = []
if not name:
# use ks repo to create bootstrap
# so far it can't be effective for mic not in repo
#name = os.path.basename(ksfile)
#repostrs = misc.get_repostrs_from_ks(opts['ks'])
#for item in repostrs:
# repolist.append(convert_repostr(item))
msger.info("cannot find valid bootstrap, please check the config")
msger.info("Back to native running")
return
else:
for reponame, repostr in repostrs.items():
repolist.append(convert_repostr(repostr))
runmic_in_bootstrap(name, argv, opts, ksfile, repolist)
else:
raise errors.RuntimeError('Invalid runmode: %s ' % runmode)
sys.exit(0)
def compare_rpmversion(ver1, ver2):
return ver1.split('.')[0] == ver2.split('.')[0] and \
ver1.split('.')[1] == ver2.split('.')[1]
def convert_repostr(repostr):
repo = {}
for item in repostr.split(','):
loc = item.find(':')
opt = item[0:loc]
if opt in ('name', 'baseurl', 'mirrolist', 'proxy', \
'proxy_username', 'proxy_password', 'debuginfo', \
'source', 'gpgkey', 'disable'):
if len(item) > loc:
repo[opt] = item[loc+1:]
else:
repo[opt] = None
return repo
def select_bootstrap(repomd, cachedir, bootstrapdir):
cfgmgr = configmgr
lvl = msger.get_loglevel()
msger.set_loglevel('quiet')
repo_rpmver = misc.get_rpmver_in_repo(repomd)
if not repo_rpmver:
msger.set_loglevel(lvl)
return (None, None)
# Check avaliable bootstrap
bootstrap_env = bootstrap.Bootstrap(homedir = bootstrapdir)
for bs in bootstrap_env.list():
if compare_rpmversion(repo_rpmver, bs['rpm']):
return (bs['name'], {})
for bsname, bsrepo in cfgmgr.bootstraps.items():
repolist = []
for repo in bsrepo.keys():
repolist.append(bsrepo[repo])
rpmver = None
try:
repomd = misc.get_metadata_from_repos(repolist, cachedir)
rpmver = misc.get_rpmver_in_repo(repomd)
except errors.CreatorError, e:
msger.set_loglevel(lvl)
raise
if not rpmver:
continue
if compare_rpmversion(repo_rpmver, rpmver):
msger.set_loglevel(lvl)
return (bsname, bsrepo)
msger.set_loglevel(lvl)
return (None, None)
def runmic_in_bootstrap(name, argv, opts, ksfile, repolist):
bootstrap_env = bootstrap.Bootstrap(homedir = opts['bootstrapdir'])
bootstrap_lst = bootstrap_env.bootstraps
setattr(bootstrap_env, 'rootdir', name)
if not bootstrap_lst or not name in bootstrap_lst:
msger.info("Creating bootstrap %s under %s" % \
(name, bootstrap_env.homedir))
bootstrap_env.create(name, repolist)
msger.info("Use bootstrap: %s" % bootstrap_env.rootdir)
# copy mic
msger.info("Sync native mic to bootstrap")
copy_mic(bootstrap_env.rootdir)
# bind mounts , opts['cachedir'], opts['tmpdir']
cwd = os.getcwd()
lst = [cwd, opts['outdir']]
if ksfile:
ksfp = os.path.abspath(os.path.expanduser(ksfile))
lst.append(os.path.dirname(ksfp))
if opts['logfile']:
logfile = os.path.abspath(os.path.expanduser(opts['logfile']))
lst.append(os.path.dirname(logfile))
if opts['local_pkgs_path']:
lppdir = os.path.abspath(os.path.expanduser(opts['local_pkgs_path']))
lst.append(lppdir)
# TBD local repo
# make unique and remain the original order
lst = sorted(set(lst), key=lst.index)
bindmounts = ';'.join(map(lambda p: os.path.abspath(os.path.expanduser(p)),
lst))
msger.info("Start mic command in bootstrap")
bootstrap_env.run(name, argv, cwd, bindmounts)
def get_mic_modpath():
try:
import mic
except ImportError:
raise errors.BootstrapError('Can\'t find mic module in host OS.')
else:
path = os.path.abspath(mic.__file__)
return os.path.dirname(path)
def get_mic_binpath():
# FIXME: please use mic.find_binary_path()
path = os.environ['PATH']
paths = string.split(path, os.pathsep)
for pth in paths:
fn = os.path.join(pth, 'mic')
if os.path.isfile(fn):
return fn
msger.warning("Can't find mic command")
# FIXME: how to handle unfound case?
def get_mic_libpath():
# so far mic lib path is hard coded
# TBD
return "/usr/lib/mic"
# the hard code path is prepared for bootstrap
def copy_mic(bootstrap_pth, bin_pth = '/usr/bin', lib_pth='/usr/lib', \
pylib_pth = '/usr/lib/python2.7/site-packages'):
# copy python lib files
mic_pylib = get_mic_modpath()
bs_mic_pylib = bootstrap_pth + os.path.join(pylib_pth, 'mic')
if os.path.commonprefix([mic_pylib, bs_mic_pylib]) == mic_pylib:
raise errors.BootstrapError('Invalid Bootstrap: %s' % bootstrap_pth)
shutil.rmtree(bs_mic_pylib, ignore_errors = True)
shutil.copytree(mic_pylib, bs_mic_pylib)
clean_files(".*\.py[co]$", bs_mic_pylib)
# copy lib files
mic_libpth = get_mic_libpath()
bs_mic_libpth = bootstrap_pth + os.path.join(lib_pth, 'mic')
if os.path.commonprefix([mic_libpth, bs_mic_libpth]) == mic_libpth:
raise errors.BootstrapError('Invalid Bootstrap: %s' % bootstrap_pth)
shutil.rmtree(bs_mic_libpth, ignore_errors = True)
shutil.copytree(mic_libpth, bs_mic_libpth)
os.system('cp -af %s %s' % (mic_libpth, os.path.dirname(bs_mic_libpth)))
# copy bin files
mic_binpth = get_mic_binpath()
bs_mic_binpth = bootstrap_pth + os.path.join(bin_pth, 'mic')
shutil.rmtree(bs_mic_binpth, ignore_errors = True)
shutil.copy2(mic_binpth, bs_mic_binpth)
# copy mic.conf
mic_cfgpth = '/etc/mic/mic.conf'
bs_mic_cfgpth = bootstrap_pth + mic_cfgpth
if not os.path.exists(os.path.dirname(bs_mic_cfgpth)):
os.makedirs(os.path.dirname(bs_mic_cfgpth))
shutil.copy2(mic_cfgpth, bs_mic_cfgpth)
# remove yum backend
try:
yumpth = "/usr/lib/mic/plugins/backend/yumpkgmgr.py"
os.unlink(bootstrap_pth + yumpth)
    except OSError:
        pass
def clean_files(pattern, dir):
if not os.path.exists(dir):
return
for f in os.listdir(dir):
entry = os.path.join(dir, f)
if os.path.isdir(entry):
clean_files(pattern, entry)
elif re.match(pattern, entry):
os.unlink(entry)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('neighborhood', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Budget',
fields=[
('id', models.AutoField(serialize=False, primary_key=True)),
('title', models.CharField(verbose_name='Budget Title', default='Community Budget', max_length=60)),
('total_funds', models.DecimalField(decimal_places=2, max_digits=14, verbose_name='Total Funds', default=0.0)),
('total_expenses', models.DecimalField(decimal_places=2, max_digits=14, verbose_name='Total Expenses', default=0.0)),
('residence_fee', models.DecimalField(decimal_places=2, max_digits=8, verbose_name='Residence Fee', default=10.0)),
('create_date', models.DateTimeField(verbose_name='Created on', default=django.utils.timezone.now)),
('neighborhood', models.ForeignKey(to='neighborhood.Neighborhood')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Expense',
fields=[
('types', models.CharField(choices=[('IMP', 'Improvement'), ('REP', 'Repair'), ('REC', 'Recreation'), ('FEE', 'Fee'), ('OTH', 'Other')], default='REP', max_length=3)),
('id', models.AutoField(serialize=False, primary_key=True)),
('title', models.CharField(verbose_name='Expense Title', default='Expense', max_length=60)),
('description', models.TextField(verbose_name='Description', default='Description of why and how this expense is needed')),
('cost', models.DecimalField(decimal_places=2, max_digits=12)),
('create_date', models.DateTimeField(verbose_name='Created on', default=django.utils.timezone.now)),
('start_date', models.DateTimeField(verbose_name='Starts on')),
('end_date', models.DateTimeField(verbose_name='Ends on')),
('type', models.CharField(verbose_name='Type of Expense', max_length=50)),
('approved', models.BooleanField(default=False)),
('budget', models.ForeignKey(to='budget.Budget')),
],
options={
},
bases=(models.Model,),
),
]
|
import asyncio
import sys
from pathlib import Path
from time import perf_counter
from urllib.parse import urlsplit
import aiofiles
import aiohttp
from torchvision import models
from tqdm.asyncio import tqdm
async def main(download_root):
download_root.mkdir(parents=True, exist_ok=True)
urls = {weight.url for name in models.list_models() for weight in iter(models.get_model_weights(name))}
async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=None)) as session:
await tqdm.gather(*[download(download_root, session, url) for url in urls])
async def download(download_root, session, url):
response = await session.get(url, params=dict(source="ci"))
assert response.ok
file_name = Path(urlsplit(url).path).name
async with aiofiles.open(download_root / file_name, "wb") as f:
async for data in response.content.iter_any():
await f.write(data)
if __name__ == "__main__":
download_root = (
(Path(sys.argv[1]) if len(sys.argv) > 1 else Path("~/.cache/torch/hub/checkpoints")).expanduser().resolve()
)
print(f"Downloading model weights to {download_root}")
start = perf_counter()
asyncio.get_event_loop().run_until_complete(main(download_root))
stop = perf_counter()
minutes, seconds = divmod(stop - start, 60)
print(f"Download took {minutes:2.0f}m {seconds:2.0f}s")
|
class Indicator:
def __init__(self, utils, config, logger, timeframe):
self.logger = logger
self.utils = utils
self.cfg = config
self.timeframe = timeframe
async def analyze(self):
raise NotImplementedError("Please implement this method u.u")
|
from google.appengine.ext import db
class Poem(db.Model):
title = db.StringProperty(required=True)
text = db.TextProperty(required=True)
page = db.StringProperty(required=False)
written = db.StringProperty(required=False)
created = db.DateTimeProperty(auto_now_add = True)
class Comment(db.Model):
title = db.StringProperty(required=True)
text = db.TextProperty(required=True)
username = db.StringProperty(required=True)
created = db.DateTimeProperty(auto_now_add = True)
class User(db.Model):
username = db.StringProperty(required=True)
password = db.StringProperty(required=True)
email = db.StringProperty(required=True)
user_class = db.CategoryProperty(required=True)
created = db.DateTimeProperty(auto_now_add = True)
class Error():
name_error = ''
pwd_error = ''
ver_error = ''
email_error = ''
error = False
|
name = ['hsj','yrq','zc',['shjd','shgdh'],'lgm','frt']
'''name2 = name
print(name)
print(name2)
name2 = name.copy()
name[2] = 'hsk'
print(name)
print(name2)'''
#name2 = name[:]
#name2 = name[0:-1]
#name[3] = 'hsk'
#print(name)
#print(name2)
import copy
name2 = copy.deepcopy(name)
name[3][0] = 'hsk'
print(name)
print(name2)
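# deepcopy duplicates the nested list as well, so mutating name[3][0]
# changes name but leaves name2 untouched; the two prints show:
#   ['hsj', 'yrq', 'zc', ['hsk', 'shgdh'], 'lgm', 'frt']
#   ['hsj', 'yrq', 'zc', ['shjd', 'shgdh'], 'lgm', 'frt']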
|
# File : i2c_test.py
import time
import math
import signal
import sys
from i2c_base import i2c_sensor
from i2c_compass import compass
from i2c_accel import accel
from i2c_fusion import fusion
f = None
def signal_handler(signal,frame):
global f
f.reader_stop()
sys.exit(0)
if __name__=="__main__":
print "I2C test"
timestamp = 0.0
T_MS = 500
# capture SIGINT
signal.signal(signal.SIGINT,signal_handler)
    try:
        f = fusion(T_ms=T_MS)
    except Exception as e:
        print e
        sys.exit(1)
    f.reader_start()
while True:
sys.stdout.write(str(f))
sys.stdout.flush()
        timestamp = timestamp+(T_MS/1000.0)
time.sleep(T_MS/1000.0)
|
import requests
import json
import os
import time
class Baidu:
def __init__(self):
self.path = os.path.abspath(".")
self.url = "https://image.baidu.com/search/acjson"
self.pages = 0
self.params = {
"tn": "resultjson_com",
"ipn": "rj",
"ct": 201326592,
"fp": "result",
"queryWord": "吸烟者",
"cl": 2,
"lm": -1,
"ie": "utf-8",
"oe": "utf-8",
"st": -1,
"word": "吸烟者",
"face": 0,
"istype": 2,
"nc": 1,
"pn": 30,
"rn": 30
}
self.headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.120 Safari/537.36"}
def get_total(self):
        # get the total number of images and compute the page count
try:
response = requests.get(self.url,params=self.params,headers=self.headers)
if response.status_code == 200:
json_str = response.content.decode()
json_dict = json.loads(json_str)
total= json_dict["displayNum"]
# total_list = total_str.split(",")
total = int(total)
print("图片数量:", total)
if total % 30 == 0:
self.pages = int(total/30)
else:
self.pages = int(total/30) + 1
print("总共页数:", self.pages)
return None
except Exception as e:
print("*" * 100)
print(e)
print("*" * 100)
def get_url_list(self, i):
self.params["pn"] = i*30
try:
response = requests.get(self.url, params=self.params, headers=self.headers)
if response.status_code == 200:
json_str = response.content.decode()
print(json_str)
json_dict = json.loads(json_str)
url_list = json_dict["data"][:-1]
print("获取了第%d页的图片url列表" % i)
return url_list
except Exception as e:
print("*" * 100)
print(e)
print("*" * 100)
def get_image(self, url):
try:
response = requests.get(url, headers=self.headers)
if response.status_code == 200:
return response.content
else:
return None
except Exception as e:
print("*" * 100)
print(e)
print("*" * 100)
    def save_images(self, url_list):
        path = os.path.join(self.path, "baidu_images")
        # make sure the output directory exists before writing files
        os.makedirs(path, exist_ok=True)
        for url_dict in url_list:
            url = url_dict.get("hoverURL")
            if not url:
                continue
            content = self.get_image(url)
            if content:
                filename = os.path.join(path, url.split("/")[-1])
                try:
                    with open(filename, "wb") as f:
                        f.write(content)
                    print(filename, "image written to baidu_images")
                except Exception as e:
                    print("*" * 100)
                    print(e)
                    print("*" * 100)
    def run(self):
        self.get_total()
        time.sleep(1)
        # page indices start at 0; pn is derived as i * 30 in get_url_list
        for i in range(self.pages):
            url_list = self.get_url_list(i)
            if url_list:
                self.save_images(url_list)
            time.sleep(1)
        print("Finished crawling images")
if __name__ == '__main__':
baidu = Baidu()
baidu.run()
|
import requests
import json
import os
from datetime import date
from requests.exceptions import HTTPError
from config import Config
def login(config):
url = config['url']+config['auth_point']
headers = {"content-type": "application/json"}
data = {"username": config['username'], "password": config['password']}
try:
r = requests.post(url, headers=headers, data=json.dumps(data))
r.raise_for_status()
token = r.json()['access_token']
return token
except HTTPError:
print ("HTTP Error")
return False
except Exception:
print ("Error")
return False
def getdata(config, token, process_date=None):
if not process_date:
process_date = str(date.today())
url = config['url']+config['data_point']
headers = {"content-type": "application/json", "Authorization": "JWT " + token}
data = {"date": str(process_date)}
try:
r = requests.get(url, headers=headers, data=json.dumps(data))
r.raise_for_status()
os.makedirs(os.path.join(config['directory'], process_date), exist_ok=True)
with open(os.path.join(config['directory'], process_date, str(process_date)+'.json'), 'w') as json_file:
data = r.json()
json.dump(data, json_file)
except HTTPError:
print(F"Http Error at date {process_date}")
if __name__ == '__main__':
config = Config(os.path.join('.', 'config.yaml'))
config = config.get_config('HT1_app')
token = login(config)
    if token:
        # use a name other than "date" to avoid shadowing the datetime import
        dates = ['2025-06-24', '2021-06-19', '2021-06-20', '2021-06-21']
        for dt in dates:
            getdata(config, token, dt)
|
import urllib2
from django.dispatch import receiver, Signal
from django.db.models.signals import post_init, post_save, pre_save
from prezisb.models import Prezentation
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
@receiver(post_save, sender=Prezentation)
def prezentation_post_save(sender, **kwargs):
"""
After the object saved into the database, download and store the thumbnail
represented in the thumbnail_url field.
If the object is saved not for the first time, the thumbnail_url will not
be downloaded agian if it is not changed.
"""
obj = kwargs["instance"]
if not obj.thumbnail or (kwargs["update_fields"] and (
"thumbnail_url" not in kwargs["update_fields"])
):
img_temp = NamedTemporaryFile(delete=True)
img_temp.write(urllib2.urlopen(obj.thumbnail_url).read())
img_temp.flush()
obj.thumbnail.save("%s_%s.jpg" % ("thumbnail", obj.title), File(img_temp))
|
# Gnome sort, O(n^2)
def gnome_sort(seq):
    i = 0
    while i < len(seq):
        if i == 0 or seq[i - 1] <= seq[i]:
            i += 1
        else:
            seq[i], seq[i - 1] = seq[i - 1], seq[i]
            i -= 1
def main():
    seq = [1, 5, 3, 4, 6, 2]
    gnome_sort(seq)
    print(seq)
if __name__ == '__main__':
main()
|
from model.group import Group
from utils.formatstrings import FormatStrings
from model.contact import Contact
import allure
def test_verify_group_list(app, db):
with allure.step("Given list of groups got from home page and list of groups got from db"):
ui_list = app.group.get_group_list()
db_list = db.get_group_list()
with allure.step("Then the info in the lists is equal"):
for i in range(0, len(db_list)):
db_list[i] = db_list[i].clear()
assert sorted(ui_list, key=Group.id_or_max) == sorted(db_list, key=Group.id_or_max)
def test_verify_contact_info_on_home_page(app, db):
with allure.step("Given list of contacts got from home page and list of contacts got from db"):
if len(db.get_contact_list()) == 0:
app.contact.create(Contact(first_name="Contact for verification"))
db_list = sorted(db.get_contact_list(), key=Contact.id_or_max)
home_page_list = []
for ind in range(0, app.contact.count()):
contact_from_home_page = app.contact.get_data_from_home_page_by_index(ind)
home_page_list.append(contact_from_home_page)
sorted_home_page_list = sorted(home_page_list, key=Contact.id_or_max)
with allure.step("Then the info in the lists is equal"):
for ind in range(0, app.contact.count()):
assert FormatStrings.clear_spaces(db_list[ind].first_name) == sorted_home_page_list[ind].first_name
assert FormatStrings.clear_spaces(db_list[ind].last_name) == sorted_home_page_list[ind].last_name
assert FormatStrings.clear_spaces(
FormatStrings.clear_breaks(db_list[ind].primary_address)) == FormatStrings.clear_spaces(
FormatStrings.clear_breaks(sorted_home_page_list[ind].primary_address))
assert sorted_home_page_list[ind].all_phones == FormatStrings.merge_phones_like_home_page(db_list[ind])
assert sorted_home_page_list[ind].all_emails == FormatStrings.merge_emails_like_home_page(db_list[ind])
def test_verify_contact_phones_on_view_page(app, db):
with allure.step("Given list of groups got from view page and list of groups got from db"):
if len(db.get_contact_list()) == 0:
app.contact.create(Contact(first_name="Contact for verification"))
db_list = sorted(db.get_contact_list(), key=Contact.id_or_max)
view_page_list = []
for contact in db_list:
contact_from_view_page = app.contact.get_data_from_view_page_by_id(contact.id)
view_page_list.append(contact_from_view_page)
with allure.step("Then the info in the lists is equal"):
for ind in range(0, app.contact.count()):
assert FormatStrings.merge_contact_primary_phones_like_view_page(
db_list[ind]) == FormatStrings.merge_contact_primary_phones_like_view_page(view_page_list[ind])
|
import random
import itertools as it
def point1(parent1, parent2):
"""Basic 1 point crossover for lists"""
if len(parent1) < 2:
return []
parent1, parent2 = list(parent1), list(parent2)
point = random.randint(1, len(parent1) - 1)
return [parent1[:point] + parent2[point:],
parent2[:point] + parent1[point:]]
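# Illustration: with parents [1, 2, 3, 4] and [5, 6, 7, 8] and a (normally
# random) cut point of 2, point1 returns [[1, 2, 7, 8], [5, 6, 3, 4]].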
def point2(parent1, parent2):
"""Basic 2 point crossover for lists"""
if len(parent1) < 3:
return []
parent1, parent2 = list(parent1), list(parent2)
point1 = random.randint(1, len(parent1) - 2)
point2 = random.randint(point1 + 1, len(parent1) - 1)
return [parent1[:point1] + parent2[point1:point2] + parent1[point2:],
parent2[:point1] + parent1[point1:point2] + parent2[point2:]]
def permutation(parent1, parent2):
"""
Crossover for permutations, parents should be dicts.
Inspired by order crossover 1 from http://www.cs.colostate.edu/~genitor/1995/permutations.pdf
Note that crossing over two same individuals won't always return the same.
"""
point1 = random.randint(1, len(parent1) - 2)
point2 = random.randint(point1 + 1, len(parent1) - 1)
    cut = parent1.values()[point1:point2]
    filler = [x for x in parent2.values() if x not in cut]
result = filler[point1+len(cut):] + cut + filler[:point1+len(cut)]
return [dict(zip(parent1.keys(), result))]
class Tournament:
"""Basic tournament selector for crossovers"""
def __init__(self, crossover_func=point2, tournament_size=20, crossovers=6):
self.crossover_func = crossover_func
self.tournament_size = tournament_size
self.crossovers = crossovers
def crossover(self, population):
"""Returns a list of new offsprings from population"""
if len(population) < self.tournament_size:
return []
tournament = sorted(random.sample(population, self.tournament_size), key=lambda x: -x[0])[:self.crossovers*2]
random.shuffle(tournament)
ret = []
for parents in it.izip_longest(*[iter(tournament)] * 2): # map it by pairs
ret += self.crossover_func(parents[0][1], parents[1][1])
return ret
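# Hypothetical usage sketch: population items are (fitness, individual)
# pairs, matching the sort key and indexing used in crossover() above.
#   selector = Tournament(crossover_func=point1, tournament_size=4, crossovers=2)
#   offspring = selector.crossover([(0.9, [1, 2, 3, 4]), (0.5, [5, 6, 7, 8]),
#                                   (0.3, [4, 3, 2, 1]), (0.1, [8, 7, 6, 5])])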
|
import argparse
import csv
import datetime
import json
import logging
import os
import secrets
from collections import OrderedDict, defaultdict, deque
from functools import lru_cache
from bs4 import BeautifulSoup
from wta_scrapper.mixins import Mixins
from wta_scrapper.models import Query
from wta_scrapper.score import Score
from wta_scrapper.utils import BASE_DIR, autodiscover
def init_logger(name):
logger = logging.Logger(name)
format_str = '%(asctime)s :: %(name)s - %(levelname)s - %(message)s'
formatter = logging.Formatter(format_str, datefmt='%Y-%m-%d')
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logger.addHandler(handler)
return logger
class MatchScrapper(Mixins):
def __init__(self, filename=None):
self.explorer = autodiscover()
self.logger = init_logger(self.__class__.__name__)
if filename is not None:
with open(self.explorer(filename=filename), 'r') as _file:
soup = BeautifulSoup(_file, 'html.parser')
self.soup = soup
self.tournaments = []
def __enter__(self):
return self.tournaments
def __exit__(self, type, value, traceback):
return False
def __getitem__(self, index):
return self.tournaments[0][index]
def __eq__(self, value):
keys = list(self.tournaments.keys())
return value in keys
def build(self, f, player_name=None,
year=None, date_as_string=True,
map_to_keys: dict = {}, **kwargs):
"""
Main entrypoint for creating a new matches JSON file
The application gets all the divs of the page and then filters
        them based on the provided criteria
Parameters
----------
        - `f` criteria to use for filtering the divs that contain the data to parse
- `player_name` name of the player to appear in the final returned value
        - `year` you can provide an explicit year to use for the returned values
- `date_as_string` indicates whether the final date should be a string
- `include_month` indicates if the month should be included in the final values
- `date_of_birth` provide a date of birth if you wish to integrate calculations related
to the player's date of birth and the dates related to the tournaments
- `map_to_keys` if you want to swap the retrieved tournament name by one that is more
suitable for the final return values use this parameter e.g. { Rogers cup by me: Rogers Cup }
- `kwargs` any other values that you wish would appear in the final values
Notes
-----
This was built on the latest page structure of the WTA website which could also
change in the future.
"""
divs = self.soup.find_all('div')
self.logger.info('Started.')
content = self._filter(divs, f)
if content:
base = None
for element in content:
header = element.find_next('div')
if header is not None:
if not header.is_empty_element:
attrs = header.get_attribute_list('class')[0]
if 'header' in attrs:
base = self._parse_tournament_header(header)
if base is not None:
# Construct the matches
table = element.find('table')
                        if table is not None and not table.is_empty_element:
updated_tournament = self._parse_matches(
table.find('tbody').find_all('tr'),
using=base
)
else:
updated_tournament = base
# Finally, integrate the footer
divs = header.parent.select('div')
footer = self._filter(divs, 'footer')
if footer:
updated_tournament = self._parse_footer(footer[-1], using=updated_tournament)
self.tournaments.append(updated_tournament)
# IMPORTANT: Reset the base in order to
# prevent the app from appending content
# to self.tournaments on each iteration
base = None
self._finalize(
player_name=player_name,
year=year,
date_as_string=date_as_string,
map_to_keys=map_to_keys,
**kwargs
)
else:
message = f'Could not find any matching tag in HTML page using the following criteria: {f}'
self.logger.info(message)
print(message)
return self.tournaments
@property
def number_of_tournaments(self):
return len(self.tournaments)
def _construct_tournament_header(self, information):
"""
        Constructs the dictionary that will be used to reference
        the tournament in the final dictionary
Parameters
----------
- information: a list of type [..., [..., ...], ..., ..., ...]
Result
------
Returns the tournament name and the constructed header
- (tournament_name, {...})
"""
tournament = {
'matches': [],
'missing_fields': []
}
if not information:
return None, tournament
try:
tour_title = information[1][0]
except (KeyError, IndexError):
name = country = None
tournament.update({'missing_fields': ['name', 'country']})
else:
try:
name, country = tour_title.split(',', 1)
        except ValueError:
            # This is done in
            # order to still provide a name to the
            # tournament if none was initially caught
name = information[:1][0]
country = None
tournament.update({'missing_fields': ['name', 'country']})
tournament.update({'name': name, 'country': country})
try:
tournament['date'] = information[1][1]
except (KeyError, IndexError):
tournament['date'] = None
tournament.update({'missing_fields': ['tour_date']})
try:
tournament['type'] = information[2]
except (KeyError, IndexError):
tournament['type'] = None
tournament.update({'missing_fields': ['type']})
try:
tournament['surface'] = information[4]
except (KeyError, IndexError):
tournament['surface'] = None
tournament.update({'missing_fields': ['surface']})
return tournament['name'], tournament
def _build_tournament_dict(self, **kwargs):
"""
Create a basic empty OrderedDict
"""
return OrderedDict(**kwargs)
def _parse_footer(self, footer, using=None):
"""
Parse the footer element in order to return
the playing ranking during the tournament
and their seeding if available
"""
player_rank_during_tournament = {
'rank': None,
'entered_as': None,
'seed_title': None
}
# Try to get the rank of the player
# during the tournament -;
rank_section = footer.find_next('span')
rank = rank_section.find_next_sibling('span')
# Sometimes, there is no player rank
# but an entry type - so, this tests
# if there is one or the other
entry_types = ['W', 'Q']
if rank.text.isnumeric():
player_rank = int(rank.text)
player_rank_during_tournament.update({
'rank': player_rank
})
else:
seed_text = self._normalize(rank.text)
if seed_text in entry_types:
player_rank_during_tournament.update({
'entered_as': seed_text
})
# There might also have both,
# so check also for that
seed_section = rank.find_next('span').find_next_sibling('span')
if seed_section is not None:
if not seed_section.is_empty_element:
seed_text = self._normalize(seed_section.text)
if seed_text in entry_types:
player_rank_during_tournament.update({
'entered_as': seed_text,
'seed_title': seed_section.get_attribute_list('title')[-1]
})
elif seed_text.isnumeric():
player_rank_during_tournament.update({
'entered_as': int(seed_text)
})
if using is not None:
            # Dynamically find the root key of the dictionary
# in order to update it with the rankings
using[list(using.keys())[-1]]['ranking'] = player_rank_during_tournament
return using
return player_rank_during_tournament
def _parse_tournament_header(self, header):
"""
Parse the header for each tournaments
Extract the tournament's name, level and other
characteristics useful for identifying the tournament
"""
base = self._build_tournament_dict(matches=[])
characteristics = []
if header is not None:
for child in header.children:
if child.name == 'h2':
# TODO: Some H2 tags have links in them
# and a 'title' -; maybe use that also
# to get tournament title with another
# method to parse the city from that
characteristics.append(child.text)
                if child is not None and child != '\n':
if child.name == 'div':
class_name = child.attrs['class']
if 'locdate' in str(class_name):
spans = child.find_all('span')
characteristics.append(
[spans[0].text, spans[1].text])
if 'meta' in str(class_name):
spans = self._filter(child.find_all('span'), 'value')
for span in spans:
characteristics.append(span.text)
name, details = self._construct_tournament_header(characteristics)
base.update({name: details})
characteristics = []
else:
return False
return base
def _parse_matches(self, matches, using=None):
"""
Parses the matches from the table
Parameters
----------
`matches` represents the table rows for each match
"""
if using is None:
base = {}
else:
base = using
base_match_template = {
'opp_name': None,
'link': None,
'nationality': None,
'details': {}
}
base_match = base_match_template.copy()
for _, row in enumerate(matches):
opponent_link = row.find('a')
if opponent_link is not None:
base_match.update({
'opp_name': opponent_link.get_attribute_list('title')[-1],
'link': opponent_link.get_attribute_list('href')[-1],
})
children = list(filter(lambda x: x != '\n', row.children))
for i, child in enumerate(children):
if child.name == 'td':
if i == 0:
divs = child.find_all('div')
if divs:
base_match['details']['round'] = divs[-1].text
if i == 1:
nationality_tag = child.find('img')
if nationality_tag is not None:
if nationality_tag.has_attr('alt'):
base_match['nationality'] = nationality_tag.get_attribute_list('alt')[-1]
else:
base_match['nationality'] = None
else:
base_match['nationality'] = None
if i == 2:
base_match['details']['opp_rank'] = child.get_text()
if i == 3:
base_match['details']['result'] = child.get_text()
score = child.find_next('td').text
base_match['details']['score'] = score
            # Append a copy of base_match otherwise
            # base_match will keep pointing at the same
            # object and will keep getting cleared or
            # modified by further calls, making the
            # final dict invalid
base['matches'].append(base_match.copy())
base_match.clear()
base_match = base_match_template.copy()
            # For whatever reason, when modifying the details
            # section of base_match, it also modifies the details
            # section of base_match_template despite the clear().
            # This results in the same score being appended in the
            # final dictionary and the match results being the same
            # when constructing the matches
base_match['details'] = {}
return base
def _date_difference_from_today(self, d):
"""
Calculates the difference between two dates
Parameters
----------
- `d` represents as a datetime.datetime format
"""
current_date = datetime.datetime.now().date()
return current_date.year - d.year
def _finalize(self, **kwargs):
"""
        Voluntarily, the initial dictionaries that were created per tournament
        contain the raw data with spaces and/or new lines. This section takes
        them, cleans the data within them and prepares them for final use
"""
pre_final_dict = self.tournaments
tournaments = []
# Done in order to initiate a
        # reverse countdown for IDs
tournaments_count = len(pre_final_dict)
self.logger.info(f'Finalizing for {tournaments_count} tournaments')
        # Some of the tournament names are very long
        # and not really adequate as a dictionary
        # key. This offers the possibility to map a specific
        # retrieved tournament name to one that is more suitable
values_to_map = {}
if 'map_to_keys' in kwargs:
values_to_map = kwargs.pop('map_to_keys')
if not isinstance(values_to_map, dict):
            raise TypeError('The tournament titles to map should be a dictionary')
date_of_birth = None
if 'date_of_birth' in kwargs:
date_of_birth = datetime.datetime.strptime(kwargs['date_of_birth'], '%Y-%m-%d')
kwargs.update({'age': self._date_difference_from_today(date_of_birth)})
for i, tournament in enumerate(pre_final_dict):
blank_dict = self._build_tournament_dict()
try:
matches = tournament.pop('matches')
        except (KeyError, AttributeError):
matches = []
# TODO: The first array has none values.
# Should prevent the empty array being
# appended when parsing the matches
# matches.pop(0)
for key, values in tournament.items():
if key is not None:
key = self._normalize(' '.join(self._deep_clean(key)), as_title=True)
if values_to_map:
try:
key = values_to_map[key]
except KeyError:
pass
blank_dict[key] = values
blank_dict[key].update(
{
'id': tournaments_count - i,
'matches': matches,
'name': key,
'country': self._normalize(values['country'], as_title=True)
}
)
tour_date = self._parse_date(blank_dict[key]['date'])
if tour_date is not None:
if 'date_as_string' in kwargs:
if kwargs['date_as_string']:
blank_dict[key]['date'] = str(tour_date)
else:
blank_dict[key]['date'] = tour_date
blank_dict[key]['year'] = tour_date.year
else:
blank_dict[key]['year'] = None
blank_dict[key]['ranking'] = values['ranking']
matches_count = len(matches)
for i, match in enumerate(matches):
match['details'] = self._deep_clean_multiple(match['details'])
match['id'] = matches_count - i
tournaments.append(blank_dict)
tournaments.append(kwargs)
self.tournaments = tournaments
self.logger.info('Adapting...')
        self.logger.info(f'Found and built {len(self.tournaments) - 1} tournaments')
self.logger.info("Call 'write_values_to_file' if you wish to output the values to a file")
@lru_cache(maxsize=5)
def get_matches(self):
for tournament in self.tournaments:
for key in tournament.keys():
yield tournament[key]['matches']
@property
def get_tournaments(self):
return self.tournaments
def write_values_to_file(self, values=None, file_format='json', **kwargs):
"""
Write the parsed values to a file of type JSON or CSV
"""
if values is None:
if self.tournaments is not None:
values = self.tournaments
else:
values = {}
new_file_name = secrets.token_hex(5)
if 'player' in kwargs:
if kwargs['player'] is not None:
new_file_name = '_'.join(kwargs['player'].split(' '))
file_to_write = os.path.join(
BASE_DIR,
f'{new_file_name}.{file_format}'
)
with open(file_to_write, 'w') as f:
if file_format == 'json':
try:
json.dump(values, f, indent=4)
except TypeError as e:
self.logger.error(
'Make sure "date_as_string" is set to true so that the date can be serialized correctly',
stack_info=True
)
raise
if file_format == 'csv':
writer = csv.writer(f)
if 'header' in kwargs:
values.insert(0, kwargs['header'])
                writer.writerows(values)
self.logger.info(f'Created file {file_to_write}')
def load(self, filename):
"""
Load a result file and return its data
"""
if not filename.endswith('json'):
filename = f'{filename}.json'
with open(f'data/{filename}', 'r') as f:
data = json.load(f)
self.logger.info(f'Loading {filename}')
return Query(data)
def loads(self, *filenames):
"""
Load multiple JSON files
Parameters
----------
filenames (list): files to load
"""
data = []
concat_tournaments = []
for name in filenames:
data.append(self.load(name))
for model in data:
            for tournament in model.tournaments:
                concat_tournaments.append(tournament)
number_of_tournaments = len(concat_tournaments)
for i, tournament in enumerate(concat_tournaments):
tournament['id'] = number_of_tournaments - i
self.tournaments = concat_tournaments
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Parse an HTML page for WTA matches')
parser.add_argument('-n', '--filename', type=str, required=True, help='The HTML file to parse')
parser.add_argument('--write', type=bool, help='Write parsed values to a JSON or CSV file')
parser.add_argument('--filter', type=str, required=True, help='A value used to filter the html tags on the WTA page')
parser.add_argument('--format', type=str, choices=['json', 'csv'], help='The format of the output file')
parser.add_argument('--player', type=str, help='Name of the player to parse file for')
parser.add_argument('--year', type=int, help='Year of the tournaments')
parsed_arguments = parser.parse_args()
scrapper = MatchScrapper(filename=parsed_arguments.filename)
scrapper.build(
parsed_arguments.filter,
player_name=parsed_arguments.player,
year=parsed_arguments.year
)
scrapper.write_values_to_file()
|
import discovery_query
import pytest
import json
def read_json_file(file_path):
"""Reads and parse a json file.
Parameters
----------
file_path : {str} the path to the json file.
Returns
-------
dict : a dictionary containing the json structure read from the file.
"""
with open(file_path) as json_file:
json_content = json_file.read()
json_data = json.loads(json_content)
    return json_data
def test_display_results():
json_data = read_json_file('pytest_data/query_ex1.json')
output = discovery_query.display_results(json_data)
assert type(output) is dict, "display_results() should return a dict"
assert "query" in output, "display_results() should have a key 'query'"
assert "count" in output, "display_results() should have a key 'count'"
assert type(output['query']) is str, "key 'query' of output dict should contain a string"
json_data = read_json_file('pytest_data/query_ex2.json')
output = discovery_query.display_results(json_data)
assert type(output) is dict, "display_results() should return a dict"
assert "count" in output, "display_results() should have a key 'count'"
assert type(output['query']) is str, "key 'query' of output dict should contain a string"
|
import random
database = {}
account = 100
#initialization
def init():
print("Welcome to bankPHP")
AccountAvailability = int(input("Do you have account in our bank? 1(Yes) 2 (No) \n"))
if AccountAvailability == 1 :
login()
elif AccountAvailability == 2 :
        register()
else:
print("You've entered an invalid number, try again ")
init()
#registration with entering first name,last name ,email,password
def register():
print("******* Register *******")
email = input("please enter your email address \n")
firstName = input("please enter your first name \n")
lastName = input("please enter your last name \n")
password = input("Create a password for yourself \n")
accountNumber = generationAccountNumber()
database[accountNumber] = [firstName, lastName,email,password]
print("Your account has been created")
print(f"Your account number is {accountNumber}")
print("=========================================")
print("Make sure to keep it safe")
login()
# login with entering account number and password
def login():
print("***** Login *****")
accountNumberFromUser = int(input("Please enter your account number? \n"))
passwordFromUser = input("Please enter your password \n")
    for accountNumber, userDetails in database.items():
        if accountNumber == accountNumberFromUser:
            if userDetails[3] == passwordFromUser:
                bankOperation(userDetails)
            else:
                print("Invalid account number or password. please try again")
                login()
            break
    else:
        # the account number was not found in the database
        print("Invalid account number or password. please try again")
        login()
#bank operation
def bankOperation(userDetails):
print(f"Welcome : {userDetails[0]} {userDetails[1]}")
selectedOptions = int(input("What would you like to do? (1) deposit (2) withdrawal (3) Logout (4) Exit \n"))
if selectedOptions == 1 :
deposit()
elif selectedOptions == 2 :
withdraw()
elif selectedOptions == 3 :
logout()
elif selectedOptions == 4:
exit()
else:
print("Invalid option selected")
bankOperation(userDetails)
#withdraw operation
def withdraw():
    global account  # persist the new balance instead of discarding it
    withdrawAmount = int(input(f"Your balance is {account} , How much do you want to withdraw? \n"))
    account = account - withdrawAmount
    print(f'Your new balance is {account}')
#Deposit operation
def deposit():
    global account  # persist the new balance instead of discarding it
    depositAmount = int(input(f"Your balance is {account} How much would you like to deposit? \n"))
    account = account + depositAmount
    print(f"Your new balance is {account}")
#for logging out of the function
def logout():
login()
def exit():
print("Thank you!")
# generating account number
def generationAccountNumber():
return random.randrange(1111111111,9999999999)
#Actual Banking system
init()
|
from sqlalchemy import Column, String, Float, Integer
from model.Base import Base
class Plan(Base):
__tablename__ = 'plan'
id = Column(Integer, primary_key=True)
lp = Column(String, unique=True)
easting = Column(Float)
northing = Column(Float)
|
# -*- coding: utf-8 -*-
"""
Created on Mon May 20 23:01:44 2019
@author: HP
"""
x="0ABCBDA"
y="0BDCABA"
def LCS(x,y):
m=len(x)-1
n=len(y)-1
c=[[0 for i in range(0,n+1)] for j in range(0,m+1)]
b=[[0 for i in range(0,n+1)] for j in range(0,m+1)]
for i in range(1,m+1):
for j in range(1,n+1):
if x[i]==y[j]:
c[i][j]=c[i-1][j-1]+1
b[i][j]=[-1,-1]
else:
if c[i-1][j]>=c[i][j-1]:
c[i][j]=c[i-1][j]
b[i][j]=[-1,0]
else:
c[i][j]=c[i][j-1]
b[i][j]=[0,-1]
return c,b
# with i not equal to j, the same approach gives the longest common repeating subsequence of one string
# number of insertions and deletions needed to turn one string into another: |x| - LCS(x, y)
def print_seq(c,b,i,j):
if i==0 or j==0:
return
elif b[i][j][0]==b[i][j][1]:
print_seq(c,b,i+b[i][j][0],j+b[i][j][1])
print(x[i])
else:
print_seq(c,b,i+b[i][j][0],j+b[i][j][1])
c,b=LCS(x,y)
print_seq(c,b,6,6)
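# For x = "0ABCBDA" and y = "0BDCABA" (index 0 is padding), the LCS has
# length c[6][6] == 4 and print_seq prints B, C, B, A one character per line.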
|
import streamlit as st
import time
st.title('Intro to Streamlit')
st.write('Displaying a progress bar')
'Start!!'
latest_iteration = st.empty()
bar = st.progress(0)
for i in range(100):
latest_iteration.text(f'Iteration {i+1}')
bar.progress(i + 1)
time.sleep(0.1)
#st.write('DataFrame')
# df = pd.DataFrame(
# np.random.rand(20, 3),
# columns=['a', 'b', 'c']
# )
# st.write(df)
# st.dataframe(df.style.highlight_max(axis=0), width=300, height=300) # width/height arguments can be set
# st.table(df.style.highlight_max(axis=0)) # cannot be sorted
# """
# # Chapter
# ## Section
# ### Subsection
# ```python
# import streamlit as st
# import numpy as np
# import pandas as pd
# ```
# """
# st.line_chart(df) # line chart
# st.area_chart(df) # area chart
#st.bar_chart(df) # bar chart
# df = pd.DataFrame(
# np.random.rand(100, 2)/[50,50] + [35.69,139.70],
# columns=['lat', 'lon'] # lat=latitude, lon=longitude
# )
# st.map(df)
# st.write('Display Image')
# img = Image.open('C:\\Users\\t--n6\\OneDrive\\画像\\コメント 2020-06-19 155252.png')
# st.image(img, caption='Test', use_column_width=True)
# checkbox
# if st.checkbox('Show Image'):
# img = Image.open('C:\\Users\\t--n6\\OneDrive\\画像\\コメント 2020-06-19 155252.png')
# st.image(img, caption='Test', use_column_width=True)
# select box
# option = st.selectbox(
#     'Tell me your favorite number',
#     list(range(1,11))
# )
# 'Your favorite number is', option
st.write('Interactive Widgets')
left_column, right_column = st.beta_columns(2)
button = left_column.button('Show text in the right column')
if button:
    right_column.write('This is the right column')
expander1 = st.beta_expander('Inquiry 1')
expander1.write('Answer to inquiry 1')
expander2 = st.beta_expander('Inquiry 2')
expander2.write('Answer to inquiry 2')
expander3 = st.beta_expander('Inquiry 3')
expander3.write('Answer to inquiry 3')
# text = st.text_input('Tell me your hobby.')
# condition = st.slider('How are you feeling right now?', 0, 100, 50)
# 'Your hobby is', text
# 'Condition:', condition
|
from eagles_ml.app import db
db.create_all()
|
from flask import Flask, request, redirect, session, json, g, render_template, flash, abort
from flask_sqlalchemy import SQLAlchemy
from flask_openid import OpenID
from flask_sslify import SSLify
from wtforms import Form, BooleanField, TextField, PasswordField, validators, SelectField
import urllib2
import werkzeug
import re
import logging
from logging.handlers import RotatingFileHandler
import boto.ec2
import boto.cloudformation
import time
import os
app = Flask(__name__)
app.config.from_pyfile('settings.cfg')
app.debug = app.config['DEBUG']
boto_logger = logging.getLogger('boto')
handler = RotatingFileHandler(app.config['LOG_DIR'] + '/' + __name__ + '.log', maxBytes=10000, backupCount=1)
if app.config['DEBUG'] == 'True':
handler.setLevel(logging.DEBUG)
boto_logger.setLevel(logging.DEBUG)
else:
# Force TLS and HSTS only in production
sslify = SSLify(app, permanent=True)
handler.setLevel(logging.INFO)
app.logger.addHandler(handler)
db = SQLAlchemy(app)
oid = OpenID(app)
app.logger.info('App started with debug mode: {0}\nApp running with Flask: {1}'.format(app.config['DEBUG'],app.config['USE_FLASK']))
class Role(db.Model):
id = db.Column(db.Integer, primary_key=True)
role = db.relationship('User', backref='user')
name = db.Column(db.String(40))
@staticmethod
def get_or_create(role_name):
rv = Role.query.filter_by(name=role_name).first()
if rv is None:
rv = Role()
rv.name = role_name
db.session.add(rv)
return rv
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
steam_id = db.Column(db.String(40))
nickname = db.Column(db.String(80))
role_id = db.Column(db.Integer, db.ForeignKey('role.id'))
@staticmethod
def get_or_create(steam_id):
rv = User.query.filter_by(steam_id=steam_id).first()
if rv is None:
rv = User()
rv.steam_id = steam_id
db.session.add(rv)
return rv
@staticmethod
def create_user(steam_id,nickname,role_id):
rv = User.query.filter_by(steam_id=steam_id).first()
if rv is None:
rv = User()
rv.steam_id = steam_id
rv.nickname = nickname
rv.role_id = role_id
app.logger.info('Created user - steam_id: "{0}" nickname: "{1}"'.format(steam_id,nickname))
db.session.add(rv)
else:
app.logger.debug('Existing user - steam_id: "{0}" nickname: "{1}"'.format(steam_id,nickname))
return rv
class Instance(db.Model):
id = db.Column(db.Integer, primary_key=True)
instance_id = db.Column(db.String(80))
ark_process_status = db.Column(db.String(80))
ark_server_status = db.Column(db.String(80))
ark_version = db.Column(db.String(80))
@staticmethod
def get_or_create(instance_id):
rv = Instance.query.filter_by(instance_id=instance_id).first()
if rv is None:
rv = Instance()
rv.instance_id = instance_id
db.session.add(rv)
return rv
if app.config['DEBUG'] == 'True':
app.logger.debug('Dropping database')
db.drop_all()
db.create_all()
Role.get_or_create('Admin')
Role.get_or_create('User')
User.create_user(app.config['ADMIN_USER_STEAM_ID'],app.config['ADMIN_USER_STEAM_NICK'],1)
db.session.commit()
_steam_id_re = re.compile('steamcommunity.com/openid/id/(.*?)$')
class UserAdminForm(Form):
roles = []
for role in Role.query.all():
app.logger.debug('name: "{0}" id: "{1}"'.format(role.name,role.id))
r = role.id, role.name
roles.append(r)
app.logger.debug('Roles: {0}'.format(roles))
role = SelectField(u'User Role', choices=roles)
def authenticated_user():
if g.user is None:
return False
return True
def user_has_role(role_id):
    if not authenticated_user():
        return False
app.logger.debug('g.user: "{0}"'.format(g.user.id))
user = User.query.filter_by(id=g.user.id).first()
if not user.role_id == role_id:
app.logger.debug('user_id "{0}" rejected by user_has_role() check rid {1} != {2}'.format(g.user.nickname,user.role_id,role_id))
return False
return True
def get_users(user_id):
if user_id:
users = User.query.filter_by(id=user_id).first()
elif user_id == False:
users = User.query.all()
else:
abort(500)
return users
def get_steam_userinfo(steam_id):
options = {
'key': app.config['STEAM_API_KEY'],
'steamids': steam_id
}
url = 'http://api.steampowered.com/ISteamUser/' \
'GetPlayerSummaries/v0001/?%s' % werkzeug.urls.url_encode(options)
rv = json.load(urllib2.urlopen(url))
return rv['response']['players']['player'][0] or {}
def get_ark_serverinfo(ipaddress):
app.logger.info('Checking remote ARK server status: {0}'.format(ipaddress))
ark_serverinfo = {}
# TODO - Really get some stats
ark_serverinfo['server_address'] = ipaddress
ark_serverinfo['process_status'] = "Offline"
ark_serverinfo['server_status'] = "Offline"
ark_serverinfo['current_version'] = "000000"
return ark_serverinfo
def get_aws_instances(instance_id):
statuses = False
conn = boto.ec2.connect_to_region(app.config['AWS_DEFAULT_REGION'],aws_access_key_id=app.config['AWS_ACCESS_KEY_ID'],aws_secret_access_key=app.config['AWS_SECRET_ACCESS_KEY'])
if instance_id:
f = {}
f['instance-id'] = instance_id
        f['instance-state-name'] = 'running'
instance = conn.get_only_instances(filters=f)
app.logger.debug('aws statuses {0}'.format(statuses))
if len(instance) >= 1:
ip_address = instance[0].ip_address
return ip_address
else:
return False
elif instance_id == False:
f = {}
f['instance-state-name'] = 'running'
statuses = conn.get_only_instances(filters=f)
app.logger.debug('running aws statuses {0}'.format(statuses))
        instance_ids = []
        for instance in statuses:
            instance_ids.append(instance.id)
        return instance_ids
else:
return False
def get_stack():
conn = boto.cloudformation.connect_to_region(app.config['AWS_DEFAULT_REGION'],aws_access_key_id=app.config['AWS_ACCESS_KEY_ID'],aws_secret_access_key=app.config['AWS_SECRET_ACCESS_KEY'])
try:
stacks = conn.describe_stacks(app.config['APP_UUID'])
except:
stacks = []
if len(stacks) == 1:
stack = stacks[0]
app.logger.debug('Existing stack: {0}'.format(stack.stack_id))
return stack
else:
return False
def create_stack(parameters):
conn = boto.cloudformation.connect_to_region(app.config['AWS_DEFAULT_REGION'],aws_access_key_id=app.config['AWS_ACCESS_KEY_ID'],aws_secret_access_key=app.config['AWS_SECRET_ACCESS_KEY'])
try:
stacks = conn.describe_stacks(app.config['APP_UUID'])
except:
stacks = []
if len(stacks) == 1:
stack = stacks[0]
instance_id = stack.outputs[0].value
        app.logger.debug('Existing stack: {0} stack instance {1}'.format(stack.stack_id,instance_id))
app.logger.debug('Existing stack instance: {0}'.format(stack.outputs[0].value))
return instance_id
else:
# Create stack after all
tpl_file = open(os.path.join(os.path.dirname(__file__),'lib/cloudformation.json'))
cfn_template_body = tpl_file.read()
tpl_file.close()
stack = conn.create_stack(app.config['APP_UUID'],template_body=cfn_template_body,parameters=parameters)
app.logger.debug('cloudformation stack create: {0}'.format(stack))
return stack
def delete_stack():
conn = boto.cloudformation.connect_to_region(app.config['AWS_DEFAULT_REGION'],aws_access_key_id=app.config['AWS_ACCESS_KEY_ID'],aws_secret_access_key=app.config['AWS_SECRET_ACCESS_KEY'])
try:
stacks = conn.describe_stacks(app.config['APP_UUID'])
except:
stacks = []
if len(stacks) == 1:
stack = stacks[0]
instance_id = stack.outputs[0].value
app.logger.info('Deleting stack: {0}'.format(stack.stack_id))
delete = stack.delete()
app.logger.debug('Delete: {0}'.format(delete))
return True
else:
abort(500)
def wait_for_snapshot_complete(snapshot_id):
conn = boto.ec2.connect_to_region(app.config['AWS_DEFAULT_REGION'],aws_access_key_id=app.config['AWS_ACCESS_KEY_ID'],aws_secret_access_key=app.config['AWS_SECRET_ACCESS_KEY'])
inprogress_snapshot = conn.get_all_snapshots(snapshot_id)
app.logger.debug('waiting for snap {0} status {1}'.format(inprogress_snapshot[0].id, inprogress_snapshot[0].status))
if 'completed' in inprogress_snapshot[0].status:
return True
else:
return False
def image_from_instance(instance_id):
conn = boto.ec2.connect_to_region(app.config['AWS_DEFAULT_REGION'],aws_access_key_id=app.config['AWS_ACCESS_KEY_ID'],aws_secret_access_key=app.config['AWS_SECRET_ACCESS_KEY'])
f = {}
f['name'] = app.config['APP_UUID']
amis = conn.get_all_images(filters=f)
if len(amis) == 1:
for ami in amis:
app.logger.debug('deleting ami: {0}'.format(ami.id))
            try:
                conn.deregister_image(ami.id)
            except:
                app.logger.error('deleting ami failed: {0}'.format(ami.id))
f = {}
f['description'] = app.config['APP_UUID']
snapshots = conn.get_all_snapshots(filters=f)
if len(snapshots) == 1:
for snapshot in snapshots:
app.logger.debug('deleting snapshot: {0}'.format(snapshot.id))
conn.delete_snapshot(snapshot.id)
vols = conn.get_all_volumes(filters={'attachment.instance-id': instance_id})
volume = vols[0]
snap = volume.create_snapshot(app.config['APP_UUID'])
app.logger.debug('snap: {0}'.format(snap.id))
while True:
# conn = boto.ec2.connect_to_region(app.config['AWS_DEFAULT_REGION'],aws_access_key_id=app.config['AWS_ACCESS_KEY_ID'],aws_secret_access_key=app.config['AWS_SECRET_ACCESS_KEY'])
app.logger.debug('waiting for snap {0} status {1}'.format(snap.id, snap.status))
if 'completed' in snap.status:
break
else:
time.sleep(10)
snap.update()
app.logger.debug('completed snap: {0}'.format(snap.id))
ami = conn.register_image(name=app.config['APP_UUID'],snapshot_id=snap.id,root_device_name='/dev/sda1',virtualization_type='hvm',architecture='x86_64')
app.logger.debug('ami: {0}'.format(ami))
return ami
@app.before_request
def before_request():
g.user = None
if 'user_id' in session:
g.user = User.query.get(session['user_id'])
try:
steamdata = get_steam_userinfo(g.user.steam_id)
except AttributeError:
app.logger.warning('Invalidated session missing steam data - user_id: {0}'.format(session['user_id']))
session.pop('user_id', None)
return redirect(oid.get_next_url())
g.user.nickname = steamdata['personaname']
g.user.avatar_url = steamdata['avatar']
app.logger.debug('steam_id: {0} steam_nickname: {1}'.format(g.user.steam_id, g.user.nickname))
app.logger.debug('steam_avatar_url: {0}'.format(g.user.avatar_url))
@app.route('/logout')
def logout():
session.pop('user_id', None)
return redirect(oid.get_next_url())
@app.route('/login')
@oid.loginhandler
def login():
if g.user is not None:
return redirect(oid.get_next_url())
return oid.try_login('http://steamcommunity.com/openid')
@oid.after_login
def create_or_login(resp):
match = _steam_id_re.search(resp.identity_url)
g.user = User.get_or_create(match.group(1))
steamdata = get_steam_userinfo(g.user.steam_id)
g.user.nickname = steamdata['personaname']
db.session.commit()
session['user_id'] = g.user.id
flash('You are logged in as %s' % g.user.nickname)
return redirect(oid.get_next_url())
@app.route('/users')
def users():
if user_has_role(1) or user_has_role(2):
return render_template('users.html', users=get_users(False))
else:
abort(401)
@app.route('/user/<user_id>', methods=['POST', 'GET'])
def user(user_id):
if user_has_role(1):
error = None
user = User.query.get(user_id)
form = UserAdminForm(obj=user)
app.logger.debug('form POST data {0}'.format(request.form))
# if request.method == 'POST' and form.validate():
if request.method == 'POST':
app.logger.debug('editing user {0}'.format(user.nickname))
user.role_id = request.form['role']
db.session.commit()
flash('Updated user_id {0} permissions'.format(user_id))
return render_template('user.html', user=get_users(user_id),form=form)
else:
abort(401)
@app.route('/')
def landingpage():
if g.user is None:
return render_template('login.html')
else:
return render_template('index.html')
@app.route('/instance_console')
def console():
if user_has_role(1) or user_has_role(2):
# app.logger.debug('starting aws instances: {0}'.format(start_aws_instances(False)))
stack = get_stack()
if stack == False:
return render_template('console.html')
elif stack.stack_status == 'CREATE_IN_PROGRESS':
flash('Stack is creating - please wait ... ')
return render_template('console.html', stack='error')
elif stack.stack_status == 'CREATE_COMPLETE':
flash('Stack is created!')
return render_template('console.html', aws_instances=get_aws_instances(False))
else:
error = 'Stack {0} status {1} invalid'.format(stack.stack_id,stack.stack_status)
return render_template('console.html',error=error,stack='error')
else:
abort(401)
@app.route('/instance/create')
def instance_create():
if user_has_role(1) or user_has_role(2):
authenticated_user()
stack = get_stack()
if stack == False:
conn = boto.ec2.connect_to_region(app.config['AWS_DEFAULT_REGION'],aws_access_key_id=app.config['AWS_ACCESS_KEY_ID'],aws_secret_access_key=app.config['AWS_SECRET_ACCESS_KEY'])
try:
f = {}
f['name'] = app.config['APP_UUID']
amis = conn.get_all_images(filters=f)
if len(amis) == 1:
for ami in amis:
boot_ami = ami.id
app.logger.debug('booting with ami {0}'.format(boot_ami))
else:
boot_ami = 'ami-9c1a42ff'
except:
boot_ami = 'ami-9c1a42ff'
p = [
('InstanceType','m4.large'),
('KeyName','yubikey'),
('AMI',boot_ami),
('SubnetId','subnet-760e6713 '),
('SecurityGroup','sg-fba5849e'),
('ElasticIpAllocationId','eipalloc-784a841d'),
]
stack = create_stack(p)
if 'arn' in stack:
flash('Creating new cloudformation stack '+stack)
return render_template('console.html', stack='error')
if 'i-' in stack:
return render_template('console.html', stack=stack)
else:
return render_template('console.html', stack=stack)
else:
abort(401)
@app.route('/instance/<instance_id>')
def instance_console(instance_id):
if user_has_role(1) or user_has_role(2):
authenticated_user()
serveraddress = get_aws_instances(instance_id)
instance = Instance.query.filter_by(instance_id=instance_id).first()
app.logger.debug('instance db {0}'.format(instance))
return render_template('instance.html', instance_id=instance_id, serveraddress=serveraddress, ark_serverinfo=instance)
else:
abort(401)
@app.route('/instance/<instance_id>/shutdown')
def instance_shutdown(instance_id):
if user_has_role(1) or user_has_role(2):
authenticated_user()
flash('AMI deleted: '+image_from_instance(instance_id))
stack = delete_stack()
if stack:
flash('Cloudformation stack deleted')
return render_template('index.html')
else:
abort(401)
@app.route('/instance/<instance_id>/update', methods=['PUT'])
def instance_update(instance_id):
instance = Instance.get_or_create(instance_id)
instance.ark_process_status = request.form['ark_process_status']
instance.ark_server_status = request.form['ark_server_status']
instance.ark_version = request.form['ark_version']
db.session.commit()
return 'OK'
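# Hypothetical client call for the update endpoint above (field names as
# consumed by instance_update; host and instance id are made up):
#   curl -X PUT http://localhost:5000/instance/i-0abc1234/update \
#        -d ark_process_status=Online -d ark_server_status=Online -d ark_version=278.54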
if app.config['USE_FLASK'] == 'True':
if __name__ == '__main__':
app.run(debug=app.config['DEBUG'])
|
class Room:
def __init__(self,descr):
self.__descr = descr
self.__north = None
self.__south = None
self.__east = None
self.__west = None
def __str__(self):
return str(self.__descr)
def getNorth(self):
return self.__north
def getSouth(self):
return self.__south
def getEast(self):
return self.__east
def getWest(self):
return self.__west
def setDescription(self,d):
self.__descr = d
def setNorth(self,n):
self.__north = n
def setSouth(self,s):
self.__south = s
def setEast(self,e):
self.__east = e
def setWest(self,w):
self.__west = w
|
# setup.py
from setuptools import setup, find_packages
setup(
name='appconfig',
version='0.0',
author='',
author_email='lingweb@shh.mpg.de',
description='Remote control for DLCE apps',
keywords='fabric',
license='Apache 2.0',
url='https://github.com/shh-dlce/appconfig',
packages=find_packages(),
platforms='any',
python_requires='!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*',
include_package_data=True,
zip_safe=False,
install_requires=[
'Fabric3>=1.11',
'fabtools-python>=0.19.7',
'python-dateutil',
'Jinja2',
'pytz',
'pycdstar>=0.4.1',
'cdstarcat>=0.6.2',
],
extras_require={
'dev': ['flake8'],
'test': [
'mock',
'pytest>=3.3',
'pytest-mock',
'pytest-cov',
],
},
long_description='',
classifiers=[
'Private :: Do Not Upload',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
entry_points={
'console_scripts': [
'appconfig=appconfig.__main__:main',
]
},
)
|
from django.db import models
class TodoItem(models.Model):
# todo_id = models.IntegerField()
title = models.CharField(max_length=200)
content = models.TextField()
due_date = models.DateTimeField()
def __str__(self):
return self.title
|
def arr2bin(arr):
    # reject any element that is not a plain int (bool is excluded, since type() is exact)
    if any(type(x) is not int for x in arr):
        return False
    return bin(sum(arr))[2:]
'''
Given an array containing only integers, add all the elements and return the
binary equivalent of that sum.
If the array contains any non-integer element (e.g. an object, a float, a
string and so on), return false.
Note: The sum of an empty array is zero.
arr2bin([1,2]) == '11'
arr2bin([1,2,'a']) == False
'''
|
#Author: James Nicholson
#Date: 6/5/2018
#Ask the user for a string and print out whether this string is a palindrome or not.
# (A palindrome is a string that reads the same forwards and backwards.)
def reverse(word):
x = ''
for i in range(len(word)):
x += word[len(word)-1-i]
return x
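# Note: Python slicing gives the same result in one step: word[::-1]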
word = input('give me a word:\n')
x = reverse(word)
if x == word:
print('This is a Palindrome')
else:
print('This is NOT a Palindrome')
|
import click
import datetime
from random import randint
from math import floor
from flask.cli import with_appcontext
from .models import Proposal, db
from grant.milestone.models import Milestone
from grant.comment.models import Comment
from grant.utils.enums import ProposalStatus, Category, ProposalStage
from grant.user.models import User
@click.command()
@click.argument('stage')
@click.argument('user_id')
@click.argument('proposal_id')
@click.argument('title')
@click.argument('content')
@with_appcontext
def create_proposal(stage, user_id, proposal_id, title, content):
proposal = Proposal.create(stage=stage,
user_id=user_id,
proposal_id=proposal_id,
title=title,
content=content)
db.session.add(proposal)
db.session.commit()
@click.command()
@click.argument('count', type=int)
@with_appcontext
def create_proposals(count):
user = User.query.filter_by().first()
for i in range(count):
if i < 5:
stage = ProposalStage.WIP
else:
stage = ProposalStage.COMPLETED
p = Proposal.create(
stage=stage,
status=ProposalStatus.LIVE,
title=f'Fake Proposal #{i}',
content=f'My fake proposal content, numero {i}',
brief=f'This is proposal {i} generated by "flask create-proposals"',
category=Category.ACCESSIBILITY,
target="123.456",
payout_address="fake123",
deadline_duration=100
)
p.date_published = datetime.datetime.now()
p.team.append(user)
p.date_approved = datetime.datetime.now()
p.accepted_with_funding = True
p.version = '2'
p.fully_fund_contibution_bounty()
db.session.add(p)
db.session.flush()
num_ms = randint(1, 9)
for j in range(num_ms):
m = Milestone(
title=f'Fake MS {j}',
content=f'Fake milestone #{j} on fake proposal #{i}!',
days_estimated='10',
payout_percent=str(floor(1 / num_ms * 100)),
immediate_payout=j == 0,
proposal_id=p.id,
index=j
)
db.session.add(m)
for j in range(100):
c = Comment(
proposal_id=p.id,
user_id=user.id,
parent_comment_id=None,
content=f'Fake comment #{j} on fake proposal #{i}!'
)
db.session.add(c)
Milestone.set_v2_date_estimates(p)
db.session.add(p)
db.session.commit()
print(f'Added {count} LIVE fake proposals')
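# Usage sketch (via the Flask CLI, assuming the commands above are
# registered on the app -- the brief text above already references
# "flask create-proposals"):
#   flask create-proposals 10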
@click.command()
@click.argument('dry', required=False)
@with_appcontext
def retire_v1_proposals(dry):
now = datetime.datetime.now()
proposals_funding_required = Proposal.query.filter_by(stage="FUNDING_REQUIRED").all()
proposals_draft = Proposal.query.filter_by(status=ProposalStatus.DRAFT).all()
proposals_pending = Proposal.query.filter_by(status=ProposalStatus.PENDING).all()
proposals_staking = Proposal.query.filter_by(status=ProposalStatus.STAKING).all()
modified_funding_required_count = 0
modified_draft_count = 0
modified_pending_count = 0
modified_staking_count = 0
deleted_draft_count = 0
if not proposals_funding_required and not proposals_draft and not proposals_pending and not proposals_staking:
print("No proposals found. Exiting...")
return
print(f"Found {len(proposals_funding_required)} 'FUNDING_REQUIRED' proposals to modify")
print(f"Found {len(proposals_draft)} 'DRAFT' proposals to modify")
print(f"Found {len(proposals_pending)} 'PENDING' proposals to modify")
print(f"Found {len(proposals_staking)} 'STAKING' proposals to modify")
if dry:
print(f"This is a dry run. Changes will not be committed to the database")
confirm = input("Continue? (y/n) ")
if confirm != "y":
print("Exiting...")
return
# move 'FUNDING_REQUIRED' proposals to a failed state
for p in proposals_funding_required:
if not dry:
new_deadline = (now - p.date_published).total_seconds()
p.stage = ProposalStage.FAILED
p.deadline_duration = int(new_deadline)
db.session.add(p)
modified_funding_required_count += 1
print(f"Modified 'FUNDING_REQUIRED' proposal {p.id} - {p.title}")
# reset proposal to draft state
def convert_proposal_to_v2_draft(proposal):
milestones = Milestone.query.filter_by(proposal_id=proposal.id).all()
if not dry:
# reset target because v2 estimates are in USD
proposal.target = '0'
proposal.version = '2'
proposal.stage = ProposalStage.PREVIEW
proposal.status = ProposalStatus.DRAFT
db.session.add(proposal)
for m in milestones:
# clear date estimated because v2 proposals use days_estimated (date_estimated is dynamically set)
m.date_estimated = None
db.session.add(m)
print(f"Modified {len(milestones)} milestones on proposal {p.id}")
# delete drafts that have no content
def delete_stale_draft(proposal):
if proposal.title or proposal.brief or proposal.content or proposal.category or proposal.target != "0":
return False
if proposal.payout_address or proposal.milestones:
return False
if not dry:
db.session.delete(proposal)
return True
for p in proposals_draft:
is_stale = delete_stale_draft(p)
if is_stale:
deleted_draft_count += 1
print(f"Deleted stale 'DRAFT' proposal {p.id} - {p.title}")
continue
convert_proposal_to_v2_draft(p)
modified_draft_count += 1
print(f"Modified 'DRAFT' proposal {p.id} - {p.title}")
for p in proposals_pending:
convert_proposal_to_v2_draft(p)
modified_pending_count += 1
print(f"Modified 'PENDING' proposal {p.id} - {p.title}")
for p in proposals_staking:
convert_proposal_to_v2_draft(p)
modified_staking_count += 1
print(f"Modified 'STAKING' proposal {p.id} - {p.title}")
if not dry:
print(f"Committing changes to database")
db.session.commit()
print("")
print(f"Modified {modified_funding_required_count} 'FUNDING_REQUIRED' proposals")
print(f"Modified {modified_draft_count} 'DRAFT' proposals")
print(f"Modified {modified_pending_count} 'PENDING' proposals")
print(f"Modified {modified_staking_count} 'STAKING' proposals")
print(f"Deleted {deleted_draft_count} stale 'DRAFT' proposals")
|
import abc
class Scheduler(object):
"""
This class is an abstract class for Schedulers -
Actual scheduler implementations need to provide these methods,
and should also provide an implementation of the TaskQueue base class.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def plan_bin(self, task_queue, bin_size):
"""
task_queue: a TaskQueue object.
bin_budget: a numeric.
This function accepts a task_queue (which can have whatever complexity
is needed for the given scheduling algorith, including multiple
internal queues, it just need to follow the interface), as well as
a maximum bin budget.
It must return a List of tasks (a bin) to be sent to the frontend.
"""
pass
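# A minimal concrete sketch of the interface above. It assumes a TaskQueue
# that supports len(), peek(), and pop(), and tasks with a numeric `cost`
# attribute -- all of these are assumptions, since the base class does not
# define the TaskQueue interface.
class FifoScheduler(Scheduler):
    """Greedy FIFO scheduler: fills the bin in arrival order."""
    def plan_bin(self, task_queue, bin_size):
        chosen = []
        budget = bin_size
        # Keep taking the next task while it still fits in the budget.
        while len(task_queue) > 0 and task_queue.peek().cost <= budget:
            task = task_queue.pop()
            budget -= task.cost
            chosen.append(task)
        return chosen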
|
# -*- encoding:utf-8 -*-
# __author__=='Gan'
# Note: This is an extension of House Robber.
# After robbing those houses on that street, the thief has found himself a new place for his thievery
# so that he will not get too much attention. This time, all houses at this place are arranged in a circle.
# That means the first house is the neighbor of the last one. Meanwhile, the security system for these houses
# remain the same as for those in the previous street.
# Given a list of non-negative integers representing the amount of money of each house,
# determine the maximum amount of money you can rob tonight without alerting the police.
# Credits:
# Special thanks to @Freezen for adding this problem and creating all test cases.
# 74 / 74 test cases passed.
# Status: Accepted
# Runtime: 36 ms
# Your runtime beats 28.27 % of python submissions.
class Solution(object):
def rob(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
if len(nums) == 0:
return 0
if len(nums) == 1:
return nums[0]
        len_origin = len(nums)
        # Doubling the array runs the dp a second time over the same houses:
        # whatever the first pass chose, the second pass settles on the
        # correct circular answer. E.g. [2, 3, 2] -> [2, 3, 2, 2, 3, 2];
        # the answer, 3, is the second pass's result.
nums = nums + nums
dp = [0] * len(nums)
dp[0] = nums[0]
dp[1] = max(nums[0], nums[1])
for i in range(2, len(nums)):
dp[i] = max(dp[i - 2] + nums[i], dp[i - 1])
        return dp[-1] - dp[len_origin - 1]
# 74 / 74 test cases passed.
# Status: Accepted
# Runtime: 28 ms
# Your runtime beats 90.79 % of python submissions.
class Solution(object):
def rob(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
if len(nums) == 0:
return 0
if len(nums) == 1:
return nums[0]
def linerob(nums):
last, now = 0, 0
for i in range(len(nums)):
last, now = now, max(last + nums[i], now)
return now
return max(
(linerob(nums[1:])),
(linerob(nums[:-1]))
)
if __name__ == '__main__':
print(Solution().rob([2, 1, 2, 4]))
print(Solution().rob([2, 1, 1, 1]))
print(Solution().rob([2, 1]))
print(Solution().rob([0, 0]))
print(Solution().rob([1, 1, 1]))
print(Solution().rob([1]))
print(Solution().rob([]))
|
__doc__ = """Flask-based hrsync server."""
import argparse
import os
import sys
import logging
from flask import Flask, render_template
APP_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
CONFIG_PATH = os.path.join(APP_ROOT,'etc','config','config.py')
DEFAULT_PORT = 8000
def parse_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--port', type=int, default=DEFAULT_PORT,
help='Port to start the hrsync server on')
parser.add_argument('--debug', action='store_true', default=False,
help='Launch server in debug mode')
args = parser.parse_args()
return args
def create_server():
app = Flask(__name__)
app.config.from_pyfile(CONFIG_PATH)
return app
app = create_server()
@app.errorhandler(404)
def page_not_found(error):
return render_template('error_404.html'), 404
@app.errorhandler(500)
def internal_server_error(error):
return render_template('error_500.html', msg=str(error)), 500
@app.before_first_request
def setup_logging():
log_format = '%(asctime)s [%(levelname)s] %(message)s'
log_handler = logging.StreamHandler(stream=sys.stderr)
log_handler.setFormatter(logging.Formatter(log_format))
log_handler.setLevel(logging.INFO)
app.logger.addHandler(log_handler)
app.logger.setLevel(logging.INFO)
if __name__ == '__main__':
args = parse_args()
app.run(host=app.config['FLASK_HOST'], port=args.port, debug=args.debug)
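# Typical invocation (per parse_args above; the module filename is an
# assumption):
#   python server.py --port 8000 --debug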
|