text stringlengths 4 1.02M | meta dict |
|---|---|
"""Connection class marshals data over UDP to daemon."""
import socket
import json
class _Connection(object):
    """UDP channel to the X-Ray daemon.

    Each segment is serialized to JSON, prefixed with the daemon's
    framing header, and shipped as one datagram per segment.
    """

    # Framing header the daemon expects before every JSON payload.
    _HEADER = '{"format": "json", "version": 1}\n'

    def __init__(self, collector_url):
        self._collector_url = collector_url
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    def _format(self, msg):
        """Encode *msg* as a header-prefixed UTF-8 datagram payload."""
        return (self._HEADER + json.dumps(msg)).encode('utf8')

    def report(self, msg):
        """Report to the daemon: one datagram per item in *msg*."""
        for item in msg:
            self._socket.sendto(self._format(item), self._collector_url)
| {
"content_hash": "77bdd834860b0194edfa3cd22a5b1340",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 86,
"avg_line_length": 31.9,
"alnum_prop": 0.622257053291536,
"repo_name": "nornagon/xray-python-opentracing",
"id": "fa593b4e2bc1a6d958c1e886971c6922ccfb85e3",
"size": "638",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xray_ot/connection.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16213"
}
],
"symlink_target": ""
} |
"""
website.docs
~~~~~~~~~~~~
website docs blueprint.
"""
| {
"content_hash": "4468e6971824c616a4437cef3bae6e2a",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 27,
"avg_line_length": 11.833333333333334,
"alnum_prop": 0.43661971830985913,
"repo_name": "alibaba/FlexGW",
"id": "3f90ff26649bdf7d59f5f6e181adc0c7594a047c",
"size": "95",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "website/docs/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "23138"
},
{
"name": "HTML",
"bytes": "48741"
},
{
"name": "JavaScript",
"bytes": "3986"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "67248"
},
{
"name": "Shell",
"bytes": "15075"
}
],
"symlink_target": ""
} |
"""Fichier contenant la volonté LeverAncre"""
import re
from secondaires.navigation.equipage.ordres.lever_ancre import LeverAncre as \
Ordre
from secondaires.navigation.equipage.ordres.long_deplacer import LongDeplacer
from secondaires.navigation.equipage.volonte import Volonte
class LeverAncre(Volonte):
    """Volition that picks one or more sailors to raise an anchor.

    Several sailors are needed when the anchor is heavy
    (``ancre.nb_lever`` > 1).
    """

    cle = "lever_ancre"
    ordre_court = re.compile(r"^la$", re.I)
    ordre_long = re.compile(r"^lever\s+ancre$", re.I)

    def choisir_matelots(self, exception=None):
        """Return the sailors best suited to carry out this volition.

        Returns ``None`` when there is no anchor, the anchor is not
        dropped, or a gangway is still in place; otherwise a list of
        ``(matelot, chemin, ancre)`` tuples, at most ``ancre.nb_lever``
        entries long.
        """
        proches = []
        navire = self.navire
        matelots = navire.equipage.get_matelots_libres(exception)
        graph = navire.graph
        ancre = navire.ancre
        if not ancre:
            return
        if not ancre.jetee:
            # Nothing to do: the anchor is not in the water.
            return
        if navire.passerelle:
            # The gangway must be withdrawn before the anchor is raised.
            return

        for n in range(ancre.nb_lever):
            proche = []
            for matelot in matelots:
                # Skip sailors already selected in a previous round.
                if matelot in [t[0] for t in proches]:
                    continue
                # Raising an anchor is demanding: require some stamina.
                if matelot.personnage.stats.endurance < 50:
                    continue
                origine = matelot.salle.mnemonic
                destination = ancre.parent.mnemonic
                if origine == destination:
                    proche.append((matelot, [], ancre))
                else:
                    chemin = graph.get((origine, destination))
                    if chemin:
                        proche.append((matelot, chemin, ancre))
            # BUG FIX: the original called min() unconditionally, which
            # raises ValueError when no remaining sailor can reach the
            # anchor. Stop selecting instead of crashing.
            if not proche:
                break
            # Keep the candidate with the shortest path to the anchor.
            proches.append(min(proche, key=lambda c: len(c[1])))

        return proches[:ancre.nb_lever]

    def executer(self, proches):
        """Execute the volition: queue the orders of each chosen sailor."""
        if not proches:
            return

        navire = self.navire
        for matelot, sorties, ancre in proches:
            ordres = []
            if sorties:
                # The sailor must first walk to the anchor's room.
                aller = LongDeplacer(matelot, navire, *sorties)
                ordres.append(aller)

            lever = Ordre(matelot, navire)
            ordres.append(lever)
            ordres.append(self.revenir_affectation(matelot))
            self.ajouter_ordres(matelot, ordres)

    def crier_ordres(self, personnage):
        """Have *personnage* shout the order aloud."""
        msg = "{} s'écrie : levez l'ancre !".format(personnage.distinction_audible)
        self.navire.envoyer(msg)
| {
"content_hash": "af89047f86714a6989c7e3d012e2c428",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 83,
"avg_line_length": 31.634146341463413,
"alnum_prop": 0.5813415574402467,
"repo_name": "vlegoff/tsunami",
"id": "5c899164461eea8c476236ad77934210bdceef1f",
"size": "4171",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/secondaires/navigation/equipage/volontes/lever_ancre.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "7930908"
},
{
"name": "Ruby",
"bytes": "373"
}
],
"symlink_target": ""
} |
from tackerclient.common import exceptions
from tackerclient.common import utils
from tackerclient.openstack.common.gettextutils import _
# Registry key under which the tacker shell looks up this API's client.
API_NAME = 'servicevm'
# Supported API versions mapped to their client implementation classes.
API_VERSIONS = {
    '1.0': 'tackerclient.v1_0.client.Client',
}
def make_client(instance):
    """Build a tacker client from a ClientManager *instance*.

    Only API version 1.0 is supported; any other configured version
    raises ``exceptions.UnsupportedVersion``.
    """
    version = instance._api_version[API_NAME]
    tacker_client = utils.get_client_class(
        API_NAME,
        version,
        API_VERSIONS,
    )
    instance.initialize()
    # Normalize the endpoint: no trailing slash.
    endpoint = instance._url.rstrip("/")
    if version != '1.0':
        raise exceptions.UnsupportedVersion(_("API version %s is not "
                                              "supported") % version)
    return tacker_client(
        username=instance._username,
        tenant_name=instance._tenant_name,
        password=instance._password,
        region_name=instance._region_name,
        auth_url=instance._auth_url,
        endpoint_url=endpoint,
        endpoint_type=instance._endpoint_type,
        token=instance._token,
        auth_strategy=instance._auth_strategy,
        insecure=instance._insecure,
        ca_cert=instance._ca_cert,
        retries=instance._retries,
        raise_errors=instance._raise_errors,
        session=instance._session,
        auth=instance._auth)
def Client(api_version, *args, **kwargs):
    """Return a tacker client.

    :param api_version: only 1.0 is supported now
    """
    client_class = utils.get_client_class(
        API_NAME,
        api_version,
        API_VERSIONS,
    )
    return client_class(*args, **kwargs)
| {
"content_hash": "fe3f1139ca1bf2abd7ea53e6bad54df6",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 76,
"avg_line_length": 36.2,
"alnum_prop": 0.5067805123053741,
"repo_name": "trozet/python-tackerclient",
"id": "9d48ef37036545fbacd49a052e8cc584aae77247",
"size": "2629",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tackerclient/tacker/client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "275732"
},
{
"name": "Shell",
"bytes": "294"
}
],
"symlink_target": ""
} |
import os
import shutil
import time
import threading
import traceback
from vmipl.vmipl_parser import VmiplParser
from vmipl_aux.constants import Constants
from vmipl_communication.connection import Connection
from vmipl_communication.network_connection import NetworkConnection
from vmipl_communication.receiver import Receiver
from vmipl_communication.receiver import PipeReceiver
from vmipl_communication.receiver import VmFinishedHandler
class ConnectionHandler(threading.Thread):
    """Handle one client connection to the VMI-PL front-end server.

    Parses the client's VMI-PL script, starts the corresponding QEMU VM,
    wires up probe receivers and reports the assigned IDs back to the
    client. Runs on its own thread, one instance per accepted socket.
    """

    def __init__(self, socket, running_vms, qemu_path, pipes_directory):
        threading.Thread.__init__(self)
        self.conn = NetworkConnection(socket)
        # Shared dict: channel number -> [script, finished_handler];
        # also owned by other handler threads (guarded by Constants.vm_lock
        # for allocation, see create_vm).
        self.running_vms = running_vms
        self.qemu_path = qemu_path
        self.pipes_directory = pipes_directory

    def run(self):
        # Only "start" commands are implemented; anything else is rejected.
        if self.conn.received_start_command():
            self.__handle_start()
        else:
            print "reconfiguration not implemented yet"
            self.conn.close()

    def __handle_start(self):
        """Full start flow: parse script, clone VM config, boot, wire probes."""
        try:
            parser = VmiplParser()
            vm_file_path, vmipl_script = self.conn.get_start_parameters()
            parsed_script = parser.parse(vmipl_script)
            event_groups = parsed_script.initialize_probes()
            # Work on a copy of the VM config so probe-specific edits do not
            # touch the user's original file.
            tmp_vm_file_path = vm_file_path + '_tmp'
            shutil.copyfile(vm_file_path, tmp_vm_file_path)
            self.create_vm(parsed_script, tmp_vm_file_path)
            server_response = self.build_server_response(parsed_script,
                                                         event_groups)
            self.conn.send_server_response(server_response)
            self.wait_for_open_connection(parsed_script.channel_number)
            receivers = self.setup_receivers(parsed_script.channel_number,
                                             event_groups)
            self.setup_stream_receivers(receivers, parsed_script.stream_probes)
            self.setup_finished_handler(parsed_script.channel_number,
                                        receivers, tmp_vm_file_path)
        except Exception as e:
            # Any failure is reported back to the client as the response.
            traceback.print_exc()
            self.conn.send_server_response(e.message)
        finally:
            self.conn.close()

    def create_vm(self, script, vm_file_path):
        """Allocate a channel, push the script to the kernel, boot the VM."""
        # Lock makes channel allocation + VM start atomic across handlers.
        with Constants.vm_lock:
            self.obtain_channel_number(script)
            self.transfer_script(script)
            self.start_vm(script, vm_file_path)

    def obtain_channel_number(self, script):
        """Reserve the first free netlink family number for this VM.

        Raises TooManyVMsException when every channel is taken.
        NOTE(review): assumes script.channel_number starts at 0 and that
        TooManyVMsException is defined elsewhere in the project — confirm.
        """
        first_channel = Connection.first_nl_family
        for i in range(first_channel, Connection.last_nl_family + 1):
            if not i in self.running_vms:
                self.running_vms[i] = [script]
                script.channel_number = i
                break
        if script.channel_number == 0:
            raise TooManyVMsException()

    def transfer_script(self, script):
        """Send the parsed script to the VMI-PL kernel channel."""
        conn = Connection(Constants.vmipl_channel)
        conn.send_script(script)
        conn.close()

    # this should be changed, since it is highly insecure and most likely
    # unnecessary!!!
    def start_vm(self, script, vm_file_path):
        """Launch QEMU in the background with the (possibly patched) config.

        SECURITY NOTE(review): the config path is interpolated straight into
        an os.system() shell command — a path containing shell metacharacters
        would be executed. Prefer subprocess with an argument list.
        """
        current_euid = os.geteuid()  # kept for the commented-out seteuid dance
        if os.path.isfile(vm_file_path):
            if script.stream_probes:
                # NOTE(review): add_stream_probe_configs returns None, so
                # tmp_path is always None here — looks like a leftover; the
                # method patches vm_file_path in place instead.
                tmp_path = self.add_stream_probe_configs(script.channel_number,
                                                         script.stream_probes, vm_file_path)
            # os.seteuid(0)
            os.system(self.qemu_path + " -readconfig " + vm_file_path + " &")
            # os.seteuid(current_euid)
        else:
            raise IOError("This is not a file: " + vm_file_path)

    def wait_for_open_connection(self, channel_number):
        """Poll once a second until the VM's kernel channel accepts connections.

        NOTE(review): `count` is never incremented, so the `count < 10` bound
        is ineffective — if the channel never opens this loop never ends.
        The bare `except` also hides any unrelated error; both look like bugs.
        """
        connection_open = False
        count = 0
        while not connection_open and count < 10:
            time.sleep(1)
            try:
                connection = Connection(channel_number)
                connection_open = True
                connection.close()
            except:
                pass

    def add_stream_probe_configs(self, channel_number,
                                 stream_probes, vm_file_path):
        """Append each stream probe's device config to the VM config file."""
        for probe in stream_probes:
            probe.add_config(channel_number, vm_file_path, self.pipes_directory)

    def setup_receivers(self, channel_number, groups):
        """Start a file-backed Receiver per file output channel; return them."""
        receivers = []
        current_euid = os.geteuid()
        for output_channel in groups:
            if Receiver.is_file(output_channel):
                receiver = Receiver(channel_number,
                                    groups[output_channel],
                                    output_channel, current_euid)
                receiver.start()
                receivers.append(receiver)
        return receivers

    def setup_stream_receivers(self, receivers, stream_probes):
        """Start a PipeReceiver per file-backed stream probe (appended to *receivers*)."""
        for probe in stream_probes:
            if Receiver.is_file(probe.output_channel):
                receiver = PipeReceiver(probe.pipe_name, probe.output_channel)
                receiver.start()
                receivers.append(receiver)

    def setup_finished_handler(self, channel_number, receivers,
                               tmp_vm_file_path):
        """Register a watcher that cleans up when the VM terminates."""
        current_euid = os.geteuid()
        finished_handler = VmFinishedHandler(current_euid, channel_number,
                                             receivers, self.running_vms,
                                             tmp_vm_file_path)
        finished_handler.start()
        with Constants.vm_lock:
            self.running_vms[channel_number].append(finished_handler)

    def build_server_response(self, script, groups):
        """Format the human-readable start report sent back to the client.

        Lists the VM ID, probe IDs, and the non-file output channels/pipes.
        """
        vm_id = script.channel_number
        output_channels = {}
        output_pipes = {}
        response = ''
        response += 'VM ID: ' + str(vm_id) + '\n'
        if script.event_probes:
            response += '\nEvent Probe IDs:\n'
            for probe in script.event_probes:
                response += ('\t' + probe.get_name() + ': ' +
                             str(probe.probe_id) + '\n')
        if script.stream_probes:
            response += '\nStream Probe IDs:\n'
            for probe in script.stream_probes:
                response += ('\t' + probe.get_name() + ': ' +
                             str(probe.probe_id) + '\n')
        # Non-file channels are handled by the client, so report them.
        for output_channel in groups:
            if not Receiver.is_file(output_channel):
                output_channels[output_channel] = groups[output_channel]
        if len(output_channels) > 0:
            response += '\n'
            response += 'Output Channels:\n'
            for output_channel in output_channels:
                response += ('\t' + output_channel + ': ' +
                             str(output_channels[output_channel]) + '\n')
        for probe in script.stream_probes:
            if not Receiver.is_file(probe.output_channel):
                output_pipes[probe.output_channel] = probe.pipe_name
        if len(output_pipes) > 0:
            response += '\n'
            response += 'Output Pipes:\n'
            for output_pipe in output_pipes:
                response += ('\t' + output_pipe + ': ' +
                             str(output_pipes[output_pipe]) + '\n')
        return response
| {
"content_hash": "0f684693a2135a13d5cf3f14d689a243",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 71,
"avg_line_length": 30.34536082474227,
"alnum_prop": 0.6840496008153558,
"repo_name": "FlorianWestphal/VMI-PL",
"id": "8b260b140c3e7455964e516f88002d1970b8f487",
"size": "5887",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "front_end/vmipl_server/connection_handler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9397"
},
{
"name": "Python",
"bytes": "45249"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
# Minimal distribution metadata: name plus auto-discovered packages;
# version/author/etc. are intentionally omitted.
setup(
    name='string-formatting',
    packages=find_packages()
)
| {
"content_hash": "d36f2f8e97681e5a5771c5140e588a24",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 43,
"avg_line_length": 20.166666666666668,
"alnum_prop": 0.6776859504132231,
"repo_name": "richlewis42/string_formatting",
"id": "c589e87f7cd2457d47a7798c1a2f61c9b8f7c1e8",
"size": "121",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "482"
}
],
"symlink_target": ""
} |
import sys
from django.core.management.base import BaseCommand
from kaleo.models import InvitationStat
class Command(BaseCommand):
    """Management command that tops up invites for every user at zero."""

    help = "Adds invites to all users with 0 invites remaining."

    def handle(self, *args, **kwargs):
        # The invite count must be supplied as the first positional arg.
        if not args:
            sys.exit("You must supply the number of invites as an argument.")
        try:
            invite_count = int(args[0])
        except ValueError:
            sys.exit("The argument for number of invites must be an integer.")
        InvitationStat.add_invites(invite_count)
| {
"content_hash": "ae045c6302621ba9608dec36922f38ca",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 78,
"avg_line_length": 28.95,
"alnum_prop": 0.6338514680483592,
"repo_name": "pombredanne/kaleo",
"id": "e734a653a35e26c855297fbd5933f19b90c8279c",
"size": "579",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "kaleo/management/commands/add_invites.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""This version uses a traditional event-driven version,
using continuation passing style. Each method call is passed
a completion callback and an error callback
"""
from statistics import median
import json
import asyncio
import random
import time
import hbmqtt.client
from collections import deque
from antevents.base import SensorEvent
URL = "mqtt://localhost:1883"
class RandomSensor:
    """Sensor producing Gaussian random samples rounded to one decimal.

    With *stop_after_events* set, the sensor is exhausted after that many
    samples and ``sample()`` raises ``StopIteration``; otherwise it
    produces values forever.
    """

    def __init__(self, sensor_id, mean=100.0, stddev=20.0, stop_after_events=None):
        self.sensor_id = sensor_id
        self.mean = mean
        self.stddev = stddev
        self.stop_after_events = stop_after_events
        # Parameters are captured at construction time, so later mutation
        # of self.mean / self.stddev does not affect the stream.
        self.generator = self._value_stream(mean, stddev, stop_after_events)

    @staticmethod
    def _value_stream(mean, stddev, limit):
        """Yield rounded Gaussian values; finite when *limit* is given."""
        if limit is not None:
            for _ in range(limit):
                yield round(random.gauss(mean, stddev), 1)
        else:
            while True:
                yield round(random.gauss(mean, stddev), 1)

    def sample(self):
        """Return the next reading (StopIteration once exhausted)."""
        return self.generator.__next__()

    def __repr__(self):
        if self.stop_after_events is None:
            return 'RandomSensor(%s, mean=%s, stddev=%s)' % \
                   (self.sensor_id, self.mean, self.stddev)
        return 'RandomSensor(%s, mean=%s, stddev=%s, stop_after_events=%s)' % \
               (self.sensor_id, self.mean, self.stddev, self.stop_after_events)
class PeriodicMedianTransducer:
    """Emit one event for every ``period`` input events.

    The emitted value is the median of the inputs received since the
    previous emission.
    """

    def __init__(self, period=5):
        self.period = period
        self.samples = [None] * period
        self.events_since_last = 0
        # Remembered so complete() can reuse the last event's metadata.
        self.last_event = None

    def step(self, v):
        """Accumulate *v*; return a median event every ``period`` calls, else None."""
        self.samples[self.events_since_last] = v.val
        self.events_since_last += 1
        if self.events_since_last < self.period:
            # Window not full yet; keep the event in case complete()
            # runs before the window fills.
            self.last_event = v
            return None
        emitted = SensorEvent(sensor_id=v.sensor_id, ts=v.ts,
                              val=median(self.samples))
        self.events_since_last = 0
        return emitted

    def complete(self):
        """Flush a final median over any partial window (None if empty)."""
        if self.events_since_last > 0:
            return SensorEvent(sensor_id=self.last_event.sensor_id,
                               ts=self.last_event.ts,
                               val=median(self.samples[0:self.events_since_last]))
def csv_writer(evt):
    """Debug sink: print the event that would be written to CSV."""
    print("csv_writer({!r})".format(evt))
class MqttWriter:
    """Serialized asynchronous MQTT publisher (continuation-passing style).

    All the processing is asynchronous. We ensure that a given send has
    completed (and its callbacks have run) before the next queued request
    is processed; at most one hbmqtt task is in flight at a time.
    """

    def __init__(self, url, topic, event_loop):
        self.url = url
        self.topic = topic
        self.client = hbmqtt.client.MQTTClient(loop=event_loop)
        self.event_loop = event_loop
        self.connected = False
        # The single in-flight asyncio task, or None when idle.
        self.pending_task = None
        # Requests waiting for the in-flight task:
        # (payload_bytes, completion_cb, error_cb); payload None = disconnect.
        self.request_queue = deque()

    def _to_message(self, msg):
        """Serialize a SensorEvent-like object to a JSON bytes payload."""
        return bytes(json.dumps((msg.sensor_id, msg.ts, msg.val),), encoding='utf-8')

    def _request_done(self, f, completion_cb, error_cb):
        """Done-callback for the in-flight task: dispatch result, pump queue."""
        assert f==self.pending_task
        self.pending_task = None
        exc = f.exception()
        if exc:
            self.event_loop.call_soon(error_cb, exc)
        else:
            self.event_loop.call_soon(completion_cb)
        # Start the next queued request, if any.
        if len(self.request_queue)>0:
            self.event_loop.call_soon(self._process_queue)

    def _process_queue(self):
        """Pop one queued request and start its publish/disconnect task."""
        assert self.pending_task == None
        assert len(self.request_queue)>0
        (msg, completion_cb, error_cb) = self.request_queue.popleft()
        if msg is not None:
            print("send from queue: %s" % msg)
            self.pending_task = self.event_loop.create_task(
                self.client.publish(self.topic, msg)
            )
        else: # None means that we wanted a disconnect
            print("disconnect")
            self.pending_task = self.event_loop.create_task(
                self.client.disconnect()
            )
        self.pending_task.add_done_callback(lambda f:
                                            self._request_done(f, completion_cb,
                                                               error_cb))

    def send(self, msg, completion_cb, error_cb):
        """Publish *msg*; calls completion_cb on success, error_cb on failure.

        First call triggers the MQTT connect; while a task is in flight the
        request is queued and sent later in order.
        """
        if not self.connected:
            print("attempting connection")
            # Queue the message; it is sent once the connect completes.
            self.request_queue.append((self._to_message(msg),
                                       completion_cb, error_cb),)
            self.connected = True
            self.pending_task = self.event_loop.create_task(self.client.connect(self.url))
            def connect_done(f):
                # NOTE(review): unlike _request_done, a failed connect is not
                # routed to error_cb here — f.exception() is never checked.
                assert f==self.pending_task
                print("connected")
                self.pending_task = None
                self.event_loop.call_soon(self._process_queue)
            self.pending_task.add_done_callback(connect_done)
        elif self.pending_task:
            # A task is in flight: defer this publish.
            self.request_queue.append((self._to_message(msg), completion_cb,
                                       error_cb),)
        else:
            print("sending %s" % self._to_message(msg))
            self.pending_task = self.event_loop.create_task(
                self.client.publish(self.topic, self._to_message(msg))
            )
            self.pending_task.add_done_callback(lambda f:
                                                self._request_done(f, completion_cb,
                                                                   error_cb))

    def disconnect(self, completion_cb, error_cb, drop_queue=False):
        """Disconnect after draining the queue (or immediately if idle).

        With drop_queue=True any pending requests are discarded first.
        No-op when never connected.
        """
        if not self.connected:
            return
        if len(self.request_queue)>0 and drop_queue: # for error situations
            self.request_queue = deque()
        if self.pending_task:
            # Sentinel request: payload None means "disconnect".
            self.request_queue.append((None, completion_cb, error_cb),)
        else:
            print("disconnecting")
            self.pending_task = self.event_loop.create_task(
                self.client.disconnect()
            )
            self.pending_task.add_done_callback(lambda f:
                                                self._request_done(f, completion_cb,
                                                                   error_cb))
def sample_and_process(sensor, mqtt_writer, xducer, completion_cb, error_cb):
    """Take one sample, log it, and forward the transduced median via MQTT.

    Invokes ``completion_cb(True)`` when more samples remain and
    ``completion_cb(False)`` once the sensor is exhausted and the writer
    has disconnected; failures are routed to ``error_cb``.
    """
    try:
        sample = sensor.sample()
    except StopIteration:
        # Sensor exhausted: flush any partial median, then disconnect.
        final_event = xducer.complete()
        finish = lambda: mqtt_writer.disconnect(lambda: completion_cb(False),
                                                error_cb)
        if final_event:
            mqtt_writer.send(final_event, finish, error_cb)
        else:
            finish()
        return
    event = SensorEvent(sensor_id=sensor.sensor_id, ts=time.time(), val=sample)
    csv_writer(event)
    median_event = xducer.step(event)
    if median_event:
        mqtt_writer.send(median_event, lambda: completion_cb(True), error_cb)
    else:
        # Window not full yet; just schedule the next sample.
        completion_cb(True)
# Demo wiring: a finite random sensor streamed through a 5-sample median
# transducer and published to MQTT, driven entirely by event-loop callbacks.
sensor = RandomSensor('sensor-2', stop_after_events=12)
transducer = PeriodicMedianTransducer(5)
event_loop = asyncio.get_event_loop()
writer = MqttWriter(URL, sensor.sensor_id, event_loop)
def loop():
    """Schedule one sample/process step; re-arms itself via completion_cb."""
    def completion_cb(more):
        # Re-schedule every 0.5s until the sensor reports exhaustion.
        if more:
            event_loop.call_later(0.5, loop)
        else:
            print("all done, no more callbacks to schedule")
            event_loop.stop()
    def error_cb(e):
        print("Got error: %s" % e)
        event_loop.stop()
    event_loop.call_soon(
        lambda: sample_and_process(sensor, writer, transducer,
                                   completion_cb, error_cb)
    )
event_loop.call_soon(loop)
event_loop.run_forever()
print("that's all folks")
| {
"content_hash": "004caae81b00dcae070649f700e11203",
"timestamp": "",
"source": "github",
"line_count": 211,
"max_line_length": 100,
"avg_line_length": 38.25118483412322,
"alnum_prop": 0.5631272456944616,
"repo_name": "mpi-sws-rse/antevents-python",
"id": "01cce22c693ce47cd7b8cd0b5f4e638791ea3d32",
"size": "8071",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/event_library_comparison/event.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "351"
},
{
"name": "Python",
"bytes": "242029"
},
{
"name": "Shell",
"bytes": "6508"
}
],
"symlink_target": ""
} |
import logging
from airflow.models import DAG
from airflow.utils.dates import days_ago
from airflow_multi_dagrun.operators import TriggerMultiDagRunOperator
log = logging.getLogger(__name__)
def generate_dag_run(**context):
    """Yield DagRun confs; the callable can depend on the trigger context.

    Deliberately raises on the second conf during the first try so the
    operator's retry behaviour can be exercised.
    """
    # getLogger returns the same module-level logger instance each call.
    logger = logging.getLogger(__name__)
    logger.info('!!!!!!!! =========== TRY NUMBER %s', context['ti'].try_number)
    for idx in range(2):
        if idx > 0 and context['ti'].try_number < 2:
            raise Exception('First try failed')
        yield {
            'run_id': f"custom_trigger_id___{context['ts']}_{idx}",
            'timeout': idx,
            'ds': context["ds"],
        }
# Default arguments applied to every task in this DAG.
args = {
    'start_date': days_ago(1),
    'owner': 'airflow',
}

# max_active_runs=1 keeps retried runs of this DAG from overlapping.
with DAG(dag_id='trigger_with_retries', max_active_runs=1, default_args=args) as dag:
    TriggerMultiDagRunOperator(
        task_id='gen_target_dag_run',
        dag=dag,
        trigger_dag_id='common_target_custom_run_id',
        python_callable=generate_dag_run,
        retries=3,  # lets generate_dag_run's deliberate first-try failure be retried
        retry_delay=0,
    )
| {
"content_hash": "6bb93c2804236ea43fa7cc0629e0c901",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 85,
"avg_line_length": 26.605263157894736,
"alnum_prop": 0.5934718100890207,
"repo_name": "mastak/airflow_multi_dagrun",
"id": "9c3601ef66cb3b89df93cde2af356b5fa7edaf8d",
"size": "1011",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/trigger_with_retries.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1492"
},
{
"name": "Python",
"bytes": "5977"
}
],
"symlink_target": ""
} |
from django.http import JsonResponse
from django.views import generic
from django.utils import timezone
from django.db.models import Count, Q
from django.contrib.auth.decorators import login_required
from rest_framework.viewsets import ReadOnlyModelViewSet
from artist.models import Artist
from event.models import Event
from event.forms import SearchForm
from event.serializers import EventSerializer
class EventViewSet(ReadOnlyModelViewSet):
    """Read-only REST API endpoint exposing all events."""
    queryset = Event.objects.all()
    serializer_class = EventSerializer
class IndexView(generic.ListView):
    """Landing page: discover / trending artists and recommended events."""

    model = Artist
    template_name = 'event/pages/index.html'
    # Section size limits shown on the landing page.
    discover_count = 5
    trending_count = 8
    recommend_count = 10

    def get_context_data(self, **kwargs):
        """
        Return the discover, trending,
        recommended artists and events
        """
        now = timezone.now()
        events = Event.objects
        artists = Artist.objects
        user_id = self.request.user.id
        user_count = Count('users')
        discover_count = self.discover_count
        trending_count = self.trending_count
        recommend_count = self.recommend_count
        context = super(IndexView, self).get_context_data(**kwargs)
        # Get all artists, but exclude artists without images
        context['discover'] = artists.exclude(image_url__exact='', thumb_url__exact='')
        # Trending = artists ranked by number of followers.
        context['trending'] = artists.annotate(user_count=user_count).order_by('-user_count')[:trending_count]
        if self.request.user.is_authenticated:
            user_artists = self.request.user.artists.all()
            # Exclude artists already followed by the current user
            context['discover'] = context['discover'].exclude(pk__in=user_artists)
            # Recommend upcoming events by the user's favorite artists
            context['recommend'] = events.filter(artists__in=user_artists, start__gte=now)
            # Exclude events already bookmarked by the current user
            context['recommend'] = context['recommend'].exclude(users__in=[user_id]).annotate(user_count=user_count)
            context['recommend'] = context['recommend'].order_by('start', '-user_count')[:recommend_count]
        # Slice last so the authenticated-user exclusions apply first.
        context['discover'] = context['discover'][:discover_count]
        return context
class SearchView(generic.ListView):
    """Keyword search across events, venues, locations and artists."""

    model = Event
    form_class = SearchForm
    paginate_by = 12
    template_name = 'event/pages/search.html'
    context_object_name = 'events'

    def get_queryset(self):
        """
        Return the events found by keyword.

        Falls back to all events when the search form is invalid.
        """
        form = self.form_class(self.request.GET)
        if not form.is_valid():
            return Event.objects.all()
        keyword = form.cleaned_data['keyword']
        matches = (Q(name__search=keyword) |
                   Q(venue__name__search=keyword) |
                   Q(venue__location__city__search=keyword) |
                   Q(venue__location__country__search=keyword) |
                   Q(artists__name__search=keyword))
        # distinct(): the artists join can yield duplicate event rows.
        return Event.objects.filter(matches).distinct()
class EventListView(generic.ListView):
    """Paginated list of upcoming events."""

    model = Event
    paginate_by = 12
    template_name = 'event/pages/event.html'
    context_object_name = 'events'

    def get_queryset(self):
        """Return only events starting now or later."""
        base_qs = super().get_queryset()
        return base_qs.filter(start__gte=timezone.now())
class EventDetailView(generic.DetailView):
    """Detail page for a single event."""
    model = Event
    template_name = 'event/details/event_detail.html'
@login_required(redirect_field_name='redirect')
def bookmark_event(request, pk):
    """Toggle event *pk* in the user's bookmarks; return updated counts as JSON."""
    bookmarks = request.user.events
    if bookmarks.filter(id=pk):
        bookmarks.remove(pk)
    else:
        bookmarks.add(pk)
    payload = {
        'pk': pk,
        'id': 'events',
        'user_count': Event.objects.get(pk=pk).users.count(),
        'event_count': bookmarks.count(),
    }
    return JsonResponse(payload)
| {
"content_hash": "1df8e7c2aa67542d5045749e641a7f4e",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 116,
"avg_line_length": 33.09322033898305,
"alnum_prop": 0.6437900128040973,
"repo_name": "FedorSelitsky/eventrack",
"id": "f3a3ea0b116c9184d4f7fbc0ec3ee3832f9bf946",
"size": "3905",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "event/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16128"
},
{
"name": "Dockerfile",
"bytes": "1061"
},
{
"name": "HTML",
"bytes": "62582"
},
{
"name": "JavaScript",
"bytes": "46270"
},
{
"name": "Python",
"bytes": "47384"
},
{
"name": "Shell",
"bytes": "127"
}
],
"symlink_target": ""
} |
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.contrib.auth import authenticate, login as auth_login, \
logout as auth_logout
from models import PageCopy, Module, ModuleSection
def landing(request):
    """Render the landing page with its CMS copy and published modules."""
    copy = PageCopy.objects.get(slug="landing").html_copy
    modules = (Module.objects.all()
               .filter(published=True)
               .order_by('sort_order'))
    context = {
        'copy': copy,
        'modules': modules,
    }
    return render(request, 'spazaschool/landing.html', context)
def login(request):
    """Authenticate by MSISDN and password.

    On success redirects to the landing page; otherwise flashes an error
    message and re-renders the login template.
    """
    if request.method == "POST":
        # Convert a local '0XXXXXXXXX' number to international '+27...' form.
        msisdn = '+27' + request.POST.get('phone_number')[1:]
        user = authenticate(msisdn=msisdn,
                            password=request.POST.get('password'))
        if user is None:
            messages.add_message(request, messages.ERROR,
                                 'Login Failed - Invalid Login')
        elif not user.is_active:
            messages.add_message(request, messages.ERROR,
                                 'Login Failed - Account Disabled')
        else:
            auth_login(request, user)
            return redirect(reverse('landing'))
    return render(
        request, 'spazaschool/login.html')
def logout(request):
    """Terminate the session and redirect to the landing page."""
    auth_logout(request)
    landing_url = reverse('landing')
    return redirect(landing_url)
def register(request):
    """Render the registration page (currently reuses the landing copy)."""
    html_copy = PageCopy.objects.get(slug="landing").html_copy
    return render(request,
                  'spazaschool/landing.html',
                  {'copy': html_copy, })
| {
"content_hash": "bc50c293fcb5cdbffea68d9d0956c312",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 71,
"avg_line_length": 27.633333333333333,
"alnum_prop": 0.6019300361881785,
"repo_name": "jonathanendersby/spazaschool",
"id": "f451e33288dab8f430df47ec948c08889d2e9c3c",
"size": "1658",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spazaschool_project/spazaschool/views.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "9714"
}
],
"symlink_target": ""
} |
import mock
from oslo_utils import uuidutils
from ironic.common import driver_factory
from ironic.common import exception
from ironic.common import states
from ironic.conductor import task_manager
from ironic.conductor import utils as conductor_utils
from ironic import objects
from ironic.tests import base as tests_base
from ironic.tests.unit.conductor import mgr_utils
from ironic.tests.unit.db import base
from ironic.tests.unit.db import utils
from ironic.tests.unit.objects import utils as obj_utils
class NodeSetBootDeviceTestCase(base.DbTestCase):
    """Tests for conductor_utils.node_set_boot_device."""

    def test_node_set_boot_device_non_existent_device(self):
        """An unknown boot device name must raise InvalidParameterValue."""
        mgr_utils.mock_the_extension_manager(driver="fake_ipmitool")
        self.driver = driver_factory.get_driver("fake_ipmitool")
        ipmi_info = utils.get_test_ipmi_info()
        node = obj_utils.create_test_node(self.context,
                                          uuid=uuidutils.generate_uuid(),
                                          driver='fake_ipmitool',
                                          driver_info=ipmi_info)
        task = task_manager.TaskManager(self.context, node.uuid)
        self.assertRaises(exception.InvalidParameterValue,
                          conductor_utils.node_set_boot_device,
                          task,
                          device='fake')

    def test_node_set_boot_device_valid(self):
        """A valid device is forwarded to the driver with persistent=False."""
        mgr_utils.mock_the_extension_manager(driver="fake_ipmitool")
        self.driver = driver_factory.get_driver("fake_ipmitool")
        ipmi_info = utils.get_test_ipmi_info()
        node = obj_utils.create_test_node(self.context,
                                          uuid=uuidutils.generate_uuid(),
                                          driver='fake_ipmitool',
                                          driver_info=ipmi_info)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.management,
                               'set_boot_device') as mock_sbd:
            conductor_utils.node_set_boot_device(task,
                                                 device='pxe')
            mock_sbd.assert_called_once_with(task,
                                             device='pxe',
                                             persistent=False)

    def test_node_set_boot_device_adopting(self):
        """While the node is ADOPTING, the driver must not be called."""
        mgr_utils.mock_the_extension_manager(driver="fake_ipmitool")
        self.driver = driver_factory.get_driver("fake_ipmitool")
        ipmi_info = utils.get_test_ipmi_info()
        node = obj_utils.create_test_node(self.context,
                                          uuid=uuidutils.generate_uuid(),
                                          driver='fake_ipmitool',
                                          driver_info=ipmi_info,
                                          provision_state=states.ADOPTING)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.management,
                               'set_boot_device') as mock_sbd:
            conductor_utils.node_set_boot_device(task,
                                                 device='pxe')
            self.assertFalse(mock_sbd.called)
class NodePowerActionTestCase(base.DbTestCase):
def setUp(self):
super(NodePowerActionTestCase, self).setUp()
mgr_utils.mock_the_extension_manager()
self.driver = driver_factory.get_driver("fake")
def test_node_power_action_power_on(self):
"""Test node_power_action to turn node power on."""
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid(),
driver='fake',
power_state=states.POWER_OFF)
task = task_manager.TaskManager(self.context, node.uuid)
with mock.patch.object(self.driver.power,
'get_power_state') as get_power_mock:
get_power_mock.return_value = states.POWER_OFF
conductor_utils.node_power_action(task, states.POWER_ON)
node.refresh()
get_power_mock.assert_called_once_with(mock.ANY)
self.assertEqual(states.POWER_ON, node['power_state'])
self.assertIsNone(node['target_power_state'])
self.assertIsNone(node['last_error'])
def test_node_power_action_power_off(self):
"""Test node_power_action to turn node power off."""
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid(),
driver='fake',
power_state=states.POWER_ON)
task = task_manager.TaskManager(self.context, node.uuid)
with mock.patch.object(self.driver.power,
'get_power_state') as get_power_mock:
get_power_mock.return_value = states.POWER_ON
conductor_utils.node_power_action(task, states.POWER_OFF)
node.refresh()
get_power_mock.assert_called_once_with(mock.ANY)
self.assertEqual(states.POWER_OFF, node['power_state'])
self.assertIsNone(node['target_power_state'])
self.assertIsNone(node['last_error'])
def test_node_power_action_power_reboot(self):
"""Test for reboot a node."""
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid(),
driver='fake',
power_state=states.POWER_ON)
task = task_manager.TaskManager(self.context, node.uuid)
with mock.patch.object(self.driver.power, 'reboot') as reboot_mock:
conductor_utils.node_power_action(task, states.REBOOT)
node.refresh()
reboot_mock.assert_called_once_with(mock.ANY)
self.assertEqual(states.POWER_ON, node['power_state'])
self.assertIsNone(node['target_power_state'])
self.assertIsNone(node['last_error'])
    def test_node_power_action_invalid_state(self):
        """Test for exception when changing to an invalid power state."""
        node = obj_utils.create_test_node(self.context,
                                          uuid=uuidutils.generate_uuid(),
                                          driver='fake',
                                          power_state=states.POWER_ON)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.power,
                               'get_power_state') as get_power_mock:
            get_power_mock.return_value = states.POWER_ON
            self.assertRaises(exception.InvalidParameterValue,
                              conductor_utils.node_power_action,
                              task,
                              "INVALID_POWER_STATE")
            node.refresh()
            get_power_mock.assert_called_once_with(mock.ANY)
            # Power state is unchanged and the failure is recorded.
            self.assertEqual(states.POWER_ON, node['power_state'])
            self.assertIsNone(node['target_power_state'])
            self.assertIsNotNone(node['last_error'])
            # last_error is cleared when a new transaction happens
            conductor_utils.node_power_action(task, states.POWER_OFF)
            node.refresh()
            self.assertEqual(states.POWER_OFF, node['power_state'])
            self.assertIsNone(node['target_power_state'])
            self.assertIsNone(node['last_error'])
    def test_node_power_action_already_being_processed(self):
        """Test node power action after aborted power action.

        The target_power_state is expected to be None so it isn't
        checked in the code. This is what happens if it is not None.
        (Eg, if a conductor had died during a previous power-off
        attempt and left the target_power_state set to states.POWER_OFF,
        and the user is attempting to power-off again.)
        """
        node = obj_utils.create_test_node(self.context,
                                          uuid=uuidutils.generate_uuid(),
                                          driver='fake',
                                          power_state=states.POWER_ON,
                                          target_power_state=states.POWER_OFF)
        task = task_manager.TaskManager(self.context, node.uuid)
        conductor_utils.node_power_action(task, states.POWER_OFF)
        node.refresh()
        # The stale target state is resolved to NOSTATE, not left dangling.
        self.assertEqual(states.POWER_OFF, node['power_state'])
        self.assertEqual(states.NOSTATE, node['target_power_state'])
        self.assertIsNone(node['last_error'])
    @mock.patch.object(conductor_utils, 'LOG', autospec=True)
    def test_node_power_action_in_same_state(self, log_mock):
        """Test setting node state to its present state.

        Test that we don't try to set the power state if the requested
        state is the same as the current state.
        """
        node = obj_utils.create_test_node(self.context,
                                          uuid=uuidutils.generate_uuid(),
                                          driver='fake',
                                          last_error='anything but None',
                                          power_state=states.POWER_ON)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.power,
                               'get_power_state') as get_power_mock:
            get_power_mock.return_value = states.POWER_ON
            with mock.patch.object(self.driver.power,
                                   'set_power_state') as set_power_mock:
                conductor_utils.node_power_action(task, states.POWER_ON)
                node.refresh()
                get_power_mock.assert_called_once_with(mock.ANY)
                self.assertFalse(set_power_mock.called,
                                 "set_power_state unexpectedly called")
                self.assertEqual(states.POWER_ON, node['power_state'])
                self.assertIsNone(node['target_power_state'])
                # last_error is cleared even though no power change happened.
                self.assertIsNone(node['last_error'])
                log_mock.warning.assert_called_once_with(
                    u"Not going to change node %(node)s power state because "
                    u"current state = requested state = '%(state)s'.",
                    {'state': states.POWER_ON, 'node': node.uuid})
    def test_node_power_action_in_same_state_db_not_in_sync(self):
        """Test setting node state to its present state if DB is out of sync.

        Under rare conditions (see bug #1403106) database might contain stale
        information, make sure we fix it.
        """
        node = obj_utils.create_test_node(self.context,
                                          uuid=uuidutils.generate_uuid(),
                                          driver='fake',
                                          last_error='anything but None',
                                          power_state=states.POWER_ON)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.power,
                               'get_power_state') as get_power_mock:
            get_power_mock.return_value = states.POWER_OFF
            with mock.patch.object(self.driver.power,
                                   'set_power_state') as set_power_mock:
                conductor_utils.node_power_action(task, states.POWER_OFF)
                node.refresh()
                get_power_mock.assert_called_once_with(mock.ANY)
                self.assertFalse(set_power_mock.called,
                                 "set_power_state unexpectedly called")
                # The stale DB value (POWER_ON) is corrected to the power
                # state actually reported by the driver.
                self.assertEqual(states.POWER_OFF, node['power_state'])
                self.assertIsNone(node['target_power_state'])
                self.assertIsNone(node['last_error'])
    def test_node_power_action_failed_getting_state(self):
        """Test for exception when we can't get the current power state."""
        node = obj_utils.create_test_node(self.context,
                                          uuid=uuidutils.generate_uuid(),
                                          driver='fake',
                                          power_state=states.POWER_ON)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.power,
                               'get_power_state') as get_power_state_mock:
            get_power_state_mock.side_effect = (
                exception.InvalidParameterValue('failed getting power state'))
            self.assertRaises(exception.InvalidParameterValue,
                              conductor_utils.node_power_action,
                              task,
                              states.POWER_ON)
            node.refresh()
            get_power_state_mock.assert_called_once_with(mock.ANY)
            # The recorded power state is untouched; the failure is surfaced
            # via last_error.
            self.assertEqual(states.POWER_ON, node['power_state'])
            self.assertIsNone(node['target_power_state'])
            self.assertIsNotNone(node['last_error'])
    def test_node_power_action_set_power_failure(self):
        """Test if an exception is thrown when the set_power call fails."""
        node = obj_utils.create_test_node(self.context,
                                          uuid=uuidutils.generate_uuid(),
                                          driver='fake',
                                          power_state=states.POWER_OFF)
        task = task_manager.TaskManager(self.context, node.uuid)
        with mock.patch.object(self.driver.power,
                               'get_power_state') as get_power_mock:
            with mock.patch.object(self.driver.power,
                                   'set_power_state') as set_power_mock:
                get_power_mock.return_value = states.POWER_OFF
                set_power_mock.side_effect = exception.IronicException()
                self.assertRaises(
                    exception.IronicException,
                    conductor_utils.node_power_action,
                    task,
                    states.POWER_ON)
                node.refresh()
                get_power_mock.assert_called_once_with(mock.ANY)
                set_power_mock.assert_called_once_with(mock.ANY,
                                                       states.POWER_ON)
                # Power state rolls back to the original value and the
                # failure is recorded in last_error.
                self.assertEqual(states.POWER_OFF, node['power_state'])
                self.assertIsNone(node['target_power_state'])
                self.assertIsNotNone(node['last_error'])
class CleanupAfterTimeoutTestCase(tests_base.TestCase):
    """Tests for conductor_utils.cleanup_after_timeout()."""
    def setUp(self):
        super(CleanupAfterTimeoutTestCase, self).setUp()
        # Fully mocked task: only the 'deploy' driver interface and the
        # node object are exercised by cleanup_after_timeout().
        self.task = mock.Mock(spec=task_manager.TaskManager)
        self.task.context = self.context
        self.task.driver = mock.Mock(spec_set=['deploy'])
        self.task.shared = False
        self.task.node = mock.Mock(spec_set=objects.Node)
        self.node = self.task.node
    def test_cleanup_after_timeout(self):
        conductor_utils.cleanup_after_timeout(self.task)
        self.node.save.assert_called_once_with()
        self.task.driver.deploy.clean_up.assert_called_once_with(self.task)
        self.assertIn('Timeout reached', self.node.last_error)
    def test_cleanup_after_timeout_shared_lock(self):
        # Cleanup requires an exclusive lock on the node.
        self.task.shared = True
        self.assertRaises(exception.ExclusiveLockRequired,
                          conductor_utils.cleanup_after_timeout,
                          self.task)
    def test_cleanup_after_timeout_cleanup_ironic_exception(self):
        clean_up_mock = self.task.driver.deploy.clean_up
        clean_up_mock.side_effect = exception.IronicException('moocow')
        conductor_utils.cleanup_after_timeout(self.task)
        self.task.driver.deploy.clean_up.assert_called_once_with(self.task)
        # save() runs twice: once for the timeout, once for the failed
        # clean-up; the IronicException text ends up in last_error.
        self.assertEqual([mock.call()] * 2, self.node.save.call_args_list)
        self.assertIn('moocow', self.node.last_error)
    def test_cleanup_after_timeout_cleanup_random_exception(self):
        clean_up_mock = self.task.driver.deploy.clean_up
        clean_up_mock.side_effect = Exception('moocow')
        conductor_utils.cleanup_after_timeout(self.task)
        self.task.driver.deploy.clean_up.assert_called_once_with(self.task)
        self.assertEqual([mock.call()] * 2, self.node.save.call_args_list)
        # Unexpected exception types get a generic message instead of the
        # exception text.
        self.assertIn('Deploy timed out', self.node.last_error)
class NodeCleaningStepsTestCase(base.DbTestCase):
    """Tests for clean-step discovery and validation in conductor_utils."""
    def setUp(self):
        super(NodeCleaningStepsTestCase, self).setUp()
        mgr_utils.mock_the_extension_manager()
        # Fixture steps: two same-priority automated steps on different
        # interfaces plus a higher-priority erase step.
        self.power_update = {
            'step': 'update_firmware', 'priority': 10, 'interface': 'power'}
        self.deploy_update = {
            'step': 'update_firmware', 'priority': 10, 'interface': 'deploy'}
        self.deploy_erase = {
            'step': 'erase_disks', 'priority': 20, 'interface': 'deploy'}
        # Automated cleaning should be executed in this order
        self.clean_steps = [self.deploy_erase, self.power_update,
                            self.deploy_update]
        # Manual clean step
        self.deploy_raid = {
            'step': 'build_raid', 'priority': 0, 'interface': 'deploy',
            'argsinfo': {'arg1': {'description': 'desc1', 'required': True},
                         'arg2': {'description': 'desc2'}}}
    @mock.patch('ironic.drivers.modules.fake.FakeDeploy.get_clean_steps')
    @mock.patch('ironic.drivers.modules.fake.FakePower.get_clean_steps')
    def test__get_cleaning_steps(self, mock_power_steps, mock_deploy_steps):
        # Test getting cleaning steps, with one driver returning None, two
        # conflicting priorities, and asserting they are ordered properly.
        node = obj_utils.create_test_node(
            self.context, driver='fake',
            provision_state=states.CLEANING,
            target_provision_state=states.AVAILABLE)
        mock_power_steps.return_value = [self.power_update]
        mock_deploy_steps.return_value = [self.deploy_erase,
                                          self.deploy_update]
        with task_manager.acquire(
                self.context, node.uuid, shared=False) as task:
            steps = conductor_utils._get_cleaning_steps(task, enabled=False)
        self.assertEqual(self.clean_steps, steps)
    @mock.patch('ironic.drivers.modules.fake.FakeDeploy.get_clean_steps')
    @mock.patch('ironic.drivers.modules.fake.FakePower.get_clean_steps')
    def test__get_cleaning_steps_unsorted(self, mock_power_steps,
                                          mock_deploy_steps):
        # sort=False must preserve the order the driver reported.
        node = obj_utils.create_test_node(
            self.context, driver='fake',
            provision_state=states.CLEANING,
            target_provision_state=states.MANAGEABLE)
        mock_deploy_steps.return_value = [self.deploy_raid,
                                          self.deploy_update,
                                          self.deploy_erase]
        with task_manager.acquire(
                self.context, node.uuid, shared=False) as task:
            steps = conductor_utils._get_cleaning_steps(task, enabled=False,
                                                        sort=False)
        self.assertEqual(mock_deploy_steps.return_value, steps)
    @mock.patch('ironic.drivers.modules.fake.FakeDeploy.get_clean_steps')
    @mock.patch('ironic.drivers.modules.fake.FakePower.get_clean_steps')
    def test__get_cleaning_steps_only_enabled(self, mock_power_steps,
                                              mock_deploy_steps):
        # Test getting only cleaning steps, with one driver returning None, two
        # conflicting priorities, and asserting they are ordered properly.
        # Should discard zero-priority (manual) clean step
        node = obj_utils.create_test_node(
            self.context, driver='fake',
            provision_state=states.CLEANING,
            target_provision_state=states.AVAILABLE)
        mock_power_steps.return_value = [self.power_update]
        mock_deploy_steps.return_value = [self.deploy_erase,
                                          self.deploy_update,
                                          self.deploy_raid]
        with task_manager.acquire(
                self.context, node.uuid, shared=True) as task:
            steps = conductor_utils._get_cleaning_steps(task, enabled=True)
        self.assertEqual(self.clean_steps, steps)
    @mock.patch.object(conductor_utils, '_validate_user_clean_steps')
    @mock.patch.object(conductor_utils, '_get_cleaning_steps')
    def test_set_node_cleaning_steps_automated(self, mock_steps,
                                               mock_validate_user_steps):
        # Automated cleaning (target AVAILABLE) pulls steps from the
        # drivers; no user-step validation takes place.
        mock_steps.return_value = self.clean_steps
        node = obj_utils.create_test_node(
            self.context, driver='fake',
            provision_state=states.CLEANING,
            target_provision_state=states.AVAILABLE,
            last_error=None,
            clean_step=None)
        with task_manager.acquire(
                self.context, node.uuid, shared=False) as task:
            conductor_utils.set_node_cleaning_steps(task)
            node.refresh()
            self.assertEqual(self.clean_steps,
                             node.driver_internal_info['clean_steps'])
            self.assertEqual({}, node.clean_step)
            mock_steps.assert_called_once_with(task, enabled=True)
            self.assertFalse(mock_validate_user_steps.called)
    @mock.patch.object(conductor_utils, '_validate_user_clean_steps')
    @mock.patch.object(conductor_utils, '_get_cleaning_steps')
    def test_set_node_cleaning_steps_manual(self, mock_steps,
                                            mock_validate_user_steps):
        # Manual cleaning (target MANAGEABLE) uses the user-supplied steps
        # already stored in driver_internal_info and validates them.
        clean_steps = [self.deploy_raid]
        mock_steps.return_value = self.clean_steps
        node = obj_utils.create_test_node(
            self.context, driver='fake',
            provision_state=states.CLEANING,
            target_provision_state=states.MANAGEABLE,
            last_error=None,
            clean_step=None,
            driver_internal_info={'clean_steps': clean_steps})
        with task_manager.acquire(
                self.context, node.uuid, shared=False) as task:
            conductor_utils.set_node_cleaning_steps(task)
            node.refresh()
            self.assertEqual(clean_steps,
                             node.driver_internal_info['clean_steps'])
            self.assertEqual({}, node.clean_step)
            self.assertFalse(mock_steps.called)
            mock_validate_user_steps.assert_called_once_with(task, clean_steps)
    @mock.patch.object(conductor_utils, '_get_cleaning_steps')
    def test__validate_user_clean_steps(self, mock_steps):
        node = obj_utils.create_test_node(self.context)
        mock_steps.return_value = self.clean_steps
        user_steps = [{'step': 'update_firmware', 'interface': 'power'},
                      {'step': 'erase_disks', 'interface': 'deploy'}]
        with task_manager.acquire(self.context, node.uuid) as task:
            conductor_utils._validate_user_clean_steps(task, user_steps)
            mock_steps.assert_called_once_with(task, enabled=False, sort=False)
    @mock.patch.object(conductor_utils, '_get_cleaning_steps')
    def test__validate_user_clean_steps_no_steps(self, mock_steps):
        # An empty user-step list is valid.
        node = obj_utils.create_test_node(self.context)
        mock_steps.return_value = self.clean_steps
        with task_manager.acquire(self.context, node.uuid) as task:
            conductor_utils._validate_user_clean_steps(task, [])
            mock_steps.assert_called_once_with(task, enabled=False, sort=False)
    @mock.patch.object(conductor_utils, '_get_cleaning_steps')
    def test__validate_user_clean_steps_get_steps_exception(self, mock_steps):
        # Failures while fetching driver steps propagate unchanged.
        node = obj_utils.create_test_node(self.context)
        mock_steps.side_effect = exception.NodeCleaningFailure('bad')
        with task_manager.acquire(self.context, node.uuid) as task:
            self.assertRaises(exception.NodeCleaningFailure,
                              conductor_utils._validate_user_clean_steps,
                              task, [])
            mock_steps.assert_called_once_with(task, enabled=False, sort=False)
    @mock.patch.object(conductor_utils, '_get_cleaning_steps')
    def test__validate_user_clean_steps_not_supported(self, mock_steps):
        # A step not advertised by any driver is rejected.
        node = obj_utils.create_test_node(self.context)
        mock_steps.return_value = [self.power_update, self.deploy_raid]
        user_steps = [{'step': 'update_firmware', 'interface': 'power'},
                      {'step': 'bad_step', 'interface': 'deploy'}]
        with task_manager.acquire(self.context, node.uuid) as task:
            self.assertRaisesRegex(exception.InvalidParameterValue,
                                   "does not support.*bad_step",
                                   conductor_utils._validate_user_clean_steps,
                                   task, user_steps)
            mock_steps.assert_called_once_with(task, enabled=False, sort=False)
    @mock.patch.object(conductor_utils, '_get_cleaning_steps')
    def test__validate_user_clean_steps_invalid_arg(self, mock_steps):
        # Arguments not declared in the step's argsinfo are rejected.
        node = obj_utils.create_test_node(self.context)
        mock_steps.return_value = self.clean_steps
        user_steps = [{'step': 'update_firmware', 'interface': 'power',
                       'args': {'arg1': 'val1', 'arg2': 'val2'}},
                      {'step': 'erase_disks', 'interface': 'deploy'}]
        with task_manager.acquire(self.context, node.uuid) as task:
            self.assertRaisesRegex(exception.InvalidParameterValue,
                                   "update_firmware.*invalid.*arg1",
                                   conductor_utils._validate_user_clean_steps,
                                   task, user_steps)
            mock_steps.assert_called_once_with(task, enabled=False, sort=False)
    @mock.patch.object(conductor_utils, '_get_cleaning_steps')
    def test__validate_user_clean_steps_missing_required_arg(self, mock_steps):
        # Omitting an argument marked required=True is rejected.
        node = obj_utils.create_test_node(self.context)
        mock_steps.return_value = [self.power_update, self.deploy_raid]
        user_steps = [{'step': 'update_firmware', 'interface': 'power'},
                      {'step': 'build_raid', 'interface': 'deploy'}]
        with task_manager.acquire(self.context, node.uuid) as task:
            self.assertRaisesRegex(exception.InvalidParameterValue,
                                   "build_raid.*missing.*arg1",
                                   conductor_utils._validate_user_clean_steps,
                                   task, user_steps)
            mock_steps.assert_called_once_with(task, enabled=False, sort=False)
class ErrorHandlersTestCase(tests_base.TestCase):
    """Tests for the error-handler helpers in conductor_utils."""
    def setUp(self):
        super(ErrorHandlersTestCase, self).setUp()
        self.task = mock.Mock(spec=task_manager.TaskManager)
        self.task.driver = mock.Mock(spec_set=['deploy'])
        self.task.node = mock.Mock(spec_set=objects.Node)
        self.node = self.task.node
    @mock.patch.object(conductor_utils, 'LOG')
    def test_provision_error_handler_no_worker(self, log_mock):
        exc = exception.NoFreeConductorWorker()
        conductor_utils.provisioning_error_handler(exc, self.node, 'state-one',
                                                   'state-two')
        self.node.save.assert_called_once_with()
        self.assertEqual('state-one', self.node.provision_state)
        self.assertEqual('state-two', self.node.target_provision_state)
        self.assertIn('No free conductor workers', self.node.last_error)
        self.assertTrue(log_mock.warning.called)
    @mock.patch.object(conductor_utils, 'LOG')
    def test_provision_error_handler_other_error(self, log_mock):
        # Only NoFreeConductorWorker is handled here; anything else leaves
        # the node untouched and logs nothing.
        exc = Exception('foo')
        conductor_utils.provisioning_error_handler(exc, self.node, 'state-one',
                                                   'state-two')
        self.assertFalse(self.node.save.called)
        self.assertFalse(log_mock.warning.called)
    @mock.patch.object(conductor_utils, 'cleaning_error_handler')
    def test_cleanup_cleanwait_timeout_handler_call(self, mock_error_handler):
        self.node.clean_step = {}
        conductor_utils.cleanup_cleanwait_timeout(self.task)
        mock_error_handler.assert_called_once_with(
            self.task,
            msg="Timeout reached while cleaning the node. Please "
                "check if the ramdisk responsible for the cleaning is "
                "running on the node. Failed on step {}.",
            set_fail_state=True)
    def test_cleanup_cleanwait_timeout(self):
        self.node.provision_state = states.CLEANFAIL
        target = 'baz'
        self.node.target_provision_state = target
        self.node.driver_internal_info = {}
        self.node.clean_step = {'key': 'val'}
        clean_error = ("Timeout reached while cleaning the node. Please "
                       "check if the ramdisk responsible for the cleaning is "
                       "running on the node. Failed on step {'key': 'val'}.")
        self.node.driver_internal_info = {
            'cleaning_reboot': True,
            'clean_step_index': 0}
        conductor_utils.cleanup_cleanwait_timeout(self.task)
        # The in-progress clean step and its index are discarded and the
        # node is put into maintenance with the timeout message.
        self.assertEqual({}, self.node.clean_step)
        self.assertNotIn('clean_step_index', self.node.driver_internal_info)
        self.task.process_event.assert_called_once_with('fail',
                                                        target_state=None)
        self.assertTrue(self.node.maintenance)
        self.assertEqual(clean_error, self.node.maintenance_reason)
    def test_cleaning_error_handler(self):
        self.node.provision_state = states.CLEANING
        target = 'baz'
        self.node.target_provision_state = target
        self.node.driver_internal_info = {}
        msg = 'error bar'
        conductor_utils.cleaning_error_handler(self.task, msg)
        self.node.save.assert_called_once_with()
        self.assertEqual({}, self.node.clean_step)
        self.assertNotIn('clean_step_index', self.node.driver_internal_info)
        self.assertEqual(msg, self.node.last_error)
        self.assertTrue(self.node.maintenance)
        self.assertEqual(msg, self.node.maintenance_reason)
        driver = self.task.driver.deploy
        driver.tear_down_cleaning.assert_called_once_with(self.task)
        self.task.process_event.assert_called_once_with('fail',
                                                        target_state=None)
    def test_cleaning_error_handler_manual(self):
        # Manual cleaning (target MANAGEABLE) keeps that target on failure.
        target = states.MANAGEABLE
        self.node.target_provision_state = target
        conductor_utils.cleaning_error_handler(self.task, 'foo')
        self.task.process_event.assert_called_once_with('fail',
                                                        target_state=target)
    def test_cleaning_error_handler_no_teardown(self):
        target = states.MANAGEABLE
        self.node.target_provision_state = target
        conductor_utils.cleaning_error_handler(self.task, 'foo',
                                               tear_down_cleaning=False)
        self.assertFalse(self.task.driver.deploy.tear_down_cleaning.called)
        self.task.process_event.assert_called_once_with('fail',
                                                        target_state=target)
    def test_cleaning_error_handler_no_fail(self):
        conductor_utils.cleaning_error_handler(self.task, 'foo',
                                               set_fail_state=False)
        driver = self.task.driver.deploy
        driver.tear_down_cleaning.assert_called_once_with(self.task)
        self.assertFalse(self.task.process_event.called)
    @mock.patch.object(conductor_utils, 'LOG')
    def test_cleaning_error_handler_tear_down_error(self, log_mock):
        # A failure inside tear_down_cleaning is logged, not propagated.
        driver = self.task.driver.deploy
        driver.tear_down_cleaning.side_effect = Exception('bar')
        conductor_utils.cleaning_error_handler(self.task, 'foo')
        self.assertTrue(log_mock.exception.called)
    @mock.patch.object(conductor_utils, 'LOG')
    def test_spawn_cleaning_error_handler_no_worker(self, log_mock):
        exc = exception.NoFreeConductorWorker()
        conductor_utils.spawn_cleaning_error_handler(exc, self.node)
        self.node.save.assert_called_once_with()
        self.assertIn('No free conductor workers', self.node.last_error)
        self.assertTrue(log_mock.warning.called)
    @mock.patch.object(conductor_utils, 'LOG')
    def test_spawn_cleaning_error_handler_other_error(self, log_mock):
        exc = Exception('foo')
        conductor_utils.spawn_cleaning_error_handler(exc, self.node)
        self.assertFalse(self.node.save.called)
        self.assertFalse(log_mock.warning.called)
    @mock.patch.object(conductor_utils, 'LOG')
    def test_power_state_error_handler_no_worker(self, log_mock):
        exc = exception.NoFreeConductorWorker()
        conductor_utils.power_state_error_handler(exc, self.node, 'newstate')
        self.node.save.assert_called_once_with()
        self.assertEqual('newstate', self.node.power_state)
        self.assertEqual(states.NOSTATE, self.node.target_power_state)
        self.assertIn('No free conductor workers', self.node.last_error)
        self.assertTrue(log_mock.warning.called)
    @mock.patch.object(conductor_utils, 'LOG')
    def test_power_state_error_handler_other_error(self, log_mock):
        exc = Exception('foo')
        conductor_utils.power_state_error_handler(exc, self.node, 'foo')
        self.assertFalse(self.node.save.called)
        self.assertFalse(log_mock.warning.called)
| {
"content_hash": "6b0600c1b6c90b902d4bb6f4d80692c1",
"timestamp": "",
"source": "github",
"line_count": 690,
"max_line_length": 79,
"avg_line_length": 48.995652173913044,
"alnum_prop": 0.5844351761469518,
"repo_name": "bacaldwell/ironic",
"id": "f4bcf412daf978b5a634891aae6b33099bc0aac7",
"size": "34380",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ironic/tests/unit/conductor/test_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "349"
},
{
"name": "Python",
"bytes": "4207766"
},
{
"name": "Shell",
"bytes": "69242"
}
],
"symlink_target": ""
} |
import ConfigParser
import StringIO
import unittest
import UserDict
from test import test_support
class SortedDict(UserDict.UserDict):
    """Dictionary whose items()/keys()/values() come back in sorted key order.

    Used by the write-order tests so that output is deterministic.
    """
    def items(self):
        result = self.data.items()
        result.sort()
        return result
    def keys(self):
        result = self.data.keys()
        result.sort()
        return result
    def values(self):
        # BUG FIX: the original iterated over the undefined name `values`
        # (a guaranteed NameError when called); iterate the sorted items
        # and pull out the value of each (key, value) pair instead.
        return [i[1] for i in self.items()]
    def iteritems(self): return iter(self.items())
    def iterkeys(self): return iter(self.keys())
    __iter__ = iterkeys
    def itervalues(self): return iter(self.values())
class TestCaseBase(unittest.TestCase):
def newconfig(self, defaults=None):
if defaults is None:
self.cf = self.config_class()
else:
self.cf = self.config_class(defaults)
return self.cf
def fromstring(self, string, defaults=None):
cf = self.newconfig(defaults)
sio = StringIO.StringIO(string)
cf.readfp(sio)
return cf
def test_basic(self):
cf = self.fromstring(
"[Foo Bar]\n"
"foo=bar\n"
"[Spacey Bar]\n"
"foo = bar\n"
"[Commented Bar]\n"
"foo: bar ; comment\n"
"[Long Line]\n"
"foo: this line is much, much longer than my editor\n"
" likes it.\n"
"[Section\\with$weird%characters[\t]\n"
"[Internationalized Stuff]\n"
"foo[bg]: Bulgarian\n"
"foo=Default\n"
"foo[en]=English\n"
"foo[de]=Deutsch\n"
"[Spaces]\n"
"key with spaces : value\n"
"another with spaces = splat!\n"
)
L = cf.sections()
L.sort()
eq = self.assertEqual
eq(L, [r'Commented Bar',
r'Foo Bar',
r'Internationalized Stuff',
r'Long Line',
r'Section\with$weird%characters[' '\t',
r'Spaces',
r'Spacey Bar',
])
# The use of spaces in the section names serves as a
# regression test for SourceForge bug #583248:
# http://www.python.org/sf/583248
eq(cf.get('Foo Bar', 'foo'), 'bar')
eq(cf.get('Spacey Bar', 'foo'), 'bar')
eq(cf.get('Commented Bar', 'foo'), 'bar')
eq(cf.get('Spaces', 'key with spaces'), 'value')
eq(cf.get('Spaces', 'another with spaces'), 'splat!')
self.failIf('__name__' in cf.options("Foo Bar"),
'__name__ "option" should not be exposed by the API!')
# Make sure the right things happen for remove_option();
# added to include check for SourceForge bug #123324:
self.failUnless(cf.remove_option('Foo Bar', 'foo'),
"remove_option() failed to report existance of option")
self.failIf(cf.has_option('Foo Bar', 'foo'),
"remove_option() failed to remove option")
self.failIf(cf.remove_option('Foo Bar', 'foo'),
"remove_option() failed to report non-existance of option"
" that was removed")
self.assertRaises(ConfigParser.NoSectionError,
cf.remove_option, 'No Such Section', 'foo')
eq(cf.get('Long Line', 'foo'),
'this line is much, much longer than my editor\nlikes it.')
def test_case_sensitivity(self):
cf = self.newconfig()
cf.add_section("A")
cf.add_section("a")
L = cf.sections()
L.sort()
eq = self.assertEqual
eq(L, ["A", "a"])
cf.set("a", "B", "value")
eq(cf.options("a"), ["b"])
eq(cf.get("a", "b"), "value",
"could not locate option, expecting case-insensitive option names")
self.failUnless(cf.has_option("a", "b"))
cf.set("A", "A-B", "A-B value")
for opt in ("a-b", "A-b", "a-B", "A-B"):
self.failUnless(
cf.has_option("A", opt),
"has_option() returned false for option which should exist")
eq(cf.options("A"), ["a-b"])
eq(cf.options("a"), ["b"])
cf.remove_option("a", "B")
eq(cf.options("a"), [])
# SF bug #432369:
cf = self.fromstring(
"[MySection]\nOption: first line\n\tsecond line\n")
eq(cf.options("MySection"), ["option"])
eq(cf.get("MySection", "Option"), "first line\nsecond line")
# SF bug #561822:
cf = self.fromstring("[section]\nnekey=nevalue\n",
defaults={"key":"value"})
self.failUnless(cf.has_option("section", "Key"))
def test_default_case_sensitivity(self):
cf = self.newconfig({"foo": "Bar"})
self.assertEqual(
cf.get("DEFAULT", "Foo"), "Bar",
"could not locate option, expecting case-insensitive option names")
cf = self.newconfig({"Foo": "Bar"})
self.assertEqual(
cf.get("DEFAULT", "Foo"), "Bar",
"could not locate option, expecting case-insensitive defaults")
def test_parse_errors(self):
self.newconfig()
self.parse_error(ConfigParser.ParsingError,
"[Foo]\n extra-spaces: splat\n")
self.parse_error(ConfigParser.ParsingError,
"[Foo]\n extra-spaces= splat\n")
self.parse_error(ConfigParser.ParsingError,
"[Foo]\noption-without-value\n")
self.parse_error(ConfigParser.ParsingError,
"[Foo]\n:value-without-option-name\n")
self.parse_error(ConfigParser.ParsingError,
"[Foo]\n=value-without-option-name\n")
self.parse_error(ConfigParser.MissingSectionHeaderError,
"No Section!\n")
def parse_error(self, exc, src):
sio = StringIO.StringIO(src)
self.assertRaises(exc, self.cf.readfp, sio)
def test_query_errors(self):
cf = self.newconfig()
self.assertEqual(cf.sections(), [],
"new ConfigParser should have no defined sections")
self.failIf(cf.has_section("Foo"),
"new ConfigParser should have no acknowledged sections")
self.assertRaises(ConfigParser.NoSectionError,
cf.options, "Foo")
self.assertRaises(ConfigParser.NoSectionError,
cf.set, "foo", "bar", "value")
self.get_error(ConfigParser.NoSectionError, "foo", "bar")
cf.add_section("foo")
self.get_error(ConfigParser.NoOptionError, "foo", "bar")
def get_error(self, exc, section, option):
try:
self.cf.get(section, option)
except exc, e:
return e
else:
self.fail("expected exception type %s.%s"
% (exc.__module__, exc.__name__))
def test_boolean(self):
cf = self.fromstring(
"[BOOLTEST]\n"
"T1=1\n"
"T2=TRUE\n"
"T3=True\n"
"T4=oN\n"
"T5=yes\n"
"F1=0\n"
"F2=FALSE\n"
"F3=False\n"
"F4=oFF\n"
"F5=nO\n"
"E1=2\n"
"E2=foo\n"
"E3=-1\n"
"E4=0.1\n"
"E5=FALSE AND MORE"
)
for x in range(1, 5):
self.failUnless(cf.getboolean('BOOLTEST', 't%d' % x))
self.failIf(cf.getboolean('BOOLTEST', 'f%d' % x))
self.assertRaises(ValueError,
cf.getboolean, 'BOOLTEST', 'e%d' % x)
def test_weird_errors(self):
cf = self.newconfig()
cf.add_section("Foo")
self.assertRaises(ConfigParser.DuplicateSectionError,
cf.add_section, "Foo")
def test_write(self):
cf = self.fromstring(
"[Long Line]\n"
"foo: this line is much, much longer than my editor\n"
" likes it.\n"
"[DEFAULT]\n"
"foo: another very\n"
" long line"
)
output = StringIO.StringIO()
cf.write(output)
self.assertEqual(
output.getvalue(),
"[DEFAULT]\n"
"foo = another very\n"
"\tlong line\n"
"\n"
"[Long Line]\n"
"foo = this line is much, much longer than my editor\n"
"\tlikes it.\n"
"\n"
)
def test_set_string_types(self):
cf = self.fromstring("[sect]\n"
"option1=foo\n")
# Check that we don't get an exception when setting values in
# an existing section using strings:
class mystr(str):
pass
cf.set("sect", "option1", "splat")
cf.set("sect", "option1", mystr("splat"))
cf.set("sect", "option2", "splat")
cf.set("sect", "option2", mystr("splat"))
try:
unicode
except NameError:
pass
else:
cf.set("sect", "option1", unicode("splat"))
cf.set("sect", "option2", unicode("splat"))
def test_read_returns_file_list(self):
file1 = test_support.findfile("cfgparser.1")
# check when we pass a mix of readable and non-readable files:
cf = self.newconfig()
parsed_files = cf.read([file1, "nonexistant-file"])
self.assertEqual(parsed_files, [file1])
self.assertEqual(cf.get("Foo Bar", "foo"), "newbar")
# check when we pass only a filename:
cf = self.newconfig()
parsed_files = cf.read(file1)
self.assertEqual(parsed_files, [file1])
self.assertEqual(cf.get("Foo Bar", "foo"), "newbar")
# check when we pass only missing files:
cf = self.newconfig()
parsed_files = cf.read(["nonexistant-file"])
self.assertEqual(parsed_files, [])
# check when we pass no files:
cf = self.newconfig()
parsed_files = cf.read([])
self.assertEqual(parsed_files, [])
# shared by subclasses
def get_interpolation_config(self):
return self.fromstring(
"[Foo]\n"
"bar=something %(with1)s interpolation (1 step)\n"
"bar9=something %(with9)s lots of interpolation (9 steps)\n"
"bar10=something %(with10)s lots of interpolation (10 steps)\n"
"bar11=something %(with11)s lots of interpolation (11 steps)\n"
"with11=%(with10)s\n"
"with10=%(with9)s\n"
"with9=%(with8)s\n"
"with8=%(With7)s\n"
"with7=%(WITH6)s\n"
"with6=%(with5)s\n"
"With5=%(with4)s\n"
"WITH4=%(with3)s\n"
"with3=%(with2)s\n"
"with2=%(with1)s\n"
"with1=with\n"
"\n"
"[Mutual Recursion]\n"
"foo=%(bar)s\n"
"bar=%(foo)s\n"
"\n"
"[Interpolation Error]\n"
"name=%(reference)s\n",
# no definition for 'reference'
defaults={"getname": "%(__name__)s"})
def check_items_config(self, expected):
cf = self.fromstring(
"[section]\n"
"name = value\n"
"key: |%(name)s| \n"
"getdefault: |%(default)s|\n"
"getname: |%(__name__)s|",
defaults={"default": "<default>"})
L = list(cf.items("section"))
L.sort()
self.assertEqual(L, expected)
class ConfigParserTestCase(TestCaseBase):
    """Runs the shared tests against the interpolating ConfigParser."""
    config_class = ConfigParser.ConfigParser
    def test_interpolation(self):
        cf = self.get_interpolation_config()
        eq = self.assertEqual
        eq(cf.get("Foo", "getname"), "Foo")
        eq(cf.get("Foo", "bar"), "something with interpolation (1 step)")
        eq(cf.get("Foo", "bar9"),
           "something with lots of interpolation (9 steps)")
        eq(cf.get("Foo", "bar10"),
           "something with lots of interpolation (10 steps)")
        # 11 substitution steps exceeds the interpolation depth limit.
        self.get_error(ConfigParser.InterpolationDepthError, "Foo", "bar11")
    def test_interpolation_missing_value(self):
        cf = self.get_interpolation_config()
        e = self.get_error(ConfigParser.InterpolationError,
                           "Interpolation Error", "name")
        self.assertEqual(e.reference, "reference")
        self.assertEqual(e.section, "Interpolation Error")
        self.assertEqual(e.option, "name")
    def test_items(self):
        self.check_items_config([('default', '<default>'),
                                 ('getdefault', '|<default>|'),
                                 ('getname', '|section|'),
                                 ('key', '|value|'),
                                 ('name', 'value')])
    def test_set_nonstring_types(self):
        cf = self.newconfig()
        cf.add_section('non-string')
        cf.set('non-string', 'int', 1)
        cf.set('non-string', 'list', [0, 1, 1, 2, 3, 5, 8, 13, '%('])
        cf.set('non-string', 'dict', {'pi': 3.14159, '%(': 1,
                                      '%(list)': '%(list)'})
        cf.set('non-string', 'string_with_interpolation', '%(list)s')
        # raw=True returns the stored object untouched; an interpolating
        # get() on a non-string value raises TypeError.
        self.assertEqual(cf.get('non-string', 'int', raw=True), 1)
        self.assertRaises(TypeError, cf.get, 'non-string', 'int')
        self.assertEqual(cf.get('non-string', 'list', raw=True),
                         [0, 1, 1, 2, 3, 5, 8, 13, '%('])
        self.assertRaises(TypeError, cf.get, 'non-string', 'list')
        self.assertEqual(cf.get('non-string', 'dict', raw=True),
                         {'pi': 3.14159, '%(': 1, '%(list)': '%(list)'})
        self.assertRaises(TypeError, cf.get, 'non-string', 'dict')
        self.assertEqual(cf.get('non-string', 'string_with_interpolation',
                                raw=True), '%(list)s')
        self.assertRaises(ValueError, cf.get, 'non-string',
                          'string_with_interpolation', raw=False)
class RawConfigParserTestCase(TestCaseBase):
    """Exercise ConfigParser.RawConfigParser, which never interpolates."""
    config_class = ConfigParser.RawConfigParser

    def test_interpolation(self):
        # The raw parser hands %(...)s references back verbatim.
        parser = self.get_interpolation_config()
        self.assertEqual(parser.get("Foo", "getname"), "%(__name__)s")
        self.assertEqual(parser.get("Foo", "bar"),
                         "something %(with1)s interpolation (1 step)")
        self.assertEqual(parser.get("Foo", "bar9"),
                         "something %(with9)s lots of interpolation (9 steps)")
        self.assertEqual(parser.get("Foo", "bar10"),
                         "something %(with10)s lots of interpolation (10 steps)")
        self.assertEqual(parser.get("Foo", "bar11"),
                         "something %(with11)s lots of interpolation (11 steps)")

    def test_items(self):
        expected = [('default', '<default>'),
                    ('getdefault', '|%(default)s|'),
                    ('getname', '|%(__name__)s|'),
                    ('key', '|%(name)s|'),
                    ('name', 'value')]
        self.check_items_config(expected)

    def test_set_nonstring_types(self):
        # With no interpolation, values come back exactly as stored.
        parser = self.newconfig()
        parser.add_section('non-string')
        parser.set('non-string', 'int', 1)
        parser.set('non-string', 'list', [0, 1, 1, 2, 3, 5, 8, 13])
        parser.set('non-string', 'dict', {'pi': 3.14159})
        self.assertEqual(parser.get('non-string', 'int'), 1)
        self.assertEqual(parser.get('non-string', 'list'),
                         [0, 1, 1, 2, 3, 5, 8, 13])
        self.assertEqual(parser.get('non-string', 'dict'), {'pi': 3.14159})
class SafeConfigParserTestCase(ConfigParserTestCase):
    """Exercise SafeConfigParser's stricter interpolation and set() checks."""
    config_class = ConfigParser.SafeConfigParser

    def test_safe_interpolation(self):
        # See http://www.python.org/sf/511737
        parser = self.fromstring("[section]\n"
                                 "option1=xxx\n"
                                 "option2=%(option1)s/xxx\n"
                                 "ok=%(option1)s/%%s\n"
                                 "not_ok=%(option2)s/%%s")
        self.assertEqual(parser.get("section", "ok"), "xxx/%s")
        self.assertEqual(parser.get("section", "not_ok"), "xxx/xxx/%s")

    def test_set_malformatted_interpolation(self):
        parser = self.fromstring("[sect]\n"
                                 "option1=foo\n")
        self.assertEqual(parser.get('sect', "option1"), "foo")
        # A stray % that is neither %% nor %(...)s is rejected by set().
        self.assertRaises(ValueError, parser.set, "sect", "option1", "%foo")
        self.assertRaises(ValueError, parser.set, "sect", "option1", "foo%")
        self.assertRaises(ValueError, parser.set, "sect", "option1", "f%oo")
        # The failed assignments must not have clobbered the stored value.
        self.assertEqual(parser.get('sect', "option1"), "foo")

    def test_set_nonstring_types(self):
        parser = self.fromstring("[sect]\n"
                                 "option1=foo\n")
        # Check that we get a TypeError when setting non-string values
        # in an existing section:
        for value in (1, 1.0, object()):
            self.assertRaises(TypeError, parser.set, "sect", "option1", value)
        for value in (1, 1.0, object()):
            self.assertRaises(TypeError, parser.set, "sect", "option2", value)

    def test_add_section_default_1(self):
        parser = self.newconfig()
        self.assertRaises(ValueError, parser.add_section, "default")

    def test_add_section_default_2(self):
        parser = self.newconfig()
        self.assertRaises(ValueError, parser.add_section, "DEFAULT")
class SortedTestCase(RawConfigParserTestCase):
    """Check that a sorted dict_type makes write() emit everything sorted."""

    def newconfig(self, defaults=None):
        # Keep a handle on the parser so test_sorted can call write() on it.
        self.cf = self.config_class(defaults=defaults, dict_type=SortedDict)
        return self.cf

    def test_sorted(self):
        # Sections and options are fed in deliberately out of order.
        self.fromstring("[b]\n"
                        "o4=1\n"
                        "o3=2\n"
                        "o2=3\n"
                        "o1=4\n"
                        "[a]\n"
                        "k=v\n")
        buffer = StringIO.StringIO()
        self.cf.write(buffer)
        expected = ("[a]\n"
                    "k = v\n\n"
                    "[b]\n"
                    "o1 = 4\n"
                    "o2 = 3\n"
                    "o3 = 2\n"
                    "o4 = 1\n\n")
        self.assertEquals(buffer.getvalue(), expected)
def test_main():
    """Run every test case in this module via the stdlib test harness."""
    cases = (
        ConfigParserTestCase,
        RawConfigParserTestCase,
        SafeConfigParserTestCase,
        SortedTestCase,
    )
    test_support.run_unittest(*cases)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    test_main()
| {
"content_hash": "88d6b93dbfe1b8ee63fbc8779e5ab11d",
"timestamp": "",
"source": "github",
"line_count": 490,
"max_line_length": 79,
"avg_line_length": 37.659183673469386,
"alnum_prop": 0.5130873028775809,
"repo_name": "MalloyPower/parsing-python",
"id": "a8b5d7c389ad4ed48a5db1bda3668173c8b2199b",
"size": "18453",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "front-end/testsuite-python-lib/Python-2.6/Lib/test/test_cfgparser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1963"
},
{
"name": "Lex",
"bytes": "238458"
},
{
"name": "Makefile",
"bytes": "4513"
},
{
"name": "OCaml",
"bytes": "412695"
},
{
"name": "Python",
"bytes": "17319"
},
{
"name": "Rascal",
"bytes": "523063"
},
{
"name": "Yacc",
"bytes": "429659"
}
],
"symlink_target": ""
} |
from django.db import models, migrations
import markupfield.fields
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
    """Initial migration for the sponsors app: creates the Sponsor model."""

    dependencies = [
        # The user model is swappable, so depend on whatever AUTH_USER_MODEL is.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("companies", "0001_initial"),
    ]
    operations = [
        migrations.CreateModel(
            name="Sponsor",
            fields=[
                (
                    "id",
                    models.AutoField(
                        primary_key=True,
                        auto_created=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "created",
                    models.DateTimeField(
                        db_index=True, default=django.utils.timezone.now, blank=True
                    ),
                ),
                (
                    "updated",
                    models.DateTimeField(default=django.utils.timezone.now, blank=True),
                ),
                (
                    "content",
                    markupfield.fields.MarkupField(rendered_field=True, blank=True),
                ),
                (
                    "content_markup_type",
                    models.CharField(
                        max_length=30,
                        choices=[
                            ("", "--"),
                            ("html", "html"),
                            ("plain", "plain"),
                            ("markdown", "markdown"),
                            ("restructuredtext", "restructuredtext"),
                        ],
                        default="restructuredtext",
                        blank=True,
                    ),
                ),
                ("is_published", models.BooleanField(db_index=True, default=False)),
                (
                    "featured",
                    models.BooleanField(
                        help_text="Check to include Sponsor in feature rotation",
                        db_index=True,
                        default=False,
                    ),
                ),
                # Denormalized cache of the rendered markup, managed by MarkupField.
                ("_content_rendered", models.TextField(editable=False)),
                (
                    "company",
                    models.ForeignKey(to="companies.Company", on_delete=models.CASCADE),
                ),
                (
                    "creator",
                    models.ForeignKey(
                        null=True,
                        to=settings.AUTH_USER_MODEL,
                        related_name="sponsors_sponsor_creator",
                        blank=True,
                        on_delete=models.CASCADE,
                    ),
                ),
                (
                    "last_modified_by",
                    models.ForeignKey(
                        null=True,
                        to=settings.AUTH_USER_MODEL,
                        related_name="sponsors_sponsor_modified",
                        blank=True,
                        on_delete=models.CASCADE,
                    ),
                ),
            ],
            options={
                "verbose_name": "sponsor",
                "verbose_name_plural": "sponsors",
            },
            bases=(models.Model,),
        ),
    ]
| {
"content_hash": "c483b9d0fb309f81f5a86d2a0752946a",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 88,
"avg_line_length": 34.71134020618557,
"alnum_prop": 0.3724383724383724,
"repo_name": "proevo/pythondotorg",
"id": "2908c117244a2aa1a11b0eeee4741eebe7875929",
"size": "3367",
"binary": false,
"copies": "3",
"ref": "refs/heads/dependabot/pip/django-allauth-0.51.0",
"path": "sponsors/migrations/0001_initial.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "711916"
},
{
"name": "JavaScript",
"bytes": "314514"
},
{
"name": "Makefile",
"bytes": "6811"
},
{
"name": "Python",
"bytes": "1448691"
},
{
"name": "Ruby",
"bytes": "218314"
},
{
"name": "Shell",
"bytes": "6730"
}
],
"symlink_target": ""
} |
import os
from glob import glob
from ievv_opensource.utils.ievvbuildstatic import pluginbase
from ievv_opensource.utils.ievvbuildstatic.filepath import \
AbstractDjangoAppPath
from ievv_opensource.utils.ievvbuildstatic.watcher import ProcessWatchConfig
from ievv_opensource.utils.shellcommandmixin import (ShellCommandError,
ShellCommandMixin)
from .gzip_compress_mixin import GzipCompressMixin
class Plugin(pluginbase.Plugin, ShellCommandMixin, GzipCompressMixin):
    """
    Webpack builder plugin.

    Runs the app's ``jsbuild`` npm script (``jsbuild-production`` in
    production mode) and can optionally gzip-compress the resulting
    ``.js`` files.

    Examples:
        Simple example::

            IEVVTASKS_BUILDSTATIC_APPS = ievvbuildstatic.config.Apps(
                ievvbuildstatic.config.App(
                    appname='demoapp',
                    version='1.0.0',
                    plugins=[
                        ievvbuildstatic.npmrun_jsbuild.Plugin(),
                    ]
                )
            )

        Webpack example::
            Install webpack:

                $ yarn add webpack

            Add the following to your package.json::

                {
                    ...
                    "scripts": {
                        ...
                        "jsbuild": "webpack --config webpack.config.js",
                        "jsbuild-production": "webpack --config webpack.config.js -p"
                        ...
                    }
                    ...
                }

            Create a webpack.config.js with something like this::

                let path = require('path');
                const isProduction = process.env.IEVV_BUILDSTATIC_MODE == 'production';
                const appconfig = require("./ievv_buildstatic.appconfig.json");
                console.log(isProduction);
                console.log(appconfig);
                let webpackConfig = {
                    entry: './scripts/javascript/ievv_jsbase/ievv_jsbase_core.js',
                    output: {
                        filename: 'ievv_jsbase_core.js',
                        path: path.resolve(appconfig.destinationfolder, 'scripts')
                    },
                    module: {
                        loaders: [
                            {
                                test: /.jsx?$/,
                                loader: 'babel-loader',
                                // exclude: /node_modules/
                                include: [
                                    path.resolve(__dirname, "scripts/javascript/ievv_jsbase"),
                                ]
                            }
                        ]
                    }
                };
                if(isProduction) {
                    webpackConfig.devtool = 'source-map';
                } else {
                    webpackConfig.devtool = 'cheap-module-eval-source-map';
                    webpackConfig.output.pathinfo = true;
                }
                module.exports = webpackConfig;
    """
    name = 'npmrun_jsbuild'
    default_group = 'js'

    def __init__(self,
                 extra_import_paths=None,
                 extra_import_aliases=None,
                 gzip=False, gzip_compresslevel=9,
                 **kwargs):
        """
        Args:
            extra_import_paths (list, optional): List of extra javascript import paths. Defaults to None.
            extra_import_aliases (list, optional): Mapping of extra import aliases. Defaults to None.
            gzip (bool, optional): Make a .js.gz version of the file in --production mode. This is added in
                addition to the .js files (same filename, just with .gz at the end).
            gzip_compresslevel (int, optional): Gzip compression level.
                A number between 0 and 9. Higher is better compression,
                but slower to compress. Defaults to 9.
        """
        super(Plugin, self).__init__(**kwargs)
        self.extra_import_paths = extra_import_paths or []
        self.extra_import_aliases = extra_import_aliases or {}
        self.gzip = gzip
        # Bugfix: this was previously stored as ``self.gzip_compresslevel``,
        # which shadowed the ``gzip_compresslevel()`` accessor method below
        # and made ``self.gzip_compresslevel()`` raise
        # ``TypeError: 'int' object is not callable``. Store it privately
        # so the accessor (the mixin-facing interface, parallel to
        # ``gzip_compression_enabled()``) works.
        self._gzip_compresslevel = gzip_compresslevel

    def gzip_compression_enabled(self):
        """Gzip only when requested AND building in --production mode."""
        return self.gzip and self.app.apps.is_in_production_mode()

    def gzip_compresslevel(self):
        """Return the configured gzip compression level (0-9)."""
        return self._gzip_compresslevel

    def get_filepaths_to_gzip(self):
        """Return all built ``.js`` files below the app destination folder."""
        return glob(os.path.join(self.app.get_destination_path(), '**/*.js'), recursive=True)

    def get_default_import_paths(self):
        """Default javascript import paths. Subclasses may override."""
        return []

    def get_import_paths(self):
        """Default import paths followed by the configured extra paths."""
        return self.get_default_import_paths() + self.extra_import_paths

    def _get_import_paths_as_strlist(self):
        """Resolve import paths to plain strings (AbstractDjangoAppPath -> abspath)."""
        import_paths = []
        for path in self.get_import_paths():
            if isinstance(path, AbstractDjangoAppPath):
                path = path.abspath
            import_paths.append(path)
        return import_paths

    def _get_extra_import_aliases(self):
        """Resolve alias targets to plain strings (AbstractDjangoAppPath -> abspath)."""
        import_aliases = {}
        for alias, path in self.extra_import_aliases.items():
            if isinstance(path, AbstractDjangoAppPath):
                path = path.abspath
            import_aliases[alias] = path
        return import_aliases

    def install(self):
        """Expose import paths/aliases to the js build via the app JSON config."""
        self.app.add_pluginconfig_to_json_config(
            plugin_name=self.name,
            config_dict={
                'import_paths': self._get_import_paths_as_strlist(),
                'extra_import_aliases': self._get_extra_import_aliases()
            }
        )

    def get_npm_script(self):
        """Name of the npm script to run for a one-shot build."""
        if self.app.apps.is_in_production_mode():
            return 'jsbuild-production'
        else:
            return 'jsbuild'

    def get_npm_watch_script(self):
        """Name of the npm script to run in watch mode."""
        return 'jsbuild-watch'

    def run(self):
        """Run the build script once; gzip the output only on success."""
        npm_script = self.get_npm_script()
        about = '"npm run {npm_script}" for {appname!r}'.format(
            npm_script=npm_script,
            appname=self.app.appname
        )
        self.get_logger().command_start('Running {about}'.format(
            about=about))
        try:
            self.run_shell_command('npm',
                                   args=['run', npm_script],
                                   _cwd=self.app.get_source_path())
        except ShellCommandError:
            self.get_logger().command_error('{} FAILED!'.format(about))
        else:
            self.gzip_compress_files()
            self.get_logger().command_success('{} succeeded :)'.format(about))

    def __str__(self):
        # NOTE(review): ``sourcefolder`` is not defined in this class —
        # presumably provided by pluginbase.Plugin; confirm before relying
        # on it.
        return '{}({})'.format(super(Plugin, self).__str__(), self.sourcefolder)

    def run_watcher_process(self):
        """Run the watch script in the foreground until it exits or Ctrl-C."""
        about = '"npm run {scriptname}" for {appname!r}'.format(
            scriptname=self.get_npm_watch_script(),
            appname=self.app.appname)
        self.get_logger().info(
            'Starting watcher process: {about}.'.format(about=about))
        try:
            self.app.get_installer('npm').run_packagejson_script(script=self.get_npm_watch_script())
        except ShellCommandError:
            self.get_logger().command_error('{} FAILED!'.format(about))
        except KeyboardInterrupt:
            # A Ctrl-C while watching is a normal shutdown, not an error.
            pass

    def watch(self):
        """Return a watch config only if package.json defines the watch script."""
        if self.app.get_installer('npm').has_npm_script(self.get_npm_watch_script()):
            return ProcessWatchConfig(
                plugin=self)
        else:
            return None
| {
"content_hash": "9bf0396830eb05ca141feb6ee7f52d8d",
"timestamp": "",
"source": "github",
"line_count": 207,
"max_line_length": 107,
"avg_line_length": 35.792270531400966,
"alnum_prop": 0.526386826832231,
"repo_name": "appressoas/ievv_opensource",
"id": "ff95da081b5184521702f482a22898a5f5490113",
"size": "7409",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ievv_opensource/utils/ievvbuildstatic/npmrun_jsbuild.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CoffeeScript",
"bytes": "199"
},
{
"name": "Dockerfile",
"bytes": "162"
},
{
"name": "HTML",
"bytes": "7544"
},
{
"name": "JavaScript",
"bytes": "719"
},
{
"name": "Less",
"bytes": "27"
},
{
"name": "Python",
"bytes": "614046"
},
{
"name": "SCSS",
"bytes": "199"
},
{
"name": "Shell",
"bytes": "141"
},
{
"name": "TypeScript",
"bytes": "254"
}
],
"symlink_target": ""
} |
from ..utils import Scraper
from bs4 import BeautifulSoup
from collections import OrderedDict
from queue import Queue
from threading import Thread, Lock
from time import time
import http.cookiejar
import json
import os
import re
import sys
class Courses:
    """A scraper for UofT's Course Finder web service.
    Course Finder is located at http://coursefinder.utoronto.ca/.
    """
    # Base URL for all Course Finder requests.
    host = 'http://coursefinder.utoronto.ca/course-search/search'
    # Shared cookie jar so the session survives across requests.
    cookies = http.cookiejar.CookieJar()
    # Size of the concurrent download worker pool used by scrape().
    threads = 32
    @staticmethod
    def scrape(location='.'):
        """Update the local JSON files for this scraper."""
        Scraper.logger.info('Courses initialized.')
        urls = Courses.search()
        total = len(urls)
        ts = time()
        queue = Queue()
        # Start the daemon worker pool; workers pull (course_id, url, total)
        # tuples from the queue (see CourseFinderWorker.run).
        for x in range(Courses.threads):
            worker = CourseFinderWorker(queue)
            worker.daemon = True
            worker.start()
        Scraper.logger.info('Queued %d courses.' % total)
        for x in urls:
            # The course id is embedded after "offImg" in the first cell of
            # each search-result row; everything up to the first '"' is the id.
            course_id = re.search('offImg(.*)', x[0]).group(1).split('"')[0]
            url = '%s/courseSearch/coursedetails/%s' % (
                Courses.host,
                course_id
            )
            queue.put((course_id, url, total))
        queue.join()
        Scraper.logger.info('Took %.2fs to retreive course info.' % (
            time() - ts
        ))
        # Workers record False for pages that reported a missing course.
        for course in CourseFinderWorker.all_courses:
            if course != False:
                Scraper.save_json(course, location, course['id'])
        Scraper.logger.info('Courses completed.')
    @staticmethod
    def search(query='', requirements=''):
        """Perform a search and return the data as a dict."""
        url = '%s/courseSearch/course/search' % Courses.host
        data = {
            'queryText': query,
            'requirements': requirements,
            'campusParam': 'St. George,Scarborough,Mississauga'
        }
        # 'aaData' is the DataTables payload: one row per matching course.
        json = Scraper.get(url, params=data, cookies=Courses.cookies, json=True)
        return json['aaData']
    @staticmethod
    def parse_course_html(course_id, html):
        """Create JSON files from the HTML pages downloaded."""
        # False signals a course page that reported "does not exist";
        # scrape() filters these out before saving.
        if "The course you are trying to access does not exist" in \
                html.decode('utf-8'):
            return False
        soup = BeautifulSoup(html, "html.parser")
        # Things that appear on all courses
        # NOTE(review): the u19/u23/u32/... ids below are element ids in
        # Course Finder's generated markup — presumably stable, but they will
        # break silently if the site is regenerated; verify periodically.
        title = soup.find(id="u19")
        title_name = title.find_all("span",
                                    class_="uif-headerText-span")[0].get_text()
        course_code = course_id[:-5]
        course_name = title_name[10:]
        division = soup.find(id="u23").find_all("span", id="u23")[0] \
            .get_text().strip()
        description = soup.find(id="u32").find_all("span", id="u32")[0] \
            .get_text().strip()
        department = soup.find(id="u41").find_all("span", id="u41")[0] \
            .get_text().strip()
        course_level = soup.find(id="u86").find_all("span", id="u86")[0] \
            .get_text().strip()
        # Keep only the numeric level prefix, e.g. "300" from "300/B-level".
        course_level = course_level[:3]
        course_level = int(course_level)
        campus = soup.find(id="u149").find_all("span", id="u149")[0] \
            .get_text().strip()
        # Normalize campus names to the short codes used across the dataset.
        if campus == "St. George":
            campus = "UTSG"
        elif campus == "Mississauga":
            campus = "UTM"
        elif campus == "Scarborough":
            campus = "UTSC"
        term = soup.find(id="u158").find_all("span", id="u158")[0] \
            .get_text().strip()
        # Things that don't appear on all courses
        as_breadth = soup.find(id="u122")
        breadths = []
        if as_breadth is not None:
            as_breadth = as_breadth.find_all("span", id="u122")[0] \
                .get_text().strip()
            # Breadth categories are the digits 1-5 embedded in the text.
            for ch in as_breadth:
                if ch in "12345":
                    breadths.append(int(ch))
        breadths = sorted(breadths)
        exclusions = soup.find(id="u68")
        if exclusions is not None:
            exclusions = exclusions.find_all("span", id="u68")[0] \
                .get_text().strip()
        else:
            exclusions = ""
        prereq = soup.find(id="u50")
        if prereq is not None:
            prereq = prereq.find_all("span", id="u50")[0].get_text().strip()
        else:
            prereq = ""
        # Meeting Sections
        meeting_table = soup.find(id="u172")
        trs = []
        if meeting_table is not None:
            trs = meeting_table.find_all("tr")
        sections = []
        for tr in trs:
            tds = tr.find_all("td")
            if len(tds) > 0:
                code = tds[0].get_text().strip()
                # Times come as alternating "day time" word pairs; re-join
                # them into "day start-end" strings.
                raw_times = tds[1].get_text().replace(
                    'Alternate week', '').strip().split(" ")
                times = []
                for i in range(0, len(raw_times) - 1, 2):
                    times.append(raw_times[i] + " " + raw_times[i + 1])
                instructors = BeautifulSoup(str(tds[2]).replace("<br>", "\n"),
                                            "html.parser")
                instructors = instructors.get_text().split("\n")
                instructors = \
                    list(filter(None, [x.strip() for x in instructors]))
                # Locations also come as alternating word pairs
                # ("building room").
                raw_locations = tds[3].get_text().strip().split(" ")
                locations = []
                for i in range(0, len(raw_locations) - 1, 2):
                    locations.append(
                        raw_locations[i] + " " + raw_locations[i + 1])
                class_size = tds[4].get_text().strip()
                time_data = []
                for i in range(len(times)):
                    info = times[i].split(" ")
                    day = info[0]
                    hours = info[1].split("-")
                    location = ""
                    # There may be fewer locations than meeting times.
                    try:
                        location = locations[i]
                    except IndexError:
                        location = ""
                    # Convert "HH:MM" to seconds since midnight.
                    for i in range(len(hours)):
                        x = hours[i].split(':')
                        hours[i] = (60 * 60 * int(x[0])) + (int(x[1]) * 60)
                    time_data.append(OrderedDict([
                        ("day", day),
                        ("start", hours[0]),
                        ("end", hours[1]),
                        ("duration", hours[1] - hours[0]),
                        ("location", location)
                    ]))
                # Compress e.g. "Lecture 0101" into "L0101".
                code = code.split(" ")
                code = code[0][0] + code[1]
                data = OrderedDict([
                    ("code", code),
                    ("instructors", instructors),
                    ("times", time_data),
                    ("size", int(class_size)),
                    ("enrolment", 0)
                ])
                sections.append(data)
        # Dictionary creation
        course = OrderedDict([
            ("id", course_id),
            ("code", course_code),
            ("name", course_name),
            ("description", description),
            ("division", division),
            ("department", department),
            ("prerequisites", prereq),
            ("exclusions", exclusions),
            ("level", course_level),
            ("campus", campus),
            ("term", term),
            ("breadths", breadths),
            ("meeting_sections", sections)
        ])
        return course
class CourseFinderWorker(Thread):
    """Daemon thread that downloads and parses course pages from a shared queue."""
    # Shared across all workers; mutations are guarded by ``lock``.
    all_courses = []
    done = 0
    lock = Lock()

    def __init__(self, queue):
        """Keep a reference to the shared queue of (course_id, url, total) jobs."""
        super().__init__()
        self.queue = queue

    def run(self):
        """Consume jobs forever; workers are daemons, so they die with main."""
        while True:
            course_id, url, total = self.queue.get()
            html = Scraper.get(url, Courses.cookies)
            course = Courses.parse_course_html(course_id, html)
            # Use the lock as a context manager so it is released even if an
            # exception escapes; the previous bare acquire()/release() pair
            # would have left the lock held forever and stalled every worker.
            with CourseFinderWorker.lock:
                CourseFinderWorker.all_courses.append(course)
                CourseFinderWorker.done += 1
                Scraper.flush_percentage(CourseFinderWorker.done / total)
            self.queue.task_done()
| {
"content_hash": "2fcd6af36382dbb7f586902cdd7ced98",
"timestamp": "",
"source": "github",
"line_count": 265,
"max_line_length": 80,
"avg_line_length": 30.70943396226415,
"alnum_prop": 0.4861145244531826,
"repo_name": "g3wanghc/uoft-scrapers",
"id": "617faaef602b4662f18e062ebe30a579f5476163",
"size": "8138",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "uoftscrapers/scrapers/courses/__init__.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "85021"
}
],
"symlink_target": ""
} |
from ctypes import *
from ctypes.util import find_library
from errno import *
from functools import partial
from platform import machine, system
from stat import S_IFDIR
from traceback import print_exc
import six
# Alias the text type: ``str`` on Python 3, ``unicode`` on Python 2.
if six.PY3:
    unicode_ = str
else:
    unicode_ = unicode  # NOQA: F821 undefined name 'unicode': Needed for python3 compatibility
def ensure_unicode(value):
    """Return *value* as a text string, decoding UTF-8 bytes if necessary."""
    if isinstance(value, unicode_):
        return value
    return value.decode("utf-8")
def ensure_bytes(value):
    """Return *value* as bytes, encoding text as UTF-8 if necessary."""
    if not isinstance(value, unicode_):
        return value
    return value.encode("utf-8")
_system = system()
_machine = machine()
# Locate the fuse shared library.
# On OSX this can be provided by a number of different packages
# with slightly incompatible interfaces.
if _system == 'Darwin':
    _libfuse_path = find_library('fuse4x') or find_library('fuse')
else:
    _libfuse_path = find_library('fuse')
if not _libfuse_path:
    raise EnvironmentError('Unable to find libfuse')
if _system == 'Darwin':
    # iconv must be loaded globally first because libfuse links against it.
    _libiconv = CDLL(find_library('iconv'), RTLD_GLOBAL)  # libfuse dependency
_libfuse = CDLL(_libfuse_path)
# Check whether OSX is using the legacy "macfuse" system.
# This has a different struct layout than the newer fuse4x system.
if _system == 'Darwin' and hasattr(_libfuse, 'macfuse_version'):
    _system = 'Darwin-MacFuse'
class c_timespec(Structure):
    """ctypes mirror of the C ``struct timespec`` (whole seconds + nanoseconds)."""
    _fields_ = [('tv_sec', c_long), ('tv_nsec', c_long)]
class c_utimbuf(Structure):
    """Timestamp pair passed to the utimens handler: access + modification time."""
    _fields_ = [('actime', c_timespec), ('modtime', c_timespec)]
class c_stat(Structure):
    """ctypes mirror of ``struct stat``; ``_fields_`` is assigned per-platform below."""
    pass # Platform dependent
# Platform-specific C type aliases and struct-stat layouts. These must match
# the target OS's ABI exactly — do not reorder or retype fields.
if _system in ('Darwin', 'Darwin-MacFuse', 'FreeBSD'):
    ENOTSUP = 45
    c_dev_t = c_int32
    c_fsblkcnt_t = c_ulong
    c_fsfilcnt_t = c_ulong
    c_gid_t = c_uint32
    c_mode_t = c_uint16
    c_off_t = c_int64
    c_pid_t = c_int32
    c_uid_t = c_uint32
    # BSD xattr callbacks carry an extra position/options c_uint32 argument.
    setxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte),
                           c_size_t, c_int, c_uint32)
    getxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte),
                           c_size_t, c_uint32)
    # OSX with fuse4x uses 64-bit inodes and so has a different
    # struct layout. Other darwinish platforms use 32-bit inodes.
    if _system == 'Darwin':
        c_stat._fields_ = [
            ('st_dev', c_dev_t),
            ('st_mode', c_mode_t),
            ('st_nlink', c_uint16),
            ('st_ino', c_uint64),
            ('st_uid', c_uid_t),
            ('st_gid', c_gid_t),
            ('st_rdev', c_dev_t),
            ('st_atimespec', c_timespec),
            ('st_mtimespec', c_timespec),
            ('st_ctimespec', c_timespec),
            ('st_birthtimespec', c_timespec),
            ('st_size', c_off_t),
            ('st_blocks', c_int64),
            ('st_blksize', c_int32),
            ('st_flags', c_int32),
            ('st_gen', c_int32),
            ('st_lspare', c_int32),
            ('st_qspare', c_int64)]
    else:
        c_stat._fields_ = [
            ('st_dev', c_dev_t),
            ('st_ino', c_uint32),
            ('st_mode', c_mode_t),
            ('st_nlink', c_uint16),
            ('st_uid', c_uid_t),
            ('st_gid', c_gid_t),
            ('st_rdev', c_dev_t),
            ('st_atimespec', c_timespec),
            ('st_mtimespec', c_timespec),
            ('st_ctimespec', c_timespec),
            ('st_size', c_off_t),
            ('st_blocks', c_int64),
            ('st_blksize', c_int32)]
elif _system == 'Linux':
    ENOTSUP = 95
    c_dev_t = c_ulonglong
    c_fsblkcnt_t = c_ulonglong
    c_fsfilcnt_t = c_ulonglong
    c_gid_t = c_uint
    c_mode_t = c_uint
    c_off_t = c_longlong
    c_pid_t = c_int
    c_uid_t = c_uint
    setxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t, c_int)
    getxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t)
    _machine = machine()
    # struct stat layout differs per architecture (padding and field order).
    if _machine == 'x86_64':
        c_stat._fields_ = [
            ('st_dev', c_dev_t),
            ('st_ino', c_ulong),
            ('st_nlink', c_ulong),
            ('st_mode', c_mode_t),
            ('st_uid', c_uid_t),
            ('st_gid', c_gid_t),
            ('__pad0', c_int),
            ('st_rdev', c_dev_t),
            ('st_size', c_off_t),
            ('st_blksize', c_long),
            ('st_blocks', c_long),
            ('st_atimespec', c_timespec),
            ('st_mtimespec', c_timespec),
            ('st_ctimespec', c_timespec)]
    elif _machine == 'ppc':
        c_stat._fields_ = [
            ('st_dev', c_dev_t),
            ('st_ino', c_ulonglong),
            ('st_mode', c_mode_t),
            ('st_nlink', c_uint),
            ('st_uid', c_uid_t),
            ('st_gid', c_gid_t),
            ('st_rdev', c_dev_t),
            ('__pad2', c_ushort),
            ('st_size', c_off_t),
            ('st_blksize', c_long),
            ('st_blocks', c_longlong),
            ('st_atimespec', c_timespec),
            ('st_mtimespec', c_timespec),
            ('st_ctimespec', c_timespec)]
    else:
        # i686, use as fallback for everything else
        c_stat._fields_ = [
            ('st_dev', c_dev_t),
            ('__pad1', c_ushort),
            ('__st_ino', c_ulong),
            ('st_mode', c_mode_t),
            ('st_nlink', c_uint),
            ('st_uid', c_uid_t),
            ('st_gid', c_gid_t),
            ('st_rdev', c_dev_t),
            ('__pad2', c_ushort),
            ('st_size', c_off_t),
            ('st_blksize', c_long),
            ('st_blocks', c_longlong),
            ('st_atimespec', c_timespec),
            ('st_mtimespec', c_timespec),
            ('st_ctimespec', c_timespec),
            ('st_ino', c_ulonglong)]
else:
    raise NotImplementedError('%s is not supported.' % _system)
class c_statvfs(Structure):
    """ctypes mirror of ``struct statvfs``, filled in by the statfs handler."""
    _fields_ = [
        ('f_bsize', c_ulong),
        ('f_frsize', c_ulong),
        ('f_blocks', c_fsblkcnt_t),
        ('f_bfree', c_fsblkcnt_t),
        ('f_bavail', c_fsblkcnt_t),
        ('f_files', c_fsfilcnt_t),
        ('f_ffree', c_fsfilcnt_t),
        ('f_favail', c_fsfilcnt_t)]
# FreeBSD uses 64-bit counters and a differently ordered statvfs layout,
# so the generic definitions above are replaced here.
if _system == 'FreeBSD':
    c_fsblkcnt_t = c_uint64
    c_fsfilcnt_t = c_uint64
    setxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t, c_int)
    getxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t)
    class c_statvfs(Structure):
        """FreeBSD ``struct statvfs`` layout (avail/free fields come first)."""
        _fields_ = [
            ('f_bavail', c_fsblkcnt_t),
            ('f_bfree', c_fsblkcnt_t),
            ('f_blocks', c_fsblkcnt_t),
            ('f_favail', c_fsfilcnt_t),
            ('f_ffree', c_fsfilcnt_t),
            ('f_files', c_fsfilcnt_t),
            ('f_bsize', c_ulong),
            ('f_flag', c_ulong),
            ('f_frsize', c_ulong)]
class fuse_file_info(Structure):
    """ctypes mirror of ``struct fuse_file_info`` (per-open-file state).
    The single-bit fields (direct_io, keep_cache, flush) are C bitfields.
    """
    _fields_ = [
        ('flags', c_int),
        ('fh_old', c_ulong),
        ('writepage', c_int),
        ('direct_io', c_uint, 1),
        ('keep_cache', c_uint, 1),
        ('flush', c_uint, 1),
        ('padding', c_uint, 29),
        ('fh', c_uint64),
        ('lock_owner', c_uint64)]
class fuse_context(Structure):
    """ctypes mirror of ``struct fuse_context``: identity of the calling process."""
    _fields_ = [
        ('fuse', c_voidp),
        ('uid', c_uid_t),
        ('gid', c_gid_t),
        ('pid', c_pid_t),
        ('private_data', c_voidp)]
class fuse_operations(Structure):
    """ctypes mirror of ``struct fuse_operations``: one callback slot per op.
    Slots typed plain ``c_voidp`` are deprecated operations that FUSE.__init__
    never fills in. Field order must match the C struct exactly.
    """
    _fields_ = [
        ('getattr', CFUNCTYPE(c_int, c_char_p, POINTER(c_stat))),
        ('readlink', CFUNCTYPE(c_int, c_char_p, POINTER(c_byte), c_size_t)),
        ('getdir', c_voidp), # Deprecated, use readdir
        ('mknod', CFUNCTYPE(c_int, c_char_p, c_mode_t, c_dev_t)),
        ('mkdir', CFUNCTYPE(c_int, c_char_p, c_mode_t)),
        ('unlink', CFUNCTYPE(c_int, c_char_p)),
        ('rmdir', CFUNCTYPE(c_int, c_char_p)),
        ('symlink', CFUNCTYPE(c_int, c_char_p, c_char_p)),
        ('rename', CFUNCTYPE(c_int, c_char_p, c_char_p)),
        ('link', CFUNCTYPE(c_int, c_char_p, c_char_p)),
        ('chmod', CFUNCTYPE(c_int, c_char_p, c_mode_t)),
        ('chown', CFUNCTYPE(c_int, c_char_p, c_uid_t, c_gid_t)),
        ('truncate', CFUNCTYPE(c_int, c_char_p, c_off_t)),
        ('utime', c_voidp), # Deprecated, use utimens
        ('open', CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
        ('read', CFUNCTYPE(c_int, c_char_p, POINTER(c_byte), c_size_t, c_off_t,
                           POINTER(fuse_file_info))),
        ('write', CFUNCTYPE(c_int, c_char_p, POINTER(c_byte), c_size_t, c_off_t,
                            POINTER(fuse_file_info))),
        ('statfs', CFUNCTYPE(c_int, c_char_p, POINTER(c_statvfs))),
        ('flush', CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
        ('release', CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
        ('fsync', CFUNCTYPE(c_int, c_char_p, c_int, POINTER(fuse_file_info))),
        # setxattr_t/getxattr_t are platform dependent (see above).
        ('setxattr', setxattr_t),
        ('getxattr', getxattr_t),
        ('listxattr', CFUNCTYPE(c_int, c_char_p, POINTER(c_byte), c_size_t)),
        ('removexattr', CFUNCTYPE(c_int, c_char_p, c_char_p)),
        ('opendir', CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
        ('readdir', CFUNCTYPE(c_int, c_char_p, c_voidp, CFUNCTYPE(c_int, c_voidp,
                              c_char_p, POINTER(c_stat), c_off_t), c_off_t, POINTER(fuse_file_info))),
        ('releasedir', CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
        ('fsyncdir', CFUNCTYPE(c_int, c_char_p, c_int, POINTER(fuse_file_info))),
        ('init', CFUNCTYPE(c_voidp, c_voidp)),
        ('destroy', CFUNCTYPE(c_voidp, c_voidp)),
        ('access', CFUNCTYPE(c_int, c_char_p, c_int)),
        ('create', CFUNCTYPE(c_int, c_char_p, c_mode_t, POINTER(fuse_file_info))),
        ('ftruncate', CFUNCTYPE(c_int, c_char_p, c_off_t, POINTER(fuse_file_info))),
        ('fgetattr', CFUNCTYPE(c_int, c_char_p, POINTER(c_stat),
                               POINTER(fuse_file_info))),
        ('lock', CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info), c_int, c_voidp)),
        ('utimens', CFUNCTYPE(c_int, c_char_p, POINTER(c_utimbuf))),
        ('bmap', CFUNCTYPE(c_int, c_char_p, c_size_t, POINTER(c_ulonglong)))]
def time_of_timespec(ts):
    """Convert a timespec (tv_sec/tv_nsec) to a float of epoch seconds.

    Uses true (float) division: the previous ``ts.tv_nsec / 10 ** 9`` was
    integer floor division under Python 2 (which this module supports via
    six), silently discarding the sub-second part. Results on Python 3 are
    unchanged.
    """
    return ts.tv_sec + ts.tv_nsec / 1E9
def set_st_attrs(st, attrs):
    """Copy stat attributes from the dict *attrs* onto the stat struct *st*.

    Time keys are expanded into the matching ``*spec`` timespec field
    (whole seconds + nanoseconds); unknown keys are silently skipped.
    """
    time_keys = ('st_atime', 'st_mtime', 'st_ctime')
    for name, value in attrs.items():
        if name in time_keys:
            spec = getattr(st, name + 'spec')
            spec.tv_sec = int(value)
            # Read tv_sec back so truncation by the C field is accounted for.
            spec.tv_nsec = int((value - spec.tv_sec) * 10 ** 9)
        elif hasattr(st, name):
            setattr(st, name, value)
# Tell ctypes that the C function returns a fuse_context pointer, not an int.
_libfuse.fuse_get_context.restype = POINTER(fuse_context)
def fuse_get_context():
    """Returns a (uid, gid, pid) tuple"""
    ctxp = _libfuse.fuse_get_context()
    ctx = ctxp.contents
    return ctx.uid, ctx.gid, ctx.pid
class FUSE(object):
"""This class is the lower level interface and should not be subclassed
under normal use. Its methods are called by fuse.
Assumes API version 2.6 or later."""
def __init__(self, operations, mountpoint, raw_fi=False, **kwargs):
"""Setting raw_fi to True will cause FUSE to pass the fuse_file_info
class as is to Operations, instead of just the fh field.
This gives you access to direct_io, keep_cache, etc."""
self.operations = operations
self.raw_fi = raw_fi
args = ['fuse']
if kwargs.pop('foreground', False):
args.append('-f')
if kwargs.pop('debug', False):
args.append('-d')
if kwargs.pop('nothreads', False):
args.append('-s')
kwargs.setdefault('fsname', operations.__class__.__name__)
args.append('-o')
args.append(','.join(key if val == True else '%s=%s' % (key, val)
for key, val in list(kwargs.items())))
args.append(mountpoint)
byteargs = [arg.encode("utf-8") for arg in args]
argv = (c_char_p * len(args))(*byteargs)
fuse_ops = fuse_operations()
for name, prototype in fuse_operations._fields_:
if prototype != c_voidp and getattr(operations, name, None):
op = partial(self._wrapper_, getattr(self, name))
setattr(fuse_ops, name, prototype(op))
_libfuse.fuse_main_real(len(args), argv, pointer(fuse_ops),
sizeof(fuse_ops), None)
del self.operations # Invoke the destructor
def _wrapper_(self, func, *args, **kwargs):
"""Decorator for the methods that follow"""
try:
return func(*args, **kwargs) or 0
except OSError as e:
return -(e.errno or EFAULT)
except:
print_exc()
return -EFAULT
def getattr(self, path, buf):
return self.fgetattr(path, buf, None)
def readlink(self, path, buf, bufsize):
ret = self.operations('readlink', path)
data = create_string_buffer(ret[:bufsize - 1])
memmove(buf, data, len(data))
return 0
def mknod(self, path, mode, dev):
return self.operations('mknod', path, mode, dev)
def mkdir(self, path, mode):
return self.operations('mkdir', path, mode)
def unlink(self, path):
return self.operations('unlink', path)
def rmdir(self, path):
return self.operations('rmdir', path)
def symlink(self, source, target):
return self.operations('symlink', target, source)
def rename(self, old, new):
return self.operations('rename', old, new)
def link(self, source, target):
return self.operations('link', target, source)
def chmod(self, path, mode):
return self.operations('chmod', path, mode)
def chown(self, path, uid, gid):
return self.operations('chown', path, uid, gid)
def truncate(self, path, length):
return self.operations('truncate', path, length)
def open(self, path, fip):
fi = fip.contents
if self.raw_fi:
return self.operations('open', path, fi)
else:
fi.fh = self.operations('open', path, fi.flags)
return 0
def read(self, path, buf, size, offset, fip):
fh = fip.contents if self.raw_fi else fip.contents.fh
ret = self.operations('read', path, size, offset, fh)
if ret:
strbuf = create_string_buffer(ret)
memmove(buf, strbuf, len(strbuf))
return len(ret)
def write(self, path, buf, size, offset, fip):
data = string_at(buf, size)
fh = fip.contents if self.raw_fi else fip.contents.fh
return self.operations('write', path, data, offset, fh)
def statfs(self, path, buf):
stv = buf.contents
attrs = self.operations('statfs', path)
for key, val in list(attrs.items()):
if hasattr(stv, key):
setattr(stv, key, val)
return 0
def flush(self, path, fip):
fh = fip.contents if self.raw_fi else fip.contents.fh
return self.operations('flush', path, fh)
def release(self, path, fip):
fh = fip.contents if self.raw_fi else fip.contents.fh
return self.operations('release', path, fh)
def fsync(self, path, datasync, fip):
fh = fip.contents if self.raw_fi else fip.contents.fh
return self.operations('fsync', path, datasync, fh)
def setxattr(self, path, name, value, size, options, *args):
data = string_at(value, size)
return self.operations('setxattr', path, name, data, options, *args)
def getxattr(self, path, name, value, size, *args):
ret = self.operations('getxattr', path, name, *args)
retsize = len(ret)
buf = create_string_buffer(ret, retsize) # Does not add trailing 0
if bool(value):
if retsize > size:
return -ERANGE
memmove(value, buf, retsize)
return retsize
def listxattr(self, path, namebuf, size):
ret = self.operations('listxattr', path)
if ret:
buf = create_string_buffer('\x00'.join(ret))
else:
buf = ''
bufsize = len(buf)
if bool(namebuf):
if bufsize > size:
return -ERANGE
memmove(namebuf, buf, bufsize)
return bufsize
def removexattr(self, path, name):
return self.operations('removexattr', path, name)
def opendir(self, path, fip):
# Ignore raw_fi
fip.contents.fh = self.operations('opendir', path)
return 0
    def readdir(self, path, buf, filler, offset, fip):
        # readdir(3) callback.  The handler may yield plain names or
        # (name, attrs, offset) triples; an attrs dict is converted into a C
        # stat struct so the kernel can skip a separate getattr round-trip.
        # Ignore raw_fi
        for item in self.operations('readdir', path, fip.contents.fh):
            if isinstance(item, str):
                # Bare name: no stat info, offset 0 means "fill sequentially".
                name, st, offset = item, None, 0
            else:
                name, attrs, offset = item
                if attrs:
                    st = c_stat()
                    set_st_attrs(st, attrs)
                else:
                    st = None
            # A non-zero return from the kernel's filler means its buffer is
            # full: stop iterating.
            if filler(buf, ensure_bytes(name), st, offset) != 0:
                break
        return 0
def releasedir(self, path, fip):
# Ignore raw_fi
return self.operations('releasedir', path, fip.contents.fh)
def fsyncdir(self, path, datasync, fip):
# Ignore raw_fi
return self.operations('fsyncdir', path, datasync, fip.contents.fh)
def init(self, conn):
return self.operations('init', '/')
def destroy(self, private_data):
return self.operations('destroy', '/')
def access(self, path, amode):
return self.operations('access', path, amode)
def create(self, path, mode, fip):
fi = fip.contents
if self.raw_fi:
return self.operations('create', path, mode, fi)
else:
fi.fh = self.operations('create', path, mode)
return 0
def ftruncate(self, path, length, fip):
fh = fip.contents if self.raw_fi else fip.contents.fh
return self.operations('truncate', path, length, fh)
    def fgetattr(self, path, buf, fip):
        # fgetattr(2) callback: like getattr but with an optional open file
        # handle; dispatched to the handler's 'getattr'.
        # Zero the struct first so unset fields read as 0.
        memset(buf, 0, sizeof(c_stat))
        st = buf.contents
        # fip may be a NULL pointer; only dereference it when it is set.
        fh = fip and (fip.contents if self.raw_fi else fip.contents.fh)
        attrs = self.operations('getattr', path, fh)
        set_st_attrs(st, attrs)
        return 0
def lock(self, path, fip, cmd, lock):
fh = fip.contents if self.raw_fi else fip.contents.fh
return self.operations('lock', path, fh, cmd, lock)
    def utimens(self, path, buf):
        # utimens(2) callback: convert the C time buffer into an
        # (atime, mtime) pair of floats, or pass None for "use current time".
        if buf:
            atime = time_of_timespec(buf.contents.actime)
            mtime = time_of_timespec(buf.contents.modtime)
            times = (atime, mtime)
        else:
            times = None
        return self.operations('utimens', path, times)
def bmap(self, path, blocksize, idx):
return self.operations('bmap', path, blocksize, idx)
class Operations(object):
    """This class should be subclassed and passed as an argument to FUSE on
    initialization. All operations should raise an OSError exception on
    error.

    When in doubt of what an operation should do, check the FUSE header
    file or the corresponding system call man page."""

    def __call__(self, op, *args):
        # Dispatch by operation name; unknown operations report EFAULT.
        if not hasattr(self, op):
            raise OSError(EFAULT, '')
        return getattr(self, op)(*args)

    def access(self, path, amode):
        return 0

    # Setting a callback to None tells FUSE the operation is unsupported.
    bmap = None

    def chmod(self, path, mode):
        raise OSError(EROFS, '')

    def chown(self, path, uid, gid):
        raise OSError(EROFS, '')

    def create(self, path, mode, fi=None):
        """When raw_fi is False (default case), fi is None and create should
        return a numerical file handle.

        When raw_fi is True the file handle should be set directly by create
        and return 0."""
        raise OSError(EROFS, '')

    def destroy(self, path):
        """Called on filesystem destruction. Path is always /"""
        pass

    def flush(self, path, fh):
        return 0

    def fsync(self, path, datasync, fh):
        return 0

    def fsyncdir(self, path, datasync, fh):
        return 0

    def getattr(self, path, fh=None):
        """Returns a dictionary with keys identical to the stat C structure
        of stat(2).

        st_atime, st_mtime and st_ctime should be floats.

        NOTE: There is an incompatibility between Linux and Mac OS X
        concerning st_nlink of directories. Mac OS X counts all files inside
        the directory, while Linux counts only the subdirectories."""
        if path != '/':
            raise OSError(ENOENT, '')
        return dict(st_mode=(S_IFDIR | 0o755), st_nlink=2)

    def getxattr(self, path, name, position=0):
        raise OSError(ENOTSUP, '')

    def init(self, path):
        """Called on filesystem initialization. Path is always /

        Use it instead of __init__ if you start threads on initialization."""
        pass

    def link(self, target, source):
        raise OSError(EROFS, '')

    def listxattr(self, path):
        return []

    # Locking disabled by default (see the note on `bmap` above).
    lock = None

    def mkdir(self, path, mode):
        raise OSError(EROFS, '')

    def mknod(self, path, mode, dev):
        raise OSError(EROFS, '')

    def open(self, path, flags):
        """When raw_fi is False (default case), open should return a numerical
        file handle.

        When raw_fi is True the signature of open becomes:
            open(self, path, fi)

        and the file handle should be set directly."""
        return 0

    def opendir(self, path):
        """Returns a numerical file handle."""
        return 0

    def read(self, path, size, offset, fh):
        """Returns a string containing the data requested."""
        raise OSError(ENOENT, '')

    def readdir(self, path, fh):
        """Can return either a list of names, or a list of
        (name, attrs, offset) tuples. attrs is a dict as in getattr."""
        return ['.', '..']

    def readlink(self, path):
        raise OSError(ENOENT, '')

    def release(self, path, fh):
        return 0

    def releasedir(self, path, fh):
        return 0

    def removexattr(self, path, name):
        raise OSError(ENOTSUP, '')

    def rename(self, old, new):
        raise OSError(EROFS, '')

    def rmdir(self, path):
        raise OSError(EROFS, '')

    def setxattr(self, path, name, value, options, position=0):
        raise OSError(ENOTSUP, '')

    def statfs(self, path):
        """Returns a dictionary with keys identical to the statvfs C structure
        of statvfs(3).

        On Mac OS X f_bsize and f_frsize must be a power of 2
        (minimum 512)."""
        return {}

    def symlink(self, target, source):
        raise OSError(EROFS, '')

    def truncate(self, path, length, fh=None):
        raise OSError(EROFS, '')

    def unlink(self, path):
        raise OSError(EROFS, '')

    def utimens(self, path, times=None):
        """Times is a (atime, mtime) tuple. If None use current time."""
        return 0

    def write(self, path, data, offset, fh):
        raise OSError(EROFS, '')
class LoggingMixIn:
    """Mix-in that traces entry and exit of every delegated operation."""

    def __call__(self, op, path, *args):
        print('->', op, path, repr(args))
        result = '[Unknown Error]'
        try:
            result = getattr(self, op)(path, *args)
            return result
        except OSError as exc:
            result = str(exc)
            raise
        finally:
            print('<-', op, repr(result))
| {
"content_hash": "37e10136c9a53a1a2c918340bfa048f8",
"timestamp": "",
"source": "github",
"line_count": 682,
"max_line_length": 95,
"avg_line_length": 34.03372434017595,
"alnum_prop": 0.5531859894015768,
"repo_name": "Konubinix/pyfilesystem",
"id": "fad51eba5167271a65ba0e6bf27fc4015bfbbffe",
"size": "23990",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fs/expose/fuse/fuse_ctypes.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1048053"
}
],
"symlink_target": ""
} |
import os
import warnings
import theano
import theano.sandbox.cuda
from theano import config
def set_gpu_from_theano():
    """
    This set the GPU used by PyCUDA to the same as the one used by Theano.
    """
    # Mirror Theano's GPU binding into the CUDA_DEVICE environment variable,
    # which pycuda.autoinit consults, so both libraries use the same device.
    device = None
    if config.device.startswith("gpu") and len(config.device) > 3:
        device = theano.config.device[3:]
    elif (config.init_gpu_device.startswith("gpu") and
            len(config.init_gpu_device) > 3):
        device = theano.config.init_gpu_device[3:]
    if device is not None:
        os.environ["CUDA_DEVICE"] = device
# Module-import side effect: align PyCUDA's device selection with Theano's
# before (possibly) initializing PyCUDA below.
set_gpu_from_theano()
pycuda_available = False
# If theano.sandbox.cuda don't exist, it is because we are importing
# it and it try to import this file! This mean we must init the device.
if (not hasattr(theano.sandbox, 'cuda') or
        theano.sandbox.cuda.use.device_number is None):
    try:
        import pycuda
        import pycuda.autoinit
        pycuda_available = True
    except ImportError:
        # presumably, the user wanted to use pycuda, else they wouldn't have
        # imported this module, so issue a warning that the import failed.
        warnings.warn("PyCUDA import failed in theano.misc.pycuda_init")
    except pycuda._driver.LogicError:
        # autoinit failed (e.g. CUDA_DEVICE named an unusable device); retry
        # without the explicit device unless the user forced it.
        if theano.config.force_device:
            raise
        else:
            if "CUDA_DEVICE" in os.environ:
                del os.environ["CUDA_DEVICE"]
            import pycuda.autoinit
            pycuda_available = True
else:
    # Theano already initialized the GPU: just make the driver importable.
    try:
        import pycuda.driver
        pycuda_available = True
    except ImportError:
        pass
if pycuda_available:
    if hasattr(pycuda.driver.Context, "attach"):
        # PyCUDA >= 2011.2.2: attach to the context Theano created and make
        # sure it is popped again at interpreter exit.
        pycuda.driver.Context.attach()
        import atexit
        atexit.register(pycuda.driver.Context.pop)
    else:
        # Now we always import this file when we call
        # theano.sandbox.cuda.use. So this should not happen
        # normally.
        # TODO: make this an error.
        warnings.warn("For some unknow reason, theano.misc.pycuda_init was"
                      " not imported before Theano initialized the GPU and"
                      " your PyCUDA version is 2011.2.2 or earlier."
                      " To fix the problem, import theano.misc.pycuda_init"
                      " manually before using/initializing the GPU, use the"
                      " Theano flag pycuda.init=True or use a"
                      " more recent version of PyCUDA.")
| {
"content_hash": "e2f3497c14d14bc7feb02882a1b3cef6",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 80,
"avg_line_length": 38.707692307692305,
"alnum_prop": 0.613275039745628,
"repo_name": "rizar/attention-lvcsr",
"id": "dd771fb5f51d77aea3f0f7b9aacb8ec5ea2fc451",
"size": "2516",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "libs/Theano/theano/misc/pycuda_init.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1288"
},
{
"name": "C",
"bytes": "156742"
},
{
"name": "C++",
"bytes": "209135"
},
{
"name": "CSS",
"bytes": "3500"
},
{
"name": "Cuda",
"bytes": "231732"
},
{
"name": "Gnuplot",
"bytes": "484"
},
{
"name": "HTML",
"bytes": "33356"
},
{
"name": "Jupyter Notebook",
"bytes": "191071"
},
{
"name": "Makefile",
"bytes": "973"
},
{
"name": "Python",
"bytes": "9313243"
},
{
"name": "Shell",
"bytes": "34454"
},
{
"name": "TeX",
"bytes": "102624"
}
],
"symlink_target": ""
} |
__author__ = 'tdesai'
import os
import subprocess
import sys
from twitter.common.contextutil import environment_as, temporary_file
from twitter.common.quantity import Amount, Data
from twitter.common.string import ScanfParser
from twitter.common.util.command_util import CommandUtil
class HDFSHelper(object):
  """
  This Class provides a set of functions for hdfs operations.

  NOTE: This class assumes a local hdfs or hadoop client on the path.
  """

  class InternalError(Exception): pass

  # Layout of one `hdfs dfs -ls` output line.
  PARSER = ScanfParser('%(mode)s %(dirents)s %(user)s %(group)s %(filesize)d '
                       '%(year)d-%(month)d-%(day)d %(hour)d:%(minute)d')

  def __init__(self, config, command_class=CommandUtil, heap_limit=Amount(256, Data.MB),
               use_hadoop_v1=False):
    """
    heap_limit is the maximum heap that should be allocated to the command process,
    defined using twitter.common.quantity.Data.
    use_hadoop_v1 sets the command to hadoop instead of hdfs.
    """
    if not os.path.isdir(config):
      raise ValueError('Command requires root of a config tree')
    self._config = config
    self._cmd_class = command_class
    if heap_limit is None:
      raise ValueError('The hdfs heap_limit must not be specified as "None".')
    self._heap_limit = heap_limit
    # BUG FIX: the original tested `self.use_hadoop_v1`, an attribute that is
    # never assigned, so passing use_hadoop_v1=True raised AttributeError.
    # Use the constructor argument instead.
    self.cli_command = 'hadoop' if use_hadoop_v1 else 'hdfs'
    if self._cmd_class.execute_suppress_stdout_stderr(self.cli_command) != 0:
      raise OSError('The "{0}" utility is not available on the system PATH'.format(
          self.cli_command))

  @property
  def config(self):
    """Root of the hadoop/hdfs config tree passed at construction."""
    return self._config

  def _call(self, cmd, *args, **kwargs):
    """Runs fs command with the given command and args.

    Keyword arguments select how the subprocess is run:
      check=True           -- raise on non-zero exit (check_call)
      return_output=True   -- return (exit_code, output)
      suppress_output=True -- swallow stdout
    With none of them set, the command runs with output passed through and
    the exit code is returned.
    """
    cmd = [self.cli_command, '--config', self._config, 'dfs', cmd] + list(args)
    heapsize = str(int(self._heap_limit.as_(Data.MB)))
    with environment_as(HADOOP_HEAPSIZE=heapsize):
      if kwargs.get('check'):
        return self._cmd_class.check_call(cmd)
      elif kwargs.get('return_output'):
        return self._cmd_class.execute_and_get_output(cmd)
      # BUG FIX: callers spell this flag 'suppress_output', but the original
      # only looked up the misspelled 'supress_output', so suppression never
      # took effect.  Accept both spellings for backward compatibility.
      elif kwargs.get('suppress_output') or kwargs.get('supress_output'):
        return self._cmd_class.execute_suppress_stdout(cmd)
      else:
        return self._cmd_class.execute(cmd)

  def get(self, src, dst):
    """
    Copy file(s) in HDFS to local path (via proxy if necessary).

    NOTE: If src matches multiple files, make sure dst is a directory!
    """
    if isinstance(src, list):
      hdfs_src = " ".join(src)
    else:
      hdfs_src = src
    return self._call('-get', hdfs_src, dst)

  def put(self, src, dst):
    """
    Copy the local file src to a HDFS path dst.
    """
    abs_src = os.path.expanduser(src)
    assert os.path.exists(abs_src), 'File does not exist, cannot copy: %s' % abs_src
    return self._do_put(abs_src, dst)

  def _do_put(self, source, dst):
    """
    Put the local file in to HDFS, removing any pre-existing destination.
    """
    if isinstance(dst, list):
      hdfs_dst = " ".join(dst)
    else:
      hdfs_dst = dst
    # `-test -e` exits 0 when the path exists; remove it before the put.
    if not self._call('-test', '-e', hdfs_dst, check=False):
      self._call('-rm', '-skipTrash', hdfs_dst)
    return self._call('-put', source, hdfs_dst)

  def exists(self, path, flag='-e'):
    """
    Checks if the path exists in HDFS (or another `-test` predicate given
    via *flag*).  Returns True if the test passes, False otherwise.
    """
    try:
      return self._call("-test", flag, path) == 0
    except subprocess.CalledProcessError:
      return False

  def cat(self, remote_file_pattern, local_file=sys.stdout):
    """
    Cat HDFS file to local.
    """
    # NOTE(review): _call does not recognise 'also_output_to_file', so the
    # output currently goes to stdout only -- confirm intended behaviour.
    return self._call("-cat", remote_file_pattern, also_output_to_file=local_file)

  def _ls(self, path, is_dir=False, is_recursive=False):
    """
    Return list of [hdfs_full_path, filesize].

    Raises InternalError when the HDFS ls command returns an error or its
    output cannot be parsed.
    """
    hdfs_cmd = '-lsr' if is_recursive else '-ls'
    (exit_code, ls_result) = self._call(hdfs_cmd, path, return_output=True)
    if exit_code != 0:
      raise self.InternalError("Error occurred. %s.Check logs for details" % ls_result)
    file_list = []
    if ls_result is None:
      return file_list
    lines = ls_result.splitlines()
    for line in lines:
      # Skip blanks and the "Found N items" banner.
      if line == "" or line.startswith("Found"):
        continue
      seg = line.split(None, 7)
      if len(seg) < 8:
        raise self.InternalError("Invalid hdfs -ls output. [%s]" % line)
      filename = seg[-1]
      try:
        metadata = self.PARSER.parse(' '.join(seg[0:7]))
      except ScanfParser.ParseError as e:
        raise self.InternalError('Unable to parse hdfs output: %s' % e)
      # seg[0] example: drwxrwx--- ; a leading 'd' marks a directory.
      if metadata.mode.startswith('d') != is_dir:
        continue
      file_list.append([filename, metadata.filesize])
    return file_list

  def ls(self, path, is_dir=False):
    """
    Returns list of [hdfs_full_path, filesize].

    If is_dir is true returns only the toplevel directories.
    """
    return self._ls(path, is_dir, False)

  def lsr(self, path, is_dir=False):
    """
    Returns list of [hdfs_full_path, filesize] in recursive manner.

    If is_dir is true returns only the directories.
    """
    return self._ls(path, is_dir, True)

  def read(self, filename):
    """
    Return the contents of filename, or None if an error occurred.
    """
    with temporary_file() as fp:
      # Unlink first: -copyToLocal refuses to overwrite an existing file.
      os.unlink(fp.name)
      if self._call("-copyToLocal", filename, fp.name) == 0:
        with open(fp.name) as f:
          return f.read()
      else:
        return None

  def write(self, filename, text):
    """
    Write will write the contents in the text to the filename given.

    The file will be overwritten if it already exists.
    """
    self._call("-rm", filename)
    with temporary_file() as fp:
      fp.write(text)
      fp.flush()
      return self._call('-copyFromLocal', fp.name, filename)

  def mkdir(self, path):
    """
    Mkdir will create a directory. If already present, it will return an error.
    """
    return self._call("-mkdir", path)

  def mkdir_suppress_err(self, path):
    """
    Creates a directory if it does not exist.
    """
    if not self.exists(path):
      return self.mkdir(path)

  def rm(self, filename):
    """
    Removes a file.
    """
    return self._call("-rm", filename, suppress_output=True)

  def cp(self, src, dest):
    """
    Copies a src file to dest.
    """
    return self._call("-cp", src, dest, suppress_output=True)

  def mv(self, src, dest):
    """
    Move a src file to dest.
    """
    return self._call("-mv", src, dest, suppress_output=True)

  def copy_from_local(self, local, remote):
    """
    Copies the file from local to remote.
    """
    return self._call("-copyFromLocal", local, remote, suppress_output=True)

  def copy_to_local(self, remote, local):
    """
    Copies the file from remote to local.
    """
    return self._call("-copyToLocal", remote, local, suppress_output=True)
| {
"content_hash": "64a0cc3d8f3f32a41779ae072180ed4c",
"timestamp": "",
"source": "github",
"line_count": 224,
"max_line_length": 88,
"avg_line_length": 30.995535714285715,
"alnum_prop": 0.6318594267607662,
"repo_name": "pombredanne/commons",
"id": "9a521b8f321dc3702d92c952fd3c11dd076110ed",
"size": "7844",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/twitter/common/fs/hdfs.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "GAP",
"bytes": "26960"
},
{
"name": "HTML",
"bytes": "14899"
},
{
"name": "Java",
"bytes": "2611393"
},
{
"name": "JavaScript",
"bytes": "29955"
},
{
"name": "Python",
"bytes": "1197003"
},
{
"name": "Scala",
"bytes": "8271"
},
{
"name": "Shell",
"bytes": "29734"
},
{
"name": "Smalltalk",
"bytes": "79"
},
{
"name": "Thrift",
"bytes": "52171"
}
],
"symlink_target": ""
} |
from __future__ import with_statement
from distutils.version import LooseVersion
from classytags import (arguments, core, exceptions, utils, parser, helpers,
values)
from classytags.blocks import BlockDefinition, VariableBlockName
from classytags.compat import compat_next
from classytags.test.context_managers import SettingsOverride, TemplateTags
import django
from django import template
from django.core.exceptions import ImproperlyConfigured
from unittest import TestCase
import sys
import warnings
# True when the installed Django is at least 1.4; used to gate
# version-specific behaviour in the tests below.
DJANGO_1_4_OR_HIGHER = (
    LooseVersion(django.get_version()) >= LooseVersion('1.4')
)
class DummyTokens(list):
    """Stand-in for a template token stream: a list whose first element is
    the tag name, mimicking ``Token.split_contents()``."""

    def __init__(self, *tokens):
        super(DummyTokens, self).__init__(['dummy_tag'] + list(tokens))

    def split_contents(self):
        # The real Token splits its raw contents; here the list already is
        # the split form.
        return self
class DummyParser(object):
    """Stand-in for a template parser that treats every token as a constant."""

    @staticmethod
    def compile_filter(token):
        return utils.TemplateConstant(token)


# Shared parser instance used by the tests below.
dummy_parser = DummyParser()
class _Warning(object):
def __init__(self, message, category, filename, lineno):
self.message = message
self.category = category
self.filename = filename
self.lineno = lineno
def _collect_warnings(observe_warning, f, *args, **kwargs):
    # Run f(*args, **kwargs) with the warnings machinery redirected so that
    # every warning emitted during the call is handed to *observe_warning*
    # as a _Warning record; returns f's result.  Global warning state is
    # restored afterwards.
    def show_warning(message, category, filename, lineno, file=None,
                     line=None):
        assert isinstance(message, Warning)
        observe_warning(
            _Warning(message.args[0], category, filename, lineno)
        )

    # Disable the per-module cache for every module otherwise if the warning
    # which the caller is expecting us to collect was already emitted it won't
    # be re-emitted by the call to f which happens below.
    for v in sys.modules.values():
        if v is not None:
            try:
                v.__warningregistry__ = None
            except:  # pragma: no cover
                # Don't specify a particular exception type to handle in case
                # some wacky object raises some wacky exception in response to
                # the setattr attempt.
                pass

    orig_filters = warnings.filters[:]
    orig_show = warnings.showwarning
    warnings.simplefilter('always')
    try:
        warnings.showwarning = show_warning
        result = f(*args, **kwargs)
    finally:
        # Always restore the process-wide warning configuration.
        warnings.filters[:] = orig_filters
        warnings.showwarning = orig_show
    return result
class ClassytagsTests(TestCase):
    def failUnlessWarns(self, category, message, f, *args, **kwargs):
        # Assert that calling f(*args, **kwargs) emits at least one warning,
        # that all emitted warnings share one (message, category) pair, and
        # that the pair matches the expectation; returns f's result.
        warnings_shown = []
        result = _collect_warnings(warnings_shown.append, f, *args, **kwargs)
        if not warnings_shown:  # pragma: no cover
            self.fail("No warnings emitted")
        first = warnings_shown[0]
        for other in warnings_shown[1:]:  # pragma: no cover
            if ((other.message, other.category) !=
                    (first.message, first.category)):
                self.fail("Can't handle different warnings")
        self.assertEqual(first.message, message)
        self.assertTrue(first.category is category)
        return result

    # unittest-style alias.
    assertWarns = failUnlessWarns
    def _tag_tester(self, klass, templates):
        """
        Helper method to test a template tag by rendering it and checkout
        output.

        *klass* is a template tag class (subclass of core.Tag)

        *templates* is a sequence of a triple (template-string, output-string,
        context)
        """
        tag_message = ("Rendering of template %(in)r resulted in "
                       "%(realout)r, expected %(out)r using %(ctx)r.")
        with TemplateTags(klass):
            for tpl, out, ctx in templates:
                t = template.Template(tpl)
                c = template.Context(ctx)
                s = t.render(c)
                self.assertEqual(s, out, tag_message % {
                    'in': tpl,
                    'out': out,
                    'ctx': ctx,
                    'realout': s,
                })
                # The expected context values must also survive the render.
                for key, value in ctx.items():
                    self.assertEqual(c.get(key), value)
    def test_simple_parsing(self):
        """
        Test very basic single argument parsing
        """
        # One required Argument consumes the single token and resolves to it.
        options = core.Options(
            arguments.Argument('myarg'),
        )
        dummy_tokens = DummyTokens('myval')
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(len(kwargs), 1)
        dummy_context = {}
        self.assertEqual(kwargs['myarg'].resolve(dummy_context), 'myval')
def test_simple_parsing_too_many_arguments(self):
options = core.Options(
arguments.Argument('myarg'),
)
dummy_tokens = DummyTokens('myval', 'myval2')
self.assertRaises(exceptions.TooManyArguments,
options.parse, dummy_parser, dummy_tokens)
    def test_optional_default(self):
        """
        Test basic optional argument parsing
        """
        # An omitted optional argument resolves to its declared default.
        options = core.Options(
            arguments.Argument('myarg'),
            arguments.Argument('optarg', required=False, default=None),
        )
        dummy_tokens = DummyTokens('myval')
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(len(kwargs), 2)
        dummy_context = {}
        self.assertEqual(kwargs['myarg'].resolve(dummy_context), 'myval')
        self.assertEqual(kwargs['optarg'].resolve(dummy_context), None)
    def test_optional_given(self):
        # When supplied, the optional argument resolves to the given token.
        options = core.Options(
            arguments.Argument('myarg'),
            arguments.Argument('optarg', required=False, default=None),
        )
        dummy_tokens = DummyTokens('myval', 'optval')
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(len(kwargs), 2)
        dummy_context = {}
        self.assertEqual(kwargs['myarg'].resolve(dummy_context), 'myval')
        self.assertEqual(kwargs['optarg'].resolve(dummy_context), 'optval')
    def test_breakpoints_not_enough_arguments(self):
        """
        Test parsing with breakpoints
        """
        # Required arguments after the breakpoints are missing entirely.
        options = core.Options(
            arguments.Argument('myarg'),
            'as',
            arguments.Argument('varname'),
            'using',
            arguments.Argument('using'),
        )
        dummy_tokens = DummyTokens('myval')
        self.assertRaises(exceptions.ArgumentRequiredError,
                          options.parse, dummy_parser, dummy_tokens)
    def test_breakpoint_breakpoint_expected(self):
        # The second token should be the 'as' breakpoint, not a value.
        options = core.Options(
            arguments.Argument('myarg'),
            'as',
            arguments.Argument('varname'),
            'using',
            arguments.Argument('using'),
        )
        dummy_tokens = DummyTokens('myval', 'myname')
        self.assertRaises(exceptions.BreakpointExpected,
                          options.parse, dummy_parser, dummy_tokens)
    def test_breakpoint_breakpoint_expected_second(self):
        # After 'as myname', the next token should be the 'using' breakpoint.
        options = core.Options(
            arguments.Argument('myarg'),
            'as',
            arguments.Argument('varname'),
            'using',
            arguments.Argument('using'),
        )
        dummy_tokens = DummyTokens('myval', 'as', 'myname', 'something')
        self.assertRaises(exceptions.BreakpointExpected,
                          options.parse, dummy_parser, dummy_tokens)
    def test_breakpoint_trailing(self):
        # A breakpoint with nothing after it is rejected even when the
        # argument behind it is optional.
        options = core.Options(
            arguments.Argument('myarg'),
            'as',
            arguments.Argument('varname', required=False),
        )
        dummy_tokens = DummyTokens('myval', 'as')
        self.assertRaises(exceptions.TrailingBreakpoint,
                          options.parse, dummy_parser, dummy_tokens)
    def test_breakpoint_okay(self):
        # The full 'value as name using value' form parses into three kwargs.
        options = core.Options(
            arguments.Argument('myarg'),
            'as',
            arguments.Argument('varname'),
            'using',
            arguments.Argument('using'),
        )
        dummy_tokens = DummyTokens('myval', 'as', 'myname', 'using',
                                   'something')
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(len(kwargs), 3)
        dummy_context = {}
        self.assertEqual(kwargs['myarg'].resolve(dummy_context), 'myval')
        self.assertEqual(kwargs['varname'].resolve(dummy_context), 'myname')
        self.assertEqual(kwargs['using'].resolve(dummy_context), 'something')
    def test_flag_true_value(self):
        """
        Test flag arguments
        """
        # A token from true_values resolves the flag to True.
        options = core.Options(
            arguments.Flag('myflag', true_values=['on'], false_values=['off'])
        )
        dummy_tokens = DummyTokens('on')
        dummy_context = {}
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(kwargs['myflag'].resolve(dummy_context), True)
def test_flag_false_value(self):
options = core.Options(
arguments.Flag('myflag', true_values=['on'], false_values=['off'])
)
dummy_tokens = DummyTokens('off')
dummy_context = {}
kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
self.assertEqual(blocks, {})
self.assertEqual(kwargs['myflag'].resolve(dummy_context), False)
    def test_flag_wrong_value(self):
        # A token in neither true_values nor false_values is an InvalidFlag.
        options = core.Options(
            arguments.Flag('myflag', true_values=['on'], false_values=['off'])
        )
        # test exceptions
        dummy_tokens = DummyTokens('myval')
        self.assertRaises(exceptions.InvalidFlag,
                          options.parse, dummy_parser, dummy_tokens)
    def test_flag_wrong_value_no_false(self):
        # Same rejection when only true_values are declared.
        options = core.Options(
            arguments.Flag('myflag', true_values=['on'])
        )
        dummy_tokens = DummyTokens('myval')
        self.assertRaises(exceptions.InvalidFlag,
                          options.parse, dummy_parser, dummy_tokens)
    def test_flag_wrong_value_no_true(self):
        # Same rejection when only false_values are declared; a Flag with
        # neither is a configuration error.
        options = core.Options(
            arguments.Flag('myflag', false_values=['off'])
        )
        dummy_tokens = DummyTokens('myval')
        self.assertRaises(exceptions.InvalidFlag,
                          options.parse, dummy_parser, dummy_tokens)
        self.assertRaises(ImproperlyConfigured, arguments.Flag, 'myflag')
    def test_case_sensitive_flag_typo(self):
        # test case sensitive flag
        # 'On' does not match the case-sensitive 'on', so the flag falls back
        # to its default (False).
        options = core.Options(
            arguments.Flag('myflag', true_values=['on'], default=False,
                           case_sensitive=True)
        )
        dummy_tokens = DummyTokens('On')
        dummy_context = {}
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(kwargs['myflag'].resolve(dummy_context), False)
    def test_case_sensitive_flag_okay(self):
        # An exact-case match resolves the case-sensitive flag to True.
        options = core.Options(
            arguments.Flag(
                'myflag',
                true_values=['on'],
                default=False,
                case_sensitive=True
            )
        )
        dummy_tokens = DummyTokens('on')
        dummy_context = {}
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(kwargs['myflag'].resolve(dummy_context), True)
    def test_multiflag(self):
        # test multi-flag
        # Flags are case-insensitive by default: 'On' matches true_values for
        # the first flag and misses false_values for the second (default True).
        options = core.Options(
            arguments.Flag('flagone', true_values=['on'], default=False),
            arguments.Flag('flagtwo', false_values=['off'], default=True),
        )
        dummy_tokens = DummyTokens('On', 'On')
        dummy_context = {}
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(kwargs['flagone'].resolve(dummy_context), True)
        self.assertEqual(kwargs['flagtwo'].resolve(dummy_context), True)
    def test_multi_value_single_value(self):
        """
        Test simple multi value arguments
        """
        options = core.Options(
            arguments.MultiValueArgument('myarg')
        )
        # test single token MVA
        dummy_tokens = DummyTokens('myval')
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(len(kwargs), 1)
        dummy_context = {}
        # test resolving to list
        self.assertEqual(kwargs['myarg'].resolve(dummy_context), ['myval'])
    def test_multi_value_two_values(self):
        # Two tokens accumulate into one list-valued argument.
        options = core.Options(
            arguments.MultiValueArgument('myarg')
        )
        # test double token MVA
        dummy_tokens = DummyTokens('myval', 'myval2')
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(len(kwargs), 1)
        dummy_context = {}
        self.assertEqual(kwargs['myarg'].resolve(dummy_context),
                         ['myval', 'myval2'])
    def test_multi_value_three_values(self):
        # Three tokens accumulate into one list-valued argument.
        options = core.Options(
            arguments.MultiValueArgument('myarg')
        )
        # test triple token MVA
        dummy_tokens = DummyTokens('myval', 'myval2', 'myval3')
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(len(kwargs), 1)
        dummy_context = {}
        self.assertEqual(kwargs['myarg'].resolve(dummy_context),
                         ['myval', 'myval2', 'myval3'])
    def test_multi_value_max_values_single(self):
        # test max_values option
        # A single token is fine under a max_values=2 cap.
        options = core.Options(
            arguments.MultiValueArgument('myarg', max_values=2)
        )
        dummy_tokens = DummyTokens('myval')
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(len(kwargs), 1)
        dummy_context = {}
        self.assertEqual(kwargs['myarg'].resolve(dummy_context), ['myval'])
    def test_multi_value_max_values_double(self):
        # Exactly max_values tokens are still accepted.
        options = core.Options(
            arguments.MultiValueArgument('myarg', max_values=2)
        )
        dummy_tokens = DummyTokens('myval', 'myval2')
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(len(kwargs), 1)
        dummy_context = {}
        self.assertEqual(kwargs['myarg'].resolve(dummy_context),
                         ['myval', 'myval2'])
def test_multi_value_max_values_too_many(self):
options = core.Options(
arguments.MultiValueArgument('myarg', max_values=2)
)
dummy_tokens = DummyTokens('myval', 'myval2', 'myval3')
self.assertRaises(exceptions.TooManyArguments,
options.parse, dummy_parser, dummy_tokens)
    def test_multi_value_no_resolve(self):
        # test no resolve
        # With resolve=False the Parser keeps literal token text (the quotes
        # around 'myval2' are stripped, not resolved against the context).
        options = core.Options(
            arguments.MultiValueArgument('myarg', resolve=False)
        )
        argparser = parser.Parser(options)
        dummy_tokens = DummyTokens('myval', "'myval2'")
        kwargs, blocks = argparser.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        dummy_context = {}
        self.assertEqual(kwargs['myarg'].resolve(dummy_context),
                         ['myval', 'myval2'])
    def test_multi_value_defaults(self):
        # test default
        # With no tokens at all, the declared default list is used.
        options = core.Options(
            arguments.MultiValueArgument('myarg', default=['hello', 'world']),
        )
        argparser = parser.Parser(options)
        dummy_tokens = DummyTokens()
        kwargs, blocks = argparser.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        dummy_context = {}
        self.assertEqual(kwargs['myarg'].resolve(dummy_context),
                         ['hello', 'world'])
    def test_complex_all_arguments(self):
        """
        test a complex tag option parser
        """
        options = core.Options(
            arguments.Argument('singlearg'),
            arguments.MultiValueArgument('multiarg', required=False),
            'as',
            arguments.Argument('varname', required=False),
            'safe',
            arguments.Flag('safe', true_values=['on', 'true'], default=False)
        )
        # test simple 'all arguments given'
        dummy_tokens = DummyTokens(1, 2, 3, 'as', 4, 'safe', 'true')
        dummy_context = {}
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(len(kwargs), 4)
        expected = [
            ('singlearg', 1),
            ('multiarg', [2, 3]),
            ('varname', 4),
            ('safe', True)
        ]
        for key, value in expected:
            self.assertEqual(kwargs[key].resolve(dummy_context), value)
    def test_complex_only_first_argument(self):
        # With only the required argument given, every optional part falls
        # back to its default.
        options = core.Options(
            arguments.Argument('singlearg'),
            arguments.MultiValueArgument('multiarg', required=False),
            'as',
            arguments.Argument('varname', required=False),
            'safe',
            arguments.Flag('safe', true_values=['on', 'true'], default=False)
        )
        # test 'only first argument given'
        dummy_tokens = DummyTokens(1)
        dummy_context = {}
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(len(kwargs), 4)
        expected = [
            ('singlearg', 1),
            ('multiarg', []),
            ('varname', None),
            ('safe', False)
        ]
        for key, value in expected:
            self.assertEqual(kwargs[key].resolve(dummy_context), value)
    def test_complext_first_and_last_argument(self):
        # NOTE(review): method name has a typo ("complext"); kept as-is to
        # avoid changing test discovery.
        # Middle optional parts may be skipped entirely while a later
        # breakpoint section is still given.
        options = core.Options(
            arguments.Argument('singlearg'),
            arguments.MultiValueArgument('multiarg', required=False),
            'as',
            arguments.Argument('varname', required=False),
            'safe',
            arguments.Flag('safe', true_values=['on', 'true'], default=False)
        )
        # test first argument and last argument given
        dummy_tokens = DummyTokens(2, 'safe', 'false')
        dummy_context = {}
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertEqual(blocks, {})
        self.assertEqual(len(kwargs), 4)
        expected = [
            ('singlearg', 2),
            ('multiarg', []),
            ('varname', None),
            ('safe', False)
        ]
        for key, value in expected:
            self.assertEqual(kwargs[key].resolve(dummy_context), value)
    def test_cycle(self):
        """
        This test re-implements django's cycle tag (because it's quite crazy)
        and checks if it works.
        """
        from itertools import cycle as itertools_cycle

        class Cycle(core.Tag):
            name = 'classy_cycle'

            options = core.Options(
                arguments.MultiValueArgument('values'),
                'as',
                arguments.Argument('varname', required=False, resolve=False),
            )

            def render_tag(self, context, values, varname):
                # One iterator per tag node, persisted across renders via
                # render_context, keyed on the node itself.
                if self not in context.render_context:
                    context.render_context[self] = itertools_cycle(values)
                cycle_iter = context.render_context[self]
                value = compat_next(cycle_iter)
                if varname:
                    context[varname] = value
                return value

        # Render the same template with django's builtin cycle and with the
        # classytags re-implementation; the output must match.
        origtpl = template.Template(
            '{% for thing in sequence %}'
            '{% cycle "1" "2" "3" "4" %}'
            '{% endfor %}'
        )
        sequence = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
        context = template.Context({'sequence': sequence})
        original = origtpl.render(context)
        with TemplateTags(Cycle):
            classytpl = template.Template(
                '{% for thing in sequence %}'
                '{% classy_cycle "1" "2" "3" "4" %}'
                '{% endfor %}'
            )
            classy = classytpl.render(context)
        self.assertEqual(original, classy)
        # Same comparison for the 'as varname' form.
        origtpl = template.Template(
            '{% for thing in sequence %}'
            '{% cycle "1" "2" "3" "4" as myvarname %}'
            '{% endfor %}'
        )
        sequence = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
        context = template.Context({'sequence': sequence})
        original = origtpl.render(context)
        with TemplateTags(Cycle):
            classytpl = template.Template(
                '{% for thing in sequence %}'
                '{% classy_cycle "1" "2" "3" "4" as myvarname %}'
                '{% endfor %}'
            )
            classy = classytpl.render(context)
        self.assertEqual(original, classy)
    def test_naming(self):
        """Tag naming: implicit (snake_cased class name), explicit via the
        `name` attribute, and explicit at registration time (which overrides
        both)."""
        # test implicit naming
        class MyTag(core.Tag):
            pass
        lib = template.Library()
        lib.tag(MyTag)
        msg = "'my_tag' not in %s" % lib.tags.keys()
        self.assertTrue('my_tag' in lib.tags, msg)
        # test explicit naming
        class MyTag2(core.Tag):
            name = 'my_tag_2'
        lib = template.Library()
        lib.tag(MyTag2)
        msg = "'my_tag_2' not in %s" % lib.tags.keys()
        self.assertTrue('my_tag_2' in lib.tags, msg)
        # test named registering
        lib = template.Library()
        lib.tag('my_tag_3', MyTag)
        msg = "'my_tag_3' not in %s" % lib.tags.keys()
        self.assertTrue('my_tag_3' in lib.tags, msg)
        msg = "'my_tag' in %s" % lib.tags.keys()
        self.assertTrue('my_tag' not in lib.tags, msg)
        lib = template.Library()
        lib.tag('my_tag_4', MyTag2)
        msg = "'my_tag_4' not in %s" % lib.tags.keys()
        self.assertTrue('my_tag_4' in lib.tags, msg)
        msg = "'my_tag2' in %s" % lib.tags.keys()
        self.assertTrue('my_tag2' not in lib.tags, msg)
    def test_hello_world(self):
        """End-to-end check of a simple tag with an optional argument and an
        optional 'as varname' output redirect."""
        class Hello(core.Tag):
            options = core.Options(
                arguments.Argument('name', required=False, default='world'),
                'as',
                arguments.Argument('varname', required=False, resolve=False)
            )
            def render_tag(self, context, name, varname):
                output = 'hello %s' % name
                if varname:
                    # store in the context instead of rendering inline
                    context[varname] = output
                    return ''
                return output
        tpls = [
            ('{% hello %}', 'hello world', {}),
            ('{% hello "classytags" %}', 'hello classytags', {}),
            ('{% hello as myvar %}', '', {'myvar': 'hello world'}),
            ('{% hello "my friend" as othervar %}', '',
             {'othervar': 'hello my friend'})
        ]
        self._tag_tester(Hello, tpls)
    def test_blocks(self):
        """Multiple named blocks: skipped block tags leave their sections
        empty, and content accumulates in the previous present section."""
        class Blocky(core.Tag):
            options = core.Options(
                blocks=['a', 'b', 'c', 'd', 'e'],
            )
            def render_tag(self, context, **nodelists):
                # join the five sections with ';' so the split is observable
                tpl = "%(a)s;%(b)s;%(c)s;%(d)s;%(e)s"
                data = {}
                for key, value in nodelists.items():
                    data[key] = value.render(context)
                return tpl % data
        templates = [
            ('{% blocky %}1{% a %}2{% b %}3{% c %}4{% d %}5{% e %}',
             '1;2;3;4;5', {},),
            ('{% blocky %}12{% b %}3{% c %}4{% d %}5{% e %}', '12;;3;4;5',
             {},),
            ('{% blocky %}123{% c %}4{% d %}5{% e %}', '123;;;4;5', {},),
            ('{% blocky %}1234{% d %}5{% e %}', '1234;;;;5', {},),
            ('{% blocky %}12345{% e %}', '12345;;;;', {},),
            ('{% blocky %}1{% a %}23{% c %}4{% d %}5{% e %}', '1;23;;4;5',
             {},),
            ('{% blocky %}1{% a %}23{% c %}45{% e %}', '1;23;;45;', {},),
        ]
        self._tag_tester(Blocky, templates)
    def test_astag(self):
        """helpers.AsTag: renders the value inline, or stores it in the
        context and renders nothing when 'as varname' is given."""
        class Dummy(helpers.AsTag):
            options = core.Options(
                'as',
                arguments.Argument('varname', resolve=False, required=False),
            )
            def get_value(self, context):
                return "dummy"
        templates = [
            ('{% dummy %}:{{ varname }}', 'dummy:', {},),
            ('{% dummy as varname %}:{{ varname }}', ':dummy', {},),
        ]
        self._tag_tester(Dummy, templates)
    def test_inclusion_tag(self):
        """helpers.InclusionTag renders its `template` attribute with the
        dict returned by get_context(); works with and without options."""
        class Inc(helpers.InclusionTag):
            template = 'test.html'
            options = core.Options(
                arguments.Argument('var'),
            )
            def get_context(self, context, var):
                return {'var': var}
        templates = [
            ('{% inc var %}', 'inc', {'var': 'inc'},),
        ]
        self._tag_tester(Inc, templates)
        # no options and no get_context override: renders with no variables
        class Inc2(helpers.InclusionTag):
            template = 'test.html'
        templates = [
            ('{% inc2 %}', '', {},),
        ]
        self._tag_tester(Inc2, templates)
    def test_inclusion_tag_push_pop_context(self):
        """With push_context enabled, mutations that get_context() makes to
        the caller's context are rolled back after rendering."""
        class IncPollute(helpers.InclusionTag):
            template = 'test.html'
            options = core.Options(
                arguments.Argument('var')
            )
            def get_context(self, context, var):
                # deliberately mutates and returns the caller's context
                context.update({'var': 'polluted'})
                return context
        with TemplateTags(IncPollute):
            tpl = template.Template('{% inc_pollute var %}')
            ctx = template.Context({'var': 'test'})
            out = tpl.render(ctx)
            # without push_context the caller's context stays polluted
            self.assertEqual(out, 'polluted')
            self.assertEqual(ctx['var'], 'polluted')
        # now enable pollution control
        IncPollute.push_context = True
        with TemplateTags(IncPollute):
            tpl = template.Template('{% inc_pollute var %}')
            ctx = template.Context({'var': 'test'})
            out = tpl.render(ctx)
            self.assertEqual(out, 'polluted')
            self.assertEqual(ctx['var'], 'test')
    def test_integer_variable(self):
        """IntegerArgument: invalid values warn (and fall back to
        value_on_error) when DEBUG is off, and raise when DEBUG is on --
        both for unresolved tokens and for resolved context variables."""
        options = core.Options(
            arguments.IntegerArgument('integer', resolve=False),
        )
        # test okay
        with SettingsOverride(DEBUG=False):
            dummy_tokens = DummyTokens('1')
            kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
            dummy_context = {}
            self.assertEqual(kwargs['integer'].resolve(dummy_context), 1)
            # test warning
            dummy_tokens = DummyTokens('one')
            kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
            dummy_context = {}
            one = repr('one')
            message = arguments.IntegerValue.errors['clean'] % {'value': one}
            self.assertWarns(exceptions.TemplateSyntaxWarning,
                             message, kwargs['integer'].resolve, dummy_context)
            self.assertEqual(kwargs['integer'].resolve(dummy_context),
                             values.IntegerValue.value_on_error)
        # test exception
        with SettingsOverride(DEBUG=True):
            dummy_tokens = DummyTokens('one')
            kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
            dummy_context = {}
            # NOTE(review): `message` is built but unused -- assertRaises
            # takes no expected-message argument.
            message = values.IntegerValue.errors['clean'] % {
                'value': repr('one')
            }
            self.assertRaises(template.TemplateSyntaxError,
                              kwargs['integer'].resolve, dummy_context)
        # test the same as above but with resolving
        class IntegerTag(core.Tag):
            options = core.Options(
                arguments.IntegerArgument('integer')
            )
            def render_tag(self, context, integer):
                return integer
        with TemplateTags(IntegerTag):
            tpl = template.Template("{% integer_tag i %}")
        with SettingsOverride(DEBUG=False):
            # test okay
            context = template.Context({'i': '1'})
            self.assertEqual(tpl.render(context), '1')
            # test warning
            context = template.Context({'i': 'one'})
            message = values.IntegerValue.errors['clean'] % {
                'value': repr('one')
            }
            self.assertWarns(exceptions.TemplateSyntaxWarning,
                             message, tpl.render, context)
            self.assertEqual(int(tpl.render(context)),
                             values.IntegerValue.value_on_error)
        # test exception
        with SettingsOverride(DEBUG=True):
            context = template.Context({'i': 'one'})
            # NOTE(review): unused; also mixes arguments.IntegerValue with
            # values.IntegerValue used above -- presumably aliases, confirm.
            message = arguments.IntegerValue.errors['clean'] % {'value': one}
            self.assertRaises(template.TemplateSyntaxError, tpl.render,
                              context)
        # reset settings
    def test_not_implemented_errors(self):
        """Tags/AsTags missing required overrides fail: at compile time with
        ImproperlyConfigured where the options are incomplete, otherwise at
        render time (TemplateSyntaxError instead on Django < 1.4)."""
        class Fail(core.Tag):
            pass
        class Fail2(helpers.AsTag):
            pass
        class Fail3(helpers.AsTag):
            options = core.Options(
                'as',
            )
        class Fail4(helpers.AsTag):
            options = core.Options(
                'as',
                arguments.Argument('varname', resolve=False),
            )
        if DJANGO_1_4_OR_HIGHER:
            exc_class = NotImplementedError
        else: # pragma: no cover
            exc_class = template.TemplateSyntaxError
        with TemplateTags(Fail, Fail2, Fail3, Fail4):
            context = template.Context({})
            tpl = template.Template("{% fail %}")
            self.assertRaises(exc_class, tpl.render, context)
            self.assertRaises(ImproperlyConfigured,
                              template.Template, "{% fail2 %}")
            self.assertRaises(ImproperlyConfigured,
                              template.Template, "{% fail3 %}")
            tpl = template.Template("{% fail4 as something %}")
            self.assertRaises(exc_class, tpl.render, context)
def test_too_many_arguments(self):
class NoArg(core.Tag):
pass
with TemplateTags(NoArg):
self.assertRaises(exceptions.TooManyArguments,
template.Template, "{% no_arg a arg %}")
    def test_choice_argument(self):
        """ChoiceArgument: values outside `choices` raise under DEBUG and
        fall back (first choice, or explicit default) otherwise; a custom
        value_class changes how tokens are cleaned."""
        options = core.Options(
            arguments.ChoiceArgument('choice',
                                     choices=['one', 'two', 'three']),
        )
        # this is settings dependant!
        with SettingsOverride(DEBUG=True):
            for good in ('one', 'two', 'three'):
                dummy_tokens = DummyTokens(good)
                kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
                dummy_context = {}
                self.assertEqual(kwargs['choice'].resolve(dummy_context), good)
            bad = 'four'
            dummy_tokens = DummyTokens(bad)
            kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
            dummy_context = {}
            self.assertRaises(template.TemplateSyntaxError,
                              kwargs['choice'].resolve, dummy_context)
        with SettingsOverride(DEBUG=False):
            # same bad value: silently falls back to the first choice
            self.assertEqual(kwargs['choice'].resolve(dummy_context), 'one')
        # test other value class
        class IntegerChoiceArgument(arguments.ChoiceArgument):
            value_class = values.IntegerValue
        default = 2
        options = core.Options(
            IntegerChoiceArgument('choice', choices=[1, 2, 3],
                                  default=default),
        )
        with SettingsOverride(DEBUG=True):
            for good in ('1', '2', '3'):
                dummy_tokens = DummyTokens(good)
                kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
                dummy_context = {}
                self.assertEqual(kwargs['choice'].resolve(dummy_context),
                                 int(good))
            bad = '4'
            dummy_tokens = DummyTokens(bad)
            kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
            dummy_context = {}
            self.assertRaises(template.TemplateSyntaxError,
                              kwargs['choice'].resolve, dummy_context)
        with SettingsOverride(DEBUG=False):
            # with an explicit default, the fallback is that default
            self.assertEqual(kwargs['choice'].resolve(dummy_context), default)
        # reset settings
    def test_keyword_argument(self):
        """KeywordArgument: accepts key=value or a bare value (filed under
        `defaultkey`); second variant adds resolve=False, a default value,
        and required=False."""
        class KeywordArgumentTag(core.Tag):
            name = 'kwarg_tag'
            options = core.Options(
                arguments.KeywordArgument('named', defaultkey='defaultkey'),
            )
            def render_tag(self, context, named):
                # render the single key/value pair as "key:value"
                return '%s:%s' % (
                    list(named.keys())[0], list(named.values())[0]
                )
        ctx = {'key': 'thekey', 'value': 'thevalue'}
        templates = [
            ("{% kwarg_tag key='value' %}", 'key:value', ctx),
            ("{% kwarg_tag 'value' %}", 'defaultkey:value', ctx),
            ("{% kwarg_tag key=value %}", 'key:thevalue', ctx),
            ("{% kwarg_tag value %}", 'defaultkey:thevalue', ctx),
        ]
        self._tag_tester(KeywordArgumentTag, templates)
        class KeywordArgumentTag2(KeywordArgumentTag):
            name = 'kwarg_tag'
            options = core.Options(
                arguments.KeywordArgument(
                    'named',
                    defaultkey='defaultkey',
                    resolve=False,
                    required=False,
                    default='defaultvalue'
                ),
            )
        templates = [
            ("{% kwarg_tag %}", 'defaultkey:defaultvalue', ctx),
            ("{% kwarg_tag key='value' %}", 'key:value', ctx),
            ("{% kwarg_tag 'value' %}", 'defaultkey:value', ctx),
            ("{% kwarg_tag key=value %}", 'key:value', ctx),
            ("{% kwarg_tag value %}", 'defaultkey:value', ctx),
        ]
        self._tag_tester(KeywordArgumentTag2, templates)
    def test_multi_keyword_argument(self):
        """MultiKeywordArgument collects key=value pairs into a dict and
        enforces max_values."""
        opts = core.Options(
            arguments.MultiKeywordArgument('multi', max_values=2),
        )
        class MultiKeywordArgumentTag(core.Tag):
            name = 'multi_kwarg_tag'
            options = opts
            def render_tag(self, context, multi):
                # sort so the rendered order is deterministic
                items = sorted(multi.items())
                return ','.join(['%s:%s' % item for item in items])
        ctx = {'key': 'thekey', 'value': 'thevalue'}
        templates = [
            ("{% multi_kwarg_tag key='value' key2='value2' %}",
             'key:value,key2:value2', ctx),
            ("{% multi_kwarg_tag key=value %}", 'key:thevalue', ctx),
        ]
        self._tag_tester(MultiKeywordArgumentTag, templates)
        # a third pair exceeds max_values=2
        dummy_tokens = DummyTokens('key="value"', 'key2="value2"',
                                   'key3="value3"')
        self.assertRaises(exceptions.TooManyArguments,
                          opts.parse, dummy_parser, dummy_tokens)
def test_custom_parser(self):
class CustomParser(parser.Parser):
def parse_blocks(self):
return
options = core.Options(
blocks=[
('end_my_tag', 'nodelist'),
],
parser_class=CustomParser
)
dummy_tokens = DummyTokens()
kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
self.assertEqual(blocks, {})
def test_repr(self):
class MyTag(core.Tag):
name = 'mytag'
tag = MyTag(dummy_parser, DummyTokens())
self.assertEqual('<Tag: mytag>', repr(tag))
    def test_non_required_multikwarg(self):
        """A non-required MultiKeywordArgument parses to an empty dict when
        absent, or to its explicit default when one is given."""
        options = core.Options(
            arguments.MultiKeywordArgument('multi', required=False),
        )
        dummy_tokens = DummyTokens()
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertTrue('multi' in kwargs)
        self.assertEqual(kwargs['multi'], {})
        options = core.Options(
            arguments.MultiKeywordArgument('multi', required=False,
                                           default={'hello': 'world'}),
        )
        dummy_tokens = DummyTokens()
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertTrue('multi' in kwargs)
        self.assertEqual(kwargs['multi'].resolve({}), {'hello': 'world'})
    def test_resolve_kwarg(self):
        """KeywordArgument value resolution: resolve=True looks the value up
        in the context; resolve=False keeps the literal token."""
        class ResolveKwarg(core.Tag):
            name = 'kwarg'
            options = core.Options(
                arguments.KeywordArgument('named'),
            )
            def render_tag(self, context, named):
                return '%s:%s' % (
                    list(named.keys())[0], list(named.values())[0]
                )
        class NoResolveKwarg(core.Tag):
            name = 'kwarg'
            options = core.Options(
                arguments.KeywordArgument('named', resolve=False),
            )
            def render_tag(self, context, named):
                return '%s:%s' % (
                    list(named.keys())[0], list(named.values())[0]
                )
        resolve_templates = [
            ("{% kwarg key=value %}", "key:test", {'value': 'test'}),
            ("{% kwarg key='value' %}", "key:value", {'value': 'test'}),
        ]
        noresolve_templates = [
            ("{% kwarg key=value %}", "key:value", {'value': 'test'}),
        ]
        self._tag_tester(ResolveKwarg, resolve_templates)
        self._tag_tester(NoResolveKwarg, noresolve_templates)
    def test_kwarg_default(self):
        """Defaults for a non-required KeywordArgument: defaultkey alone maps
        to {key: None}, a default alone yields {}, both yield
        {defaultkey: default}."""
        options = core.Options(
            arguments.KeywordArgument('kwarg', required=False,
                                      defaultkey='mykey'),
        )
        dummy_tokens = DummyTokens()
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertTrue('kwarg' in kwargs)
        self.assertEqual(kwargs['kwarg'].resolve({}), {'mykey': None})
        options = core.Options(
            arguments.KeywordArgument('kwarg', required=False,
                                      default='hello'),
        )
        dummy_tokens = DummyTokens()
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertTrue('kwarg' in kwargs)
        self.assertEqual(kwargs['kwarg'].resolve({}), {})
        options = core.Options(
            arguments.KeywordArgument('kwarg', required=False,
                                      default='hello', defaultkey='key'),
        )
        dummy_tokens = DummyTokens()
        kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
        self.assertTrue('kwarg' in kwargs)
        self.assertEqual(kwargs['kwarg'].resolve({}), {'key': 'hello'})
def test_multikwarg_no_key(self):
options = core.Options(
arguments.MultiKeywordArgument('multi'),
)
with SettingsOverride(DEBUG=True):
dummy_tokens = DummyTokens('value')
self.assertRaises(template.TemplateSyntaxError,
options.parse, dummy_parser, dummy_tokens)
with SettingsOverride(DEBUG=False):
dummy_tokens = DummyTokens('value')
self.assertRaises(template.TemplateSyntaxError,
options.parse, dummy_parser, dummy_tokens)
    def test_inclusion_tag_context_pollution(self):
        """
        Check the `keep_render_context` and `push_pop_context` attributes on
        InclusionTag work as advertised and prevent 'context pollution'
        """
        class NoPushPop(helpers.InclusionTag):
            template = 'inclusion.html'
            def get_context(self, context):
                # returns the caller's context object itself (mutated)
                return context.update({'pollution': True})
        class Standard(helpers.InclusionTag):
            template = 'inclusion.html'
            def get_context(self, context):
                # returns a fresh dict; the caller's context is untouched
                return {'pollution': True}
        with TemplateTags(NoPushPop, Standard):
            # push pop pollution
            ctx1 = template.Context({'pollution': False})
            tpl1 = template.Template("{% no_push_pop %}")
            tpl1.render(ctx1)
            self.assertEqual(ctx1['pollution'], True)
            ctx2 = template.Context({'pollution': False})
            tpl2 = template.Template("{% standard %}")
            tpl2.render(ctx2)
            self.assertEqual(ctx2['pollution'], False)
    def test_named_block(self):
        """VariableBlockName: the end tag must repeat the argument token,
        e.g. {% end_block 'hello' %}; the argument itself still resolves
        against the context."""
        class StartBlock(core.Tag):
            options = core.Options(
                arguments.Argument("myarg"),
                blocks=[
                    BlockDefinition("nodelist",
                                    VariableBlockName("end_block %(value)s",
                                                      'myarg'),
                                    "end_block")
                ]
            )
            def render_tag(self, context, myarg, nodelist):
                return "nodelist:%s;myarg:%s" % (nodelist.render(context),
                                                 myarg)
        with TemplateTags(StartBlock):
            ctx = template.Context()
            tpl = template.Template(
                "{% start_block 'hello' %}nodelist-content"
                "{% end_block 'hello' %}"
            )
            output = tpl.render(ctx)
            expected_output = 'nodelist:nodelist-content;myarg:hello'
            self.assertEqual(output, expected_output)
            # same with a context variable as the block name
            ctx = template.Context({'hello': 'world'})
            tpl = template.Template(
                "{% start_block hello %}nodelist-content{% end_block hello %}"
            )
            output = tpl.render(ctx)
            expected_output = 'nodelist:nodelist-content;myarg:world'
            self.assertEqual(output, expected_output)
def test_fail_named_block(self):
vbn = VariableBlockName('endblock %(value)s', 'myarg')
self.assertRaises(ImproperlyConfigured, core.Options,
blocks=[BlockDefinition('nodelist', vbn)])
    def test_named_block_noresolve(self):
        """Same as test_named_block but with resolve=False: the literal token
        is used as the argument value."""
        class StartBlock(core.Tag):
            options = core.Options(
                arguments.Argument("myarg", resolve=False),
                blocks=[
                    BlockDefinition("nodelist",
                                    VariableBlockName("end_block %(value)s",
                                                      'myarg'),
                                    "end_block")
                ]
            )
            def render_tag(self, context, myarg, nodelist):
                return "nodelist:%s;myarg:%s" % (nodelist.render(context),
                                                 myarg)
        with TemplateTags(StartBlock):
            ctx = template.Context()
            tpl = template.Template(
                "{% start_block 'hello' %}nodelist-content"
                "{% end_block 'hello' %}"
            )
            output = tpl.render(ctx)
            expected_output = 'nodelist:nodelist-content;myarg:hello'
            self.assertEqual(output, expected_output)
    def test_strict_string(self):
        """StringArgument (strict): non-string values warn when DEBUG is off
        and raise TemplateSyntaxError when DEBUG is on."""
        options = core.Options(
            arguments.StringArgument('string', resolve=False),
        )
        with SettingsOverride(DEBUG=False):
            # test ok
            dummy_tokens = DummyTokens('string')
            kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
            dummy_context = {}
            self.assertEqual(
                kwargs['string'].resolve(dummy_context), 'string'
            )
            # test warning
            dummy_tokens = DummyTokens(1)
            kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
            dummy_context = {}
            message = values.StrictStringValue.errors['clean'] % {
                'value': repr(1)
            }
            self.assertWarns(
                exceptions.TemplateSyntaxWarning,
                message,
                kwargs['string'].resolve,
                dummy_context
            )
        with SettingsOverride(DEBUG=True):
            # test exception
            dummy_tokens = DummyTokens(1)
            kwargs, blocks = options.parse(dummy_parser, dummy_tokens)
            dummy_context = {}
            self.assertRaises(
                template.TemplateSyntaxError,
                kwargs['string'].resolve,
                dummy_context
            )
    def test_get_value_for_context(self):
        """AsTag.get_value_for_context is used for the 'as varname' path, so
        exceptions can be suppressed there while inline rendering (no 'as')
        still raises via get_value."""
        message = 'exception handled'
        class MyException(Exception):
            pass
        class SuppressException(helpers.AsTag):
            options = core.Options(
                arguments.Argument('name'),
                'as',
                arguments.Argument('var', resolve=False, required=False),
            )
            def get_value(self, context, name):
                raise MyException(name)
            def get_value_for_context(self, context, name):
                try:
                    return self.get_value(context, name)
                except MyException:
                    return message
        dummy_tokens_with_as = DummyTokens('name', 'as', 'var')
        tag = SuppressException(DummyParser(), dummy_tokens_with_as)
        context = {}
        self.assertEqual(tag.render(context), '')
        self.assertEqual(context['var'], message)
        dummy_tokens_no_as = DummyTokens('name')
        tag = SuppressException(DummyParser(), dummy_tokens_no_as)
        self.assertRaises(MyException, tag.render, {})
class MultiBreakpointTests(TestCase):
    """Argument parsing with two chained breakpoints ('also' 'using') around
    an optional second argument: <first> ['also' 'using' <second>].

    All six tests previously re-built the identical Options spec inline;
    the construction is factored into ``_options`` and the failure cases
    into ``_assert_parse_raises`` to keep the expectations readable.
    """

    def _options(self):
        # The shared option spec every test in this case parses against.
        return core.Options(
            arguments.Argument('first'),
            'also',
            'using',
            arguments.Argument('second', required=False),
        )

    def _assert_parse_raises(self, exc, *bits):
        # Parsing the given token bits against the shared spec must raise.
        self.assertRaises(
            exc,
            self._options().parse, dummy_parser, DummyTokens(*bits)
        )

    def test_optional_firstonly(self):
        # check only using the first argument
        kwargs, blocks = self._options().parse(dummy_parser,
                                               DummyTokens('firstval'))
        self.assertEqual(blocks, {})
        self.assertEqual(len(kwargs), 2)
        dummy_context = {}
        self.assertEqual(kwargs['first'].resolve(dummy_context), 'firstval')
        self.assertEqual(kwargs['second'].resolve(dummy_context), None)

    def test_optional_both(self):
        # check using both arguments and both breakpoints
        kwargs, blocks = self._options().parse(
            dummy_parser, DummyTokens('firstval', 'also', 'using', 'secondval')
        )
        self.assertEqual(blocks, {})
        self.assertEqual(len(kwargs), 2)
        dummy_context = {}
        self.assertEqual(kwargs['first'].resolve(dummy_context), 'firstval')
        self.assertEqual(kwargs['second'].resolve(dummy_context), 'secondval')

    def test_partial_breakpoints(self):
        # first breakpoint with nothing after it
        self._assert_parse_raises(exceptions.TrailingBreakpoint,
                                  'firstval', 'also')

    def test_partial_breakpoints_second(self):
        # second breakpoint without the first one
        self._assert_parse_raises(exceptions.BreakpointExpected,
                                  'firstval', 'using')

    def test_partial_breakpoints_both(self):
        # a value follows 'also' where the 'using' breakpoint was expected
        self._assert_parse_raises(exceptions.BreakpointExpected,
                                  'firstval', 'also', 'secondval')

    def test_partial_breakpoints_second_both(self):
        # second breakpoint without the first, followed by a value
        self._assert_parse_raises(exceptions.BreakpointExpected,
                                  'firstval', 'using', 'secondval')

    def test_partial_breakpoints_both_trailing(self):
        # both breakpoints present but no trailing value
        self._assert_parse_raises(exceptions.TrailingBreakpoint,
                                  'firstval', 'also', 'using')
| {
"content_hash": "4d8e9008fa5a95c708330a51e704af17",
"timestamp": "",
"source": "github",
"line_count": 1301,
"max_line_length": 79,
"avg_line_length": 37.86471944657956,
"alnum_prop": 0.5412894320165645,
"repo_name": "philippeowagner/django-classy-tags",
"id": "117a9d24acb6903f8c99a1b20904ee3f56f23b00",
"size": "49262",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "classytags/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "3582"
},
{
"name": "Python",
"bytes": "88593"
}
],
"symlink_target": ""
} |
import numpy as np
import pandas as pd
import time
# ------------------------------------------------------------------------------
print(f"Starting ... {int(time.time())}")

# Row count is deliberately huge -- this is a memory/throughput benchmark.
length: int = 1100000000
df = pd.DataFrame({"INDEX": np.arange(0, length),
                   "normal": np.random.normal(size=length),
                   "log_normal": np.random.lognormal(size=length),
                   "exponential": np.random.exponential(size=length),
                  })
# Bug fix: DataFrame.set_index() returns a new frame; the original call
# discarded the result, so the default RangeIndex was silently kept.
df = df.set_index("INDEX")
print(f"All memory allocations are done. Calculating means ... {int(time.time())}")

# Exponentially weighted moving average (span=3) over each column.
m1: pd.Series = df["normal"].ewm(span=3).mean()
m2: pd.Series = df["log_normal"].ewm(span=3).mean()
m3: pd.Series = df["exponential"].ewm(span=3).mean()
print(f"{m1[100000]}, {m2[100000]}, {m3[100000]}")
print(f"{int(time.time())} ... Done")
# ------------------------------------------------------------------------------
# Local Variables:
# mode:Python
# tab-width:4
# c-basic-offset:4
# End:
| {
"content_hash": "9a601b639b644741e7de5fd2abe1d26f",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 83,
"avg_line_length": 29.727272727272727,
"alnum_prop": 0.5066258919469928,
"repo_name": "hosseinmoein/DataFrame",
"id": "2ed81d85a242540a1f47ef41814b5c07d314f2ee",
"size": "981",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "benchmarks/pandas_performance_2.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1867"
},
{
"name": "C++",
"bytes": "1663423"
},
{
"name": "CMake",
"bytes": "9629"
},
{
"name": "Makefile",
"bytes": "9591"
},
{
"name": "Python",
"bytes": "1954"
},
{
"name": "Shell",
"bytes": "279"
}
],
"symlink_target": ""
} |
""" Salesforce API message templates """
DEPLOY_MSG = \
"""<soapenv:Envelope
xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/"
xmlns:met="http://soap.sforce.com/2006/04/metadata">
<soapenv:Header>
<met:CallOptions>
<met:client>{client}</met:client>
</met:CallOptions>
<met:SessionHeader>
<met:sessionId>{sessionId}</met:sessionId>
</met:SessionHeader>
</soapenv:Header>
<soapenv:Body>
<met:deploy>
<met:ZipFile>{ZipFile}</met:ZipFile>
<met:DeployOptions>
<met:allowMissingFiles>false</met:allowMissingFiles>
<met:autoUpdatePackage>false</met:autoUpdatePackage>
{checkOnly}
<met:ignoreWarnings>false</met:ignoreWarnings>
<met:performRetrieve>false</met:performRetrieve>
<met:purgeOnDelete>false</met:purgeOnDelete>
<met:rollbackOnError>true</met:rollbackOnError>
<met:singlePackage>true</met:singlePackage>
{testLevel}
{tests}
</met:DeployOptions>
</met:deploy>
</soapenv:Body>
</soapenv:Envelope>"""
CHECK_DEPLOY_STATUS_MSG = \
"""<soapenv:Envelope
xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/"
xmlns:met="http://soap.sforce.com/2006/04/metadata">
<soapenv:Header>
<met:CallOptions>
<met:client>{client}</met:client>
</met:CallOptions>
<met:SessionHeader>
<met:sessionId>{sessionId}</met:sessionId>
</met:SessionHeader>
</soapenv:Header>
<soapenv:Body>
<met:checkDeployStatus>
<met:asyncProcessId>{asyncProcessId}</met:asyncProcessId>
<met:includeDetails>{includeDetails}</met:includeDetails>
</met:checkDeployStatus>
</soapenv:Body>
</soapenv:Envelope>"""
RETRIEVE_MSG = \
"""<soapenv:Envelope
xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/"
xmlns:met="http://soap.sforce.com/2006/04/metadata">
<soapenv:Header>
<met:CallOptions>
<met:client>{client}</met:client>
</met:CallOptions>
<met:SessionHeader>
<met:sessionId>{sessionId}</met:sessionId>
</met:SessionHeader>
</soapenv:Header>
<soapenv:Body>
<met:retrieve>
<met:retrieveRequest>
<met:apiVersion>{apiVersion}</met:apiVersion>
<met:singlePackage>{singlePackage}</met:singlePackage>
<met:unpackaged>{unpackaged}</met:unpackaged>
</met:retrieveRequest>
</met:retrieve>
</soapenv:Body>
</soapenv:Envelope>"""
CHECK_RETRIEVE_STATUS_MSG = \
"""<soapenv:Envelope
xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/"
xmlns:met="http://soap.sforce.com/2006/04/metadata">
<soapenv:Header>
<met:CallOptions>
<met:client>{client}</met:client>
</met:CallOptions>
<met:SessionHeader>
<met:sessionId>{sessionId}</met:sessionId>
</met:SessionHeader>
</soapenv:Header>
<soapenv:Body>
<met:checkRetrieveStatus>
<met:asyncProcessId>{asyncProcessId}</met:asyncProcessId>
<met:includeZip>{includeZip}</met:includeZip>
</met:checkRetrieveStatus>
</soapenv:Body>
</soapenv:Envelope>"""
| {
"content_hash": "be9b1dc1621d8fb68b3bf780b31272f1",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 66,
"avg_line_length": 33.787234042553195,
"alnum_prop": 0.6388539042821159,
"repo_name": "rbauction/sfdclib",
"id": "73284d48d1c8b5fce7315230237ff5b302ab98d4",
"size": "3176",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sfdclib/messages.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "38937"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from .models import MyModel
from admin_wmdeditor import WmdEditorModelAdmin
class MyModelAdmin(WmdEditorModelAdmin):
    """Admin for MyModel that renders the listed fields with the WMD
    markdown editor."""
    # model fields to replace with the WMD editor widget
    wmdeditor_fields = ('text1','text2')
admin.site.register(MyModel, MyModelAdmin)
| {
"content_hash": "68d5c379b31bbd4b09c66c5e8e405f10",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 47,
"avg_line_length": 26.333333333333332,
"alnum_prop": 0.8059071729957806,
"repo_name": "paltman-archive/django-admin-wmdeditor",
"id": "6b1dbf90110e6e4a9d5a7e915c3c76cd898d30cd",
"size": "237",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "demo_project/exampleapp/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "53375"
},
{
"name": "Python",
"bytes": "6370"
}
],
"symlink_target": ""
} |
import numpy as np
import pandas as pd
from os.path import join, dirname
from scipy.io import savemat
def add_median_ratio_cytoplasm_nuclei(df):
    '''Add median_ratio in DataFrame if it has nuclei and cytoplasm objects.
    nuc/cyto is stored in nuclei, cyto/nuc is stored in cytoplasm.

    `df` is indexed by (object, ch, prop, frame); the new rows are appended
    with prop == 'median_ratio'. Returns df unchanged if either object set
    is missing.
    '''
    object_names = np.unique(df.index.get_level_values('object'))
    if 'cytoplasm' in object_names and 'nuclei' in object_names:
        # element-wise ratios of the per-cell median intensities
        nuc = df.xs(['nuclei', 'median_intensity'], level=['object', 'prop'])
        cyto = df.xs(['cytoplasm', 'median_intensity'], level=['object', 'prop'])
        median_ratio_cyto = organize_index(cyto/nuc, 'cytoplasm')
        median_ratio_nuc = organize_index(nuc/cyto, 'nuclei')
        df = pd.concat([df, median_ratio_nuc, median_ratio_cyto])
    return df
def add_median_ratio_cytoplasm_nuclei_old(arr, labels):
    '''Add median_ratio in DataFrame if it has nuclei and cytoplasm objects.
    nuc/cyto is stored in nuclei, cyto/nuc is stored in cytoplasm

    Legacy array-based variant: `labels` holds (object, channel, prop)
    tuples, one per plane of `arr` along axis 0.
    NOTE(review): superseded by add_median_ratio_cytoplasm_nuclei.
    '''
    # (object, ch, prop) labels of the two median-intensity planes
    cytomedian = [a for num, a in enumerate([i for i in labels if len(i) == 3])
                  if 'cytoplasm' in a[0] and 'median_intensity' in a[2]]
    nucmedian = [a for num, a in enumerate([i for i in labels if len(i) == 3])
                 if 'nuclei' in a[0] and 'median_intensity' in a[2]]
    # pair cytoplasm/nuclei labels that share the same channel (element [1])
    fields = [(i, ii) for i, ii in zip(cytomedian, nucmedian) if i[1] == ii[1]]
    for ci, ni in fields:
        # prepend the two ratio planes (cyto/nuc, nuc/cyto) to the stack
        template = np.zeros((2, arr.shape[1], arr.shape[2]), np.float32)
        template[0, :, :] = arr[labels.index(ci), :, :]/arr[labels.index(ni), :, :]
        template[1, :, :] = arr[labels.index(ni), :, :]/arr[labels.index(ci), :, :]
        arr = np.concatenate((template, arr), axis=0)
        new_labels = [('cytoplasm', ci[1], 'median_ratio'), ('nuclei', ci[1], 'median_ratio')]
        labels = new_labels + labels
    return arr, labels
def organize_index(median_ratio, object_name):
    '''Tag a ratio frame (indexed by ch/frame) with the given object name and
    prop == 'median_ratio', and move both into the index so the result lines
    up with the main (object, ch, prop, frame) frame.

    Note: mutates `median_ratio` in place (adds the two columns).
    '''
    median_ratio['object'] = object_name
    median_ratio['prop'] = 'median_ratio'
    for level in ('object', 'prop'):
        median_ratio.set_index(level, append=True, inplace=True)
    return median_ratio.reorder_levels(['object', 'ch', 'prop', 'frame'])
def df_to_mat(dfpath):
    '''
    Save mat file for GUI. This will be updated and removed in the future.

    Reads the csv at `dfpath` (indexed by object, ch, prop, frame) and writes
    `cpDataTracked.mat` next to it. If `imgpaths.npy` exists in the same
    directory it is embedded as imageSetFilenames (rows = channels, columns =
    frames, array of basenames).

    Fixes over the previous version: removed a leftover `import ipdb;
    ipdb.set_trace()` debugger trap and dead placeholder assignments, and
    narrowed the bare excepts.
    '''
    df = pd.read_csv(dfpath, index_col=['object', 'ch', 'prop', 'frame'])
    data = {}
    data['subfolderMetadata'] = {}
    data['subfolderMetadata']['subfolderName'] = 'PosExample'
    data['inputStruct'] = {}
    data['inputStruct']['cpDataFilename'] = 'cpData.mat'
    object_names = list(set(df.index.get_level_values('object')))
    channels = list(set(df.index.get_level_values('ch')))
    props = list(set(df.index.get_level_values('prop')))
    for i in object_names:
        data[i] = {}
        # 1-based channel indices for MATLAB
        data[i]['imageSetIdx'] = np.arange(len(channels))[:, np.newaxis]+1
        data[i]['imageSetNames'] = np.zeros((len(channels), 1), dtype='object')
        for n, chi in enumerate(channels):
            data[i]['imageSetNames'][n] = str(chi)
        for pi in props:
            # FIXME: check if matrix size is identical
            if pi in ['x', 'y']:
                # coordinates are channel-independent; take the first channel
                data[i][pi] = np.array(df.ix[i, channels[0], pi]).T
            else:
                # one (cells x frames) plane per channel, stacked on axis 2
                data[i][pi] = np.array(
                    np.dstack([np.array(df.ix[i, chi, pi]).T for chi in channels]))
        data[i]['label'] = np.array(df.ix[object_names[0], channels[0], 'label_id'].copy()).T
    data['imageSetChannels'] = np.zeros((len(channels), 1), dtype='object')
    for i, chi in enumerate(channels):
        data['imageSetChannels'][i] = chi
    data['objectSetNames'] = np.zeros((len(object_names), 1), dtype='object')
    for n, objn in enumerate(object_names):
        data['objectSetNames'][n] = objn
    outputpath = dirname(dfpath)
    try:
        imgpaths = np.load(join(outputpath, 'imgpaths.npy'))
        data['imageSetFilenames'] = imgpaths
    except (IOError, OSError):
        # best effort: the GUI can still load the data without image paths
        print("imgpaths.npy not saved in the same output directory")
    data['frames'] = np.arange(data[object_names[0]][props[0]].shape[1], dtype=np.float64)
    store = {}
    store['data'] = data
    savemat(join(outputpath, 'cpDataTracked.mat'), store)
| {
"content_hash": "86174e333e278b053087b824f89459e3",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 117,
"avg_line_length": 45.98979591836735,
"alnum_prop": 0.6081650765475927,
"repo_name": "braysia/covertrack",
"id": "b52618d19864ddf222f7c99886931f81fda126a2",
"size": "4507",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "covertrack/utils/df_handling.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "682275"
},
{
"name": "Python",
"bytes": "250180"
},
{
"name": "Shell",
"bytes": "112"
}
],
"symlink_target": ""
} |
"""Wraps capture_tpu_profile binary."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import subprocess
import sys
import tensorflow as tf
tf.flags.DEFINE_string('service_addr', '',
'Address of TPU profiler service e.g. localhost:8466')
tf.flags.DEFINE_string('logdir', '',
'Path of TensorBoard log directory e.g. /tmp/tb_log')
tf.flags.DEFINE_integer('duration_ms', 2000, 'Duration of tracing in ms.')
FLAGS = tf.flags.FLAGS
EXECUTABLE = 'data/capture_tpu_profile'
def run_main():
  """Console-script entry point; delegates flag parsing to tf.app.run."""
  tf.app.run(main)
def main(unused_argv=None):
  """Validate the required flags and launch the capture_tpu_profile binary.

  Exits the process with an error message when either --service_addr or
  --logdir is missing.
  """
  if not (FLAGS.service_addr and FLAGS.logdir):
    sys.exit('service_addr and logdir must be provided.')
  binary_path = os.path.join(os.path.dirname(__file__), EXECUTABLE)
  cmd = [
      binary_path,
      '--logdir=' + FLAGS.logdir,
      '--service_addr=' + FLAGS.service_addr,
      '--duration_ms=' + str(FLAGS.duration_ms),
  ]
  subprocess.call(cmd)
# Allow the wrapper to be executed directly as a script.
if __name__ == '__main__':
  run_main()
| {
"content_hash": "916fb85df1fc14188fedb766fff04388",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 77,
"avg_line_length": 27.46153846153846,
"alnum_prop": 0.676937441643324,
"repo_name": "eadgarchen/tensorflow",
"id": "7970c20a2693cbbe91a136080240f676d29f2053",
"size": "1759",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "tensorflow/contrib/tpu/profiler/pip_package/cloud_tpu_profiler/main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "8913"
},
{
"name": "C",
"bytes": "321171"
},
{
"name": "C++",
"bytes": "35900386"
},
{
"name": "CMake",
"bytes": "188687"
},
{
"name": "Go",
"bytes": "1057580"
},
{
"name": "Java",
"bytes": "541818"
},
{
"name": "Jupyter Notebook",
"bytes": "1940884"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "44805"
},
{
"name": "Objective-C",
"bytes": "12456"
},
{
"name": "Objective-C++",
"bytes": "94716"
},
{
"name": "PHP",
"bytes": "1429"
},
{
"name": "Perl",
"bytes": "6179"
},
{
"name": "Perl 6",
"bytes": "1357"
},
{
"name": "PureBasic",
"bytes": "24932"
},
{
"name": "Python",
"bytes": "31228552"
},
{
"name": "Ruby",
"bytes": "547"
},
{
"name": "Shell",
"bytes": "403773"
}
],
"symlink_target": ""
} |
import asyncio
from unittest.mock import patch
from aiohttp import WSMsgType
from async_timeout import timeout
import pytest
from homeassistant.core import callback
from homeassistant.components import websocket_api as wapi, frontend
from tests.common import mock_http_component_app
API_PASSWORD = 'test1234'
@pytest.fixture
def websocket_client(loop, hass, test_client):
    """Websocket client fixture connected to websocket server.

    The server is created without an API password, so the handshake
    immediately yields TYPE_AUTH_OK and tests receive an already
    authenticated connection.
    """
    websocket_app = mock_http_component_app(hass)
    wapi.WebsocketAPIView().register(websocket_app.router)
    client = loop.run_until_complete(test_client(websocket_app))
    ws = loop.run_until_complete(client.ws_connect(wapi.URL))
    auth_ok = loop.run_until_complete(ws.receive_json())
    assert auth_ok['type'] == wapi.TYPE_AUTH_OK

    yield ws

    # Teardown: close the websocket unless the test already closed it.
    if not ws.closed:
        loop.run_until_complete(ws.close())
@pytest.fixture
def no_auth_websocket_client(hass, loop, test_client):
    """Websocket connection that requires authentication.

    The server is created with API_PASSWORD set, so the handshake yields
    TYPE_AUTH_REQUIRED and the test must authenticate explicitly.
    """
    websocket_app = mock_http_component_app(hass, API_PASSWORD)
    wapi.WebsocketAPIView().register(websocket_app.router)
    client = loop.run_until_complete(test_client(websocket_app))
    ws = loop.run_until_complete(client.ws_connect(wapi.URL))
    auth_ok = loop.run_until_complete(ws.receive_json())
    assert auth_ok['type'] == wapi.TYPE_AUTH_REQUIRED

    yield ws

    # Teardown: close the websocket unless the test already closed it.
    if not ws.closed:
        loop.run_until_complete(ws.close())
@asyncio.coroutine
def test_auth_via_msg(no_auth_websocket_client):
    """Test that sending the correct password authenticates the client."""
    no_auth_websocket_client.send_json({
        'type': wapi.TYPE_AUTH,
        'api_password': API_PASSWORD
    })

    msg = yield from no_auth_websocket_client.receive_json()

    assert msg['type'] == wapi.TYPE_AUTH_OK
@asyncio.coroutine
def test_auth_via_msg_incorrect_pass(no_auth_websocket_client):
    """Test that an incorrect password is rejected with TYPE_AUTH_INVALID."""
    no_auth_websocket_client.send_json({
        'type': wapi.TYPE_AUTH,
        'api_password': API_PASSWORD + 'wrong'
    })

    msg = yield from no_auth_websocket_client.receive_json()

    assert msg['type'] == wapi.TYPE_AUTH_INVALID
    assert msg['message'] == 'Invalid password'
@asyncio.coroutine
def test_pre_auth_only_auth_allowed(no_auth_websocket_client):
    """Verify that before authentication, only auth messages are allowed."""
    # Send a non-auth command while still unauthenticated.
    no_auth_websocket_client.send_json({
        'type': wapi.TYPE_CALL_SERVICE,
        'domain': 'domain_test',
        'service': 'test_service',
        'service_data': {
            'hello': 'world'
        }
    })

    msg = yield from no_auth_websocket_client.receive_json()

    assert msg['type'] == wapi.TYPE_AUTH_INVALID
    assert msg['message'].startswith('Message incorrectly formatted')
@asyncio.coroutine
def test_invalid_message_format(websocket_client):
    """Test sending a well-formed JSON message that fails schema validation."""
    # 'type' must be a string command, not an integer.
    websocket_client.send_json({'type': 5})

    msg = yield from websocket_client.receive_json()

    assert msg['type'] == wapi.TYPE_RESULT
    error = msg['error']
    assert error['code'] == wapi.ERR_INVALID_FORMAT
    assert error['message'].startswith('Message incorrectly formatted')
@asyncio.coroutine
def test_invalid_json(websocket_client):
    """Test that sending non-JSON data closes the connection."""
    websocket_client.send_str('this is not JSON')

    msg = yield from websocket_client.receive()

    # Use the canonical upper-case enum member; the lowercase
    # WSMsgType.close is a deprecated aiohttp alias (matches the
    # WSMsgType.CLOSE usage in test_quiting_hass below).
    assert msg.type == WSMsgType.CLOSE
@asyncio.coroutine
def test_quiting_hass(hass, websocket_client):
    """Test that the websocket connection closes when Home Assistant stops."""
    # Patch loop.stop so async_stop does not tear down the test loop.
    with patch.object(hass.loop, 'stop'):
        yield from hass.async_stop()

    msg = yield from websocket_client.receive()

    assert msg.type == WSMsgType.CLOSE
@asyncio.coroutine
def test_call_service(hass, websocket_client):
    """Test call service command."""
    calls = []

    # Record every invocation of the test service.
    @callback
    def service_call(call):
        calls.append(call)

    hass.services.async_register('domain_test', 'test_service', service_call)

    websocket_client.send_json({
        'id': 5,
        'type': wapi.TYPE_CALL_SERVICE,
        'domain': 'domain_test',
        'service': 'test_service',
        'service_data': {
            'hello': 'world'
        }
    })

    msg = yield from websocket_client.receive_json()
    assert msg['id'] == 5
    assert msg['type'] == wapi.TYPE_RESULT
    assert msg['success']

    # The service must have been invoked exactly once with our data.
    assert len(calls) == 1
    call = calls[0]

    assert call.domain == 'domain_test'
    assert call.service == 'test_service'
    assert call.data == {'hello': 'world'}
@asyncio.coroutine
def test_subscribe_unsubscribe_events(hass, websocket_client):
    """Test subscribe/unsubscribe events command.

    Subscribes to 'test_event' only; other events must not be forwarded.
    """
    init_count = sum(hass.bus.async_listeners().values())

    websocket_client.send_json({
        'id': 5,
        'type': wapi.TYPE_SUBSCRIBE_EVENTS,
        'event_type': 'test_event'
    })

    msg = yield from websocket_client.receive_json()
    assert msg['id'] == 5
    assert msg['type'] == wapi.TYPE_RESULT
    assert msg['success']

    # Verify we have a new listener
    assert sum(hass.bus.async_listeners().values()) == init_count + 1

    hass.bus.async_fire('ignore_event')
    hass.bus.async_fire('test_event', {'hello': 'world'})
    hass.bus.async_fire('ignore_event')

    with timeout(3, loop=hass.loop):
        msg = yield from websocket_client.receive_json()

    assert msg['id'] == 5
    assert msg['type'] == wapi.TYPE_EVENT
    event = msg['event']

    assert event['event_type'] == 'test_event'
    assert event['data'] == {'hello': 'world'}
    assert event['origin'] == 'LOCAL'

    websocket_client.send_json({
        'id': 6,
        'type': wapi.TYPE_UNSUBSCRIBE_EVENTS,
        'subscription': 5
    })

    msg = yield from websocket_client.receive_json()
    assert msg['id'] == 6
    assert msg['type'] == wapi.TYPE_RESULT
    assert msg['success']

    # Check our listener got unsubscribed
    assert sum(hass.bus.async_listeners().values()) == init_count
@asyncio.coroutine
def test_get_states(hass, websocket_client):
    """Test get_states command returns all states with serialized timestamps."""
    hass.states.async_set('greeting.hello', 'world')
    hass.states.async_set('greeting.bye', 'universe')

    websocket_client.send_json({
        'id': 5,
        'type': wapi.TYPE_GET_STATES,
    })

    msg = yield from websocket_client.receive_json()
    assert msg['id'] == 5
    assert msg['type'] == wapi.TYPE_RESULT
    assert msg['success']

    # Build the expected payload: datetimes are ISO-formatted on the wire.
    states = []
    for state in hass.states.async_all():
        state = state.as_dict()
        state['last_changed'] = state['last_changed'].isoformat()
        state['last_updated'] = state['last_updated'].isoformat()
        states.append(state)

    assert msg['result'] == states
@asyncio.coroutine
def test_get_services(hass, websocket_client):
    """Test get_services command mirrors the service registry."""
    websocket_client.send_json({
        'id': 5,
        'type': wapi.TYPE_GET_SERVICES,
    })

    msg = yield from websocket_client.receive_json()
    assert msg['id'] == 5
    assert msg['type'] == wapi.TYPE_RESULT
    assert msg['success']
    assert msg['result'] == hass.services.async_services()
@asyncio.coroutine
def test_get_config(hass, websocket_client):
    """Test get_config command returns the core configuration."""
    websocket_client.send_json({
        'id': 5,
        'type': wapi.TYPE_GET_CONFIG,
    })

    msg = yield from websocket_client.receive_json()
    assert msg['id'] == 5
    assert msg['type'] == wapi.TYPE_RESULT
    assert msg['success']
    assert msg['result'] == hass.config.as_dict()
@asyncio.coroutine
def test_get_panels(hass, websocket_client):
    """Test get_panels command returns the registered frontend panels."""
    frontend.register_built_in_panel(hass, 'map', 'Map',
                                     'mdi:account-location')

    websocket_client.send_json({
        'id': 5,
        'type': wapi.TYPE_GET_PANELS,
    })

    msg = yield from websocket_client.receive_json()
    assert msg['id'] == 5
    assert msg['type'] == wapi.TYPE_RESULT
    assert msg['success']
    assert msg['result'] == hass.data[frontend.DATA_PANELS]
@asyncio.coroutine
def test_ping(websocket_client):
    """Test ping command is answered with a pong."""
    websocket_client.send_json({
        'id': 5,
        'type': wapi.TYPE_PING,
    })

    msg = yield from websocket_client.receive_json()
    assert msg['id'] == 5
    assert msg['type'] == wapi.TYPE_PONG
| {
"content_hash": "27bf00dab3de0e91168dd9f660f5a8d4",
"timestamp": "",
"source": "github",
"line_count": 298,
"max_line_length": 77,
"avg_line_length": 27.79530201342282,
"alnum_prop": 0.6430037426053362,
"repo_name": "dmeulen/home-assistant",
"id": "bdad5032a24e12b50088d99b8a087ccc53a72382",
"size": "8283",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/test_websocket_api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1435271"
},
{
"name": "Python",
"bytes": "4390736"
},
{
"name": "Ruby",
"bytes": "379"
},
{
"name": "Shell",
"bytes": "4473"
}
],
"symlink_target": ""
} |
"""Axis network device abstraction."""
import asyncio
import async_timeout
from homeassistant.const import (
CONF_DEVICE,
CONF_HOST,
CONF_MAC,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
)
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import CONF_CAMERA, CONF_EVENTS, CONF_MODEL, DOMAIN, LOGGER
from .errors import AuthenticationRequired, CannotConnect
class AxisNetworkDevice:
    """Manages an Axis network device.

    Wraps an axis.AxisDevice API object, forwards config-entry platforms
    (camera/binary_sensor/switch) and relays event-stream callbacks to
    Home Assistant via the dispatcher.
    """

    def __init__(self, hass, config_entry):
        """Initialize the device."""
        self.hass = hass
        self.config_entry = config_entry
        self.available = True

        # Populated in async_setup() once the device is reachable.
        self.api = None
        self.fw_version = None
        self.product_type = None

        # Dispatcher unsubscribe callbacks, removed in async_reset().
        self.listeners = []

    @property
    def host(self):
        """Return the host of this device."""
        return self.config_entry.data[CONF_DEVICE][CONF_HOST]

    @property
    def model(self):
        """Return the model of this device."""
        return self.config_entry.data[CONF_MODEL]

    @property
    def name(self):
        """Return the name of this device."""
        return self.config_entry.data[CONF_NAME]

    @property
    def serial(self):
        """Return the MAC address of this device (used as unique id)."""
        return self.config_entry.data[CONF_MAC]

    async def async_update_device_registry(self):
        """Update device registry."""
        device_registry = await self.hass.helpers.device_registry.async_get_registry()
        device_registry.async_get_or_create(
            config_entry_id=self.config_entry.entry_id,
            connections={(CONNECTION_NETWORK_MAC, self.serial)},
            identifiers={(DOMAIN, self.serial)},
            manufacturer="Axis Communications AB",
            model="{} {}".format(self.model, self.product_type),
            name=self.name,
            sw_version=self.fw_version,
        )

    async def async_setup(self):
        """Set up the device.

        Returns False on unknown errors; raises ConfigEntryNotReady when
        the device cannot be reached so setup can be retried later.
        """
        try:
            self.api = await get_device(self.hass, self.config_entry.data[CONF_DEVICE])

        except CannotConnect:
            raise ConfigEntryNotReady

        except Exception:  # pylint: disable=broad-except
            LOGGER.error("Unknown error connecting with Axis device on %s", self.host)
            return False

        self.fw_version = self.api.vapix.params.firmware_version
        self.product_type = self.api.vapix.params.prodtype

        if self.config_entry.options[CONF_CAMERA]:
            self.hass.async_create_task(
                self.hass.config_entries.async_forward_entry_setup(
                    self.config_entry, "camera"
                )
            )

        if self.config_entry.options[CONF_EVENTS]:
            # Wire the RTSP stream callbacks before loading the platforms
            # that depend on them.
            self.api.stream.connection_status_callback = (
                self.async_connection_status_callback
            )
            self.api.enable_events(event_callback=self.async_event_callback)

            platform_tasks = [
                self.hass.config_entries.async_forward_entry_setup(
                    self.config_entry, platform
                )
                for platform in ["binary_sensor", "switch"]
            ]
            # start() waits for the platforms before opening the stream.
            self.hass.async_create_task(self.start(platform_tasks))

        self.config_entry.add_update_listener(self.async_new_address_callback)

        return True

    @property
    def event_new_address(self):
        """Device specific event to signal new device address."""
        return "axis_new_address_{}".format(self.serial)

    @staticmethod
    async def async_new_address_callback(hass, entry):
        """Handle signals of device getting new address.

        This is a static method because a class method (bound method),
        can not be used with weak references.
        """
        device = hass.data[DOMAIN][entry.data[CONF_MAC]]
        device.api.config.host = device.host
        async_dispatcher_send(hass, device.event_new_address)

    @property
    def event_reachable(self):
        """Device specific event to signal a change in connection status."""
        return "axis_reachable_{}".format(self.serial)

    @callback
    def async_connection_status_callback(self, status):
        """Handle signals of device connection status.

        This is called on every RTSP keep-alive message.
        Only signal state change if state change is true.
        """
        from axis.streammanager import SIGNAL_PLAYING

        if self.available != (status == SIGNAL_PLAYING):
            self.available = not self.available
            async_dispatcher_send(self.hass, self.event_reachable, True)

    @property
    def event_new_sensor(self):
        """Device specific event to signal new sensor available."""
        return "axis_add_sensor_{}".format(self.serial)

    @callback
    def async_event_callback(self, action, event_id):
        """Call to configure events when initialized on event stream."""
        if action == "add":
            async_dispatcher_send(self.hass, self.event_new_sensor, event_id)

    async def start(self, platform_tasks):
        """Start the event stream when all platforms are loaded."""
        await asyncio.gather(*platform_tasks)
        self.api.start()

    @callback
    def shutdown(self, event):
        """Stop the event stream."""
        self.api.stop()

    async def async_reset(self):
        """Reset this device to default state.

        Unloads all forwarded platforms and removes dispatcher listeners.
        """
        platform_tasks = []

        if self.config_entry.options[CONF_CAMERA]:
            platform_tasks.append(
                self.hass.config_entries.async_forward_entry_unload(
                    self.config_entry, "camera"
                )
            )

        if self.config_entry.options[CONF_EVENTS]:
            self.api.stop()
            platform_tasks += [
                self.hass.config_entries.async_forward_entry_unload(
                    self.config_entry, platform
                )
                for platform in ["binary_sensor", "switch"]
            ]

        await asyncio.gather(*platform_tasks)

        for unsub_dispatcher in self.listeners:
            unsub_dispatcher()
        self.listeners = []

        return True
async def get_device(hass, config):
    """Create an axis.AxisDevice and load its basic device data.

    Raises AuthenticationRequired on credential or unknown API errors and
    CannotConnect on timeouts/connection failures.
    """
    import axis

    device = axis.AxisDevice(
        loop=hass.loop,
        host=config[CONF_HOST],
        username=config[CONF_USERNAME],
        password=config[CONF_PASSWORD],
        port=config[CONF_PORT],
        web_proto="http",
    )

    device.vapix.initialize_params(preload_data=False)
    device.vapix.initialize_ports()

    try:
        # The pywemo-style vapix calls are blocking; run them in the
        # executor and bound the whole handshake to 15 seconds.
        with async_timeout.timeout(15):
            await asyncio.gather(
                hass.async_add_executor_job(device.vapix.params.update_brand),
                hass.async_add_executor_job(device.vapix.params.update_properties),
                hass.async_add_executor_job(device.vapix.ports.update),
            )

        return device

    except axis.Unauthorized:
        LOGGER.warning(
            "Connected to device at %s but not registered.", config[CONF_HOST]
        )
        raise AuthenticationRequired

    except (asyncio.TimeoutError, axis.RequestError):
        LOGGER.error("Error connecting to the Axis device at %s", config[CONF_HOST])
        raise CannotConnect

    except axis.AxisException:
        LOGGER.exception("Unknown Axis communication error occurred")
        raise AuthenticationRequired
| {
"content_hash": "a719f3ff897c42683c95c2c127379b1e",
"timestamp": "",
"source": "github",
"line_count": 238,
"max_line_length": 87,
"avg_line_length": 31.861344537815125,
"alnum_prop": 0.6173018594223922,
"repo_name": "fbradyirl/home-assistant",
"id": "465d8c73b742053c3afe8e21cd41e5d871ebae52",
"size": "7583",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/axis/device.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1829"
},
{
"name": "Python",
"bytes": "16494727"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17784"
}
],
"symlink_target": ""
} |
import tkinter as tk
from tkinter import ttk
from window_main import MainWindow
from input_thread import InputThread
import midi_manager as mm
# Spin up the background thread that handles MIDI input.
input_thread = InputThread()

# Build the Tk user interface and hand it the input thread so UI actions
# can reach the MIDI layer.
root = tk.Tk()
root.wm_title("pianopad")
root.geometry("600x410")
app = MainWindow(input_thread, master=root)

# Give the input thread a reference back to the UI, then start it as a
# daemon so it cannot keep the process alive on its own.
input_thread.ui = app
input_thread.daemon = True
input_thread.start()

# Run the Tk main loop; once the window closes, signal the input thread
# to stop, wait for it, and release any open MIDI ports.
app.mainloop()
input_thread.keep_running = False
input_thread.join()
mm.close_open_ports()
| {
"content_hash": "9569654178451e921e9c46883d839e1a",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 43,
"avg_line_length": 20.5,
"alnum_prop": 0.764808362369338,
"repo_name": "danodic/pianopad",
"id": "6dee40150c3defd0935c2d1822d2d4f4d10e9717",
"size": "574",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/pianopad.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "52911"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns
from django.conf.urls import url
from .views import IndexView
# Route the panel root to the index view.
# NOTE(review): django.conf.urls.patterns() is deprecated (removed in
# Django 1.10); kept because this module still targets older Django.
urlpatterns = patterns(
    '',
    url(r'^$', IndexView.as_view(), name='index'),
)
| {
"content_hash": "1ad997fe1773ccb9586efaa75d53d55b",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 50,
"avg_line_length": 18.8,
"alnum_prop": 0.6914893617021277,
"repo_name": "b1-systems/horizon-dashboard-cookiecutter",
"id": "5796add938d53f8803c526e5746b4c735adff11d",
"size": "743",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "{{cookiecutter.dashboard_name}}/{{cookiecutter.panel_name}}/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "58"
},
{
"name": "JavaScript",
"bytes": "65"
},
{
"name": "Python",
"bytes": "6995"
}
],
"symlink_target": ""
} |
"""Support for WeMo humidifier."""
from __future__ import annotations
import asyncio
from datetime import timedelta
import math
from typing import Any
from pywemo.ouimeaux_device.humidifier import DesiredHumidity, FanMode, Humidifier
import voluptuous as vol
from homeassistant.components.fan import SUPPORT_SET_SPEED, FanEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util.percentage import (
int_states_in_range,
percentage_to_ranged_value,
ranged_value_to_percentage,
)
from .const import (
DOMAIN as WEMO_DOMAIN,
SERVICE_RESET_FILTER_LIFE,
SERVICE_SET_HUMIDITY,
)
from .entity import WemoBinaryStateEntity
from .wemo_device import DeviceCoordinator
SCAN_INTERVAL = timedelta(seconds=10)
PARALLEL_UPDATES = 0

# Keys for the extra state attributes exposed by WemoHumidifier.
ATTR_CURRENT_HUMIDITY = "current_humidity"
ATTR_TARGET_HUMIDITY = "target_humidity"
ATTR_FAN_MODE = "fan_mode"
ATTR_FILTER_LIFE = "filter_life"
ATTR_FILTER_EXPIRED = "filter_expired"
ATTR_WATER_LEVEL = "water_level"

SPEED_RANGE = (FanMode.Minimum, FanMode.Maximum)  # off is not included

SUPPORTED_FEATURES = SUPPORT_SET_SPEED

# Schema for the set_humidity entity service registered in
# async_setup_entry.
SET_HUMIDITY_SCHEMA = {
    vol.Required(ATTR_TARGET_HUMIDITY): vol.All(
        vol.Coerce(float), vol.Range(min=0, max=100)
    ),
}
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up WeMo humidifier entities and their entity services."""

    async def _discovered_wemo(coordinator: DeviceCoordinator) -> None:
        """Handle a discovered Wemo device."""
        async_add_entities([WemoHumidifier(coordinator)])

    async_dispatcher_connect(hass, f"{WEMO_DOMAIN}.fan", _discovered_wemo)

    # Process any devices that were discovered before this platform loaded.
    await asyncio.gather(
        *(
            _discovered_wemo(coordinator)
            for coordinator in hass.data[WEMO_DOMAIN]["pending"].pop("fan")
        )
    )

    platform = entity_platform.async_get_current_platform()

    # This will call WemoHumidifier.set_humidity(target_humidity=VALUE)
    platform.async_register_entity_service(
        SERVICE_SET_HUMIDITY, SET_HUMIDITY_SCHEMA, WemoHumidifier.set_humidity.__name__
    )

    # This will call WemoHumidifier.reset_filter_life()
    platform.async_register_entity_service(
        SERVICE_RESET_FILTER_LIFE, {}, WemoHumidifier.reset_filter_life.__name__
    )
class WemoHumidifier(WemoBinaryStateEntity, FanEntity):
    """Representation of a WeMo humidifier, exposed as a fan entity."""

    wemo: Humidifier

    def __init__(self, coordinator: DeviceCoordinator) -> None:
        """Initialize the WeMo switch."""
        super().__init__(coordinator)
        # Remember the last non-off fan mode so turn_on without an explicit
        # percentage can restore it.
        if self.wemo.fan_mode != FanMode.Off:
            self._last_fan_on_mode = self.wemo.fan_mode
        else:
            self._last_fan_on_mode = FanMode.High

    @property
    def icon(self) -> str:
        """Return the icon of device based on its type."""
        return "mdi:water-percent"

    @property
    def extra_state_attributes(self) -> dict[str, Any]:
        """Return device specific state attributes."""
        return {
            ATTR_CURRENT_HUMIDITY: self.wemo.current_humidity_percent,
            ATTR_TARGET_HUMIDITY: self.wemo.desired_humidity_percent,
            ATTR_FAN_MODE: self.wemo.fan_mode_string,
            ATTR_WATER_LEVEL: self.wemo.water_level_string,
            ATTR_FILTER_LIFE: self.wemo.filter_life_percent,
            ATTR_FILTER_EXPIRED: self.wemo.filter_expired,
        }

    @property
    def percentage(self) -> int:
        """Return the current speed percentage."""
        return ranged_value_to_percentage(SPEED_RANGE, self.wemo.fan_mode)

    @property
    def speed_count(self) -> int:
        """Return the number of speeds the fan supports."""
        return int_states_in_range(SPEED_RANGE)

    @property
    def supported_features(self) -> int:
        """Flag supported features."""
        return SUPPORTED_FEATURES

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        # Track the most recent non-off mode before delegating upstream.
        if self.wemo.fan_mode != FanMode.Off:
            self._last_fan_on_mode = self.wemo.fan_mode
        super()._handle_coordinator_update()

    def turn_on(
        self,
        percentage: int | None = None,
        preset_mode: str | None = None,
        **kwargs: Any,
    ) -> None:
        """Turn the fan on; preset_mode is accepted but unused."""
        self.set_percentage(percentage)

    def turn_off(self, **kwargs: Any) -> None:
        """Turn the switch off."""
        with self._wemo_call_wrapper("turn off"):
            self.wemo.set_state(FanMode.Off)

    def set_percentage(self, percentage: int | None) -> None:
        """Set the fan_mode of the Humidifier.

        None restores the last non-off mode; 0 turns the fan off.
        """
        if percentage is None:
            named_speed = self._last_fan_on_mode
        elif percentage == 0:
            named_speed = FanMode.Off
        else:
            named_speed = FanMode(
                math.ceil(percentage_to_ranged_value(SPEED_RANGE, percentage))
            )

        with self._wemo_call_wrapper("set speed"):
            self.wemo.set_state(named_speed)

    def set_humidity(self, target_humidity: float) -> None:
        """Set the target humidity level for the Humidifier.

        The requested percentage is mapped onto the discrete
        DesiredHumidity steps the device supports.
        """
        if target_humidity < 50:
            pywemo_humidity = DesiredHumidity.FortyFivePercent
        elif 50 <= target_humidity < 55:
            pywemo_humidity = DesiredHumidity.FiftyPercent
        elif 55 <= target_humidity < 60:
            pywemo_humidity = DesiredHumidity.FiftyFivePercent
        elif 60 <= target_humidity < 100:
            pywemo_humidity = DesiredHumidity.SixtyPercent
        elif target_humidity >= 100:
            pywemo_humidity = DesiredHumidity.OneHundredPercent

        with self._wemo_call_wrapper("set humidity"):
            self.wemo.set_humidity(pywemo_humidity)

    def reset_filter_life(self) -> None:
        """Reset the filter life to 100%."""
        with self._wemo_call_wrapper("reset filter life"):
            self.wemo.reset_filter_life()
| {
"content_hash": "c29924dda3fd7b12c72e8ab95f111606",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 87,
"avg_line_length": 33.344086021505376,
"alnum_prop": 0.6575298290873912,
"repo_name": "GenericStudent/home-assistant",
"id": "253ff34213e79d9cadd73099b5234eb28eadefb0",
"size": "6202",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/wemo/fan.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3070"
},
{
"name": "Python",
"bytes": "44491729"
},
{
"name": "Shell",
"bytes": "5092"
}
],
"symlink_target": ""
} |
"""Utilities for testing driver specs."""
import functools
import threading
from collections import abc
from test import IntegrationTest, client_context, client_knobs
from test.utils import (
CMAPListener,
CompareType,
EventListener,
OvertCommandListener,
ServerAndTopologyEventListener,
camel_to_snake,
camel_to_snake_args,
parse_spec_options,
prepare_spec_arguments,
rs_client,
)
from typing import List
from bson import decode, encode
from bson.binary import Binary
from bson.int64 import Int64
from bson.son import SON
from gridfs import GridFSBucket
from pymongo import client_session
from pymongo.command_cursor import CommandCursor
from pymongo.cursor import Cursor
from pymongo.errors import BulkWriteError, OperationFailure, PyMongoError
from pymongo.read_concern import ReadConcern
from pymongo.read_preferences import ReadPreference
from pymongo.results import BulkWriteResult, _WriteResult
from pymongo.write_concern import WriteConcern
class SpecRunnerThread(threading.Thread):
    """Worker thread that runs queued zero-argument callables in order.

    The first exception raised by a scheduled callable is stored in
    ``self.exc`` and stops the thread.
    """

    def __init__(self, name):
        super(SpecRunnerThread, self).__init__()
        self.name = name
        self.exc = None
        # Daemon thread so a hung spec operation cannot block interpreter
        # exit. (Thread.setDaemon() is deprecated since Python 3.10.)
        self.daemon = True
        self.cond = threading.Condition()
        self.ops = []
        self.stopped = False

    def schedule(self, work):
        """Queue *work* and wake the thread."""
        self.ops.append(work)
        with self.cond:
            self.cond.notify()

    def stop(self):
        """Ask the thread to exit once the queue is drained."""
        self.stopped = True
        with self.cond:
            self.cond.notify()

    def run(self):
        while not self.stopped or self.ops:
            if not self.ops:
                with self.cond:
                    # Re-check under the lock to avoid a missed wakeup:
                    # schedule()/stop() mutate state first and notify while
                    # holding the lock, so if nothing changed by the time we
                    # hold the lock it is safe to wait.
                    if not self.ops and not self.stopped:
                        self.cond.wait(10)
            if self.ops:
                try:
                    work = self.ops.pop(0)
                    work()
                except Exception as exc:
                    self.exc = exc
                    self.stop()
class SpecRunner(IntegrationTest):
    """Base runner for JSON driver-spec tests."""

    # Clients for each mongos; empty unless populated by a subclass.
    mongos_clients: List
    # client_knobs context that speeds up heartbeats (set in setUpClass).
    knobs: client_knobs
    # Command listener for the client under test (reset in setUp).
    listener: EventListener
    @classmethod
    def setUpClass(cls):
        """Class-level setup: speed up server monitoring for the specs."""
        super(SpecRunner, cls).setUpClass()
        cls.mongos_clients = []

        # Speed up the tests by decreasing the heartbeat frequency.
        cls.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1)
        cls.knobs.enable()
    @classmethod
    def tearDownClass(cls):
        """Restore the client knobs changed in setUpClass."""
        cls.knobs.disable()
        super(SpecRunner, cls).tearDownClass()
    def setUp(self):
        """Per-test setup: reset listeners and show full assertion diffs."""
        super(SpecRunner, self).setUp()
        self.targets = {}
        self.listener = None  # type: ignore
        self.pool_listener = None
        self.server_listener = None
        # Show complete diffs on assertion failures.
        self.maxDiff = None
    def _set_fail_point(self, client, command_args):
        """Configure the server 'failCommand' fail point via *client*."""
        cmd = SON([("configureFailPoint", "failCommand")])
        cmd.update(command_args)
        client.admin.command(cmd)
    def set_fail_point(self, command_args):
        """Enable the fail point on every mongos, or on the test client."""
        clients = self.mongos_clients if self.mongos_clients else [self.client]
        for client in clients:
            self._set_fail_point(client, command_args)
    def targeted_fail_point(self, session, fail_point):
        """Run the targetedFailPoint test operation.

        Enable the fail point on the session's pinned mongos.
        """
        clients = {c.address: c for c in self.mongos_clients}
        client = clients[session._pinned_address]
        self._set_fail_point(client, fail_point)

        # Always disable the fail point once the test finishes.
        self.addCleanup(self.set_fail_point, {"mode": "off"})
    def assert_session_pinned(self, session):
        """Run the assertSessionPinned test operation.

        Assert that the given session is pinned to a mongos.
        """
        self.assertIsNotNone(session._transaction.pinned_address)
    def assert_session_unpinned(self, session):
        """Run the assertSessionUnpinned test operation.

        Assert that the given session is not pinned to a mongos.
        """
        self.assertIsNone(session._pinned_address)
        self.assertIsNone(session._transaction.pinned_address)
    def assert_collection_exists(self, database, collection):
        """Run the assertCollectionExists test operation."""
        db = self.client[database]
        self.assertIn(collection, db.list_collection_names())
    def assert_collection_not_exists(self, database, collection):
        """Run the assertCollectionNotExists test operation."""
        db = self.client[database]
        self.assertNotIn(collection, db.list_collection_names())
    def assert_index_exists(self, database, collection, index):
        """Run the assertIndexExists test operation (matches by index name)."""
        coll = self.client[database][collection]
        self.assertIn(index, [doc["name"] for doc in coll.list_indexes()])
    def assert_index_not_exists(self, database, collection, index):
        """Run the assertIndexNotExists test operation (matches by index name)."""
        coll = self.client[database][collection]
        self.assertNotIn(index, [doc["name"] for doc in coll.list_indexes()])
def assertErrorLabelsContain(self, exc, expected_labels):
labels = [l for l in expected_labels if exc.has_error_label(l)]
self.assertEqual(labels, expected_labels)
def assertErrorLabelsOmit(self, exc, omit_labels):
for label in omit_labels:
self.assertFalse(
exc.has_error_label(label), msg="error labels should not contain %s" % (label,)
)
    def kill_all_sessions(self):
        """Run killAllSessions on every mongos, or on the test client."""
        clients = self.mongos_clients if self.mongos_clients else [self.client]
        for client in clients:
            try:
                client.admin.command("killAllSessions", [])
            except OperationFailure:
                # "operation was interrupted" by killing the command's
                # own session.
                pass
def check_command_result(self, expected_result, result):
# Only compare the keys in the expected result.
filtered_result = {}
for key in expected_result:
try:
filtered_result[key] = result[key]
except KeyError:
pass
self.assertEqual(filtered_result, expected_result)
    # TODO: factor the following function with test_crud.py.
    def check_result(self, expected_result, result):
        """Assert that a spec operation *result* matches *expected_result*.

        Write results are compared property-by-property using the
        camelCase names from the spec file; anything else is compared
        structurally.
        """
        if isinstance(result, _WriteResult):
            for res in expected_result:
                prop = camel_to_snake(res)
                # SPEC-869: Only BulkWriteResult has upserted_count.
                if prop == "upserted_count" and not isinstance(result, BulkWriteResult):
                    if result.upserted_id is not None:
                        upserted_count = 1
                    else:
                        upserted_count = 0
                    self.assertEqual(upserted_count, expected_result[res], prop)
                elif prop == "inserted_ids":
                    # BulkWriteResult does not have inserted_ids.
                    if isinstance(result, BulkWriteResult):
                        self.assertEqual(len(expected_result[res]), result.inserted_count)
                    else:
                        # InsertManyResult may be compared to [id1] from the
                        # crud spec or {"0": id1} from the retryable write spec.
                        ids = expected_result[res]
                        if isinstance(ids, dict):
                            ids = [ids[str(i)] for i in range(len(ids))]
                        self.assertEqual(ids, result.inserted_ids, prop)
                elif prop == "upserted_ids":
                    # Convert indexes from strings to integers.
                    ids = expected_result[res]
                    expected_ids = {}
                    for str_index in ids:
                        expected_ids[int(str_index)] = ids[str_index]
                    self.assertEqual(expected_ids, result.upserted_ids, prop)
                else:
                    self.assertEqual(getattr(result, prop), expected_result[res], prop)

            return True
        else:
            # Non-write results: compare mappings key-by-key and lists
            # element-by-element, recursively.
            def _helper(expected_result, result):
                if isinstance(expected_result, abc.Mapping):
                    for i in expected_result.keys():
                        self.assertEqual(expected_result[i], result[i])

                elif isinstance(expected_result, list):
                    for i, k in zip(expected_result, result):
                        _helper(i, k)
                else:
                    self.assertEqual(expected_result, result)

            _helper(expected_result, result)
    def get_object_name(self, op):
        """Return the name of the object to run the operation against.

        Subclasses may override handling of 'object'; the transaction
        spec says 'object' is required.
        """
        return op["object"]
    @staticmethod
    def parse_options(opts):
        """Parse spec options; delegates to test.utils.parse_spec_options
        and exists as a hook for subclasses to customize."""
        return parse_spec_options(opts)
    def run_operation(self, sessions, collection, operation):
        """Execute a single spec-test operation and return its result.

        Maps the spec's camelCase operation name onto the PyMongo API,
        resolves the target object ('client', 'database', 'collection',
        a session, a GridFS bucket or the test runner itself), prepares
        the arguments, and invokes the call.
        """
        original_collection = collection
        name = camel_to_snake(operation["name"])
        if name == "run_command":
            name = "command"
        elif name == "download_by_name":
            name = "open_download_stream_by_name"
        elif name == "download":
            name = "open_download_stream"
        elif name == "map_reduce":
            self.skipTest("PyMongo does not support mapReduce")
        elif name == "count":
            self.skipTest("PyMongo does not support count")

        database = collection.database
        collection = database.get_collection(collection.name)
        if "collectionOptions" in operation:
            collection = collection.with_options(
                **self.parse_options(operation["collectionOptions"])
            )

        object_name = self.get_object_name(operation)
        if object_name == "gridfsbucket":
            # Only create the GridFSBucket when we need it (for the gridfs
            # retryable reads tests).
            obj = GridFSBucket(database, bucket_name=collection.name)
        else:
            objects = {
                "client": database.client,
                "database": database,
                "collection": collection,
                "testRunner": self,
            }
            objects.update(sessions)
            obj = objects[object_name]

        # Combine arguments with options and handle special cases.
        arguments = operation.get("arguments", {})
        arguments.update(arguments.pop("options", {}))
        self.parse_options(arguments)

        cmd = getattr(obj, name)

        with_txn_callback = functools.partial(
            self.run_operations, sessions, original_collection, in_with_transaction=True
        )
        prepare_spec_arguments(operation, arguments, name, sessions, with_txn_callback)

        if name == "run_on_thread":
            args = {"sessions": sessions, "collection": collection}
            args.update(arguments)
            arguments = args

        result = cmd(**dict(arguments))
        # Cleanup open change stream cursors.
        if name == "watch":
            self.addCleanup(result.close)

        if name == "aggregate":
            if arguments["pipeline"] and "$out" in arguments["pipeline"][-1]:
                # Read from the primary to ensure causal consistency.
                out = collection.database.get_collection(
                    arguments["pipeline"][-1]["$out"], read_preference=ReadPreference.PRIMARY
                )
                return out.find()
        if "download" in name:
            result = Binary(result.read())

        # Exhaust cursors so the caller can compare plain lists.
        if isinstance(result, Cursor) or isinstance(result, CommandCursor):
            return list(result)

        return result
def allowable_errors(self, op):
"""Allow encryption spec to override expected error classes."""
return (PyMongoError,)
def _run_op(self, sessions, collection, op, in_with_transaction):
expected_result = op.get("result")
if expect_error(op):
with self.assertRaises(self.allowable_errors(op), msg=op["name"]) as context:
out = self.run_operation(sessions, collection, op.copy())
if expect_error_message(expected_result):
if isinstance(context.exception, BulkWriteError):
errmsg = str(context.exception.details).lower()
else:
errmsg = str(context.exception).lower()
self.assertIn(expected_result["errorContains"].lower(), errmsg)
if expect_error_code(expected_result):
self.assertEqual(
expected_result["errorCodeName"], context.exception.details.get("codeName")
)
if expect_error_labels_contain(expected_result):
self.assertErrorLabelsContain(
context.exception, expected_result["errorLabelsContain"]
)
if expect_error_labels_omit(expected_result):
self.assertErrorLabelsOmit(context.exception, expected_result["errorLabelsOmit"])
# Reraise the exception if we're in the with_transaction
# callback.
if in_with_transaction:
raise context.exception
else:
result = self.run_operation(sessions, collection, op.copy())
if "result" in op:
if op["name"] == "runCommand":
self.check_command_result(expected_result, result)
else:
self.check_result(expected_result, result)
def run_operations(self, sessions, collection, ops, in_with_transaction=False):
for op in ops:
self._run_op(sessions, collection, op, in_with_transaction)
    # TODO: factor with test_command_monitoring.py
    def check_events(self, test, listener, session_ids):
        """Compare the commands captured by *listener* against the test's
        "expectations", normalizing unstable values (cursor ids, cluster
        times, lsids) before comparison.

        Mutates both the captured event commands and the expectation dicts
        in place to bring the two into a comparable form.
        """
        events = listener.started_events
        if not len(test["expectations"]):
            return

        # Give a nicer message when there are missing or extra events
        cmds = decode_raw([event.command for event in events])
        self.assertEqual(len(events), len(test["expectations"]), cmds)

        for i, expectation in enumerate(test["expectations"]):
            # Each expectation is a one-key dict, e.g. {"command_started_event": {...}}.
            event_type = next(iter(expectation))
            event = events[i]

            # The tests substitute 42 for any number other than 0.
            if event.command_name == "getMore" and event.command["getMore"]:
                event.command["getMore"] = Int64(42)
            elif event.command_name == "killCursors":
                event.command["cursors"] = [Int64(42)]
            elif event.command_name == "update":
                # TODO: remove this once PYTHON-1744 is done.
                # Add upsert and multi fields back into expectations.
                updates = expectation[event_type]["command"]["updates"]
                for update in updates:
                    update.setdefault("upsert", False)
                    update.setdefault("multi", False)

            # Replace afterClusterTime: 42 with actual afterClusterTime.
            expected_cmd = expectation[event_type]["command"]
            expected_read_concern = expected_cmd.get("readConcern")
            if expected_read_concern is not None:
                time = expected_read_concern.get("afterClusterTime")
                if time == 42:
                    actual_time = event.command.get("readConcern", {}).get("afterClusterTime")
                    if actual_time is not None:
                        expected_read_concern["afterClusterTime"] = actual_time

            # recoveryToken: 42 means "any dict" — compare by type only.
            recovery_token = expected_cmd.get("recoveryToken")
            if recovery_token == 42:
                expected_cmd["recoveryToken"] = CompareType(dict)

            # Replace lsid with a name like "session0" to match test.
            if "lsid" in event.command:
                for name, lsid in session_ids.items():
                    if event.command["lsid"] == lsid:
                        event.command["lsid"] = name
                        break

            for attr, expected in expectation[event_type].items():
                actual = getattr(event, attr)
                expected = wrap_types(expected)
                if isinstance(expected, dict):
                    # A None value in the expectation asserts key ABSENCE.
                    for key, val in expected.items():
                        if val is None:
                            if key in actual:
                                self.fail("Unexpected key [%s] in %r" % (key, actual))
                        elif key not in actual:
                            self.fail("Expected key [%s] in %r" % (key, actual))
                        else:
                            self.assertEqual(
                                val, decode_raw(actual[key]), "Key [%s] in %s" % (key, actual)
                            )
                else:
                    self.assertEqual(actual, expected)
def maybe_skip_scenario(self, test):
if test.get("skipReason"):
self.skipTest(test.get("skipReason"))
def get_scenario_db_name(self, scenario_def):
"""Allow subclasses to override a test's database name."""
return scenario_def["database_name"]
def get_scenario_coll_name(self, scenario_def):
"""Allow subclasses to override a test's collection name."""
return scenario_def["collection_name"]
def get_outcome_coll_name(self, outcome, collection):
"""Allow subclasses to override outcome collection."""
return collection.name
def run_test_ops(self, sessions, collection, test):
"""Added to allow retryable writes spec to override a test's
operation."""
self.run_operations(sessions, collection, test["operations"])
def parse_client_options(self, opts):
"""Allow encryption spec to override a clientOptions parsing."""
# Convert test['clientOptions'] to dict to avoid a Jython bug using
# "**" with ScenarioDict.
return dict(opts)
    def setup_scenario(self, scenario_def):
        """Allow specs to override a test's setup.

        Clears (or creates) the scenario's collection and inserts its seed
        documents, using a majority write concern only on the final write.
        """
        db_name = self.get_scenario_db_name(scenario_def)
        coll_name = self.get_scenario_coll_name(scenario_def)
        documents = scenario_def["data"]

        # Setup the collection with as few majority writes as possible.
        db = client_context.client.get_database(db_name)
        coll_exists = bool(db.list_collection_names(filter={"name": coll_name}))
        if coll_exists:
            # Cheap default-write-concern cleanup of leftover documents.
            db[coll_name].delete_many({})

        # Only use majority wc only on the final write.
        wc = WriteConcern(w="majority")
        if documents:
            db.get_collection(coll_name, write_concern=wc).insert_many(documents)
        elif not coll_exists:
            # Ensure collection exists.
            db.create_collection(coll_name, write_concern=wc)
    def run_scenario(self, scenario_def, test):
        """Run one spec test end to end.

        Seeds data, configures fail points, builds a fresh client with
        event listeners, creates session0/session1, executes the test's
        operations, then verifies the captured command events and the
        collection's final contents.
        """
        self.maybe_skip_scenario(test)

        # Kill all sessions before and after each test to prevent an open
        # transaction (from a test failure) from blocking collection/database
        # operations during test set up and tear down.
        self.kill_all_sessions()
        self.addCleanup(self.kill_all_sessions)
        self.setup_scenario(scenario_def)
        database_name = self.get_scenario_db_name(scenario_def)
        collection_name = self.get_scenario_coll_name(scenario_def)
        # SPEC-1245 workaround StaleDbVersion on distinct
        for c in self.mongos_clients:
            c[database_name][collection_name].distinct("x")

        # Configure the fail point before creating the client.
        if "failPoint" in test:
            fp = test["failPoint"]
            self.set_fail_point(fp)
            # Always disable the fail point on exit, even on failure.
            self.addCleanup(
                self.set_fail_point, {"configureFailPoint": fp["configureFailPoint"], "mode": "off"}
            )

        listener = OvertCommandListener()
        pool_listener = CMAPListener()
        server_listener = ServerAndTopologyEventListener()
        # Create a new client, to avoid interference from pooled sessions.
        client_options = self.parse_client_options(test["clientOptions"])
        # MMAPv1 does not support retryable writes.
        if client_options.get("retryWrites") is True and client_context.storage_engine == "mmapv1":
            self.skipTest("MMAPv1 does not support retryWrites=True")
        use_multi_mongos = test["useMultipleMongoses"]
        host = None
        if use_multi_mongos:
            if client_context.load_balancer or client_context.serverless:
                host = client_context.MULTI_MONGOS_LB_URI
            elif client_context.is_mongos:
                host = client_context.mongos_seeds()
        client = rs_client(
            h=host, event_listeners=[listener, pool_listener, server_listener], **client_options
        )
        # Expose client/listeners for assertions made by subclasses.
        self.scenario_client = client
        self.listener = listener
        self.pool_listener = pool_listener
        self.server_listener = server_listener
        # Close the client explicitly to avoid having too many threads open.
        self.addCleanup(client.close)

        # Create session0 and session1.
        sessions = {}
        session_ids = {}
        for i in range(2):
            # Don't attempt to create sessions if they are not supported by
            # the running server version.
            if not client_context.sessions_enabled:
                break
            session_name = "session%d" % i
            opts = camel_to_snake_args(test["sessionOptions"][session_name])
            if "default_transaction_options" in opts:
                txn_opts = self.parse_options(opts["default_transaction_options"])
                txn_opts = client_session.TransactionOptions(**txn_opts)
                opts["default_transaction_options"] = txn_opts

            s = client.start_session(**dict(opts))

            sessions[session_name] = s
            # Store lsid so we can access it after end_session, in check_events.
            session_ids[session_name] = s.session_id

        self.addCleanup(end_sessions, sessions)

        collection = client[database_name][collection_name]
        self.run_test_ops(sessions, collection, test)

        # Sessions must be ended before event checking (lsid substitution
        # uses the recorded session_ids).
        end_sessions(sessions)

        self.check_events(test, listener, session_ids)

        # Disable fail points.
        if "failPoint" in test:
            fp = test["failPoint"]
            self.set_fail_point({"configureFailPoint": fp["configureFailPoint"], "mode": "off"})

        # Assert final state is expected.
        outcome = test["outcome"]
        expected_c = outcome.get("collection")
        if expected_c is not None:
            outcome_coll_name = self.get_outcome_coll_name(outcome, collection)

            # Read from the primary with local read concern to ensure causal
            # consistency.
            outcome_coll = client_context.client[collection.database.name].get_collection(
                outcome_coll_name,
                read_preference=ReadPreference.PRIMARY,
                read_concern=ReadConcern("local"),
            )
            actual_data = list(outcome_coll.find(sort=[("_id", 1)]))

            # The expected data needs to be the left hand side here otherwise
            # CompareType(Binary) doesn't work.
            self.assertEqual(wrap_types(expected_c["data"]), actual_data)
def expect_any_error(op):
    """Return the operation's top-level "error" flag (truthy if an error of
    any kind is expected), or False for non-dict operations."""
    if not isinstance(op, dict):
        return False
    return op.get("error")
def expect_error_message(expected_result):
    """Return True if the expected result asserts an error-message substring.

    Uses .get() so a plain dict without "errorContains" returns False
    instead of raising KeyError; present keys behave exactly as before.
    """
    if isinstance(expected_result, dict):
        return isinstance(expected_result.get("errorContains"), str)
    return False
def expect_error_code(expected_result):
    """Return the expected "errorCodeName" (truthy when a code is asserted).

    Uses .get() so a plain dict without the key yields a falsy None instead
    of raising KeyError; present keys behave exactly as before.
    """
    if isinstance(expected_result, dict):
        return expected_result.get("errorCodeName")
    return False
def expect_error_labels_contain(expected_result):
    """Return the expected "errorLabelsContain" list (truthy when asserted).

    Uses .get() so a plain dict without the key yields a falsy None instead
    of raising KeyError; present keys behave exactly as before.
    """
    if isinstance(expected_result, dict):
        return expected_result.get("errorLabelsContain")
    return False
def expect_error_labels_omit(expected_result):
    """Return the expected "errorLabelsOmit" list (truthy when asserted).

    Uses .get() so a plain dict without the key yields a falsy None instead
    of raising KeyError; present keys behave exactly as before.
    """
    if isinstance(expected_result, dict):
        return expected_result.get("errorLabelsOmit")
    return False
def expect_error(op):
    """Return a truthy value when *op* declares any kind of expected error."""
    expected_result = op.get("result")
    # Mirror the original short-circuiting `or` chain: keep the first truthy
    # check result; if all are falsy, return the last check's value.
    verdict = expect_any_error(op)
    for check in (
        expect_error_message,
        expect_error_code,
        expect_error_labels_contain,
        expect_error_labels_omit,
    ):
        if verdict:
            break
        verdict = check(expected_result)
    return verdict
def end_sessions(sessions):
    """End every session in *sessions*; ending aborts any open transaction."""
    for session in sessions.values():
        session.end_session()
def decode_raw(val):
    """Round-trip *val* through BSON so any RawBSONDocuments inside it are
    decoded into plain dicts; non-container values pass through unchanged."""
    if not isinstance(val, (list, abc.Mapping)):
        return val
    # Wrap in a single-key document so top-level lists can be encoded too.
    return decode(encode({"v": val}))["v"]
# Mapping from "$$type" names used in spec-test assertions to the BSON
# classes they should match; consumed by wrap_types() below.
TYPES = {
    "binData": Binary,
    "long": Int64,
}
def wrap_types(val):
    """Recursively replace {"$$type": name} assertions in *val* with
    CompareType matchers so equality checks compare by type only."""
    if isinstance(val, list):
        return [wrap_types(item) for item in val]
    if not isinstance(val, abc.Mapping):
        return val
    type_name = val.get("$$type")
    if type_name:
        return CompareType(TYPES[type_name])
    return {key: wrap_types(val[key]) for key in val}
| {
"content_hash": "016f8cf21d09a62e422b98a431849832",
"timestamp": "",
"source": "github",
"line_count": 648,
"max_line_length": 100,
"avg_line_length": 38.66358024691358,
"alnum_prop": 0.5950746387802347,
"repo_name": "mongodb/mongo-python-driver",
"id": "8528ecb8c751bfd0e4e3db1b0ceb99df192fc16e",
"size": "25636",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/utils_spec_runner.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "183641"
},
{
"name": "Python",
"bytes": "2983153"
},
{
"name": "Shell",
"bytes": "30026"
}
],
"symlink_target": ""
} |
import sys
from PyQt5.QtCore import QCoreApplication, Qt
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import QApplication, QWidget, QMainWindow, QPushButton, QAction, QMessageBox
from PyQt5.QtWidgets import QCheckBox
class window(QMainWindow):
    """Main window demonstrating menus, a toolbar, a button and a checkbox."""

    def __init__(self):
        super(window, self).__init__()
        self.setGeometry(50, 50, 500, 300)
        self.setWindowTitle('pyqt5 Tut')
        # self.setWindowIcon(QIcon('pic.png'))

        # Menu entry (Ctrl+Q) that quits via close_application().
        quit_action = QAction('&Get to the choppah', self)
        quit_action.setShortcut('Ctrl+Q')
        quit_action.setStatusTip('leave the app')
        quit_action.triggered.connect(self.close_application)

        self.statusBar()
        menu_bar = self.menuBar()
        file_menu = menu_bar.addMenu('&File')
        file_menu.addAction(quit_action)

        # Toolbar entry with an icon; also quits the application.
        toolbar_action = QAction(QIcon('pic.png'), 'flee the scene', self)
        toolbar_action.triggered.connect(self.close_application)
        self.toolBar = self.addToolBar('extraction')
        self.toolBar.addAction(toolbar_action)

        self.home()

    def home(self):
        """Populate the window with a quit button and a resize checkbox."""
        quit_button = QPushButton('quit', self)
        quit_button.clicked.connect(self.close_application)
        quit_button.resize(quit_button.sizeHint())
        quit_button.move(0, 100)

        enlarge_box = QCheckBox('Enlarge window', self)
        # Call enlarge_box.toggle() here to start in the checked state.
        enlarge_box.move(0, 50)
        enlarge_box.stateChanged.connect(self.enlarge_window)

        self.show()

    def enlarge_window(self, state):
        """Grow the window while the checkbox is checked, shrink otherwise."""
        if state == Qt.Checked:
            self.setGeometry(50, 50, 1000, 600)
        else:
            self.setGeometry(50, 50, 500, 300)

    def close_application(self):
        """Ask for confirmation, then exit the whole application."""
        choice = QMessageBox.question(self, 'Message',
                                      "Are you sure to quit?",
                                      QMessageBox.Yes | QMessageBox.No,
                                      QMessageBox.No)
        if choice == QMessageBox.Yes:
            print('quit application')
            sys.exit()
# Guard so the GUI only starts when run as a script, not on import.
if __name__ == "__main__":  # had to add this otherwise app crashed

    def run():
        # The QApplication must exist before any widgets are constructed.
        app = QApplication(sys.argv)
        # NOTE(review): binding the window to a name presumably keeps it from
        # being garbage collected while the event loop runs — confirm.
        Gui = window()
        sys.exit(app.exec_())

    run()
| {
"content_hash": "a28a89d493a0fadd9efe27a56069f9d9",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 97,
"avg_line_length": 28.857142857142858,
"alnum_prop": 0.6053105310531053,
"repo_name": "kenwaldek/pythonprogramming",
"id": "3f57a454d6bee7869cb0fa30e026233def186566",
"size": "2669",
"binary": false,
"copies": "1",
"ref": "refs/heads/pythonprogramming",
"path": "pyqt5_lesson_08.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "44435"
}
],
"symlink_target": ""
} |
"""
Astropy affiliated package for image reprojection (resampling).
"""
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import *
# ----------------------------------------------------------------------------
# Expose the public API only when not building the package;
# _ASTROPY_SETUP_ (from ._astropy_init) is presumably True during
# setup, when the subpackage may not be importable yet — TODO confirm.
if not _ASTROPY_SETUP_:
    from .high_level import reproject
| {
"content_hash": "d12a6dcaaac3bae6741ba7ac515a5f33",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 78,
"avg_line_length": 35.75,
"alnum_prop": 0.47086247086247085,
"repo_name": "bsipocz/reproject",
"id": "82c1eba8e5804382ec66ce9aa81230c75fcc52ff",
"size": "493",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reproject/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "33811"
},
{
"name": "Python",
"bytes": "89570"
}
],
"symlink_target": ""
} |
"""
.. dialect:: mssql
:name: Microsoft SQL Server
Auto Increment Behavior
-----------------------
SQL Server provides so-called "auto incrementing" behavior using the
``IDENTITY`` construct, which can be placed on an integer primary key.
SQLAlchemy considers ``IDENTITY`` within its default "autoincrement" behavior,
described at :paramref:`.Column.autoincrement`; this means
that by default, the first integer primary key column in a :class:`.Table`
will be considered to be the identity column and will generate DDL as such::
from sqlalchemy import Table, MetaData, Column, Integer
m = MetaData()
t = Table('t', m,
Column('id', Integer, primary_key=True),
Column('x', Integer))
m.create_all(engine)
The above example will generate DDL as:
.. sourcecode:: sql
CREATE TABLE t (
id INTEGER NOT NULL IDENTITY(1,1),
x INTEGER NULL,
PRIMARY KEY (id)
)
For the case where this default generation of ``IDENTITY`` is not desired,
specify ``autoincrement=False`` on all integer primary key columns::
m = MetaData()
t = Table('t', m,
Column('id', Integer, primary_key=True, autoincrement=False),
Column('x', Integer))
m.create_all(engine)
.. note::
An INSERT statement which refers to an explicit value for such
a column is prohibited by SQL Server, however SQLAlchemy will detect this
and modify the ``IDENTITY_INSERT`` flag accordingly at statement execution
time. As this is not a high performing process, care should be taken to
set the ``autoincrement`` flag appropriately for columns that will not
actually require IDENTITY behavior.
Controlling "Start" and "Increment"
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Specific control over the parameters of the ``IDENTITY`` value is supported
using the :class:`.schema.Sequence` object. While this object normally
represents an explicit "sequence" for supporting backends, on SQL Server it is
re-purposed to specify behavior regarding the identity column, including
support of the "start" and "increment" values::
from sqlalchemy import Table, Integer, Sequence, Column
Table('test', metadata,
Column('id', Integer,
Sequence('blah', start=100, increment=10),
primary_key=True),
Column('name', String(20))
).create(some_engine)
would yield:
.. sourcecode:: sql
CREATE TABLE test (
id INTEGER NOT NULL IDENTITY(100,10) PRIMARY KEY,
name VARCHAR(20) NULL,
)
Note that the ``start`` and ``increment`` values for sequences are
optional and will default to 1,1.
INSERT behavior
^^^^^^^^^^^^^^^^
Handling of the ``IDENTITY`` column at INSERT time involves two key
techniques. The most common is being able to fetch the "last inserted value"
for a given ``IDENTITY`` column, a process which SQLAlchemy performs
implicitly in many cases, most importantly within the ORM.
The process for fetching this value has several variants:
* In the vast majority of cases, RETURNING is used in conjunction with INSERT
statements on SQL Server in order to get newly generated primary key values:
.. sourcecode:: sql
INSERT INTO t (x) OUTPUT inserted.id VALUES (?)
* When RETURNING is not available or has been disabled via
``implicit_returning=False``, either the ``scope_identity()`` function or
the ``@@identity`` variable is used; behavior varies by backend:
* when using PyODBC, the phrase ``; select scope_identity()`` will be
appended to the end of the INSERT statement; a second result set will be
fetched in order to receive the value. Given a table as::
t = Table('t', m, Column('id', Integer, primary_key=True),
Column('x', Integer),
implicit_returning=False)
an INSERT will look like:
.. sourcecode:: sql
INSERT INTO t (x) VALUES (?); select scope_identity()
* Other dialects such as pymssql will call upon
``SELECT scope_identity() AS lastrowid`` subsequent to an INSERT
statement. If the flag ``use_scope_identity=False`` is passed to
:func:`.create_engine`, the statement ``SELECT @@identity AS lastrowid``
is used instead.
A table that contains an ``IDENTITY`` column will prohibit an INSERT statement
that refers to the identity column explicitly. The SQLAlchemy dialect will
detect when an INSERT construct, created using a core :func:`.insert`
construct (not a plain string SQL), refers to the identity column, and
in this case will emit ``SET IDENTITY_INSERT ON`` prior to the insert
statement proceeding, and ``SET IDENTITY_INSERT OFF`` subsequent to the
execution. Given this example::
m = MetaData()
t = Table('t', m, Column('id', Integer, primary_key=True),
Column('x', Integer))
m.create_all(engine)
engine.execute(t.insert(), {'id': 1, 'x':1}, {'id':2, 'x':2})
The above column will be created with IDENTITY, however the INSERT statement
we emit is specifying explicit values. In the echo output we can see
how SQLAlchemy handles this:
.. sourcecode:: sql
CREATE TABLE t (
id INTEGER NOT NULL IDENTITY(1,1),
x INTEGER NULL,
PRIMARY KEY (id)
)
COMMIT
SET IDENTITY_INSERT t ON
INSERT INTO t (id, x) VALUES (?, ?)
((1, 1), (2, 2))
SET IDENTITY_INSERT t OFF
COMMIT
This
is an auxiliary use case suitable for testing and bulk insert scenarios.
MAX on VARCHAR / NVARCHAR
-------------------------
SQL Server supports the special string "MAX" within the
:class:`.sqltypes.VARCHAR` and :class:`.sqltypes.NVARCHAR` datatypes,
to indicate "maximum length possible". The dialect currently handles this as
a length of "None" in the base type, rather than supplying a
dialect-specific version of these types, so that a base type
specified such as ``VARCHAR(None)`` can assume "unlengthed" behavior on
more than one backend without using dialect-specific types.
To build a SQL Server VARCHAR or NVARCHAR with MAX length, use None::
my_table = Table(
'my_table', metadata,
Column('my_data', VARCHAR(None)),
Column('my_n_data', NVARCHAR(None))
)
Collation Support
-----------------
Character collations are supported by the base string types,
specified by the string argument "collation"::
from sqlalchemy import VARCHAR
Column('login', VARCHAR(32, collation='Latin1_General_CI_AS'))
When such a column is associated with a :class:`.Table`, the
CREATE TABLE statement for this column will yield::
login VARCHAR(32) COLLATE Latin1_General_CI_AS NULL
.. versionadded:: 0.8 Character collations are now part of the base string
types.
LIMIT/OFFSET Support
--------------------
MSSQL has no support for the LIMIT or OFFSET keywords. LIMIT is
supported directly through the ``TOP`` Transact SQL keyword::
select.limit
will yield::
SELECT TOP n
If using SQL Server 2005 or above, LIMIT with OFFSET
support is available through the ``ROW_NUMBER OVER`` construct.
For versions below 2005, LIMIT with OFFSET usage will fail.
.. _mssql_isolation_level:
Transaction Isolation Level
---------------------------
All SQL Server dialects support setting of transaction isolation level
both via a dialect-specific parameter
:paramref:`.create_engine.isolation_level`
accepted by :func:`.create_engine`,
as well as the :paramref:`.Connection.execution_options.isolation_level`
argument as passed to
:meth:`.Connection.execution_options`. This feature works by issuing the
command ``SET TRANSACTION ISOLATION LEVEL <level>`` for
each new connection.
To set isolation level using :func:`.create_engine`::
engine = create_engine(
"mssql+pyodbc://scott:tiger@ms_2008",
isolation_level="REPEATABLE READ"
)
To set using per-connection execution options::
connection = engine.connect()
connection = connection.execution_options(
isolation_level="READ COMMITTED"
)
Valid values for ``isolation_level`` include:
* ``READ COMMITTED``
* ``READ UNCOMMITTED``
* ``REPEATABLE READ``
* ``SERIALIZABLE``
* ``SNAPSHOT`` - specific to SQL Server
.. versionadded:: 1.1 support for isolation level setting on Microsoft
SQL Server.
Nullability
-----------
MSSQL has support for three levels of column nullability. The default
nullability allows nulls and is explicit in the CREATE TABLE
construct::
name VARCHAR(20) NULL
If ``nullable=None`` is specified then no specification is made. In
other words the database's configured default is used. This will
render::
name VARCHAR(20)
If ``nullable`` is ``True`` or ``False`` then the column will be
``NULL`` or ``NOT NULL`` respectively.
Date / Time Handling
--------------------
DATE and TIME are supported. Bind parameters are converted
to datetime.datetime() objects as required by most MSSQL drivers,
and results are processed from strings if needed.
The DATE and TIME types are not available for MSSQL 2005 and
previous - if a server version below 2008 is detected, DDL
for these types will be issued as DATETIME.
.. _mssql_large_type_deprecation:
Large Text/Binary Type Deprecation
----------------------------------
Per `SQL Server 2012/2014 Documentation <http://technet.microsoft.com/en-us/library/ms187993.aspx>`_,
the ``NTEXT``, ``TEXT`` and ``IMAGE`` datatypes are to be removed from SQL Server
in a future release. SQLAlchemy normally relates these types to the
:class:`.UnicodeText`, :class:`.Text` and :class:`.LargeBinary` datatypes.
In order to accommodate this change, a new flag ``deprecate_large_types``
is added to the dialect, which will be automatically set based on detection
of the server version in use, if not otherwise set by the user. The
behavior of this flag is as follows:
* When this flag is ``True``, the :class:`.UnicodeText`, :class:`.Text` and
:class:`.LargeBinary` datatypes, when used to render DDL, will render the
types ``NVARCHAR(max)``, ``VARCHAR(max)``, and ``VARBINARY(max)``,
respectively. This is a new behavior as of the addition of this flag.
* When this flag is ``False``, the :class:`.UnicodeText`, :class:`.Text` and
:class:`.LargeBinary` datatypes, when used to render DDL, will render the
types ``NTEXT``, ``TEXT``, and ``IMAGE``,
respectively. This is the long-standing behavior of these types.
* The flag begins with the value ``None``, before a database connection is
established. If the dialect is used to render DDL without the flag being
set, it is interpreted the same as ``False``.
* On first connection, the dialect detects if SQL Server version 2012 or greater
is in use; if the flag is still at ``None``, it sets it to ``True`` or
``False`` based on whether 2012 or greater is detected.
* The flag can be set to either ``True`` or ``False`` when the dialect
is created, typically via :func:`.create_engine`::
eng = create_engine("mssql+pymssql://user:pass@host/db",
deprecate_large_types=True)
* Complete control over whether the "old" or "new" types are rendered is
available in all SQLAlchemy versions by using the UPPERCASE type objects
instead: :class:`.NVARCHAR`, :class:`.VARCHAR`, :class:`.types.VARBINARY`,
:class:`.TEXT`, :class:`.mssql.NTEXT`, :class:`.mssql.IMAGE` will always remain
fixed and always output exactly that type.
.. versionadded:: 1.0.0
.. _legacy_schema_rendering:
Legacy Schema Mode
------------------
Very old versions of the MSSQL dialect introduced the behavior such that a
schema-qualified table would be auto-aliased when used in a
SELECT statement; given a table::
account_table = Table(
'account', metadata,
Column('id', Integer, primary_key=True),
Column('info', String(100)),
schema="customer_schema"
)
this legacy mode of rendering would assume that "customer_schema.account"
would not be accepted by all parts of the SQL statement, as illustrated
below::
>>> eng = create_engine("mssql+pymssql://mydsn", legacy_schema_aliasing=True)
>>> print(account_table.select().compile(eng))
SELECT account_1.id, account_1.info
FROM customer_schema.account AS account_1
This mode of behavior is now off by default, as it appears to have served
no purpose; however in the case that legacy applications rely upon it,
it is available using the ``legacy_schema_aliasing`` argument to
:func:`.create_engine` as illustrated above.
.. versionchanged:: 1.1 the ``legacy_schema_aliasing`` flag introduced
in version 1.0.5 to allow disabling of legacy mode for schemas now
defaults to False.
.. _mssql_indexes:
Clustered Index Support
-----------------------
The MSSQL dialect supports clustered indexes (and primary keys) via the
``mssql_clustered`` option. This option is available to :class:`.Index`,
:class:`.UniqueConstraint`. and :class:`.PrimaryKeyConstraint`.
To generate a clustered index::
Index("my_index", table.c.x, mssql_clustered=True)
which renders the index as ``CREATE CLUSTERED INDEX my_index ON table (x)``.
To generate a clustered primary key use::
Table('my_table', metadata,
Column('x', ...),
Column('y', ...),
PrimaryKeyConstraint("x", "y", mssql_clustered=True))
which will render the table, for example, as::
CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL,
PRIMARY KEY CLUSTERED (x, y))
Similarly, we can generate a clustered unique constraint using::
Table('my_table', metadata,
Column('x', ...),
Column('y', ...),
PrimaryKeyConstraint("x"),
UniqueConstraint("y", mssql_clustered=True),
)
To explicitly request a non-clustered primary key (for example, when
a separate clustered index is desired), use::
Table('my_table', metadata,
Column('x', ...),
Column('y', ...),
PrimaryKeyConstraint("x", "y", mssql_clustered=False))
which will render the table, for example, as::
CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL,
PRIMARY KEY NONCLUSTERED (x, y))
.. versionchanged:: 1.1 the ``mssql_clustered`` option now defaults
to None, rather than False. ``mssql_clustered=False`` now explicitly
renders the NONCLUSTERED clause, whereas None omits the CLUSTERED
clause entirely, allowing SQL Server defaults to take effect.
MSSQL-Specific Index Options
-----------------------------
In addition to clustering, the MSSQL dialect supports other special options
for :class:`.Index`.
INCLUDE
^^^^^^^
The ``mssql_include`` option renders INCLUDE(colname) for the given string
names::
Index("my_index", table.c.x, mssql_include=['y'])
would render the index as ``CREATE INDEX my_index ON table (x) INCLUDE (y)``
.. versionadded:: 0.8
Index ordering
^^^^^^^^^^^^^^
Index ordering is available via functional expressions, such as::
Index("my_index", table.c.x.desc())
would render the index as ``CREATE INDEX my_index ON table (x DESC)``
.. versionadded:: 0.8
.. seealso::
:ref:`schema_indexes_functional`
Compatibility Levels
--------------------
MSSQL supports the notion of setting compatibility levels at the
database level. This allows, for instance, to run a database that
is compatible with SQL2000 while running on a SQL2005 database
server. ``server_version_info`` will always return the database
server version information (in this case SQL2005) and not the
compatibility level information. Because of this, if running under
a backwards compatibility mode SQLAlchemy may attempt to use T-SQL
statements that are unable to be parsed by the database server.
Triggers
--------
SQLAlchemy by default uses OUTPUT INSERTED to get at newly
generated primary key values via IDENTITY columns or other
server side defaults. MS-SQL does not
allow the usage of OUTPUT INSERTED on tables that have triggers.
To disable the usage of OUTPUT INSERTED on a per-table basis,
specify ``implicit_returning=False`` for each :class:`.Table`
which has triggers::
Table('mytable', metadata,
Column('id', Integer, primary_key=True),
# ...,
implicit_returning=False
)
Declarative form::
class MyClass(Base):
# ...
__table_args__ = {'implicit_returning':False}
This option can also be specified engine-wide using the
``implicit_returning=False`` argument on :func:`.create_engine`.
.. _mssql_rowcount_versioning:
Rowcount Support / ORM Versioning
---------------------------------
The SQL Server drivers have very limited ability to return the number
of rows updated from an UPDATE or DELETE statement. In particular, the
pymssql driver has no support, whereas the pyodbc driver can only return
this value under certain conditions.
In particular, updated rowcount is not available when OUTPUT INSERTED
is used. This impacts the SQLAlchemy ORM's versioning feature when
server-side versioning schemes are used. When
using pyodbc, the "implicit_returning" flag needs to be set to false
for any ORM mapped class that uses a version_id column in conjunction with
a server-side version generator::
class MyTable(Base):
__tablename__ = 'mytable'
id = Column(Integer, primary_key=True)
stuff = Column(String(10))
timestamp = Column(TIMESTAMP(), default=text('DEFAULT'))
__mapper_args__ = {
'version_id_col': timestamp,
'version_id_generator': False,
}
__table_args__ = {
'implicit_returning': False
}
Without the implicit_returning flag above, the UPDATE statement will
use ``OUTPUT inserted.timestamp`` and the rowcount will be returned as
-1, causing the versioning logic to fail.
Enabling Snapshot Isolation
---------------------------
Not necessarily specific to SQLAlchemy, SQL Server has a default transaction
isolation mode that locks entire tables, and causes even mildly concurrent
applications to have long held locks and frequent deadlocks.
Enabling snapshot isolation for the database as a whole is recommended
for modern levels of concurrency support. This is accomplished via the
following ALTER DATABASE commands executed at the SQL prompt::
ALTER DATABASE MyDatabase SET ALLOW_SNAPSHOT_ISOLATION ON
ALTER DATABASE MyDatabase SET READ_COMMITTED_SNAPSHOT ON
Background on SQL Server snapshot isolation is available at
http://msdn.microsoft.com/en-us/library/ms175095.aspx.
Known Issues
------------
* No support for more than one ``IDENTITY`` column per table
* reflection of indexes does not work with versions older than
SQL Server 2005
"""
import datetime
import operator
import re
from ... import sql, schema as sa_schema, exc, util
from ...sql import compiler, expression, util as sql_util
from ... import engine
from ...engine import reflection, default
from ... import types as sqltypes
from ...types import INTEGER, BIGINT, SMALLINT, DECIMAL, NUMERIC, \
FLOAT, TIMESTAMP, DATETIME, DATE, BINARY,\
TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR
from ...util import update_wrapper
from . import information_schema as ischema
# Internal major-version tuples for SQL Server product releases, compared
# against ``dialect.server_version_info`` to gate version-dependent
# features (DATE/TIME support, index reflection, large-type deprecation).
# Product-year to version-number mapping: http://sqlserverbuilds.blogspot.com/
MS_2016_VERSION = (13,)
MS_2014_VERSION = (12,)
MS_2012_VERSION = (11,)
MS_2008_VERSION = (10,)
MS_2005_VERSION = (9,)
MS_2000_VERSION = (8,)
# T-SQL reserved words; the identifier preparer always quotes
# identifiers that match one of these (case-insensitively).
RESERVED_WORDS = set(
    ['add', 'all', 'alter', 'and', 'any', 'as', 'asc', 'authorization',
     'backup', 'begin', 'between', 'break', 'browse', 'bulk', 'by', 'cascade',
     'case', 'check', 'checkpoint', 'close', 'clustered', 'coalesce',
     'collate', 'column', 'commit', 'compute', 'constraint', 'contains',
     'containstable', 'continue', 'convert', 'create', 'cross', 'current',
     'current_date', 'current_time', 'current_timestamp', 'current_user',
     'cursor', 'database', 'dbcc', 'deallocate', 'declare', 'default',
     'delete', 'deny', 'desc', 'disk', 'distinct', 'distributed', 'double',
     'drop', 'dump', 'else', 'end', 'errlvl', 'escape', 'except', 'exec',
     'execute', 'exists', 'exit', 'external', 'fetch', 'file', 'fillfactor',
     'for', 'foreign', 'freetext', 'freetexttable', 'from', 'full',
     'function', 'goto', 'grant', 'group', 'having', 'holdlock', 'identity',
     'identity_insert', 'identitycol', 'if', 'in', 'index', 'inner', 'insert',
     'intersect', 'into', 'is', 'join', 'key', 'kill', 'left', 'like',
     'lineno', 'load', 'merge', 'national', 'nocheck', 'nonclustered', 'not',
     'null', 'nullif', 'of', 'off', 'offsets', 'on', 'open', 'opendatasource',
     'openquery', 'openrowset', 'openxml', 'option', 'or', 'order', 'outer',
     'over', 'percent', 'pivot', 'plan', 'precision', 'primary', 'print',
     'proc', 'procedure', 'public', 'raiserror', 'read', 'readtext',
     'reconfigure', 'references', 'replication', 'restore', 'restrict',
     'return', 'revert', 'revoke', 'right', 'rollback', 'rowcount',
     'rowguidcol', 'rule', 'save', 'schema', 'securityaudit', 'select',
     'session_user', 'set', 'setuser', 'shutdown', 'some', 'statistics',
     'system_user', 'table', 'tablesample', 'textsize', 'then', 'to', 'top',
     'tran', 'transaction', 'trigger', 'truncate', 'tsequal', 'union',
     'unique', 'unpivot', 'update', 'updatetext', 'use', 'user', 'values',
     'varying', 'view', 'waitfor', 'when', 'where', 'while', 'with',
     'writetext',
     ])
class REAL(sqltypes.REAL):
    """The SQL Server REAL datatype."""

    __visit_name__ = 'REAL'

    def __init__(self, **kw):
        # REAL is a synonym for FLOAT(24) on SQL server; pin the
        # precision accordingly, overriding any caller-supplied value
        kw['precision'] = 24
        super(REAL, self).__init__(**kw)


class TINYINT(sqltypes.Integer):
    """The SQL Server TINYINT datatype."""

    __visit_name__ = 'TINYINT'
# MSSQL DATE/TIME types have varied behavior, sometimes returning
# strings. MSDate/TIME check for everything, and always
# filter bind parameters into datetime objects (required by pyodbc,
# not sure about other dialects).
class _MSDate(sqltypes.Date):
    """Date type that binds plain ``date`` values as ``datetime``
    objects and parses string results back into ``date`` objects.
    """

    # pattern for string results of the form YYYY-MM-DD
    _reg = re.compile(r"(\d+)-(\d+)-(\d+)")

    def bind_processor(self, dialect):
        def process(value):
            # exact type check on purpose: datetime.datetime is a
            # subclass of datetime.date and must pass through untouched
            if type(value) is datetime.date:
                return datetime.datetime(value.year, value.month, value.day)
            return value
        return process

    def result_processor(self, dialect, coltype):
        def process(value):
            if isinstance(value, datetime.datetime):
                return value.date()
            if isinstance(value, util.string_types):
                matched = self._reg.match(value)
                if matched is None:
                    raise ValueError(
                        "could not parse %r as a date value" % (value, ))
                year, month, day = (int(g or 0) for g in matched.groups())
                return datetime.date(year, month, day)
            return value
        return process
class TIME(sqltypes.TIME):
    """The SQL Server TIME datatype, with optional fractional-second
    precision.
    """

    # TIME bind values are sent as datetimes anchored on this date
    __zero_date = datetime.date(1900, 1, 1)

    # pattern for string results of the form HH:MM:SS[.ffffff]
    _reg = re.compile(r"(\d+):(\d+):(\d+)(?:\.(\d{0,6}))?")

    def __init__(self, precision=None, **kwargs):
        self.precision = precision
        super(TIME, self).__init__()

    def bind_processor(self, dialect):
        def process(value):
            if isinstance(value, datetime.datetime):
                return datetime.datetime.combine(
                    self.__zero_date, value.time())
            if isinstance(value, datetime.time):
                return datetime.datetime.combine(self.__zero_date, value)
            return value
        return process

    def result_processor(self, dialect, coltype):
        def process(value):
            if isinstance(value, datetime.datetime):
                return value.time()
            if isinstance(value, util.string_types):
                matched = self._reg.match(value)
                if matched is None:
                    raise ValueError(
                        "could not parse %r as a time value" % (value, ))
                return datetime.time(
                    *(int(g or 0) for g in matched.groups()))
            return value
        return process


# legacy alias
_MSTime = TIME
class _DateTimeBase(object):
def bind_processor(self, dialect):
def process(value):
if type(value) == datetime.date:
return datetime.datetime(value.year, value.month, value.day)
else:
return value
return process
class _MSDateTime(_DateTimeBase, sqltypes.DateTime):
    """DateTime type that widens plain ``date`` bind values."""
    pass


class SMALLDATETIME(_DateTimeBase, sqltypes.DateTime):
    """The SQL Server SMALLDATETIME datatype."""

    __visit_name__ = 'SMALLDATETIME'


class DATETIME2(_DateTimeBase, sqltypes.DateTime):
    """The SQL Server DATETIME2 datatype, with optional
    fractional-second precision."""

    __visit_name__ = 'DATETIME2'

    def __init__(self, precision=None, **kw):
        super(DATETIME2, self).__init__(**kw)
        self.precision = precision


# TODO: is this not an Interval ?
class DATETIMEOFFSET(sqltypes.TypeEngine):
    """The SQL Server DATETIMEOFFSET datatype, with optional
    fractional-second precision."""

    __visit_name__ = 'DATETIMEOFFSET'

    def __init__(self, precision=None, **kwargs):
        self.precision = precision
class _StringType(object):
    """Base for MSSQL string types; carries the COLLATE option."""

    def __init__(self, collation=None):
        super(_StringType, self).__init__(collation=collation)


class NTEXT(sqltypes.UnicodeText):
    """MSSQL NTEXT type, for variable-length unicode text up to 2^30
    characters."""

    __visit_name__ = 'NTEXT'


class VARBINARY(sqltypes.VARBINARY, sqltypes.LargeBinary):
    """The MSSQL VARBINARY type.

    This type extends both :class:`.types.VARBINARY` and
    :class:`.types.LargeBinary`. In "deprecate_large_types" mode,
    the :class:`.types.LargeBinary` type will produce ``VARBINARY(max)``
    on SQL Server.

    .. versionadded:: 1.0.0

    .. seealso::

        :ref:`mssql_large_type_deprecation`

    """
    __visit_name__ = 'VARBINARY'


class IMAGE(sqltypes.LargeBinary):
    """The SQL Server IMAGE datatype (superseded by VARBINARY(max)
    under the dialect's "deprecate_large_types" mode)."""

    __visit_name__ = 'IMAGE'


class BIT(sqltypes.TypeEngine):
    """The SQL Server BIT datatype; also used to render booleans."""

    __visit_name__ = 'BIT'


class MONEY(sqltypes.TypeEngine):
    """The SQL Server MONEY datatype."""

    __visit_name__ = 'MONEY'


class SMALLMONEY(sqltypes.TypeEngine):
    """The SQL Server SMALLMONEY datatype."""

    __visit_name__ = 'SMALLMONEY'


class UNIQUEIDENTIFIER(sqltypes.TypeEngine):
    """The SQL Server UNIQUEIDENTIFIER (GUID) datatype."""

    __visit_name__ = "UNIQUEIDENTIFIER"


class SQL_VARIANT(sqltypes.TypeEngine):
    """The SQL Server SQL_VARIANT datatype."""

    __visit_name__ = 'SQL_VARIANT'
# Legacy aliases for the type classes above, retained for backwards
# compatibility with older import paths.
MSDateTime = _MSDateTime
MSDate = _MSDate
MSReal = REAL
MSTinyInteger = TINYINT
MSTime = TIME
MSSmallDateTime = SMALLDATETIME
MSDateTime2 = DATETIME2
MSDateTimeOffset = DATETIMEOFFSET
MSText = TEXT
MSNText = NTEXT
MSString = VARCHAR
MSNVarchar = NVARCHAR
MSChar = CHAR
MSNChar = NCHAR
MSBinary = BINARY
MSVarBinary = VARBINARY
MSImage = IMAGE
MSBit = BIT
MSMoney = MONEY
MSSmallMoney = SMALLMONEY
MSUniqueIdentifier = UNIQUEIDENTIFIER
MSVariant = SQL_VARIANT
# Map of lower-case type names, as reported by the INFORMATION_SCHEMA
# views during reflection, to their corresponding type classes.
ischema_names = {
    'int': INTEGER,
    'bigint': BIGINT,
    'smallint': SMALLINT,
    'tinyint': TINYINT,
    'varchar': VARCHAR,
    'nvarchar': NVARCHAR,
    'char': CHAR,
    'nchar': NCHAR,
    'text': TEXT,
    'ntext': NTEXT,
    'decimal': DECIMAL,
    'numeric': NUMERIC,
    'float': FLOAT,
    'datetime': DATETIME,
    'datetime2': DATETIME2,
    'datetimeoffset': DATETIMEOFFSET,
    'date': DATE,
    'time': TIME,
    'smalldatetime': SMALLDATETIME,
    'binary': BINARY,
    'varbinary': VARBINARY,
    'bit': BIT,
    'real': REAL,
    'image': IMAGE,
    'timestamp': TIMESTAMP,
    'money': MONEY,
    'smallmoney': SMALLMONEY,
    'uniqueidentifier': UNIQUEIDENTIFIER,
    'sql_variant': SQL_VARIANT,
}
class MSTypeCompiler(compiler.GenericTypeCompiler):
    """Type compiler for SQL Server.

    Renders DDL type strings, including MSSQL-only datatypes and the
    choices that depend on server version or the
    ``deprecate_large_types`` flag.
    """

    def _extend(self, spec, type_, length=None):
        """Extend a string-type declaration with standard SQL
        COLLATE annotations.
        """
        if getattr(type_, 'collation', None):
            collation = 'COLLATE %s' % type_.collation
        else:
            collation = None

        effective_length = length if length else type_.length
        if effective_length:
            spec = "%s(%s)" % (spec, effective_length)

        return ' '.join(part for part in (spec, collation)
                        if part is not None)

    def visit_FLOAT(self, type_, **kw):
        precision = getattr(type_, 'precision', None)
        if precision is None:
            return "FLOAT"
        return "FLOAT(%s)" % precision

    def visit_TINYINT(self, type_, **kw):
        return "TINYINT"

    def visit_DATETIMEOFFSET(self, type_, **kw):
        if type_.precision is None:
            return "DATETIMEOFFSET"
        return "DATETIMEOFFSET(%s)" % type_.precision

    def visit_TIME(self, type_, **kw):
        precision = getattr(type_, 'precision', None)
        if precision is None:
            return "TIME"
        return "TIME(%s)" % precision

    def visit_DATETIME2(self, type_, **kw):
        precision = getattr(type_, 'precision', None)
        if precision is None:
            return "DATETIME2"
        return "DATETIME2(%s)" % precision

    def visit_SMALLDATETIME(self, type_, **kw):
        return "SMALLDATETIME"

    def visit_unicode(self, type_, **kw):
        # generic Unicode renders as NVARCHAR
        return self.visit_NVARCHAR(type_, **kw)

    def visit_text(self, type_, **kw):
        # VARCHAR(max) when large types are deprecated, TEXT otherwise
        if self.dialect.deprecate_large_types:
            return self.visit_VARCHAR(type_, **kw)
        return self.visit_TEXT(type_, **kw)

    def visit_unicode_text(self, type_, **kw):
        # NVARCHAR(max) when large types are deprecated, NTEXT otherwise
        if self.dialect.deprecate_large_types:
            return self.visit_NVARCHAR(type_, **kw)
        return self.visit_NTEXT(type_, **kw)

    def visit_NTEXT(self, type_, **kw):
        return self._extend("NTEXT", type_)

    def visit_TEXT(self, type_, **kw):
        return self._extend("TEXT", type_)

    def visit_VARCHAR(self, type_, **kw):
        # an unsized VARCHAR renders as VARCHAR(max)
        return self._extend("VARCHAR", type_, length=type_.length or 'max')

    def visit_CHAR(self, type_, **kw):
        return self._extend("CHAR", type_)

    def visit_NCHAR(self, type_, **kw):
        return self._extend("NCHAR", type_)

    def visit_NVARCHAR(self, type_, **kw):
        # an unsized NVARCHAR renders as NVARCHAR(max)
        return self._extend("NVARCHAR", type_, length=type_.length or 'max')

    def visit_date(self, type_, **kw):
        # the DATE datatype is rendered only on SQL Server 2008+;
        # earlier servers fall back to DATETIME
        if self.dialect.server_version_info < MS_2008_VERSION:
            return self.visit_DATETIME(type_, **kw)
        return self.visit_DATE(type_, **kw)

    def visit_time(self, type_, **kw):
        # the TIME datatype is rendered only on SQL Server 2008+;
        # earlier servers fall back to DATETIME
        if self.dialect.server_version_info < MS_2008_VERSION:
            return self.visit_DATETIME(type_, **kw)
        return self.visit_TIME(type_, **kw)

    def visit_large_binary(self, type_, **kw):
        # VARBINARY(max) when large types are deprecated, IMAGE otherwise
        if self.dialect.deprecate_large_types:
            return self.visit_VARBINARY(type_, **kw)
        return self.visit_IMAGE(type_, **kw)

    def visit_IMAGE(self, type_, **kw):
        return "IMAGE"

    def visit_VARBINARY(self, type_, **kw):
        # an unsized VARBINARY renders as VARBINARY(max)
        return self._extend(
            "VARBINARY", type_, length=type_.length or 'max')

    def visit_boolean(self, type_, **kw):
        # booleans are stored in BIT columns
        return self.visit_BIT(type_)

    def visit_BIT(self, type_, **kw):
        return "BIT"

    def visit_MONEY(self, type_, **kw):
        return "MONEY"

    def visit_SMALLMONEY(self, type_, **kw):
        return 'SMALLMONEY'

    def visit_UNIQUEIDENTIFIER(self, type_, **kw):
        return "UNIQUEIDENTIFIER"

    def visit_SQL_VARIANT(self, type_, **kw):
        return 'SQL_VARIANT'
class MSExecutionContext(default.DefaultExecutionContext):
    """Execution context for SQL Server.

    Manages SET IDENTITY_INSERT on/off around INSERT statements that
    supply an explicit value for an IDENTITY column, and fetches the
    last inserted identity value after a plain INSERT when RETURNING
    (OUTPUT) is not in use.
    """

    _enable_identity_insert = False
    _select_lastrowid = False
    _result_proxy = None
    _lastrowid = None

    def _opt_encode(self, statement):
        # encode the statement to bytes for DBAPIs that cannot accept
        # unicode statement strings
        if not self.dialect.supports_unicode_statements:
            return self.dialect._encoder(statement)[0]
        else:
            return statement

    def pre_exec(self):
        """Activate IDENTITY_INSERT if needed."""
        if self.isinsert:
            tbl = self.compiled.statement.table
            seq_column = tbl._autoincrement_column
            insert_has_sequence = seq_column is not None

            if insert_has_sequence:
                # IDENTITY_INSERT is needed when an explicit value for
                # the IDENTITY column is supplied, either via the
                # compiled parameters or the statement's literal
                # parameters (single- or multi-values form)
                self._enable_identity_insert = \
                    seq_column.key in self.compiled_parameters[0] or \
                    (
                        self.compiled.statement.parameters and (
                            (
                                self.compiled.statement._has_multi_parameters
                                and
                                seq_column.key in
                                self.compiled.statement.parameters[0]
                            ) or (
                                not
                                self.compiled.statement._has_multi_parameters
                                and
                                seq_column.key in
                                self.compiled.statement.parameters
                            )
                        )
                    )
            else:
                self._enable_identity_insert = False

            # fetch the identity value afterwards only for a plain,
            # single-row, non-RETURNING INSERT on an IDENTITY column
            self._select_lastrowid = not self.compiled.inline and \
                insert_has_sequence and \
                not self.compiled.returning and \
                not self._enable_identity_insert and \
                not self.executemany

            if self._enable_identity_insert:
                self.root_connection._cursor_execute(
                    self.cursor,
                    self._opt_encode(
                        "SET IDENTITY_INSERT %s ON" %
                        self.dialect.identifier_preparer.format_table(tbl)),
                    (),
                    self)

    def post_exec(self):
        """Disable IDENTITY_INSERT if enabled."""
        conn = self.root_connection

        if self._select_lastrowid:
            # prefer scope_identity(); @@identity can be affected by
            # triggers inserting into other tables
            if self.dialect.use_scope_identity:
                conn._cursor_execute(
                    self.cursor,
                    "SELECT scope_identity() AS lastrowid", (), self)
            else:
                conn._cursor_execute(self.cursor,
                                     "SELECT @@identity AS lastrowid",
                                     (),
                                     self)
            # fetchall() ensures the cursor is consumed without closing it
            row = self.cursor.fetchall()[0]
            self._lastrowid = int(row[0])

        if (self.isinsert or self.isupdate or self.isdelete) and \
                self.compiled.returning:
            # buffer OUTPUT rows fully before IDENTITY_INSERT is reset
            self._result_proxy = engine.FullyBufferedResultProxy(self)

        if self._enable_identity_insert:
            conn._cursor_execute(
                self.cursor,
                self._opt_encode(
                    "SET IDENTITY_INSERT %s OFF" %
                    self.dialect.identifier_preparer. format_table(
                        self.compiled.statement.table)),
                (),
                self)

    def get_lastrowid(self):
        # value captured by post_exec() via scope_identity()/@@identity
        return self._lastrowid

    def handle_dbapi_exception(self, e):
        # best-effort: make sure IDENTITY_INSERT is switched back off
        # even when the statement failed
        if self._enable_identity_insert:
            try:
                self.cursor.execute(
                    self._opt_encode(
                        "SET IDENTITY_INSERT %s OFF" %
                        self.dialect.identifier_preparer. format_table(
                            self.compiled.statement.table)))
            except Exception:
                pass

    def get_result_proxy(self):
        # use the fully-buffered proxy prepared by post_exec() for
        # RETURNING statements, a plain proxy otherwise
        if self._result_proxy:
            return self._result_proxy
        else:
            return engine.ResultProxy(self)
class MSSQLCompiler(compiler.SQLCompiler):
    """Statement compiler for SQL Server.

    Covers the T-SQL specifics: TOP / ROW_NUMBER() in place of
    LIMIT/OFFSET, OUTPUT in place of RETURNING, aliasing of
    schema-qualified tables, and assorted function and operator
    spellings.
    """

    # the OUTPUT clause is rendered before VALUES in an INSERT
    returning_precedes_values = True

    # MSSQL spellings for EXTRACT / DATEPART field names
    extract_map = util.update_copy(
        compiler.SQLCompiler.extract_map,
        {
            'doy': 'dayofyear',
            'dow': 'weekday',
            'milliseconds': 'millisecond',
            'microseconds': 'microsecond'
        })

    def __init__(self, *args, **kwargs):
        # cache of schema-qualified Table -> anonymous Alias,
        # populated by _schema_aliased_table()
        self.tablealiases = {}
        super(MSSQLCompiler, self).__init__(*args, **kwargs)

    def _with_legacy_schema_aliasing(fn):
        # decorator: use the legacy schema-aliasing implementation only
        # when the dialect's legacy_schema_aliasing flag is set,
        # otherwise defer to the base class method of the same name
        def decorate(self, *arg, **kw):
            if self.dialect.legacy_schema_aliasing:
                return fn(self, *arg, **kw)
            else:
                super_ = getattr(super(MSSQLCompiler, self), fn.__name__)
                return super_(*arg, **kw)
        return decorate

    def visit_now_func(self, fn, **kw):
        return "CURRENT_TIMESTAMP"

    def visit_current_date_func(self, fn, **kw):
        return "GETDATE()"

    def visit_length_func(self, fn, **kw):
        # string length is spelled LEN() on SQL Server
        return "LEN%s" % self.function_argspec(fn, **kw)

    def visit_char_length_func(self, fn, **kw):
        return "LEN%s" % self.function_argspec(fn, **kw)

    def visit_concat_op_binary(self, binary, operator, **kw):
        # string concatenation operator is "+"
        return "%s + %s" % \
            (self.process(binary.left, **kw),
             self.process(binary.right, **kw))

    def visit_true(self, expr, **kw):
        # no boolean literals; BIT values 1/0 are used instead
        return '1'

    def visit_false(self, expr, **kw):
        return '0'

    def visit_match_op_binary(self, binary, operator, **kw):
        # full-text search via CONTAINS()
        return "CONTAINS (%s, %s)" % (
            self.process(binary.left, **kw),
            self.process(binary.right, **kw))

    def get_select_precolumns(self, select, **kw):
        """ MS-SQL puts TOP, its version of LIMIT, here """
        s = ""
        if select._distinct:
            s += "DISTINCT "

        if select._simple_int_limit and not select._offset:
            # ODBC drivers and possibly others
            # don't support bind params in the SELECT clause on SQL Server.
            # so have to use literal here.
            s += "TOP %d " % select._limit

        if s:
            return s
        else:
            return compiler.SQLCompiler.get_select_precolumns(
                self, select, **kw)

    def get_from_hint_text(self, table, text):
        # table hints (e.g. WITH (NOLOCK)) are emitted verbatim
        return text

    def get_crud_hint_text(self, table, text):
        return text

    def limit_clause(self, select, **kw):
        # Limit in mssql is after the select keyword; rendered by
        # get_select_precolumns() / visit_select() instead
        return ""

    def visit_select(self, select, **kwargs):
        """Look for ``LIMIT`` and OFFSET in a select statement, and if
        so tries to wrap it in a subquery with ``row_number()`` criterion.

        """
        if (
            (
                not select._simple_int_limit and
                select._limit_clause is not None
            ) or (
                select._offset_clause is not None and
                not select._simple_int_offset or select._offset
            )
        ) and not getattr(select, '_mssql_visit', None):

            # to use ROW_NUMBER(), an ORDER BY is required.
            if not select._order_by_clause.clauses:
                raise exc.CompileError('MSSQL requires an order_by when '
                                       'using an OFFSET or a non-simple '
                                       'LIMIT clause')

            _order_by_clauses = [
                sql_util.unwrap_label_reference(elem)
                for elem in select._order_by_clause.clauses
            ]

            limit_clause = select._limit_clause
            offset_clause = select._offset_clause
            kwargs['select_wraps_for'] = select
            select = select._generate()
            select._mssql_visit = True
            # the inner query gains a ROW_NUMBER() OVER (ORDER BY ...)
            # column named mssql_rn; the outer query filters on it to
            # implement LIMIT/OFFSET
            select = select.column(
                sql.func.ROW_NUMBER().over(order_by=_order_by_clauses)
                .label("mssql_rn")).order_by(None).alias()

            mssql_rn = sql.column('mssql_rn')
            limitselect = sql.select([c for c in select.c if
                                      c.key != 'mssql_rn'])
            if offset_clause is not None:
                limitselect.append_whereclause(mssql_rn > offset_clause)
                if limit_clause is not None:
                    limitselect.append_whereclause(
                        mssql_rn <= (limit_clause + offset_clause))
            else:
                limitselect.append_whereclause(
                    mssql_rn <= (limit_clause))
            return self.process(limitselect, **kwargs)
        else:
            return compiler.SQLCompiler.visit_select(self, select, **kwargs)

    @_with_legacy_schema_aliasing
    def visit_table(self, table, mssql_aliased=False, iscrud=False, **kwargs):
        if mssql_aliased is table or iscrud:
            return super(MSSQLCompiler, self).visit_table(table, **kwargs)

        # alias schema-qualified tables
        alias = self._schema_aliased_table(table)
        if alias is not None:
            return self.process(alias, mssql_aliased=table, **kwargs)
        else:
            return super(MSSQLCompiler, self).visit_table(table, **kwargs)

    @_with_legacy_schema_aliasing
    def visit_alias(self, alias, **kw):
        # translate for schema-qualified table aliases
        kw['mssql_aliased'] = alias.original
        return super(MSSQLCompiler, self).visit_alias(alias, **kw)

    @_with_legacy_schema_aliasing
    def visit_column(self, column, add_to_result_map=None, **kw):
        if column.table is not None and \
                (not self.isupdate and not self.isdelete) or \
                self.is_subquery():
            # translate for schema-qualified table aliases
            t = self._schema_aliased_table(column.table)
            if t is not None:
                converted = expression._corresponding_column_or_error(
                    t, column)
                if add_to_result_map is not None:
                    add_to_result_map(
                        column.name,
                        column.name,
                        (column, column.name, column.key),
                        column.type
                    )

                return super(MSSQLCompiler, self).\
                    visit_column(converted, **kw)

        return super(MSSQLCompiler, self).visit_column(
            column, add_to_result_map=add_to_result_map, **kw)

    def _schema_aliased_table(self, table):
        # return (and cache) an anonymous alias for a schema-qualified
        # table; None for tables without a schema
        if getattr(table, 'schema', None) is not None:
            if table not in self.tablealiases:
                self.tablealiases[table] = table.alias()
            return self.tablealiases[table]
        else:
            return None

    def visit_extract(self, extract, **kw):
        field = self.extract_map.get(extract.field, extract.field)
        return 'DATEPART(%s, %s)' % \
            (field, self.process(extract.expr, **kw))

    def visit_savepoint(self, savepoint_stmt):
        return "SAVE TRANSACTION %s" % \
            self.preparer.format_savepoint(savepoint_stmt)

    def visit_rollback_to_savepoint(self, savepoint_stmt):
        return ("ROLLBACK TRANSACTION %s"
                % self.preparer.format_savepoint(savepoint_stmt))

    def visit_binary(self, binary, **kwargs):
        """Move bind parameters to the right-hand side of an operator, where
        possible.

        """
        if (
            isinstance(binary.left, expression.BindParameter)
            and binary.operator == operator.eq
            and not isinstance(binary.right, expression.BindParameter)
        ):
            return self.process(
                expression.BinaryExpression(binary.right,
                                            binary.left,
                                            binary.operator),
                **kwargs)
        return super(MSSQLCompiler, self).visit_binary(binary, **kwargs)

    def returning_clause(self, stmt, returning_cols):
        # RETURNING is implemented via the OUTPUT clause, selecting
        # from the "inserted" or "deleted" pseudo-tables
        if self.isinsert or self.isupdate:
            target = stmt.table.alias("inserted")
        else:
            target = stmt.table.alias("deleted")

        adapter = sql_util.ClauseAdapter(target)

        columns = [
            self._label_select_column(None, adapter.traverse(c),
                                      True, False, {})
            for c in expression._select_iterables(returning_cols)
        ]

        return 'OUTPUT ' + ', '.join(columns)

    def get_cte_preamble(self, recursive):
        # SQL Server finds it too inconvenient to accept
        # an entirely optional, SQL standard specified,
        # "RECURSIVE" word with their "WITH",
        # so here we go
        return "WITH"

    def label_select_column(self, select, column, asfrom):
        # bare function calls in the columns clause always get a label
        if isinstance(column, expression.Function):
            return column.label(None)
        else:
            return super(MSSQLCompiler, self).\
                label_select_column(select, column, asfrom)

    def for_update_clause(self, select):
        # "FOR UPDATE" is only allowed on "DECLARE CURSOR" which
        # SQLAlchemy doesn't use
        return ''

    def order_by_clause(self, select, **kw):
        order_by = self.process(select._order_by_clause, **kw)

        # MSSQL only allows ORDER BY in subqueries if there is a LIMIT
        if order_by and (not self.is_subquery() or select._limit):
            return " ORDER BY " + order_by
        else:
            return ""

    def update_from_clause(self, update_stmt,
                           from_table, extra_froms,
                           from_hints,
                           **kw):
        """Render the UPDATE..FROM clause specific to MSSQL.

        In MSSQL, if the UPDATE statement involves an alias of the table to
        be updated, then the table itself must be added to the FROM list as
        well. Otherwise, it is optional. Here, we add it regardless.

        """
        return "FROM " + ', '.join(
            t._compiler_dispatch(self, asfrom=True,
                                 fromhints=from_hints, **kw)
            for t in [from_table] + extra_froms)
class MSSQLStrictCompiler(MSSQLCompiler):
    """A subclass of MSSQLCompiler which disables the usage of bind
    parameters where not allowed natively by MS-SQL.

    A dialect may use this compiler on a platform where native
    binds are used.
    """

    ansi_bind_rules = True

    def visit_in_op_binary(self, binary, operator, **kw):
        # render the IN list with literal values, not bind parameters
        kw['literal_binds'] = True
        lhs = self.process(binary.left, **kw)
        rhs = self.process(binary.right, **kw)
        return "%s IN %s" % (lhs, rhs)

    def visit_notin_op_binary(self, binary, operator, **kw):
        # render the NOT IN list with literal values, not bind parameters
        kw['literal_binds'] = True
        lhs = self.process(binary.left, **kw)
        rhs = self.process(binary.right, **kw)
        return "%s NOT IN %s" % (lhs, rhs)

    def render_literal_value(self, value, type_):
        """
        For date and datetime values, convert to a string
        format acceptable to MSSQL. That seems to be the
        so-called ODBC canonical date format which looks
        like this:

            yyyy-mm-dd hh:mi:ss.mmm(24h)

        For other data types, call the base class implementation.
        """
        # datetime and date are both subclasses of datetime.date
        if isinstance(value, datetime.date):
            # SQL Server wants single quotes around the date string.
            return "'" + str(value) + "'"
        return super(MSSQLStrictCompiler, self).render_literal_value(
            value, type_)
class MSDDLCompiler(compiler.DDLCompiler):
    """DDL compiler for SQL Server.

    Adds IDENTITY rendering for autoincrement/Sequence columns,
    CLUSTERED/NONCLUSTERED options on indexes and constraints, and
    INCLUDE columns on indexes.
    """

    def get_column_specification(self, column, **kwargs):
        colspec = (
            self.preparer.format_column(column) + " "
            + self.dialect.type_compiler.process(
                column.type, type_expression=column)
        )

        if column.nullable is not None:
            # IDENTITY columns (primary key or Sequence default) are
            # forced to NOT NULL
            if not column.nullable or column.primary_key or \
                    isinstance(column.default, sa_schema.Sequence):
                colspec += " NOT NULL"
            else:
                colspec += " NULL"

        if column.table is None:
            raise exc.CompileError(
                "mssql requires Table-bound columns "
                "in order to generate DDL")

        # install an IDENTITY Sequence if we either a sequence or an implicit
        # IDENTITY column
        if isinstance(column.default, sa_schema.Sequence):
            # a Sequence start of 0 is preserved; otherwise fall back to 1
            if column.default.start == 0:
                start = 0
            else:
                start = column.default.start or 1

            colspec += " IDENTITY(%s,%s)" % (start,
                                             column.default.increment or 1)
        elif column is column.table._autoincrement_column:
            colspec += " IDENTITY(1,1)"
        else:
            default = self.get_column_default_string(column)
            if default is not None:
                colspec += " DEFAULT " + default

        return colspec

    def visit_create_index(self, create, include_schema=False):
        index = create.element
        self._verify_index_table(index)
        preparer = self.preparer
        text = "CREATE "
        if index.unique:
            text += "UNIQUE "

        # handle clustering option
        clustered = index.dialect_options['mssql']['clustered']
        if clustered is not None:
            if clustered:
                text += "CLUSTERED "
            else:
                text += "NONCLUSTERED "

        text += "INDEX %s ON %s (%s)" \
            % (
                self._prepared_index_name(index,
                                          include_schema=include_schema),
                preparer.format_table(index.table),
                ', '.join(
                    self.sql_compiler.process(expr,
                                              include_table=False,
                                              literal_binds=True) for
                    expr in index.expressions)
            )

        # handle other included columns
        if index.dialect_options['mssql']['include']:
            # entries may be column names or Column objects
            inclusions = [index.table.c[col]
                          if isinstance(col, util.string_types) else col
                          for col in
                          index.dialect_options['mssql']['include']
                          ]

            text += " INCLUDE (%s)" \
                % ', '.join([preparer.quote(c.name)
                             for c in inclusions])

        return text

    def visit_drop_index(self, drop):
        # DROP INDEX requires the "ON table" clause on SQL Server
        return "\nDROP INDEX %s ON %s" % (
            self._prepared_index_name(drop.element, include_schema=False),
            self.preparer.format_table(drop.element.table)
        )

    def visit_primary_key_constraint(self, constraint):
        if len(constraint) == 0:
            return ''
        text = ""
        if constraint.name is not None:
            text += "CONSTRAINT %s " % \
                self.preparer.format_constraint(constraint)
        text += "PRIMARY KEY "

        # handle clustering option
        clustered = constraint.dialect_options['mssql']['clustered']
        if clustered is not None:
            if clustered:
                text += "CLUSTERED "
            else:
                text += "NONCLUSTERED "

        text += "(%s)" % ', '.join(self.preparer.quote(c.name)
                                   for c in constraint)
        text += self.define_constraint_deferrability(constraint)
        return text

    def visit_unique_constraint(self, constraint):
        if len(constraint) == 0:
            return ''
        text = ""
        if constraint.name is not None:
            text += "CONSTRAINT %s " % \
                self.preparer.format_constraint(constraint)
        text += "UNIQUE "

        # handle clustering option
        clustered = constraint.dialect_options['mssql']['clustered']
        if clustered is not None:
            if clustered:
                text += "CLUSTERED "
            else:
                text += "NONCLUSTERED "

        text += "(%s)" % ', '.join(self.preparer.quote(c.name)
                                   for c in constraint)
        text += self.define_constraint_deferrability(constraint)
        return text
class MSIdentifierPreparer(compiler.IdentifierPreparer):
    """Identifier preparer for SQL Server, quoting identifiers with
    square brackets."""

    reserved_words = RESERVED_WORDS

    def __init__(self, dialect):
        super(MSIdentifierPreparer, self).__init__(dialect,
                                                   initial_quote='[',
                                                   final_quote=']')

    def _escape_identifier(self, value):
        """Escape an identifier for bracket quoting.

        T-SQL escapes a closing bracket inside a bracket-delimited
        identifier by doubling it, as QUOTENAME() does.  Previously
        this was a no-op, which produced broken SQL for identifiers
        containing ``]``.
        """
        return value.replace(']', ']]')

    def _unescape_identifier(self, value):
        """Reverse of :meth:`._escape_identifier`."""
        return value.replace(']]', ']')

    def quote_schema(self, schema, force=None):
        """Prepare a quoted table and schema name.

        A dotted schema ("database.owner") has each component quoted
        individually.
        """
        result = '.'.join([self.quote(x, force) for x in schema.split('.')])
        return result
def _db_plus_owner_listing(fn):
    """Wrap a listing-style reflection method so that a dotted
    "database.owner" schema argument is split apart, with the
    connection temporarily switched to the target database.
    """
    def wrap(dialect, connection, schema=None, **kw):
        dbname, owner = _owner_plus_db(dialect, schema)
        call_args = (dialect, connection, dbname, owner, schema)
        return _switch_db(dbname, connection, fn, *call_args, **kw)
    return update_wrapper(wrap, fn)
def _db_plus_owner(fn):
    """Like :func:`._db_plus_owner_listing`, for reflection methods
    that additionally take a table name argument.
    """
    def wrap(dialect, connection, tablename, schema=None, **kw):
        dbname, owner = _owner_plus_db(dialect, schema)
        call_args = (dialect, connection, tablename, dbname, owner, schema)
        return _switch_db(dbname, connection, fn, *call_args, **kw)
    return update_wrapper(wrap, fn)
def _switch_db(dbname, connection, fn, *arg, **kw):
if dbname:
current_db = connection.scalar("select db_name()")
connection.execute("use %s" % dbname)
try:
return fn(*arg, **kw)
finally:
if dbname:
connection.execute("use %s" % current_db)
def _owner_plus_db(dialect, schema):
if not schema:
return None, dialect.default_schema_name
elif "." in schema:
return schema.split(".", 1)
else:
return None, schema
class MSDialect(default.DefaultDialect):
name = 'mssql'
supports_default_values = True
supports_empty_insert = False
execution_ctx_cls = MSExecutionContext
use_scope_identity = True
max_identifier_length = 128
schema_name = "dbo"
colspecs = {
sqltypes.DateTime: _MSDateTime,
sqltypes.Date: _MSDate,
sqltypes.Time: TIME,
}
engine_config_types = default.DefaultDialect.engine_config_types.union([
('legacy_schema_aliasing', util.asbool),
])
ischema_names = ischema_names
supports_native_boolean = False
supports_unicode_binds = True
postfetch_lastrowid = True
server_version_info = ()
statement_compiler = MSSQLCompiler
ddl_compiler = MSDDLCompiler
type_compiler = MSTypeCompiler
preparer = MSIdentifierPreparer
construct_arguments = [
(sa_schema.PrimaryKeyConstraint, {
"clustered": None
}),
(sa_schema.UniqueConstraint, {
"clustered": None
}),
(sa_schema.Index, {
"clustered": None,
"include": None
})
]
def __init__(self,
query_timeout=None,
use_scope_identity=True,
max_identifier_length=None,
schema_name="dbo",
isolation_level=None,
deprecate_large_types=None,
legacy_schema_aliasing=False, **opts):
self.query_timeout = int(query_timeout or 0)
self.schema_name = schema_name
self.use_scope_identity = use_scope_identity
self.max_identifier_length = int(max_identifier_length or 0) or \
self.max_identifier_length
self.deprecate_large_types = deprecate_large_types
self.legacy_schema_aliasing = legacy_schema_aliasing
super(MSDialect, self).__init__(**opts)
self.isolation_level = isolation_level
def do_savepoint(self, connection, name):
# give the DBAPI a push
connection.execute("IF @@TRANCOUNT = 0 BEGIN TRANSACTION")
super(MSDialect, self).do_savepoint(connection, name)
def do_release_savepoint(self, connection, name):
# SQL Server does not support RELEASE SAVEPOINT
pass
_isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED',
'READ COMMITTED', 'REPEATABLE READ',
'SNAPSHOT'])
def set_isolation_level(self, connection, level):
level = level.replace('_', ' ')
if level not in self._isolation_lookup:
raise exc.ArgumentError(
"Invalid value '%s' for isolation_level. "
"Valid isolation levels for %s are %s" %
(level, self.name, ", ".join(self._isolation_lookup))
)
cursor = connection.cursor()
cursor.execute(
"SET TRANSACTION ISOLATION LEVEL %s" % level)
cursor.close()
def get_isolation_level(self, connection):
if self.server_version_info < MS_2005_VERSION:
raise NotImplementedError(
"Can't fetch isolation level prior to SQL Server 2005")
cursor = connection.cursor()
cursor.execute("""
SELECT CASE transaction_isolation_level
WHEN 0 THEN NULL
WHEN 1 THEN 'READ UNCOMMITTED'
WHEN 2 THEN 'READ COMMITTED'
WHEN 3 THEN 'REPEATABLE READ'
WHEN 4 THEN 'SERIALIZABLE'
WHEN 5 THEN 'SNAPSHOT' END AS TRANSACTION_ISOLATION_LEVEL
FROM sys.dm_exec_sessions
where session_id = @@SPID
""")
val = cursor.fetchone()[0]
cursor.close()
return val.upper()
def initialize(self, connection):
super(MSDialect, self).initialize(connection)
self._setup_version_attributes()
def on_connect(self):
if self.isolation_level is not None:
def connect(conn):
self.set_isolation_level(conn, self.isolation_level)
return connect
else:
return None
def _setup_version_attributes(self):
if self.server_version_info[0] not in list(range(8, 17)):
util.warn(
"Unrecognized server version info '%s'. Some SQL Server "
"features may not function properly." %
".".join(str(x) for x in self.server_version_info))
if self.server_version_info >= MS_2005_VERSION and \
'implicit_returning' not in self.__dict__:
self.implicit_returning = True
if self.server_version_info >= MS_2008_VERSION:
self.supports_multivalues_insert = True
if self.deprecate_large_types is None:
self.deprecate_large_types = \
self.server_version_info >= MS_2012_VERSION
def _get_default_schema_name(self, connection):
if self.server_version_info < MS_2005_VERSION:
return self.schema_name
else:
query = sql.text("SELECT schema_name()")
default_schema_name = connection.scalar(query)
if default_schema_name is not None:
return util.text_type(default_schema_name)
else:
return self.schema_name
@_db_plus_owner
def has_table(self, connection, tablename, dbname, owner, schema):
columns = ischema.columns
whereclause = columns.c.table_name == tablename
if owner:
whereclause = sql.and_(whereclause,
columns.c.table_schema == owner)
s = sql.select([columns], whereclause)
c = connection.execute(s)
return c.first() is not None
@reflection.cache
def get_schema_names(self, connection, **kw):
s = sql.select([ischema.schemata.c.schema_name],
order_by=[ischema.schemata.c.schema_name]
)
schema_names = [r[0] for r in connection.execute(s)]
return schema_names
@reflection.cache
@_db_plus_owner_listing
def get_table_names(self, connection, dbname, owner, schema, **kw):
tables = ischema.tables
s = sql.select([tables.c.table_name],
sql.and_(
tables.c.table_schema == owner,
tables.c.table_type == 'BASE TABLE'
),
order_by=[tables.c.table_name]
)
table_names = [r[0] for r in connection.execute(s)]
return table_names
@reflection.cache
@_db_plus_owner_listing
def get_view_names(self, connection, dbname, owner, schema, **kw):
tables = ischema.tables
s = sql.select([tables.c.table_name],
sql.and_(
tables.c.table_schema == owner,
tables.c.table_type == 'VIEW'
),
order_by=[tables.c.table_name]
)
view_names = [r[0] for r in connection.execute(s)]
return view_names
    @reflection.cache
    @_db_plus_owner
    def get_indexes(self, connection, tablename, dbname, owner, schema, **kw):
        """Reflect non-primary-key indexes for *tablename*.

        Uses the ``sys.*`` system catalogs, so this is only available on
        SQL Server 2005 and newer; older servers get an empty list.
        Returns a list of dicts with 'name', 'unique' and 'column_names'.
        """
        # using system catalogs, don't support index reflection
        # below MS 2005
        if self.server_version_info < MS_2005_VERSION:
            return []

        # First pass: one row per index (id, uniqueness flag, name),
        # excluding primary-key indexes (reflected separately).
        rp = connection.execute(
            sql.text("select ind.index_id, ind.is_unique, ind.name "
                     "from sys.indexes as ind join sys.tables as tab on "
                     "ind.object_id=tab.object_id "
                     "join sys.schemas as sch on sch.schema_id=tab.schema_id "
                     "where tab.name = :tabname "
                     "and sch.name=:schname "
                     "and ind.is_primary_key=0",
                     bindparams=[
                         sql.bindparam('tabname', tablename,
                                       sqltypes.String(convert_unicode=True)),
                         sql.bindparam('schname', owner,
                                       sqltypes.String(convert_unicode=True))
                     ],
                     typemap={
                         'name': sqltypes.Unicode()
                     }
                     )
        )
        indexes = {}
        for row in rp:
            indexes[row['index_id']] = {
                'name': row['name'],
                'unique': row['is_unique'] == 1,
                'column_names': []
            }

        # Second pass: map member columns onto the indexes found above,
        # keyed by index_id.
        rp = connection.execute(
            sql.text(
                "select ind_col.index_id, ind_col.object_id, col.name "
                "from sys.columns as col "
                "join sys.tables as tab on tab.object_id=col.object_id "
                "join sys.index_columns as ind_col on "
                "(ind_col.column_id=col.column_id and "
                "ind_col.object_id=tab.object_id) "
                "join sys.schemas as sch on sch.schema_id=tab.schema_id "
                "where tab.name=:tabname "
                "and sch.name=:schname",
                bindparams=[
                    sql.bindparam('tabname', tablename,
                                  sqltypes.String(convert_unicode=True)),
                    sql.bindparam('schname', owner,
                                  sqltypes.String(convert_unicode=True))
                ],
                typemap={'name': sqltypes.Unicode()}
            ),
        )
        for row in rp:
            # Rows for primary-key indexes were not collected above, so
            # guard the lookup.
            if row['index_id'] in indexes:
                indexes[row['index_id']]['column_names'].append(row['name'])

        return list(indexes.values())
    @reflection.cache
    @_db_plus_owner
    def get_view_definition(self, connection, viewname,
                            dbname, owner, schema, **kw):
        """Return the CREATE VIEW source text of *viewname* from
        ``sys.sql_modules``, or None when the view is not found.
        """
        rp = connection.execute(
            sql.text(
                "select definition from sys.sql_modules as mod, "
                "sys.views as views, "
                "sys.schemas as sch"
                " where "
                "mod.object_id=views.object_id and "
                "views.schema_id=sch.schema_id and "
                "views.name=:viewname and sch.name=:schname",
                bindparams=[
                    sql.bindparam('viewname', viewname,
                                  sqltypes.String(convert_unicode=True)),
                    sql.bindparam('schname', owner,
                                  sqltypes.String(convert_unicode=True))
                ]
            )
        )

        if rp:
            view_def = rp.scalar()
            return view_def
    @reflection.cache
    @_db_plus_owner
    def get_columns(self, connection, tablename, dbname, owner, schema, **kw):
        """Reflect column definitions for *tablename*.

        Reads INFORMATION_SCHEMA.columns for name/type/nullable/default,
        then runs ``sp_columns`` to detect an identity column and, on
        SQL Server 2005+, queries its seed/increment.  Returns a list of
        reflection dicts ordered by ordinal position.
        """
        # Get base columns
        columns = ischema.columns
        if owner:
            whereclause = sql.and_(columns.c.table_name == tablename,
                                   columns.c.table_schema == owner)
        else:
            whereclause = columns.c.table_name == tablename
        s = sql.select([columns], whereclause,
                       order_by=[columns.c.ordinal_position])
        c = connection.execute(s)
        cols = []
        while True:
            row = c.fetchone()
            if row is None:
                break
            (name, type, nullable, charlen,
             numericprec, numericscale, default, collation) = (
                row[columns.c.column_name],
                row[columns.c.data_type],
                row[columns.c.is_nullable] == 'YES',
                row[columns.c.character_maximum_length],
                row[columns.c.numeric_precision],
                row[columns.c.numeric_scale],
                row[columns.c.column_default],
                row[columns.c.collation_name]
            )
            # Map the server's type name to the dialect's type class.
            coltype = self.ischema_names.get(type, None)

            kwargs = {}
            if coltype in (MSString, MSChar, MSNVarchar, MSNChar, MSText,
                           MSNText, MSBinary, MSVarBinary,
                           sqltypes.LargeBinary):
                # -1 means "max" length; represent it as unbounded.
                if charlen == -1:
                    charlen = None
                kwargs['length'] = charlen
                if collation:
                    kwargs['collation'] = collation

            if coltype is None:
                util.warn(
                    "Did not recognize type '%s' of column '%s'" %
                    (type, name))
                coltype = sqltypes.NULLTYPE
            else:
                # REAL takes no precision/scale arguments.
                if issubclass(coltype, sqltypes.Numeric) and \
                        coltype is not MSReal:
                    kwargs['scale'] = numericscale
                    kwargs['precision'] = numericprec

                coltype = coltype(**kwargs)
            cdict = {
                'name': name,
                'type': coltype,
                'nullable': nullable,
                'default': default,
                'autoincrement': False,
            }
            cols.append(cdict)
        # autoincrement and identity
        colmap = {}
        for col in cols:
            colmap[col['name']] = col
        # We also run an sp_columns to check for identity columns:
        cursor = connection.execute("sp_columns @table_name = '%s', "
                                    "@table_owner = '%s'"
                                    % (tablename, owner))
        ic = None
        while True:
            row = cursor.fetchone()
            if row is None:
                break
            # sp_columns result: index 3 is column name, 5 is type name.
            (col_name, type_name) = row[3], row[5]
            if type_name.endswith("identity") and col_name in colmap:
                # At most one identity column per table; stop at the first.
                ic = col_name
                colmap[col_name]['autoincrement'] = True
                colmap[col_name]['sequence'] = dict(
                    name='%s_identity' % col_name)
                break
        cursor.close()

        if ic is not None and self.server_version_info >= MS_2005_VERSION:
            table_fullname = "%s.%s" % (owner, tablename)
            cursor = connection.execute(
                "select ident_seed('%s'), ident_incr('%s')"
                % (table_fullname, table_fullname)
            )
            row = cursor.first()
            if row is not None and row[0] is not None:
                colmap[ic]['sequence'].update({
                    'start': int(row[0]),
                    'increment': int(row[1])
                })
        return cols
    @reflection.cache
    @_db_plus_owner
    def get_pk_constraint(self, connection, tablename,
                          dbname, owner, schema, **kw):
        """Reflect the primary-key constraint of *tablename*.

        Returns {'constrained_columns': [...], 'name': constraint_name};
        both empty/None when the table has no primary key.
        """
        pkeys = []
        TC = ischema.constraints
        C = ischema.key_constraints.alias('C')
        # Primary key constraints
        s = sql.select([C.c.column_name,
                        TC.c.constraint_type,
                        C.c.constraint_name],
                       sql.and_(TC.c.constraint_name == C.c.constraint_name,
                                TC.c.table_schema == C.c.table_schema,
                                C.c.table_name == tablename,
                                C.c.table_schema == owner)
                       )
        c = connection.execute(s)
        constraint_name = None
        for row in c:
            # Skip FOREIGN KEY / UNIQUE rows; only PRIMARY KEY matters here.
            if 'PRIMARY' in row[TC.c.constraint_type.name]:
                pkeys.append(row[0])
                if constraint_name is None:
                    constraint_name = row[C.c.constraint_name.name]
        return {'constrained_columns': pkeys, 'name': constraint_name}
@reflection.cache
@_db_plus_owner
def get_foreign_keys(self, connection, tablename,
dbname, owner, schema, **kw):
RR = ischema.ref_constraints
C = ischema.key_constraints.alias('C')
R = ischema.key_constraints.alias('R')
# Foreign key constraints
s = sql.select([C.c.column_name,
R.c.table_schema, R.c.table_name, R.c.column_name,
RR.c.constraint_name, RR.c.match_option,
RR.c.update_rule,
RR.c.delete_rule],
sql.and_(C.c.table_name == tablename,
C.c.table_schema == owner,
C.c.constraint_name == RR.c.constraint_name,
R.c.constraint_name ==
RR.c.unique_constraint_name,
C.c.ordinal_position == R.c.ordinal_position
),
order_by=[RR.c.constraint_name, R.c.ordinal_position]
)
# group rows by constraint ID, to handle multi-column FKs
fkeys = []
fknm, scols, rcols = (None, [], [])
def fkey_rec():
return {
'name': None,
'constrained_columns': [],
'referred_schema': None,
'referred_table': None,
'referred_columns': []
}
fkeys = util.defaultdict(fkey_rec)
for r in connection.execute(s).fetchall():
scol, rschema, rtbl, rcol, rfknm, fkmatch, fkuprule, fkdelrule = r
rec = fkeys[rfknm]
rec['name'] = rfknm
if not rec['referred_table']:
rec['referred_table'] = rtbl
if schema is not None or owner != rschema:
if dbname:
rschema = dbname + "." + rschema
rec['referred_schema'] = rschema
local_cols, remote_cols = \
rec['constrained_columns'],\
rec['referred_columns']
local_cols.append(scol)
remote_cols.append(rcol)
return list(fkeys.values())
| {
"content_hash": "f84bea3c4d9c48c6ccda9f11c1e66f76",
"timestamp": "",
"source": "github",
"line_count": 2057,
"max_line_length": 101,
"avg_line_length": 34.403500243072436,
"alnum_prop": 0.5778176576983948,
"repo_name": "NoahFlowa/glowing-spoon",
"id": "6975754c6379a1315d3963ee49f50dbf7b3d38d8",
"size": "71002",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "venv/lib/python2.7/site-packages/sqlalchemy/dialects/mssql/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "5939"
},
{
"name": "CSS",
"bytes": "9915"
},
{
"name": "HTML",
"bytes": "5598"
},
{
"name": "JavaScript",
"bytes": "6187"
},
{
"name": "Python",
"bytes": "8918485"
},
{
"name": "Shell",
"bytes": "3248"
}
],
"symlink_target": ""
} |
from unittest.mock import Mock, call, patch
from django.test import TestCase
from django.db.models import QuerySet
from requests.exceptions import ConnectionError
from jarbas.chamber_of_deputies.management.commands.receipts import Command
class TestCommandHandler(TestCase):
    """Tests for ``Command.handle``, the management-command entry point."""

    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.get_queryset')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.fetch')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.print_count')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.print_pause')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts.sleep')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts.print')
    def test_handler_with_queryset(self, print_, sleep, print_pause, print_count, fetch, get_queryset):
        """handle() loops over non-empty batches, pausing between them."""
        # Three truthy batches, then a falsy one to terminate the loop.
        get_queryset.side_effect = (True, True, True, False)
        command = Command()
        command.handle(batch_size=3, pause=42)
        print_.assert_has_calls((call('Loading…'), call('Done!')))
        print_pause.assert_has_calls((call(), call()))
        print_count.assert_called_once_with(permanent=True)
        # It sleeps `pause` seconds between batches (not after the last one).
        sleep.assert_has_calls([call(42)] * 2)
        self.assertEqual(3, fetch.call_count)
        self.assertEqual(3, command.batch)
        self.assertEqual(42, command.pause)
        self.assertEqual(0, command.count)

    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.get_queryset')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.fetch')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts.print')
    def test_handler_without_queryset(self, print_, fetch, get_queryset):
        """handle() reports and exits without fetching when there is no data."""
        get_queryset.return_value = False
        command = Command()
        command.handle(batch_size=42, pause=1)
        print_.assert_has_calls([
            call('Loading…'),
            call('Nothing to fetch.')
        ])
        get_queryset.assert_called_once_with()
        fetch.assert_not_called()
        self.assertEqual(42, command.batch)
        self.assertEqual(1, command.pause)
        self.assertEqual(0, command.count)

    def test_add_arguments(self):
        """add_arguments() registers exactly two CLI options."""
        parser = Mock()
        command = Command()
        command.add_arguments(parser)
        self.assertEqual(2, parser.add_argument.call_count)
class TestCommandMethods(TestCase):
    """Unit tests for the Command helper methods (fetch/update/bulk_update)."""

    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.update')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.bulk_update')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.print_count')
    def test_fetch(self, print_count, bulk_update, update):
        """fetch() updates every queryset item, counts them, then bulk-saves once."""
        command = Command()
        command.count = 0
        command.queryset = (1, 2, 3)
        command.queue = []
        command.fetch()
        print_count.assert_has_calls((call(), call(), call()))
        update.assert_has_calls(call(i) for i in range(1, 4))
        self.assertEqual(3, command.count)
        bulk_update.assert_called_once_with()

    @patch.object(QuerySet, '__getitem__')
    @patch.object(QuerySet, 'filter', return_value=QuerySet())
    def test_get_queryset(self, filter_, getitem):
        """get_queryset() filters unfetched receipts and slices one batch."""
        command = Command()
        command.batch = 42
        command.get_queryset()
        filter_.assert_called_once_with(receipt_fetched=False)
        getitem.assert_called_once_with(slice(None, 42))

    def test_update(self):
        """update() fetches the receipt URL and enqueues the reimbursement."""
        reimbursement = Mock()
        command = Command()
        command.queue = []
        command.update(reimbursement)
        reimbursement.get_receipt_url.assert_called_once_with(bulk=True)
        self.assertEqual(1, len(command.queue))

    def test_update_with_error(self):
        """A ConnectionError while fetching must not enqueue the item."""
        reimbursement = Mock()
        reimbursement.get_receipt_url.side_effect = ConnectionError()
        command = Command()
        command.queue = []
        command.update(reimbursement)
        reimbursement.get_receipt_url.assert_called_once_with(bulk=True)
        self.assertEqual(0, len(command.queue))

    @patch('jarbas.chamber_of_deputies.management.commands.receipts.bulk_update')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.print_saving')
    def test_bulk_update(self, print_saving, bulk_update):
        """bulk_update() persists the queue with the right fields and clears it."""
        command = Command()
        command.queue = [1, 2, 3]
        command.bulk_update()
        fields = ['receipt_url', 'receipt_fetched']
        bulk_update.assert_called_once_with([1, 2, 3], update_fields=fields)
        self.assertEqual([], command.queue)
        print_saving.assert_called_once_with()
class TestCommandPrintMethods(TestCase):
    """Unit tests for the Command console-output helpers."""

    def test_count_msg(self):
        """count_msg() formats the running counter into a status string."""
        command = Command()
        command.count = 42
        self.assertEqual('42 receipt URLs fetched', command.count_msg())

    @patch('jarbas.chamber_of_deputies.management.commands.receipts.print')
    def test_print_msg(self, print_):
        """print_msg() first rewinds/clears the previous line via ANSI codes."""
        Command.print_msg('42')
        print_.assert_has_calls((
            call('\x1b[1A\x1b[2K\x1b[1A'),
            call('42')
        ))

    @patch('jarbas.chamber_of_deputies.management.commands.receipts.print')
    def test_print_permanent_msg(self, print_):
        """With permanent=True the message is printed without rewinding."""
        Command.print_msg('42', permanent=True)
        print_.assert_called_once_with('42')

    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.count_msg')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.print_msg')
    def test_print_count(self, print_msg, count_msg):
        """print_count() forwards the counter message (and permanent flag)."""
        count_msg.return_value = '42'
        command = Command()
        command.print_count()
        command.print_count(permanent=True)
        print_msg.assert_has_calls((call('42'), call('42', permanent=True)))

    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.count_msg')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.print_msg')
    def test_print_pause(self, print_msg, count_msg):
        """print_pause() appends the throttling notice to the counter message."""
        count_msg.return_value = '42'
        command = Command()
        command.print_pause()
        print_msg.assert_called_once_with('42 (Taking a break to avoid being blocked…)')

    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.count_msg')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts.Command.print_msg')
    def test_print_saving(self, print_msg, count_msg):
        """print_saving() appends the persistence notice to the counter message."""
        count_msg.return_value = '42'
        command = Command()
        command.print_saving()
        print_msg.assert_called_once_with('42 (Saving the URLs to the database…)')
| {
"content_hash": "183db01e2d6b727ff4f6b8699b2b2357",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 103,
"avg_line_length": 43.38157894736842,
"alnum_prop": 0.6748559296329997,
"repo_name": "marcusrehm/serenata-de-amor",
"id": "689f541c0007736ef8a760ce143906372ff519e1",
"size": "6602",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "jarbas/chamber_of_deputies/tests/test_receipts_command.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "301"
},
{
"name": "Elm",
"bytes": "131019"
},
{
"name": "HTML",
"bytes": "4527"
},
{
"name": "JavaScript",
"bytes": "1468"
},
{
"name": "Python",
"bytes": "425718"
},
{
"name": "Shell",
"bytes": "145"
}
],
"symlink_target": ""
} |
"""
Installation script for Nova's development virtualenv
"""
import os
import subprocess
import sys
# Repository root: two directories above this script's real location.
ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
# Location of the development virtualenv.
VENV = os.path.join(ROOT, '.nova-venv')
# Pip requirements file consumed by install_dependencies().
PIP_REQUIRES = os.path.join(ROOT, 'tools', 'pip-requires')
# Nova-patched Twisted tarball installed into the venv.
TWISTED_NOVA = 'http://nova.openstack.org/Twisted-10.0.0Nova.tar.gz'
# e.g. "python2.6" — used to locate the venv's site-packages directory.
PY_VERSION = "python%s.%s" % (sys.version_info[0], sys.version_info[1])
def die(message, *args):
    """Write ``message % args`` to stderr and exit with status 1."""
    sys.stderr.write((message % args) + "\n")
    sys.exit(1)
def check_python_version():
    """Abort with an error unless the interpreter is Python >= 2.6."""
    minimum = (2, 6)
    if sys.version_info < minimum:
        die("Need Python Version >= 2.6")
def run_command(cmd, redirect_output=True, check_exit_code=True):
    """
    Runs a command in an out-of-process shell, returning the
    output of that command. Working directory is ROOT.
    """
    # Capture stdout only when requested; otherwise inherit the console.
    stdout = subprocess.PIPE if redirect_output else None
    proc = subprocess.Popen(cmd, cwd=ROOT, stdout=stdout)
    output = proc.communicate()[0]
    if check_exit_code and proc.returncode != 0:
        die('Command "%s" failed.\n%s', ' '.join(cmd), output)
    return output
# Probe the environment once at import time: non-empty `which` output
# means the tool is on PATH.
HAS_EASY_INSTALL = bool(run_command(['which', 'easy_install'],
                                    check_exit_code=False).strip())
HAS_VIRTUALENV = bool(run_command(['which', 'virtualenv'],
                                  check_exit_code=False).strip())
def check_dependencies():
    """Make sure virtualenv is in the path.

    If it is missing and easy_install is available, try installing it;
    otherwise abort with an explanatory message.
    """
    if not HAS_VIRTUALENV:
        print 'not found.'
        # Try installing it via easy_install...
        if HAS_EASY_INSTALL:
            print 'Installing virtualenv via easy_install...',
            # NOTE(review): the first run_command only re-checks that
            # easy_install exists; the second actually installs virtualenv.
            if not (run_command(['which', 'easy_install']) and
                    run_command(['easy_install', 'virtualenv'])):
                die('ERROR: virtualenv not found.\n\nNova development'
                    ' requires virtualenv, please install it using your'
                    ' favorite package management tool')
            print 'done.'
    print 'done.'
def create_virtualenv(venv=VENV):
    """Creates the virtual environment and installs PIP only into the
    virtual environment

    NOTE(review): the *venv* parameter is ignored below — the module-level
    VENV constant is always used; confirm before relying on the argument.
    """
    print 'Creating venv...',
    run_command(['virtualenv', '-q', '--no-site-packages', VENV])
    print 'done.'
    print 'Installing pip in virtualenv...',
    if not run_command(['tools/with_venv.sh', 'easy_install', 'pip']).strip():
        die("Failed to install pip.")
    print 'done.'
def install_dependencies(venv=VENV):
    """Install Nova's Python dependencies into *venv* and wire up imports."""
    print 'Installing dependencies with pip (this can take a while)...'
    # Install greenlet by hand - just listing it in the requires file does not
    # get it installed in the right order
    run_command(['tools/with_venv.sh', 'pip', 'install', '-E', venv,
                 'greenlet'], redirect_output=False)
    run_command(['tools/with_venv.sh', 'pip', 'install', '-E', venv, '-r',
                 PIP_REQUIRES], redirect_output=False)
    run_command(['tools/with_venv.sh', 'pip', 'install', '-E', venv,
                 TWISTED_NOVA], redirect_output=False)

    # Tell the virtual env how to "import nova": a .pth file adds ROOT
    # to the venv's sys.path.
    pthfile = os.path.join(venv, "lib", PY_VERSION, "site-packages",
                           "nova.pth")
    f = open(pthfile, 'w')
    # NOTE(review): this file handle is never closed explicitly; the write
    # is flushed only when the interpreter drops the object.
    f.write("%s\n" % ROOT)

    # Patch eventlet (see FAQ # 1485)
    patchsrc = os.path.join(ROOT, 'tools', 'eventlet-patch')
    patchfile = os.path.join(venv, "lib", PY_VERSION, "site-packages",
                             "eventlet", "green", "subprocess.py")
    patch_cmd = "patch %s %s" % (patchfile, patchsrc)
    os.system(patch_cmd)
def print_help():
help = """
Nova development environment setup is complete.
Nova development uses virtualenv to track and manage Python dependencies
while in development and testing.
To activate the Nova virtualenv for the extent of your current shell
session you can run:
$ source .nova-venv/bin/activate
Or, if you prefer, you can run commands in the virtualenv on a case by case
basis by running:
$ tools/with_venv.sh <your command>
Also, make test will automatically use the virtualenv.
"""
print help
def main(argv):
    """Drive the full venv setup: check, create, install, explain.

    *argv* is accepted for symmetry with sys.argv but is currently unused.
    """
    check_python_version()
    check_dependencies()
    create_virtualenv()
    install_dependencies()
    print_help()


if __name__ == '__main__':
    main(sys.argv)
| {
"content_hash": "7bedb44036520533c174a3de8512b923",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 79,
"avg_line_length": 32.02238805970149,
"alnum_prop": 0.6229317175483571,
"repo_name": "superstack/nova",
"id": "812b1dd0fb860571a1ca50002c189e5b853a396a",
"size": "5103",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/install_venv.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "47238"
},
{
"name": "Python",
"bytes": "2491049"
},
{
"name": "Shell",
"bytes": "31698"
}
],
"symlink_target": ""
} |
'''
Run this script to update all the copyright headers of files
that were changed this year.
For example:
// Copyright (c) 2009-2012 The Bitcoin Core developers
it will change it to
// Copyright (c) 2009-2015 The Bitcoin Core developers
'''
import os
import time
import re
# Current year (UTC); files last edited this year get their header bumped.
year = time.gmtime()[0]

# Shell command printing the year of the last commit touching a file.
CMD_GIT_DATE = 'git log --format=%%ad --date=short -1 %s | cut -d"-" -f 1'
# Raw strings so the regex backslashes reach perl/re verbatim
# (non-raw "\d" is an invalid escape sequence in Python 3.6+).
CMD_REGEX = r"perl -pi -e 's/(20\d\d)(?:-20\d\d)? The Zurcoin/$1-%s The Zurcoin/' %s"
REGEX_CURRENT = re.compile(r"%s The Zurcoin" % year)
CMD_LIST_FILES = "find %s | grep %s"

# Source trees and file extensions whose headers are maintained.
FOLDERS = ["./qa", "./src"]
EXTENSIONS = [".cpp", ".h", ".py"]
def get_git_date(file_path):
    """Return the year (as text) of the last git commit touching *file_path*.

    Returns an empty string when git produces no output.
    """
    output = os.popen(CMD_GIT_DATE % file_path)
    for line in output:
        # Exactly one line is expected; strip its newline and return.
        return line.replace("\n", "")
    return ""
# Walk every maintained tree/extension and rewrite headers of files whose
# last git edit is this year but whose copyright range lacks it.
n=1
for folder in FOLDERS:
    for extension in EXTENSIONS:
        for file_path in os.popen(CMD_LIST_FILES % (folder, extension)):
            # `find` emits "./path\n"; drop the "." and newline, make absolute.
            file_path = os.getcwd() + file_path[1:-1]
            if file_path.endswith(extension):
                git_date = get_git_date(file_path)
                if str(year) == git_date:
                    # Only update if current year is not found
                    if REGEX_CURRENT.search(open(file_path, "r").read()) is None:
                        print n,"Last git edit", git_date, "-", file_path
                        os.popen(CMD_REGEX % (year,file_path))
                        n = n + 1
| {
"content_hash": "72528bc85352a8759def3746d5514fe5",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 83,
"avg_line_length": 29.622222222222224,
"alnum_prop": 0.6174043510877719,
"repo_name": "zurcoin/zurcoin",
"id": "4a67cc5bbbc26021b73c3214f185c58573a25887",
"size": "1355",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contrib/devtools/fix-copyright-headers.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "7639"
},
{
"name": "C",
"bytes": "771465"
},
{
"name": "C++",
"bytes": "3510522"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2100"
},
{
"name": "M4",
"bytes": "142248"
},
{
"name": "Makefile",
"bytes": "86189"
},
{
"name": "Objective-C",
"bytes": "3878"
},
{
"name": "Objective-C++",
"bytes": "7234"
},
{
"name": "Python",
"bytes": "221070"
},
{
"name": "QMake",
"bytes": "1264"
},
{
"name": "Roff",
"bytes": "18043"
},
{
"name": "Shell",
"bytes": "40407"
}
],
"symlink_target": ""
} |
"""
Author: Jonathan Rosado Lugo
Email: jonathan.rosado-lugo@hp.com
Description:
General script for parsing YAML lists and key value pairs
Dependencies:
-PyYaml
Input:
hudson:
securityRealm:
attributes: 'database-authentication'
disableSignup: 'true'
enableCaptcha: 'false'
authorizationStrategy:
attributes: 'login-authorization'
users:
unhashed:
- 'user1:password1'
- 'user2:password2'
hashed:
- 'user3:7ca1aab96fc6b8bcf8de0b83423fad2dde8d6bc8c12e9c31ef058322e7e4ed02'
plugins:
- 'durable-task:1.5'
- 'docker-plugin:0.9.1'
Ouput:
/hudson/authorizationStrategy/attributes|login-authorization
/hudson/securityRealm/attributes|database-authentication
/hudson/securityRealm/enableCaptcha|false
/hudson/securityRealm/disableSignup|true
/users/hashed|['user3:7ca1aab96fc6b8bcf8de0b83423fad2dde8d6bc8c12e9c31ef058322e7e4ed02']
/users/unhashed|['user1:password1', 'user2:password2']
/plugins|['durable-task:1.5', 'docker-plugin:0.9.1']
"""
import sys
import types
import yaml
import copy
def dispatch(yamlObject):
    """Walk a parsed YAML mapping depth-first and print each leaf.

    Output format is one line per leaf: /path/to/key|value.
    Exits the process when a non-dict, non-leaf value type is reached.
    """

    def cycle(obj, nodePath):
        # nodePath accumulates "/key" segments from root to current node.
        if type(obj) == types.DictType:
            for key, value in obj.iteritems():
                patch = copy.copy(nodePath) # We need a true copy; pointers to objects won't work
                patch.append('/' + key)
                # Strings, booleans and lists are leaves; anything else recurses.
                if type(value) == types.StringType or type(value) == types.BooleanType or type(value) == types.ListType:
                    print ''.join(patch) + '|' + value.__str__()
                else:
                    cycle(value, patch)
        else:
            sys.exit('RUN: Invalid value type reached PATH: ' + nodePath.__str__())

    cycle(yamlObject, [])

    return
def main():
    """CLI entry point: parse each YAML document in argv[1] and dump it."""
    args = sys.argv[1:]
    inYamlFile = args[0]
    # load_all supports multi-document YAML files ("---" separators).
    for yamlObject in yaml.load_all(open(inYamlFile)):
        dispatch(yamlObject)


if __name__ == '__main__':
    main()
# Breaks when yaml goes back to previous levels
# def dispatch(yamlObject):
#
# def cycle(obj, nodePath):
# if type(obj) == types.DictType:
# for key, value in obj.iteritems():
# nodePath.append('/' + key)
# if type(value) == types.StringType or type(value) == types.BooleanType or type(value) == types.ListType:
# print ''.join(nodePath) + '|' + value.__str__()
# nodePath = nodePath[:-1]
# else:
# cycle(value, nodePath)
# else:
# sys.exit('RUN: Invalid value type reached')
#
# cycle(yamlObject, [])
#
# return | {
"content_hash": "05bb9db24e5138f580b4af97303cf035",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 122,
"avg_line_length": 26.04040404040404,
"alnum_prop": 0.6206361520558572,
"repo_name": "gmatoshp/jenkins-tomcat-nginx",
"id": "34d8db963dc15bd0cc2b5dd22e7e101a741d97ee",
"size": "2601",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "configparser.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "603"
},
{
"name": "Nginx",
"bytes": "2880"
},
{
"name": "Python",
"bytes": "16313"
},
{
"name": "Shell",
"bytes": "21251"
}
],
"symlink_target": ""
} |
import re
import datetime
import times
from zitkino import parsers
from zitkino.models import Cinema, Showtime, ScrapedFilm
from . import scrapers, Scraper
# Static venue record for Kinokavárna (Brno); every scraped showtime
# references this single Cinema instance.
cinema = Cinema(
    name=u'Kinokavárna',
    url='http://www.kinokavarna.cz/',
    street=u'Náměstí SNP 33',
    town=u'Brno',
    coords=(49.2179300, 16.6207072)
)
@scrapers.register(cinema)
class KinokavarnaScraper(Scraper):
    """Scrape showtimes from the Kinokavárna programme page."""

    url = 'http://www.kinokavarna.cz/program.html'

    # e.g. "90 min" / "90 min." / "90 minut" — film length in minutes.
    length_re = re.compile(r'(\d+)\s*(min|min\.|minut)')
    year_re = re.compile(r'\d{4}')
    # e.g. "19:30" or "19.30".
    time_re = re.compile(r'(\d{1,2})[:\.](\d{2})')

    def __call__(self):
        """Yield a Showtime for every film entry found on the page."""
        resp = self.session.get(self.url)
        html = parsers.html(resp.content, base_url=resp.url)
        for entry in html.cssselect('#content-in .aktuality'):
            st = self._parse_entry(entry)
            if st:
                yield st

    def _parse_entry(self, entry):
        """Parse one programme entry element; return a Showtime or None.

        Entries without a "<n> min" length line are treated as
        non-film announcements and skipped.
        """
        try:
            # The first text line mentioning a length is the description.
            description = next(
                line for line
                in entry.text_content(whitespace=True).splitlines()
                if self.length_re.search(line)
            )
        except StopIteration:
            return None  # it's not a film

        # Date appears as "<day>.<month>.<year>" inside the heading span.
        date_el = entry.cssselect_first('h4 span')
        date = datetime.datetime(*reversed(
            [int(n) for n in date_el.text_content().split('.')]
        ))
        time_el = entry.cssselect_first('.start')
        time_match = self.time_re.search(time_el.text_content())
        time = datetime.time(
            int(time_match.group(1)),
            int(time_match.group(2)),
        )
        # Local (Prague) wall time converted to UTC for storage.
        starts_at = times.to_universal(
            datetime.datetime.combine(date, time),
            'Europe/Prague'
        )

        # The film title is the text immediately following the date span.
        title = date_el.tail

        tags = {}
        detail_data = {}
        # Description is comma-separated: year, length, language notes, ...
        details = [detail.strip() for detail in description.split(',')]
        for detail in details:
            if self.year_re.match(detail):
                detail_data['year'] = int(detail)
            match = self.length_re.match(detail)
            if match:
                detail_data['length'] = int(match.group(1))
            # Subtitle/dubbing notes become free-form tag keys.
            if 'tit.' in detail or 'titulky' in detail or 'dabing' in detail:
                tags[detail] = None

        return Showtime(
            cinema=cinema,
            film_scraped=ScrapedFilm(
                title_main_scraped=title,
                **detail_data
            ),
            starts_at=starts_at,
            tags=tags,
            url=self.url,
        )
| {
"content_hash": "04b6506e7eccf58168c589f626caa63f",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 77,
"avg_line_length": 27.45054945054945,
"alnum_prop": 0.5416333066453163,
"repo_name": "zitkino/zitkino.cz",
"id": "284e70cd5815c809ff8684bf60e31594b9c12bf7",
"size": "2528",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zitkino/scrapers/kinokavarna.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "49904"
},
{
"name": "HTML",
"bytes": "20053"
},
{
"name": "JavaScript",
"bytes": "1963"
},
{
"name": "Python",
"bytes": "103998"
}
],
"symlink_target": ""
} |
"""TensorFlow Eager execution prototype.
EXPERIMENTAL: APIs here are unstable and likely to change without notice.
To use, at program startup, call `tf.enable_eager_execution()`.
@@metrics
@@list_devices
@@num_gpus
@@py_func
@@defun
@@function
@@make_template
@@implicit_gradients
@@implicit_value_and_gradients
@@gradients_function
@@value_and_gradients_function
@@GradientTape
@@run
@@enable_eager_execution
@@enable_remote_eager_execution
@@custom_gradient
@@add_execution_callback
@@clear_execution_callbacks
@@errstate
@@ExecutionCallback
@@inf_callback
@@inf_nan_callback
@@nan_callback
@@seterr
@@Iterator
@@Saver
@@restore_variables_on_create
@@Variable
@@get_optimizer_variables
@@EagerVariableStore
@@Network
@@Sequential
@@save_network_checkpoint
@@restore_network_checkpoint
@@Checkpoint
@@Checkpointable
@@CheckpointableSaver
@@executing_eagerly
@@in_eager_mode
@@set_execution_mode
@@execution_mode
@@async_wait
@@async_clear_error
@@set_server_def
@@run_test_in_graph_and_eager_modes
@@run_all_tests_in_graph_and_eager_modes
@@TensorSpec
@@connect_to_remote_host
@@DEVICE_PLACEMENT_EXPLICIT
@@DEVICE_PLACEMENT_WARN
@@DEVICE_PLACEMENT_SILENT
@@SYNC
@@ASYNC
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint:disable=g-bad-import-order,g-import-not-at-top,unused-import
#
from tensorflow.contrib.eager.python import metrics
from tensorflow.contrib.eager.python.datasets import Iterator
from tensorflow.contrib.eager.python.network import Network
from tensorflow.contrib.eager.python.network import Sequential
from tensorflow.contrib.eager.python.network import save_network_checkpoint
from tensorflow.contrib.eager.python.network import restore_network_checkpoint
from tensorflow.contrib.eager.python.saver import get_optimizer_variables
from tensorflow.contrib.eager.python.saver import restore_variables_on_create
from tensorflow.contrib.eager.python.saver import Saver
from tensorflow.python.eager import backprop
from tensorflow.python.eager import function as _function_lib
from tensorflow.python.eager.context import DEVICE_PLACEMENT_EXPLICIT
from tensorflow.python.eager.context import DEVICE_PLACEMENT_WARN
from tensorflow.python.eager.context import DEVICE_PLACEMENT_SILENT
from tensorflow.python.eager.context import executing_eagerly
from tensorflow.python.eager.context import list_devices
from tensorflow.python.eager.context import set_execution_mode
from tensorflow.python.eager.context import execution_mode
from tensorflow.python.eager.context import async_wait
from tensorflow.python.eager.context import async_clear_error
from tensorflow.python.eager.context import SYNC
from tensorflow.python.eager.context import ASYNC
from tensorflow.python.eager.context import num_gpus
from tensorflow.python.eager.context import set_server_def
from tensorflow.python.eager.def_function import function
from tensorflow.python.eager.execution_callbacks import add_execution_callback
from tensorflow.python.eager.execution_callbacks import clear_execution_callbacks
from tensorflow.python.eager.execution_callbacks import errstate
from tensorflow.python.eager.execution_callbacks import ExecutionCallback
from tensorflow.python.eager.execution_callbacks import inf_callback
from tensorflow.python.eager.execution_callbacks import inf_nan_callback
from tensorflow.python.eager.execution_callbacks import nan_callback
from tensorflow.python.eager.execution_callbacks import seterr
from tensorflow.python.eager.remote import connect_to_remote_host
from tensorflow.python.framework.tensor_spec import TensorSpec
from tensorflow.python.framework.ops import enable_eager_execution
from tensorflow.python.framework.ops import enable_eager_execution_internal as enable_remote_eager_execution
from tensorflow.python.framework.ops import eager_run as run
from tensorflow.python.framework.test_util import run_in_graph_and_eager_modes as run_test_in_graph_and_eager_modes
from tensorflow.python.framework.test_util import run_all_in_graph_and_eager_modes as run_all_tests_in_graph_and_eager_modes
from tensorflow.python.ops.custom_gradient import custom_gradient
from tensorflow.python.ops.resource_variable_ops import ResourceVariable as Variable
from tensorflow.python.ops.variable_scope import EagerVariableStore
from tensorflow.python.ops import script_ops
from tensorflow.python.ops import template
from tensorflow.python.training.checkpointable.tracking import AutoCheckpointable as Checkpointable
from tensorflow.python.training.checkpointable.util import CheckpointableSaver
from tensorflow.python.training.checkpointable.util import Checkpoint
from tensorflow.python.util.all_util import remove_undocumented
# Short aliases re-exporting eager-execution APIs under the tfe.* namespace.
py_func = script_ops.eager_py_func
defun = _function_lib.defun
make_template = template.make_template_internal
implicit_gradients = backprop.implicit_grad
implicit_value_and_gradients = backprop.implicit_val_and_grad
gradients_function = backprop.gradients_function
value_and_gradients_function = backprop.val_and_grad_function
GradientTape = backprop.GradientTape  # pylint: disable=invalid-name

# Legacy synonym kept for backwards compatibility.
in_eager_mode = executing_eagerly

# Strip symbols not listed in the module docstring from the public API.
remove_undocumented(__name__)
| {
"content_hash": "133b57336c5f318a25326b2eee59cd53",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 124,
"avg_line_length": 36.45774647887324,
"alnum_prop": 0.8305968707745799,
"repo_name": "apark263/tensorflow",
"id": "b82e1bb71bce9a28d7bbbf961cc6d5e25dd18acf",
"size": "5866",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/contrib/eager/python/tfe.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "2867"
},
{
"name": "Batchfile",
"bytes": "14734"
},
{
"name": "C",
"bytes": "561314"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "54581021"
},
{
"name": "CMake",
"bytes": "207169"
},
{
"name": "Dockerfile",
"bytes": "39024"
},
{
"name": "Go",
"bytes": "1373561"
},
{
"name": "HTML",
"bytes": "4680118"
},
{
"name": "Java",
"bytes": "899393"
},
{
"name": "Jupyter Notebook",
"bytes": "2618454"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "75994"
},
{
"name": "Objective-C",
"bytes": "16140"
},
{
"name": "Objective-C++",
"bytes": "102889"
},
{
"name": "PHP",
"bytes": "14340"
},
{
"name": "Pascal",
"bytes": "399"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "44616385"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "838"
},
{
"name": "Shell",
"bytes": "504099"
},
{
"name": "Smarty",
"bytes": "10072"
}
],
"symlink_target": ""
} |
"""
Dictionary-based Logging Parser Module
"""
from .log_parser_v1 import LogParserV1
def get_parser(database):
    """Return a log parser suited to the database's version, or None.

    Database versions 1 and 2 share the v1 parser format; any other
    version is unsupported.
    """
    version = int(database.get_version())
    if version in (1, 2):
        return LogParserV1(database)
    return None
| {
"content_hash": "d47b706dfedf373da05feb387d35f4fd",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 49,
"avg_line_length": 20.9375,
"alnum_prop": 0.6716417910447762,
"repo_name": "zephyrproject-rtos/zephyr",
"id": "16f2f9ec8e9c088a183e6d4ce3a80f7a4e6a3dc7",
"size": "440",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "scripts/logging/dictionary/dictionary_parser/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "444860"
},
{
"name": "Batchfile",
"bytes": "110"
},
{
"name": "C",
"bytes": "45371144"
},
{
"name": "C++",
"bytes": "29398"
},
{
"name": "CMake",
"bytes": "1408561"
},
{
"name": "Cadence",
"bytes": "1501"
},
{
"name": "EmberScript",
"bytes": "997"
},
{
"name": "Forth",
"bytes": "1648"
},
{
"name": "GDB",
"bytes": "1285"
},
{
"name": "Haskell",
"bytes": "753"
},
{
"name": "JetBrains MPS",
"bytes": "3312"
},
{
"name": "PLSQL",
"bytes": "281"
},
{
"name": "Perl",
"bytes": "215578"
},
{
"name": "Python",
"bytes": "2273122"
},
{
"name": "Shell",
"bytes": "173841"
},
{
"name": "SmPL",
"bytes": "36840"
},
{
"name": "Smalltalk",
"bytes": "1885"
},
{
"name": "SourcePawn",
"bytes": "14890"
},
{
"name": "Tcl",
"bytes": "7034"
},
{
"name": "VBA",
"bytes": "294"
},
{
"name": "Verilog",
"bytes": "6394"
}
],
"symlink_target": ""
} |
from django import template
import markdown
# Template-tag library instance; Django picks up filters registered on it.
register = template.Library()
@register.filter
def markdown_this(text):
    """Template filter: render Markdown *text* to HTML.

    NOTE(review): safe_mode='escape' HTML-escapes raw markup embedded in
    the input; safe_mode is deprecated in newer python-markdown releases —
    confirm the pinned library version still supports it.
    """
    # safe_mode governs how the function handles raw HTML
    return markdown.markdown(text, safe_mode='escape')
| {
"content_hash": "9fa8ef50ff6574628a73fdb7fe2bc9e3",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 57,
"avg_line_length": 21.181818181818183,
"alnum_prop": 0.759656652360515,
"repo_name": "vmogilev/soloist",
"id": "6a485af07f1b53a1ba0def7f90d58d3956ce32de",
"size": "233",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/markdown_this.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2570"
},
{
"name": "HTML",
"bytes": "34905"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "PLpgSQL",
"bytes": "27253"
},
{
"name": "Python",
"bytes": "72282"
}
],
"symlink_target": ""
} |
from tests.conftest import JiraTestCase
class WorklogTests(JiraTestCase):
    """Exercise worklog create/read/update/delete against a live Jira server."""

    def setUp(self):
        JiraTestCase.setUp(self)
        self.issue_1 = self.test_manager.project_b_issue1
        self.issue_2 = self.test_manager.project_b_issue2
        self.issue_3 = self.test_manager.project_b_issue3

    def test_worklogs(self):
        entry = self.jira.add_worklog(self.issue_1, "2h")
        self.assertEqual(len(self.jira.worklogs(self.issue_1)), 1)
        entry.delete()

    def test_worklogs_with_issue_obj(self):
        issue = self.jira.issue(self.issue_1)
        entry = self.jira.add_worklog(issue, "2h")
        self.assertEqual(len(self.jira.worklogs(issue)), 1)
        entry.delete()

    def test_worklog(self):
        entry = self.jira.add_worklog(self.issue_1, "1d 2h")
        fetched = self.jira.worklog(self.issue_1, str(entry))
        self.assertEqual(fetched.author.name, self.test_manager.user_admin.name)
        self.assertEqual(fetched.timeSpent, "1d 2h")
        entry.delete()

    def test_worklog_with_issue_obj(self):
        issue = self.jira.issue(self.issue_1)
        entry = self.jira.add_worklog(issue, "1d 2h")
        fetched = self.jira.worklog(issue, str(entry))
        self.assertEqual(fetched.author.name, self.test_manager.user_admin.name)
        self.assertEqual(fetched.timeSpent, "1d 2h")
        entry.delete()

    def test_add_worklog(self):
        count_before = len(self.jira.worklogs(self.issue_2))
        entry = self.jira.add_worklog(self.issue_2, "2h")
        self.assertIsNotNone(entry)
        self.assertEqual(len(self.jira.worklogs(self.issue_2)), count_before + 1)
        entry.delete()

    def test_add_worklog_with_issue_obj(self):
        issue = self.jira.issue(self.issue_2)
        count_before = len(self.jira.worklogs(issue))
        entry = self.jira.add_worklog(issue, "2h")
        self.assertIsNotNone(entry)
        self.assertEqual(len(self.jira.worklogs(issue)), count_before + 1)
        entry.delete()

    def test_update_and_delete_worklog(self):
        entry = self.jira.add_worklog(self.issue_3, "3h")
        issue = self.jira.issue(self.issue_3, fields="worklog,timetracking")
        entry.update(comment="Updated!", timeSpent="2h")
        self.assertEqual(entry.comment, "Updated!")
        # rem_estimate = issue.fields.timetracking.remainingEstimate
        self.assertEqual(entry.timeSpent, "2h")
        issue = self.jira.issue(self.issue_3, fields="worklog,timetracking")
        self.assertEqual(issue.fields.timetracking.remainingEstimate, "1h")
        entry.delete()
        issue = self.jira.issue(self.issue_3, fields="worklog,timetracking")
        self.assertEqual(issue.fields.timetracking.remainingEstimate, "3h")
| {
"content_hash": "7c63487cb3412dbe1514b98a25d79821",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 84,
"avg_line_length": 44.01538461538462,
"alnum_prop": 0.6623558196434813,
"repo_name": "pycontribs/jira",
"id": "642539094eb7638e6eba827e15d6a320a09c5af1",
"size": "2861",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/resources/test_worklog.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "523"
},
{
"name": "Python",
"bytes": "422049"
},
{
"name": "Shell",
"bytes": "2069"
}
],
"symlink_target": ""
} |
import sys
import khmer
import os.path
import screed
# Word size passed to khmer.Nodegraph for k-mer tagging below.
K = 20
def main():
    """Partition reads according to the contigs whose k-mers they share.

    Usage: <readsfile> <contigfile> [<outfile>]; output defaults to the
    reads file's basename plus '.sweep'.
    """
    reads_fn = sys.argv[1]
    contigs_fn = sys.argv[2]
    out_fn = os.path.basename(reads_fn) + '.sweep'
    if len(sys.argv) == 4:
        out_fn = sys.argv[3]

    # create a nodegraph data structure
    graph = khmer.Nodegraph(K, 1, 1)

    # tag every k-mer in the contigs
    graph._set_tag_density(0)

    # load contigs, connect into N partitions
    print('loading contigs from', contigs_fn)
    graph.consume_seqfile_and_tag(contigs_fn)
    subset = graph.do_subset_partition(0, 0)
    graph.merge_subset(subset)

    print('outputting contig-partitioned reads to', out_fn)
    graph.output_partitions(reads_fn, out_fn, True)
# Script entry point.
if __name__ == '__main__':
    main()
| {
"content_hash": "67102357c6bb8a057d39858d9fa7ff77",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 60,
"avg_line_length": 22.454545454545453,
"alnum_prop": 0.650472334682861,
"repo_name": "ged-lab/khmer",
"id": "bf20a6468cf34cd942817feb4f0e4fa99975d077",
"size": "2537",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sandbox/sweep-out-reads-with-contigs.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "507274"
},
{
"name": "GLSL",
"bytes": "493"
},
{
"name": "Groff",
"bytes": "9581"
},
{
"name": "Makefile",
"bytes": "20859"
},
{
"name": "Python",
"bytes": "961316"
},
{
"name": "Shell",
"bytes": "4737"
}
],
"symlink_target": ""
} |
"""
Test the configuration parser.
"""
from __future__ import print_function
from unittest import TestCase
from contextlib import contextmanager
from voluptuous import (
MultipleInvalid,
)
from six import StringIO
from mock import (
patch,
MagicMock,
)
import plix.configuration
class ConfigurationTests(TestCase):
    """Unit tests for plix.configuration loading and normalization."""

    def test_load_from_stream(self):
        # YAML read from an in-memory stream.
        stream = StringIO(
            u"""
            script:
            - alpha
            - beta
            """,
        )
        loaded_conf = plix.configuration.load_from_stream(stream=stream)
        self.assertEqual(
            ['alpha', 'beta'],
            loaded_conf['script'],
        )

    def test_load_from_file(self):
        # Patch the module's open() so no real file is needed.
        @contextmanager
        def mocked_open(*args, **kwargs):
            yield StringIO(
                u"""
                script:
                - alpha
                - beta
                """,
            )

        with patch('plix.configuration.open', mocked_open, create=True):
            loaded_conf = plix.configuration.load_from_file(filename='foo.yml')

        self.assertEqual(
            ['alpha', 'beta'],
            loaded_conf['script'],
        )

    def test_command_or_command_list_with_strings(self):
        # A bare string is wrapped into a single-element list.
        value = "hello"
        self.assertEqual(
            [value],
            plix.configuration.command_or_command_list(value),
        )

    def test_command_or_command_list_with_lists(self):
        # Lists pass through unchanged.
        value = ["hello"]
        self.assertEqual(
            value,
            plix.configuration.command_or_command_list(value),
        )

    def test_command_or_command_list_with_tuples(self):
        # Tuples pass through unchanged.
        value = ("hello",)
        self.assertEqual(
            value,
            plix.configuration.command_or_command_list(value),
        )

    def test_command_or_command_list_with_int(self):
        with self.assertRaises(ValueError):
            plix.configuration.command_or_command_list(42)

    def test_command_or_command_list_with_floats(self):
        with self.assertRaises(ValueError):
            plix.configuration.command_or_command_list(42.0)

    def test_command_or_command_list_with_none(self):
        with self.assertRaises(ValueError):
            plix.configuration.command_or_command_list(None)

    def test_normalize_with_appropriate_configuration(self):
        # A valid configuration is preserved key-for-key.
        conf = {
            'matrix': {
                'alpha': 1,
                'beta': 2,
            },
            'install': ('install.sh',),
            'script': ['alpha'],
        }
        ref_conf = conf.copy()
        norm_conf = plix.configuration.normalize(conf)

        for key in ref_conf:
            self.assertEqual(ref_conf[key], norm_conf[key])

    def test_normalize_with_inappropriate_configuration(self):
        # Both invalid keys are reported in a single MultipleInvalid.
        conf = {
            'matrix': [],
            'script': {
                'key': 'value',
            },
        }

        with self.assertRaises(MultipleInvalid) as ex:
            plix.configuration.normalize(conf)

        self.assertEqual(2, len(ex.exception.errors))

    def test_normalize_transforms_values(self):
        # normalize() coerces a scalar script entry into a list.
        conf = {
            'script': 'alpha',
        }
        ref_conf = {
            'script': ['alpha'],
        }
        norm_conf = plix.configuration.normalize(conf)
        self.assertEqual(ref_conf['script'], norm_conf['script'])

    def test_normalize_parses_executors(self):
        """A dotted executor path is resolved and instantiated."""
        my_module = MagicMock()
        my_executor = my_module.MyExecutor()
        conf = {
            'executor': 'my_module.MyExecutor',
        }
        ref_conf = {
            'executor': my_executor,
        }
        with patch.dict(
            'sys.modules',
            {'my_module': my_module},
        ):
            norm_conf = plix.configuration.normalize(conf)

        self.assertEqual(ref_conf['executor'], norm_conf['executor'])

    def test_normalize_parses_executors_with_options(self):
        """An executor mapping passes its options dict to the constructor."""
        my_module = MagicMock()
        my_executor = my_module.MyExecutor()
        my_module.MyExecutor.reset_mock()
        conf = {
            'executor': {
                'name': 'my_module.MyExecutor',
                'options': {
                    'a': 'alpha',
                    'b': 'beta',
                },
            },
        }
        ref_conf = {
            'executor': my_executor,
        }
        with patch.dict(
            'sys.modules',
            {'my_module': my_module},
        ):
            norm_conf = plix.configuration.normalize(conf)

        self.assertEqual(ref_conf['executor'], norm_conf['executor'])
        my_module.MyExecutor.assert_called_once_with(
            options={
                'a': 'alpha',
                'b': 'beta',
            },
        )
| {
"content_hash": "293976532cce20a7294ee4a41a3d7272",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 79,
"avg_line_length": 26.52247191011236,
"alnum_prop": 0.5210760432111841,
"repo_name": "freelan-developers/plix",
"id": "45a47c9f22ecd27daa6368020cb8114e2adef46d",
"size": "4721",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_configuration.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "93"
},
{
"name": "Python",
"bytes": "39440"
},
{
"name": "Shell",
"bytes": "136"
}
],
"symlink_target": ""
} |
"""A demo for enforcing a timeout on multiple read operations."""
import sys
from syncless import coio
from syncless import patch
def Asker(timeout, age_answer_channel):
  """Prompt until a valid age (an integer >= 3) is read, send it, return.

  Runs as a tasklet; the accepted age is delivered on age_answer_channel.
  """
  print 'You have %s seconds altogether to tell me your age.' % timeout
  while True:
    sys.stdout.write('How old are you? ')
    sys.stdout.flush()
    answer = sys.stdin.readline()
    # readline() returns '' only at EOF; treat that as a fatal condition.
    assert answer
    answer = answer.strip()
    try:
      age = int(answer)
    except ValueError:
      print 'Please enter an integer.'
      continue
    if age < 3:
      print 'That would be too young. Please enter a valid age.'
      continue
    age_answer_channel.send(age)
    return
if __name__ == '__main__':
  patch.patch_stdin_and_stdout()  # sets sys.stdin = sys.stdout = ...
  patch.patch_stderr()  # For fair exception reporting.
  age_answer_channel = coio.stackless.channel()
  age_answer_channel.preference = 1  # Prefer the sender.
  timeout = 3
  # Run the prompt loop in a tasklet so the timeout can race it.
  asker_tasklet = coio.stackless.tasklet(Asker)(timeout, age_answer_channel)
  age = coio.receive_with_timeout(timeout, age_answer_channel)
  if age is None:  # Timed out.
    if asker_tasklet.alive:
      asker_tasklet.kill()
    print 'You were too slow entering your age.'
  else:
    print 'Got age: %r.' % age
| {
"content_hash": "1d3adf218a3df2b195a7802d1e22523e",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 76,
"avg_line_length": 29.61904761904762,
"alnum_prop": 0.6696141479099679,
"repo_name": "olopez32/syncless",
"id": "4f1b92fd9e4667b83879344b2f6f4750832a6120",
"size": "1276",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "examples/demo_multi_read_timeout_channel.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1099541"
},
{
"name": "C++",
"bytes": "21035"
},
{
"name": "JavaScript",
"bytes": "1098"
},
{
"name": "Makefile",
"bytes": "1531"
},
{
"name": "Python",
"bytes": "624802"
}
],
"symlink_target": ""
} |
import subprocess
from datetime import datetime
def execute(cmd):
	"""Run *cmd* through the shell and return its captured standard output."""
	child = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
	return child.communicate()[0]
def listRunning(proc, liveOnly=False):
subtasks = execute("ls /proc/%s/task" % proc)
for task in subtasks.splitlines():
stats = execute("cat /proc/%s/task/%s/stat" % (proc, task)).split()
if stats[2] == 'R':
print datetime.now(), "Subtask", stats[1], "pid:", stats[0], "is running on core", stats[len(stats) - 6]
elif not liveOnly:
print datetime.now(), "Subtask", stats[1], "pid:", stats[0], "is last run on core", stats[len(stats) - 6]
# Interactive driver: ask which process to inspect and how to report it.
pid = raw_input("enter process id: ")
live_answer = raw_input("live only? [Y,N] (default N): ")
live_only = live_answer in ('Y', 'y')
monitor_answer = raw_input("monitor? [Y,N] (default N): ")
monitor = monitor_answer in ('Y', 'y')

# When monitoring, keep polling forever; otherwise report exactly once.
keep_going = True
while keep_going:
	listRunning(pid, live_only)
	keep_going = monitor
| {
"content_hash": "0fa005337a39e24feadc7910b50e5c41",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 117,
"avg_line_length": 36.96296296296296,
"alnum_prop": 0.6362725450901804,
"repo_name": "h3adache/process",
"id": "904511eb17315ac6552c8f01f480278b810f0d54",
"size": "1016",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "procs.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "797"
}
],
"symlink_target": ""
} |
"""
The prime factors of 13195 are 5, 7, 13 and 29.
What is the largest prime factor of the number 600851475143 ?
"""
import sys
def is_prime_number(number):
    """Return True if *number* is prime, False otherwise.

    Uses trial division over [2, number // 2].
    """
    if number < 2:
        return False
    for i in range(2, (number // 2) + 1):
        # When i reaches more than the half, there are no more natural
        # numbers left that could divide evenly.
        if number % i == 0:
            return False
    # BUG FIX: the original returned True as soon as the FIRST candidate
    # divisor failed (if/else inside the loop), so e.g. 9 was reported
    # prime. Only declare prime after checking every candidate.
    return True
def main():
    """Print the primality of the integers 0..99, one per line."""
    # Removed the unused local `number = 8` from the original.
    for i in range(100):
        print(str(i) + ": " + str(is_prime_number(i)))
# Script entry point; main() returns None, so the exit status is 0.
if __name__ == '__main__':
    sys.exit(main())
| {
"content_hash": "e1b8cfe711eb319e7efd9c9e0f9d69a6",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 91,
"avg_line_length": 23.791666666666668,
"alnum_prop": 0.5779334500875657,
"repo_name": "Pauekn/project_euler",
"id": "b4c362d7d4e958c678c2a7b40d822fb8697d5c98",
"size": "590",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "number_3.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3021"
}
],
"symlink_target": ""
} |
"""The console module contains the :class:`Command` class that's
useful for building command-line scripts.
Consider a function `myfunc` that you want to call directly from the
command-line, but you want to avoid writing glue that deals with
argument parsing, converting those arguments to Python types and
passing them to other functions. Here's how `myfunc` could look like:
.. code-block:: python
def myfunc(a_string, a_list):
print a_string in a_list
`myfunc` takes two arguments, one is expeced to be a string, the other
one a list.
Let's use :class:`Command` to build a console script:
.. code-block:: python
from nolearn.console import Command
__doc__ = '''
Usage:
myprogram myfunc <config_file> [options]
'''
schema = '''
[myfunc]
a_string = string
a_list = listofstrings
'''
class Main(Command):
__doc__ = __doc__
schema = schema
funcs = [myfunc]
main = Main()
Note how we define a `schema` that has a definition of `myfunc`'s
arguments and their types. See :mod:`nolearn.inischema` for more
details on that.
We can then include this `main` function in our `setup.py` to get a
console script:
.. code-block:: python
setup(
name='myprogram',
# ...
entry_points='''
[console_scripts]
myprogram = myprogram.mymodule.main
''',
)
With this in place, you can now call the `myprogram` script like so:
.. code-block:: bash
$ myprogram myfunc args.ini
Where `args.ini` might look like:
.. code-block:: ini
[myfunc]
a_string = needle
a_list = haystack haystack needle haystack haystack
These constitute the two named arguments that will be passed into
`myfunc`. Passing of values is always done through `.ini` files.
You may also call your script with a `--profile=<fn>` option, which
you can use to profile your program using Python's standard
:mod:`cProfile` module.
A `--pdb` option is also available which allows you to automatically
enter post-mortem debugging when your script exits abnormally.
"""
import cProfile
import pdb
import os
import sys
import traceback
import warnings
import docopt
from .inischema import parse_config
# Emitted at import time: this whole module is scheduled for removal.
warnings.warn("""
The nolearn.console module will be removed in nolearn 0.6. If you
want to continue using this module, please consider copying the code
into your own project. And take a look at alternatives like the click
library.
""")
DEFAULT_OPTIONS = """
Options:
-h --help Show this screen
--pdb Do post mortem debugging on errors
--profile=<fn> Save a profile to <fn>
"""
class Command(object):
    # Base class for docopt-driven console commands. Subclasses provide:
    #   __doc__ - the docopt usage string for the program,
    #   schema  - an inischema definition used to parse <config_file>,
    #   funcs   - the functions that may be dispatched from the command line.
    __doc__ = None
    schema = None
    funcs = []

    def __init__(self, **kwargs):
        # Allow overriding class attributes (schema, funcs, ...) per instance.
        vars(self).update(kwargs)

    def doc(self):
        """Return the usage string, appending default options if absent."""
        doc = self.__doc__
        if 'Options:' not in doc:
            doc = doc + DEFAULT_OPTIONS
        return doc

    def __call__(self, argv=sys.argv):
        """Parse *argv*, load the ini config and dispatch to the chosen func."""
        doc = self.doc()
        arguments = docopt.docopt(doc, argv=argv[1:])
        self.arguments = arguments

        # Find the first registered function named on the command line.
        for func in self.funcs:
            if arguments[func.__name__]:
                break
        else:  # pragma: no cover
            raise KeyError("No function found to call.")

        with open(arguments['<config_file>']) as config_file:
            self.config = parse_config(self.schema, config_file.read())

        # Export any [env] section entries as environment variables.
        env = self.config.get('env', {})
        for key, value in env.items():
            os.environ[key.upper()] = value

        kwargs = self.config.get(func.__name__, {})

        # If profiling, wrap the function with another one that does the
        # profiling:
        if arguments.get('--profile'):
            func_ = func

            def prof(**kwargs):
                cProfile.runctx(
                    'func(**kwargs)',
                    globals(),
                    {'func': func_, 'kwargs': kwargs},
                    filename=arguments['--profile'],
                )
            func = prof

        # If debugging, call pdb.post_mortem() in the except clause.
        # The bare except is deliberate so post-mortem also triggers for
        # BaseException subclasses.
        try:
            func(**kwargs)
        except:
            if arguments.get('--pdb'):
                traceback.print_exc()
                # BUG FIX: sys.exc_traceback does not exist in Python 3;
                # sys.exc_info()[2] works on both Python 2 and 3.
                pdb.post_mortem(sys.exc_info()[2])
            else:  # pragma: no cover
                raise
| {
"content_hash": "0ed733dc4cc8a3230d4368ae6d472c2e",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 72,
"avg_line_length": 26.023952095808383,
"alnum_prop": 0.6063046479521399,
"repo_name": "rajegannathan/grasp-lift-eeg-cat-dog-solution-updated",
"id": "a773c362543b98fe77af962a951f4bee5d67e408",
"size": "4346",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "python-packages/nolearn-0.5/nolearn/console.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "113"
},
{
"name": "C",
"bytes": "9257"
},
{
"name": "C++",
"bytes": "410482"
},
{
"name": "CSS",
"bytes": "3812"
},
{
"name": "Makefile",
"bytes": "23871"
},
{
"name": "PHP",
"bytes": "2068"
},
{
"name": "PowerShell",
"bytes": "2988"
},
{
"name": "Python",
"bytes": "5993790"
},
{
"name": "R",
"bytes": "145799"
},
{
"name": "Shell",
"bytes": "8953"
},
{
"name": "TeX",
"bytes": "912"
}
],
"symlink_target": ""
} |
"""Connect to a MySensors gateway via pymysensors API."""
import logging
import voluptuous as vol
from homeassistant.components.mqtt import valid_publish_topic, valid_subscribe_topic
from homeassistant.const import CONF_OPTIMISTIC
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from .const import (
ATTR_DEVICES,
CONF_BAUD_RATE,
CONF_DEVICE,
CONF_GATEWAYS,
CONF_NODES,
CONF_PERSISTENCE,
CONF_PERSISTENCE_FILE,
CONF_RETAIN,
CONF_TCP_PORT,
CONF_TOPIC_IN_PREFIX,
CONF_TOPIC_OUT_PREFIX,
CONF_VERSION,
DOMAIN,
MYSENSORS_GATEWAYS,
)
from .device import get_mysensors_devices
from .gateway import finish_setup, get_mysensors_gateway, setup_gateways
_LOGGER = logging.getLogger(__name__)
# Deprecated option; CONFIG_SCHEMA strips it via deprecated(CONF_DEBUG).
CONF_DEBUG = "debug"
# Key for a node's friendly name inside NODE_SCHEMA entries.
CONF_NODE_NAME = "name"
# Defaults applied by GATEWAY_SCHEMA / CONFIG_SCHEMA when omitted.
DEFAULT_BAUD_RATE = 115200
DEFAULT_TCP_PORT = 5003
DEFAULT_VERSION = "1.4"
def has_all_unique_files(value):
    """Validate that all persistence files are unique and set if any is set."""
    file_names = [gateway.get(CONF_PERSISTENCE_FILE) for gateway in value]
    any_set = any(name is not None for name in file_names)
    # Mixing explicit and implicit persistence files is ambiguous.
    if any_set and None in file_names:
        raise vol.Invalid(
            "persistence file name of all devices must be set if any is set"
        )
    if any_set:
        # Delegate the uniqueness check to voluptuous.
        vol.Schema(vol.Unique())(file_names)
    return value
def is_persistence_file(value):
    """Validate that persistence file path ends in either .pickle or .json."""
    if not value.endswith((".json", ".pickle")):
        raise vol.Invalid(f"{value} does not end in either `.json` or `.pickle`")
    return value
def deprecated(key):
    """Mark key as deprecated in configuration."""

    def validator(config):
        """Warn about and drop *key* if present, then return the config."""
        if key in config:
            _LOGGER.warning(
                "%s option for %s is deprecated. Please remove %s from your "
                "configuration file",
                key,
                DOMAIN,
                key,
            )
            config.pop(key)
        return config

    return validator
# Maps a node id to its per-node options (currently only a friendly name).
NODE_SCHEMA = vol.Schema({cv.positive_int: {vol.Required(CONF_NODE_NAME): cv.string}})
# Options accepted for each entry in the gateways list.
GATEWAY_SCHEMA = {
    vol.Required(CONF_DEVICE): cv.string,
    vol.Optional(CONF_PERSISTENCE_FILE): vol.All(cv.string, is_persistence_file),
    vol.Optional(CONF_BAUD_RATE, default=DEFAULT_BAUD_RATE): cv.positive_int,
    vol.Optional(CONF_TCP_PORT, default=DEFAULT_TCP_PORT): cv.port,
    vol.Optional(CONF_TOPIC_IN_PREFIX): valid_subscribe_topic,
    vol.Optional(CONF_TOPIC_OUT_PREFIX): valid_publish_topic,
    vol.Optional(CONF_NODES, default={}): NODE_SCHEMA,
}
# Top-level component schema; strips the deprecated `debug` option first.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            vol.All(
                deprecated(CONF_DEBUG),
                {
                    vol.Required(CONF_GATEWAYS): vol.All(
                        cv.ensure_list, has_all_unique_files, [GATEWAY_SCHEMA]
                    ),
                    vol.Optional(CONF_OPTIMISTIC, default=False): cv.boolean,
                    vol.Optional(CONF_PERSISTENCE, default=True): cv.boolean,
                    vol.Optional(CONF_RETAIN, default=True): cv.boolean,
                    vol.Optional(CONF_VERSION, default=DEFAULT_VERSION): cv.string,
                },
            )
        )
    },
    extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
    """Set up the MySensors component."""
    gateways = await setup_gateways(hass, config)
    if gateways:
        hass.data[MYSENSORS_GATEWAYS] = gateways
        # Finish discovery in the background once gateways are registered.
        hass.async_create_task(finish_setup(hass, config, gateways))
        return True
    _LOGGER.error("No devices could be setup as gateways, check your configuration")
    return False
def _get_mysensors_name(gateway, node_id, child_id):
"""Return a name for a node child."""
node_name = "{} {}".format(gateway.sensors[node_id].sketch_name, node_id)
node_name = next(
(
node[CONF_NODE_NAME]
for conf_id, node in gateway.nodes_config.items()
if node.get(CONF_NODE_NAME) is not None and conf_id == node_id
),
node_name,
)
return f"{node_name} {child_id}"
@callback
def setup_mysensors_platform(
    hass,
    domain,
    discovery_info,
    device_class,
    device_args=None,
    async_add_entities=None,
):
    """Set up a MySensors platform.

    Creates one device per id in discovery_info[ATTR_DEVICES] that is not
    already registered, stores it in the per-domain device registry and
    returns the list of newly created devices (None if no discovery_info).
    """
    # Only act if called via MySensors by discovery event.
    # Otherwise gateway is not set up.
    if not discovery_info:
        return None
    if device_args is None:
        device_args = ()
    new_devices = []
    new_dev_ids = discovery_info[ATTR_DEVICES]
    for dev_id in new_dev_ids:
        devices = get_mysensors_devices(hass, domain)
        if dev_id in devices:
            # Already set up earlier; skip duplicates.
            continue
        gateway_id, node_id, child_id, value_type = dev_id
        gateway = get_mysensors_gateway(hass, gateway_id)
        if not gateway:
            continue
        device_class_copy = device_class
        if isinstance(device_class, dict):
            # A dict maps the child's presentation type to a device class.
            child = gateway.sensors[node_id].children[child_id]
            s_type = gateway.const.Presentation(child.type).name
            device_class_copy = device_class[s_type]
        name = _get_mysensors_name(gateway, node_id, child_id)
        args_copy = (*device_args, gateway, node_id, child_id, name, value_type)
        devices[dev_id] = device_class_copy(*args_copy)
        new_devices.append(devices[dev_id])
    if new_devices:
        _LOGGER.info("Adding new devices: %s", new_devices)
        if async_add_entities is not None:
            async_add_entities(new_devices, True)
    return new_devices
| {
"content_hash": "e5fa65bc67022d5131cb885b652869ac",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 88,
"avg_line_length": 31.456521739130434,
"alnum_prop": 0.6299239806496199,
"repo_name": "Teagan42/home-assistant",
"id": "a528be15e1475b4f03795b9404372fc8f541567a",
"size": "5788",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/mysensors/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "19774313"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
} |
"""Non-blocking HTTP client implementation using pycurl."""
from __future__ import absolute_import, division, print_function
import collections
import functools
import logging
import pycurl # type: ignore
import threading
import time
from io import BytesIO
from tornado import httputil
from tornado import ioloop
from tornado import stack_context
from tornado.escape import utf8, native_str
from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main
# Module logger; enabling DEBUG here also turns on libcurl VERBOSE output
# (see _curl_create below).
curl_log = logging.getLogger('tornado.curl_httpclient')
class CurlAsyncHTTPClient(AsyncHTTPClient):
    def initialize(self, max_clients=10, defaults=None):
        """Create the curl multi handle and a pool of max_clients easy handles."""
        super(CurlAsyncHTTPClient, self).initialize(defaults=defaults)
        self._multi = pycurl.CurlMulti()
        self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
        self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
        self._curls = [self._curl_create() for i in range(max_clients)]
        self._free_list = self._curls[:]
        self._requests = collections.deque()
        self._fds = {}
        self._timeout = None
        # libcurl has bugs that sometimes cause it to not report all
        # relevant file descriptors and timeouts to TIMERFUNCTION/
        # SOCKETFUNCTION. Mitigate the effects of such bugs by
        # forcing a periodic scan of all active requests.
        self._force_timeout_callback = ioloop.PeriodicCallback(
            self._handle_force_timeout, 1000)
        self._force_timeout_callback.start()
        # Work around a bug in libcurl 7.29.0: Some fields in the curl
        # multi object are initialized lazily, and its destructor will
        # segfault if it is destroyed without having been used. Add
        # and remove a dummy handle to make sure everything is
        # initialized.
        dummy_curl_handle = pycurl.Curl()
        self._multi.add_handle(dummy_curl_handle)
        self._multi.remove_handle(dummy_curl_handle)
    def close(self):
        """Stop timers, close every curl handle and release the multi handle."""
        self._force_timeout_callback.stop()
        if self._timeout is not None:
            self.io_loop.remove_timeout(self._timeout)
        for curl in self._curls:
            curl.close()
        self._multi.close()
        super(CurlAsyncHTTPClient, self).close()
        # Set below properties to None to reduce the reference count of current
        # instance, because those properties hold some methods of current
        # instance that will cause a circular reference.
        self._force_timeout_callback = None
        self._multi = None
    def fetch_impl(self, request, callback):
        """Queue the request and nudge libcurl to start processing it."""
        self._requests.append((request, callback))
        self._process_queue()
        self._set_timeout(0)
    def _handle_socket(self, event, fd, multi, data):
        """Called by libcurl when it wants to change the file descriptors
        it cares about.
        """
        # Translate libcurl poll flags to IOLoop event masks.
        event_map = {
            pycurl.POLL_NONE: ioloop.IOLoop.NONE,
            pycurl.POLL_IN: ioloop.IOLoop.READ,
            pycurl.POLL_OUT: ioloop.IOLoop.WRITE,
            pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE
        }
        if event == pycurl.POLL_REMOVE:
            if fd in self._fds:
                self.io_loop.remove_handler(fd)
                del self._fds[fd]
        else:
            ioloop_event = event_map[event]
            # libcurl sometimes closes a socket and then opens a new
            # one using the same FD without giving us a POLL_NONE in
            # between. This is a problem with the epoll IOLoop,
            # because the kernel can tell when a socket is closed and
            # removes it from the epoll automatically, causing future
            # update_handler calls to fail. Since we can't tell when
            # this has happened, always use remove and re-add
            # instead of update.
            if fd in self._fds:
                self.io_loop.remove_handler(fd)
            self.io_loop.add_handler(fd, self._handle_events,
                                     ioloop_event)
            self._fds[fd] = ioloop_event
    def _set_timeout(self, msecs):
        """Called by libcurl to schedule a timeout."""
        # Only one pending timeout at a time: replace any existing one.
        if self._timeout is not None:
            self.io_loop.remove_timeout(self._timeout)
        self._timeout = self.io_loop.add_timeout(
            self.io_loop.time() + msecs / 1000.0, self._handle_timeout)
    def _handle_events(self, fd, events):
        """Called by IOLoop when there is activity on one of our
        file descriptors.
        """
        action = 0
        if events & ioloop.IOLoop.READ:
            action |= pycurl.CSELECT_IN
        if events & ioloop.IOLoop.WRITE:
            action |= pycurl.CSELECT_OUT
        # Keep calling socket_action while libcurl asks for another pass.
        while True:
            try:
                ret, num_handles = self._multi.socket_action(fd, action)
            except pycurl.error as e:
                ret = e.args[0]
            if ret != pycurl.E_CALL_MULTI_PERFORM:
                break
        self._finish_pending_requests()
    def _handle_timeout(self):
        """Called by IOLoop when the requested timeout has passed."""
        with stack_context.NullContext():
            self._timeout = None
            while True:
                try:
                    ret, num_handles = self._multi.socket_action(
                        pycurl.SOCKET_TIMEOUT, 0)
                except pycurl.error as e:
                    ret = e.args[0]
                if ret != pycurl.E_CALL_MULTI_PERFORM:
                    break
            self._finish_pending_requests()
        # In theory, we shouldn't have to do this because curl will
        # call _set_timeout whenever the timeout changes. However,
        # sometimes after _handle_timeout we will need to reschedule
        # immediately even though nothing has changed from curl's
        # perspective. This is because when socket_action is
        # called with SOCKET_TIMEOUT, libcurl decides internally which
        # timeouts need to be processed by using a monotonic clock
        # (where available) while tornado uses python's time.time()
        # to decide when timeouts have occurred. When those clocks
        # disagree on elapsed time (as they will whenever there is an
        # NTP adjustment), tornado might call _handle_timeout before
        # libcurl is ready. After each timeout, resync the scheduled
        # timeout with libcurl's current state.
        new_timeout = self._multi.timeout()
        if new_timeout >= 0:
            self._set_timeout(new_timeout)
    def _handle_force_timeout(self):
        """Called by IOLoop periodically to ask libcurl to process any
        events it may have forgotten about.
        """
        with stack_context.NullContext():
            while True:
                try:
                    # socket_all scans every active handle, not just one fd.
                    ret, num_handles = self._multi.socket_all()
                except pycurl.error as e:
                    ret = e.args[0]
                if ret != pycurl.E_CALL_MULTI_PERFORM:
                    break
            self._finish_pending_requests()
    def _finish_pending_requests(self):
        """Process any requests that were completed by the last
        call to multi.socket_action.
        """
        while True:
            num_q, ok_list, err_list = self._multi.info_read()
            for curl in ok_list:
                self._finish(curl)
            for curl, errnum, errmsg in err_list:
                self._finish(curl, errnum, errmsg)
            if num_q == 0:
                break
        # Freed handles may now be reused for queued requests.
        self._process_queue()
    def _process_queue(self):
        """Start queued requests on free curl handles until one runs out."""
        with stack_context.NullContext():
            while True:
                started = 0
                while self._free_list and self._requests:
                    started += 1
                    curl = self._free_list.pop()
                    (request, callback) = self._requests.popleft()
                    # Per-request state stashed on the easy handle itself.
                    curl.info = {
                        "headers": httputil.HTTPHeaders(),
                        "buffer": BytesIO(),
                        "request": request,
                        "callback": callback,
                        "curl_start_time": time.time(),
                    }
                    try:
                        self._curl_setup_request(
                            curl, request, curl.info["buffer"],
                            curl.info["headers"])
                    except Exception as e:
                        # If there was an error in setup, pass it on
                        # to the callback. Note that allowing the
                        # error to escape here will appear to work
                        # most of the time since we are still in the
                        # caller's original stack frame, but when
                        # _process_queue() is called from
                        # _finish_pending_requests the exceptions have
                        # nowhere to go.
                        self._free_list.append(curl)
                        callback(HTTPResponse(
                            request=request,
                            code=599,
                            error=e))
                    else:
                        self._multi.add_handle(curl)
                if not started:
                    break
    def _finish(self, curl, curl_error=None, curl_message=None):
        """Deliver the response (or error) for a completed curl handle
        and return the handle to the free list for reuse.
        """
        info = curl.info
        curl.info = None
        self._multi.remove_handle(curl)
        self._free_list.append(curl)
        buffer = info["buffer"]
        if curl_error:
            error = CurlError(curl_error, curl_message)
            code = error.code
            effective_url = None
            # On error there is no usable body; release the buffer.
            buffer.close()
            buffer = None
        else:
            error = None
            code = curl.getinfo(pycurl.HTTP_CODE)
            effective_url = curl.getinfo(pycurl.EFFECTIVE_URL)
            # Rewind so the response consumer reads from the start.
            buffer.seek(0)
        # the various curl timings are documented at
        # http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html
        time_info = dict(
            queue=info["curl_start_time"] - info["request"].start_time,
            namelookup=curl.getinfo(pycurl.NAMELOOKUP_TIME),
            connect=curl.getinfo(pycurl.CONNECT_TIME),
            appconnect=curl.getinfo(pycurl.APPCONNECT_TIME),
            pretransfer=curl.getinfo(pycurl.PRETRANSFER_TIME),
            starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME),
            total=curl.getinfo(pycurl.TOTAL_TIME),
            redirect=curl.getinfo(pycurl.REDIRECT_TIME),
        )
        try:
            info["callback"](HTTPResponse(
                request=info["request"], code=code, headers=info["headers"],
                buffer=buffer, effective_url=effective_url, error=error,
                reason=info['headers'].get("X-Http-Reason", None),
                request_time=time.time() - info["curl_start_time"],
                time_info=time_info))
        except Exception:
            self.handle_callback_exception(info["callback"])
    def handle_callback_exception(self, callback):
        # Delegate to the IOLoop's standard callback-exception handling.
        self.io_loop.handle_callback_exception(callback)
    def _curl_create(self):
        """Create and pre-configure a new curl handle."""
        curl = pycurl.Curl()
        # Route libcurl's own debug output to our logger when it is at
        # DEBUG level.
        if curl_log.isEnabledFor(logging.DEBUG):
            curl.setopt(pycurl.VERBOSE, 1)
            curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug)
        # Restrict the handle to http/https (also across redirects).
        if hasattr(pycurl, 'PROTOCOLS'): # PROTOCOLS first appeared in pycurl 7.19.5 (2014-07-12)
            curl.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS)
            curl.setopt(pycurl.REDIR_PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS)
        return curl
    def _curl_setup_request(self, curl, request, buffer, headers):
        """Translate a Tornado HTTPRequest into options on a curl handle.

        Response body bytes are written into ``buffer`` (or streamed via
        ``request.streaming_callback``); response headers are collected
        into ``headers`` by ``_curl_header_callback``.
        """
        curl.setopt(pycurl.URL, native_str(request.url))
        # libcurl's magic "Expect: 100-continue" behavior causes delays
        # with servers that don't support it (which include, among others,
        # Google's OpenID endpoint). Additionally, this behavior has
        # a bug in conjunction with the curl_multi_socket_action API
        # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
        # which increases the delays. It's more trouble than it's worth,
        # so just turn off the feature (yes, setting Expect: to an empty
        # value is the official way to disable this)
        if "Expect" not in request.headers:
            request.headers["Expect"] = ""
        # libcurl adds Pragma: no-cache by default; disable that too
        if "Pragma" not in request.headers:
            request.headers["Pragma"] = ""
        curl.setopt(pycurl.HTTPHEADER,
                    ["%s: %s" % (native_str(k), native_str(v))
                     for k, v in request.headers.get_all()])
        curl.setopt(pycurl.HEADERFUNCTION,
                    functools.partial(self._curl_header_callback,
                                      headers, request.header_callback))
        # Body bytes either go to the user's streaming callback (scheduled
        # on the IOLoop) or accumulate in the response buffer.
        if request.streaming_callback:
            def write_function(chunk):
                self.io_loop.add_callback(request.streaming_callback, chunk)
        else:
            write_function = buffer.write
        if bytes is str: # py2
            curl.setopt(pycurl.WRITEFUNCTION, write_function)
        else: # py3
            # Upstream pycurl doesn't support py3, but ubuntu 12.10 includes
            # a fork/port. That version has a bug in which it passes unicode
            # strings instead of bytes to the WRITEFUNCTION. This means that
            # if you use a WRITEFUNCTION (which tornado always does), you cannot
            # download arbitrary binary data. This needs to be fixed in the
            # ported pycurl package, but in the meantime this lambda will
            # make it work for downloading (utf8) text.
            curl.setopt(pycurl.WRITEFUNCTION, lambda s: write_function(utf8(s)))
        curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
        curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
        # Timeouts are given to libcurl in milliseconds.
        curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
        curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
        if request.user_agent:
            curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
        else:
            curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
        if request.network_interface:
            curl.setopt(pycurl.INTERFACE, request.network_interface)
        if request.decompress_response:
            curl.setopt(pycurl.ENCODING, "gzip,deflate")
        else:
            curl.setopt(pycurl.ENCODING, "none")
        # Proxy configuration (with optional basic/digest proxy auth).
        if request.proxy_host and request.proxy_port:
            curl.setopt(pycurl.PROXY, request.proxy_host)
            curl.setopt(pycurl.PROXYPORT, request.proxy_port)
            if request.proxy_username:
                credentials = '%s:%s' % (request.proxy_username,
                                         request.proxy_password)
                curl.setopt(pycurl.PROXYUSERPWD, credentials)
                if (request.proxy_auth_mode is None or
                        request.proxy_auth_mode == "basic"):
                    curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_BASIC)
                elif request.proxy_auth_mode == "digest":
                    curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_DIGEST)
                else:
                    raise ValueError(
                        "Unsupported proxy_auth_mode %s" % request.proxy_auth_mode)
        else:
            # Clear any proxy settings left over from a previous request
            # on this reused handle.
            curl.setopt(pycurl.PROXY, '')
            curl.unsetopt(pycurl.PROXYUSERPWD)
        if request.validate_cert:
            curl.setopt(pycurl.SSL_VERIFYPEER, 1)
            curl.setopt(pycurl.SSL_VERIFYHOST, 2)
        else:
            curl.setopt(pycurl.SSL_VERIFYPEER, 0)
            curl.setopt(pycurl.SSL_VERIFYHOST, 0)
        if request.ca_certs is not None:
            curl.setopt(pycurl.CAINFO, request.ca_certs)
        else:
            # There is no way to restore pycurl.CAINFO to its default value
            # (Using unsetopt makes it reject all certificates).
            # I don't see any way to read the default value from python so it
            # can be restored later. We'll have to just leave CAINFO untouched
            # if no ca_certs file was specified, and require that if any
            # request uses a custom ca_certs file, they all must.
            pass
        if request.allow_ipv6 is False:
            # Curl behaves reasonably when DNS resolution gives an ipv6 address
            # that we can't reach, so allow ipv6 unless the user asks to disable.
            curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
        else:
            curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)
        # Set the request method through curl's irritating interface which makes
        # up names for almost every single method
        curl_options = {
            "GET": pycurl.HTTPGET,
            "POST": pycurl.POST,
            "PUT": pycurl.UPLOAD,
            "HEAD": pycurl.NOBODY,
        }
        custom_methods = set(["DELETE", "OPTIONS", "PATCH"])
        # Reset every method flag first: the handle is reused between
        # requests.
        for o in curl_options.values():
            curl.setopt(o, False)
        if request.method in curl_options:
            curl.unsetopt(pycurl.CUSTOMREQUEST)
            curl.setopt(curl_options[request.method], True)
        elif request.allow_nonstandard_methods or request.method in custom_methods:
            curl.setopt(pycurl.CUSTOMREQUEST, request.method)
        else:
            raise KeyError('unknown method ' + request.method)
        body_expected = request.method in ("POST", "PATCH", "PUT")
        body_present = request.body is not None
        if not request.allow_nonstandard_methods:
            # Some HTTP methods nearly always have bodies while others
            # almost never do. Fail in this case unless the user has
            # opted out of sanity checks with allow_nonstandard_methods.
            if ((body_expected and not body_present) or
                    (body_present and not body_expected)):
                raise ValueError(
                    'Body must %sbe None for method %s (unless '
                    'allow_nonstandard_methods is true)' %
                    ('not ' if body_expected else '', request.method))
        if body_expected or body_present:
            if request.method == "GET":
                # Even with `allow_nonstandard_methods` we disallow
                # GET with a body (because libcurl doesn't allow it
                # unless we use CUSTOMREQUEST). While the spec doesn't
                # forbid clients from sending a body, it arguably
                # disallows the server from doing anything with them.
                raise ValueError('Body must be None for GET request')
            request_buffer = BytesIO(utf8(request.body or ''))
            def ioctl(cmd):
                # libcurl may need to re-read the body, e.g. on a retry;
                # rewind the buffer when asked.
                if cmd == curl.IOCMD_RESTARTREAD:
                    request_buffer.seek(0)
            curl.setopt(pycurl.READFUNCTION, request_buffer.read)
            curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
            if request.method == "POST":
                curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or ''))
            else:
                curl.setopt(pycurl.UPLOAD, True)
                curl.setopt(pycurl.INFILESIZE, len(request.body or ''))
        if request.auth_username is not None:
            userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')
            if request.auth_mode is None or request.auth_mode == "basic":
                curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
            elif request.auth_mode == "digest":
                curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
            else:
                raise ValueError("Unsupported auth_mode %s" % request.auth_mode)
            curl.setopt(pycurl.USERPWD, native_str(userpwd))
            curl_log.debug("%s %s (username: %r)", request.method, request.url,
                           request.auth_username)
        else:
            curl.unsetopt(pycurl.USERPWD)
            curl_log.debug("%s %s", request.method, request.url)
        if request.client_cert is not None:
            curl.setopt(pycurl.SSLCERT, request.client_cert)
        if request.client_key is not None:
            curl.setopt(pycurl.SSLKEY, request.client_key)
        if request.ssl_options is not None:
            raise ValueError("ssl_options not supported in curl_httpclient")
        if threading.activeCount() > 1:
            # libcurl/pycurl is not thread-safe by default. When multiple threads
            # are used, signals should be disabled. This has the side effect
            # of disabling DNS timeouts in some environments (when libcurl is
            # not linked against ares), so we don't do it when there is only one
            # thread. Applications that use many short-lived threads may need
            # to set NOSIGNAL manually in a prepare_curl_callback since
            # there may not be any other threads running at the time we call
            # threading.activeCount.
            curl.setopt(pycurl.NOSIGNAL, 1)
        if request.prepare_curl_callback is not None:
            request.prepare_curl_callback(curl)
    def _curl_header_callback(self, headers, header_callback, header_line):
        """Receive one raw header line from libcurl and add it to ``headers``.

        The HTTP status line is translated into a synthetic
        ``X-Http-Reason`` header so the reason phrase survives into the
        response object.
        """
        header_line = native_str(header_line.decode('latin1'))
        if header_callback is not None:
            self.io_loop.add_callback(header_callback, header_line)
        # header_line as returned by curl includes the end-of-line characters.
        # whitespace at the start should be preserved to allow multi-line headers
        header_line = header_line.rstrip()
        if header_line.startswith("HTTP/"):
            # A new status line starts a new header block (e.g. after a
            # redirect or a 100-continue); drop the previous block.
            headers.clear()
            try:
                (__, __, reason) = httputil.parse_response_start_line(header_line)
                header_line = "X-Http-Reason: %s" % reason
            except httputil.HTTPInputError:
                return
        if not header_line:
            return
        headers.parse_line(header_line)
def _curl_debug(self, debug_type, debug_msg):
debug_types = ('I', '<', '>', '<', '>')
debug_msg = native_str(debug_msg)
if debug_type == 0:
curl_log.debug('%s', debug_msg.strip())
elif debug_type in (1, 2):
for line in debug_msg.splitlines():
curl_log.debug('%s %s', debug_types[debug_type], line)
elif debug_type == 4:
curl_log.debug('%s %r', debug_types[debug_type], debug_msg)
class CurlError(HTTPError):
    """HTTPError raised for libcurl failures; keeps the curl error number.

    The HTTP code is always 599, the conventional "network error" code
    used by tornado's HTTP clients.
    """
    def __init__(self, errno, message):
        super(CurlError, self).__init__(599, message)
        self.errno = errno
if __name__ == "__main__":
    # Make the curl-based client the default implementation, then run the
    # shared httpclient command-line test program.
    AsyncHTTPClient.configure(CurlAsyncHTTPClient)
    main()
| {
"content_hash": "b761ef18523ea3e8849e61c775084497",
"timestamp": "",
"source": "github",
"line_count": 505,
"max_line_length": 98,
"avg_line_length": 44.93663366336634,
"alnum_prop": 0.5848940201824351,
"repo_name": "SuminAndrew/tornado",
"id": "e0ec3fbaf88be76a29c15f3c7e6e9c8aa806af62",
"size": "23290",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tornado/curl_httpclient.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1664"
},
{
"name": "HTML",
"bytes": "25"
},
{
"name": "Python",
"bytes": "1565115"
},
{
"name": "Ruby",
"bytes": "1428"
},
{
"name": "Shell",
"bytes": "4070"
}
],
"symlink_target": ""
} |
"""Imports Onsets and Frames model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
| {
"content_hash": "9f2cc4feabb89acfac3199b8d78634c1",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 38,
"avg_line_length": 29.8,
"alnum_prop": 0.738255033557047,
"repo_name": "adarob/magenta",
"id": "97ff2189bfafa68a92985ad85282ff8bb2df6dc4",
"size": "734",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "magenta/models/onsets_frames_transcription/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1933"
},
{
"name": "Python",
"bytes": "2941402"
},
{
"name": "Shell",
"bytes": "24986"
}
],
"symlink_target": ""
} |
from setuptools import setup

# Read the long description as raw bytes and decode explicitly: a plain
# open() would use the machine's locale encoding and can fail or
# mis-decode the README on non-UTF-8 systems.
with open('README.rst', 'rb') as fp:
    long_description = fp.read().decode('utf-8')

setup(
    name='Flask-Elastic',
    version='0.2',
    download_url='https://github.com/bbmogool/flask-elastic/',
    license='BSD',
    author='Marcel Tschopp',
    author_email='mt@corova.net',
    description='Integrates official client for Elasticsearch into Flask',
    long_description=long_description,
    py_modules=['flask_elastic'],  # single-module distribution
    zip_safe=False,
    include_package_data=True,
    platforms='any',
    install_requires=[
        'Flask',
        'elasticsearch',
    ],
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ]
)
| {
"content_hash": "ed34f3ba08b92ffc7a1a06ae03154b1e",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 74,
"avg_line_length": 30.242424242424242,
"alnum_prop": 0.6072144288577155,
"repo_name": "marceltschoppch/flask-elastic",
"id": "b0417284e6a294c4addef0b2b446aab3cf7744f9",
"size": "998",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2059"
}
],
"symlink_target": ""
} |
import re
from .utils import Str, classify, get_regexp_width, Py36, Serialize, suppress
from .exceptions import UnexpectedCharacters, LexError, UnexpectedToken
###{standalone
from copy import copy
class Pattern(Serialize):
    """Abstract base for a terminal's matching pattern.

    Subclasses represent either a literal string or a regular expression;
    both can be rendered to a regexp via ``to_regexp()``.
    """
    # Original pattern text as written in the grammar, kept for display
    # purposes (may be None).
    raw = None
    def __init__(self, value, flags=(), raw=None):
        self.value = value
        self.flags = frozenset(flags)
        self.raw = raw
    def __repr__(self):
        return repr(self.to_regexp())
    # Pattern Hashing assumes all subclasses have a different priority!
    def __hash__(self):
        return hash((type(self), self.value, self.flags))
    def __eq__(self, other):
        return type(self) == type(other) and self.value == other.value and self.flags == other.flags
    def to_regexp(self):
        raise NotImplementedError()
    if Py36:
        # Python 3.6 changed syntax for flags in regular expression
        # (inline scoped flags: "(?f:...)"); pick the right form at class
        # creation time based on the running interpreter.
        def _get_flags(self, value):
            for f in self.flags:
                value = ('(?%s:%s)' % (f, value))
            return value
    else:
        def _get_flags(self, value):
            for f in self.flags:
                value = ('(?%s)' % f) + value
            return value
class PatternStr(Pattern):
    """A pattern that matches one literal string exactly."""

    __serialize_fields__ = 'value', 'flags'

    type = "str"

    def to_regexp(self):
        # Escape regex metacharacters so the literal matches verbatim.
        escaped = re.escape(self.value)
        return self._get_flags(escaped)

    @property
    def min_width(self):
        return len(self.value)

    # A literal always matches exactly its own length.
    max_width = min_width
class PatternRE(Pattern):
    """A pattern expressed directly as a regular expression."""

    __serialize_fields__ = 'value', 'flags', '_width'

    type = "re"

    # Cached (min, max) match width; computed lazily on first access.
    _width = None

    def to_regexp(self):
        return self._get_flags(self.value)

    def _get_width(self):
        width = self._width
        if width is None:
            width = get_regexp_width(self.to_regexp())
            self._width = width
        return width

    @property
    def min_width(self):
        return self._get_width()[0]

    @property
    def max_width(self):
        return self._get_width()[1]
class TerminalDef(Serialize):
    """A terminal definition: a name bound to a pattern, with a priority."""

    __serialize_fields__ = 'name', 'pattern', 'priority'
    __serialize_namespace__ = PatternStr, PatternRE

    def __init__(self, name, pattern, priority=1):
        assert isinstance(pattern, Pattern), pattern
        self.name = name
        self.pattern = pattern
        self.priority = priority

    def __repr__(self):
        return '%s(%r, %r)' % (type(self).__name__, self.name, self.pattern)

    def user_repr(self):
        # Generated (anonymous) terminals carry a '__' prefix; show the
        # raw pattern text for those when it is available.
        if not self.name.startswith('__'):
            return self.name
        return self.pattern.raw or self.name
class Token(Str):
    """A string with meta-information, that is produced by the lexer.

    When parsing text, the resulting chunks of the input that haven't been discarded,
    will end up in the tree as Token instances. The Token class inherits from Python's ``str``,
    so normal string comparisons and operations will work as expected.

    Attributes:
        type: Name of the token (as specified in grammar)
        value: Value of the token (redundant, as ``token.value == token`` will always be true)
        pos_in_stream: The index of the token in the text
        line: The line of the token in the text (starting with 1)
        column: The column of the token in the text (starting with 1)
        end_line: The line where the token ends
        end_column: The next column after the end of the token. For example,
            if the token is a single character with a column value of 4,
            end_column will be 5.
        end_pos: the index where the token ends (basically ``pos_in_stream + len(token)``)
    """
    __slots__ = ('type', 'pos_in_stream', 'value', 'line', 'column', 'end_line', 'end_column', 'end_pos')
    def __new__(cls, type_, value, pos_in_stream=None, line=None, column=None, end_line=None, end_column=None, end_pos=None):
        try:
            self = super(Token, cls).__new__(cls, value)
        except UnicodeDecodeError:
            # If the value can't be turned into a str directly, retry with
            # a latin1-decoded copy.
            value = value.decode('latin1')
            self = super(Token, cls).__new__(cls, value)
        self.type = type_
        self.pos_in_stream = pos_in_stream
        self.value = value
        self.line = line
        self.column = column
        self.end_line = end_line
        self.end_column = end_column
        self.end_pos = end_pos
        return self
    def update(self, type_=None, value=None):
        # Return a new Token with the same positions, optionally replacing
        # type and/or value.
        return Token.new_borrow_pos(
            type_ if type_ is not None else self.type,
            value if value is not None else self.value,
            self
        )
    @classmethod
    def new_borrow_pos(cls, type_, value, borrow_t):
        # Alternate constructor: copy all position info from borrow_t.
        return cls(type_, value, borrow_t.pos_in_stream, borrow_t.line, borrow_t.column, borrow_t.end_line, borrow_t.end_column, borrow_t.end_pos)
    def __reduce__(self):
        # NOTE(review): pickling keeps only the start position; end_line /
        # end_column / end_pos are dropped — confirm this is intentional.
        return (self.__class__, (self.type, self.value, self.pos_in_stream, self.line, self.column))
    def __repr__(self):
        return 'Token(%r, %r)' % (self.type, self.value)
    def __deepcopy__(self, memo):
        # NOTE(review): like __reduce__, this drops the end_* attributes
        # and ignores ``memo`` — confirm intentional.
        return Token(self.type, self.value, self.pos_in_stream, self.line, self.column)
    def __eq__(self, other):
        # Two Tokens are equal only if their types match as well; a Token
        # still compares equal to a plain string by value.
        if isinstance(other, Token) and self.type != other.type:
            return False
        return Str.__eq__(self, other)
    __hash__ = Str.__hash__
class LineCounter:
    """Tracks absolute character position, line and column while text is consumed."""

    __slots__ = 'char_pos', 'line', 'column', 'line_start_pos', 'newline_char'

    def __init__(self, newline_char):
        self.newline_char = newline_char
        self.char_pos = 0
        self.line = 1
        self.column = 1
        self.line_start_pos = 0

    def feed(self, token, test_newline=True):
        """Consume a token and calculate the new line & column.

        As an optional optimization, set test_newline=False if token doesn't contain a newline.
        """
        if test_newline:
            nl_count = token.count(self.newline_char)
            if nl_count:
                self.line += nl_count
                # Column numbering restarts just past the last newline seen.
                last_nl = token.rindex(self.newline_char)
                self.line_start_pos = self.char_pos + last_nl + 1
        self.char_pos += len(token)
        self.column = self.char_pos - self.line_start_pos + 1
class UnlessCallback:
    """Token callback that re-types a token when its value fully matches
    one of the stored (compiled regexp, group-index -> name) pairs.
    """

    def __init__(self, mres):
        self.mres = mres

    def __call__(self, t):
        for regexp, names_by_group in self.mres:
            match = regexp.match(t.value)
            if match is None:
                continue
            t.type = names_by_group[match.lastindex]
            break
        return t
class CallChain:
    """Compose two callbacks: apply callback1 to the input; if ``cond``
    accepts its result, return callback2 applied to the *original* input,
    otherwise return callback1's result.
    """

    def __init__(self, callback1, callback2, cond):
        self.callback1 = callback1
        self.callback2 = callback2
        self.cond = cond

    def __call__(self, t):
        intermediate = self.callback1(t)
        if self.cond(intermediate):
            return self.callback2(t)
        return intermediate
def _create_unless(terminals, g_regex_flags, re_, use_bytes):
    """Find literal-string terminals subsumed by a regexp terminal.

    For each PatternRE terminal, collect the PatternStr terminals of equal
    or lower priority whose literal is matched in full by the regexp.
    Those literals are recognized via an UnlessCallback attached to the
    regexp terminal's name; literals whose flags are a subset of the
    regexp's flags are removed from the terminal list entirely.

    Returns (remaining_terminals, callbacks_by_terminal_name).
    """
    tokens_by_type = classify(terminals, lambda t: type(t.pattern))
    assert len(tokens_by_type) <= 2, tokens_by_type.keys()
    embedded_strs = set()
    callback = {}
    for retok in tokens_by_type.get(PatternRE, []):
        unless = []
        for strtok in tokens_by_type.get(PatternStr, []):
            if strtok.priority > retok.priority:
                continue
            s = strtok.pattern.value
            m = re_.match(retok.pattern.to_regexp(), s, g_regex_flags)
            if m and m.group(0) == s:
                unless.append(strtok)
                if strtok.pattern.flags <= retok.pattern.flags:
                    embedded_strs.add(strtok)
        if unless:
            # match_whole=True: the literal must account for the entire
            # candidate token value.
            callback[retok.name] = UnlessCallback(build_mres(unless, g_regex_flags, re_, match_whole=True, use_bytes=use_bytes))
    terminals = [t for t in terminals if t not in embedded_strs]
    return terminals, callback
def _build_mres(terminals, max_size, g_regex_flags, match_whole, re_, use_bytes):
# Python sets an unreasonable group limit (currently 100) in its re module
# Worse, the only way to know we reached it is by catching an AssertionError!
# This function recursively tries less and less groups until it's successful.
postfix = '$' if match_whole else ''
mres = []
while terminals:
pattern = u'|'.join(u'(?P<%s>%s)' % (t.name, t.pattern.to_regexp() + postfix) for t in terminals[:max_size])
if use_bytes:
pattern = pattern.encode('latin-1')
try:
mre = re_.compile(pattern, g_regex_flags)
except AssertionError: # Yes, this is what Python provides us.. :/
return _build_mres(terminals, max_size//2, g_regex_flags, match_whole, re_, use_bytes)
mres.append((mre, {i: n for n, i in mre.groupindex.items()}))
terminals = terminals[max_size:]
return mres
def build_mres(terminals, g_regex_flags, re_, use_bytes, match_whole=False):
    # Entry point: start with all terminals in one batch and let
    # _build_mres fall back to smaller batches if re's group limit is hit.
    return _build_mres(terminals, len(terminals), g_regex_flags, match_whole, re_, use_bytes)
def _regexp_has_newline(r):
r"""Expressions that may indicate newlines in a regexp:
- newlines (\n)
- escaped newline (\\n)
- anything but ([^...])
- any-char (.) when the flag (?s) exists
- spaces (\s)
"""
return '\n' in r or '\\n' in r or '\\s' in r or '[^' in r or ('(?s' in r and '.' in r)
class Lexer(object):
    """Lexer interface

    Method Signatures:
        lex(self, text) -> Iterator[Token]
    """
    lex = NotImplemented

    def make_lexer_state(self, text):
        # Byte input needs a byte newline separator; text input a str one.
        newline = b'\n' if isinstance(text, bytes) else '\n'
        return LexerState(text, LineCounter(newline))
class TraditionalLexer(Lexer):
    """Context-free lexer: always matches against the full terminal set."""
    def __init__(self, conf):
        terminals = list(conf.terminals)
        assert all(isinstance(t, TerminalDef) for t in terminals), terminals
        self.re = conf.re_module
        if not conf.skip_validation:
            # Sanitization
            for t in terminals:
                try:
                    self.re.compile(t.pattern.to_regexp(), conf.g_regex_flags)
                except self.re.error:
                    raise LexError("Cannot compile token %s: %s" % (t.name, t.pattern))
                if t.pattern.min_width == 0:
                    raise LexError("Lexer does not allow zero-width terminals. (%s: %s)" % (t.name, t.pattern))
            assert set(conf.ignore) <= {t.name for t in terminals}
        # Init
        self.newline_types = frozenset(t.name for t in terminals if _regexp_has_newline(t.pattern.to_regexp()))
        self.ignore_types = frozenset(conf.ignore)
        # Match order: higher priority first, then widest possible match,
        # then longest literal, with the name as a final tie-breaker.
        terminals.sort(key=lambda x: (-x.priority, -x.pattern.max_width, -len(x.pattern.value), x.name))
        self.terminals = terminals
        self.user_callbacks = conf.callbacks
        self.g_regex_flags = conf.g_regex_flags
        self.use_bytes = conf.use_bytes
        self.terminals_by_name = conf.terminals_by_name
        # Compiled regexps (and self.callback) are built lazily by _build(),
        # triggered the first time the `mres` property is read.
        self._mres = None
    def _build(self):
        terminals, self.callback = _create_unless(self.terminals, self.g_regex_flags, self.re, self.use_bytes)
        assert all(self.callback.values())
        for type_, f in self.user_callbacks.items():
            if type_ in self.callback:
                # Already a callback there, probably UnlessCallback
                self.callback[type_] = CallChain(self.callback[type_], f, lambda t: t.type == type_)
            else:
                self.callback[type_] = f
        self._mres = build_mres(terminals, self.g_regex_flags, self.re, self.use_bytes)
    @property
    def mres(self):
        # Lazy build of the compiled match tables.
        if self._mres is None:
            self._build()
        return self._mres
    def match(self, text, pos):
        # Returns (matched_text, terminal_name), or None if nothing matches
        # at `pos`.
        for mre, type_from_index in self.mres:
            m = mre.match(text, pos)
            if m:
                return m.group(0), type_from_index[m.lastindex]
    def lex(self, state, parser_state):
        # Yield tokens until next_token signals end-of-input via EOFError.
        with suppress(EOFError):
            while True:
                yield self.next_token(state, parser_state)
    def next_token(self, lex_state, parser_state=None):
        line_ctr = lex_state.line_ctr
        while line_ctr.char_pos < len(lex_state.text):
            res = self.match(lex_state.text, line_ctr.char_pos)
            if not res:
                allowed = {v for m, tfi in self.mres for v in tfi.values()} - self.ignore_types
                if not allowed:
                    allowed = {"<END-OF-FILE>"}
                raise UnexpectedCharacters(lex_state.text, line_ctr.char_pos, line_ctr.line, line_ctr.column,
                                           allowed=allowed, token_history=lex_state.last_token and [lex_state.last_token],
                                           state=parser_state, terminals_by_name=self.terminals_by_name)
            value, type_ = res
            if type_ not in self.ignore_types:
                t = Token(type_, value, line_ctr.char_pos, line_ctr.line, line_ctr.column)
                line_ctr.feed(value, type_ in self.newline_types)
                t.end_line = line_ctr.line
                t.end_column = line_ctr.column
                t.end_pos = line_ctr.char_pos
                if t.type in self.callback:
                    t = self.callback[t.type](t)
                    if not isinstance(t, Token):
                        raise LexError("Callbacks must return a token (returned %r)" % t)
                lex_state.last_token = t
                return t
            else:
                # Ignored terminal: advance position without emitting a
                # token, but still run any registered callback on a
                # throwaway Token.
                if type_ in self.callback:
                    t2 = Token(type_, value, line_ctr.char_pos, line_ctr.line, line_ctr.column)
                    self.callback[type_](t2)
                line_ctr.feed(value, type_ in self.newline_types)
        # EOF
        raise EOFError(self)
class LexerState:
    """Mutable lexing position: the input text, the current line counter,
    and the most recently produced token (kept for error reporting)."""

    __slots__ = 'text', 'line_ctr', 'last_token'

    def __init__(self, text, line_ctr, last_token=None):
        self.text = text
        self.line_ctr = line_ctr
        self.last_token = last_token

    def __copy__(self):
        # Share the (immutable) text, but give the clone its own line
        # counter so advancing one state does not move the other.
        return type(self)(self.text, copy(self.line_ctr), self.last_token)
class ContextualLexer(Lexer):
    """Lexer that picks its terminal set from the parser's current state.

    One TraditionalLexer is built per distinct set of acceptable terminals
    (states with identical accept-sets share a lexer); a root lexer over
    all terminals is kept for error reporting.
    """
    def __init__(self, conf, states, always_accept=()):
        terminals = list(conf.terminals)
        terminals_by_name = conf.terminals_by_name
        trad_conf = copy(conf)
        trad_conf.terminals = terminals
        lexer_by_tokens = {}
        self.lexers = {}
        for state, accepts in states.items():
            key = frozenset(accepts)
            try:
                # Reuse a lexer already built for an identical accept-set.
                lexer = lexer_by_tokens[key]
            except KeyError:
                accepts = set(accepts) | set(conf.ignore) | set(always_accept)
                lexer_conf = copy(trad_conf)
                lexer_conf.terminals = [terminals_by_name[n] for n in accepts if n in terminals_by_name]
                lexer = TraditionalLexer(lexer_conf)
                lexer_by_tokens[key] = lexer
            self.lexers[state] = lexer
        assert trad_conf.terminals is terminals
        self.root_lexer = TraditionalLexer(trad_conf)
    def make_lexer_state(self, text):
        return self.root_lexer.make_lexer_state(text)
    def lex(self, lexer_state, parser_state):
        try:
            while True:
                # Choose the lexer that matches the parser's current state.
                lexer = self.lexers[parser_state.position]
                yield lexer.next_token(lexer_state, parser_state)
        except EOFError:
            pass
        except UnexpectedCharacters as e:
            # In the contextual lexer, UnexpectedCharacters can mean that the terminal is defined, but not in the current context.
            # This tests the input against the global context, to provide a nicer error.
            try:
                last_token = lexer_state.last_token # Save last_token. Calling root_lexer.next_token will change this to the wrong token
                token = self.root_lexer.next_token(lexer_state, parser_state)
                raise UnexpectedToken(token, e.allowed, state=parser_state, token_history=[last_token], terminals_by_name=self.root_lexer.terminals_by_name)
            except UnexpectedCharacters:
                raise e # Raise the original UnexpectedCharacters. The root lexer raises it with the wrong expected set.
class LexerThread:
    """A thread that ties a lexer instance and a lexer state, to be used by the parser"""

    def __init__(self, lexer, text):
        self.lexer = lexer
        # Each thread owns its own lexing position over `text`.
        self.state = lexer.make_lexer_state(text)

    def lex(self, parser_state):
        # Delegate to the lexer, which drives this thread's own state.
        return self.lexer.lex(self.state, parser_state)
###}
| {
"content_hash": "4a211d8349b321d8ce40e95bc1362a75",
"timestamp": "",
"source": "github",
"line_count": 457,
"max_line_length": 156,
"avg_line_length": 35.65645514223195,
"alnum_prop": 0.587173979748389,
"repo_name": "erezsh/lark",
"id": "730d95e560a086e1bd8792eadb3483348fc8d13d",
"size": "16319",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lark/lexer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "GAP",
"bytes": "684"
},
{
"name": "Nearley",
"bytes": "44"
},
{
"name": "Python",
"bytes": "177298"
}
],
"symlink_target": ""
} |
from msrest.paging import Paged
class NetworkInterfacePaged(Paged):
    """
    A paging container for iterating over a list of :class:`NetworkInterface <azure.mgmt.network.v2017_08_01.models.NetworkInterface>` objects
    """
    # Maps the service response: 'nextLink' holds the URL of the next
    # page, 'value' holds the NetworkInterface items of the current page.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[NetworkInterface]'}
    }
    def __init__(self, *args, **kwargs):
        super(NetworkInterfacePaged, self).__init__(*args, **kwargs)
| {
"content_hash": "8134336c9108c324f4a824a84475d9f7",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 141,
"avg_line_length": 31.125,
"alnum_prop": 0.6285140562248996,
"repo_name": "AutorestCI/azure-sdk-for-python",
"id": "a969cc46ad68c1e9011490187462563c4fbaa697",
"size": "972",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "azure-mgmt-network/azure/mgmt/network/v2017_08_01/models/network_interface_paged.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "34619070"
}
],
"symlink_target": ""
} |
import setuptools
# Single source of truth for the release version; it is interpolated into
# the tarball URL below.
version = '1.1.6'
setuptools.setup(
    name='sure',
    version=version,
    # NOTE(review): this points at a source tarball; `download_url` is the
    # conventional setuptools field for that — confirm intent.
    url='http://pypi.python.org/packages/source/s/sure/sure-%s.tar.gz' % version,
    license='GPLv3',
    author='Gabriel Falcao',
    author_email='gabriel@nacaolivre.org'
)
| {
"content_hash": "76160578c1cf5876ead97bf6267fc365",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 81,
"avg_line_length": 22.5,
"alnum_prop": 0.6703703703703704,
"repo_name": "Scalr/packages",
"id": "fa942edb4314f2abb516e6e458bcf0addb6b2770",
"size": "270",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pkgs/sure/setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "15050"
}
],
"symlink_target": ""
} |
"""Search Google from the command line
This program is part of "Dive Into Python", a free Python book for
experienced programmers. Visit http://diveintopython.org/ for the
latest version.
"""
__author__ = "Mark Pilgrim (mark@diveintopython.org)"
__version__ = "$Revision: 1.2 $"
__date__ = "$Date: 2004/05/20 18:53:59 $"
__copyright__ = "Copyright (c) 2004 Mark Pilgrim"
__license__ = "Python"
from SOAPpy import WSDL
# you'll need to configure these two values;
# see http://www.google.com/apis/
WSDLFILE = '/path/to/copy/of/GoogleSearch.wsdl'
APIKEY = 'YOUR_GOOGLE_API_KEY'
# Module-level SOAP proxy, built once at import time from the WSDL file.
_server = WSDL.Proxy(WSDLFILE)
def search(q):
    """Search Google and return list of {title, link, description}"""
    response = _server.doGoogleSearch(
        APIKEY, q, 0, 10, False, "", False, "", "utf-8", "utf-8")
    hits = []
    for element in response.resultElements:
        hits.append({"title": element.title.encode("utf-8"),
                     "link": element.URL.encode("utf-8"),
                     "description": element.snippet.encode("utf-8")})
    return hits
if __name__ == '__main__':
    import sys
    # Print the top 5 results for the query given on the command line.
    # (Python 2 print statements; this script predates Python 3.)
    for r in search(sys.argv[1])[:5]:
        print r['title']
        print r['link']
        print r['description']
        print
| {
"content_hash": "9e1f010ace12c55c628d0bba0eedad56",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 69,
"avg_line_length": 31.56756756756757,
"alnum_prop": 0.6215753424657534,
"repo_name": "tapomayukh/projects_in_python",
"id": "55b37ad1058898a041866a6c8ecab6ccbaa78f0a",
"size": "1168",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sandbox_tapo/src/refs/diveintopython-pdf-5.4/diveintopython-5.4/py/search.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Matlab",
"bytes": "4903"
},
{
"name": "Python",
"bytes": "4451912"
}
],
"symlink_target": ""
} |
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import state
class bandwidth_constraints(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isn/neighbors/neighbor/subTLVs/subTLVs/bandwidth-constraints. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: This container defines bandwidth-constraints. For DS-TE, the
    existing Maximum Reservable link bandwidth parameter is retained,
    but its semantics is generalized and interpreted as the aggregate
    bandwidth constraint across all Class-Types
    """

    # NOTE: auto-generated code (see docstring); regenerate from the YANG
    # model rather than editing by hand.
    # "__state" is name-mangled to _bandwidth_constraints__state via __slots__.
    __slots__ = ("_path_helper", "_extmethods", "__state")

    _yang_name = "bandwidth-constraints"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):

        self._path_helper = False

        self._extmethods = False
        # The single child node: a config-false "state" container.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            # Copy-construct from another object carrying the same elements.
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    # Skip elements the source object never modified.
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Path of this node in the YANG tree; delegates to the parent when
        # attached, otherwise returns the absolute schema path.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "isis",
                "levels",
                "level",
                "link-state-database",
                "lsp",
                "tlvs",
                "tlv",
                "mt-isn",
                "neighbors",
                "neighbor",
                "subTLVs",
                "subTLVs",
                "bandwidth-constraints",
            ]

    def _get_state(self):
        """
        Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/bandwidth_constraints/state (container)

        YANG Description: State parameters of IS Extended Reachability sub-TLV 22.
        """
        return self.__state

    def _set_state(self, v, load=False):
        """
        Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/bandwidth_constraints/state (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_state is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_state() directly.

        YANG Description: State parameters of IS Extended Reachability sub-TLV 22.
        """
        if hasattr(v, "_utype"):
            # Unwrap an already-typed value back to its base representation.
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=state.state,
                is_container="container",
                yang_name="state",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """state must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__state = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_state(self):
        # Reset the child container back to a pristine default instance.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    # Only a getter is exposed publicly: this subtree is config false.
    state = __builtin__.property(_get_state)

    _pyangbind_elements = OrderedDict([("state", state)])
from . import state
class bandwidth_constraints(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isn/neighbors/neighbor/subTLVs/subTLVs/bandwidth-constraints. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: This container defines bandwidth-constraints. For DS-TE, the
    existing Maximum Reservable link bandwidth parameter is retained,
    but its semantics is generalized and interpreted as the aggregate
    bandwidth constraint across all Class-Types
    """

    # NOTE: auto-generated code (see docstring); regenerate from the YANG
    # model rather than editing by hand. This second definition (for the -l2
    # module) rebinds the name declared just above in this file.
    __slots__ = ("_path_helper", "_extmethods", "__state")

    _yang_name = "bandwidth-constraints"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):

        self._path_helper = False

        self._extmethods = False
        # The single child node: a config-false "state" container.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            # Copy-construct from another object carrying the same elements.
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    # Skip elements the source object never modified.
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Path of this node in the YANG tree; delegates to the parent when
        # attached, otherwise returns the absolute schema path.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "isis",
                "levels",
                "level",
                "link-state-database",
                "lsp",
                "tlvs",
                "tlv",
                "mt-isn",
                "neighbors",
                "neighbor",
                "subTLVs",
                "subTLVs",
                "bandwidth-constraints",
            ]

    def _get_state(self):
        """
        Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/bandwidth_constraints/state (container)

        YANG Description: State parameters of IS Extended Reachability sub-TLV 22.
        """
        return self.__state

    def _set_state(self, v, load=False):
        """
        Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/bandwidth_constraints/state (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_state is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_state() directly.

        YANG Description: State parameters of IS Extended Reachability sub-TLV 22.
        """
        if hasattr(v, "_utype"):
            # Unwrap an already-typed value back to its base representation.
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=state.state,
                is_container="container",
                yang_name="state",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """state must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__state = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_state(self):
        # Reset the child container back to a pristine default instance.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    # Only a getter is exposed publicly: this subtree is config false.
    state = __builtin__.property(_get_state)

    _pyangbind_elements = OrderedDict([("state", state)])
| {
"content_hash": "46ae1924276599e760dc1586d2c843b2",
"timestamp": "",
"source": "github",
"line_count": 332,
"max_line_length": 375,
"avg_line_length": 38.6144578313253,
"alnum_prop": 0.5840873634945398,
"repo_name": "napalm-automation/napalm-yang",
"id": "08b748cb7508764db697409a9403f73f70b26651",
"size": "12844",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs_/bandwidth_constraints/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "370237"
},
{
"name": "Jupyter Notebook",
"bytes": "152135"
},
{
"name": "Makefile",
"bytes": "1965"
},
{
"name": "Python",
"bytes": "105688785"
},
{
"name": "Roff",
"bytes": "1632"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from zope.dottedname.resolve import resolve
from adyen import Backend
from django.conf import settings
def get_backend():
    """Instantiate the backend class named by ``settings.ADYEN_BACKEND``.

    Falls back to :class:`SimpleSettingsBackend` when the setting is absent.
    """
    dotted_path = getattr(settings, 'ADYEN_BACKEND',
                          'django_adyen.backends.SimpleSettingsBackend')
    backend_class = resolve(dotted_path)
    return backend_class()
class SimpleSettingsBackend(Backend):
    """Adyen backend configured entirely from Django settings.

    Required settings: ``ADYEN_MERCHANT_ACCOUNT``, ``ADYEN_SKIN_CODE``,
    ``ADYEN_SKIN_SECRET``, ``ADYEN_NOTIFICATION_USER`` and
    ``ADYEN_NOTIFICATION_PASSWORD``.  Optional: ``ADYEN_IS_LIVE``
    (default ``False``) and ``ADYEN_PAYMENT_FLOW`` (default ``'onepage'``).
    """

    def __init__(self):
        merchant_account = settings.ADYEN_MERCHANT_ACCOUNT
        skin_code = settings.ADYEN_SKIN_CODE
        skin_secret = settings.ADYEN_SKIN_SECRET
        super(SimpleSettingsBackend, self).__init__(
            merchant_account, skin_code, skin_secret)
        # Optional knobs fall back to safe defaults when not configured.
        self.is_live = getattr(settings, 'ADYEN_IS_LIVE', False)
        self.payment_flow = getattr(settings, 'ADYEN_PAYMENT_FLOW', 'onepage')

    def get_notification_credentials(self):
        """Return the (user, password) pair for Adyen notification auth."""
        return (settings.ADYEN_NOTIFICATION_USER,
                settings.ADYEN_NOTIFICATION_PASSWORD)
| {
"content_hash": "ce4e709dc9cd05942fe98d126e94158e",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 78,
"avg_line_length": 30.655172413793103,
"alnum_prop": 0.6850393700787402,
"repo_name": "machtfit/adyen",
"id": "6cfe071239d977d984a8bae0ea0fd2359f4c08ed",
"size": "914",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_adyen/backends.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "33322"
}
],
"symlink_target": ""
} |
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of transifex tool
TX = 'tx'
# Name of source language file
SOURCE_LANG = 'digibyte_en.ts'
# Directory with locale files
LOCALE_DIR = 'src/qt/locale'
# Minimum number of messages for translation to be considered at all
MIN_NUM_MESSAGES = 10
# Regexp to check for DigiByte addresses
ADDRESS_REGEXP = re.compile('([13]|bc1)[a-zA-Z0-9]{30,}')
def check_at_repository_root():
    """Exit with status 1 unless the current directory is the repository root.

    The root is detected by the presence of a '.git' directory.
    """
    if not os.path.exists('.git'):
        # Fix: both diagnostic lines now go to stderr (the first one
        # previously went to stdout, splitting the error message).
        print('No .git directory found', file=sys.stderr)
        print('Execute this script at the root of the repository', file=sys.stderr)
        sys.exit(1)
def fetch_all_translations():
    """Pull every translation from Transifex; abort the script on failure."""
    exit_code = subprocess.call([TX, 'pull', '-f', '-a'])
    if exit_code:
        print('Error while fetching translations', file=sys.stderr)
        sys.exit(1)
def find_format_specifiers(s):
    '''Find all format specifiers in a string.'''
    specifiers = []
    search_from = 0
    while True:
        idx = s.find('%', search_from)
        if idx < 0:
            break
        # Record the character right after '%'.  A lone trailing '%' raises
        # IndexError here, which callers treat as a parse error.
        specifiers.append(s[idx + 1])
        search_from = idx + 2
    return specifiers
def split_format_specifiers(specifiers):
    '''Split format specifiers between numeric (Qt) and others (strprintf)'''
    qt_digits = {'1', '2', '3', '4', '5', '6', '7', '8', '9'}
    numeric = [spec for spec in specifiers if spec in qt_digits]
    other = [spec for spec in specifiers if spec not in qt_digits]

    # If both numeric format specifiers and "others" are used, assume we're dealing
    # with a Qt-formatted message. In the case of Qt formatting (see https://doc.qt.io/qt-5/qstring.html#arg)
    # only numeric formats are replaced at all. This means "(percentage: %1%)" is valid, without needing
    # any kind of escaping that would be necessary for strprintf. Without this, this function
    # would wrongly detect '%)' as a printf format specifier.
    if numeric:
        other = []

    # numeric (Qt) can be present in any order, others (strprintf) must be in specified order
    return set(numeric), other
def sanitize_string(s):
    '''Sanitize string for printing'''
    # Collapse newlines so each diagnostic stays on a single line.
    return ' '.join(s.split('\n'))
def check_format_specifiers(source, translation, errors, numerus):
    """Return True when *translation* uses the same format specifiers as *source*.

    Appends a human-readable description to *errors* for every problem found.
    """
    src_spec = split_format_specifiers(find_format_specifiers(source))
    # assert that no source messages contain both Qt and strprintf format specifiers
    # if this fails, go change the source as this is hacky and confusing!
    assert(not(src_spec[0] and src_spec[1]))
    try:
        trans_spec = split_format_specifiers(find_format_specifiers(translation))
    except IndexError:
        errors.append("Parse error in translation for '%s': '%s'" % (sanitize_string(source), sanitize_string(translation)))
        return False
    if src_spec == trans_spec:
        return True
    # Allow numerus translations to omit %n specifier (usually when it only has one possible value)
    omitted_numerus = (numerus and src_spec == (set(), ['n'])
                       and trans_spec == (set(), [])
                       and translation.find('%') == -1)
    if omitted_numerus:
        return True
    errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
    return False
def all_ts_files(suffix=''):
    """Yield (filename, filepath) for each language .ts file in LOCALE_DIR.

    The source language catalogue is always skipped.  When *suffix* is given,
    only files ending in '.ts' + suffix are yielded, with the suffix stripped
    from the reported name and path.
    """
    for entry in os.listdir(LOCALE_DIR):
        # process only language files, and do not process source language
        if not entry.endswith('.ts' + suffix) or entry == SOURCE_LANG + suffix:
            continue
        if suffix:  # remove provided suffix
            entry = entry[0:-len(suffix)]
        yield (entry, os.path.join(LOCALE_DIR, entry))
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')


def remove_invalid_characters(s):
    '''Remove invalid characters from translation string'''
    # Strip every control byte except \n (0x0a) and \r (0x0d), which XML permits.
    return b''.join(FIX_RE.split(s))
# Override cdata escape function to make our output match Qt's (optional, just for cleaner diffs for
# comparison, disable by default)
_orig_escape_cdata = None


def escape_cdata(text):
    """Escape character data like Qt: also entity-encode both quote kinds."""
    # _orig_escape_cdata is bound to ElementTree's original escaper in
    # postprocess_translations() when reduce_diff_hacks is enabled.
    return _orig_escape_cdata(text).replace("'", '&apos;').replace('"', '&quot;')
def contains_digibyte_addr(text, errors):
    """Return True (and record an error) when *text* contains a coin address."""
    if text is None:
        return False
    if ADDRESS_REGEXP.search(text) is None:
        return False
    errors.append('Translation "%s" contains an auroracoin address. This will be removed.' % (text))
    return True
def postprocess_translations(reduce_diff_hacks=False):
    """Validate, clean and rewrite every fetched .ts file.

    Each catalogue is moved aside to '<name>.orig', stripped of control
    characters, checked message-by-message (format specifiers, embedded
    addresses), pruned of location tags and unfinished translations, and
    dropped entirely when nearly empty.  The cleaned tree is written back
    to the original path.  Returns True when any translation was invalid.
    """
    print('Checking and postprocessing...')

    if reduce_diff_hacks:
        # Swap in the Qt-compatible CDATA escaper defined above.
        global _orig_escape_cdata
        _orig_escape_cdata = ET._escape_cdata
        ET._escape_cdata = escape_cdata

    # Move originals aside; cleaned output is written to the original path.
    for (filename, filepath) in all_ts_files():
        os.rename(filepath, filepath + '.orig')

    have_errors = False
    for (filename, filepath) in all_ts_files('.orig'):
        # pre-fixups to cope with transifex output
        parser = ET.XMLParser(encoding='utf-8')  # need to override encoding because 'utf8' is not understood only 'utf-8'
        with open(filepath + '.orig', 'rb') as f:
            data = f.read()
        # remove control characters; this must be done over the entire file otherwise the XML parser will fail
        data = remove_invalid_characters(data)
        tree = ET.parse(io.BytesIO(data), parser=parser)

        # iterate over all messages in file
        root = tree.getroot()
        for context in root.findall('context'):
            for message in context.findall('message'):
                numerus = message.get('numerus') == 'yes'
                source = message.find('source').text
                translation_node = message.find('translation')
                # pick all numerusforms
                if numerus:
                    translations = [i.text for i in translation_node.findall('numerusform')]
                else:
                    translations = [translation_node.text]

                for translation in translations:
                    if translation is None:
                        continue
                    errors = []
                    valid = check_format_specifiers(source, translation, errors, numerus) and not contains_digibyte_addr(translation, errors)

                    for error in errors:
                        print('%s: %s' % (filename, error))

                    if not valid:  # set type to unfinished and clear string if invalid
                        translation_node.clear()
                        translation_node.set('type', 'unfinished')
                        have_errors = True

                # Remove location tags
                for location in message.findall('location'):
                    message.remove(location)

                # Remove entire message if it is an unfinished translation
                if translation_node.get('type') == 'unfinished':
                    context.remove(message)

        # check if document is (virtually) empty, and remove it if so
        num_messages = 0
        for context in root.findall('context'):
            for message in context.findall('message'):
                num_messages += 1
        if num_messages < MIN_NUM_MESSAGES:
            print('Removing %s, as it contains only %i messages' % (filepath, num_messages))
            continue

        # write fixed-up tree
        # if diff reduction requested, replace some XML to 'sanitize' to qt formatting
        if reduce_diff_hacks:
            out = io.BytesIO()
            tree.write(out, encoding='utf-8')
            out = out.getvalue()
            out = out.replace(b' />', b'/>')
            with open(filepath, 'wb') as f:
                f.write(out)
        else:
            tree.write(filepath, encoding='utf-8')
    return have_errors
if __name__ == '__main__':
    # Must run from the repository root so relative paths (LOCALE_DIR) resolve.
    check_at_repository_root()
    fetch_all_translations()
    postprocess_translations()
| {
"content_hash": "f1e91e9fc123049a320e885960dee060",
"timestamp": "",
"source": "github",
"line_count": 211,
"max_line_length": 141,
"avg_line_length": 39.03317535545024,
"alnum_prop": 0.62178241864983,
"repo_name": "aurarad/auroracoin",
"id": "28c2f730d25b935136ea7bf6ff8b2db578587a80",
"size": "8442",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contrib/devtools/update-translations.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "721707"
},
{
"name": "C++",
"bytes": "3060648"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Groff",
"bytes": "18860"
},
{
"name": "HTML",
"bytes": "50620"
},
{
"name": "Makefile",
"bytes": "31933"
},
{
"name": "Objective-C",
"bytes": "1052"
},
{
"name": "Objective-C++",
"bytes": "6330"
},
{
"name": "Protocol Buffer",
"bytes": "2308"
},
{
"name": "Python",
"bytes": "110348"
},
{
"name": "QMake",
"bytes": "2022"
},
{
"name": "Shell",
"bytes": "51195"
}
],
"symlink_target": ""
} |
import os
import uuid
class CommandContextMocker:
    """Test helper that injects command parameters via environment variables."""

    def __init__(self):
        pass

    @staticmethod
    def set_vm_uuid_param(name):
        """Set env var *name* to a fresh UUID4 string and return that string.

        Bug fix: previously returned the ``uuid`` module object instead of
        the generated value.
        """
        vm_uuid = str(uuid.uuid4())
        CommandContextMocker.set_command_param(name, vm_uuid)
        return vm_uuid

    @staticmethod
    def set_command_param(name, value):
        """Expose *value* to the command under test as env var *name*."""
        os.environ[name] = value
| {
"content_hash": "26e017df70002b886760f8c2e711dafe",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 60,
"avg_line_length": 20.529411764705884,
"alnum_prop": 0.6332378223495702,
"repo_name": "QualiSystems/vCenterShell",
"id": "808829012fbc0190dc29027cc57d21217c3148d4",
"size": "349",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "package/cloudshell/tests/utils/command_context_mocker.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "8339"
},
{
"name": "Makefile",
"bytes": "7672"
},
{
"name": "Python",
"bytes": "629506"
},
{
"name": "Shell",
"bytes": "646"
}
],
"symlink_target": ""
} |
"""Python TAXII 2.1 Client API"""
from __future__ import unicode_literals
import json
import logging
import time
import six
from six.moves.urllib import parse as urlparse
from .. import MEDIA_TYPE_TAXII_V21
from ..common import (
_filter_kwargs_to_query_params, _grab_total_items_from_resource,
_TAXIIEndpoint
)
from ..exceptions import AccessError, ValidationError
# Module-level logger
log = logging.getLogger(__name__)
def as_pages(func, per_request=0, *args, **kwargs):
    """Creates a generator for TAXII 2.1 endpoints that support pagination.

    Args:
        func (callable): A v21 function that supports paged requests.
            Currently Get Objects and Get Manifest.
        per_request (int): How many items per request. Default 0.

    Use args or kwargs to pass filter information or other arguments required to make the call.
    """
    envelope = func(limit=per_request, *args, **kwargs)
    yield envelope

    server_page_size = _grab_total_items_from_resource(envelope)
    if envelope.get("more", False) and server_page_size != per_request:
        # The server returned a different page size than requested; follow its
        # lead so subsequent "next" requests line up with what it sends back.
        log.warning("TAXII Server Response with different amount of objects! Setting limit=%s", server_page_size)
        per_request = server_page_size

    # No further iterations happen when the first response was complete.
    while envelope.get("more", False):
        cursor = envelope.get("next", "")
        envelope = func(limit=per_request, next=cursor, *args, **kwargs)
        yield envelope
class Status(_TAXIIEndpoint):
    """TAXII Status Resource.

    This class represents the ``Get Status`` endpoint (section 4.3) and also
    contains the information about the Status Resource (section 4.3.1)
    """

    # We don't need to jump through the same lazy-load as with Collection,
    # since it's *far* less likely people will create these manually rather
    # than just getting them returned from Collection.add_objects(), and there
    # aren't other endpoints to call on the Status object.

    def __init__(self, url, conn=None, user=None, password=None, verify=True,
                 proxies=None, status_info=None, auth=None, cert=None):
        """Create an API root resource endpoint.

        Args:
            url (str): URL of a TAXII status resource endpoint
            user (str): username for authentication (optional)
            password (str): password for authentication (optional)
            conn (_HTTPConnection): reuse connection object, as an alternative
                to providing username/password
            status_info (dict): Parsed JSON representing a response from the
                status endpoint, if already known. If not given, the
                endpoint will be queried. (optional)
            verify (bool): validate the entity credentials. (default: True)
            proxies (dict): key/value pair for http/https proxy settings.
                (optional)
            cert (str or tuple): SSL client certificate default, if String,
                path to ssl client cert file (.pem). If Tuple, (‘cert’, ‘key’)
                pair. (optional)
        """
        super(Status, self).__init__(url, conn, user, password, verify, proxies, "2.1", auth=auth, cert=cert)
        self.__raw = None
        if status_info:
            # Trust caller-supplied data and skip the extra network round-trip.
            self._populate_fields(**status_info)
            self.__raw = status_info
        else:
            self.refresh()

    def __nonzero__(self):
        # Truthy only once the server reports the request fully processed.
        return self.status == "complete"

    __bool__ = __nonzero__  # Python 3 name for the truth-value hook

    @property
    def _raw(self):
        """Get the "raw" status response (parsed JSON)."""
        return self.__raw

    @property
    def custom_properties(self):
        # JSON members not defined by the TAXII spec, captured verbatim.
        return self._custom_properties

    def refresh(self, accept=MEDIA_TYPE_TAXII_V21):
        """Updates Status information"""
        response = self.__raw = self._conn.get(self.url, headers={"Accept": accept})
        self._populate_fields(**response)

    def wait_until_final(self, poll_interval=1, timeout=60):
        """It will poll the URL to grab the latest status resource in a given
        timeout and time interval.

        Args:
            poll_interval (int): how often to poll the status service.
            timeout (int): how long to poll the URL until giving up. Use <= 0
                to wait forever
        """
        start_time = time.time()
        elapsed = 0
        while (self.status != "complete" and
               (timeout <= 0 or elapsed < timeout)):
            time.sleep(poll_interval)
            self.refresh()
            elapsed = time.time() - start_time

    def _populate_fields(self, id=None, status=None, total_count=None,
                         success_count=None, failure_count=None,
                         pending_count=None, request_timestamp=None,
                         successes=None, failures=None, pendings=None,
                         **kwargs):
        """Assign Status resource fields, then validate them."""
        self.id = id  # required
        self.status = status  # required
        self.request_timestamp = request_timestamp  # optional
        self.total_count = total_count  # required
        self.success_count = success_count  # required
        self.failure_count = failure_count  # required
        self.pending_count = pending_count  # required
        self.successes = successes or []  # optional
        self.failures = failures or []  # optional
        self.pendings = pendings or []  # optional

        # Anything not captured by the optional arguments is treated as custom
        self._custom_properties = kwargs

        self._validate_status()

    def _validate_status(self):
        """Validates Status information. Raises errors for required
        properties."""
        if not self.id:
            msg = "No 'id' in Status for request '{}'"
            raise ValidationError(msg.format(self.url))

        if not self.status:
            msg = "No 'status' in Status for request '{}'"
            raise ValidationError(msg.format(self.url))

        if self.total_count is None:
            msg = "No 'total_count' in Status for request '{}'"
            raise ValidationError(msg.format(self.url))

        if self.success_count is None:
            msg = "No 'success_count' in Status for request '{}'"
            raise ValidationError(msg.format(self.url))

        if self.failure_count is None:
            msg = "No 'failure_count' in Status for request '{}'"
            raise ValidationError(msg.format(self.url))

        if self.pending_count is None:
            msg = "No 'pending_count' in Status for request '{}'"
            raise ValidationError(msg.format(self.url))

        # When detail lists are present, their lengths must match the counts.
        if self.successes and len(self.successes) != self.success_count:
            msg = "Found successes={}, but success_count={} in status '{}'"
            raise ValidationError(msg.format(self.successes,
                                             self.success_count,
                                             self.id))

        if self.pendings and len(self.pendings) != self.pending_count:
            msg = "Found pendings={}, but pending_count={} in status '{}'"
            raise ValidationError(msg.format(self.pendings,
                                             self.pending_count,
                                             self.id))

        if self.failures and len(self.failures) != self.failure_count:
            msg = "Found failures={}, but failure_count={} in status '{}'"
            raise ValidationError(msg.format(self.failures,
                                             self.failure_count,
                                             self.id))

        # The three partial counts must add up to the declared total.
        if (self.success_count + self.pending_count + self.failure_count !=
                self.total_count):
            msg = ("(success_count={} + pending_count={} + "
                   "failure_count={}) != total_count={} in status '{}'")
            raise ValidationError(msg.format(self.success_count,
                                             self.pending_count,
                                             self.failure_count,
                                             self.total_count,
                                             self.id))
class Collection(_TAXIIEndpoint):
"""Information about a TAXII Collection.
This class represents the ``Get a Collection`` endpoint (section 5.2), and
contains the information returned in the ``Collection Resource`` (section
5.2.1).
Methods on this class can be used to invoke the following endpoints:
- ``Get Objects`` (section 5.3)
- ``Add Objects`` (section 5.4)
- ``Get an Object`` (section 5.5)
- ``Get Object Manifests`` (section 5.6)
As obtained from an ApiRoot, an instance of this class shares connection(s)
with all other collections obtained from the same ApiRoot, as well as the
ApiRoot instance itself. Closing one will close them all. If this is
undesirable, you may manually create Collection instances.
"""
    def __init__(self, url, conn=None, user=None, password=None, verify=True,
                 proxies=None, collection_info=None, auth=None, cert=None):
        """
        Initialize a new Collection.  Either user/password or conn may be
        given, but not both.  The latter is intended for internal use, when
        sharing connection pools with an ApiRoot, mocking a connection for
        testing, etc.  Users should use user/password (if required) which will
        create a new connection.

        Args:
            url (str): A TAXII endpoint for a collection
            user (str): User name for authentication (optional)
            password (str): Password for authentication (optional)
            verify (bool): Either a boolean, in which case it controls whether
                we verify the server's TLS certificate, or a string, in which
                case it must be a path to a CA bundle to use. Defaults to
                `True` (optional)
            conn (_HTTPConnection): A connection to reuse (optional)
            collection_info: Collection metadata, if known in advance (optional)
            verify (bool): validate the entity credentials. (default: True)
            proxies (dict): key/value pair for http/https proxy settings.
                (optional)
            cert (str or tuple): SSL client certificate default, if String,
                path to ssl client cert file (.pem). If Tuple, (‘cert’, ‘key’)
                pair. (optional)
        """
        super(Collection, self).__init__(url, conn, user, password, verify, proxies, "2.1", auth=auth, cert=cert)

        self._loaded = False
        self.__raw = None

        # Since the API Root "Get Collections" endpoint returns information on
        # all collections as a list, it's possible that we can create multiple
        # Collection objects from a single HTTPS request, and not need to call
        # `refresh` for each one.
        if collection_info:
            self._populate_fields(**collection_info)
            self.__raw = collection_info
            self._loaded = True
    # Metadata accessors: each triggers a lazy fetch of the collection
    # resource on first use (via _ensure_loaded).

    @property
    def id(self):
        """Collection identifier (required by the spec)."""
        self._ensure_loaded()
        return self._id

    @property
    def title(self):
        """Human-readable collection title (required by the spec)."""
        self._ensure_loaded()
        return self._title

    @property
    def description(self):
        """Optional longer description of the collection."""
        self._ensure_loaded()
        return self._description

    @property
    def alias(self):
        """Optional human-friendly alias for the collection."""
        self._ensure_loaded()
        return self._alias

    @property
    def can_read(self):
        """Whether the requester may read objects from this collection."""
        self._ensure_loaded()
        return self._can_read

    @property
    def can_write(self):
        """Whether the requester may add objects to this collection."""
        self._ensure_loaded()
        return self._can_write

    @property
    def media_types(self):
        """Media types this collection can serve (may be empty)."""
        self._ensure_loaded()
        return self._media_types

    @property
    def custom_properties(self):
        """JSON members not defined by the TAXII spec, captured verbatim."""
        self._ensure_loaded()
        return self._custom_properties

    @property
    def objects_url(self):
        # The objects endpoint lives directly under the collection URL.
        return self.url + "objects/"

    @property
    def _raw(self):
        """Get the "raw" collection information response (parsed JSON)."""
        self._ensure_loaded()
        return self.__raw
    def _populate_fields(self, id=None, title=None, description=None, alias=None,
                         can_read=None, can_write=None, media_types=None,
                         **kwargs):
        """Assign Collection resource fields, then validate them."""
        self._id = id  # required
        self._title = title  # required
        self._description = description  # optional
        self._alias = alias  # optional
        self._can_read = can_read  # required
        self._can_write = can_write  # required
        self._media_types = media_types or []  # optional

        # Anything not captured by the optional arguments is treated as custom
        self._custom_properties = kwargs

        self._validate_collection()
    def _validate_collection(self):
        """Validates Collection information. Raises errors for required
        properties."""
        if not self._id:
            msg = "No 'id' in Collection for request '{}'"
            raise ValidationError(msg.format(self.url))

        if not self._title:
            msg = "No 'title' in Collection for request '{}'"
            raise ValidationError(msg.format(self.url))

        if self._can_read is None:
            msg = "No 'can_read' in Collection for request '{}'"
            raise ValidationError(msg.format(self.url))

        if self._can_write is None:
            msg = "No 'can_write' in Collection for request '{}'"
            raise ValidationError(msg.format(self.url))

        # Guard against a collection resource served from a mismatched URL.
        if self._id not in self.url:
            msg = "The collection '{}' does not match the url for queries '{}'"
            raise ValidationError(msg.format(self._id, self.url))
    def _ensure_loaded(self):
        # Fetch collection metadata from the server on first access only.
        if not self._loaded:
            self.refresh()

    def _verify_can_read(self):
        # Raise before issuing a request the server would reject anyway.
        if not self.can_read:
            msg = "Collection '{}' does not allow reading."
            raise AccessError(msg.format(self.url))

    def _verify_can_write(self):
        # Raise before issuing a request the server would reject anyway.
        if not self.can_write:
            msg = "Collection '{}' does not allow writing."
            raise AccessError(msg.format(self.url))
    def refresh(self, accept=MEDIA_TYPE_TAXII_V21):
        """Update Collection information"""
        response = self.__raw = self._conn.get(self.url, headers={"Accept": accept})
        self._populate_fields(**response)
        self._loaded = True
    def get_objects(self, accept=MEDIA_TYPE_TAXII_V21, **filter_kwargs):
        """Implement the ``Get Objects`` endpoint (section 5.3)"""
        self._verify_can_read()
        # Filter keyword arguments become URL query parameters.
        query_params = _filter_kwargs_to_query_params(filter_kwargs)
        return self._conn.get(self.objects_url, headers={"Accept": accept}, params=query_params)

    def get_object(self, obj_id, accept=MEDIA_TYPE_TAXII_V21, **filter_kwargs):
        """Implement the ``Get an Object`` endpoint (section 5.5)"""
        self._verify_can_read()
        url = self.objects_url + str(obj_id) + "/"
        query_params = _filter_kwargs_to_query_params(filter_kwargs)
        return self._conn.get(url, headers={"Accept": accept}, params=query_params)

    def delete_object(self, obj_id, accept=MEDIA_TYPE_TAXII_V21, **filter_kwargs):
        """Implement the ``Delete an Object`` endpoint (section 5.7)"""
        self._verify_can_write()
        url = self.objects_url + str(obj_id) + "/"
        query_params = _filter_kwargs_to_query_params(filter_kwargs)
        return self._conn.delete(url, headers={"Accept": accept}, params=query_params)

    def object_versions(self, obj_id, accept=MEDIA_TYPE_TAXII_V21, **filter_kwargs):
        """Implement the ``Get Object Versions`` endpoint (section 5.8)"""
        self._verify_can_read()
        url = self.objects_url + str(obj_id) + "/versions/"
        query_params = _filter_kwargs_to_query_params(filter_kwargs)
        return self._conn.get(url, headers={"Accept": accept}, params=query_params)
def add_objects(self, envelope, wait_for_completion=True, poll_interval=1,
                timeout=60, accept=MEDIA_TYPE_TAXII_V21,
                content_type=MEDIA_TYPE_TAXII_V21):
    """Implement the ``Add Objects`` endpoint (section 5.4)

    Add objects to the collection.  This may be performed either
    synchronously or asynchronously.  To add asynchronously, set
    wait_for_completion to False.  If False, the latter two args are
    unused.  If the caller wishes to monitor the status of the addition,
    it may do so in its own way.  To add synchronously, set
    wait_for_completion to True, and optionally set the poll and timeout
    intervals.  After initiating the addition, the caller will block,
    and the TAXII "status" service will be polled until the timeout
    expires, or the operation completes.

    Args:
        envelope: A TAXII envelope with the objects to add (string, dict,
            binary)
        wait_for_completion (bool): Whether to wait for the add operation
            to complete before returning
        poll_interval (int): If waiting for completion, how often to poll
            the status service (seconds)
        timeout (int): If waiting for completion, how long to poll until
            giving up (seconds).  Use <= 0 to wait forever
        accept (str): media type to include in the ``Accept:`` header.
        content_type (str): media type to include in the ``Content-Type:``
            header.

    Returns:
        If ``wait_for_completion`` is False, a Status object corresponding
        to the initial status data returned from the service, is returned.
        The status may not yet be complete at this point.

        If ``wait_for_completion`` is True, a Status object corresponding
        to the completed operation is returned if it didn't time out;
        otherwise a Status object corresponding to the most recent data
        obtained before the timeout, is returned.

    Raises:
        TypeError: if ``envelope`` is not a dict, text, or bytes.
    """
    self._verify_can_write()
    headers = {
        "Accept": accept,
        "Content-Type": content_type,
    }
    # Normalize the envelope to UTF-8 encoded bytes, whichever of the
    # three accepted forms the caller supplied.
    if isinstance(envelope, dict):
        json_text = json.dumps(envelope, ensure_ascii=False)
        data = json_text.encode("utf-8")
    elif isinstance(envelope, six.text_type):
        data = envelope.encode("utf-8")
    elif isinstance(envelope, six.binary_type):
        data = envelope
    else:
        raise TypeError("Don't know how to handle type '{}'".format(
            type(envelope).__name__))
    status_json = self._conn.post(self.objects_url, headers=headers, data=data)
    # The status resource lives at <api-root>/status/<id>/; build that URL
    # relative to this collection's URL (two path levels up).
    status_url = urlparse.urljoin(
        self.url,
        "../../status/{}".format(status_json["id"])
    )
    status = Status(url=status_url, conn=self._conn, status_info=status_json)
    if not wait_for_completion or status.status == "complete":
        return status
    # Block, polling the status endpoint until it reaches a final state
    # or the timeout elapses.
    status.wait_until_final(poll_interval, timeout)
    return status
def get_manifest(self, accept=MEDIA_TYPE_TAXII_V21, **filter_kwargs):
    """Implement the ``Get Object Manifests`` endpoint (section 5.6)."""
    self._verify_can_read()
    manifest_url = self.url + "manifest/"
    params = _filter_kwargs_to_query_params(filter_kwargs)
    return self._conn.get(manifest_url, headers={"Accept": accept}, params=params)
class ApiRoot(_TAXIIEndpoint):
    """Information about a TAXII API Root.

    This class corresponds to the ``Get API Root Information`` (section 4.2)
    and ``Get Collections`` (section 5.1) endpoints, and contains the
    information found in the corresponding ``API Root Resource``
    (section 4.2.1) and ``Collections Resource`` (section 5.1.1).

    As obtained from a Server, each ApiRoot instance gets its own connection
    pool(s). Collections returned by instances of this class share the same
    pools as the instance, so closing one closes all. Also, the same
    username/password is used to connect to them, as was used for this ApiRoot.
    If either of these is undesirable, Collection instances may be created
    manually.
    """

    def __init__(self, url, conn=None, user=None, password=None, verify=True,
                 proxies=None, auth=None, cert=None):
        """Create an API root resource endpoint.

        Args:
            url (str): URL of a TAXII API root resource endpoint
            user (str): username for authentication (optional)
            password (str): password for authentication (optional)
            conn (_HTTPConnection): reuse connection object, as an alternative
                to providing username/password
            verify (bool): validate the entity credentials. (default: True)
            proxies (dict): key/value pair for http/https proxy settings.
                (optional)
            cert (str or tuple): SSL client certificate default, if String,
                path to ssl client cert file (.pem). If Tuple, ('cert', 'key')
                pair. (optional)
        """
        super(ApiRoot, self).__init__(url, conn, user, password, verify, proxies, "2.1", auth=auth, cert=cert)
        # Information and the collection list are each fetched lazily,
        # on first property access.
        self._loaded_collections = False
        self._loaded_information = False
        self.__raw = None  # last parsed JSON from the information endpoint

    @property
    def collections(self):
        # List of Collection objects under this API root (lazily fetched).
        if not self._loaded_collections:
            self.refresh_collections()
        return self._collections

    @property
    def title(self):
        self._ensure_loaded_information()
        return self._title

    @property
    def description(self):
        self._ensure_loaded_information()
        return self._description

    @property
    def versions(self):
        self._ensure_loaded_information()
        return self._versions

    @property
    def max_content_length(self):
        self._ensure_loaded_information()
        return self._max_content_length

    @property
    def custom_properties(self):
        # Any response properties not defined by the TAXII spec.
        self._ensure_loaded_information()
        return self._custom_properties

    @property
    def _raw(self):
        """Get the "raw" API root information response (parsed JSON)."""
        self._ensure_loaded_information()
        return self.__raw

    def _ensure_loaded_information(self):
        # Fetch the API Root information on first access only.
        if not self._loaded_information:
            self.refresh_information()

    def _validate_api_root(self):
        """Validates API Root information. Raises errors for required
        properties."""
        if not self._title:
            msg = "No 'title' in API Root for request '{}'"
            raise ValidationError(msg.format(self.url))

        if not self._versions:
            msg = "No 'versions' in API Root for request '{}'"
            raise ValidationError(msg.format(self.url))

        if self._max_content_length is None:
            msg = "No 'max_content_length' in API Root for request '{}'"
            raise ValidationError(msg.format(self.url))

    def _populate_fields(self, title=None, description=None, versions=None,
                         max_content_length=None, **kwargs):
        # Copy fields out of a parsed API Root Resource response.
        self._title = title  # required
        self._description = description  # optional
        self._versions = versions or []  # required
        self._max_content_length = max_content_length  # required

        # Anything not captured by the optional arguments is treated as custom
        self._custom_properties = kwargs

        self._validate_api_root()

    def refresh(self, accept=MEDIA_TYPE_TAXII_V21):
        """Update the API Root's information and list of Collections"""
        self.refresh_information(accept)
        self.refresh_collections(accept)

    def refresh_information(self, accept=MEDIA_TYPE_TAXII_V21):
        """Update the properties of this API Root.

        This invokes the ``Get API Root Information`` endpoint.
        """
        response = self.__raw = self._conn.get(self.url,
                                               headers={"Accept": accept})
        self._populate_fields(**response)
        self._loaded_information = True

    def refresh_collections(self, accept=MEDIA_TYPE_TAXII_V21):
        """Update the list of Collections contained by this API Root.

        This invokes the ``Get Collections`` endpoint.
        """
        url = self.url + "collections/"
        response = self._conn.get(url, headers={"Accept": accept})

        self._collections = []
        for item in response.get("collections", []):  # optional
            collection_url = url + item["id"] + "/"
            # Seed each Collection with the metadata already returned, so it
            # does not need its own round trip; it shares this connection.
            collection = Collection(collection_url, conn=self._conn,
                                    collection_info=item)
            self._collections.append(collection)

        self._loaded_collections = True

    def get_status(self, status_id, accept=MEDIA_TYPE_TAXII_V21):
        # Implement the ``Get Status`` endpoint for this API Root.
        status_url = self.url + "status/" + status_id + "/"
        response = self._conn.get(status_url, headers={"Accept": accept})
        return Status(status_url, conn=self._conn, status_info=response)
class Server(_TAXIIEndpoint):
    """Information about a server hosting a Discovery service.

    This class corresponds to the Server Discovery endpoint (section 4.1) and
    the Discovery Resource returned from that endpoint (section 4.1.1).

    ApiRoot instances obtained from an instance of this class are
    created with the same username/password as was used in this instance. If
    that's incorrect, an ApiRoot instance may be created directly with the
    desired username and password. Also, they use separate connection pools
    so that they can be independent: closing one won't close others, and
    closing this server object won't close any of the ApiRoot objects (which
    may refer to different hosts than was used for discovery).
    """

    def __init__(self, url, conn=None, user=None, password=None, verify=True,
                 proxies=None, auth=None, cert=None):
        """Create a server discovery endpoint.

        Args:
            url (str): URL of a TAXII server discovery endpoint
            user (str): username for authentication (optional)
            password (str): password for authentication (optional)
            conn (_HTTPConnection): reuse connection object, as an alternative
                to providing username/password
            verify (bool): validate the entity credentials. (default: True)
            proxies (dict): key/value pair for http/https proxy settings.
                (optional)
            cert (str or tuple): SSL client certificate default, if String,
                path to ssl client cert file (.pem). If Tuple, ('cert', 'key')
                pair. (optional)
        """
        super(Server, self).__init__(url, conn, user, password, verify, proxies, "2.1", auth=auth, cert=cert)
        # Keep the credentials/settings so discovered ApiRoots can be built
        # with the same ones (see _populate_fields).
        self._user = user
        self._password = password
        self._verify = verify
        self._proxies = proxies
        self._loaded = False
        self.__raw = None  # last parsed discovery response
        self._auth = auth
        self._cert = cert

    @property
    def title(self):
        self._ensure_loaded()
        return self._title

    @property
    def description(self):
        self._ensure_loaded()
        return self._description

    @property
    def contact(self):
        self._ensure_loaded()
        return self._contact

    @property
    def default(self):
        # The server's default ApiRoot object, or None.
        self._ensure_loaded()
        return self._default

    @property
    def api_roots(self):
        self._ensure_loaded()
        return self._api_roots

    @property
    def custom_properties(self):
        # Any response properties not defined by the TAXII spec.
        self._ensure_loaded()
        return self._custom_properties

    @property
    def _raw(self):
        """Get the "raw" server discovery response (parsed JSON)."""
        self._ensure_loaded()
        return self.__raw

    def _ensure_loaded(self):
        # Fetch discovery information on first access only.
        if not self._loaded:
            self.refresh()

    def _validate_server(self):
        """Validates server information. Raises errors for required properties.
        """
        if not self._title:
            msg = "No 'title' in Server Discovery for request '{}'"
            raise ValidationError(msg.format(self.url))

    def _populate_fields(self, title=None, description=None, contact=None,
                         api_roots=None, default=None, **kwargs):
        # Copy fields out of a parsed Discovery Resource response.
        self._title = title  # required
        self._description = description  # optional
        self._contact = contact  # optional
        roots = api_roots or []  # optional
        # Build an ApiRoot for each advertised URL, reusing this server's
        # credentials and settings (see __init__).
        self._api_roots = [
            ApiRoot(urlparse.urljoin(self.url, url),
                    user=self._user,
                    password=self._password,
                    verify=self._verify,
                    proxies=self._proxies,
                    auth=self._auth,
                    cert=self._cert)
            for url in roots
        ]
        # If 'default' is one of the existing API Roots, reuse that object
        # rather than creating a duplicate. The TAXII 2.1 spec says that the
        # `default` API Root MUST be an item in `api_roots`.
        root_dict = dict(zip(roots, self._api_roots))
        self._default = root_dict.get(default)  # optional

        # Anything not captured by the optional arguments is treated as custom
        self._custom_properties = kwargs

        self._validate_server()

    def refresh(self):
        """Update the Server information and list of API Roots"""
        response = self.__raw = self._conn.get(self.url)
        self._populate_fields(**response)
        self._loaded = True
| {
"content_hash": "65647ec28fa95f05a8285c3a270a95ee",
"timestamp": "",
"source": "github",
"line_count": 752,
"max_line_length": 113,
"avg_line_length": 39.1875,
"alnum_prop": 0.602769011503614,
"repo_name": "oasis-open/cti-taxii-client",
"id": "72858df6357688a0bc74ba4e6891761057e49f2f",
"size": "29501",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "taxii2client/v21/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "166727"
}
],
"symlink_target": ""
} |
import os
import os.path
import sys
if len(sys.argv) == 2:
top = sys.argv[1]
else:
top = "."
for root, dirs, files in os.walk(top):
for filename in files:
path = os.path.join(root, filename)
if any(filename.endswith(ending) for ending in [".py", ".html", ".txt", ".css"]):
tabs = False
cr = False
trail = False
for line_num, line in enumerate(open(path)):
if "\t" in line:
tabs = line_num + 1
if "\r" in line:
cr = line_num + 1
if line.strip() and line.rstrip() != line.rstrip("\n\r"):
trail = line_num + 1
if tabs and cr and trail: # shortcut out if we all three
break
if tabs:
print "TABS in", path, "(last %s)" % tabs
if cr:
print "CR in", path, "(last %s)" % cr
if trail:
print "TRAIL in", path, "(last %s)" % trail
| {
"content_hash": "ccec1a744b6759d5c1e0cb3737c1a7d3",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 89,
"avg_line_length": 33.096774193548384,
"alnum_prop": 0.4512670565302144,
"repo_name": "amarandon/pinax",
"id": "84d5fda9df4cd660de408a9c497754155180b1b9",
"size": "1049",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/check-whitespace.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "43905"
},
{
"name": "HTML",
"bytes": "341984"
},
{
"name": "JavaScript",
"bytes": "19336"
},
{
"name": "Python",
"bytes": "537152"
},
{
"name": "Shell",
"bytes": "741"
}
],
"symlink_target": ""
} |
from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from ..._serialization import Serializer
from .._vendor import _convert_request, _format_url_section
T = TypeVar("T")
# Signature of the optional response-hook callable passed via ``cls=``.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

# Shared serializer; client-side validation is disabled for this generated client.
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(
    subscription_id: str, *, filter: str, select: Optional[str] = None, **kwargs: Any
) -> HttpRequest:
    """Build the GET request for the Activity Logs ``list`` operation."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop("api_version", _params.pop("api-version", "2015-04-01"))  # type: str
    accept = _headers.pop("Accept", "application/json")

    # URL: substitute the (validated) subscription id into the template.
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/providers/Microsoft.Insights/eventtypes/management/values",
    )  # pylint: disable=line-too-long
    _url = _format_url_section(
        _url,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
    )

    # Query parameters.
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    _params["$filter"] = _SERIALIZER.query("filter", filter, "str")
    if select is not None:
        _params["$select"] = _SERIALIZER.query("select", select, "str")

    # Headers.
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
class ActivityLogsOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~$(python-base-namespace).v2015_04_01.MonitorManagementClient`'s
        :attr:`activity_logs` attribute.
    """

    models = _models

    def __init__(self, *args, **kwargs):
        # Generated-code convention: client plumbing may be passed either
        # positionally or by keyword.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def list(self, filter: str, select: Optional[str] = None, **kwargs: Any) -> Iterable["_models.EventData"]:
        """Provides the list of records from the activity logs.

        :param filter: Reduces the set of data collected.:code:`<br>`This argument is required and it
         also requires at least the start date/time.:code:`<br>`The **$filter** argument is very
         restricted and allows only the following patterns.:code:`<br>`- *List events for a resource
         group*\ : $filter=eventTimestamp ge '2014-07-16T04:36:37.6407898Z' and eventTimestamp le
         '2014-07-20T04:36:37.6407898Z' and resourceGroupName eq 'resourceGroupName'.:code:`<br>`- *List
         events for resource*\ : $filter=eventTimestamp ge '2014-07-16T04:36:37.6407898Z' and
         eventTimestamp le '2014-07-20T04:36:37.6407898Z' and resourceUri eq 'resourceURI'.:code:`<br>`-
         *List events for a subscription in a time range*\ : $filter=eventTimestamp ge
         '2014-07-16T04:36:37.6407898Z' and eventTimestamp le
         '2014-07-20T04:36:37.6407898Z'.:code:`<br>`- *List events for a resource provider*\ :
         $filter=eventTimestamp ge '2014-07-16T04:36:37.6407898Z' and eventTimestamp le
         '2014-07-20T04:36:37.6407898Z' and resourceProvider eq 'resourceProviderName'.:code:`<br>`-
         *List events for a correlation Id*\ : $filter=eventTimestamp ge '2014-07-16T04:36:37.6407898Z'
         and eventTimestamp le '2014-07-20T04:36:37.6407898Z' and correlationId eq
         'correlationID'.:code:`<br>`:code:`<br>`\ **NOTE**\ : No other syntax is allowed. Required.
        :type filter: str
        :param select: Used to fetch events with only the given properties.:code:`<br>`The **$select**
         argument is a comma separated list of property names to be returned. Possible values are:
         *authorization*\ , *claims*\ , *correlationId*\ , *description*\ , *eventDataId*\ ,
         *eventName*\ , *eventTimestamp*\ , *httpRequest*\ , *level*\ , *operationId*\ ,
         *operationName*\ , *properties*\ , *resourceGroupName*\ , *resourceProviderName*\ ,
         *resourceId*\ , *status*\ , *submissionTimestamp*\ , *subStatus*\ , *subscriptionId*. Default
         value is None.
        :type select: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either EventData or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~$(python-base-namespace).v2015_04_01.models.EventData]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2015-04-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.EventDataCollection]

        # Map HTTP status codes to the exceptions callers should see.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: build the templated request; later pages: GET the
            # opaque nextLink URL returned by the service.
            if not next_link:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    select=select,
                    api_version=api_version,
                    template_url=self.list.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore

            else:
                request = HttpRequest("GET", next_link)
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # Deserialize one page and return (next_link, items) for ItemPaged.
            deserialized = self._deserialize("EventDataCollection", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch one page, raising a mapped error on non-200 responses.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Insights/eventtypes/management/values"}  # type: ignore
| {
"content_hash": "0163700eae067a2b08aa2ca4833f096b",
"timestamp": "",
"source": "github",
"line_count": 173,
"max_line_length": 136,
"avg_line_length": 47.72254335260116,
"alnum_prop": 0.6478924418604651,
"repo_name": "Azure/azure-sdk-for-python",
"id": "8e8230430e8261d8dbdd3bb1624cde9d22614a2f",
"size": "8756",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/monitor/azure-mgmt-monitor/azure/mgmt/monitor/v2015_04_01/operations/_activity_logs_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
from kafka.tools.protocol.responses import BaseResponse
class DescribeGroupsV1Response(BaseResponse):
    """Wire schema for the Kafka DescribeGroups API response, version 1."""

    schema = [
        {'name': 'throttle_time_ms', 'type': 'int32'},
        {'name': 'groups',  # one entry per requested group
         'type': 'array',
         'item_type': [
             {'name': 'error', 'type': 'int16'},
             {'name': 'group_id', 'type': 'string'},
             {'name': 'state', 'type': 'string'},
             {'name': 'protocol_type', 'type': 'string'},
             {'name': 'protocol', 'type': 'string'},
             {'name': 'members',  # one entry per group member
              'type': 'array',
              'item_type': [
                  {'name': 'member_id', 'type': 'string'},
                  {'name': 'client_id', 'type': 'string'},
                  {'name': 'client_host', 'type': 'string'},
                  {'name': 'member_metadata', 'type': 'bytes'},
                  {'name': 'member_assignment', 'type': 'bytes'},
              ]},
         ]},
    ]
| {
"content_hash": "48bc2d8e31b4d90b0f52bab5e46e3d4d",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 65,
"avg_line_length": 38.12,
"alnum_prop": 0.4281217208814271,
"repo_name": "toddpalino/kafka-tools",
"id": "1777d5a849b65c9813fa34313cdb65f0ebff31d8",
"size": "1739",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kafka/tools/protocol/responses/describe_groups_v1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "707729"
}
],
"symlink_target": ""
} |
import collections
import os
import signal
import pygame
import config
import debug
import logsupport
from utils import threadmanager, displayupdate, hw
from controlevents import CEvent, PostEvent, ConsoleEvent
from logsupport import ConsoleDetail, ConsoleWarning
from stores import valuestore
import time
# from sets import Set
# Registries filled in while the configuration is parsed; consumed by
# DumpDocumentation/LogParams below.
globdoc = {}  # global parameter name -> (type, default value)
moddoc = {}  # module name -> {'loc': local params, 'ovrd': overridable globals}
paramlog = []  # parameter report lines, emitted later by LogParams
exemplarobjs = collections.OrderedDict()  # exemplar name -> attribute list (register_example)
evntcnt = 0  # monotonically increasing touch-event sequence number
lastup = 0  # mtime of .ConsoleStart from the previous run (set in InitializeEnvironment)
previousup = 0  # previous run's uptime in seconds (set in InitializeEnvironment)
ts = None  # Touchscreen instance, created in InitializeEnvironment
ErroredConfigSections = []  # config sections that failed to parse (see MarkErr)
def MarkErr(section):
    """Record a configuration section that failed to parse."""
    # Appending mutates the module-level list in place; no rebinding, so no
    # ``global`` declaration is needed.
    ErroredConfigSections.append(section)
# next several lines stolen from https://stackoverflow.com/questions/39198961/pygame-init-fails-when-run-with-systemd
# this handles some weird random SIGHUP when initializing pygame, it's really a hack to work around it
# Not really sure what other ill effects this might have!!!
def handler(signum, frame):
    # Swallow the spurious SIGHUP; just record that it happened.
    logsupport.DevPrint('Systemd signal hack raised {} {}'.format(signum, repr(frame)))
    pass


try:
    signal.signal(signal.SIGHUP, handler)
except AttributeError:
    # Windows compatibility - signal.SIGHUP does not exist there
    pass
# end SIGHUP hack
def CheckPayload(payload, topic, tag, emptyok=False):
    """Return *payload*, substituting '{}' for an empty string.

    An empty payload is logged as a console warning unless *emptyok* is True.
    """
    if payload != '':
        return payload
    if not emptyok:
        logsupport.Logs.Log('Empty payload string at {} for topic {}'.format(tag, topic),
                            severity=ConsoleWarning)
    return '{}'
class clsstruct:
    """Node in the class-hierarchy graph built for documentation dumps.

    Attributes:
        name: the class name this node represents.
        members: resolved child ``clsstruct`` nodes (filled in later by
            DumpDocumentation from ``membernms``).
        membernms: names of classes registered as direct descendants.
    """

    def __init__(self, nm):
        self.name = nm
        self.members = []
        self.membernms = set()

    def addmem(self, nm):
        """Register *nm* as a descendant class name."""
        self.membernms.add(nm)

    def __repr__(self):
        # Debug aid only; not relied on by the documentation writers.
        return 'clsstruct({!r}, members={})'.format(self.name, sorted(self.membernms))
clslst = {}  # class name -> clsstruct node (class-hierarchy graph)
doclst = {}  # class name -> class docstring (may be None)
def register_example(estr, obj):
    """Record an exemplar object's attributes and class hierarchy for doc dumps.

    Stores dir(obj) under *estr* in ``exemplarobjs`` and links every class in
    the object's MRO into the ``clslst``/``doclst`` registries.  Repeated
    registration under the same name is a no-op.
    """
    if estr in exemplarobjs:
        return
    exemplarobjs[estr] = list(dir(obj))
    mro = list(obj.__class__.__mro__)
    mro.reverse()  # walk from ``object`` down toward the concrete class
    for i in range(len(mro)):
        t = mro[i]
        if t.__name__ not in clslst:
            doclst[t.__name__] = t.__doc__
            clslst[t.__name__] = clsstruct(t.__name__)
        # Every class later in the (reversed) MRO is a descendant of t.
        for e in mro[i + 1:]:
            clslst[t.__name__].addmem(e.__name__)
def LogParams():
    """Emit all recorded parameter report lines at console-detail severity."""
    # paramlog is only read here, so no ``global`` declaration is required.
    for entry in paramlog:
        logsupport.Logs.Log(entry, severity=ConsoleDetail)
def InitializeEnvironment():
    """Initialize the display, touch input, and uptime bookkeeping.

    Reads ``~/.Screentype`` for screen type / soft-rotation / touch modifier,
    starts the touchscreen helper thread, records uptime statistics from the
    previous run, and brings up the pygame display (with or without software
    rotation).
    """
    # this section is an unbelievable nasty hack - for some reason Pygame
    # needs a keyboardinterrupt to initialise in some limited circs (second time running)
    # lines below commented with HACK also part of workaround
    # see https://stackoverflow.com/questions/17035699/pygame-requires-keyboard-interrupt-to-init-display
    global lastup, previousup, ts

    class Alarm(Exception):
        # Raised by alarm_handler to break out of a hung pygame init (HACK).
        pass

    def alarm_handler(signum, frame):
        print('Hack alarm raised', signum, repr(frame))
        raise Alarm

    # end hack
    # .Screentype format: "<type>[,<softrotate>[,<touchmodifier>]]"
    try:
        with open("{}/.Screentype".format(config.sysStore.HomeDir)) as f:
            screeninfo = f.readline().rstrip('\n').split(',')
            scrntyp = screeninfo[0]
            softrotate = int(screeninfo[1]) if len(screeninfo) > 1 else 0
            touchmod = screeninfo[2] if len(screeninfo) > 2 else displayupdate.touchmodifier[softrotate]
            displayupdate.actualtouchmodifier = touchmod
    except IOError:
        scrntyp = "*Unknown*"
    hw.initOS(scrntyp, os.path.dirname(config.sysStore.configfile))
    config.sysStore.SetVal('PersonalSystem', os.path.isfile(config.sysStore.HomeDir + "/homesystem"))
    # Imported here rather than at module level - presumably to avoid an
    # import cycle or premature hardware access (TODO confirm).
    from touchhandler import Touchscreen, TS_PRESS, TS_RELEASE, TS_MOVE
    ts = Touchscreen(os.path.dirname(config.sysStore.configfile), touchmod)

    def touchhandler(event, touch):
        # Translate raw touch events into console events, tagged with a
        # monotonically increasing sequence number.
        global evntcnt
        evntcnt += 1
        slot = touch.slot
        if slot != 0: return  # no multitouch events for now
        p = (touch.x, touch.y)
        if event == TS_PRESS:
            debug.debugPrint('Touch', 'Press pos: {} seq: {}'.format(p, evntcnt))
            PostEvent(ConsoleEvent(CEvent.MouseDown, pos=p, seq=evntcnt, mtime=time.time()))  # eventfix
        elif event == TS_RELEASE:
            debug.debugPrint('Touch', 'Repease pos: {} seq: {}'.format(p, evntcnt))
            PostEvent(ConsoleEvent(CEvent.MouseUp, pos=p, seq=evntcnt, mtime=time.time()))
        elif event == TS_MOVE:
            debug.debugPrint('Touch', 'Motion pos: {} seq: {}'.format(p, evntcnt))
            PostEvent(ConsoleEvent(CEvent.MouseMotion, pos=p, seq=evntcnt, mtime=time.time()))

    def touchidle():
        # Posted periodically by the touch thread when nothing is happening.
        global evntcnt
        evntcnt += 1
        PostEvent(ConsoleEvent(CEvent.MouseIdle, pos=(0, 0), seq=evntcnt, mtime=time.time()))

    for touchtyp in ts.touches:
        touchtyp.on_press = touchhandler
        touchtyp.on_release = touchhandler
        touchtyp.on_move = touchhandler
        touchtyp.on_idle = touchidle
    threadmanager.SetUpHelperThread('TouchHandler', ts.run)
    # Compute the previous run's uptime from .ConsoleStart (two float lines:
    # setup start time and real start time; mtime = last update).
    try:
        lastup = os.path.getmtime("{}/.ConsoleStart".format(config.sysStore.HomeDir))
        with open("{}/.ConsoleStart".format(config.sysStore.HomeDir)) as f:
            laststart = float(f.readline())
            lastrealstart = float(f.readline())
        previousup = lastup - lastrealstart
        prevsetup = lastrealstart - laststart
    except (IOError, ValueError):
        # First run or corrupt file: mark the statistics as unknown.
        previousup = -1
        lastup = -1
        prevsetup = -1
    with open("{}/.RelLog".format(config.sysStore.HomeDir), "a") as f:
        f.write(
            str(config.sysStore.ConsoleStartTime) + ' ' + str(prevsetup) + ' ' + str(previousup) + ' ' + str(lastup) + ' '
            + str(config.sysStore.ConsoleStartTime - lastup) + '\n')
    # Guard the pygame display init with a 3 second alarm (see hack above).
    signal.signal(signal.SIGALRM, alarm_handler)  # HACK
    signal.alarm(3)  # HACK
    try:  # HACK
        if softrotate > 4:
            logsupport.Logs.Log("Ignoring bad soft rotation value: {}".format(softrotate),
                                severity=logsupport.ConsoleWarning)
            softrotate = 0
        if softrotate == 0:  # use hardware orientation/rotation
            hw.screen = pygame.display.set_mode((hw.screenwidth, hw.screenheight), pygame.FULLSCREEN)
        else:  # use software rotation
            hw.realscreen = pygame.display.set_mode((hw.screenwidth, hw.screenheight), pygame.FULLSCREEN)
            # Rotations 1 and 3 are 90/270 degrees, so swap the logical axes.
            if softrotate in (1, 3):
                hw.screenwidth, hw.screenheight = hw.screenheight, hw.screenwidth
            hw.screen = pygame.Surface((hw.screenwidth, hw.screenheight))
        displayupdate.initdisplayupdate(softrotate)
        if hw.screenwidth > hw.screenheight:
            displayupdate.portrait = False
        signal.alarm(0)  # HACK
    except Alarm:  # HACK
        raise KeyboardInterrupt  # HACK
    hw.screen.fill((0, 0, 0))  # clear screen
    pygame.display.update()
    pygame.mouse.set_visible(False)  # no cursor
def DumpDocumentation():
    """Write the parameter and class-structure documentation files.

    Produces /home/pi/Console/params.txt from the ``globdoc``/``moddoc``
    registries, and classstruct.txt / classstruct.md from the class graph
    built by register_example.
    """
    docfile = open('/home/pi/Console/params.txt', 'w')
    os.chmod('/home/pi/Console/params.txt', 0o555)
    docfile.write('Global Parameters:\n')
    for p in sorted(globdoc):
        # globdoc entries are (type, default value) pairs.
        docfile.write(
            ' {:32s}: {:8s} {}\n'.format(p, globdoc[p][0].__name__, str(globdoc[p][1])))
    docfile.write('Module Parameters:\n')
    for p in sorted(moddoc):
        docfile.write(' ' + p + '\n')
        docfile.write(' Local Parameters:\n')
        for q in sorted(moddoc[p]['loc']):
            docfile.write(' {:24s}: {:8s}\n'.format(q, moddoc[p]['loc'][q].__name__))
        docfile.write(' Overrideable Globals:\n')
        for q in sorted(moddoc[p]['ovrd']):
            docfile.write(' ' + q + '\n')
    docfile.close()

    docfile = open('/home/pi/Console/classstruct.txt', 'w')
    docfile.write('Class/Attribute Structure:\n')
    docfile.write('\n')
    mdfile = open('/home/pi/Console/classstruct.md', 'w')
    mdfile.write('# Class/Attribute Structure:\n')
    mdfile.write('\n')
    # For each exemplar, list only the attributes not already shown for an
    # earlier (ancestor) exemplar.
    varsinuse = {}
    olditems = []
    for i, scr in exemplarobjs.items():
        varsinuse[i] = [x for x in scr if not x.startswith('_') and x not in olditems]
        olditems += [x for x in scr if not x.startswith('_')]

    def scrublowers(ritem):
        # Remove children that are reachable through another child, so each
        # class appears at only one place in the printed tree.
        lower = []
        rtn = list(ritem.members)
        for mem in ritem.members:
            lower += scrublowers(mem)
        ritem.members = [xitem for xitem in ritem.members if xitem not in lower]
        return rtn

    def docwrite(ritem, ind, md):
        # Recursively render one node into both the text and markdown files.
        docfile.write(ind + ritem.name + ': [' + ', '.join([n2.name for n2 in ritem.members]) + ']\n')
        mdfile.write('\n' + md + ritem.name + ': [' + ', '.join([n2.name for n2 in ritem.members]) + ']\n')
        docfile.write(ind + (doclst[ritem.name] if not doclst[ritem.name] is None else "***missing***") + '\n')
        mdfile.write((doclst[ritem.name] if not doclst[ritem.name] is None else "\n***missing***\n") + '\n')
        if ritem.name in varsinuse:
            for v in varsinuse[ritem.name]:
                docfile.write(ind + ' ' + v + '\n')
                mdfile.write('* ' + v + '\n')
        for mem in ritem.members:
            docwrite(mem, ind + ' ', '##')

    # Resolve the name sets recorded by register_example into object links.
    for c in clslst.values():
        for n in c.membernms:
            c.members.append(clslst[n])
    r = clslst['object']
    scrublowers(r)
    docwrite(r, '', '#')
    docfile.close()
    mdfile.close()
import re
from datetime import timedelta
def get_timedelta(line):
    """Parse a loose natural-language duration into a number of seconds.

    Accepts strings such as ``'2 hours 30 minutes'``, ``'1 day'``, or a bare
    number of seconds (``'90'``).  ``None`` and the empty string yield 0.
    Months and years are approximated as 30 and 365 days respectively.

    Args:
        line: duration string (or None).

    Returns:
        int: total number of seconds represented by *line*; unit words with
        no preceding number are ignored (the original lazy regex raised
        ValueError on input like ``'minute'`` via ``int('')``).
    """
    if line is None or line == '':
        return 0
    if line.isdigit():
        line += ' seconds'  # a bare number means seconds
    timespaces = {"days": 0}
    for timeunit in "year month week day hour minute second".split():
        # Require at least one digit before the unit so int() never sees ''.
        content = re.findall(r"([0-9]+)\s*" + timeunit, line)
        if content:
            timespaces[timeunit + "s"] = int(content[0])
    # timedelta has no month/year units; fold them into days.
    timespaces["days"] += 30 * timespaces.pop("months", 0) + 365 * timespaces.pop("years", 0)
    td = timedelta(**timespaces)
    return td.days * 86400 + td.seconds
class Enumerate(object):
    """Simple string-valued enumeration.

    Each whitespace-separated token in *names* becomes an attribute whose
    value is the token itself, e.g. ``Enumerate('a b').a == 'a'``.
    """

    def __init__(self, names):
        # The original bound an unused index via enumerate(); only the name
        # is needed.
        for name in names.split():
            setattr(self, name, name)
def inputfileparam(param, reldir, defdir):
    """Resolve a config-file parameter to a path.

    An empty *param* falls back to *defdir* under *reldir*; an absolute
    *param* is returned untouched; anything else is taken relative to
    *reldir* (which gets a trailing slash if it lacks one).
    """
    base = reldir if reldir[-1] == '/' else reldir + '/'
    if param == '':
        return base + defdir
    if param[0] == '/':
        return param
    return base + param
# Remembers the last screen that produced a missing-variable warning so the
# same screen is not warned about repeatedly.
lastscreenname = '**'


def ExpandTextwitVars(txt, screenname='**'):
    """Expand ``store:var`` substitution tokens in a line (or list of lines).

    Each whitespace-delimited token containing ':' is looked up in the value
    store; missing variables expand to '' and are logged once per screen.
    List-valued variables are spliced in as multiple output lines.

    Returns a list of expanded lines.
    """
    global lastscreenname
    temptxt = [txt] if not isinstance(txt, list) else txt
    newtext = []
    for ln in temptxt:
        # Tokens are the substitution candidates; everything else is literal.
        tokens = [x for x in ln.split() if ':' in x]
        if tokens:
            # Replace each token with ':' so splitting on ':' yields the
            # literal fragments that surround the substituted values.
            lnreduced = ln
            for d in tokens: lnreduced = lnreduced.replace(d, ':')
            l1 = lnreduced.split(':')
            partialline = l1[0]
            for i, x in enumerate(tokens):
                val = valuestore.GetVal(x, failok=True)
                if val is None:
                    # Warn only once per screen for missing variables.
                    if screenname != lastscreenname:
                        logsupport.Logs.Log('Substitution var does not exist on screen {}: {}'.format(screenname, x),
                                            severity=ConsoleWarning)
                    val = ''
                    lastscreenname = screenname
                else:
                    lastscreenname = '**'
                if isinstance(val, list):
                    # Splice list values across output lines; the last element
                    # starts the continuation line.
                    # NOTE(review): for a 2-element list the second element
                    # appears to be dropped (the len > 2 guard) - confirm intent.
                    newtext.append(partialline + str(val[0]).rstrip('\n'))
                    for l2 in val[1:-1]:
                        newtext.append(str(l2).rstrip('\n'))
                    if len(val) > 2:
                        partialline = str(val[-1]).rstrip('\n') + l1[i + 1]
                else:
                    partialline = partialline + str(val).rstrip('\n') + l1[i + 1]
            newtext.append(partialline)
        else:
            newtext.append(ln)
    return newtext
mqttregistered = False
| {
"content_hash": "61a4a4007be78482091225e7eca101c8",
"timestamp": "",
"source": "github",
"line_count": 337,
"max_line_length": 117,
"avg_line_length": 30.42433234421365,
"alnum_prop": 0.6786306446893592,
"repo_name": "kevinkahn/softconsole",
"id": "f49e8ea38a9d37228e7b7ef96d92fc07011ce310",
"size": "10253",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utils/utilities.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Euphoria",
"bytes": "267"
},
{
"name": "Python",
"bytes": "839903"
},
{
"name": "Shell",
"bytes": "101927"
}
],
"symlink_target": ""
} |
from mock import Mock, patch
from purchasing.opportunities.models import Opportunity
from purchasing.data import contracts, flows, stages
from purchasing.data.contract_stages import ContractStage
from purchasing.users import models
from purchasing_test.factories import (
ContractBaseFactory, UserFactory, ContractPropertyFactory,
FlowFactory, StageFactory
)
from purchasing_test.unit.data.test_contract_base import ContractObjectTestBase
class TestContractTransition(ContractObjectTestBase):
    """Unit tests for ContractBase.transition stage movement."""

    def setUp(self):
        super(TestContractTransition, self).setUp()
        # A three-stage pipeline; both flows share the same stage order.
        self.stage1 = StageFactory.build(name='stage 1')
        self.stage2 = StageFactory.build(name='stage 2')
        self.stage3 = StageFactory.build(name='stage 3')
        self.flow1 = FlowFactory.build(
            flow_name='flow 1', stage_order=[self.stage1.id, self.stage2.id, self.stage3.id]
        )
        self.flow2 = FlowFactory.build(
            flow_name='flow 2', stage_order=[self.stage1.id, self.stage2.id, self.stage3.id]
        )
        self.user = UserFactory.build()
        self.active_contract.flow = self.flow1

    @patch('purchasing.data.contracts.ContractBase._transition_to_last')
    @patch('purchasing.data.contracts.ContractBase._transition_to_next')
    @patch('purchasing.data.contracts.ContractBase._transition_to_first')
    def test_transition(self, first, _next, last):
        '''Test that transition calls the right methods in the right circumstances
        '''
        # No current stage: transition must enter the first stage.
        self.assertTrue(self.active_contract.current_stage_id is None)
        self.active_contract.transition(self.user)
        self.assertTrue(first.called)

        # Mid-flow stages must advance to the next stage.
        self.active_contract.current_stage_id = self.stage1.id
        self.active_contract.transition(self.user)
        self.assertTrue(_next.called)

        self.active_contract.current_stage_id = self.stage2.id
        self.active_contract.transition(self.user)
        self.assertTrue(_next.called)

        # Final stage must take the "last" path.
        self.active_contract.current_stage_id = self.stage3.id
        self.active_contract.transition(self.user)
        self.assertTrue(last.called)

        # One first, two nexts, one last across the four transitions above.
        self.assertEquals(first.call_count, 1)
        self.assertEquals(_next.call_count, 2)
        self.assertEquals(last.call_count, 1)

    def test_transition_start(self):
        # Stub the DB lookup so the transition sees stage 1 as current.
        _get = Mock(return_value=ContractStage(stage=self.stage1))
        ContractStage.get_one = _get

        self.assertTrue(self.active_contract.current_stage_id is None)
        action = self.active_contract.transition(self.user)
        self.assertEquals(_get.call_count, 1)
        # Entering the flow produces a single 'entered' action.
        self.assertEquals(len(action), 1)
        self.assertEquals(action[0].action_type, 'entered')
        self.assertEquals(self.active_contract.current_stage_id, self.stage1.id)

    def test_transition_next(self):
        # First lookup returns the stage being exited, second the stage entered.
        _get = Mock()
        _get.side_effect = [ContractStage(stage=self.stage1), ContractStage(stage=self.stage2)]
        ContractStage.get_one = _get

        self.active_contract.current_stage_id = self.stage1.id
        self.active_contract.current_stage = self.stage1
        action = self.active_contract.transition(self.user)
        self.assertEquals(_get.call_count, 2)
        # A mid-flow transition exits one stage and enters the next.
        self.assertEquals(len(action), 2)
        self.assertEquals(action[0].action_type, 'exited')
        self.assertEquals(action[1].action_type, 'entered')
        self.assertEquals(self.active_contract.current_stage_id, self.stage2.id)

    @patch('purchasing.data.contracts.ContractBase.complete')
    def test_transition_last(self, complete):
        _get = Mock(return_value=ContractStage(stage=self.stage1))
        ContractStage.get_one = _get
        self.active_contract.parent = ContractBaseFactory.build(description='test')

        self.active_contract.current_stage_id = self.stage3.id
        self.active_contract.current_stage = self.stage3
        action = self.active_contract.transition(self.user)
        self.assertEquals(_get.call_count, 1)
        # Leaving the final stage only 'exits'; completion is delegated.
        self.assertEquals(len(action), 1)
        self.assertEquals(action[0].action_type, 'exited')
        self.assertTrue(complete.called_once)

    def test_transition_backward(self):
        # Reverting needs the span of stages between destination and current.
        _get = Mock(return_value=[ContractStage(stage=self.stage1), ContractStage(stage=self.stage2)])
        ContractStage.get_multiple = _get

        self.active_contract.current_stage_id = self.stage2.id
        self.active_contract.current_stage = self.stage2
        action = self.active_contract.transition(self.user, destination=self.stage1.id)
        self.assertEquals(_get.call_count, 1)
        self.assertEquals(len(action), 1)
        self.assertEquals(action[0].action_type, 'reversion')
        self.assertTrue(_get.called_once)
        self.assertEquals(self.active_contract.current_stage_id, self.stage1.id)
| {
"content_hash": "b2e405e500d92c77f1bac89d9c9f0517",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 102,
"avg_line_length": 42.473214285714285,
"alnum_prop": 0.694134959007778,
"repo_name": "ajb/pittsburgh-purchasing-suite",
"id": "b1e059608f7db0c78c8a0c241228efbae06f493a",
"size": "4782",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "purchasing_test/unit/data/test_contract_transitions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "22578"
},
{
"name": "HTML",
"bytes": "298440"
},
{
"name": "JavaScript",
"bytes": "14095"
},
{
"name": "Makefile",
"bytes": "199"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "521766"
},
{
"name": "Shell",
"bytes": "3018"
}
],
"symlink_target": ""
} |
import pytest
import io
from openregister.representations.json import reader
def test_reader_zero_items():
    """An empty stream yields no items at all."""
    empty_stream = io.StringIO("")
    with pytest.raises(StopIteration):
        next(reader(empty_stream))
def test_reader_one_item():
    """A one-element JSON array yields a single item, then stops."""
    stream = io.StringIO('[{"name":"one"}]')
    items = reader(stream)
    # Use the built-in next() rather than calling __next__() directly.
    assert next(items).json == '{"name":"one"}'
    # The iterator must be exhausted after the only element.
    with pytest.raises(StopIteration):
        next(items)
def test_reader_many_items():
    """Each element of a JSON array is yielded in document order."""
    stream = io.StringIO('[{"name":"one"},'
                         '{"name":"two"},'
                         '{"name":"three"}]')
    items = reader(stream)
    # Use the built-in next() rather than calling __next__() directly.
    assert next(items).json == '{"name":"one"}'
    assert next(items).json == '{"name":"two"}'
    assert next(items).json == '{"name":"three"}'
    with pytest.raises(StopIteration):
        next(items)
| {
"content_hash": "797bbe1e5c0fc566ba4e948f6da31b0c",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 56,
"avg_line_length": 25.818181818181817,
"alnum_prop": 0.5516431924882629,
"repo_name": "openregister/entry",
"id": "adb3962894ff78ab9e4ee4adae6b5e3190a74334",
"size": "852",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_reader_json.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "571"
},
{
"name": "Python",
"bytes": "59083"
}
],
"symlink_target": ""
} |
"""Task management utilities."""
__all__ = [
'Cancelled',
'Closed',
'CompletionQueue',
'Empty',
'Full',
'as_completed',
'get_all_tasks',
'get_current_task',
'joining',
'spawn',
'spawn_onto_stack',
]
import collections
import logging
from g1.asyncs.kernels import contexts
from g1.bases import classes
# Re-export errors.
from g1.asyncs.kernels.errors import Cancelled
from . import locks
LOG = logging.getLogger(__name__)
class Closed(Exception):
    """Raised when getting from or putting to a closed CompletionQueue."""
    pass
class Empty(Exception):
    """Raised by get_nonblocking when no completed task is available yet."""
    pass
class Full(Exception):
    """Raised by put_nonblocking/spawn when the queue is at capacity."""
    pass
class CompletionQueue:
    """Provide queue-like interface on waiting for task completion.

    NOTE: It does not support future objects; this simplifies its
    implementation, and thus may be more efficient.
    """

    def __init__(self, capacity=0, *, always_cancel=False, log_error=True):
        # capacity <= 0 means "unbounded" (see is_full).
        self.capacity = capacity
        self._always_cancel = always_cancel
        self._log_error = log_error
        # Unblocked whenever a task completes or the queue is closed.
        self._gate = locks.Gate()
        self._completed = collections.deque()
        self._uncompleted = set()
        self._closed = False

    __repr__ = classes.make_repr(
        '{state} capacity={self.capacity} '
        'uncompleted={uncompleted} completed={completed}',
        state=lambda self: 'closed' if self._closed else 'open',
        uncompleted=lambda self: len(self._uncompleted),
        completed=lambda self: len(self._completed),
    )

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, *_):
        """Ensure that the queued tasks cannot outlive a scope.

        This is similar to the ``joining`` context manager, but applies
        to all remaining tasks.  Also, the queue is closed on exit.
        """
        # Take ownership of every remaining task (non-graceful close).
        tasks = self.close(graceful=False)
        if exc_type or self._always_cancel:
            for task in tasks:
                task.cancel()
        for task in tasks:
            await task.join()
            if self._log_error:
                _log_task_error(task)

    def is_full(self):
        """True if number of uncompleted tasks no less than capacity."""
        return 0 < self.capacity <= len(self._uncompleted)

    def is_closed(self):
        """True if the queue no longer accepts new tasks."""
        return self._closed

    def __bool__(self):
        # Truthy while any task, completed or not, is still queued.
        return bool(self._completed) or bool(self._uncompleted)

    def __len__(self):
        return len(self._completed) + len(self._uncompleted)

    def __aiter__(self):
        return self

    async def __anext__(self):
        # Async iteration yields tasks as they complete and ends once the
        # queue is closed and drained.
        try:
            return await self.get()
        except Closed:
            raise StopAsyncIteration

    def close(self, graceful=True):
        """Close the queue and return the tasks it relinquishes.

        With graceful=True the queued tasks remain retrievable from the
        queue and an empty list is returned; otherwise all tasks are
        handed back to the caller and the queue is emptied.
        """
        if graceful:
            tasks = []
        else:
            tasks = list(self._completed)
            tasks.extend(self._uncompleted)
            self._completed.clear()
            self._uncompleted.clear()
        self._closed = True
        # Wake every waiter so it can observe the closed state.
        self._gate.unblock()
        return tasks

    async def gettable(self):
        """Block until a completed task is available or the queue is done."""
        while not self._completed and (self._uncompleted or not self._closed):
            await self._gate.wait()

    async def get(self):
        """Wait for and return the next completed task; raise Closed at end."""
        await self.gettable()
        return self.get_nonblocking()

    def get_nonblocking(self):
        """Return a completed task now, or raise Empty/Closed."""
        if self._completed:
            return self._completed.popleft()
        elif self._uncompleted or not self._closed:
            # Tasks may still complete later; the caller should retry.
            raise Empty
        else:
            raise Closed

    async def puttable(self):
        """Block until there is capacity or the queue is closed."""
        while not self._closed and self.is_full():
            await self._gate.wait()

    async def put(self, task):
        """Wait for capacity, then enqueue *task*."""
        await self.puttable()
        return self.put_nonblocking(task)

    def put_nonblocking(self, task):
        """Enqueue *task* now, or raise Closed/Full."""
        if self._closed:
            raise Closed
        if self.is_full():
            raise Full
        self._uncompleted.add(task)
        # Completion moves the task into the completed deque.
        task.add_callback(self._on_completion)

    def spawn(self, awaitable):
        """Spawn and put task to the queue.

        This is equivalent to spawn-then-put, but is better that, if
        ``put`` will fail, no task is spawned.
        """
        if self._closed:
            raise Closed
        if self.is_full():
            raise Full
        # NOTE: this calls the module-level spawn(), not this method.
        task = spawn(awaitable)
        try:
            self.put_nonblocking(task)
        except BaseException:
            # This should never happen...
            LOG.critical('put should never fail here: %r, %r', self, task)
            task.cancel()
            raise
        return task

    def _on_completion(self, task):
        # Completions after a non-graceful close find the sets empty and
        # are ignored.
        if self._uncompleted:
            self._uncompleted.remove(task)
            self._completed.append(task)
        self._gate.unblock()
async def as_completed(tasks):
    """Yield each task from *tasks* as it completes, in completion order."""
    completed = collections.deque()
    gate = locks.Gate()
    num_tasks = 0
    for task in tasks:
        # On completion, queue the task and wake the consumer loop below.
        task.add_callback(lambda t: (completed.append(t), gate.unblock()))
        num_tasks += 1
    while num_tasks > 0:
        while not completed:
            await gate.wait()
        yield completed.popleft()
        num_tasks -= 1
def _log_task_error(task):
    """Log the task's uncaught exception, if it raised one."""
    exc = task.get_exception_nonblocking()
    if not exc:
        return
    if isinstance(exc, Cancelled):
        # Cancellation is routine; keep it at debug level.
        LOG.debug('task is cancelled: %r', task, exc_info=exc)
        return
    LOG.error('task error: %r', task, exc_info=exc)
class joining:
    """Ensure that the given task cannot outlive a scope.

    On exiting the scope the task is cancelled if the nested code raised
    (including BaseException such as task cancellation) or if
    always_cancel was set.  The task is then joined, and its error, if
    any, is logged when log_error is set (the default).
    """

    def __init__(self, task, *, always_cancel=False, log_error=True):
        self._task = task
        self._always_cancel = always_cancel
        self._log_error = log_error

    async def __aenter__(self):
        return self._task

    async def __aexit__(self, exc_type, *_):
        should_cancel = self._always_cancel or bool(exc_type)
        if should_cancel:
            self._task.cancel()
        await self._task.join()
        if self._log_error:
            _log_task_error(self._task)
def spawn(awaitable):
    """Spawn *awaitable* as a new task on the current kernel."""
    kernel = contexts.get_kernel()
    return kernel.spawn(awaitable)
def spawn_onto_stack(awaitable, stack, **kwargs):
    """Spawn a task and tie its lifetime to an async exit stack.

    Extra keyword arguments are forwarded to ``joining``.
    """
    task = spawn(awaitable)
    guard = joining(task, **kwargs)
    stack.push_async_exit(guard.__aexit__)
    return task
def get_all_tasks():
    """Return all tasks, or an empty list outside a kernel context."""
    # You may call this out of a kernel context.
    kernel = contexts.get_kernel(None)
    if not kernel:
        return []
    return kernel.get_all_tasks()
def get_current_task():
    """Return the running task, or None outside a kernel context."""
    # You may call this out of a kernel context.
    kernel = contexts.get_kernel(None)
    if not kernel:
        return None
    return kernel.get_current_task()
| {
"content_hash": "a4335d891ca41f844c1fd061fc9c78fb",
"timestamp": "",
"source": "github",
"line_count": 247,
"max_line_length": 78,
"avg_line_length": 26.967611336032387,
"alnum_prop": 0.5948055847470349,
"repo_name": "clchiou/garage",
"id": "e25b0476f588aaad081794e172b8efc7b8681959",
"size": "6661",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/g1/asyncs/bases/g1/asyncs/bases/tasks.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cap'n Proto",
"bytes": "6917"
},
{
"name": "HTML",
"bytes": "113"
},
{
"name": "Java",
"bytes": "61027"
},
{
"name": "Python",
"bytes": "1653733"
},
{
"name": "Shell",
"bytes": "6209"
}
],
"symlink_target": ""
} |
from version_info.get_version import *
| {
"content_hash": "e2c4499ada6fc0e06a5ffbf731c69c41",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 38,
"avg_line_length": 39,
"alnum_prop": 0.7948717948717948,
"repo_name": "TyMaszWeb/python-version-info",
"id": "ed2244a21b482a40be520d1e597dca9ef69a7807",
"size": "39",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "version_info/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3427"
}
],
"symlink_target": ""
} |
from distutils.core import setup

# Single source of truth for the release number: previously it was
# duplicated in both `version` and `download_url`, which risked the two
# drifting apart on a release bump.
VERSION = '0.1.6.6'

try:
    import pypandoc
    # PyPI renders reST; convert the Markdown README when pypandoc is around.
    long_description = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
    long_description = ''

setup(
    name='internal_links',
    version=VERSION,
    description='SEO tool for adding links to text contained in Django apps databases.',
    author=u'Piotr Lizończyk',
    author_email='piotr.lizonczyk@gmail.com',
    url='https://github.com/deployed/internal_links',
    download_url='https://github.com/deployed/internal_links/tarball/' + VERSION,
    keywords=['SEO'],
    classifiers=['Development Status :: 2 - Pre-Alpha',
                 'Framework :: Django',
                 'License :: OSI Approved :: MIT License',
                 'Programming Language :: Python :: 2.7'],
    requires=['django'],
    packages=['internal_links',
              'internal_links.management',
              'internal_links.management.commands'],
    long_description=long_description,
    obsoletes=['permalink_adder'],
)
| {
"content_hash": "887f3125dcdd88c674b4606c8b96e9d7",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 88,
"avg_line_length": 35.535714285714285,
"alnum_prop": 0.6381909547738693,
"repo_name": "deployed/internal_links",
"id": "e1a6427e80e1dfd904ffbcd23d6d47ed8042c02e",
"size": "1011",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7028"
}
],
"symlink_target": ""
} |
"""
Visualization of named colors.
Simple plot example with the named colors and its visual representation.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import colors
colors_ = list(six.iteritems(colors.cnames))
# Add the single letter colors.
for name, rgb in six.iteritems(colors.ColorConverter.colors):
hex_ = colors.rgb2hex(rgb)
colors_.append((name, hex_))
# Transform to hex color values.
hex_ = [color[1] for color in colors_]
# Get the rgb equivalent.
rgb = [colors.hex2color(color) for color in hex_]
# Get the hsv equivalent.
hsv = [colors.rgb_to_hsv(color) for color in rgb]
# Split the hsv values to sort.
hue = [color[0] for color in hsv]
sat = [color[1] for color in hsv]
val = [color[2] for color in hsv]
# Sort by hue, saturation and value.
ind = np.lexsort((val, sat, hue))
sorted_colors = [colors_[i] for i in ind]
n = len(sorted_colors)
ncols = 4
nrows = int(np.ceil(1. * n / ncols))
fig, ax = plt.subplots()
X, Y = fig.get_dpi() * fig.get_size_inches()
# row height
h = Y / (nrows + 1)
# col width
w = X / ncols
for i, (name, color) in enumerate(sorted_colors):
col = i % ncols
row = int(i / ncols)
y = Y - (row * h) - h
xi_line = w * (col + 0.05)
xf_line = w * (col + 0.25)
xi_text = w * (col + 0.3)
ax.text(xi_text, y, name, fontsize=(h * 0.8),
horizontalalignment='left',
verticalalignment='center')
# Add extra black line a little bit thicker to make
# clear colors more visible.
ax.hlines(y, xi_line, xf_line, color='black', linewidth=(h * 0.7))
ax.hlines(y + h * 0.1, xi_line, xf_line, color=color, linewidth=(h * 0.6))
ax.set_xlim(0, X)
ax.set_ylim(0, Y)
ax.set_axis_off()
fig.subplots_adjust(left=0, right=1,
top=1, bottom=0,
hspace=0, wspace=0)
plt.show()
| {
"content_hash": "f96387aa800f352e7e949eb932270c2c",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 78,
"avg_line_length": 25.21794871794872,
"alnum_prop": 0.6298932384341637,
"repo_name": "bundgus/python-playground",
"id": "dde34a80c97433d50bde3030a568e60cb3fd0bdc",
"size": "1967",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "matplotlib-playground/examples/color/named_colors.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "68"
},
{
"name": "Gherkin",
"bytes": "897"
},
{
"name": "HTML",
"bytes": "22309040"
},
{
"name": "Jupyter Notebook",
"bytes": "666681"
},
{
"name": "Python",
"bytes": "1046557"
},
{
"name": "Thrift",
"bytes": "58"
}
],
"symlink_target": ""
} |
"""Support for MQTT sensors."""
from __future__ import annotations
from datetime import timedelta
import functools
import logging
import voluptuous as vol
from homeassistant.components import sensor
from homeassistant.components.sensor import (
DEVICE_CLASSES_SCHEMA,
STATE_CLASSES_SCHEMA,
SensorEntity,
)
from homeassistant.const import (
CONF_DEVICE_CLASS,
CONF_FORCE_UPDATE,
CONF_NAME,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
from . import CONF_QOS, CONF_STATE_TOPIC, DOMAIN, PLATFORMS, subscription
from .. import mqtt
from .debug_info import log_messages
from .mixins import (
MQTT_ENTITY_COMMON_SCHEMA,
MqttAvailability,
MqttEntity,
async_setup_entry_helper,
)
_LOGGER = logging.getLogger(__name__)

# Platform-specific configuration keys.
CONF_EXPIRE_AFTER = "expire_after"
CONF_LAST_RESET_TOPIC = "last_reset_topic"
CONF_LAST_RESET_VALUE_TEMPLATE = "last_reset_value_template"
CONF_STATE_CLASS = "state_class"

# Attributes that a JSON-attributes payload must not override; consumed via
# MqttSensor._attributes_extra_blocked.
MQTT_SENSOR_ATTRIBUTES_BLOCKED = frozenset(
    {
        sensor.ATTR_LAST_RESET,
        sensor.ATTR_STATE_CLASS,
    }
)

DEFAULT_NAME = "MQTT Sensor"
DEFAULT_FORCE_UPDATE = False

# Read-only MQTT platform schema extended with the sensor-specific options.
PLATFORM_SCHEMA = vol.All(
    # Deprecated, remove in Home Assistant 2021.11
    cv.deprecated(CONF_LAST_RESET_TOPIC),
    cv.deprecated(CONF_LAST_RESET_VALUE_TEMPLATE),
    mqtt.MQTT_RO_PLATFORM_SCHEMA.extend(
        {
            vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
            vol.Optional(CONF_EXPIRE_AFTER): cv.positive_int,
            vol.Optional(CONF_FORCE_UPDATE, default=DEFAULT_FORCE_UPDATE): cv.boolean,
            vol.Optional(CONF_LAST_RESET_TOPIC): mqtt.valid_subscribe_topic,
            vol.Optional(CONF_LAST_RESET_VALUE_TEMPLATE): cv.template,
            vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
            vol.Optional(CONF_STATE_CLASS): STATE_CLASSES_SCHEMA,
            vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
        }
    ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema),
)
async def async_setup_platform(
    hass: HomeAssistant, config: ConfigType, async_add_entities, discovery_info=None
):
    """Set up MQTT sensors through configuration.yaml."""
    # Register the reload service so YAML-configured entities can be
    # reloaded without restarting Home Assistant.
    await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
    await _async_setup_entity(hass, async_add_entities, config)
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up MQTT sensors dynamically through MQTT discovery."""
    setup = functools.partial(
        _async_setup_entity, hass, async_add_entities, config_entry=config_entry
    )
    # Discovered configs are validated against PLATFORM_SCHEMA before setup.
    await async_setup_entry_helper(hass, sensor.DOMAIN, setup, PLATFORM_SCHEMA)
async def _async_setup_entity(
    hass, async_add_entities, config: ConfigType, config_entry=None, discovery_data=None
):
    """Set up MQTT sensor."""
    entity = MqttSensor(hass, config, config_entry, discovery_data)
    async_add_entities([entity])
class MqttSensor(MqttEntity, SensorEntity):
    """Representation of a sensor that can be updated using MQTT."""

    _attr_last_reset = None
    # Attributes that a JSON-attributes payload must not override.
    _attributes_extra_blocked = MQTT_SENSOR_ATTRIBUTES_BLOCKED

    def __init__(self, hass, config, config_entry, discovery_data):
        """Initialize the sensor."""
        self._state = None
        # Cancel handle for the pending expiration timer, if any.
        self._expiration_trigger = None

        expire_after = config.get(CONF_EXPIRE_AFTER)
        # With expiry configured, start out expired (unavailable) until the
        # first message arrives; None means expiry is disabled entirely.
        if expire_after is not None and expire_after > 0:
            self._expired = True
        else:
            self._expired = None

        MqttEntity.__init__(self, hass, config, config_entry, discovery_data)

    @staticmethod
    def config_schema():
        """Return the config schema."""
        return PLATFORM_SCHEMA

    def _setup_from_config(self, config):
        """(Re)Setup the entity."""
        # Bind templates to hass so they can render.
        template = self._config.get(CONF_VALUE_TEMPLATE)
        if template is not None:
            template.hass = self.hass
        last_reset_template = self._config.get(CONF_LAST_RESET_VALUE_TEMPLATE)
        if last_reset_template is not None:
            last_reset_template.hass = self.hass

    async def _subscribe_topics(self):
        """(Re)Subscribe to topics."""
        topics = {}

        @callback
        @log_messages(self.hass, self.entity_id)
        def message_received(msg):
            """Handle new MQTT messages."""
            payload = msg.payload

            # auto-expire enabled?
            expire_after = self._config.get(CONF_EXPIRE_AFTER)
            if expire_after is not None and expire_after > 0:
                # When expire_after is set, and we receive a message, assume device is not expired since it has to be to receive the message
                self._expired = False

                # Reset old trigger
                if self._expiration_trigger:
                    self._expiration_trigger()
                    self._expiration_trigger = None

                # Set new trigger
                expiration_at = dt_util.utcnow() + timedelta(seconds=expire_after)

                self._expiration_trigger = async_track_point_in_utc_time(
                    self.hass, self._value_is_expired, expiration_at
                )

            # Render the payload through the value template, if configured.
            template = self._config.get(CONF_VALUE_TEMPLATE)
            if template is not None:
                variables = {"entity_id": self.entity_id}
                payload = template.async_render_with_possible_json_value(
                    payload,
                    self._state,
                    variables=variables,
                )
            self._state = payload
            self.async_write_ha_state()

        topics["state_topic"] = {
            "topic": self._config[CONF_STATE_TOPIC],
            "msg_callback": message_received,
            "qos": self._config[CONF_QOS],
        }

        @callback
        @log_messages(self.hass, self.entity_id)
        def last_reset_message_received(msg):
            """Handle new last_reset messages."""
            payload = msg.payload

            template = self._config.get(CONF_LAST_RESET_VALUE_TEMPLATE)
            if template is not None:
                variables = {"entity_id": self.entity_id}
                payload = template.async_render_with_possible_json_value(
                    payload,
                    self._state,
                    variables=variables,
                )

            if not payload:
                _LOGGER.debug("Ignoring empty last_reset message from '%s'", msg.topic)
                return
            try:
                # Only accept a parseable ISO datetime as last_reset.
                last_reset = dt_util.parse_datetime(payload)
                if last_reset is None:
                    raise ValueError
                self._attr_last_reset = last_reset
            except ValueError:
                _LOGGER.warning(
                    "Invalid last_reset message '%s' from '%s'", msg.payload, msg.topic
                )
            self.async_write_ha_state()

        # last_reset topic is optional (and deprecated — see PLATFORM_SCHEMA).
        if CONF_LAST_RESET_TOPIC in self._config:
            topics["last_reset_topic"] = {
                "topic": self._config[CONF_LAST_RESET_TOPIC],
                "msg_callback": last_reset_message_received,
                "qos": self._config[CONF_QOS],
            }

        self._sub_state = await subscription.async_subscribe_topics(
            self.hass, self._sub_state, topics
        )

    @callback
    def _value_is_expired(self, *_):
        """Triggered when value is expired."""
        self._expiration_trigger = None
        self._expired = True

        self.async_write_ha_state()

    @property
    def native_unit_of_measurement(self):
        """Return the unit this state is expressed in."""
        return self._config.get(CONF_UNIT_OF_MEASUREMENT)

    @property
    def force_update(self):
        """Force update."""
        return self._config[CONF_FORCE_UPDATE]

    @property
    def native_value(self):
        """Return the state of the entity."""
        return self._state

    @property
    def device_class(self) -> str | None:
        """Return the device class of the sensor."""
        return self._config.get(CONF_DEVICE_CLASS)

    @property
    def state_class(self) -> str | None:
        """Return the state class of the sensor."""
        return self._config.get(CONF_STATE_CLASS)

    @property
    def available(self) -> bool:
        """Return true if the device is available and value has not expired."""
        expire_after = self._config.get(CONF_EXPIRE_AFTER)
        # mypy doesn't know about fget: https://github.com/python/mypy/issues/6185
        return MqttAvailability.available.fget(self) and (  # type: ignore[attr-defined]
            expire_after is None or not self._expired
        )
| {
"content_hash": "30809612f1a30ca6ea69304f351b5a87",
"timestamp": "",
"source": "github",
"line_count": 253,
"max_line_length": 140,
"avg_line_length": 35.17786561264822,
"alnum_prop": 0.6177528089887641,
"repo_name": "sander76/home-assistant",
"id": "16c19c8fc51b746916ee43cd289e8db7c1dd65da",
"size": "8900",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/mqtt/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "36548768"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
} |
import pytest
from pyrsistent import v
from katana.utils import Node, Pair, prepare
from katana.compound import sequence, group, repeat, option, maybe
from katana.term import term
# Shared fixtures: three terminal parsers and three matching data nodes.
Ta, Tb, Tc = [term(k) for k in 'abc']
Na, Nb, Nc = [Node(k, 'data') for k in 'abc']
def test_sequence():
    """A sequence of terms consumes matching nodes in order."""
    parser = sequence(Ta, Tb)
    state = prepare([Na, Nb])
    assert parser(state) == Pair([Na, Nb], [])
def test_group():
    """group() wraps the consumed nodes in a Node keyed by the parser."""
    g = group(Ta)
    result = g(prepare([Na]))
    assert result == Pair([Node(g, [Na])], [])
def test_group_with_prefix():
    """Nodes already consumed stay in front of the new group node."""
    g = group(Ta)
    result = g(Pair(v(Nb), v(Na)))
    assert result == Pair([Nb, Node(g, [Na])], [])
def test_repeat():
    """repeat() greedily consumes every matching node."""
    nodes = [Na] * 10
    parser = repeat(Ta)
    assert parser(prepare(nodes)) == Pair(nodes, [])
def test_option():
    """option() accepts a node matching any of its terms."""
    parser = option(Ta, Tb, Tc)
    for node in (Na, Nb):
        assert parser(prepare([node])) == Pair([node], [])
def test_option_empty():
    """option() raises ValueError for a node matching none of its terms."""
    # The previous stray `assert` inside the raises-block was meaningless:
    # if the call raises (the expected outcome) the assert never runs.
    with pytest.raises(ValueError):
        option(Ta, Tb)(prepare([Nc]))
def test_maybe():
    """maybe() consumes a matching node and skips a non-matching one."""
    parser = maybe(Ta)
    no_match = parser(prepare([Nb]))
    match = parser(prepare([Na]))
    assert no_match == Pair([], [Nb])
    assert match == Pair([Na], [])
| {
"content_hash": "876ebde3b07238d34877abee7d174c30",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 66,
"avg_line_length": 21.563636363636363,
"alnum_prop": 0.5750421585160203,
"repo_name": "eugene-eeo/katana",
"id": "9280cd94fab9f39f8221dc8ea02949c5ec56118e",
"size": "1186",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_compound.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6320"
}
],
"symlink_target": ""
} |
import bpy
import sys
import os
from bpy.types import AddonPreferences
from bpy.props import CollectionProperty, BoolProperty, StringProperty, FloatProperty
from bpy.props import IntProperty, PointerProperty, EnumProperty, FloatVectorProperty
from .rfb_utils import envconfig_utils
from . import rfb_logger
from . import rfb_icons
class RendermanPreferencePath(bpy.types.PropertyGroup):
    """A single directory-path entry, usable in a CollectionProperty."""

    path: StringProperty(name="", subtype='DIR_PATH')
class RendermanDeviceDesc(bpy.types.PropertyGroup):
    """Describes one XPU render device (CPU or GPU) and whether to use it."""

    name: StringProperty(name="", default="")
    # Device index as reported by the rman gpgpu API; -1 means unassigned.
    id: IntProperty(default=-1)
    # Version numbers taken from the GPU descriptor (desc.major/minor);
    # left at 0 for CPU devices.
    version_major: IntProperty(default=0)
    version_minor: IntProperty(default=0)
    use: BoolProperty(name="Use", default=False)
class RendermanPreferences(AddonPreferences):
bl_idname = __package__
def find_xpu_cpu_devices(self):
# for now, there's only one CPU
if len(self.rman_xpu_cpu_devices) < 1:
device = self.rman_xpu_cpu_devices.add()
device.name = "CPU 0"
device.id = 0
device.use = True
def find_xpu_gpu_devices(self):
try:
import rman
count = rman.pxrcore.GetGpgpuCount(rman.pxrcore.k_cuda)
gpu_device_names = list()
# try and add ones that we don't know about
for i in range(count):
desc = rman.pxrcore.GpgpuDescriptor()
rman.pxrcore.GetGpgpuDescriptor(rman.pxrcore.k_cuda, i, desc)
gpu_device_names.append(desc.name)
found = False
for device in self.rman_xpu_gpu_devices:
if device.name == desc.name:
found = True
break
if not found:
device = self.rman_xpu_gpu_devices.add()
device.name = desc.name
device.version_major = desc.major
device.version_minor = desc.minor
device.id = i
if len(self.rman_xpu_gpu_devices) == 1:
# always use the first one, if this is our first time adding
# gpus
device.use = True
# now, try and remove devices that no longer exist
name_list = [device.name for device in self.rman_xpu_gpu_devices]
for nm in name_list:
if nm not in gpu_device_names:
self.rman_xpu_gpu_devices.remove(self.rman_xpu_gpu_devices.find(nm))
except Exception as e:
rfb_logger.rfb_log().debug("Exception when getting GPU devices: %s" % str(e))
pass
def find_xpu_devices(self):
self.find_xpu_cpu_devices()
self.find_xpu_gpu_devices()
# find the renderman options installed
def find_installed_rendermans(self, context):
options = [('NEWEST', 'Newest Version Installed',
'Automatically updates when new version installed. NB: If an RMANTREE environment variable is set, this will always take precedence.')]
for vers, path in envconfig_utils.get_installed_rendermans():
options.append((path, vers, path))
return options
rman_xpu_cpu_devices: bpy.props.CollectionProperty(type=RendermanDeviceDesc)
rman_xpu_gpu_devices: bpy.props.CollectionProperty(type=RendermanDeviceDesc)
def fill_gpu_devices(self, context):
items = []
items.append(('-1', 'None', ''))
for device in self.rman_xpu_gpu_devices:
items.append(('%d' % device.id, '%s (%d.%d)' % (device.name, device.version_major, device.version_minor), ''))
return items
rman_xpu_gpu_selection: EnumProperty(name="GPU Device",
items=fill_gpu_devices
)
rman_xpu_device: EnumProperty(name="Devices",
description="Select category",
items=[
("CPU", "CPU", ""),
("GPU", "GPU", "")
]
)
rmantree_choice: EnumProperty(
name='RenderMan Version to use',
description='Leaving as "Newest" will automatically update when you install a new RenderMan version',
# default='NEWEST',
items=find_installed_rendermans
)
rmantree_method: EnumProperty(
name='RenderMan Location',
description='''How RenderMan should be detected. Most users should leave to "Detect".
Users should restart Blender after making a change.
''',
items=[('ENV', 'Get From RMANTREE Environment Variable',
'This will use the RMANTREE set in the enviornment variables'),
('DETECT', 'Choose From Installed',
'''This will scan for installed RenderMan locations to choose from.'''),
('MANUAL', 'Set Manually', 'Manually set the RenderMan installation (for expert users)')],
default='ENV')
path_rmantree: StringProperty(
name="RMANTREE Path",
description="Path to RenderMan Pro Server installation folder",
subtype='DIR_PATH',
default='')
draw_ipr_text: BoolProperty(
name="Draw IPR Text",
description="Draw notice on View3D when IPR is active",
default=True)
draw_panel_icon: BoolProperty(
name="Draw Panel Icon",
description="Draw an icon on RenderMan Panels",
default=True)
path_fallback_textures_path: StringProperty(
name="Fallback Texture Path",
description="Fallback path for textures, when the current directory is not writable",
subtype='FILE_PATH',
default=os.path.join('<OUT>', 'textures'))
path_fallback_textures_path_always: BoolProperty(
name="Always Fallback",
description="Always use the fallback texture path regardless",
default=False)
rman_txmanager_keep_extension: BoolProperty(
name='Keep original extension',
default=True,
description="If on, keep the original extension of the input image."
)
rman_txmanager_workers: IntProperty(
name='Number of processes',
description="Number of txmake processes to launch in parallel. Default to 2 (assuming a typical 4-cores computer). You should only increase this if you have more than 8 physical cores.",
default=2,
min=1,max=32
)
rman_txmanager_tex_extensions: StringProperty(
name='Texture Extensions',
description="Any file with one of these extensions will not be converted by the texture manager and used as-is. Entries should be space-delimited.",
default='tex tx txr ptx ptex ies',
)
rman_scene_version_padding: IntProperty(
name="Version Padding",
description="The number of zeros to pad the version token",
default=3,
min=1, max=4
)
rman_scene_take_padding: IntProperty(
name="Take Padding",
description="The number of zeros to pad the take token",
default=2,
min=1, max=4
)
rman_scene_version_increment: EnumProperty(
name="Increment Version",
description="The version number can be set to automatically increment each time you render",
items=[
('MANUALLY', 'Manually', ''),
('RENDER', 'On Render', ''),
('BATCH RENDER', 'On Batch Render', '')
],
default='MANUALLY'
)
rman_scene_take_increment: EnumProperty(
name="Increment Take",
description="The take number can be set to automatically increment each time you render",
items=[
('MANUALLY', 'Manually', ''),
('RENDER', 'On Render', ''),
('BATCH RENDER', 'On Batch Render', '')
],
default='MANUALLY'
)
def update_rman_logging_level(self, context):
    """Property-update callback: apply the newly selected log level.

    Translates the enum identifier stored on the preference into the
    numeric level the logger expects, then installs it.
    """
    selected_level = rfb_logger.__LOG_LEVELS__[self.rman_logging_level]
    rfb_logger.set_logger_level(selected_level)
# --- Logging preferences -------------------------------------------------
rman_logging_level: EnumProperty(
    name='Logging Level',
    description='''Log level verbosity. Advanced: Setting the RFB_LOG_LEVEL environment variable will override this preference. Requires a restart.
''',
    items=[('CRITICAL', 'Critical', ''),
           ('ERROR', 'Error', ''),
           ('WARNING', 'Warning', ''),
           ('INFO', 'Info', ''),
           ('VERBOSE', 'Verbose', ''),
           ('DEBUG', 'Debug', ''),
           ],
    default='WARNING',
    # Pushes the new level to the logger immediately on change.
    update=update_rman_logging_level)

rman_logging_file: StringProperty(
    name='Logging File',
    description='''A file to write logging to. This will always write at DEBUG level. Setting the RFB_LOG_FILE environment variable will override this preference. Requires a restart.''',
    default = '',
    subtype='FILE_PATH'
)

# --- Material preview rendering ------------------------------------------
rman_do_preview_renders: BoolProperty(
    name="Render Previews",
    description="Enable rendering of material previews. This is considered a WIP.",
    default=False)

rman_preview_renders_minSamples: IntProperty(
    name="Preview Min Samples",
    description="Minimum samples for preview renders",
    default=0,
    min=0, soft_max=4,
)

rman_preview_renders_maxSamples: IntProperty(
    name="Preview Max Samples",
    description="Maximum samples for preview renders",
    default=1,
    min=1, soft_max=4,
)

rman_preview_renders_pixelVariance: FloatProperty(
    name="Pixel Variance",
    # NOTE(review): description appears copy-pasted from maxSamples;
    # this property actually controls pixel variance.
    description="Maximum samples for preview renders",
    default=0.15,
    min=0.001, soft_max=0.5,
)
# Viewport overlay toggles. Fix: the description strings misspelled
# "Uncheck" as "Unchechk" — these are user-visible in the UI tooltip.
rman_viewport_draw_bucket: BoolProperty(
    name="Draw Bucket Marker",
    description="Uncheck this if you do not want the bucket markers in the viewport",
    default=True
)

rman_viewport_draw_progress: BoolProperty(
    name="Draw Progress Bar",
    description="Uncheck this if you do not want the progress bar in the viewport",
    default=True
)
# --- Viewport overlay colors (RGBA) --------------------------------------
rman_viewport_crop_color: FloatVectorProperty(
    name="CropWindow Color",
    description="Color of the cropwindow border in the viewport when in IPR.",
    default=(0.0, 0.498, 1.0, 1.0),
    size=4,
    subtype="COLOR")

rman_viewport_bucket_color: FloatVectorProperty(
    name="Bucket Marker Color",
    description="Color of the bucket markers in the viewport when in IPR.",
    default=(0.0, 0.498, 1.0, 1.0),
    size=4,
    subtype="COLOR")

rman_viewport_progress_color: FloatVectorProperty(
    name="Progress Bar Color",
    description="Color of the progress bar in the viewport when in IPR.",
    default=(0.0, 0.498, 1.0, 1.0),
    size=4,
    subtype="COLOR")

# External editor used to open generated RIB files.
rman_editor: StringProperty(
    name="Editor",
    subtype='FILE_PATH',
    description="Text editor excutable you want to use to view RIB.",
    default=""
)

rman_show_cycles_convert: BoolProperty(
    name="Convert Cycles Nodes",
    default=False,
    description="Add convert Cycles Networks buttons to the material properties panel. N.B.: This isn't guaranteed to fully convert Cycles networks successfully. Also, because of differences in OSL implementations, converted networks may cause stability problems when rendering."
)

rman_render_nurbs_as_mesh: BoolProperty(
    name="NURBS as Mesh",
    default=True,
    description="Render all NURBS surfaces as meshes."
)

rman_emit_default_params: BoolProperty(
    name="Emit Default Params",
    default=False,
    description="Controls whether or not parameters that are not changed from their defaults should be emitted to RenderMan. Turning this on is only useful for debugging purposes."
)

# Toggles the "Advanced" section of the preferences UI (see draw()).
rman_show_advanced_params: BoolProperty(
    name="Show Advanced",
    default=False,
    description="Show advanced preferences"
)

rman_config_dir: StringProperty(
    name="Config Directory",
    subtype='DIR_PATH',
    description="Path to JSON configuration files. Requires a restart.",
    default=""
)

rman_viewport_refresh_rate: FloatProperty(
    name="Viewport Refresh Rate",
    description="The number of seconds to wait before the viewport refreshes during IPR.",
    default=0.01,
    precision=5,
    min=0.00001,
    max=0.1
)

# For the preset browser
# (internal state: selected library/category/preset are persisted here)
rpbConfigFile: StringProperty(default='')
rpbUserLibraries: CollectionProperty(type=RendermanPreferencePath)
rpbSelectedLibrary: StringProperty(default='')
rpbSelectedCategory: StringProperty(default='')
rpbSelectedPreset: StringProperty(default='')
def update_stats_config(self, context):
    """Property-update callback: re-apply the live-stats settings by
    invoking the renderman.update_stats_config operator."""
    bpy.ops.renderman.update_stats_config('INVOKE_DEFAULT')
# For roz stats (live statistics system); each property pushes its new
# value through update_stats_config on change.
rman_roz_logLevel: EnumProperty(
    name="Log Level",
    default='3',
    items=[('0', 'None', ''),
           ('1', 'Severe', ''),
           ('2', 'Error', ''),
           ('3', 'Warning', ''),
           ('4', 'Info', ''),
           ('5', 'Debug', ''),
           ],
    description="Change the logging level for the live statistics system.",
    update=update_stats_config
)

rman_roz_grpcServer: BoolProperty(name="Send Stats to 'it' HUD", default=True,
                                  description="Turn this off if you don't want stats to be sent to the 'it' HUD.",
                                  update=update_stats_config)

rman_roz_webSocketServer: BoolProperty(name="Enable Live Stats", default=False,
                                       description="Turning this off will disable the live statistics system in RfB.",
                                       update=update_stats_config)

rman_roz_webSocketServer_Port: IntProperty(name="Port", default=9723,
                                           min=0,
                                           description="Port number of the live stats server to use.",
                                           update=update_stats_config)
def draw_xpu_devices(self, context, layout):
    """Draw the XPU device selector into *layout*.

    CPU mode shows a single checkbox for the one CPU device; GPU mode
    shows either an info message (no device found) or a single-GPU
    selector, since XPU currently supports only one GPU.
    """
    if self.rman_xpu_device == 'CPU':
        cpu_device = self.rman_xpu_cpu_devices[0]
        layout.prop(cpu_device, 'use', text='%s' % cpu_device.name)
        return

    if len(self.rman_xpu_gpu_devices) < 1:
        layout.label(text="No compatible GPU devices found.", icon='INFO')
        return

    '''
    ## TODO: For when XPU can support multiple gpu devices...
    for device in self.rman_xpu_gpu_devices:
        layout.prop(device, 'use', text='%s (%d.%d)' % (device.name, device.version_major, device.version_minor))
    '''
    # Else, we only can select one GPU
    layout.prop(self, 'rman_xpu_gpu_selection')
def draw(self, context):
    """Draw the full add-on preferences panel.

    Sections, in order: RMANTREE detection, Behavior, XPU (when
    licensed), Workspace, Texture Manager, UI, Logging, and a
    collapsible Advanced section (live stats + misc).
    """
    self.layout.use_property_split = True
    self.layout.use_property_decorate = False

    layout = self.layout

    rman_r_icon = rfb_icons.get_icon("rman_blender")

    # RMANTREE location: detected, from environment, or manual path.
    row = layout.row()
    row.use_property_split = False
    col = row.column()
    col.prop(self, 'rmantree_method')

    if self.rmantree_method == 'DETECT':
        col.prop(self, 'rmantree_choice')
        if self.rmantree_choice == 'NEWEST':
            if envconfig_utils.reload_envconfig():
                col.label(text="RMANTREE: %s " % envconfig_utils.reload_envconfig().rmantree)
    elif self.rmantree_method == 'ENV':
        if envconfig_utils.reload_envconfig():
            col.label(text="RMANTREE: %s" % envconfig_utils.reload_envconfig().rmantree)
    else:
        col.prop(self, "path_rmantree")
    # Bail out early when no usable RMANTREE could be resolved —
    # nothing else in the panel is meaningful without it.
    if envconfig_utils.reload_envconfig() is None:
        row = layout.row()
        row.alert = True
        row.label(text='Error in RMANTREE. Reload addon to reset.', icon='ERROR')
        return

    # Behavior Prefs
    row = layout.row()
    row.label(text='Behavior', icon_value=rman_r_icon.icon_id)
    row = layout.row()
    col = row.column()
    col.prop(self, 'rman_do_preview_renders')
    col.prop(self, 'rman_render_nurbs_as_mesh')
    col.prop(self, 'rman_show_cycles_convert')
    col.prop(self, 'rman_emit_default_params')

    # XPU Prefs
    # NOTE(review): this uses envconfig_utils.envconfig() while the
    # RMANTREE section above uses reload_envconfig() — confirm both
    # return the same config object.
    if sys.platform != ("darwin") and envconfig_utils.envconfig().has_xpu_license:
        row = layout.row()
        row.label(text='XPU', icon_value=rman_r_icon.icon_id)
        row = layout.row()
        row.use_property_split = False
        row.prop(self, 'rman_xpu_device', expand=True)
        row = layout.row()
        row.use_property_split = False
        # Refresh the device lists before drawing them.
        self.find_xpu_devices()
        col = row.column()
        box = col.box()
        self.draw_xpu_devices(context, box)

    # Workspace
    row = layout.row()
    row.label(text='Workspace', icon_value=rman_r_icon.icon_id)
    row = layout.row()
    col = row.column()
    col.prop(self, "rman_scene_version_padding")
    col.prop(self, "rman_scene_take_padding")
    col.prop(self, "rman_scene_version_increment")
    col.prop(self, "rman_scene_take_increment")

    # TxManager
    row = layout.row()
    row.label(text='Texture Manager', icon_value=rman_r_icon.icon_id)
    row = layout.row()
    col = row.column()
    col.prop(self, 'path_fallback_textures_path')
    col.prop(self, 'path_fallback_textures_path_always')
    col.prop(self, "rman_txmanager_workers")
    col.prop(self, "rman_txmanager_keep_extension")
    col.prop(self, "rman_txmanager_tex_extensions")

    # UI Prefs
    row = layout.row()
    row.label(text='UI', icon_value=rman_r_icon.icon_id)
    row = layout.row()
    col = row.column()
    col.prop(self, 'rman_viewport_crop_color')
    # Color pickers only shown while their overlay is enabled.
    col.prop(self, 'rman_viewport_draw_bucket')
    if self.rman_viewport_draw_bucket:
        col.prop(self, 'rman_viewport_bucket_color')
    col.prop(self, 'rman_viewport_draw_progress')
    if self.rman_viewport_draw_progress:
        col.prop(self, 'rman_viewport_progress_color')
    col.prop(self, 'draw_ipr_text')
    col.prop(self, 'draw_panel_icon')
    col.prop(self, 'rman_editor')

    # Logging
    row = layout.row()
    row.label(text='Logging', icon_value=rman_r_icon.icon_id)
    row = layout.row()
    col = row.column()
    col.prop(self, 'rman_logging_level')
    col.prop(self, 'rman_logging_file')

    # Advanced
    row = layout.row()
    row.use_property_split = False
    row.use_property_decorate = True
    row.prop(self, 'rman_show_advanced_params')
    row = layout.row()
    col = row.column()

    ui_open = getattr(self, 'rman_show_advanced_params')
    if ui_open:
        col.label(text='Live Statistics', icon_value=rman_r_icon.icon_id)
        row = col.row()
        col = row.column()
        col.prop(self, 'rman_roz_logLevel')
        col.prop(self, 'rman_roz_grpcServer')
        col.prop(self, 'rman_roz_webSocketServer')
        if self.rman_roz_webSocketServer:
            try:
                # Deferred import: rman_stats may be unavailable.
                from .rman_stats import RfBStatsManager
                stats_mgr = RfBStatsManager.get_stats_manager()
                split = layout.split()
                row = split.row()
                col.prop(self, 'rman_roz_webSocketServer_Port', slider=False)
                col = row.column()
                col.label(text='')
                col = row.column()
                if stats_mgr:
                    # Offer attach/disconnect depending on connection state.
                    if stats_mgr.is_connected():
                        col.operator('renderman.disconnect_stats_render')
                    else:
                        col.operator('renderman.attach_stats_render')
                    col.label(text=' Web Socket Status: %s' % stats_mgr.get_status())
            except Exception as e:
                rfb_logger.rfb_log().debug("Could not import rman_stats: %s" % str(e))
                pass

        row = layout.row()
        col = row.column()
        col.label(text='Other', icon_value=rman_r_icon.icon_id)
        col.prop(self, 'rman_viewport_refresh_rate')
        col.prop(self, 'rman_config_dir')
        if self.rman_do_preview_renders:
            col.prop(self, 'rman_preview_renders_minSamples')
            col.prop(self, 'rman_preview_renders_maxSamples')
            col.prop(self, 'rman_preview_renders_pixelVariance')
# All classes this module registers/unregisters with Blender.
classes = [
    RendermanPreferencePath,
    RendermanDeviceDesc,
    RendermanPreferences
]
def register():
    """Register all preference classes with Blender.

    Registration failures (e.g. a class already registered after an
    add-on reload) are logged at debug level and skipped.
    """
    for cls in classes:
        try:
            bpy.utils.register_class(cls)
        except ValueError as err:
            rfb_logger.rfb_log().debug("Could not register class, %s, because: %s" % (str(cls), str(err)))
def unregister():
    """Unregister all preference classes from Blender.

    Unregistration failures are logged at debug level and skipped.

    Fix: the except path called a bare ``rfb_log()``, but this module
    accesses the logger as ``rfb_logger.rfb_log()`` everywhere else
    (see register() and draw()); the unqualified name would raise a
    NameError exactly when the error handler fires. Qualified here for
    consistency and correctness.
    """
    for cls in classes:
        try:
            bpy.utils.unregister_class(cls)
        except RuntimeError:
            rfb_logger.rfb_log().debug('Could not unregister class: %s' % str(cls))
| {
"content_hash": "dcd3cb94fe8b4576b8e36590ea792658",
"timestamp": "",
"source": "github",
"line_count": 560,
"max_line_length": 283,
"avg_line_length": 39.38035714285714,
"alnum_prop": 0.5648211127737722,
"repo_name": "adminradio/RenderManForBlender",
"id": "0db41d2a32d21e96d6e9cd886359d6af57c1d1f3",
"size": "23225",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "preferences.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1055354"
}
],
"symlink_target": ""
} |
class DecimalBaseConverter(object):
    """Convert non-negative decimal integers to their string form in
    another base (2 through 16)."""

    def __init__(self):
        # Digit alphabet: index == digit value; 16 symbols caps the
        # supported base at 16.
        self.__digits = "0123456789ABCDEF"

    def convert_to_base(self, number, base):
        """Return *number* rendered as a string of digits in *base*.

        Args:
            number: non-negative integer to convert.
            base: target base, 2 <= base <= 16.

        Returns:
            The base-*base* representation, most significant digit first.

        Raises:
            ValueError: if *base* is outside [2, 16] or *number* is
                negative. (Previously base > 16 raised IndexError deep
                in the recursion and base <= 1 recursed forever or
                divided by zero.)
        """
        if not 2 <= base <= 16:
            raise ValueError("base must be between 2 and 16, got %r" % (base,))
        if number < 0:
            raise ValueError("number must be non-negative, got %r" % (number,))
        if number < base:
            return self.__digits[number]
        # Recursive case: higher-order digits first, current digit last.
        return self.convert_to_base(number // base, base) + self.__digits[number % base]
# Demo driver: print 0..10000 in hexadecimal, one value per line.
converter = DecimalBaseConverter()
for i in range(10001):
    print(converter.convert_to_base(i, 16))
"content_hash": "ba23ef1000f35d7062947680232c7ff2",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 92,
"avg_line_length": 28.466666666666665,
"alnum_prop": 0.6276346604215457,
"repo_name": "anthonynsimon/python-data-structures-algorithms",
"id": "70f23121f33a900150b97ceba702a7d6f826f9d5",
"size": "427",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/playground/decimal_base_converter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "91710"
}
],
"symlink_target": ""
} |
import os,sys,time
import csv
import matplotlib.pyplot as plt
from matplotlib.legend_handler import HandlerLine2D
import os,sys
# Resolve the grandparent directory of the current working directory
# and print it (used to locate the project root relative to this file).
path = os.getcwd()
parent_path = os.path.dirname(os.path.dirname(path))
print(parent_path)
"content_hash": "04156f8ca9d0ee2e01df42b8dd788267",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 51,
"avg_line_length": 24.5,
"alnum_prop": 0.7959183673469388,
"repo_name": "zimuxin/AliMusicPrediction",
"id": "1e3674611b2791eee91b6761e5c027b6b42b7512",
"size": "245",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "阿里音乐流行趋势预测项目_Group13/AliMusicPrediction/music_prediction-master/fit/Generalized_Linear_Models/test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "49356"
}
],
"symlink_target": ""
} |
import webob.exc
from nova.api.openstack import common
from nova.api.openstack.compute.views import images as views_images
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import exception
from nova import flags
import nova.image.glance
from nova.openstack.common import log as logging
import nova.utils
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS

# Maps public query-string filter names to the internal image-service
# filter keys. Parameters not listed here are ignored, except those
# prefixed 'property-' which pass through unchanged (see _get_filters).
SUPPORTED_FILTERS = {
    'name': 'name',
    'status': 'status',
    'changes-since': 'changes-since',
    'server': 'property-instance_uuid',
    'type': 'property-image_type',
    'minRam': 'min_ram',
    'minDisk': 'min_disk',
}
def make_image(elem, detailed=False):
    """Populate an XML template element with image attributes.

    Always exposes 'name' and 'id' plus atom links; when *detailed* is
    set, also exposes status fields, the owning server sub-element and
    the metadata template.
    """
    elem.set('name')
    elem.set('id')

    if detailed:
        for attr_name in ('updated', 'created', 'status',
                          'progress', 'minRam', 'minDisk'):
            elem.set(attr_name)

        server_elem = xmlutil.SubTemplateElement(elem, 'server',
                                                 selector='server')
        server_elem.set('id')
        xmlutil.make_links(server_elem, 'links')

        elem.append(common.MetadataTemplate())

    xmlutil.make_links(elem, 'links')
# XML namespace map shared by all image templates (default + atom).
image_nsmap = {None: xmlutil.XMLNS_V11, 'atom': xmlutil.XMLNS_ATOM}
class ImageTemplate(xmlutil.TemplateBuilder):
    """Serialization template for a single, detailed image."""

    def construct(self):
        image_elem = xmlutil.TemplateElement('image', selector='image')
        make_image(image_elem, detailed=True)
        return xmlutil.MasterTemplate(image_elem, 1, nsmap=image_nsmap)
class MinimalImagesTemplate(xmlutil.TemplateBuilder):
    """Serialization template for a brief (index) image listing."""

    def construct(self):
        images_root = xmlutil.TemplateElement('images')
        image_elem = xmlutil.SubTemplateElement(images_root, 'image',
                                               selector='images')
        make_image(image_elem)
        xmlutil.make_links(images_root, 'images_links')
        return xmlutil.MasterTemplate(images_root, 1, nsmap=image_nsmap)
class ImagesTemplate(xmlutil.TemplateBuilder):
    """Serialization template for a detailed image listing."""

    def construct(self):
        images_root = xmlutil.TemplateElement('images')
        image_elem = xmlutil.SubTemplateElement(images_root, 'image',
                                               selector='images')
        make_image(image_elem, detailed=True)
        return xmlutil.MasterTemplate(images_root, 1, nsmap=image_nsmap)
class Controller(wsgi.Controller):
    """Base controller for retrieving/displaying images."""

    _view_builder_class = views_images.ViewBuilder

    def __init__(self, image_service=None, **kwargs):
        """Initialize new `ImageController`.

        :param image_service: `nova.image.glance:GlanceImageService`
        """
        super(Controller, self).__init__(**kwargs)
        self._image_service = (image_service or
                               nova.image.glance.get_default_image_service())

    def _get_filters(self, req):
        """Return a dictionary of query param filters from the request.

        :param req: the Request object coming from the wsgi layer
        :retval a dict of key/value filters
        """
        filters = {}
        for param in req.params:
            if param in SUPPORTED_FILTERS or param.startswith('property-'):
                # map filter name or carry through if property-*
                filter_name = SUPPORTED_FILTERS.get(param, param)
                filters[filter_name] = req.params.get(param)

        # ensure server filter is the instance uuid
        filter_name = 'property-instance_uuid'
        try:
            filters[filter_name] = filters[filter_name].rsplit('/', 1)[1]
        except (AttributeError, IndexError, KeyError):
            pass

        filter_name = 'status'
        if filter_name in filters:
            # The Image API expects us to use lowercase strings for status
            filters[filter_name] = filters[filter_name].lower()

        return filters

    def _get_image_listing(self, req):
        """Fetch the filtered, paginated image listing for a request.

        Shared implementation of index() and detail(), which previously
        duplicated this logic verbatim (and also built an unused copy of
        req.GET with py2-only iteritems() — dead code, removed).

        :param req: `wsgi.Request` object
        :raises: webob.exc.HTTPBadRequest if the image service rejects
                 the filters/pagination parameters
        """
        context = req.environ['nova.context']
        filters = self._get_filters(req)
        page_params = common.get_pagination_params(req)
        try:
            return self._image_service.detail(context, filters=filters,
                                              **page_params)
        except exception.Invalid as e:
            raise webob.exc.HTTPBadRequest(explanation=str(e))

    @wsgi.serializers(xml=ImageTemplate)
    def show(self, req, id):
        """Return detailed information about a specific image.

        :param req: `wsgi.Request` object
        :param id: Image identifier
        """
        context = req.environ['nova.context']

        try:
            image = self._image_service.show(context, id)
        except (exception.NotFound, exception.InvalidImageRef):
            explanation = _("Image not found.")
            raise webob.exc.HTTPNotFound(explanation=explanation)

        return self._view_builder.show(req, image)

    def delete(self, req, id):
        """Delete an image, if allowed.

        :param req: `wsgi.Request` object
        :param id: Image identifier (integer)
        """
        context = req.environ['nova.context']
        try:
            self._image_service.delete(context, id)
        except exception.ImageNotFound:
            explanation = _("Image not found.")
            raise webob.exc.HTTPNotFound(explanation=explanation)
        return webob.exc.HTTPNoContent()

    @wsgi.serializers(xml=MinimalImagesTemplate)
    def index(self, req):
        """Return an index listing of images available to the request.

        :param req: `wsgi.Request` object
        """
        images = self._get_image_listing(req)
        return self._view_builder.index(req, images)

    @wsgi.serializers(xml=ImagesTemplate)
    def detail(self, req):
        """Return a detailed index listing of images available to the request.

        :param req: `wsgi.Request` object.
        """
        images = self._get_image_listing(req)
        return self._view_builder.detail(req, images)

    def create(self, *args, **kwargs):
        """Image creation is not supported through this endpoint."""
        raise webob.exc.HTTPMethodNotAllowed()
def create_resource():
    """Build the WSGI resource wrapping the images controller."""
    controller = Controller()
    return wsgi.Resource(controller)
| {
"content_hash": "8beaf50eb090748260b4d3176ca995f1",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 78,
"avg_line_length": 32.4263959390863,
"alnum_prop": 0.6227301189730745,
"repo_name": "tylertian/Openstack",
"id": "1b20531def22269e66fb6ba0940c6763df6a49d7",
"size": "7018",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "openstack F/nova/nova/api/openstack/compute/images.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "239919"
},
{
"name": "JavaScript",
"bytes": "156942"
},
{
"name": "Python",
"bytes": "16949418"
},
{
"name": "Shell",
"bytes": "96743"
}
],
"symlink_target": ""
} |
'''
Created on May 12, 2017
@author: yingziwu
'''
import requests
import time
import logging
from v2ex_base.v2_sql import SQL
import settings
class spider(object):
    '''
    A base Spider for v2ex.

    Constructing an instance runs the whole cycle immediately: sleep
    *sleep_time* seconds, open the database, fetch the topic list JSON
    from *url*, write every topic row, and close the database.

    >>> from v2ex_spider import base_spider
    >>> base_spider.spider(url, sleep_time)
    '''
    def __init__(self,url,sleep_time):
        '''
        :param url: v2ex topics API endpoint returning a JSON list.
        :param sleep_time: seconds to wait before fetching (rate limit).
        '''
        logging.info('Start base spider. Url is %s' % url)
        self.url=url
        self.sleep_time=sleep_time
        time.sleep(int(self.sleep_time))
        self.SQ=SQL()
        self.SQ.open_datebase()
        #run
        self.load_config()
        self.spider()
        #end
        # NOTE(review): if spider() raises, this close is skipped;
        # spider() closes the connection itself on the API-error path.
        self.SQ.close_datebase()
        logging.info('Spider Finished.')

    def spider(self):
        """Fetch the topic list and persist each topic to the database.

        Raises requests' RequestException on network failure and
        APIError on a non-200 response.
        """
        logging.debug('start spider.')
        try:
            resp=self.s.get(self.url, timeout=10)
        except requests.exceptions.RequestException as e:
            logging.error('spider failed.')
            logging.error('proxy_status: %s' % settings.proxy_enable)
            if settings.proxy_enable is True:
                logging.error('proxy: %s' % self.s.proxies)
            logging.error(e)
            raise e
        if resp.status_code != 200:
            # Close explicitly: __init__'s close is never reached once
            # this raises.
            self.SQ.close_datebase()
            error_info='proxy status: %s, proxy: %s' % (str(settings.proxy_enable),str(self.s.proxies))
            logging.error('API Error: proxy status: %s, proxy: %s' % (str(settings.proxy_enable),str(self.s.proxies)))
            raise APIError(error_info)
        topics=resp.json()
        # One row per topic; committed in a single transaction below.
        for topic in topics:
            t_id=topic["id"]
            title=topic["title"]
            author=topic["member"]["username"]
            author_id=topic["member"]["id"]
            content=topic["content"]
            content_rendered=topic["content_rendered"]
            replies=topic["replies"]
            node=topic["node"]["id"]
            created=topic["created"]
            n_time=int(time.time())
            self.SQ.write_to_db_base(t_id,title,author,author_id,content,content_rendered,replies,node,created,n_time)
        self.SQ.conn.commit()
        return

    def load_config(self):
        """Build the requests session: API headers plus optional proxies
        from settings."""
        logging.debug('start load_config')
        self.proxy_enable=settings.proxy_enable
        self.s=requests.session()
        self.s.headers=settings.API_headers
        if self.proxy_enable:
            self.s.proxies=settings.proxies()
        return
class APIError(ValueError):
    """Raised when the v2ex API responds with a non-200 status code."""
    pass
| {
"content_hash": "ad7b5d1e56c75a4a584f57198b8cf6ce",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 118,
"avg_line_length": 31.253164556962027,
"alnum_prop": 0.5694613203726205,
"repo_name": "yingziwu/v2ex_delete",
"id": "6ea926494da6a775b5567d7f810df3b2171b1367",
"size": "2469",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "v2ex_spider/base_spider.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "31969"
},
{
"name": "Shell",
"bytes": "685"
}
],
"symlink_target": ""
} |
"""
Copyright (c) 2014-2018 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from myhdl import *
import axis_ep
import eth_ep
import struct
class ARPFrame(object):
    """Software model of an ARP packet (Ethernet header + ARP payload).

    The first positional argument doubles as a copy source: passing a
    dict containing every field name, or another ARPFrame, copies all
    fields from it instead of treating it as eth_dest_mac.
    """

    # Every header/payload field, in declaration order. Drives the copy
    # constructor and equality, replacing 24+ copy-pasted assignments.
    _field_names = ('eth_dest_mac', 'eth_src_mac', 'eth_type',
                    'arp_htype', 'arp_ptype', 'arp_hlen', 'arp_plen',
                    'arp_oper', 'arp_sha', 'arp_spa', 'arp_tha',
                    'arp_tpa')

    def __init__(self,
            eth_dest_mac=0,
            eth_src_mac=0,
            eth_type=0,
            arp_htype=1,
            arp_ptype=0x0800,
            arp_hlen=6,
            arp_plen=4,
            arp_oper=2,
            arp_sha=0x5A5152535455,
            arp_spa=0xc0a80164,
            arp_tha=0xDAD1D2D3D4D5,
            arp_tpa=0xc0a80164
        ):

        self.eth_dest_mac = eth_dest_mac
        self.eth_src_mac = eth_src_mac
        self.eth_type = eth_type
        self.arp_htype = arp_htype
        self.arp_ptype = arp_ptype
        self.arp_hlen = arp_hlen
        self.arp_plen = arp_plen
        self.arp_oper = arp_oper
        self.arp_sha = arp_sha
        self.arp_spa = arp_spa
        self.arp_tha = arp_tha
        self.arp_tpa = arp_tpa

        # Copy-constructor support. isinstance (instead of the original
        # `type(x) is`) also accepts subclasses — backward compatible.
        if isinstance(eth_dest_mac, dict):
            for field in self._field_names:
                setattr(self, field, eth_dest_mac[field])
        elif isinstance(eth_dest_mac, ARPFrame):
            for field in self._field_names:
                setattr(self, field, getattr(eth_dest_mac, field))

    def build_axis(self):
        """Serialize to an AXI-stream frame via the Ethernet layer."""
        return self.build_eth().build_axis()

    def build_eth(self):
        """Return an eth_ep.EthFrame whose payload is the 28-byte ARP body."""
        data = b''

        data += struct.pack('>H', self.arp_htype)
        data += struct.pack('>H', self.arp_ptype)
        data += struct.pack('B', self.arp_hlen)
        data += struct.pack('B', self.arp_plen)
        data += struct.pack('>H', self.arp_oper)
        # MACs are 48 bits: pack as 64-bit big-endian, drop top 2 bytes.
        data += struct.pack('>Q', self.arp_sha)[2:]
        data += struct.pack('>L', self.arp_spa)
        data += struct.pack('>Q', self.arp_tha)[2:]
        data += struct.pack('>L', self.arp_tpa)

        return eth_ep.EthFrame(data, self.eth_dest_mac, self.eth_src_mac, self.eth_type)

    def parse_axis(self, data):
        """Populate this frame from an AXI-stream frame."""
        frame = eth_ep.EthFrame()
        frame.parse_axis(data)
        self.parse_eth(frame)

    def parse_eth(self, data):
        """Populate this frame from an eth_ep.EthFrame payload."""
        self.eth_src_mac = data.eth_src_mac
        self.eth_dest_mac = data.eth_dest_mac
        self.eth_type = data.eth_type

        self.arp_htype = struct.unpack('>H', data.payload.data[0:2])[0]
        self.arp_ptype = struct.unpack('>H', data.payload.data[2:4])[0]
        self.arp_hlen = struct.unpack('B', data.payload.data[4:5])[0]
        self.arp_plen = struct.unpack('B', data.payload.data[5:6])[0]
        self.arp_oper = struct.unpack('>H', data.payload.data[6:8])[0]
        # 48-bit MACs: left-pad to 8 bytes before unpacking as >Q.
        self.arp_sha = struct.unpack('>Q', b'\x00\x00'+data.payload.data[8:14])[0]
        self.arp_spa = struct.unpack('>L', data.payload.data[14:18])[0]
        self.arp_tha = struct.unpack('>Q', b'\x00\x00'+data.payload.data[18:24])[0]
        self.arp_tpa = struct.unpack('>L', data.payload.data[24:28])[0]

    def __eq__(self, other):
        """Field-wise equality with another ARPFrame; False otherwise."""
        if not isinstance(other, ARPFrame):
            return False
        return all(getattr(self, field) == getattr(other, field)
                   for field in self._field_names)

    def __repr__(self):
        return (
            ('ArpFrame(eth_dest_mac=0x%012x, ' % self.eth_dest_mac) +
            ('eth_src_mac=0x%012x, ' % self.eth_src_mac) +
            ('eth_type=0x%04x, ' % self.eth_type) +
            ('arp_htype=0x%04x, ' % self.arp_htype) +
            ('arp_ptype=0x%04x, ' % self.arp_ptype) +
            ('arp_hlen=%d, ' % self.arp_hlen) +
            ('arp_plen=%d, ' % self.arp_plen) +
            ('arp_oper=0x%04x, ' % self.arp_oper) +
            ('arp_sha=0x%012x, ' % self.arp_sha) +
            ('arp_spa=0x%08x, ' % self.arp_spa) +
            ('arp_tha=0x%012x, ' % self.arp_tha) +
            ('arp_tpa=0x%08x)' % self.arp_tpa)
        )
class ARPFrameSource():
    """Test-bench driver: presents queued ARPFrames on a parallel
    (one-field-per-signal) valid/ready handshake interface."""

    def __init__(self):
        self.active = False        # a frame is currently being presented
        self.has_logic = False     # create_logic() may only be called once
        self.queue = []            # pending ARPFrame objects
        self.clk = Signal(bool(0))

    def send(self, frame):
        """Queue a frame for transmission (copied via ARPFrame's copy ctor)."""
        self.queue.append(ARPFrame(frame))

    def count(self):
        """Number of frames still queued."""
        return len(self.queue)

    def empty(self):
        """True when no frames are queued."""
        return not self.queue

    def idle(self):
        """True when the queue is empty and no frame is in flight."""
        return not self.queue and not self.active

    def wait(self):
        """MyHDL generator: block until the source is idle."""
        while not self.idle():
            yield self.clk.posedge

    def create_logic(self,
                clk,
                rst,
                frame_valid=None,
                frame_ready=None,
                # NOTE(review): Signal(...) defaults are evaluated once at
                # definition time, so two sources created without explicit
                # signals would share them — always pass signals in.
                eth_dest_mac=Signal(intbv(0)[48:]),
                eth_src_mac=Signal(intbv(0)[48:]),
                eth_type=Signal(intbv(0)[16:]),
                arp_htype=Signal(intbv(0)[16:]),
                arp_ptype=Signal(intbv(0)[16:]),
                arp_hlen=Signal(intbv(6)[8:]),
                arp_plen=Signal(intbv(4)[8:]),
                arp_oper=Signal(intbv(0)[16:]),
                arp_sha=Signal(intbv(0)[48:]),
                arp_spa=Signal(intbv(0)[32:]),
                arp_tha=Signal(intbv(0)[48:]),
                arp_tpa=Signal(intbv(0)[32:]),
                pause=0,
                name=None
            ):
        """Instantiate the MyHDL process that drives the interface.

        Asserts frame_valid (subject to *pause*) while a frame's fields
        are on the signals; the frame is consumed when ready and valid
        are seen high together.
        """

        assert not self.has_logic

        self.has_logic = True
        self.clk = clk

        @instance
        def logic():
            frame = dict()

            while True:
                yield clk.posedge, rst.posedge

                if rst:
                    frame_valid.next = False
                    self.active = False
                else:
                    # Keep valid asserted while a frame is in flight,
                    # dropping it while paused.
                    frame_valid.next = self.active and (frame_valid or not pause)
                    if frame_ready and frame_valid:
                        # Handshake completed this cycle.
                        frame_valid.next = False
                        self.active = False
                    if not self.active and self.queue:
                        # Load the next frame's fields onto the bus.
                        frame = self.queue.pop(0)
                        eth_dest_mac.next = frame.eth_dest_mac
                        eth_src_mac.next = frame.eth_src_mac
                        eth_type.next = frame.eth_type
                        arp_htype.next = frame.arp_htype
                        arp_ptype.next = frame.arp_ptype
                        arp_hlen.next = frame.arp_hlen
                        arp_plen.next = frame.arp_plen
                        arp_oper.next = frame.arp_oper
                        arp_sha.next = frame.arp_sha
                        arp_spa.next = frame.arp_spa
                        arp_tha.next = frame.arp_tha
                        arp_tpa.next = frame.arp_tpa

                        if name is not None:
                            print("[%s] Sending frame %s" % (name, repr(frame)))

                        frame_valid.next = not pause
                        self.active = True

        return instances()
class ARPFrameSink():
    """Test-bench monitor: captures ARP frames from a parallel
    (one-field-per-signal) valid/ready handshake interface."""

    def __init__(self):
        self.has_logic = False        # create_logic() may only be called once
        self.queue = []               # received ARPFrame objects
        self.sync = Signal(intbv(0))  # toggled on every received frame

    def recv(self):
        """Pop and return the oldest received frame, or None if empty."""
        if self.queue:
            return self.queue.pop(0)
        return None

    def count(self):
        """Number of frames waiting to be read."""
        return len(self.queue)

    def empty(self):
        """True when no frames are waiting."""
        return not self.queue

    def wait(self, timeout=0):
        """MyHDL generator: block until a frame arrives.

        With a nonzero *timeout*, gives up after that many time units.
        """
        yield delay(0)
        if self.queue:
            return
        if timeout:
            yield self.sync, delay(timeout)
        else:
            yield self.sync

    def create_logic(self,
                clk,
                rst,
                frame_valid=None,
                frame_ready=None,
                # NOTE(review): shared Signal(...) defaults — see
                # ARPFrameSource.create_logic; always pass signals in.
                eth_dest_mac=Signal(intbv(0)[48:]),
                eth_src_mac=Signal(intbv(0)[48:]),
                eth_type=Signal(intbv(0)[16:]),
                arp_htype=Signal(intbv(0)[16:]),
                arp_ptype=Signal(intbv(0)[16:]),
                arp_hlen=Signal(intbv(6)[8:]),
                arp_plen=Signal(intbv(4)[8:]),
                arp_oper=Signal(intbv(0)[16:]),
                arp_sha=Signal(intbv(0)[48:]),
                arp_spa=Signal(intbv(0)[32:]),
                arp_tha=Signal(intbv(0)[48:]),
                arp_tpa=Signal(intbv(0)[32:]),
                pause=0,
                name=None
            ):
        """Instantiate the MyHDL process that samples the interface.

        Holds ready high (subject to *pause*); on each ready/valid
        handshake, snapshots all field signals into an ARPFrame and
        appends it to the queue.
        """

        assert not self.has_logic

        self.has_logic = True

        frame_ready_int = Signal(bool(False))
        frame_valid_int = Signal(bool(False))

        @always_comb
        def pause_logic():
            # Gate both directions of the handshake while paused.
            frame_ready.next = frame_ready_int and not pause
            frame_valid_int.next = frame_valid and not pause

        @instance
        def logic():
            while True:
                yield clk.posedge, rst.posedge

                if rst:
                    frame_ready_int.next = False
                else:
                    frame_ready_int.next = True

                    if frame_ready_int and frame_valid_int:
                        # Handshake: capture every field this cycle.
                        frame = ARPFrame()
                        frame.eth_dest_mac = int(eth_dest_mac)
                        frame.eth_src_mac = int(eth_src_mac)
                        frame.eth_type = int(eth_type)
                        frame.arp_htype = int(arp_htype)
                        frame.arp_ptype = int(arp_ptype)
                        frame.arp_hlen = int(arp_hlen)
                        frame.arp_plen = int(arp_plen)
                        frame.arp_oper = int(arp_oper)
                        frame.arp_sha = int(arp_sha)
                        frame.arp_spa = int(arp_spa)
                        frame.arp_tha = int(arp_tha)
                        frame.arp_tpa = int(arp_tpa)
                        self.queue.append(frame)
                        self.sync.next = not self.sync
                        if name is not None:
                            print("[%s] Got frame %s" % (name, repr(frame)))

        return instances()
| {
"content_hash": "012a44edcbbad1fc98fa89367967729c",
"timestamp": "",
"source": "github",
"line_count": 338,
"max_line_length": 88,
"avg_line_length": 36.48816568047337,
"alnum_prop": 0.5048244547149923,
"repo_name": "alexforencich/verilog-ethernet",
"id": "2a4286ddad3a152c85ae82c40d48a74cea762f87",
"size": "12333",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tb/arp_ep.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "655682"
},
{
"name": "Python",
"bytes": "3186932"
},
{
"name": "Shell",
"bytes": "4505"
},
{
"name": "Tcl",
"bytes": "189025"
},
{
"name": "Verilog",
"bytes": "4219830"
}
],
"symlink_target": ""
} |
import mock
import MySQLdb
import unittest2
from thanatos.questions import universe
class BorderingRegionsTestCase(unittest2.TestCase):
    """Unit tests for universe.BorderingRegionsQuestion."""

    def setUp(self):
        # Fake DB connection; the question classes only take the handle.
        self.mock_db_connection = mock.Mock(spec=MySQLdb.connection)

    def test_class_initializes(self):
        """ Simply test we can create an instance of the BRQ class. """

        universe.BorderingRegionsQuestion(self.mock_db_connection)

    @mock.patch('thanatos.questions.universe.universe')
    @mock.patch('thanatos.questions.base.Question.format_question')
    @mock.patch('random.choice')
    def test_question_ask(self, random_choice, mock_format_question, mock_db_universe):
        """ Test we can call the bordering region question ask method. """

        all_regions = [
            (10000001L, 'Region One'),
            (10000002L, 'Region Two'),
            (10000003L, 'Region Three'),
            (10000004L, 'Region Four'),
            (10000005L, 'Region Five'),
            (10000006L, 'Region Six'),
        ]

        mock_db_universe.get_all_not_wh_regions.return_value = all_regions

        # First choice picks the subject region, second the correct answer.
        random_choice.side_effect = [
            (10000001L, 'Region One'),
            (10000005L, 'Region Five'),
        ]

        mock_db_universe.get_all_regions_connected_to_region.return_value = [
            (10000005L, 'Region Five'),
            (10000006L, 'Region Six'),
        ]

        universe.BorderingRegionsQuestion(self.mock_db_connection).ask()

        random_choice.assert_any_call(all_regions)
        random_choice.assert_any_call([
            (10000005L, 'Region Five'),
            (10000006L, 'Region Six'),
        ])

        # Wrong answers are the unconnected regions.
        mock_format_question.assert_called_with(
            (10000005L, 'Region Five'),
            [(10000003L, 'Region Three'), (10000002L, 'Region Two'), (10000004L, 'Region Four')],
            'Which of the following regions borders the Region One region?',
        )
class PoitotTestCase(unittest2.TestCase):
    """Tests for universe.PoitotFamousForQuestion."""

    def setUp(self):
        # Connection double; the question class only stores it.
        self.mock_db_connection = mock.Mock(spec=MySQLdb.connection)

    def test_class_initializes(self):
        """ Simply test we can create an instance of the Poitot questions class. """
        universe.PoitotFamousForQuestion(self.mock_db_connection)

    @mock.patch('thanatos.questions.base.Question.format_question')
    def test_question_ask(self, mock_format_question):
        """ Test we can call the poitot question ask method. """
        universe.PoitotFamousForQuestion(self.mock_db_connection).ask()
        # The question is static: choice 0 is correct, 1-3 are decoys.
        mock_format_question.assert_called_with(
            (0, 'The only named system in Syndicate.'),
            [(1, 'Kind to animals.'), (2, 'A fictional space detective.'), (3, 'Adjacent to F67E-Q.')],
            'Poitot is famous for being...?',
        )
class UniverseUtilsTestCase(unittest2.TestCase):
    """Tests for module-level helpers in thanatos.questions.universe."""

    def setUp(self):
        pass

    def test_removal_of_regions(self):
        """ Specifically tests to make sure we remove the jove regions with no gates. """
        sample_regions_list = [
            (10000001L, 'Derelik'),
            (10000004L, 'UUA-F4'),
            (10000017L, 'J7HZ-F'),
            (10000019L, 'A821-A'),
        ]
        gateless_regions = universe.remove_regions_with_no_gates(sample_regions_list)
        # Only Derelik has gates; the three Jove regions are dropped.
        self.assertEqual(gateless_regions, [(10000001L, 'Derelik')])

    def test_removal_of_regions_handles_region_not_in_list(self):
        """ Specifically tests we don't fail removing a region if it doesn't exist in the list. """
        sample_regions_list = [
            (10000001L, 'Derelik'),
            (10000004L, 'UUA-F4'),
        ]
        gateless_regions = universe.remove_regions_with_no_gates(sample_regions_list)
        self.assertEqual(gateless_regions, [(10000001L, 'Derelik')])
"content_hash": "9cee4583f2a7e713486dcac2c021bdd9",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 103,
"avg_line_length": 33.517857142857146,
"alnum_prop": 0.6209376664890783,
"repo_name": "evetrivia/thanatos",
"id": "c5d8d9bf3c2f38ce9feda469c3495772d03ebb95",
"size": "3756",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/questions/test_questions_universe.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "53886"
}
],
"symlink_target": ""
} |
import ast
import builtins
import collections.abc as abc
import importlib
import inspect
import logging
import os
import uuid
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import is_dataclass
from typing import List, Tuple, Union
import cloudpickle
import yaml
from omegaconf import DictConfig, ListConfig, OmegaConf, SCMode
from detectron2.utils.file_io import PathManager
from detectron2.utils.registry import _convert_target_to_string
__all__ = ["LazyCall", "LazyConfig"]
class LazyCall:
    """Defer a call: wrapping a callable and "calling" the wrapper records
    the call as a DictConfig instead of executing it.

    LazyCall object has to be called with only keyword arguments. Positional
    arguments are not yet supported.

    Examples:
    ::
        from detectron2.config import instantiate, LazyCall

        layer_cfg = LazyCall(nn.Conv2d)(in_channels=32, out_channels=32)
        layer_cfg.out_channels = 64    # can edit it afterwards
        layer = instantiate(layer_cfg)
    """

    def __init__(self, target):
        acceptable = callable(target) or isinstance(target, (str, abc.Mapping))
        if not acceptable:
            raise TypeError(
                f"target of LazyCall must be a callable or defines a callable! Got {target}"
            )
        self._target = target

    def __call__(self, **kwargs):
        # omegaconf cannot hold dataclass types, so those are stored by
        # their import path instead.
        # https://github.com/omry/omegaconf/issues/784
        target = (
            _convert_target_to_string(self._target)
            if is_dataclass(self._target)
            else self._target
        )
        kwargs["_target_"] = target
        return DictConfig(content=kwargs, flags={"allow_objects": True})
def _visit_dict_config(cfg, func):
    """
    Apply func recursively to all DictConfig in cfg.
    """
    # Depth-first: apply func to the mapping itself, then recurse into its
    # values.
    if isinstance(cfg, DictConfig):
        func(cfg)
        for v in cfg.values():
            _visit_dict_config(v, func)
    elif isinstance(cfg, ListConfig):
        # Lists are traversed, but func is only applied to DictConfig nodes.
        for v in cfg:
            _visit_dict_config(v, func)
    # Any other node type (scalars, arbitrary objects) is left untouched.
def _validate_py_syntax(filename):
    """Parse ``filename`` as Python source and fail fast on syntax errors.

    Args:
        filename: path to a python config file; opened via PathManager so
            it may live on any supported storage backend.

    Raises:
        SyntaxError: chained from the parser's error, naming the file.
    """
    # see also https://github.com/open-mmlab/mmcv/blob/master/mmcv/utils/config.py
    with PathManager.open(filename, "r") as f:
        content = f.read()
    try:
        ast.parse(content)
    except SyntaxError as e:
        # Name the offending file: the previous message was an f-string
        # that interpolated nothing, so errors were untraceable when
        # multiple config files are loaded.
        raise SyntaxError(f"Config file {filename} has syntax error!") from e
def _cast_to_config(obj):
# if given a dict, return DictConfig instead
if isinstance(obj, dict):
return DictConfig(obj, flags={"allow_objects": True})
return obj
_CFG_PACKAGE_NAME = "detectron2._cfg_loader"
"""
A namespace to put all imported config into.
"""
def _random_package_name(filename):
# generate a random package name when loading config files
return _CFG_PACKAGE_NAME + str(uuid.uuid4())[:4] + "." + os.path.basename(filename)
@contextmanager
def _patch_import():
    """
    Enhance relative import statements in config files, so that they:
    1. locate files purely based on relative location, regardless of packages.
       e.g. you can import file without having __init__
    2. do not cache modules globally; modifications of module states has no side effect
    3. support other storage system through PathManager, so config files can be in the cloud
    4. imported dict are turned into omegaconf.DictConfig automatically
    """
    old_import = builtins.__import__

    def find_relative_file(original_file, relative_import_path, level):
        # Resolve "from ..a.b import c" issued inside a config file to the
        # concrete .py file it refers to.
        # NOTE: "from . import x" is not handled. Because then it's unclear
        # if such import should produce `x` as a python module or DictConfig.
        # This can be discussed further if needed.
        relative_import_err = """
Relative import of directories is not allowed within config files.
Within a config file, relative import can only import other config files.
""".replace(
            "\n", " "
        )
        if not len(relative_import_path):
            raise ImportError(relative_import_err)

        cur_file = os.path.dirname(original_file)
        # Each extra leading dot walks one directory up.
        for _ in range(level - 1):
            cur_file = os.path.dirname(cur_file)
        cur_name = relative_import_path.lstrip(".")
        for part in cur_name.split("."):
            cur_file = os.path.join(cur_file, part)
        if not cur_file.endswith(".py"):
            cur_file += ".py"
        if not PathManager.isfile(cur_file):
            cur_file_no_suffix = cur_file[: -len(".py")]
            if PathManager.isdir(cur_file_no_suffix):
                raise ImportError(f"Cannot import from {cur_file_no_suffix}." + relative_import_err)
            else:
                raise ImportError(
                    f"Cannot import name {relative_import_path} from "
                    f"{original_file}: {cur_file} does not exist."
                )
        return cur_file

    def new_import(name, globals=None, locals=None, fromlist=(), level=0):
        # Intercept only relative imports issued from inside a loaded
        # config module (identified by the synthetic package prefix);
        # everything else goes to the stock __import__.
        if (
            # Only deal with relative imports inside config files
            level != 0
            and globals is not None
            and (globals.get("__package__", "") or "").startswith(_CFG_PACKAGE_NAME)
        ):
            cur_file = find_relative_file(globals["__file__"], name, level)
            _validate_py_syntax(cur_file)
            spec = importlib.machinery.ModuleSpec(
                _random_package_name(cur_file), None, origin=cur_file
            )
            module = importlib.util.module_from_spec(spec)
            module.__file__ = cur_file
            with PathManager.open(cur_file) as f:
                content = f.read()
            exec(compile(content, cur_file, "exec"), module.__dict__)
            for name in fromlist:  # turn imported dict into DictConfig automatically
                val = _cast_to_config(module.__dict__[name])
                module.__dict__[name] = val
            return module
        return old_import(name, globals, locals, fromlist=fromlist, level=level)

    builtins.__import__ = new_import
    yield new_import
    # NOTE(review): restoration is not wrapped in try/finally, so an
    # exception raised inside the `with` body leaves the hook installed.
    builtins.__import__ = old_import
class LazyConfig:
    """
    Provide methods to save, load, and overrides an omegaconf config object
    which may contain definition of lazily-constructed objects.
    """

    @staticmethod
    def load_rel(filename: str, keys: Union[None, str, Tuple[str, ...]] = None):
        """
        Similar to :meth:`load()`, but load path relative to the caller's
        source file.

        This has the same functionality as a relative import, except that this method
        accepts filename as a string, so more characters are allowed in the filename.
        """
        caller_frame = inspect.stack()[1]
        caller_fname = caller_frame[0].f_code.co_filename
        assert caller_fname != "<string>", "load_rel Unable to find caller"
        caller_dir = os.path.dirname(caller_fname)
        filename = os.path.join(caller_dir, filename)
        return LazyConfig.load(filename, keys)

    @staticmethod
    def load(filename: str, keys: Union[None, str, Tuple[str, ...]] = None):
        """
        Load a config file.

        Args:
            filename: absolute path or relative path w.r.t. the current working directory
            keys: keys to load and return. If not given, return all keys
                (whose values are config objects) in a dict.
        """
        has_keys = keys is not None
        filename = filename.replace("/./", "/")  # redundant
        if os.path.splitext(filename)[1] not in [".py", ".yaml", ".yml"]:
            # Name the offending file; the previous f-string interpolated
            # nothing, leaving the error message useless.
            raise ValueError(f"Config file {filename} has to be a python or yaml file.")
        if filename.endswith(".py"):
            _validate_py_syntax(filename)
            with _patch_import():
                # Record the filename
                module_namespace = {
                    "__file__": filename,
                    "__package__": _random_package_name(filename),
                }
                with PathManager.open(filename) as f:
                    content = f.read()
                # Compile first with filename to:
                # 1. make filename appears in stacktrace
                # 2. make load_rel able to find its parent's (possibly remote) location
                exec(compile(content, filename, "exec"), module_namespace)
            ret = module_namespace
        else:
            with PathManager.open(filename) as f:
                obj = yaml.unsafe_load(f)
            ret = OmegaConf.create(obj, flags={"allow_objects": True})

        if has_keys:
            if isinstance(keys, str):
                return _cast_to_config(ret[keys])
            else:
                return tuple(_cast_to_config(ret[a]) for a in keys)
        else:
            if filename.endswith(".py"):
                # when not specified, only load those that are config objects
                ret = DictConfig(
                    {
                        name: _cast_to_config(value)
                        for name, value in ret.items()
                        if isinstance(value, (DictConfig, ListConfig, dict))
                        and not name.startswith("_")
                    },
                    flags={"allow_objects": True},
                )
            return ret

    @staticmethod
    def save(cfg, filename: str):
        """
        Save a config object to a yaml file.
        Note that when the config dictionary contains complex objects (e.g. lambda),
        it can't be saved to yaml. In that case we will print an error and
        attempt to save to a pkl file instead.

        Args:
            cfg: an omegaconf config object
            filename: yaml file name to save the config file
        """
        logger = logging.getLogger(__name__)
        try:
            cfg = deepcopy(cfg)
        except Exception:
            pass
        else:
            # if it's deep-copyable, then...
            def _replace_type_by_name(x):
                if "_target_" in x and callable(x._target_):
                    try:
                        x._target_ = _convert_target_to_string(x._target_)
                    except AttributeError:
                        pass

            # not necessary, but makes yaml looks nicer
            _visit_dict_config(cfg, _replace_type_by_name)

        save_pkl = False
        try:
            # Renamed from `dict`, which shadowed the builtin.
            config_dict = OmegaConf.to_container(
                cfg,
                # Do not resolve interpolation when saving, i.e. do not turn ${a} into
                # actual values when saving.
                resolve=False,
                # Save structures (dataclasses) in a format that can be instantiated later.
                # Without this option, the type information of the dataclass will be erased.
                structured_config_mode=SCMode.INSTANTIATE,
            )
            dumped = yaml.dump(config_dict, default_flow_style=None, allow_unicode=True, width=9999)
            with PathManager.open(filename, "w") as f:
                f.write(dumped)
            try:
                _ = yaml.unsafe_load(dumped)  # test that it is loadable
            except Exception:
                logger.warning(
                    "The config contains objects that cannot serialize to a valid yaml. "
                    f"{filename} is human-readable but cannot be loaded."
                )
                save_pkl = True
        except Exception:
            logger.exception("Unable to serialize the config to yaml. Error:")
            save_pkl = True

        if save_pkl:
            new_filename = filename + ".pkl"
            try:
                # retry by pickle
                with PathManager.open(new_filename, "wb") as f:
                    cloudpickle.dump(cfg, f)
                logger.warning(f"Config is saved using cloudpickle at {new_filename}.")
            except Exception:
                pass

    @staticmethod
    def apply_overrides(cfg, overrides: List[str]):
        """
        In-place override contents of cfg.

        Args:
            cfg: an omegaconf config object
            overrides: list of strings in the format of "a=b" to override configs.
                See https://hydra.cc/docs/next/advanced/override_grammar/basic/
                for syntax.

        Returns:
            the cfg object
        """

        def safe_update(cfg, key, value):
            # Refuse to write through a non-config intermediate node,
            # e.g. setting "a.b" when cfg.a is an int.
            parts = key.split(".")
            for idx in range(1, len(parts)):
                prefix = ".".join(parts[:idx])
                v = OmegaConf.select(cfg, prefix, default=None)
                if v is None:
                    break
                if not OmegaConf.is_config(v):
                    raise KeyError(
                        f"Trying to update key {key}, but {prefix} "
                        f"is not a config, but has type {type(v)}."
                    )
            OmegaConf.update(cfg, key, value, merge=True)

        from hydra.core.override_parser.overrides_parser import OverridesParser

        parser = OverridesParser.create()
        overrides = parser.parse_overrides(overrides)
        for o in overrides:
            key = o.key_or_group
            value = o.value()
            if o.is_delete():
                # TODO support this
                raise NotImplementedError("deletion is not yet a supported override")
            safe_update(cfg, key, value)
        return cfg

    @staticmethod
    def to_py(cfg, prefix: str = "cfg."):
        """
        Try to convert a config object into Python-like psuedo code.

        Note that perfect conversion is not always possible. So the returned
        results are mainly meant to be human-readable, and not meant to be executed.

        Args:
            cfg: an omegaconf config object
            prefix: root name for the resulting code (default: "cfg.")

        Returns:
            str of formatted Python code
        """
        import black

        cfg = OmegaConf.to_container(cfg, resolve=True)

        def _to_str(obj, prefix=None, inside_call=False):
            if prefix is None:
                prefix = []
            if isinstance(obj, abc.Mapping) and "_target_" in obj:
                # Dict representing a function call
                target = _convert_target_to_string(obj.pop("_target_"))
                args = []
                for k, v in sorted(obj.items()):
                    args.append(f"{k}={_to_str(v, inside_call=True)}")
                args = ", ".join(args)
                call = f"{target}({args})"
                return "".join(prefix) + call
            elif isinstance(obj, abc.Mapping) and not inside_call:
                # Dict that is not inside a call is a list of top-level config objects that we
                # render as one object per line with dot separated prefixes
                key_list = []
                for k, v in sorted(obj.items()):
                    if isinstance(v, abc.Mapping) and "_target_" not in v:
                        key_list.append(_to_str(v, prefix=prefix + [k + "."]))
                    else:
                        key = "".join(prefix) + k
                        key_list.append(f"{key}={_to_str(v)}")
                return "\n".join(key_list)
            elif isinstance(obj, abc.Mapping):
                # Dict that is inside a call is rendered as a regular dict
                return (
                    "{"
                    + ",".join(
                        f"{repr(k)}: {_to_str(v, inside_call=inside_call)}"
                        for k, v in sorted(obj.items())
                    )
                    + "}"
                )
            elif isinstance(obj, list):
                return "[" + ",".join(_to_str(x, inside_call=inside_call) for x in obj) + "]"
            else:
                return repr(obj)

        py_str = _to_str(cfg, prefix=[prefix])
        try:
            return black.format_str(py_str, mode=black.Mode())
        except black.InvalidInput:
            return py_str
| {
"content_hash": "4b0576622a651d1ae1a3a46888b87b03",
"timestamp": "",
"source": "github",
"line_count": 419,
"max_line_length": 100,
"avg_line_length": 38.031026252983295,
"alnum_prop": 0.5592092877314089,
"repo_name": "facebookresearch/detectron2",
"id": "3b80f3787ca156a617e2b35e56515d9dd9105060",
"size": "15987",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "detectron2/config/lazy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "79417"
},
{
"name": "CMake",
"bytes": "616"
},
{
"name": "Cuda",
"bytes": "112955"
},
{
"name": "Dockerfile",
"bytes": "3209"
},
{
"name": "Python",
"bytes": "3261609"
},
{
"name": "Shell",
"bytes": "14448"
}
],
"symlink_target": ""
} |
from runner.koan import *
class AboutNewStyleClasses(Koan):
    """Koan contrasting Python 2 old-style and new-style classes.

    Python 2 only: `classobj`, the `instance` type name, and subscripting
    an exception (`ex[0]`) all disappeared in Python 3.
    """

    class OldStyleClass:
        "An old style class"
        # Original class style have been phased out in Python 3.

    class NewStyleClass(object):
        "A new style class"
        # Introduced in Python 2.2
        #
        # Aside from this set of tests, Python Koans sticks exclusively to this
        # kind of class
        pass

    def test_new_style_classes_inherit_from_object_base_class(self):
        self.assertEqual(True, issubclass(self.NewStyleClass, object))
        self.assertEqual(False, issubclass(self.OldStyleClass, object))

    def test_new_style_classes_have_more_attributes(self):
        # Old-style classes expose only __doc__ and __module__.
        self.assertEqual(2, len(dir(self.OldStyleClass)))
        self.assertEqual("An old style class", self.OldStyleClass.__doc__)
        self.assertEqual("koans.about_new_style_classes", self.OldStyleClass.__module__)
        self.assertEqual(18, len(dir(self.NewStyleClass)))
        # To examine the available attributes, run
        # 'dir(<Class name goes here>)'
        # from a python console

    # ------------------------------------------------------------------

    def test_old_style_classes_have_type_but_no_class_attribute(self):
        self.assertEqual("classobj", type(self.OldStyleClass).__name__)
        try:
            cls = self.OldStyleClass.__class__.__name__
        except Exception as ex:
            pass
        # What was that error message from the exception?
        self.assertMatch( "class OldStyleClass has no attribute", ex[0])

    def test_new_style_classes_have_same_class_as_type(self):
        new_style = self.NewStyleClass()
        self.assertEqual(type, self.NewStyleClass.__class__)
        self.assertEqual(
            True,
            type(self.NewStyleClass) == self.NewStyleClass.__class__)

    # ------------------------------------------------------------------

    def test_in_old_style_instances_class_is_different_to_type(self):
        old_style = self.OldStyleClass()
        self.assertEqual("OldStyleClass", old_style.__class__.__name__)
        self.assertEqual("instance", type(old_style).__name__)

    def test_new_style_instances_have_same_class_as_type(self):
        new_style = self.NewStyleClass()
        self.assertEqual("NewStyleClass", new_style.__class__.__name__)
        self.assertEqual(True, type(new_style) == new_style.__class__)
| {
"content_hash": "5c121fc105686140429bea523b1ca29b",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 88,
"avg_line_length": 39.36065573770492,
"alnum_prop": 0.6084964598084132,
"repo_name": "exaroth/python_koans",
"id": "8c66081849bccf27795bfb78cacade1e2de41ebc",
"size": "2448",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python2/koans/about_new_style_classes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "331702"
},
{
"name": "Shell",
"bytes": "1599"
}
],
"symlink_target": ""
} |
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_vrf_interface
extends_documentation_fragment: nxos
version_added: "2.1"
short_description: Manages interface specific VRF configuration.
description:
- Manages interface specific VRF configuration.
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- VRF needs to be added globally with M(nxos_vrf) before
adding a VRF to an interface.
- Remove a VRF from an interface will still remove
all L3 attributes just as it does from CLI.
- VRF is not read from an interface until IP address is
configured on that interface.
options:
vrf:
description:
- Name of VRF to be managed.
required: true
interface:
description:
- Full name of interface to be managed, i.e. Ethernet1/1.
required: true
state:
description:
- Manages desired state of the resource.
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- name: Ensure vrf ntc exists on Eth1/1
nxos_vrf_interface:
vrf: ntc
interface: Ethernet1/1
state: present
- name: Ensure ntc VRF does not exist on Eth1/1
nxos_vrf_interface:
vrf: ntc
interface: Ethernet1/1
state: absent
'''
RETURN = '''
commands:
description: commands sent to the device
returned: always
type: list
sample: ["interface loopback16", "vrf member ntc"]
'''
import re
from ansible.module_utils.network.nxos.nxos import load_config, run_commands
from ansible.module_utils.network.nxos.nxos import get_capabilities, nxos_argument_spec
from ansible.module_utils.network.nxos.nxos import get_interface_type
from ansible.module_utils.basic import AnsibleModule
def execute_show_command(command, module):
    """Run a single show command on the device and return its body.

    'show run' output is only available as text; any other show command
    is requested as json so callers receive structured data.
    """
    output_format = 'text' if 'show run' in command else 'json'
    request = [{'command': command, 'output': output_format}]
    return run_commands(module, request)[0]
def get_interface_mode(interface, intf_type, module):
    """Return the switching mode of an interface.

    Args:
        interface: full interface name, e.g. Ethernet1/1.
        intf_type: interface type as returned by get_interface_type.
        module: AnsibleModule used to query the device.

    Returns:
        'layer2', 'layer3', or 'unknown' when the mode cannot be determined.
    """
    command = 'show interface {0}'.format(interface)
    # Removed a dead `interface = {}` assignment that clobbered the
    # parameter and was never read.
    mode = 'unknown'
    if intf_type in ('ethernet', 'portchannel'):
        body = execute_show_command(command, module)
        try:
            interface_table = body['TABLE_interface']['ROW_interface']
        except KeyError:
            return mode
        if not (interface_table and isinstance(interface_table, dict)):
            return mode
        # access/trunk both mean layer2; any other value reported by the
        # device (default 'layer3') is returned as-is.
        mode = str(interface_table.get('eth_mode', 'layer3'))
        if mode in ('access', 'trunk'):
            mode = 'layer2'
    elif intf_type in ('loopback', 'svi'):
        mode = 'layer3'
    return mode
def get_vrf_list(module):
    """Return the names of all VRFs configured on the device."""
    body = execute_show_command('show vrf all', module)
    try:
        vrf_table = body['TABLE_vrf']['ROW_vrf']
    except (KeyError, AttributeError):
        # No VRF table in the reply -> nothing to report.
        return []
    return [str(row['vrf_name']) for row in vrf_table]
def get_interface_info(interface, module):
    """Return the VRF name configured on ``interface``, or '' if none."""
    if not interface.startswith('loopback'):
        # NOTE(review): names appear capitalized in the running config
        # except loopbacks -- matches how the caller lowercases input.
        interface = interface.capitalize()
    command = 'show run interface {0}'.format(interface)
    vrf_regex = r".*vrf\s+member\s+(?P<vrf>\S+).*"
    try:
        running_config = execute_show_command(command, module)
        match = re.match(vrf_regex, running_config, re.DOTALL)
        # match is None when no 'vrf member' line exists -> AttributeError.
        return match.groupdict()["vrf"]
    except (AttributeError, TypeError):
        return ""
def is_default(interface, module):
    """Check whether ``interface`` carries no configuration.

    Returns True/False, or the sentinel string 'DNE' when the interface
    does not exist on the device.
    """
    command = 'show run interface {0}'.format(interface)
    try:
        config_lines = execute_show_command(command, module).split('\n')
        # A default interface's running config ends at the bare
        # 'interface ...' header with nothing configured under it.
        return config_lines[-1].startswith('interface')
    except (KeyError, IndexError):
        return 'DNE'
def main():
    """Ansible entry point: ensure VRF membership state on an interface."""
    argument_spec = dict(
        vrf=dict(required=True),
        interface=dict(type='str', required=True),
        state=dict(default='present', choices=['present', 'absent'], required=False),
    )
    argument_spec.update(nxos_argument_spec)
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
    warnings = list()
    results = {'changed': False, 'commands': [], 'warnings': warnings}
    vrf = module.params['vrf']
    interface = module.params['interface'].lower()
    state = module.params['state']
    device_info = get_capabilities(module)
    network_api = device_info.get('network_api', 'nxapi')
    current_vrfs = get_vrf_list(module)
    # Warn (not fail) if the VRF itself is missing; nxos_vrf creates it.
    if vrf not in current_vrfs:
        warnings.append("The VRF is not present/active on the device. "
                        "Use nxos_vrf to fix this.")
    intf_type = get_interface_type(interface)
    # Logical interfaces may not exist yet; over CLI we can detect that.
    if (intf_type != 'ethernet' and network_api == 'cliconf'):
        if is_default(interface, module) == 'DNE':
            module.fail_json(msg="interface does not exist on switch. Verify "
                                 "switch platform or create it first with "
                                 "nxos_interface if it's a logical interface")
    # VRF membership is an L3 attribute; refuse to touch L2 ports.
    mode = get_interface_mode(interface, intf_type, module)
    if mode == 'layer2':
        module.fail_json(msg='Ensure interface is a Layer 3 port before '
                             'configuring a VRF on an interface. You can '
                             'use nxos_interface')
    current_vrf = get_interface_info(interface, module)
    existing = dict(interface=interface, vrf=current_vrf)
    changed = False
    if not existing['vrf']:
        pass
    elif vrf != existing['vrf'] and state == 'absent':
        module.fail_json(msg='The VRF you are trying to remove '
                             'from the interface does not exist '
                             'on that interface.',
                         interface=interface, proposed_vrf=vrf,
                         existing_vrf=existing['vrf'])
    # Build the command list: only emit config when the state differs.
    commands = []
    if existing:
        if state == 'absent':
            if existing and vrf == existing['vrf']:
                command = 'no vrf member {0}'.format(vrf)
                commands.append(command)
        elif state == 'present':
            if existing['vrf'] != vrf:
                command = 'vrf member {0}'.format(vrf)
                commands.append(command)
    if commands:
        commands.insert(0, 'interface {0}'.format(interface))
    if commands:
        if module.check_mode:
            # Check mode: report what would change without pushing config.
            module.exit_json(changed=True, commands=commands)
        else:
            load_config(module, commands)
            changed = True
            if 'configure' in commands:
                commands.pop(0)
    results['commands'] = commands
    results['changed'] = changed
    module.exit_json(**results)
# Run the module when invoked directly by the Ansible runner.
if __name__ == '__main__':
    main()
| {
"content_hash": "bc2d77583cbe945c6ea6e5a36957a07c",
"timestamp": "",
"source": "github",
"line_count": 239,
"max_line_length": 87,
"avg_line_length": 29.690376569037657,
"alnum_prop": 0.5985062006764374,
"repo_name": "thaim/ansible",
"id": "4dcb3bc71bd1cd50c60c2b019eb1689543c1f62f",
"size": "7771",
"binary": false,
"copies": "16",
"ref": "refs/heads/fix-broken-link",
"path": "lib/ansible/modules/network/nxos/nxos_vrf_interface.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7"
},
{
"name": "Shell",
"bytes": "246"
}
],
"symlink_target": ""
} |
# Python 2 scratch file: Lisp-style singly linked lists built from nested
# 2-tuples, with None playing the role of nil.


def mklist(*args):
    # Build (1, (2, (... (n, None)))) by consing from the right.
    result = None
    for element in reversed(args):
        result = (element, result)
    return result


print mklist(1,2,3,4,5,6)

# The same toolkit rewritten as one-liners (`reduce` is a builtin in
# Python 2).
cons = lambda el, lst: (el, lst)
mklist = lambda *args: reduce(lambda lst, el: cons(el, lst), reversed(args), None)
car = lambda lst: lst[0] if lst else lst
cdr = lambda lst: lst[1] if lst else lst
nth = lambda n, lst: nth(n-1, cdr(lst)) if n > 0 else car(lst)
length = lambda lst, count=0: length(cdr(lst), count+1) if lst else count
begin = lambda *args: args[-1]
# NOTE(review): `w` is not defined in this chunk -- presumably a write
# helper such as sys.stdout.write; confirm before calling `display`.
display = lambda lst: begin(w("%s " % car(lst)), display(cdr(lst))) if lst else w("nil\n")
| {
"content_hash": "2e11226431e669b60f4cb339a01601be",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 90,
"avg_line_length": 38.4375,
"alnum_prop": 0.6439024390243903,
"repo_name": "gusyussh/learntosolveit",
"id": "8924a04b0bc5fa04766ce4bdeaa7f50467da2827",
"size": "615",
"binary": false,
"copies": "2",
"ref": "refs/heads/version1",
"path": "languages/interview/linkedlist.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Brainfuck",
"bytes": "168"
},
{
"name": "C",
"bytes": "284581"
},
{
"name": "C++",
"bytes": "3067"
},
{
"name": "CSS",
"bytes": "3129"
},
{
"name": "Go",
"bytes": "569"
},
{
"name": "HTML",
"bytes": "15878"
},
{
"name": "Java",
"bytes": "138605"
},
{
"name": "JavaScript",
"bytes": "722"
},
{
"name": "Makefile",
"bytes": "3889"
},
{
"name": "Perl",
"bytes": "103"
},
{
"name": "Python",
"bytes": "378446"
},
{
"name": "Ruby",
"bytes": "11170"
},
{
"name": "Scala",
"bytes": "8545"
}
],
"symlink_target": ""
} |
import os
import sys
from builtins import input
def check_debian_user():
    # Check whether the current user is the kolibri user when running kolibri
    # that is installed from .deb package.
    # The code is mainly from https://github.com/learningequality/ka-lite/blob/master/bin/kalite#L53
    #
    # Only applies on POSIX systems where the Debian package wrote
    # /etc/kolibri/username; everywhere else this is a no-op.
    if not os.name == "posix" or not os.path.isfile("/etc/kolibri/username"):
        return
    with open("/etc/kolibri/username", "r") as f:
        kolibri_user = f.read().rstrip()
    current_user = os.environ.get("USER")
    # No configured user, or we already are that user: nothing to warn about.
    if not kolibri_user or kolibri_user == current_user:
        return
    kolibri_home = os.path.expanduser(os.environ.get(
        "KOLIBRI_HOME", "~/.kolibri"))
    # If this user already has a populated KOLIBRI_HOME, assume the
    # mismatch is intentional and stay silent.
    if os.path.exists(kolibri_home) and os.listdir(kolibri_home):
        return
    sys.stderr.write((
        "You are running this command as the user '{current_user}', "
        "but Kolibri was originally installed to run as the user '{kolibri_user}'.\n"
        "This may result in unexpected behavior, "
        "because the two users will each use their own local databases and content.\n\n"
    ).format(current_user=current_user, kolibri_user=kolibri_user))
    sys.stderr.write((
        "If you'd like to run the command as '{}', you can try:\n\n"
        "    sudo su {} -c '<command>'\n\n"
    ).format(kolibri_user, kolibri_user))
    # Interactive confirmation; anything other than 'y' aborts cleanly.
    cont = input(
        "Alternatively, would you like to continue and "
        "run the command as '{}'? [y/N] ".format(current_user))
    if not cont.strip().lower() == "y":
        sys.exit(0)
| {
"content_hash": "dc76fad65c6d555956e9b5bf1f3e9d71",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 100,
"avg_line_length": 39.41025641025641,
"alnum_prop": 0.6402081977878985,
"repo_name": "benjaoming/kolibri",
"id": "6b95d566413a0292a0e2a860e3cd53b8881a62be",
"size": "1537",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "kolibri/utils/debian_check.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "864"
},
{
"name": "CSS",
"bytes": "29176"
},
{
"name": "Dockerfile",
"bytes": "1872"
},
{
"name": "HTML",
"bytes": "12616"
},
{
"name": "JavaScript",
"bytes": "799257"
},
{
"name": "Makefile",
"bytes": "8232"
},
{
"name": "Python",
"bytes": "1241333"
},
{
"name": "Shell",
"bytes": "10412"
},
{
"name": "Vue",
"bytes": "815069"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from models import Request

# Expose the Request model in the Django admin with default ModelAdmin
# options.
admin.site.register(Request)
| {
"content_hash": "ed0f013c34a6bf117553c497976a2f3c",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 32,
"avg_line_length": 22.5,
"alnum_prop": 0.8333333333333334,
"repo_name": "nbeck90/city-swap",
"id": "4c904bb9b2ce5a5a19cd8d7ccca67f7b60be85c4",
"size": "90",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cityswap/requests/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3141"
},
{
"name": "HTML",
"bytes": "10887"
},
{
"name": "Python",
"bytes": "18409"
}
],
"symlink_target": ""
} |
import os
import ConfigParser

# web2py model file (Python 2): `request`, `auth`, `response`, `URL` and
# `XML` are injected by the framework at execution time.
set_f = os.path.join(request.env.web2py_path, 'applications', 'ferry_boat',
                     'private', 'settings.ini')
Config = ConfigParser.ConfigParser()
Config.read(set_f)
try:
    # Keep the auth 'register' action in sync with the AllowRegister flag.
    if Config.getboolean('General', 'AllowRegister'):
        if 'register' in auth.settings.actions_disabled:
            auth.settings.actions_disabled.remove('register')
    else:
        if 'register' not in auth.settings.actions_disabled:
            auth.settings.actions_disabled.append('register')
except (ConfigParser.Error, ValueError):
    # Missing section/option or a non-boolean value. The former bare
    # `except:` also swallowed control-flow exceptions (e.g. web2py's
    # HTTP redirects); catch only the configuration errors.
    response.flash = XML('Invalid configuration detected: <a href="%s">Fix</a>' % URL('default', 'settings'))
| {
"content_hash": "c4036e0b390534bc2608ed83c1d3e87d",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 106,
"avg_line_length": 40.6,
"alnum_prop": 0.7044334975369458,
"repo_name": "Titosoft/ferry-boat",
"id": "150c19040fed0dcd9f2ec6e619232cd61a96f880",
"size": "633",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web2py/applications/ferry_boat/models/z_settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "230159"
},
{
"name": "JavaScript",
"bytes": "305848"
},
{
"name": "Perl",
"bytes": "1688"
},
{
"name": "Python",
"bytes": "6385104"
},
{
"name": "Shell",
"bytes": "90609"
}
],
"symlink_target": ""
} |
"""A fake server that "responds" to API methods with pre-canned responses.
All of these responses come from the spec, so if for some reason the spec's
wrong the tests might raise AssertionError. I've indicated in comments the
places where actual behavior differs from the spec.
"""
from keystoneclient import auth
from keystoneclient import session
from heat.common import context
class FakeClient(object):
    """Assertion helpers shared by fake service clients in tests.

    Subclasses must provide ``self.client`` whose ``callstack`` is a list
    of ``(method, url, body)`` tuples recording each fake HTTP call.
    """

    def assert_called(self, method, url, body=None, pos=-1):
        """Assert that an API method was just called.

        Args:
            method: expected HTTP method, e.g. 'GET'.
            url: expected URL path.
            body: if given, also compare the recorded request body.
            pos: callstack index to inspect (default: the last call).
        """
        expected = (method, url)
        # Check emptiness before indexing: previously the index ran first,
        # so an empty callstack raised IndexError and this assertion's
        # message could never be reported.
        assert self.client.callstack, ("Expected %s %s "
                                       "but no calls were made." % expected)
        called = self.client.callstack[pos][0:2]
        assert expected == called, 'Expected %s %s; got %s %s' % (
            expected + called)
        if body is not None:
            assert self.client.callstack[pos][2] == body

    def assert_called_anytime(self, method, url, body=None):
        """Assert that an API method was called at any point in the test.

        On success the callstack is cleared as a side effect.
        """
        expected = (method, url)
        assert self.client.callstack, ("Expected %s %s but no calls "
                                       "were made." % expected)
        entry = None
        found = False
        for entry in self.client.callstack:
            if expected == entry[0:2]:
                found = True
                break
        assert found, 'Expected %s %s; got %s' % (expected,
                                                  self.client.callstack)
        if body is not None:
            try:
                assert entry[2] == body
            except AssertionError:
                # Print both sides for easier test debugging, then re-raise.
                print(entry[2])
                print("!=")
                print(body)
                raise
        self.client.callstack = []

    def clear_callstack(self):
        """Drop all recorded calls."""
        self.client.callstack = []

    def authenticate(self):
        """No-op: fake clients never authenticate."""
        pass
class FakeAuth(auth.BaseAuthPlugin):
    """Keystone auth plugin double returning a canned token and endpoint.

    Args:
        auth_token: token returned by get_token.
        only_services: optional whitelist of service types; endpoints for
            any other service type resolve to None.
    """

    def __init__(self, auth_token='abcd1234', only_services=None):
        self.auth_token = auth_token
        self.only_services = only_services

    def get_token(self, session, **kwargs):
        # Always "authenticates" successfully with the canned token.
        return self.auth_token

    def get_endpoint(self, session, service_type=None, **kwargs):
        if (self.only_services is not None and
                service_type not in self.only_services):
            return None
        return 'http://example.com:1234/v1'
class FakeKeystoneClient(object):
    """In-memory stand-in for heat's keystone client wrapper.

    Holds the stack-user and EC2-credential state given to the constructor
    and answers the management calls from that state without touching
    keystone.
    """

    def __init__(self, username='test_username', password='password',
                 user_id='1234', access='4567', secret='8901',
                 credential_id='abcdxyz', auth_token='abcd1234',
                 context=None, stack_domain_id='4321', roles=None):
        self.username = username
        self.password = password
        self.user_id = user_id
        self.access = access
        self.secret = secret
        self.session = session.Session()
        self.credential_id = credential_id
        self.token = auth_token
        self.context = context
        self.v3_endpoint = 'http://localhost:5000/v3'
        self.stack_domain_id = stack_domain_id
        self.roles = roles or []

        # Single canned EC2 credential handed out by the keypair calls.
        class FakeCred(object):
            id = self.credential_id
            access = self.access
            secret = self.secret
        self.creds = FakeCred()

    def create_stack_user(self, username, password=''):
        self.username = username
        return self.user_id

    def delete_stack_user(self, user_id):
        self.user_id = None

    def get_ec2_keypair(self, access, user_id):
        # Validates both identifiers against the canned state.
        if user_id == self.user_id:
            if access == self.access:
                return self.creds
            else:
                raise ValueError("Unexpected access %s" % access)
        else:
            raise ValueError("Unexpected user_id %s" % user_id)

    def create_ec2_keypair(self, user_id):
        # Returns None for an unknown user_id.
        if user_id == self.user_id:
            return self.creds

    def delete_ec2_keypair(self, credential_id=None, user_id=None,
                           access=None):
        if user_id == self.user_id and access == self.creds.access:
            self.creds = None
        else:
            raise Exception('Incorrect user_id or access')

    def enable_stack_user(self, user_id):
        pass

    def disable_stack_user(self, user_id):
        pass

    def create_trust_context(self):
        # Context carrying a fixed trust id, as the real client would
        # return after creating a trust.
        return context.RequestContext(username=self.username,
                                      password=self.password,
                                      is_admin=False,
                                      trust_id='atrust',
                                      trustor_user_id=self.user_id)

    def delete_trust(self, trust_id):
        pass

    def delete_stack_domain_project(self, project_id):
        pass

    def create_stack_domain_project(self, stack_id):
        return 'aprojectid'

    def create_stack_domain_user(self, username, project_id, password=None):
        return self.user_id

    def delete_stack_domain_user(self, user_id, project_id):
        pass

    def create_stack_domain_user_keypair(self, user_id, project_id):
        return self.creds

    def enable_stack_domain_user(self, user_id, project_id):
        pass

    def disable_stack_domain_user(self, user_id, project_id):
        pass

    def delete_stack_domain_user_keypair(self, user_id, project_id,
                                         credential_id):
        pass

    def stack_domain_user_token(self, user_id, project_id, password):
        return 'adomainusertoken'

    @property
    def auth_token(self):
        # Prefer the live token from the request context when one is set.
        if self.context is not None:
            return self.context.auth_plugin.get_token(self.session)
        else:
            return self.token

    @property
    def auth_ref(self):
        return FakeAccessInfo(roles=self.roles)
class FakeAccessInfo(object):
    """Stub of a keystone access-info object exposing only role names."""

    def __init__(self, roles):
        self.roles = roles

    @property
    def role_names(self):
        # The fake stores role names directly, so just echo them back.
        return self.roles
class FakeEventSink(object):
    """Collects consumed events and signals a waiter after each one."""

    def __init__(self, evt):
        self.events = []
        self.evt = evt

    def consume(self, stack, event):
        # Record the event, then wake whoever is waiting on evt.
        self.events.append(event)
        self.evt.send(None)
| {
"content_hash": "3a4c77b23bcfa37162a827fc4c9bb6ee",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 76,
"avg_line_length": 30.294117647058822,
"alnum_prop": 0.570873786407767,
"repo_name": "dims/heat",
"id": "60bb4c6c55225b24915aad091df2a69e610cb7f3",
"size": "6755",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "heat/tests/fakes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "7618889"
},
{
"name": "Shell",
"bytes": "32548"
}
],
"symlink_target": ""
} |
import tweepy
import time
from credentials import settings
from twitter_stream import twstream
def reply(api, tweet):
    """Reply to every captured tweet.

    :param api: tweepy api method
    :param tweet: [[username, msg, id]]
    :return: None
    """
    # Mentioning the user (@name) in a status update creates the reply.
    suffix = (" have you heard of tourzan.com its a good travel resource. "
              "the travel tips and guides are top notch.")
    for entry in tweet:
        api.update_status(".@" + entry[0] + suffix)
def time_line(api):
    """Stream tweets matching the tracked travel/tourism keywords.

    :param api: tweepy api method (its auth handler is reused)
    :return: json format dictionary type data
    """
    keywords = ['@twitter', 'tourist', 'traveling', 'tours', 'tour guides', 'tours for the disabled', 'ADA tours',
                'tours for kids', 'jobs for college students', 'jobs for the elderly', 'travel guide', 'international',
                'overseas']
    listener = twstream.TwitterStream()
    raw_stream = listener.tweet_stream(api.auth, keywords)
    # Decode the raw stream into dictionaries before handing it back.
    return listener.decoder(raw_stream)
def main():
    """Authenticate with Twitter, then reply to streamed tweets forever."""
    # TODO: store previous id's to prevent spamming
    # TODO: intelligent text
    auth = tweepy.OAuthHandler(settings.CONSUMER_KEY, settings.CONSUMER_SECRET)
    auth.set_access_token(settings.ACCESS_KEY, settings.ACCESS_SECRET)
    api = tweepy.API(auth)
    # Poll the stream in a loop; sleep briefly between batches to avoid
    # hammering the API.
    while True:
        for t in time_line(api):
            reply(api, t)
        print 'sleeping'
        time.sleep(5)


if __name__ == '__main__':
    main()
| {
"content_hash": "ed38eb254da87cd5b424ae6f5f1c4614",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 119,
"avg_line_length": 30.76,
"alnum_prop": 0.6326397919375812,
"repo_name": "castaway2000/marketing_twitterbot",
"id": "12fcc3b00b79d93190382af6f8556d5ce3934903",
"size": "1610",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tweetbot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3297"
}
],
"symlink_target": ""
} |
"""
component_params.py
Note the places you need to change to make it work for you.
They are marked with keyword 'TODO'.
"""
## TODO: here goes the list of the input files. Use flags:
## '__REQUIRED__' to make it required
## '__FLAG__' to make it a flag or switch.
input_files = {
# 'input_file1' : '__REQUIRED__',
# 'input_file2' : None
}
## TODO: here goes the list of the output files.
output_files = {
# 'output_file1' : '__REQUIRED__',
# 'output_file1' : None
}
## TODO: here goes the list of the input parameters excluding input/output files.
input_params = {
# 'input_param1' : '__REQUIRED__',
# 'input_param2' : '__FLAG__',
# 'input_param3' : None
}
## TODO: here goes the return value of the component_seed.
## DO NOT USE, Not implemented yet!
return_value = []
| {
"content_hash": "00df2b75ab0bd55f825d99ebf2553351",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 81,
"avg_line_length": 28.941176470588236,
"alnum_prop": 0.516260162601626,
"repo_name": "jtaghiyar/kronos",
"id": "242b2d641134d27faf171e5211c6a1d993c7bef4",
"size": "984",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "templates/component_params.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "143560"
}
],
"symlink_target": ""
} |
from magnum.common.pythonk8sclient.swagger_client import api_client
from magnum.common.pythonk8sclient.swagger_client.apis import apiv_api
from magnum.tests.functional.python_client_base import BayAPITLSTest
from magnum.tests.functional.python_client_base import BayTest
from magnumclient.openstack.common.apiclient import exceptions
class TestBayModelResource(BayTest):
    """Functional create/delete test for a Kubernetes baymodel."""
    # Container orchestration engine under test.
    coe = 'kubernetes'

    def test_baymodel_create_and_delete(self):
        """Round-trip a baymodel through the inherited CRUD helper."""
        self._test_baymodel_create_and_delete('test_k8s_baymodel')
class TestBayResource(BayTest):
    """Functional create/delete test for a Kubernetes bay."""
    # Container orchestration engine under test.
    coe = 'kubernetes'

    def test_bay_create_and_delete(self):
        """Create a baymodel, build a bay from it, then delete both."""
        # Keep the baymodel alive (delete=False) while the bay uses it.
        baymodel_uuid = self._test_baymodel_create_and_delete(
            'test_k8s_baymodel', delete=False, tls_disabled=True)
        self._test_bay_create_and_delete('test_k8s_bay', baymodel_uuid)
class TestKubernetesAPIs(BayAPITLSTest):
    """Exercises pod/service/RC APIs against a TLS-enabled Kubernetes bay."""

    @classmethod
    def setUpClass(cls):
        """Build one TLS bay and a Kubernetes API client shared by all tests."""
        super(TestKubernetesAPIs, cls).setUpClass()
        cls.baymodel = cls._create_baymodel('testk8sAPI',
                                            coe='kubernetes',
                                            tls_disabled=False,
                                            network_driver='flannel',
                                            fixed_network='192.168.0.0/24',
                                            )
        cls.bay = cls._create_bay('testk8sAPI', cls.baymodel.uuid)
        # OpenSSL request config used to generate the TLS client cert.
        config_contents = """[req]
distinguished_name = req_distinguished_name
req_extensions = req_ext
prompt = no
[req_distinguished_name]
CN = Your Name
[req_ext]
extendedKeyUsage = clientAuth
"""
        cls._create_tls_ca_files(config_contents)
        cls.kube_api_url = cls.cs.bays.get(cls.bay.uuid).api_address
        # Authenticated, cert-based client for the bay's Kubernetes API.
        k8s_client = api_client.ApiClient(cls.kube_api_url,
                                          key_file=cls.key_file,
                                          cert_file=cls.cert_file,
                                          ca_certs=cls.ca_file)
        cls.k8s_api = apiv_api.ApivApi(k8s_client)

    @classmethod
    def tearDownClass(cls):
        """Delete the bay (waiting for completion), then the baymodel."""
        cls._delete_bay(cls.bay.uuid)
        try:
            cls._wait_on_status(cls.bay,
                                ["CREATE_COMPLETE",
                                 "DELETE_IN_PROGRESS", "CREATE_FAILED"],
                                ["DELETE_FAILED", "DELETE_COMPLETE"])
        except exceptions.NotFound:
            # Bay already gone; deletion finished before polling started.
            pass
        cls._delete_baymodel(cls.baymodel.uuid)

    def test_pod_apis(self):
        """Create, read and delete a pod through the namespaced pod API."""
        pod_manifest = {'apiVersion': 'v1',
                        'kind': 'Pod',
                        'metadata': {'color': 'blue', 'name': 'test'},
                        'spec': {'containers': [{'image': 'dockerfile/redis',
                                 'name': 'redis'}]}}
        resp = self.k8s_api.create_namespaced_pod(body=pod_manifest,
                                                  namespace='default')
        self.assertEqual('test', resp.metadata.name)
        self.assertTrue(resp.status.phase)

        resp = self.k8s_api.read_namespaced_pod(name='test',
                                                namespace='default')
        self.assertEqual('test', resp.metadata.name)
        self.assertTrue(resp.status.phase)

        resp = self.k8s_api.delete_namespaced_pod(name='test', body={},
                                                  namespace='default')

    def test_service_apis(self):
        """Create, read and delete a service in the default namespace."""
        service_manifest = {'apiVersion': 'v1',
                            'kind': 'Service',
                            'metadata': {'labels': {'name': 'frontend'},
                                         'name': 'frontend',
                                         'resourceversion': 'v1'},
                            'spec': {'ports': [{'port': 80,
                                                'protocol': 'TCP',
                                                'targetPort': 80}],
                                     'selector': {'name': 'frontend'}}}
        resp = self.k8s_api.create_namespaced_service(body=service_manifest,
                                                      namespace='default')
        self.assertEqual('frontend', resp.metadata.name)
        self.assertTrue(resp.status)

        resp = self.k8s_api.read_namespaced_service(name='frontend',
                                                    namespace='default')
        self.assertEqual('frontend', resp.metadata.name)
        self.assertTrue(resp.status)

        resp = self.k8s_api.delete_namespaced_service(name='frontend',
                                                      namespace='default')

    def test_replication_controller_apis(self):
        """Create, read and delete a 2-replica replication controller."""
        rc_manifest = {
            'apiVersion': 'v1',
            'kind': 'ReplicationController',
            'metadata': {'labels': {'name': 'frontend'},
                         'name': 'frontend'},
            'spec': {'replicas': 2,
                     'selector': {'name': 'frontend'},
                     'template': {'metadata': {
                         'labels': {'name': 'frontend'}},
                         'spec': {'containers': [{
                             'image': 'nginx',
                             'name': 'nginx',
                             'ports': [{'containerPort': 80,
                                        'protocol': 'TCP'}]}]}}}}

        resp = self.k8s_api.create_namespaced_replication_controller(
            body=rc_manifest, namespace='default')
        self.assertEqual('frontend', resp.metadata.name)
        self.assertEqual(2, resp.spec.replicas)

        resp = self.k8s_api.read_namespaced_replication_controller(
            name='frontend', namespace='default')
        self.assertEqual('frontend', resp.metadata.name)
        self.assertEqual(2, resp.spec.replicas)

        resp = self.k8s_api.delete_namespaced_replication_controller(
            name='frontend', body={}, namespace='default')

    """
    NB : Bug1504379. This is placeholder and will be removed when all
    the objects-from-bay patches are checked in.
    def test_pods_list(self):
        self.assertIsNotNone(self.cs.pods.list(self.bay.uuid))
    def test_rcs_list(self):
        self.assertIsNotNone(self.cs.rcs.list(self.bay.uuid))
    def test_services_list(self):
        self.assertIsNotNone(self.cs.services.list(self.bay.uuid))
    """
| {
"content_hash": "b32af8c63f9a3f2943162a50941406aa",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 77,
"avg_line_length": 42.26,
"alnum_prop": 0.5138034390282379,
"repo_name": "dimtruck/magnum",
"id": "24cbb910a20ec72cefd19a5b55e13be85294cd4c",
"size": "6881",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "magnum/tests/functional/k8s/test_k8s_python_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "569"
},
{
"name": "Mako",
"bytes": "349"
},
{
"name": "Python",
"bytes": "2804882"
},
{
"name": "Shell",
"bytes": "47241"
}
],
"symlink_target": ""
} |
'''
Created by auto_sdk on 2015.06.23
'''
from aliyun.api.base import RestApi
class Mts20140618SubmitAnalysisJobRequest(RestApi):
    """Request object for the MTS SubmitAnalysisJob API (version 2014-06-18)."""

    def __init__(self, domain='mts.aliyuncs.com', port=80):
        RestApi.__init__(self, domain, port)
        # Request parameters; the caller fills these in before sending.
        self.AnalysisConfig = None
        self.Input = None
        self.PipelineId = None
        self.UserData = None

    def getapiname(self):
        """Return the fully-qualified API name used for dispatch."""
        return 'mts.aliyuncs.com.SubmitAnalysisJob.2014-06-18'
| {
"content_hash": "9d5659964ca64ff1f0b9644ef82b1170",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 56,
"avg_line_length": 28.642857142857142,
"alnum_prop": 0.7381546134663342,
"repo_name": "francisar/rds_manager",
"id": "517ec224dd06d39c8f5070f4e59008c7413a8e68",
"size": "401",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aliyun/api/rest/Mts20140618SubmitAnalysisJobRequest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "259509"
},
{
"name": "Shell",
"bytes": "1481"
}
],
"symlink_target": ""
} |
"""
Tests that features that are currently unsupported in
either the Python or C parser are actually enforced
and are clearly communicated to the user.
Ultimately, the goal is to remove test cases from this
test suite as new feature support is added to the parsers.
"""
import pandas.io.parsers as parsers
import pandas.util.testing as tm
from pandas.compat import StringIO
from pandas.errors import ParserError
from pandas.io.parsers import read_csv, read_table
import pytest
@pytest.fixture(params=["python", "python-fwf"], ids=lambda val: val)
def python_engine(request):
    """Parametrized fixture yielding each Python-based parser engine name."""
    return request.param
class TestUnsupportedFeatures(object):
    """Checks that unsupported parser options fail loudly per engine."""

    def test_mangle_dupe_cols_false(self):
        # see gh-12935
        data = 'a b c\n1 2 3'
        msg = 'is not supported'

        # mangle_dupe_cols=False is rejected by both engines.
        for engine in ('c', 'python'):
            with tm.assert_raises_regex(ValueError, msg):
                read_csv(StringIO(data), engine=engine,
                         mangle_dupe_cols=False)

    def test_c_engine(self):
        # see gh-6607
        data = 'a b c\n1 2 3'
        msg = 'does not support'

        # specify C engine with unsupported options (raise)
        with tm.assert_raises_regex(ValueError, msg):
            read_table(StringIO(data), engine='c',
                       sep=None, delim_whitespace=False)
        with tm.assert_raises_regex(ValueError, msg):
            read_table(StringIO(data), engine='c', sep=r'\s')
        with tm.assert_raises_regex(ValueError, msg):
            read_table(StringIO(data), engine='c', quotechar=chr(128))
        with tm.assert_raises_regex(ValueError, msg):
            read_table(StringIO(data), engine='c', skipfooter=1)

        # specify C-unsupported options without python-unsupported options
        # (engine unspecified: should fall back to python with a warning)
        with tm.assert_produces_warning(parsers.ParserWarning):
            read_table(StringIO(data), sep=None, delim_whitespace=False)
        with tm.assert_produces_warning(parsers.ParserWarning):
            read_table(StringIO(data), quotechar=chr(128))
        with tm.assert_produces_warning(parsers.ParserWarning):
            read_table(StringIO(data), sep=r'\s')
        with tm.assert_produces_warning(parsers.ParserWarning):
            read_table(StringIO(data), skipfooter=1)

        # Malformed (ragged) data should raise a tokenizing error.
        text = """ A B C D E
one two three four
a b 10.0032 5 -0.5109 -2.3358 -0.4645 0.05076 0.3640
a q 20 4 0.4473 1.4152 0.2834 1.00661 0.1744
x q 30 3 -0.6662 -0.5243 -0.3580 0.89145 2.5838"""
        msg = 'Error tokenizing data'

        with tm.assert_raises_regex(ParserError, msg):
            read_table(StringIO(text), sep='\\s+')
        with tm.assert_raises_regex(ParserError, msg):
            read_table(StringIO(text), engine='c', sep='\\s+')

        msg = "Only length-1 thousands markers supported"
        data = """A|B|C
1|2,334|5
10|13|10.
"""
        with tm.assert_raises_regex(ValueError, msg):
            read_csv(StringIO(data), thousands=',,')
        with tm.assert_raises_regex(ValueError, msg):
            read_csv(StringIO(data), thousands='')

        msg = "Only length-1 line terminators supported"
        data = 'a,b,c~~1,2,3~~4,5,6'
        with tm.assert_raises_regex(ValueError, msg):
            read_csv(StringIO(data), lineterminator='~~')

    def test_python_engine(self, python_engine):
        from pandas.io.parsers import _python_unsupported as py_unsupported
        data = """1,2,3,,
1,2,3,4,
1,2,3,4,5
1,2,,,
1,2,3,4,"""

        # Every C-only option must raise a clear error on the python engine.
        for default in py_unsupported:
            msg = ('The %r option is not supported '
                   'with the %r engine' % (default, python_engine))

            kwargs = {default: object()}
            with tm.assert_raises_regex(ValueError, msg):
                read_csv(StringIO(data), engine=python_engine, **kwargs)

    def test_python_engine_file_no_next(self, python_engine):
        # see gh-16530
        # A file-like object without __next__ cannot be iterated by the
        # python engine and should raise a helpful ValueError.
        class NoNextBuffer(object):
            def __init__(self, csv_data):
                self.data = csv_data

            def __iter__(self):
                return self

            def read(self):
                return self.data

        data = "a\n1"
        msg = "The 'python' engine cannot iterate"

        with tm.assert_raises_regex(ValueError, msg):
            read_csv(NoNextBuffer(data), engine=python_engine)
class TestDeprecatedFeatures(object):
    """Checks that deprecated keyword arguments emit FutureWarning."""

    @pytest.mark.parametrize("engine", ["c", "python"])
    @pytest.mark.parametrize("kwargs", [{"tupleize_cols": True},
                                        {"tupleize_cols": False}])
    def test_deprecated_args(self, engine, kwargs):
        data = "1,2,3"
        arg, _ = list(kwargs.items())[0]

        # Passing the deprecated option must warn regardless of its value.
        with tm.assert_produces_warning(
                FutureWarning, check_stacklevel=False):
            read_csv(StringIO(data), engine=engine, **kwargs)
| {
"content_hash": "841fb7f5a97d432e9487bf1d118ea3a0",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 75,
"avg_line_length": 35.518248175182485,
"alnum_prop": 0.5990546650226058,
"repo_name": "louispotok/pandas",
"id": "3117f6fae55da0c9e5c1476edbec0eeb7240fd83",
"size": "4891",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "pandas/tests/io/parser/test_unsupported.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3847"
},
{
"name": "C",
"bytes": "432930"
},
{
"name": "C++",
"bytes": "17193"
},
{
"name": "HTML",
"bytes": "551714"
},
{
"name": "Makefile",
"bytes": "563"
},
{
"name": "PowerShell",
"bytes": "2970"
},
{
"name": "Python",
"bytes": "13452425"
},
{
"name": "Shell",
"bytes": "25056"
},
{
"name": "Smarty",
"bytes": "2045"
}
],
"symlink_target": ""
} |
from pythonforandroid.util import current_directory, ensure_dir
from pythonforandroid.toolchain import shprint
from pythonforandroid.recipe import Recipe
from multiprocessing import cpu_count
from os.path import join
import sh
class LibgeosRecipe(Recipe):
    """python-for-android recipe building libgeos and its C API as .so files."""

    version = '3.7.1'
    url = 'https://github.com/libgeos/libgeos/archive/{version}.zip'
    depends = []
    # Both the C++ core and the stable C wrapper are produced and shipped.
    built_libraries = {
        'libgeos.so': 'install_target/lib',
        'libgeos_c.so': 'install_target/lib'
    }
    # Requires the shared C++ STL to be packaged with the app.
    need_stl_shared = True

    def build_arch(self, arch):
        """Configure (cmake), build and install libgeos for one target arch."""
        source_dir = self.get_build_dir(arch.arch)
        # Out-of-tree build plus a local install prefix under the build dir.
        build_target = join(source_dir, 'build_target')
        install_target = join(source_dir, 'install_target')
        ensure_dir(build_target)
        with current_directory(build_target):
            env = self.get_recipe_env(arch)
            shprint(sh.cmake, source_dir,
                    '-DANDROID_ABI={}'.format(arch.arch),
                    '-DANDROID_NATIVE_API_LEVEL={}'.format(self.ctx.ndk_api),
                    '-DANDROID_STL=' + self.stl_lib_name,
                    '-DCMAKE_TOOLCHAIN_FILE={}'.format(
                        join(self.ctx.ndk_dir, 'build', 'cmake',
                             'android.toolchain.cmake')),
                    '-DCMAKE_INSTALL_PREFIX={}'.format(install_target),
                    '-DCMAKE_BUILD_TYPE=Release',
                    '-DGEOS_ENABLE_TESTS=OFF',
                    '-DBUILD_SHARED_LIBS=1',
                    _env=env)
            shprint(sh.make, '-j' + str(cpu_count()), _env=env)
            # We make the install because this way we will have all the
            # includes in one place (mostly we are interested in `geos_c.h`,
            # which is not in the include folder, so this way we make easier to
            # link with this library...case of shapely's recipe)
            shprint(sh.make, 'install', _env=env)


recipe = LibgeosRecipe()
| {
"content_hash": "721d01d51dafda245dabf60f358516f9",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 79,
"avg_line_length": 37.21153846153846,
"alnum_prop": 0.5762273901808785,
"repo_name": "germn/python-for-android",
"id": "cff9fe0f5e1f7f9a3e707d5a02518641213a7474",
"size": "1935",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "pythonforandroid/recipes/libgeos/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "70942"
},
{
"name": "C++",
"bytes": "491"
},
{
"name": "CMake",
"bytes": "250"
},
{
"name": "CSS",
"bytes": "3487"
},
{
"name": "HTML",
"bytes": "11631"
},
{
"name": "Java",
"bytes": "516674"
},
{
"name": "Makefile",
"bytes": "27280"
},
{
"name": "Python",
"bytes": "1346905"
},
{
"name": "Shell",
"bytes": "5340"
}
],
"symlink_target": ""
} |
import datetime as dt
import os
import time
from cltk.corpus.greek.tlg.parse_tlg_indices import get_epithet_of_author
from cltk.corpus.greek.tlg.parse_tlg_indices import get_id_author
import pandas
from sklearn.externals import joblib
from sklearn.feature_extraction.text import CountVectorizer
def stream_lemmatized_files(corpus_dir):
    """Yield (doc id, text) pairs for every file in a user-data corpus dir."""
    base = os.path.expanduser('~/cltk_data/user_data/' + corpus_dir)
    for name in os.listdir(base):
        with open(os.path.join(base, name)) as handle:
            #TODO rm words less the 3 chars long
            # The id strips a 3-char filename prefix and a 4-char suffix.
            yield name[3:-4], handle.read()
t0 = dt.datetime.utcnow()
map_id_author = get_id_author()
# NOTE: the original columns list was ['id', 'author' 'text', 'epithet'];
# the missing comma concatenated the strings into a bogus 'authortext'
# column. Fixed to declare the four intended columns.
df = pandas.DataFrame(columns=['id', 'author', 'text', 'epithet'])
for _id, text in stream_lemmatized_files('tlg_lemmatized_no_accents_no_stops'):
    author = map_id_author[_id]
    epithet = get_epithet_of_author(_id)
    df = df.append({'id': _id, 'author': author, 'text': text, 'epithet': epithet}, ignore_index=True)
print(df.shape)
print('... finished in {}'.format(dt.datetime.utcnow() - t0))
print('Number of texts:', len(df))
text_list = df['text'].tolist()
# make a list of short texts to drop
# For pres, get distributions of words per doc
short_text_drop_index = [index if len(text) > 500 else None for index, text in enumerate(text_list) ]  # ~100 words
t0 = dt.datetime.utcnow()
# TODO: Consider using generator to CV http://stackoverflow.com/a/21600406
# time & size counts, w/ 50 texts:
# 0:01:15 & 202M @ ngram_range=(1, 3), min_df=2, max_features=500
# 0:00:26 & 80M @ ngram_range=(1, 2), analyzer='word', min_df=2, max_features=5000
# 0:00:24 & 81M @ ngram_range=(1, 2), analyzer='word', min_df=2, max_features=50000
# time & size counts, w/ 1823 texts:
# 0:02:18 & 46MB @ ngram_range=(1, 1), analyzer='word', min_df=2, max_features=500000
# & @ ngram_range=(1, 2), analyzer='word', min_df=2, max_features=500000
max_features = 500000
ngrams = 2
vectorizer = CountVectorizer(ngram_range=(1, ngrams), analyzer='word',
                             min_df=2, max_features=max_features)
term_document_matrix = vectorizer.fit_transform(text_list)  # input is a list of strings, 1 per document
# Persist the fitted vectorizer so later runs can reuse the vocabulary.
vector_fp = os.path.expanduser('~/cltk_data/user_data/vectorizer_test_features{0}_ngrams{1}.pickle'.format(max_features, ngrams))
joblib.dump(vectorizer, vector_fp)
print('... finished in {}'.format(dt.datetime.utcnow() - t0))
| {
"content_hash": "1c82dc5cba30ab0fb6866c195867b62b",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 129,
"avg_line_length": 35.98550724637681,
"alnum_prop": 0.6830447039871124,
"repo_name": "kylepjohnson/notebooks",
"id": "d365751a2f16d4b8778d57c37d334ad429ceb6d7",
"size": "2483",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "public_talks/2016_10_26_harvard/scripts/make_vectorizer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "36654362"
},
{
"name": "Python",
"bytes": "21629"
}
],
"symlink_target": ""
} |
"""Type hinting decorators allowing static or runtime type-checking for the SDK.
This module defines decorators which utilize the type-hints defined in
'type_hints.py' to allow annotation of the types of function arguments and
return values.
Type-hints for functions are annotated using two separate decorators. One is for
type-hinting the types of function arguments, the other for type-hinting the
function return value. Type-hints can either be specified in the form of
positional arguments::
@with_input_types(int, int)
def add(a, b):
return a + b
Keyword arguments::
@with_input_types(a=int, b=int)
def add(a, b):
return a + b
Or even a mix of both::
@with_input_types(int, b=int)
def add(a, b):
return a + b
Example usage for type-hinting arguments only::
@with_input_types(s=str)
def to_lower(a):
return a.lower()
Example usage for type-hinting return values only::
@with_output_types(Tuple[int, bool])
def compress_point(ec_point):
return ec_point.x, ec_point.y < 0
Example usage for type-hinting both arguments and return values::
@with_input_types(a=int)
@with_output_types(str)
def int_to_str(a):
return str(a)
Type-hinting a function with arguments that unpack tuples are also supported. As
an example, such a function would be defined as::
def foo((a, b)):
...
The valid type-hint for such as function looks like the following::
@with_input_types(a=int, b=int)
def foo((a, b)):
...
Notice that we hint the type of each unpacked argument independently, rather
than hinting the type of the tuple as a whole (Tuple[int, int]).
Optionally, type-hints can be type-checked at runtime. To toggle this behavior
this module defines two functions: 'enable_run_time_type_checking' and
'disable_run_time_type_checking'. NOTE: for this toggle behavior to work
properly it must appear at the top of the module where all functions are
defined, or before importing a module containing type-hinted functions.
"""
import inspect
import types
from apache_beam.typehints import native_type_compatibility
from apache_beam.typehints import typehints
from apache_beam.typehints.typehints import CompositeTypeHintError
from apache_beam.typehints.typehints import SimpleTypeHintError
from apache_beam.typehints.typehints import check_constraint
from apache_beam.typehints.typehints import validate_composite_type_param
__all__ = [
'with_input_types',
'with_output_types',
'WithTypeHints',
'TypeCheckError',
]
# This is missing in the builtin types module. str.upper is arbitrary, any
# method on a C-implemented type will do.
# pylint: disable=invalid-name
_MethodDescriptorType = type(str.upper)
# pylint: enable=invalid-name
# Monkeypatch inspect.getargspec to allow passing non-function objects.
# This is needed to use higher-level functions such as getcallargs.
_original_getargspec = inspect.getargspec
def getargspec(func):
  """Version of inspect.getargspec that tolerates non-function callables.

  Falls back to __init__ for classes and __call__ for other callables so
  that higher-level helpers such as inspect.getcallargs keep working.
  """
  try:
    return _original_getargspec(func)
  except TypeError:
    if isinstance(func, type):
      argspec = getargspec(func.__init__)
      # Drop the implicit 'self' parameter of __init__.
      del argspec.args[0]
      return argspec
    elif callable(func):
      try:
        return _original_getargspec(func.__call__)
      except TypeError:
        # Return an ArgSpec with at least one positional argument,
        # and any number of other (positional or keyword) arguments
        # whose name won't match any real argument.
        # Arguments with the %unknown% prefix will be ignored in the type
        # checking code.
        return inspect.ArgSpec(
            ['_'], '%unknown%varargs', '%unknown%keywords', ())
    else:
      raise


# Install the lenient version globally so getcallargs picks it up.
inspect.getargspec = getargspec
class IOTypeHints(object):
  """Bundles the input and output type hints for a Dataflow construct.

  This should primarily be used via the WithTypeHints mixin class, though
  may also be attached to other objects (such as Python functions).
  """
  __slots__ = ('input_types', 'output_types')

  def __init__(self, input_types=None, output_types=None):
    self.input_types = input_types
    self.output_types = output_types

  def set_input_types(self, *args, **kwargs):
    # Hints are stored as an (args, kwargs) pair mirroring a call signature.
    self.input_types = (args, kwargs)

  def set_output_types(self, *args, **kwargs):
    self.output_types = (args, kwargs)

  def simple_output_type(self, context):
    """Return the single output hint, or None if no output hints are set."""
    if self.output_types:
      positional, keywords = self.output_types
      if keywords or len(positional) != 1:
        raise TypeError('Expected simple output type hint for %s' % context)
      return positional[0]

  def copy(self):
    return IOTypeHints(self.input_types, self.output_types)

  def with_defaults(self, hints):
    """Merge with another IOTypeHints, preferring hints already set here."""
    if not hints:
      return self
    if not self:
      return hints
    return IOTypeHints(self.input_types or hints.input_types,
                       self.output_types or hints.output_types)

  def __nonzero__(self):
    # Python 2 truthiness: the object is truthy iff any hints are set.
    return bool(self.input_types or self.output_types)

  def __repr__(self):
    return 'IOTypeHints[inputs=%s, outputs=%s]' % (
        self.input_types, self.output_types)
class WithTypeHints(object):
  """A mixin class that provides the ability to set and retrieve type hints.
  """

  def __init__(self, *unused_args, **unused_kwargs):
    self._type_hints = IOTypeHints()

  def _get_or_create_type_hints(self):
    # __init__ may never have run (e.g. unusual construction paths), so
    # lazily create the hint container on first access.
    try:
      return self._type_hints
    except AttributeError:
      self._type_hints = IOTypeHints()
      return self._type_hints

  def get_type_hints(self):
    # Instance hints win, then per-instance defaults, then class hints.
    own = self._get_or_create_type_hints()
    return (own
            .with_defaults(self.default_type_hints())
            .with_defaults(get_type_hints(self.__class__)))

  def default_type_hints(self):
    return None

  def with_input_types(self, *arg_hints, **kwarg_hints):
    self._get_or_create_type_hints().set_input_types(*arg_hints, **kwarg_hints)
    return self

  def with_output_types(self, *arg_hints, **kwarg_hints):
    self._get_or_create_type_hints().set_output_types(*arg_hints, **kwarg_hints)
    return self
class TypeCheckError(Exception):
  """Raised when a value or signature violates a declared type hint."""
  pass
def _positional_arg_hints(arg, hints):
  """Returns the type of a (possibly tuple-packed) positional argument.

  E.g. for lambda ((a, b), c): None the single positional argument is (as
  returned by inspect) [[a, b], c] which should have type
  Tuple[Tuple[Int, Any], float] when applied to the type hints
  {a: int, b: Any, c: float}.
  """
  if not isinstance(arg, list):
    return hints.get(arg, typehints.Any)
  # A tuple-unpacked argument: recurse over each nested sub-argument.
  member_hints = [_positional_arg_hints(sub, hints) for sub in arg]
  return typehints.Tuple[member_hints]
def _unpack_positional_arg_hints(arg, hint):
  """Unpacks the given hint according to the nested structure of arg.

  For example, if arg is [[a, b], c] and hint is Tuple[Any, int], then
  this function would return ((Any, Any), int) so it can be used in conjunction
  with inspect.getcallargs.
  """
  if isinstance(arg, list):
    # The argument is tuple-unpacked: the hint must at least be consistent
    # with a tuple of the same arity.
    tuple_constraint = typehints.Tuple[[typehints.Any] * len(arg)]
    if not typehints.is_consistent_with(hint, tuple_constraint):
      raise TypeCheckError('Bad tuple arguments for %s: expected %s, got %s' %
                           (arg, tuple_constraint, hint))
    if isinstance(hint, typehints.TupleConstraint):
      return tuple(_unpack_positional_arg_hints(a, t)
                   for a, t in zip(arg, hint.tuple_types))
    # Hint was consistent but not an explicit tuple (e.g. Any): treat every
    # unpacked element as Any.
    return (typehints.Any,) * len(arg)
  return hint
def getcallargs_forhints(func, *typeargs, **typekwargs):
  """Like inspect.getcallargs, but understands that Tuple[] and an Any unpack.
  """
  argspec = inspect.getargspec(func)
  # Turn Tuple[x, y] into (x, y) so getcallargs can do the proper unpacking.
  packed_typeargs = [_unpack_positional_arg_hints(arg, hint)
                     for (arg, hint) in zip(argspec.args, typeargs)]
  # Keep any extra positional hints beyond the named arguments (varargs).
  packed_typeargs += list(typeargs[len(packed_typeargs):])
  try:
    callargs = inspect.getcallargs(func, *packed_typeargs, **typekwargs)
  except TypeError as e:
    raise TypeCheckError(e)
  if argspec.defaults:
    # Declare any default arguments to be Any.
    for k, var in enumerate(reversed(argspec.args)):
      if k >= len(argspec.defaults):
        break
      # Only arguments still bound to their default (i.e. not hinted
      # explicitly) are widened to Any.
      if callargs.get(var, None) is argspec.defaults[-k-1]:
        callargs[var] = typehints.Any
  # Patch up varargs and keywords
  if argspec.varargs:
    callargs[argspec.varargs] = typekwargs.get(
        argspec.varargs, typehints.Tuple[typehints.Any, ...])
  if argspec.keywords:
    # TODO(robertwb): Consider taking the union of key and value types.
    callargs[argspec.keywords] = typekwargs.get(
        argspec.keywords, typehints.Dict[typehints.Any, typehints.Any])
  return callargs
def get_type_hints(fn):
  """Gets the type hint associated with an arbitrary object fn.

  Always returns a valid IOTypeHints object, creating one if necessary.
  """
  # pylint: disable=protected-access
  if not hasattr(fn, '_type_hints'):
    try:
      fn._type_hints = IOTypeHints()
    except (AttributeError, TypeError):
      # Can't add arbitrary attributes to this object,
      # but might have some restrictions anyways...
      hints = IOTypeHints()
      if isinstance(fn, _MethodDescriptorType):
        # Built-in method descriptors (e.g. str.upper) at least know the
        # type of their first argument.
        hints.set_input_types(fn.__objclass__)
      return hints
  return fn._type_hints
  # pylint: enable=protected-access
def with_input_types(*positional_hints, **keyword_hints):
  """A decorator that type-checks defined type-hints with passed func arguments.

  All type-hinted arguments can be specified using positional arguments,
  keyword arguments, or a mix of both. Additionally, all function arguments
  must be type-hinted in totality if even one parameter is type-hinted.

  Once fully decorated, if the arguments passed to the resulting function
  violate the type-hint constraints defined, a :class:`TypeCheckError`
  detailing the error will be raised.

  To be used as:

  .. testcode::

    from apache_beam.typehints import with_input_types

    @with_input_types(str)
    def upper(s):
      return s.upper()

  Or:

  .. testcode::

    from apache_beam.typehints import with_input_types
    from apache_beam.typehints import List
    from apache_beam.typehints import Tuple

    @with_input_types(ls=List[Tuple[int, int]])
    def increment(ls):
      [(i + 1, j + 1) for (i,j) in ls]

  Args:
    *positional_hints: Positional type-hints having identical order as the
      function's formal arguments. Values for this argument must either be a
      built-in Python type or an instance of a
      :class:`~apache_beam.typehints.typehints.TypeConstraint` created by
      'indexing' a
      :class:`~apache_beam.typehints.typehints.CompositeTypeHint` instance
      with a type parameter.
    **keyword_hints: Keyword arguments mirroring the names of the parameters to
      the decorated functions. The value of each keyword argument must either
      be one of the allowed built-in Python types, a custom class, or an
      instance of a :class:`~apache_beam.typehints.typehints.TypeConstraint`
      created by 'indexing' a
      :class:`~apache_beam.typehints.typehints.CompositeTypeHint` instance
      with a type parameter.

  Raises:
    :class:`~exceptions.ValueError`: If not all function arguments have
      corresponding type-hints specified. Or if the inner wrapper function
      isn't passed a function object.
    :class:`TypeCheckError`: If the any of the passed type-hint
      constraints are not a type or
      :class:`~apache_beam.typehints.typehints.TypeConstraint` instance.

  Returns:
    The original function decorated such that it enforces type-hint constraints
    for all received function arguments.
  """
  # Normalize native Python typing hints (e.g. typing.List) to Beam's
  # internal constraint objects before storing them.
  converted_positional_hints = (
      native_type_compatibility.convert_to_beam_types(positional_hints))
  converted_keyword_hints = (
      native_type_compatibility.convert_to_beam_types(keyword_hints))
  del positional_hints
  del keyword_hints

  def annotate(f):
    if isinstance(f, types.FunctionType):
      for t in (list(converted_positional_hints) +
                list(converted_keyword_hints.values())):
        validate_composite_type_param(
            t, error_msg_prefix='All type hint arguments')

    get_type_hints(f).set_input_types(*converted_positional_hints,
                                      **converted_keyword_hints)
    return f
  return annotate
def with_output_types(*return_type_hint, **kwargs):
    """Decorator that declares and enforces a return type-hint.

    Exactly one hint is accepted; a function returning several values
    should declare them with a single ``Tuple[type_1, type_2]`` hint.
    Once decorated, a return value violating the declared constraint
    raises a :class:`TypeCheckError` detailing the violation.

    Args:
      *return_type_hint: A single built-in Python type or a
        :class:`~apache_beam.typehints.typehints.TypeConstraint` created by
        'indexing' a
        :class:`~apache_beam.typehints.typehints.CompositeTypeHint`.
      **kwargs: Not used; passing any keyword argument is an error.

    Raises:
      :class:`~exceptions.ValueError`: If any keyword argument is passed,
        if the number of positional arguments differs from one, or if the
        inner wrapper function isn't passed a function object.
      :class:`TypeCheckError`: If the hint is an invalid type-hint.

    Returns:
      The original function, annotated so that the declared output
      constraint is enforced on every return value.
    """
    if kwargs:
        raise ValueError("All arguments for the 'returns' decorator must be "
                         "positional arguments.")
    if len(return_type_hint) != 1:
        raise ValueError("'returns' accepts only a single positional argument. In "
                         "order to specify multiple return types, use the 'Tuple' "
                         "type-hint.")

    beam_hint = native_type_compatibility.convert_to_beam_type(
        return_type_hint[0])
    validate_composite_type_param(
        beam_hint, error_msg_prefix='All type hint arguments')

    def annotate(f):
        get_type_hints(f).set_output_types(beam_hint)
        return f

    return annotate
def _check_instance_type(
    type_constraint, instance, var_name=None, verbose=False):
    """Report type-hint constraint violations for a single value.

    Args:
      type_constraint: An instance of a 'TypeConstraint' or a built-in
        Python type.
      instance: The candidate object checked against 'type_constraint'.
      var_name: The parameter name from the original function definition
        when 'instance' is an argument; None means a return value is being
        checked.
      verbose: If True, include the offending value itself in the error
        message.

    Raises:
      TypeCheckError: If 'instance' fails to meet the type-constraint of
        'type_constraint'.
    """
    if var_name is None:
        hint_type = 'return type'
    else:
        hint_type = "argument: '%s'" % var_name

    try:
        check_constraint(type_constraint, instance)
    except SimpleTypeHintError:
        shown_value = '%s, ' % instance if verbose else ''
        raise TypeCheckError('Type-hint for %s violated. Expected an '
                             'instance of %s, instead found %san instance of %s.'
                             % (hint_type, type_constraint,
                                shown_value, type(instance)))
    except CompositeTypeHintError as e:
        raise TypeCheckError('Type-hint for %s violated: %s' % (hint_type, e))
def _interleave_type_check(type_constraint, var_name=None):
    """Lazily type-check values produced by a generator.

    Usable as a decorator or applied manually in a curried manner:

      * @_interleave_type_check(List[int])
        def gen():
          yield 5

      * gen = _interleave_type_check(Tuple[int, int], 'coord_gen')(gen)

    Checking happens at each 'yield', so the generator never has to be
    depleted up front just to validate its contents.

    Args:
      type_constraint: An instance of a TypeConstraint applied to every
        value yielded by the wrapped generator.
      var_name: The variable name bound to the generator when
        type-checking a function argument; used solely in error messages.

    Returns:
      A function taking a generator and returning a wrapped generator that
      type-checks each yielded value. An already-wrapped generator is
      returned unchanged to avoid nested wrapping.
    """
    def wrap(gen):
        if isinstance(gen, GeneratorWrapper):
            return gen

        def check_value(value):
            _check_instance_type(type_constraint, value, var_name)

        return GeneratorWrapper(gen, check_value)

    return wrap
class GeneratorWrapper(object):
    """A wrapper around a generator that runs a callback for every yield.

    Wrapping also makes it possible to attach arbitrary attributes to a
    generator object, just as one can with a function object.

    Attributes:
      internal_gen: The wrapped generator instance. Each value it yields is
        passed to 'interleave_func' before being handed to the consumer.
      interleave_func: A callback accepting a single argument, invoked with
        every value yielded by 'internal_gen'.
    """

    def __init__(self, gen, interleave_func):
        self.internal_gen = gen
        self.interleave_func = interleave_func

    def __getattr__(self, attr):
        # TODO(laolu): May also want to intercept 'send' in the future if we
        # move to a GeneratorHint with 3 type-params:
        #   * Generator[send_type, return_type, yield_type]
        # Everything we don't define ourselves (e.g. 'throw', 'close') is
        # delegated to the underlying generator. The previous version special
        # cased '__next__'/'__iter__' here, which recursed infinitely for
        # '__next__' (the class never defined it) — both are now real methods,
        # so this is plain delegation.
        return getattr(self.internal_gen, attr)

    def __iter__(self):
        # Returning self (instead of a nested generator looping on
        # next(self.internal_gen)) avoids a StopIteration escaping inside a
        # generator function, which is a RuntimeError under PEP 479
        # (Python 3.7+).
        return self

    def __next__(self):
        value = next(self.internal_gen)
        self.interleave_func(value)
        return value

    # Python 2-style iteration protocol, kept for backward compatibility.
    next = __next__
| {
"content_hash": "b099601fce7c850d91881bb1aca79ae9",
"timestamp": "",
"source": "github",
"line_count": 562,
"max_line_length": 80,
"avg_line_length": 33.45195729537367,
"alnum_prop": 0.6912765957446808,
"repo_name": "shakamunyi/beam",
"id": "89dc6afa34c80d1e4f1b2bc847d2c524c3b0eb48",
"size": "19585",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "sdks/python/apache_beam/typehints/decorators.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "146134"
},
{
"name": "Groovy",
"bytes": "3274"
},
{
"name": "Java",
"bytes": "34540397"
},
{
"name": "Protocol Buffer",
"bytes": "137365"
},
{
"name": "Python",
"bytes": "4248665"
},
{
"name": "Shell",
"bytes": "53893"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import re
from sentry.stacktraces.platform import get_behavior_family_for_platform
from sentry.utils.safe import setdefault_path
_windecl_hash = re.compile(r"^@?(.*?)@[0-9]+$")
_rust_hash = re.compile(r"::h[a-z0-9]{16}$")
_cpp_trailer_re = re.compile(r"(\bconst\b|&)$")
_rust_blanket_re = re.compile(r"^([A-Z] as )")
_lambda_re = re.compile(
r"""(?x)
# gcc
(?:
\{
lambda\(.*?\)\#\d+
\}
) |
# msvc
(?:
\blambda_[a-f0-9]{32}\b
) |
# clang
(?:
\$_\d+\b
)
"""
)
_anon_namespace_re = re.compile(
r"""(?x)
\?A0x[a-f0-9]{8}::
"""
)
PAIRS = {"(": ")", "{": "}", "[": "]", "<": ">"}
def replace_enclosed_string(s, start, end, replacement=None):
    """Strip or substitute the outermost ``start``...``end`` spans in ``s``.

    With ``replacement=None`` each top-level bracketed span (delimiters
    included) is removed. A string replacement is inserted verbatim in its
    place; a callable is invoked as ``replacement(inner_text, open_index)``
    and its result inserted. Nesting is honored: only the outermost spans
    are replaced.
    """
    if start not in s:
        return s

    pieces = []
    depth = 0
    opened_at = None
    for pos, ch in enumerate(s):
        if ch == start:
            if depth == 0:
                opened_at = pos
            depth += 1
        elif ch == end:
            depth -= 1
            if depth == 0 and replacement is not None:
                if callable(replacement):
                    pieces.append(replacement(s[opened_at + 1 : pos], opened_at))
                else:
                    pieces.append(replacement)
        elif depth == 0:
            pieces.append(ch)
    return "".join(pieces)
def split_func_tokens(s):
    """Split ``s`` on top-level whitespace, keeping bracketed runs intact.

    Whitespace inside any of the PAIRS brackets — (), {}, [] and <> — does
    not split; a bracketed run is glued onto the token it belongs to.
    """
    current_parts = []
    tokens = []
    expected_closers = []
    segment_start = 0

    for pos, ch in enumerate(s):
        if ch in PAIRS:
            expected_closers.append(PAIRS[ch])
        elif expected_closers and ch == expected_closers[-1]:
            expected_closers.pop()
            if not expected_closers:
                # The whole bracketed run becomes one piece of the token.
                current_parts.append(s[segment_start : pos + 1])
                segment_start = pos + 1
        elif not expected_closers:
            if ch.isspace():
                if current_parts:
                    tokens.append(current_parts)
                current_parts = []
            else:
                current_parts.append(s[segment_start : pos + 1])
            segment_start = pos + 1

    if current_parts:
        tokens.append(current_parts)

    return ["".join(parts) for parts in tokens]
def trim_function_name(function, platform, normalize_lambdas=True):
    """Return a trimmed display name for a frame's ``function`` value.

    Used to populate `function_name` when the client did not supply a value
    itself. C# frames get their argument list stripped; native-family
    platforms get the full native trimming; everything else is returned
    unchanged.
    """
    if platform == "csharp":
        return trim_csharp_function_name(function)
    family = get_behavior_family_for_platform(platform)
    if family == "native":
        return trim_native_function_name(function, normalize_lambdas=normalize_lambdas)
    return function
def trim_csharp_function_name(function):
    """Strip the signature from a C# frame's function value.

    Unity emits frames without a return value and with a space before the
    parenthesized argument list (e.g. ``Type.Method (System.String s)``),
    so everything from the first " (" onward can be dropped.

    Note that Unity and the main .NET SDK disagree about function names:
    the Unity SDK emits the entire function (module included) in
    `function`, similar to native, while the .NET SDK emits the parts in
    individual frame fields.
    """
    head, _sep, _tail = function.partition(" (")
    return head
def trim_native_function_name(function, normalize_lambdas=True):
    """Trim a native (C/C++/Swift/Rust/ObjC) symbol down to a bare name.

    Strips argument lists, C++ trailers (const/&), generic parameters and
    compiler hash suffixes while keeping operator names, lambdas and
    anonymous-namespace markers readable. Works by substituting Unicode
    placeholder characters for constructs that would confuse the bracket
    balancing, tokenizing, then undoing the substitutions on the winning
    token.
    """
    if function in ("<redacted>", "<unknown>"):
        return function

    original_function = function
    function = function.strip()

    # Ensure we don't operate on objc functions
    if function.startswith(("[", "+[", "-[")):
        return function

    # Chop off C++ trailers
    while True:
        match = _cpp_trailer_re.search(function)
        if match is None:
            break
        function = function[: match.start()].rstrip()

    # Because operator<< really screws with our balancing, so let's work
    # around that by replacing it with a character we do not observe in
    # `split_func_tokens` or `replace_enclosed_string`.
    function = (
        function.replace("operator<<", u"operator⟨⟨")
        .replace("operator<", u"operator⟨")
        .replace("operator()", u"operator◯")
        .replace(" -> ", u" ⟿ ")
        .replace("`anonymous namespace'", u"〔anonymousnamespace〕")
    )

    # normalize C++ lambdas. This is necessary because different
    # compilers use different rules for now to name a lambda and they are
    # all quite inconsistent. This does not give us perfect answers to
    # this problem but closer. In particular msvc will call a lambda
    # something like `lambda_deadbeefeefffeeffeeff` whereas clang for
    # instance will name it `main::$_0` which will tell us in which outer
    # function it was declared.
    if normalize_lambdas:
        function = _lambda_re.sub("lambda", function)

    # Normalize MSVC anonymous namespaces from inline functions. For inline
    # functions, the compiler inconsistently renders anonymous namespaces with
    # their hash. For regular functions, "`anonymous namespace'" is used.
    # The regular expression matches the trailing "::" to avoid accidental
    # replacement in mangled function names.
    if normalize_lambdas:
        function = _anon_namespace_re.sub(u"〔anonymousnamespace〕::", function)

    # Remove the arguments if there is one.
    def process_args(value, start):
        value = value.strip()
        if value in ("anonymous namespace", "operator"):
            # These parenthesized spans are part of the name itself, not a
            # parameter list — keep them.
            return "(%s)" % value
        return ""

    function = replace_enclosed_string(function, "(", ")", process_args)

    # Resolve generic types, but special case rust which uses things like
    # <Foo as Bar>::baz to denote traits.
    def process_generics(value, start):
        # Special case for lambdas
        if value == "lambda" or _lambda_re.match(value):
            return "<%s>" % value

        if start > 0:
            # Generics that are not at the very start of the symbol collapse
            # to a plain placeholder.
            return "<T>"

        # Rust special cases
        value = _rust_blanket_re.sub("", value)  # prefer trait for blanket impls
        value = replace_enclosed_string(value, "<", ">", process_generics)
        return value.split(" as ", 1)[0]

    function = replace_enclosed_string(function, "<", ">", process_generics)

    tokens = split_func_tokens(function)

    # MSVC demangles generic operator functions with a space between the
    # function name and the generics. Ensure that those two components both end
    # up in the function name.
    if len(tokens) > 1 and tokens[-1] == "<T>":
        tokens.pop()
        tokens[-1] += " <T>"

    # find the token which is the function name. Since we chopped of C++
    # trailers there are only two cases we care about: the token left to
    # the -> return marker which is for instance used in Swift and if that
    # is not found, the last token in the last.
    #
    # ["unsigned", "int", "whatever"] -> whatever
    # ["@objc", "whatever", "->", "int"] -> whatever
    try:
        func_token = tokens[tokens.index(u"⟿") - 1]
    except ValueError:
        if tokens:
            func_token = tokens[-1]
        else:
            func_token = None

    if func_token:
        # Undo the placeholder substitutions performed above.
        function = (
            func_token.replace(u"⟨", "<")
            .replace(u"◯", "()")
            .replace(u" ⟿ ", " -> ")
            .replace(u"〔anonymousnamespace〕", "`anonymous namespace'")
        )

    # This really should never happen
    else:
        function = original_function

    # trim off rust markers
    function = _rust_hash.sub("", function)

    # trim off windows decl markers
    return _windecl_hash.sub("\\1", function)
def get_function_name_for_frame(frame, platform=None):
    """Return the actual, trimmed function name for a frame.

    Accepts either a frame object (anything exposing ``get_raw_data``) or a
    plain frame dictionary.
    """
    if hasattr(frame, "get_raw_data"):
        frame = frame.get_raw_data()

    # A client-supplied raw_function means `function` is already the
    # preferred display name; prioritize it unchanged.
    if frame.get("raw_function"):
        return frame.get("function")

    # otherwise trim the function on demand
    func = frame.get("function")
    if not func:
        return None
    return trim_function_name(func, frame.get("platform") or platform)
def set_in_app(frame, value):
    """Set ``frame["in_app"]``, remembering the value it replaced.

    No-op when the flag already equals ``value``; otherwise the prior value
    (encoded as an int, -1 for "unset") is stashed under
    ``frame["data"]["orig_in_app"]`` before being overwritten.
    """
    previous = frame.get("in_app")
    if previous == value:
        return
    encoded_previous = -1 if previous is None else int(previous)
    setdefault_path(frame, "data", "orig_in_app", value=encoded_previous)
    frame["in_app"] = value
| {
"content_hash": "e563d2b411a2f6230c3a1eed87c11fa4",
"timestamp": "",
"source": "github",
"line_count": 262,
"max_line_length": 87,
"avg_line_length": 31.37786259541985,
"alnum_prop": 0.5891010825933585,
"repo_name": "beeftornado/sentry",
"id": "a90228d0d6e5c62a9c07aab22f65674d19f841f9",
"size": "8267",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sentry/stacktraces/functions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "157195"
},
{
"name": "HTML",
"bytes": "197026"
},
{
"name": "JavaScript",
"bytes": "380379"
},
{
"name": "Makefile",
"bytes": "2832"
},
{
"name": "Python",
"bytes": "6473603"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
    """Build the shared droideka base creature template object."""
    droid = Creature()
    droid.template = "object/creature/npc/droid/shared_droideka_base.iff"
    droid.attribute_template_id = 3
    droid.stfName("droid_name", "droideka_base")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return droid
"content_hash": "6b6541387ae4430eaa08445ba4068550",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 71,
"avg_line_length": 23.46153846153846,
"alnum_prop": 0.6983606557377049,
"repo_name": "anhstudios/swganh",
"id": "91cea1b492afdb8b53af920e6a0b0978b6a66862",
"size": "450",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/creature/npc/droid/shared_droideka_base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
} |
"""
CcPy daemon configuration file parser
"""
import xml.etree.ElementTree as ET
import logging
from .common import LoggerName
DefCcPydConfigFileName = "/etc/ccpyd.conf"
Logger = logging.getLogger(LoggerName)
class ParseError(Exception):
    """Raised when the ccpyd configuration file cannot be read or parsed."""
    pass
def _get_elem_value(element, default_value):
    """Return ``element.text``, or ``default_value`` when ``element`` is None."""
    return default_value if element is None else element.text
def parse(aCcPydConfigFileName=DefCcPydConfigFileName):
    """Parse CcPy daemon configuration file.

    Return a dictionary of config settings (see README for more info).
    The dictionary always contains 'ccpyConfig' and 'logging'; when logging
    is enabled it also contains 'logFile' and 'logLevel'.
    Throw ParseError on any read or parse failure.
    """
    try:
        Logger.debug("Reading ccpyd configuration from %s..." % aCcPydConfigFileName)
        tree = ET.parse(aCcPydConfigFileName)
        root = tree.getroot()
        if root.tag != 'ccpyd':
            # Wrapped into ParseError (with the file name) by the handler below.
            raise ValueError('Invalid root tag name: ' + root.tag)
        ccpyConfig = _get_elem_value(root.find('./ccpyConfig'), "/etc/ccpy.conf")
        loggingElem = root.find('./logging')
        if loggingElem is not None and loggingElem.attrib['enabled'].lower() in (
                'on',
                'yes',
                'true'):
            logFile = _get_elem_value(loggingElem.find('./file'), "/var/log/ccpyd.log")
            logLevel = _get_elem_value(loggingElem.find('./level'), "DEBUG")
            return {'ccpyConfig': ccpyConfig,
                    'logging': True,
                    'logFile': logFile,
                    'logLevel': logLevel}
        return {'ccpyConfig': ccpyConfig,
                'logging': False}
    # Was `except BaseException`, which also converted KeyboardInterrupt and
    # SystemExit into ParseError; catch only genuine errors.
    except Exception as e:
        raise ParseError("Failed to parse %s. %s" % (aCcPydConfigFileName, str(e)))
| {
"content_hash": "7fc070b0fd735d4bd2d77d51c352c659",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 87,
"avg_line_length": 30.196428571428573,
"alnum_prop": 0.6049674748669427,
"repo_name": "kindkaktus/CcPy",
"id": "66e9ea828922eeb29b4b2a21b856d4396bcfbf06",
"size": "1995",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ccpy/ccpydconfparser.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "3772"
},
{
"name": "Python",
"bytes": "113441"
},
{
"name": "Roff",
"bytes": "3716"
},
{
"name": "Shell",
"bytes": "4464"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.